From d46d6913b93a1e4f435dcdf89f3d79505e0ef494 Mon Sep 17 00:00:00 2001 From: Denise Worthen Date: Wed, 22 Apr 2020 14:48:08 -0400 Subject: [PATCH 01/90] merge latest master (#4) * Isotopes for CICE (#423) Co-authored-by: apcraig Co-authored-by: David Bailey Co-authored-by: Elizabeth Hunke --- .travis.yml | 4 +- LICENSE.pdf | Bin 59402 -> 92397 bytes README.md | 1 + cice.setup | 160 +++- .../cicedynB/analysis/ice_diagnostics.F90 | 124 +++- .../cicedynB/analysis/ice_history_bgc.F90 | 119 ++- .../dynamics/ice_transport_driver.F90 | 15 +- cicecore/cicedynB/general/ice_flux.F90 | 37 +- cicecore/cicedynB/general/ice_flux_bgc.F90 | 31 +- cicecore/cicedynB/general/ice_forcing.F90 | 20 +- cicecore/cicedynB/general/ice_forcing_bgc.F90 | 26 +- cicecore/cicedynB/general/ice_init.F90 | 63 +- cicecore/cicedynB/general/ice_step_mod.F90 | 70 +- .../infrastructure/comm/mpi/ice_boundary.F90 | 14 +- .../cicedynB/infrastructure/ice_blocks.F90 | 14 +- .../cicedynB/infrastructure/ice_domain.F90 | 1 + .../cicedynB/infrastructure/ice_restoring.F90 | 4 +- .../io/io_binary/ice_restart.F90 | 56 +- .../io/io_netcdf/ice_restart.F90 | 18 +- .../infrastructure/io/io_pio/ice_restart.F90 | 14 +- .../infrastructure/io/io_pio2/ice_restart.F90 | 14 +- cicecore/drivers/mct/cesm1/CICE_InitMod.F90 | 37 +- cicecore/drivers/mct/cesm1/CICE_RunMod.F90 | 31 +- .../drivers/mct/cesm1/CICE_RunMod.F90_debug | 696 ------------------ cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 | 42 +- cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 | 31 +- cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 | 46 +- cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 | 32 +- .../drivers/nuopc/dmi/CICE_RunMod.F90_debug | 686 ----------------- .../drivers/standalone/cice/CICE_InitMod.F90 | 37 +- .../drivers/standalone/cice/CICE_RunMod.F90 | 31 +- .../standalone/cice/CICE_RunMod.F90_debug | 40 +- cicecore/shared/ice_distribution.F90 | 32 +- cicecore/shared/ice_domain_size.F90 | 7 +- cicecore/shared/ice_fileunits.F90 | 8 +- cicecore/shared/ice_init_column.F90 | 39 +- cicecore/shared/ice_restart_column.F90 | 85 +++ codecov.yml | 6 + configuration/scripts/cice.batch.csh | 2 +- configuration/scripts/cice.build | 2 +- configuration/scripts/cice.settings | 1 + configuration/scripts/cice.test.setup.csh | 7 + configuration/scripts/ice_in | 9 +- .../scripts/machines/Macros.gaffney_gnu | 20 +- .../scripts/machines/Macros.gordon_gnu | 20 +- .../scripts/machines/Macros.izumi_gnu | 16 +- .../scripts/machines/Macros.onyx_gnu | 20 +- .../scripts/machines/Macros.travisCI_gnu | 20 +- .../scripts/machines/env.badger_intel | 3 +- configuration/scripts/options/set_nml.isotope | 2 + configuration/scripts/tests/base_suite.ts | 3 + configuration/scripts/tests/nothread_suite.ts | 12 + .../scripts/tests/report_results.csh | 33 +- .../scripts/tests/test_logbfb.script | 5 - .../scripts/tests/test_restart.script | 10 - configuration/scripts/tests/test_smoke.script | 5 - doc/source/developer_guide/dg_other.rst | 4 +- doc/source/science_guide/sg_tracers.rst | 4 +- doc/source/user_guide/ug_case_settings.rst | 4 + doc/source/user_guide/ug_testing.rst | 58 +- icepack | 2 +- 61 files changed, 1259 insertions(+), 1694 deletions(-) delete mode 100644 cicecore/drivers/mct/cesm1/CICE_RunMod.F90_debug delete mode 100644 cicecore/drivers/nuopc/dmi/CICE_RunMod.F90_debug create mode 100644 codecov.yml create mode 100644 configuration/scripts/options/set_nml.isotope diff --git a/.travis.yml b/.travis.yml index bb8dd37b4..f8f5aeadc 100644 --- a/.travis.yml +++ b/.travis.yml @@ -33,9 +33,9 @@ install: script: # verify 
cice.setup --case and cice.setup --test don't error then run test suite
-  - "./cice.setup --case trcase --mach travisCI --env gnu --pes 2x2 -s diag1"
+  - "./cice.setup --case trcase --mach travisCI --env gnu --pes 2x2 -s diag1 && sleep 4"
   - "./cice.setup --test smoke --testid trtest --mach travisCI --env gnu
-     --pes 2x2 -s diag1"
+     --pes 2x2 -s diag1 && sleep 4"
   - "./cice.setup --suite travis_suite --testid travisCItest
     --mach travisCI --env gnu;
     cd testsuite.travisCItest &&
diff --git a/LICENSE.pdf b/LICENSE.pdf
index 5be71376b88f9119eb9f3e5df50222734d8d0185..da37344cf0234bb32481f59a2268ef9ae91516cd 100644
Binary files a/LICENSE.pdf and b/LICENSE.pdf differ
diff --git a/cice.setup b/cice.setup
@@ ... @@
 1) then
   exit -1
 endif
+if ($codecov == 1 && $report == 1) then
+  echo "${0}: ERROR in arguments, not recommended to set both --codecov and --report"
+  exit -1
+endif
+
+if ($codecov == 1 && "$compilers" != "gnu") then
+  echo "${0}: ERROR in arguments, must use --env gnu with --codecov"
+  exit -1
+endif
+
+if ($codecov == 1 && `where curl` == "" && `where wget` == "") then
+  echo "${0}: ERROR 'curl' or 'wget' is required for --codecov"
+  exit -1
+endif
+
 if (${dosuite} == 0) then
+  if ($report == 1) then
+    echo "${0}: ERROR in arguments, must use --suite with --report"
+    exit -1
+  endif
+  if ($codecov == 1) then
+    echo "${0}: ERROR in arguments, must use --suite with --codecov"
+    exit -1
+  endif
   if ("$compilers" =~ "*,*") then
     echo "${0}: ERROR in arguments, cannot set multiple compilers without --suite"
     exit -1
@@ -355,7 +393,7 @@ endif
 if ( ${tdir} != ${spval} ) then
   set tsdir = ${tdir}
 endif
-if (-e $tsfile) then
+if (-e ${tsfile}) then
   echo "${0}: ERROR in tsfile, this should never happen"
   exit -1
 endif
@@ -416,11 +454,15 @@ else
 set nonomatch && rm -f
ciceexe.* && unset nonomatch set dobuild = true +set doreuse = true set dorun = false set dosubmit = true if (\$?SUITE_BUILD) then set dobuild = "\${SUITE_BUILD}" endif +if (\$?SUITE_REUSEBUILD) then + set doreuse = "\${SUITE_REUSEBUILD}" +endif if (\$?SUITE_RUN) then set dorun = "\${SUITE_RUN}" endif @@ -429,6 +471,7 @@ if (\$?SUITE_SUBMIT) then endif echo \${0}: dobuild = \${dobuild} +echo \${0}: doreuse = \${doreuse} echo \${0}: dorun = \${dorun} echo \${0}: dosubmit = \${dosubmit} @@ -449,16 +492,30 @@ echo "#hash = ${hash}" >> results.log echo "#hshs = ${shhash}" >> results.log echo "#hshu = ${hashuser}" >> results.log echo "#hshd = ${hashdate}" >> results.log +echo "#suit = ${testsuite}" >> results.log echo "#date = ${cdate}" >> results.log echo "#time = ${ctime}" >> results.log echo "#mach = ${machine}" >> results.log echo "#user = ${user}" >> results.log echo "#vers = ${vers}" >> results.log echo "#------- " >> results.log +EOF0 + +cat >! ${tsdir}/report_codecov.csh << EOF0 +#!/bin/csh -f + +#setenv CODECOV_TOKEN "1d09241f-ed9e-47d8-847c-038bab024b53" # consortium cice +#setenv CODECOV_TOKEN "f3236008-0b92-4707-9ad5-ad906f5d2ba7" # apcraig cice +setenv CODECOV_TOKEN "0dcc6066-fdce-47b6-b84a-c55e2a0af4c0" # apcraig test_cice_icepack +set report_name = "${shhash}:${branch}:${machine} ${testsuite}" + +set use_curl = 1 + EOF0 chmod +x ${tsdir}/suite.submit chmod +x ${tsdir}/results.csh + chmod +x ${tsdir}/report_codecov.csh endif @@ -734,6 +791,8 @@ EOF endif endif + set rundir = ${ICE_MACHINE_WKDIR}/${casename} + #------------------------------------------------------------ # Compute a default blocksize @@ -755,6 +814,7 @@ EOF echo "ICE_CASEDIR = ${casedir}" echo "ICE_MACHINE = ${machine}" echo "ICE_COMPILER = ${compiler}" + echo "ICE_RUNDIR = ${rundir}" echo "ICE_PES = ${task}x${thrd}" echo "ICE_GRID = ${grid} (${ICE_DECOMP_NXGLOB}x${ICE_DECOMP_NYGLOB}) blocksize=${ICE_DECOMP_BLCKX}x${ICE_DECOMP_BLCKY}x${ICE_DECOMP_MXBLCKS}" echo "ICE_DECOMP = ${ICE_DECOMP_DECOMP} ${ICE_DECOMP_DSHAPE}" @@ -809,7 +869,7 @@ setenv ICE_CASEDIR ${casedir} setenv ICE_MACHINE ${machine} setenv ICE_COMPILER ${compiler} setenv ICE_MACHCOMP ${machcomp} -setenv ICE_RUNDIR ${ICE_MACHINE_WKDIR}/${casename} +setenv ICE_RUNDIR ${rundir} setenv ICE_GRID ${grid} #setenv ICE_NXGLOB ${ICE_DECOMP_NXGLOB} # moved to namelist #setenv ICE_NYGLOB ${ICE_DECOMP_NYGLOB} # moved to namelist @@ -829,6 +889,7 @@ setenv ICE_TESTID ${testid} setenv ICE_BFBCOMP ${fbfbcomp} setenv ICE_ACCOUNT ${acct} setenv ICE_QUEUE ${queue} +setenv ICE_CODECOV ${codecovflag} EOF1 if (${sets} != "") then @@ -941,9 +1002,6 @@ EOF2 exit -1 endif -# # Initial test_output file -# echo "#---" >! 
test_output
-# echo "PEND ${testname_noid} " >> test_output
 endif

 #------------------------------------------------------------
@@ -955,6 +1013,12 @@ EOF2
   cat >> ${tsdir}/results.csh << EOF
cat ${testname_base}/test_output >> results.log
+EOF
+
+  cat >> ${tsdir}/report_codecov.csh << EOF
+mkdir ${testname_base}/codecov_output
+cp ${rundir}/compile/*.{gcno,gcda} ${testname_base}/codecov_output/
+
 EOF

   cat >> ${tsdir}/suite.submit << EOF
@@ -964,9 +1028,13 @@
 echo "${testname_base}"
 cd ${testname_base}
 source ./cice.settings
 if (\${dobuild} == true) then
-  set ciceexe = "../ciceexe.\${ICE_COMPILER}.\${ICE_COMMDIR}.\${ICE_BLDDEBUG}.\${ICE_THREADED}.\${ICE_IOTYPE}"
-  ./cice.build --exe \${ciceexe}
-  if !(-e \${ciceexe}) cp -p \${ICE_RUNDIR}/cice \${ciceexe}
+  if (\${doreuse} == true) then
+    set ciceexe = "../ciceexe.\${ICE_COMPILER}.\${ICE_COMMDIR}.\${ICE_BLDDEBUG}.\${ICE_THREADED}.\${ICE_IOTYPE}"
+    ./cice.build --exe \${ciceexe}
+    if !(-e \${ciceexe}) cp -p \${ICE_RUNDIR}/cice \${ciceexe}
+  else
+    ./cice.build
+  endif
 endif
 if (\${dosubmit} == true) then
   ./cice.submit | tee -a ../suite.jobs
@@ -1004,31 +1072,55 @@ EOF0
 # Add code to results.csh to count the number of failures
 cat >> ${tsdir}/results.csh << EOF
 cat ./results.log
-set pends = \`cat ./results.log | grep PEND | wc -l\`
-set failures = \`cat ./results.log | grep FAIL | wc -l\`
-set success = \`cat ./results.log | grep 'PASS\|COPY' | wc -l\`
-set comments = \`cat ./results.log | grep "#" | wc -l\`
-set alltotal = \`cat ./results.log | wc -l\`
+set pends = \`cat ./results.log | grep PEND | wc -l\`
+set misses = \`cat ./results.log | grep MISS | wc -l\`
+set failures = \`cat ./results.log | grep FAIL | wc -l\`
+set failbuild = \`cat ./results.log | grep FAIL | grep " build " | wc -l\`
+set failrun = \`cat ./results.log | grep FAIL | grep " run " | wc -l\`
+set failtest = \`cat ./results.log | grep FAIL | grep " test " | wc -l\`
+set failcomp = \`cat ./results.log | grep FAIL | grep " compare " | wc -l\`
+set failbfbc = \`cat ./results.log | grep FAIL | grep " bfbcomp " | wc -l\`
+set failgen = \`cat ./results.log | grep FAIL | grep " generate " | wc -l\`
+set success = \`cat ./results.log | grep 'PASS\|COPY' | wc -l\`
+set comments = \`cat ./results.log | grep "#" | wc -l\`
+set alltotal = \`cat ./results.log | wc -l\`
 @ total = \$alltotal - \$comments
+@ chkcnt = \$pends + \$misses + \$failures + \$success
 echo "#------- " >> results.log
 echo " " >> results.log
-echo "#totl = \$total" >> results.log
+echo "#totl = \$total total" >> results.log
+echo "#chkd = \$chkcnt checked" >> results.log
 echo "#pass = \$success" >> results.log
-echo "#fail = \$failures" >> results.log
 echo "#pend = \$pends" >> results.log
+echo "#miss = \$misses" >> results.log
+echo "#fail = \$failures" >> results.log
+echo " #failbuild = \$failbuild" >> results.log
+echo " #failrun = \$failrun" >> results.log
+echo " #failtest = \$failtest" >> results.log
+echo " #failcomp = \$failcomp" >> results.log
+echo " #failbfbc = \$failbfbc" >> results.log
+echo " #failgen = \$failgen" >> results.log
 echo ""
 echo "Descriptors:"
 echo " PASS - successful completion"
 echo " COPY - previously compiled code was copied for new test"
 echo " MISS - comparison data is missing"
-echo " PEND - run has been submitted to queue and is waiting or failed submission"
-echo " FAIL - test is still executing, did not complete, or completed and failed"
+echo " PEND - status is undetermined; test may still be queued, running, or timed out"
+echo " FAIL - test failed"
 echo ""
-echo "\$success
of \$total tests PASSED" -echo "\$failures of \$total tests FAILED" -echo "\$pends of \$total tests PENDING" +echo "\$chkcnt measured results of \$total total results" +echo "\$success of \$chkcnt tests PASSED" +echo "\$pends of \$chkcnt tests PENDING" +echo "\$misses of \$chkcnt tests MISSING data" +echo "\$failures of \$chkcnt tests FAILED" +#echo " \$failbuild of \$failures FAILED build" +#echo " \$failrun of \$failures FAILED run" +#echo " \$failtest of \$failures FAILED test" +#echo " \$failcomp of \$failures FAILED compare" +#echo " \$failbfbc of \$failures FAILED bfbcomp" +#echo " \$failgen of \$failures FAILED generate" exit \$failures EOF @@ -1038,9 +1130,24 @@ setenv ICE_MACHINE_QSTAT ${ICE_MACHINE_QSTAT} EOF0 endif +cat >> ${tsdir}/report_codecov.csh << EOF +source ${ICE_SCRIPTS}/machines/env.${machcomp} + +if ( \${use_curl} == 1 ) then + bash -c "bash <(curl -s https://codecov.io/bash) -n '\${report_name}' -y ./codecov.yml " +else + bash -c "bash <(wget -O - https://codecov.io/bash) -n '\${report_name}' -y ./codecov.yml " +endif + +sleep 10 +rm -r -f ./*/codecov_output + +EOF + # build and submit tests cd ${tsdir} setenv SUITE_BUILD ${suitebuild} + setenv SUITE_REUSEBUILD ${suitereuse} setenv SUITE_RUN ${suiterun} setenv SUITE_SUBMIT ${suitesubmit} ./suite.submit | tee suite.log @@ -1050,6 +1157,11 @@ EOF0 ./results.csh ./report_results.csh endif + if ($codecov == 1) then + echo "Generating codecov reports" + ./poll_queue.csh + ./report_codecov.csh + endif cd ${ICE_SANDBOX} endif diff --git a/cicecore/cicedynB/analysis/ice_diagnostics.F90 b/cicecore/cicedynB/analysis/ice_diagnostics.F90 index 64137446d..40da6cb64 100644 --- a/cicecore/cicedynB/analysis/ice_diagnostics.F90 +++ b/cicecore/cicedynB/analysis/ice_diagnostics.F90 @@ -18,7 +18,7 @@ module ice_diagnostics use ice_fileunits, only: flush_fileunit use ice_exit, only: abort_ice use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted - use icepack_intfc, only: icepack_max_aero + use icepack_intfc, only: icepack_max_aero, icepack_max_iso use icepack_intfc, only: icepack_query_parameters use icepack_intfc, only: icepack_query_tracer_flags use icepack_intfc, only: icepack_query_tracer_indices @@ -79,6 +79,10 @@ module ice_diagnostics toten , & ! total ice/snow energy (J) totes ! total ice/snow energy (J) + real (kind=dbl_kind), dimension(icepack_max_iso) :: & + totison , & ! total isotope mass + totisos ! total isotope mass + real (kind=dbl_kind), dimension(icepack_max_aero) :: & totaeron , & ! total aerosol mass totaeros ! total aerosol mass @@ -89,8 +93,8 @@ module ice_diagnostics integer (kind=int_kind), parameter, public :: & check_step = 999999999, & ! begin printing at istep1=check_step iblkp = 1, & ! block number - ip = 2, & ! i index - jp = 11, & ! j index + ip = 72, & ! i index + jp = 11, & ! j index mtask = 0 ! 
my_task

 !=======================================================================

@@ -113,7 +117,7 @@ subroutine runtime_diags (dt)
       use ice_constants, only: c1, c1000, c2, p001, p5, &
          field_loc_center, m2_to_km2
       use ice_domain, only: distrb_info, nblocks
-      use ice_domain_size, only: ncat, n_aero, max_blocks, nfsd
+      use ice_domain_size, only: ncat, n_iso, n_aero, max_blocks, nfsd
       use ice_flux, only: alvdr, alidr, alvdf, alidf, evap, fsnow, frazil, &
          fswabs, fswthru, flw, flwout, fsens, fsurf, flat, frzmlt_init, frain, fpond, &
          fhocn_ai, fsalt_ai, fresh_ai, frazil_diag, &
          dsnow, congel, sst, sss, Tf, fhocn, &
          swvdr, swvdf, swidr, swidf, &
          alvdr_init, alvdf_init, alidr_init, alidf_init
-      use ice_flux_bgc, only: faero_atm, faero_ocn
+      use ice_flux_bgc, only: faero_atm, faero_ocn, fiso_atm, fiso_ocn
       use ice_global_reductions, only: global_sum, global_sum_prod, global_maxval
       use ice_grid, only: lmask_n, lmask_s, tarean, tareas
       use ice_state ! everything
@@ -138,10 +142,11 @@ subroutine runtime_diags (dt)
       integer (kind=int_kind) :: &
          i, j, k, n, iblk, nc, &
          ktherm, &
-         nt_tsfc, nt_aero, nt_fbri, nt_apnd, nt_hpnd, nt_fsd
+         nt_tsfc, nt_aero, nt_fbri, nt_apnd, nt_hpnd, nt_fsd, &
+         nt_isosno, nt_isoice

       logical (kind=log_kind) :: &
-         tr_pond_topo, tr_brine, tr_aero, calc_Tsfc, tr_fsd
+         tr_pond_topo, tr_brine, tr_iso, tr_aero, calc_Tsfc, tr_fsd

       real (kind=dbl_kind) :: &
          rhow, rhos, rhoi, puny, awtvdr, awtidr, awtvdf, awtidf, &
@@ -166,6 +171,13 @@ subroutine runtime_diags (dt)
          delein, werrn, herrn, msltn, delmsltn, serrn, &
          deleis, werrs, herrs, mslts, delmslts, serrs

+      ! isotope diagnostics
+      real (kind=dbl_kind), dimension(icepack_max_iso) :: &
+         fisoan, fisoon, isorn, &
+         fisoas, fisoos, isors, &
+         isomx1n, isomx1s, &
+         isototn, isotots
+
       ! aerosol diagnostics
       real (kind=dbl_kind), dimension(icepack_max_aero) :: &
          faeran, faeron, aerrn, &
         faeras, faeros, aerrs
@@ -188,10 +200,10 @@ subroutine runtime_diags (dt)
       call icepack_query_parameters(ktherm_out=ktherm, calc_Tsfc_out=calc_Tsfc)
       call icepack_query_tracer_flags(tr_brine_out=tr_brine, tr_aero_out=tr_aero, &
-         tr_pond_topo_out=tr_pond_topo, tr_fsd_out=tr_fsd)
+         tr_pond_topo_out=tr_pond_topo, tr_fsd_out=tr_fsd, tr_iso_out=tr_iso)
       call icepack_query_tracer_indices(nt_fbri_out=nt_fbri, nt_Tsfc_out=nt_Tsfc, &
          nt_aero_out=nt_aero, nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, &
-         nt_fsd_out=nt_fsd)
+         nt_fsd_out=nt_fsd, nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice)
       call icepack_query_parameters(Tffresh_out=Tffresh, rhos_out=rhos, &
          rhow_out=rhow, rhoi_out=rhoi, puny_out=puny, &
         awtvdr_out=awtvdr, awtidr_out=awtidr, awtvdf_out=awtvdf, awtidf_out=awtidf, &
@@ -683,6 +695,45 @@ subroutine runtime_diags (dt)
       serrn = (sfsaltn + delmsltn) / (msltn + c1)
       serrs = (sfsalts + delmslts) / (mslts + c1)
+      !
isotopes + if (tr_iso) then + do n = 1, n_iso + fisoan(n) = global_sum_prod(fiso_atm(:,:,n,:), aice_init, & + distrb_info, field_loc_center, tarean) + fisoas(n) = global_sum_prod(fiso_atm(:,:,n,:), aice_init, & + distrb_info, field_loc_center, tareas) + fisoan(n) = fisoan(n)*dt + fisoas(n) = fisoas(n)*dt + fisoon(n) = global_sum_prod(fiso_ocn(:,:,n,:), aice, & + distrb_info, field_loc_center, tarean) + fisoos(n) = global_sum_prod(fiso_ocn(:,:,n,:), aice, & + distrb_info, field_loc_center, tareas) + fisoon(n) = fisoon(n)*dt + fisoos(n) = fisoos(n)*dt + + !$OMP PARALLEL DO PRIVATE(iblk,i,j) + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + work1(i,j,iblk) = c0 + do k = 1, n_iso + work1(i,j,iblk) = work1(i,j,iblk) & + + vsno(i,j,iblk)*trcr(i,j,nt_isosno+k-1,iblk) & + + vice(i,j,iblk)*trcr(i,j,nt_isoice+k-1,iblk) + enddo + enddo + enddo + enddo + !$OMP END PARALLEL DO + isototn(n) = global_sum(work1, distrb_info, field_loc_center, tarean) + isotots(n) = global_sum(work1, distrb_info, field_loc_center, tareas) + isomx1n(n) = global_maxval(work1, distrb_info, lmask_n) + isomx1s(n) = global_maxval(work1, distrb_info, lmask_s) + isorn(n) = (totison(n)-isototn(n)+fisoan(n)-fisoon(n))/(isototn(n)+c1) + isors(n) = (totisos(n)-isotots(n)+fisoas(n)-fisoos(n))/(isotots(n)+c1) + enddo ! n_iso + endif ! tr_iso + ! aerosols if (tr_aero) then do n = 1, n_aero @@ -917,6 +968,17 @@ subroutine runtime_diags (dt) write(nu_diag,901) 'arwt salt flx error = ',serrn,serrs write(nu_diag,*) '----------------------------' + if (tr_iso) then + do n = 1, n_iso + write(nu_diag,*) ' isotope ',n + write(nu_diag,901) 'fiso_atm (kg/m2) = ', fisoan(n), fisoas(n) + write(nu_diag,901) 'fiso_ocn (kg/m2) = ', fisoon(n), fisoos(n) + write(nu_diag,901) 'total iso (kg/m2) = ', isototn(n), isotots(n) + write(nu_diag,901) 'iso error = ', isorn(n), isors(n) + write(nu_diag,901) 'maximum iso (kg/m2) = ', isomx1n(n),isomx1s(n) + enddo + write(nu_diag,*) '----------------------------' + endif ! tr_iso if (tr_aero) then do n = 1, n_aero write(nu_diag,*) ' aerosol ',n @@ -1030,16 +1092,16 @@ subroutine init_mass_diags use ice_blocks, only: nx_block, ny_block use ice_constants, only: field_loc_center use ice_domain, only: distrb_info, nblocks - use ice_domain_size, only: n_aero, ncat, max_blocks + use ice_domain_size, only: n_iso, n_aero, ncat, max_blocks use ice_global_reductions, only: global_sum use ice_grid, only: tareas, tarean use ice_state, only: aicen, vice, vsno, trcrn, trcr - integer (kind=int_kind) :: n, i, j, iblk, & - nt_hpnd, nt_apnd, nt_aero + integer (kind=int_kind) :: n, i, j, k, iblk, & + nt_hpnd, nt_apnd, nt_aero, nt_isosno, nt_isoice logical (kind=log_kind) :: & - tr_aero, tr_pond_topo + tr_iso, tr_aero, tr_pond_topo real (kind=dbl_kind) :: & shmaxn, snwmxn, shmaxs, snwmxs, totpn, totps, & @@ -1051,7 +1113,8 @@ subroutine init_mass_diags character(len=*), parameter :: subname = '(init_mass_diags)' call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_pond_topo_out=tr_pond_topo) - call icepack_query_tracer_indices( & + call icepack_query_tracer_flags(tr_iso_out=tr_iso) + call icepack_query_tracer_indices(nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice, & nt_hpnd_out=nt_hpnd, nt_apnd_out=nt_apnd, nt_aero_out=nt_aero) call icepack_query_parameters( & rhoi_out=rhoi, rhos_out=rhos, rhofresh_out=rhofresh) @@ -1094,6 +1157,27 @@ subroutine init_mass_diags enddo ! npnt endif ! 
print_points + if (tr_iso) then + do n=1,n_iso + !$OMP PARALLEL DO PRIVATE(iblk,i,j) + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + work1(i,j,iblk) = c0 + do k = 1, n_iso + work1(i,j,iblk) = work1(i,j,iblk) & + + vsno(i,j,iblk)*trcr(i,j,nt_isosno+k-1,iblk) & + + vice(i,j,iblk)*trcr(i,j,nt_isoice+k-1,iblk) + enddo + enddo + enddo + enddo + !$OMP END PARALLEL DO + totison(n)= global_sum(work1, distrb_info, field_loc_center, tarean) + totisos(n)= global_sum(work1, distrb_info, field_loc_center, tareas) + enddo + endif + if (tr_aero) then do n=1,n_aero !$OMP PARALLEL DO PRIVATE(iblk,i,j) @@ -1480,18 +1564,20 @@ subroutine print_state(plabel,i,j,iblk) qi, qs, Tsnow, & rad_to_deg, puny, rhoi, lfresh, rhos, cp_ice - integer (kind=int_kind) :: n, k, nt_Tsfc, nt_qice, nt_qsno, nt_fsd + integer (kind=int_kind) :: n, k, nt_Tsfc, nt_qice, nt_qsno, nt_fsd, & + nt_isosno, nt_isoice - logical (kind=log_kind) :: tr_fsd + logical (kind=log_kind) :: tr_fsd, tr_iso type (block) :: & this_block ! block information for current block character(len=*), parameter :: subname = '(print_state)' - call icepack_query_tracer_flags(tr_fsd_out=tr_fsd) + call icepack_query_tracer_flags(tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) call icepack_query_tracer_indices(nt_Tsfc_out=nt_Tsfc, nt_qice_out=nt_qice, & - nt_qsno_out=nt_qsno, nt_fsd_out=nt_fsd) + nt_qsno_out=nt_qsno, nt_fsd_out=nt_fsd, & + nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) call icepack_query_parameters( & rad_to_deg_out=rad_to_deg, puny_out=puny, rhoi_out=rhoi, lfresh_out=lfresh, & rhos_out=rhos, cp_ice_out=cp_ice) @@ -1521,6 +1607,8 @@ subroutine print_state(plabel,i,j,iblk) endif write(nu_diag,*) 'Tsfcn',trcrn(i,j,nt_Tsfc,n,iblk) if (tr_fsd) write(nu_diag,*) 'afsdn',trcrn(i,j,nt_fsd,n,iblk) ! fsd cat 1 +! if (tr_iso) write(nu_diag,*) 'isosno',trcrn(i,j,nt_isosno,n,iblk) ! isotopes in snow +! if (tr_iso) write(nu_diag,*) 'isoice',trcrn(i,j,nt_isoice,n,iblk) ! isotopes in ice write(nu_diag,*) ' ' ! dynamics (transport and/or ridging) causes the floe size distribution to become non-normal diff --git a/cicecore/cicedynB/analysis/ice_history_bgc.F90 b/cicecore/cicedynB/analysis/ice_history_bgc.F90 index c27683423..1ae572b30 100644 --- a/cicecore/cicedynB/analysis/ice_history_bgc.F90 +++ b/cicecore/cicedynB/analysis/ice_history_bgc.F90 @@ -14,13 +14,13 @@ module ice_history_bgc use ice_fileunits, only: nu_diag use ice_exit, only: abort_ice use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted - use icepack_intfc, only: icepack_max_aero, icepack_max_dic, & - icepack_max_doc, icepack_max_don, & + use icepack_intfc, only: icepack_max_iso, icepack_max_aero, & + icepack_max_dic, icepack_max_doc, icepack_max_don, & icepack_max_algae, icepack_max_fe use icepack_intfc, only: icepack_query_tracer_flags, & icepack_query_tracer_indices, icepack_query_parameters, & icepack_query_parameters - use ice_domain_size, only: max_nstrm, n_aero, & + use ice_domain_size, only: max_nstrm, n_iso, n_aero, & n_algae, n_dic, n_doc, n_don, n_zaero, n_fed, n_fep implicit none @@ -35,6 +35,8 @@ module ice_history_bgc ! 
specified in input_templates !-------------------------------------------------------------- character (len=max_nstrm), public :: & + f_fiso_atm = 'x', f_fiso_ocn = 'x', & + f_iso = 'x', & f_faero_atm = 'x', f_faero_ocn = 'x', & f_aero = 'x', & f_fzsal = 'm', f_fzsal_ai = 'm', & @@ -124,6 +126,8 @@ module ice_history_bgc !--------------------------------------------------------------- namelist / icefields_bgc_nml / & + f_fiso_atm , f_fiso_ocn , & + f_iso , & f_faero_atm , f_faero_ocn , & f_aero , & f_fbio , f_fbio_ai , & @@ -154,6 +158,12 @@ module ice_history_bgc n_fzsal_g , n_fzsal_g_ai , & n_zsal + integer(kind=int_kind), dimension(icepack_max_iso,max_nstrm) :: & + n_fiso_atm , & + n_fiso_ocn , & + n_isosno , & + n_isoice + integer(kind=int_kind), dimension(icepack_max_aero,max_nstrm) :: & n_faero_atm , & n_faero_ocn , & @@ -266,7 +276,7 @@ subroutine init_hist_bgc_2D integer (kind=int_kind) :: nml_error ! namelist i/o error flag character (len=3) :: nchar character (len=16) :: vname_in ! variable name - logical (kind=log_kind) :: tr_zaero, tr_aero, tr_brine, & + logical (kind=log_kind) :: tr_zaero, tr_aero, tr_brine, tr_iso, & tr_bgc_Nit, tr_bgc_Am, tr_bgc_Sil, & tr_bgc_DMS, tr_bgc_PON, & tr_bgc_N, tr_bgc_C, tr_bgc_chl, & @@ -276,7 +286,8 @@ subroutine init_hist_bgc_2D call icepack_query_parameters(skl_bgc_out=skl_bgc, & solve_zsal_out=solve_zsal, z_tracers_out=z_tracers) - call icepack_query_tracer_flags(tr_zaero_out =tr_zaero, & + call icepack_query_tracer_flags( & + tr_iso_out =tr_iso, tr_zaero_out =tr_zaero, & tr_aero_out =tr_aero, tr_brine_out =tr_brine, & tr_bgc_Nit_out=tr_bgc_Nit, tr_bgc_Am_out =tr_bgc_Am, & tr_bgc_Sil_out=tr_bgc_Sil, tr_bgc_DMS_out=tr_bgc_DMS, & @@ -313,6 +324,12 @@ subroutine init_hist_bgc_2D call abort_ice(subname//'ERROR: reading icefields_bgc_nml') endif + if (.not. tr_iso) then + f_fiso_atm = 'x' + f_fiso_ocn = 'x' + f_iso = 'x' + endif + if (.not. tr_aero) then f_faero_atm = 'x' f_faero_ocn = 'x' @@ -609,6 +626,9 @@ subroutine init_hist_bgc_2D f_iki = 'x' endif + call broadcast_scalar (f_fiso_atm, master_task) + call broadcast_scalar (f_fiso_ocn, master_task) + call broadcast_scalar (f_iso, master_task) call broadcast_scalar (f_faero_atm, master_task) call broadcast_scalar (f_faero_ocn, master_task) call broadcast_scalar (f_aero, master_task) @@ -758,10 +778,44 @@ subroutine init_hist_bgc_2D ! 2D variables - if (tr_aero .or. tr_brine .or. solve_zsal .or. skl_bgc) then + if (tr_iso .or. tr_aero .or. tr_brine .or. solve_zsal .or. 
skl_bgc) then do ns = 1, nstreams + if (f_iso(1:1) /= 'x') then + do n=1,n_iso + write(nchar,'(i3.3)') n + write(vname_in,'(a,a)') 'isosno', trim(nchar) + call define_hist_field(n_isosno(n,:),vname_in,"kg/kg", & + tstr2D, tcstr,"snow isotope mass concentration","none", c1, c0, & + ns, f_iso) + write(vname_in,'(a,a)') 'isoice', trim(nchar) + call define_hist_field(n_isoice(n,:),vname_in,"kg/kg", & + tstr2D, tcstr,"ice isotope mass concentration","none", c1, c0, & + ns, f_iso) + enddo + endif + + if (f_fiso_atm(1:1) /= 'x') then + do n=1,n_iso + write(nchar,'(i3.3)') n + write(vname_in,'(a,a)') 'fiso_atm', trim(nchar) + call define_hist_field(n_fiso_atm(n,:),vname_in,"kg/m^2 s", & + tstr2D, tcstr,"isotope deposition rate","none", c1, c0, & + ns, f_fiso_atm) + enddo + endif + + if (f_fiso_ocn(1:1) /= 'x') then + do n=1,n_iso + write(nchar,'(i3.3)') n + write(vname_in,'(a,a)') 'fiso_ocn', trim(nchar) + call define_hist_field(n_fiso_ocn(n,:),vname_in,"kg/m^2 s", & + tstr2D, tcstr,"isotope flux to ocean","none", c1, c0, & + ns, f_fiso_ocn) + enddo + endif + ! zsalinity call define_hist_field(n_fzsal,"fzsal","kg/m^2/s",tstr2D, tcstr, & @@ -1839,8 +1893,8 @@ subroutine accum_hist_bgc (iblk) use ice_domain, only: blocks_ice use ice_domain_size, only: nblyr use ice_flux, only: sss - use ice_flux_bgc, only: faero_atm, faero_ocn, flux_bio, flux_bio_ai, & - fzsal_ai, fzsal_g_ai + use ice_flux_bgc, only: fiso_atm, fiso_ocn, faero_atm, faero_ocn, & + flux_bio, flux_bio_ai, fzsal_ai, fzsal_g_ai use ice_history_shared, only: n2D, a2D, a3Dc, & n3Dzcum, n3Dbcum, a3Db, a3Da, & ncat_hist, accum_hist_field, nzblyr, nzalyr @@ -1873,15 +1927,16 @@ subroutine accum_hist_bgc (iblk) workii logical (kind=log_kind) :: & - skl_bgc, z_tracers, tr_aero, tr_brine, solve_zsal - - integer(kind=int_kind) :: nt_aero, nt_fbri, & - nt_bgc_Nit, nt_bgc_Am, nt_bgc_Sil, nt_bgc_DMSPp, & - nt_bgc_DMSPd, nt_bgc_DMS, nt_bgc_PON, nt_bgc_S, & - nt_zbgc_frac, nlt_chl_sw, & - nlt_bgc_Nit, nlt_bgc_Am, nlt_bgc_Sil, & - nlt_bgc_DMS, nlt_bgc_PON, & - nlt_bgc_DMSPp, nlt_bgc_DMSPd, & + skl_bgc, z_tracers, tr_iso, tr_aero, tr_brine, solve_zsal + + integer(kind=int_kind) :: & + nt_isosno, nt_isoice, nt_aero, nt_fbri, & + nt_bgc_Nit, nt_bgc_Am, nt_bgc_Sil, nt_bgc_DMSPp, & + nt_bgc_DMSPd, nt_bgc_DMS, nt_bgc_PON, nt_bgc_S, & + nt_zbgc_frac, nlt_chl_sw, & + nlt_bgc_Nit, nlt_bgc_Am, nlt_bgc_Sil, & + nlt_bgc_DMS, nlt_bgc_PON, & + nlt_bgc_DMSPp, nlt_bgc_DMSPd, & nt_bgc_hum, nlt_bgc_hum integer (kind=int_kind), dimension(icepack_max_aero) :: & @@ -1915,11 +1970,13 @@ subroutine accum_hist_bgc (iblk) call icepack_query_parameters(rhos_out=rhos, rhoi_out=rhoi, & rhow_out=rhow, puny_out=puny, sk_l_out=sk_l) - call icepack_query_tracer_flags( & + call icepack_query_tracer_flags(tr_iso_out=tr_iso, & tr_aero_out=tr_aero, tr_brine_out=tr_brine) call icepack_query_parameters(skl_bgc_out=skl_bgc, & solve_zsal_out=solve_zsal, z_tracers_out=z_tracers) - call icepack_query_tracer_indices( nt_aero_out=nt_aero, & + call icepack_query_tracer_indices( & + nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice, & + nt_aero_out=nt_aero, & nt_fbri_out=nt_fbri, nt_bgc_DOC_out=nt_bgc_DOC, & nt_zaero_out=nt_zaero, nt_bgc_DIC_out=nt_bgc_DIC, & nt_bgc_DON_out=nt_bgc_DON, nt_bgc_N_out=nt_bgc_N, & @@ -1955,7 +2012,7 @@ subroutine accum_hist_bgc (iblk) ! increment field !--------------------------------------------------------------- - if (tr_aero .or. tr_brine .or. solve_zsal .or. skl_bgc) then + if (tr_iso .or. tr_aero .or. tr_brine .or. solve_zsal .or. skl_bgc) then ! 
2d bgc fields

@@ -1971,6 +2028,28 @@ subroutine accum_hist_bgc (iblk)
          if (f_zsal (1:1) /= 'x') &
             call accum_hist_field(n_zsal, iblk, zsal_tot(:,:,iblk), a2D)

+         ! isotopes
+         if (f_fiso_atm(1:1) /= 'x') then
+            do n=1,n_iso
+               call accum_hist_field(n_fiso_atm(n,:),iblk, &
+                                     fiso_atm(:,:,n,iblk), a2D)
+            enddo
+         endif
+         if (f_fiso_ocn(1:1) /= 'x') then
+            do n=1,n_iso
+               call accum_hist_field(n_fiso_ocn(n,:),iblk, &
+                                     fiso_ocn(:,:,n,iblk), a2D)
+            enddo
+         endif
+         if (f_iso(1:1) /= 'x') then
+            do n=1,n_iso
+               call accum_hist_field(n_isosno(n,:), iblk, &
+                                     trcr(:,:,nt_isosno+n-1,iblk)/rhos, a2D)
+               call accum_hist_field(n_isoice(n,:), iblk, &
+                                     trcr(:,:,nt_isoice+n-1,iblk)/rhoi, a2D)
+            enddo
+         endif
+
          ! Aerosols
          if (f_faero_atm(1:1) /= 'x') then
             do n=1,n_aero
diff --git a/cicecore/cicedynB/dynamics/ice_transport_driver.F90 b/cicecore/cicedynB/dynamics/ice_transport_driver.F90
index 86c5a67c4..7eaba64cf 100644
--- a/cicecore/cicedynB/dynamics/ice_transport_driver.F90
+++ b/cicecore/cicedynB/dynamics/ice_transport_driver.F90
@@ -44,11 +44,11 @@ module ice_transport_driver
    integer (kind=int_kind) :: &
       ntrace ! number of tracers in use

-   integer (kind=int_kind), dimension(:), allocatable :: &
+   integer (kind=int_kind), dimension(:), allocatable, public :: &
      tracer_type ,&! = 1, 2, or 3 (depends on 0, 1 or 2 other tracers)
      depend ! tracer dependencies (see below)

-   logical (kind=log_kind), dimension (:), allocatable :: &
+   logical (kind=log_kind), dimension (:), allocatable, public :: &
      has_dependents ! true if a tracer has dependent tracers

    integer (kind=int_kind), parameter :: &
@@ -82,7 +82,7 @@ subroutine init_transport
    integer (kind=int_kind) :: ntrcr, nt_Tsfc, nt_qice, nt_qsno, &
       nt_sice, nt_fbri, nt_iage, nt_FY, nt_alvl, nt_vlvl, &
-      nt_apnd, nt_hpnd, nt_ipnd, nt_fsd, nt_bgc_Nit, nt_bgc_S
+      nt_apnd, nt_hpnd, nt_ipnd, nt_fsd, nt_isosno, nt_isoice, nt_bgc_Nit, nt_bgc_S

    character(len=*), parameter :: subname = '(init_transport)'

@@ -93,7 +93,8 @@ subroutine init_transport
       nt_qsno_out=nt_qsno, nt_sice_out=nt_sice, nt_fbri_out=nt_fbri, &
       nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_alvl_out=nt_alvl, nt_fsd_out=nt_fsd, &
       nt_vlvl_out=nt_vlvl, nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, &
-      nt_ipnd_out=nt_ipnd, nt_bgc_Nit_out=nt_bgc_Nit, nt_bgc_S_out=nt_bgc_S)
+      nt_ipnd_out=nt_ipnd, nt_bgc_Nit_out=nt_bgc_Nit, nt_bgc_S_out=nt_bgc_S, &
+      nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice)
    call icepack_warnings_flush(nu_diag)
    if (icepack_warnings_aborted()) call abort_ice(error_message=subname, &
       file=__FILE__, line=__LINE__)
@@ -194,6 +195,12 @@ subroutine init_transport
       if (nt-k==nt_fsd) &
          write(nu_diag,*) 'nt_fsd ',nt,depend(nt),tracer_type(nt),&
          has_dependents(nt)
+      if (nt-k==nt_isosno) &
+         write(nu_diag,*) 'nt_isosno',nt,depend(nt),tracer_type(nt),&
+         has_dependents(nt)
+      if (nt-k==nt_isoice) &
+         write(nu_diag,*) 'nt_isoice',nt,depend(nt),tracer_type(nt),&
+         has_dependents(nt)
       if (nt-k==nt_bgc_Nit) &
          write(nu_diag,*) 'nt_bgc_Nit',nt,depend(nt),tracer_type(nt),&
          has_dependents(nt)
diff --git a/cicecore/cicedynB/general/ice_flux.F90 b/cicecore/cicedynB/general/ice_flux.F90
index afefef9d3..607b763eb 100644
--- a/cicecore/cicedynB/general/ice_flux.F90
+++ b/cicecore/cicedynB/general/ice_flux.F90
@@ -527,7 +527,7 @@ end subroutine alloc_flux
 subroutine init_coupler_flux
    use ice_arrays_column, only: Cdn_atm
-   use ice_flux_bgc, only: flux_bio_atm, flux_bio, faero_atm, &
+   use ice_flux_bgc, only: flux_bio_atm, flux_bio, faero_atm, fiso_atm, &
      fnit, famm, fsil, fdmsp, fdms, fhum, fdust, falgalN, &
      fdoc, fdon, fdic, ffed,
ffep use ice_grid, only: bathymetry @@ -617,6 +617,7 @@ subroutine init_coupler_flux fsensn_f (:,:,:,:) = c0 ! sensible heat flux (W/m^2) endif ! + fiso_atm (:,:,:,:) = c0 ! isotope deposition rate (kg/m2/s) faero_atm (:,:,:,:) = c0 ! aerosol deposition rate (kg/m2/s) flux_bio_atm (:,:,:,:) = c0 ! zaero and bio deposition rate (kg/m2/s) @@ -727,6 +728,8 @@ end subroutine init_coupler_flux subroutine init_flux_atm + use ice_flux_bgc, only: fiso_evap, Qref_iso, Qa_iso + character(len=*), parameter :: subname = '(init_flux_atm)' !----------------------------------------------------------------- @@ -748,6 +751,10 @@ subroutine init_flux_atm Qref (:,:,:) = c0 Uref (:,:,:) = c0 + fiso_evap(:,:,:,:) = c0 + Qref_iso (:,:,:,:) = c0 + Qa_iso (:,:,:,:) = c0 + end subroutine init_flux_atm !======================================================================= @@ -763,7 +770,7 @@ end subroutine init_flux_atm subroutine init_flux_ocn - use ice_flux_bgc, only: faero_ocn + use ice_flux_bgc, only: faero_ocn, fiso_ocn, HDO_ocn, H2_16O_ocn, H2_18O_ocn character(len=*), parameter :: subname = '(init_flux_ocn)' @@ -776,7 +783,12 @@ subroutine init_flux_ocn fpond (:,:,:) = c0 fhocn (:,:,:) = c0 fswthru (:,:,:) = c0 - faero_ocn(:,:,:,:) = c0 + + faero_ocn (:,:,:,:) = c0 + fiso_ocn (:,:,:,:) = c0 + HDO_ocn (:,:,:) = c0 + H2_16O_ocn (:,:,:) = c0 + H2_18O_ocn (:,:,:) = c0 end subroutine init_flux_ocn @@ -972,7 +984,11 @@ subroutine scale_fluxes (nx_block, ny_block, & fzsal, fzsal_g, & flux_bio, & fsurf, fcondtop, & - Uref, wind ) + Uref, wind, & + Qref_iso, & + fiso_evap,fiso_ocn) + + use icepack_intfc, only: icepack_max_iso integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions @@ -1030,6 +1046,13 @@ subroutine scale_fluxes (nx_block, ny_block, & fzsal , & ! salt flux to ocean with prognositic salinity (kg/m2/s) fzsal_g ! Gravity drainage salt flux to ocean (kg/m2/s) + ! isotopes + real (kind=dbl_kind), dimension(nx_block,ny_block,icepack_max_iso), & + optional, intent(inout) :: & + Qref_iso , & ! isotope air sp hum reference level (kg/kg) + fiso_evap, & ! isotope evaporation (kg/m2/s) + fiso_ocn ! isotope flux to ocean (kg/m2/s) + ! local variables real (kind=dbl_kind) :: & @@ -1078,6 +1101,9 @@ subroutine scale_fluxes (nx_block, ny_block, & fzsal_g (i,j) = fzsal_g (i,j) * ar flux_bio (i,j,:) = flux_bio (i,j,:) * ar faero_ocn(i,j,:) = faero_ocn(i,j,:) * ar + if (present(Qref_iso )) Qref_iso (i,j,:) = Qref_iso (i,j,:) * ar + if (present(fiso_evap)) fiso_evap(i,j,:) = fiso_evap(i,j,:) * ar + if (present(fiso_ocn )) fiso_ocn (i,j,:) = fiso_ocn (i,j,:) * ar else ! zero out fluxes strairxT(i,j) = c0 strairyT(i,j) = c0 @@ -1103,6 +1129,9 @@ subroutine scale_fluxes (nx_block, ny_block, & fzsal_g (i,j) = c0 flux_bio (i,j,:) = c0 faero_ocn(i,j,:) = c0 + if (present(Qref_iso )) Qref_iso (i,j,:) = c0 + if (present(fiso_evap)) fiso_evap(i,j,:) = c0 + if (present(fiso_ocn )) fiso_ocn (i,j,:) = c0 endif ! tmask and aice > 0 enddo ! i enddo ! 
j diff --git a/cicecore/cicedynB/general/ice_flux_bgc.F90 b/cicecore/cicedynB/general/ice_flux_bgc.F90 index 2ff193b2f..56e644431 100644 --- a/cicecore/cicedynB/general/ice_flux_bgc.F90 +++ b/cicecore/cicedynB/general/ice_flux_bgc.F90 @@ -12,7 +12,7 @@ module ice_flux_bgc use ice_fileunits, only: nu_diag use ice_exit, only: abort_ice use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted - use icepack_intfc, only: icepack_max_aero, icepack_max_nbtrcr, & + use icepack_intfc, only: icepack_max_iso, icepack_max_aero, icepack_max_nbtrcr, & icepack_max_algae, icepack_max_doc, icepack_max_don, icepack_max_dic, icepack_max_fe, & icepack_query_tracer_indices, icepack_query_tracer_flags, icepack_query_parameters @@ -23,22 +23,22 @@ module ice_flux_bgc ! in from atmosphere - real (kind=dbl_kind), & !coupling variable for both tr_aero and tr_zaero + real (kind=dbl_kind), & ! coupling variable for both tr_aero and tr_zaero dimension (:,:,:,:), allocatable, public :: & + fiso_atm, & ! isotope deposition rate (kg/m^2 s) faero_atm ! aerosol deposition rate (kg/m^2 s) real (kind=dbl_kind), & dimension (:,:,:,:), allocatable, public :: & flux_bio_atm ! all bio fluxes to ice from atmosphere - ! in from ocean + ! out to ocean real (kind=dbl_kind), & dimension (:,:,:,:), allocatable, public :: & + fiso_ocn, & ! isotope flux to ocean (kg/m^2/s) faero_ocn ! aerosol flux to ocean (kg/m^2/s) - ! out to ocean - real (kind=dbl_kind), & dimension (:,:,:,:), allocatable, public :: & flux_bio , & ! all bio fluxes to ocean @@ -95,6 +95,19 @@ module ice_flux_bgc real (kind=dbl_kind), dimension (:,:,:,:), allocatable, public :: & zaeros ! ocean aerosols (mmol/m^3) + ! isotopes + real (kind=dbl_kind), & ! coupling variable for tr_iso + dimension (:,:,:,:), allocatable, public :: & + fiso_evap , & ! isotope evaporation rate (kg/m^2 s) + Qa_iso , & ! isotope specific humidity (kg/kg) + Qref_iso ! 2m atm reference isotope spec humidity (kg/kg) + + real (kind=dbl_kind), & ! coupling variable for tr_iso + dimension (:,:,:), allocatable, public :: & + HDO_ocn , & ! seawater concentration of HDO (kg/kg) + H2_16O_ocn, & ! seawater concentration of H2_16O (kg/kg) + H2_18O_ocn ! seawater concentration of H2_18O (kg/kg) + !======================================================================= contains @@ -125,6 +138,14 @@ subroutine alloc_flux_bgc fdust (nx_block,ny_block,max_blocks), & ! ice-ocean dust flux (kg/m^2/s), positive to ocean hin_old (nx_block,ny_block,ncat,max_blocks), & ! old ice thickness dsnown (nx_block,ny_block,ncat,max_blocks), & ! change in snow thickness in category n (m) + HDO_ocn (nx_block,ny_block,max_blocks), & ! seawater concentration of HDO (kg/kg) + H2_16O_ocn (nx_block,ny_block,max_blocks), & ! seawater concentration of H2_16O (kg/kg) + H2_18O_ocn (nx_block,ny_block,max_blocks), & ! seawater concentration of H2_18O (kg/kg) + Qa_iso (nx_block,ny_block,icepack_max_iso,max_blocks), & ! isotope specific humidity (kg/kg) + Qref_iso (nx_block,ny_block,icepack_max_iso,max_blocks), & ! 2m atm reference isotope spec humidity (kg/kg) + fiso_atm (nx_block,ny_block,icepack_max_iso,max_blocks), & ! isotope deposition rate (kg/m^2 s) + fiso_evap (nx_block,ny_block,icepack_max_iso,max_blocks), & ! isotope evaporation rate (kg/m^2 s) + fiso_ocn (nx_block,ny_block,icepack_max_iso,max_blocks), & ! isotope flux to ocean (kg/m^2/s) faero_atm (nx_block,ny_block,icepack_max_aero,max_blocks), & ! aerosol deposition rate (kg/m^2 s) faero_ocn (nx_block,ny_block,icepack_max_aero,max_blocks), & ! 
aerosol flux to ocean (kg/m^2/s) zaeros (nx_block,ny_block,icepack_max_aero,max_blocks), & ! ocean aerosols (mmol/m^3) diff --git a/cicecore/cicedynB/general/ice_forcing.F90 b/cicecore/cicedynB/general/ice_forcing.F90 index 0632408bf..64f4b4834 100755 --- a/cicecore/cicedynB/general/ice_forcing.F90 +++ b/cicecore/cicedynB/general/ice_forcing.F90 @@ -72,7 +72,7 @@ module ice_forcing sublim_file, & snow_file - character (char_len_long), dimension(:), allocatable :: & ! input data file names + character (char_len_long), dimension(:), allocatable, public :: & ! input data file names topmelt_file, & botmelt_file @@ -84,10 +84,10 @@ module ice_forcing oldrecnum = 0 , & ! old record number (save between steps) oldrecnum4X = 0 ! - real (kind=dbl_kind), dimension(:,:,:), allocatable :: & + real (kind=dbl_kind), dimension(:,:,:), allocatable, public :: & cldf ! cloud fraction - real (kind=dbl_kind), dimension(:,:,:,:), allocatable :: & + real (kind=dbl_kind), dimension(:,:,:,:), allocatable, public :: & fsw_data, & ! field values at 2 temporal data points cldf_data, & fsnow_data, & @@ -107,8 +107,7 @@ module ice_forcing sublim_data, & frain_data - real (kind=dbl_kind), & - dimension(:,:,:,:,:), allocatable :: & + real (kind=dbl_kind), dimension(:,:,:,:,:), allocatable, public :: & topmelt_data, & botmelt_data @@ -141,8 +140,7 @@ module ice_forcing frcidr = 0.31_dbl_kind, & ! frac of incoming sw in near IR direct band frcidf = 0.17_dbl_kind ! frac of incoming sw in near IR diffuse band - real (kind=dbl_kind), & - dimension (:,:,:,:,:), allocatable :: & + real (kind=dbl_kind), dimension (:,:,:,:,:), allocatable, public :: & ocn_frc_m ! ocn data for 12 months logical (kind=log_kind), public :: & @@ -4362,8 +4360,8 @@ subroutine hycom_atm_files fsw_file = trim(atm_data_dir)//'/forcing.shwflx.nc' flw_file = trim(atm_data_dir)//'/forcing.radflx.nc' rain_file = trim(atm_data_dir)//'/forcing.precip.nc' - uwind_file = trim(atm_data_dir)//'/forcing.ewndsp.nc' !actually Xward, not Eward - vwind_file = trim(atm_data_dir)//'/forcing.nwndsp.nc' !actually Yward, not Nward + uwind_file = trim(atm_data_dir)//'/forcing.wndewd.nc' + vwind_file = trim(atm_data_dir)//'/forcing.wndnwd.nc' tair_file = trim(atm_data_dir)//'/forcing.airtmp.nc' humid_file = trim(atm_data_dir)//'/forcing.vapmix.nc' @@ -4469,11 +4467,11 @@ subroutine hycom_atm_data call read_data_nc_hycom (read6, recnum, & tair_file, fieldname, Tair_data, & field_loc_center, field_type_scalar) - fieldname = 'ewndsp' + fieldname = 'wndewd' call read_data_nc_hycom (read6, recnum, & uwind_file, fieldname, uatm_data, & field_loc_center, field_type_vector) - fieldname = 'nwndsp' + fieldname = 'wndnwd' call read_data_nc_hycom (read6, recnum, & vwind_file, fieldname, vatm_data, & field_loc_center, field_type_vector) diff --git a/cicecore/cicedynB/general/ice_forcing_bgc.F90 b/cicecore/cicedynB/general/ice_forcing_bgc.F90 index 6e543a056..4eedcfb80 100644 --- a/cicecore/cicedynB/general/ice_forcing_bgc.F90 +++ b/cicecore/cicedynB/general/ice_forcing_bgc.F90 @@ -29,16 +29,17 @@ module ice_forcing_bgc implicit none private public :: get_forcing_bgc, get_atm_bgc, fzaero_data, alloc_forcing_bgc, & - init_bgc_data, faero_data, faero_default, faero_optics + init_bgc_data, faero_data, faero_default, faero_optics, & + fiso_default integer (kind=int_kind) :: & bgcrecnum = 0 ! old record number (save between steps) - real (kind=dbl_kind), dimension(:,:,:), allocatable :: & - nitdat , & ! data value toward which nitrate is restored - sildat ! 
data value toward which silicate is restored + real (kind=dbl_kind), dimension(:,:,:), allocatable, public :: & + nitdat , & ! data value toward which nitrate is restored + sildat ! data value toward which silicate is restored - real (kind=dbl_kind), dimension(:,:,:,:), allocatable, save :: & + real (kind=dbl_kind), dimension(:,:,:,:), allocatable, public :: & nit_data, & ! field values at 2 temporal data points sil_data @@ -538,6 +539,21 @@ end subroutine get_atm_bgc !======================================================================= +! constant values for atmospheric water isotopes +! +! authors: David Bailey, NCAR + + subroutine fiso_default + + use ice_flux_bgc, only: fiso_atm + character(len=*), parameter :: subname='(fiso_default)' + + fiso_atm(:,:,:,:) = 1.e-14_dbl_kind ! kg/m^2 s + + end subroutine fiso_default + +!======================================================================= + ! constant values for atmospheric aerosols ! ! authors: Elizabeth Hunke, LANL diff --git a/cicecore/cicedynB/general/ice_init.F90 b/cicecore/cicedynB/general/ice_init.F90 index 41ff70aec..ffb070644 100644 --- a/cicecore/cicedynB/general/ice_init.F90 +++ b/cicecore/cicedynB/general/ice_init.F90 @@ -61,7 +61,7 @@ subroutine input_data use ice_diagnostics, only: diag_file, print_global, print_points, latpnt, lonpnt use ice_domain, only: close_boundaries use ice_domain_size, only: ncat, nilyr, nslyr, nblyr, nfsd, nfreq, & - n_aero, n_zaero, n_algae, & + n_iso, n_aero, n_zaero, n_algae, & n_doc, n_dic, n_don, n_fed, n_fep, & max_nstrm use ice_calendar, only: year_init, istep0, histfreq, histfreq_n, & @@ -71,7 +71,7 @@ subroutine input_data use ice_arrays_column, only: oceanmixed_ice use ice_restart_column, only: restart_age, restart_FY, restart_lvl, & restart_pond_cesm, restart_pond_lvl, restart_pond_topo, restart_aero, & - restart_fsd + restart_fsd, restart_iso use ice_restart_shared, only: & restart, restart_ext, restart_dir, restart_file, pointer_file, & runid, runtype, use_restart_time, restart_format, lcdf64 @@ -126,12 +126,13 @@ subroutine input_data logical (kind=log_kind) :: calc_Tsfc, formdrag, highfreq, calc_strair, wave_spec - logical (kind=log_kind) :: tr_iage, tr_FY, tr_lvl, tr_pond, tr_aero, tr_fsd + logical (kind=log_kind) :: tr_iage, tr_FY, tr_lvl, tr_pond + logical (kind=log_kind) :: tr_iso, tr_aero, tr_fsd logical (kind=log_kind) :: tr_pond_cesm, tr_pond_lvl, tr_pond_topo integer (kind=int_kind) :: numin, numax ! unit number limits integer (kind=int_kind) :: rpcesm, rplvl, rptopo - real (kind=dbl_kind) :: Cf, puny + real (kind=dbl_kind) :: Cf, ksno, puny integer :: abort_flag character (len=64) :: tmpstr @@ -168,13 +169,14 @@ subroutine input_data tr_pond_cesm, restart_pond_cesm, & tr_pond_lvl, restart_pond_lvl, & tr_pond_topo, restart_pond_topo, & + tr_iso, restart_iso, & tr_aero, restart_aero, & tr_fsd, restart_fsd, & - n_aero, n_zaero, n_algae, & + n_iso, n_aero, n_zaero, n_algae, & n_doc, n_dic, n_don, n_fed, n_fep namelist /thermo_nml/ & - kitd, ktherm, conduct, & + kitd, ktherm, conduct, ksno, & a_rapid_mode, Rac_rapid_mode, aspect_rapid_mode, & dSdt_slow_mode, phi_c_slow_mode, phi_i_mushy @@ -294,6 +296,7 @@ subroutine input_data krdg_redist = 1 ! 1 = new redistribution, 0 = Hibler 80 mu_rdg = 3 ! e-folding scale of ridged ice, krdg_partic=1 (m^0.5) Cf = 17.0_dbl_kind ! ratio of ridging work to PE change in ridging + ksno = 0.3_dbl_kind ! snow thermal conductivity close_boundaries = .false. ! true = set land on edges of grid basalstress= .false. ! 
if true, basal stress for landfast is on k1 = 8.0_dbl_kind ! 1st free parameter for landfast parameterization @@ -392,11 +395,14 @@ subroutine input_data restart_pond_lvl = .false. ! melt ponds restart tr_pond_topo = .false. ! explicit melt ponds (topographic) restart_pond_topo = .false. ! melt ponds restart + tr_iso = .false. ! isotopes + restart_iso = .false. ! isotopes restart tr_aero = .false. ! aerosols restart_aero = .false. ! aerosols restart tr_fsd = .false. ! floe size distribution restart_fsd = .false. ! floe size distribution restart + n_iso = 0 n_aero = 0 n_zaero = 0 n_algae = 0 @@ -571,6 +577,7 @@ subroutine input_data call broadcast_scalar(krdg_redist, master_task) call broadcast_scalar(mu_rdg, master_task) call broadcast_scalar(Cf, master_task) + call broadcast_scalar(ksno, master_task) call broadcast_scalar(basalstress, master_task) call broadcast_scalar(k1, master_task) call broadcast_scalar(k2, master_task) @@ -660,6 +667,8 @@ subroutine input_data call broadcast_scalar(restart_pond_lvl, master_task) call broadcast_scalar(tr_pond_topo, master_task) call broadcast_scalar(restart_pond_topo, master_task) + call broadcast_scalar(tr_iso, master_task) + call broadcast_scalar(restart_iso, master_task) call broadcast_scalar(tr_aero, master_task) call broadcast_scalar(restart_aero, master_task) call broadcast_scalar(tr_fsd, master_task) @@ -669,6 +678,7 @@ subroutine input_data call broadcast_scalar(nilyr, master_task) call broadcast_scalar(nslyr, master_task) call broadcast_scalar(nblyr, master_task) + call broadcast_scalar(n_iso, master_task) call broadcast_scalar(n_aero, master_task) call broadcast_scalar(n_zaero, master_task) call broadcast_scalar(n_algae, master_task) @@ -721,6 +731,7 @@ subroutine input_data if (my_task == master_task) & write(nu_diag,*) subname//' WARNING: ice_ic = none or default, setting restart flags to .false.' restart = .false. + restart_iso = .false. restart_aero = .false. restart_fsd = .false. restart_age = .false. @@ -828,6 +839,15 @@ subroutine input_data abort_flag = 8 endif + if (tr_iso .and. n_iso==0) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: isotopes activated but' + write(nu_diag,*) subname//' ERROR: not allocated in tracer array.' + write(nu_diag,*) subname//' ERROR: Activate in compilation script.' + endif + abort_flag = 31 + endif + if (tr_aero .and. 
n_aero==0) then if (my_task == master_task) then write(nu_diag,*) subname//' ERROR: aerosols activated but' @@ -931,6 +951,7 @@ subroutine input_data ice_IOUnitsMaxUnit = numax call icepack_init_parameters(Cf_in=Cf) + call icepack_init_parameters(ksno_in=ksno) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname//'Icepack Abort1', & file=__FILE__, line=__LINE__) @@ -1055,6 +1076,7 @@ subroutine input_data trim(advection) write(nu_diag,1030) ' shortwave = ', & trim(shortwave) + write(nu_diag,1000) ' ksno = ', ksno if (cpl_bgc) then write(nu_diag,1000) ' BGC coupling is switched ON' else @@ -1197,6 +1219,8 @@ subroutine input_data write(nu_diag,1010) ' restart_pond_lvl = ', restart_pond_lvl write(nu_diag,1010) ' tr_pond_topo = ', tr_pond_topo write(nu_diag,1010) ' restart_pond_topo = ', restart_pond_topo + write(nu_diag,1010) ' tr_iso = ', tr_iso + write(nu_diag,1010) ' restart_iso = ', restart_iso write(nu_diag,1010) ' tr_aero = ', tr_aero write(nu_diag,1010) ' restart_aero = ', restart_aero write(nu_diag,1010) ' tr_fsd = ', tr_fsd @@ -1207,6 +1231,7 @@ subroutine input_data write(nu_diag,1020) ' nilyr = ', nilyr write(nu_diag,1020) ' nslyr = ', nslyr write(nu_diag,1020) ' nblyr = ', nblyr + write(nu_diag,1020) ' n_iso = ', n_iso write(nu_diag,1020) ' n_aero = ', n_aero write(nu_diag,1020) ' n_zaero = ', n_zaero write(nu_diag,1020) ' n_algae = ', n_algae @@ -1270,10 +1295,12 @@ subroutine input_data wave_spec_in=wave_spec, nfreq_in=nfreq, & tfrz_option_in=tfrz_option, kalg_in=kalg, fbot_xfer_type_in=fbot_xfer_type) call icepack_init_tracer_flags(tr_iage_in=tr_iage, tr_FY_in=tr_FY, & - tr_lvl_in=tr_lvl, tr_aero_in=tr_aero, tr_fsd_in=tr_fsd, tr_pond_in=tr_pond, & + tr_lvl_in=tr_lvl, tr_iso_in=tr_iso, tr_aero_in=tr_aero, & + tr_fsd_in=tr_fsd, tr_pond_in=tr_pond, & tr_pond_cesm_in=tr_pond_cesm, tr_pond_lvl_in=tr_pond_lvl, tr_pond_topo_in=tr_pond_topo) call icepack_init_tracer_sizes(ncat_in=ncat, nilyr_in=nilyr, nslyr_in=nslyr, nblyr_in=nblyr, & - nfsd_in=nfsd, n_algae_in=n_algae, n_aero_in=n_aero, n_DOC_in=n_DOC, n_DON_in=n_DON, & + nfsd_in=nfsd, n_algae_in=n_algae, n_iso_in=n_iso, n_aero_in=n_aero, & + n_DOC_in=n_DOC, n_DON_in=n_DON, & n_DIC_in=n_DIC, n_fed_in=n_fed, n_fep_in=n_fep, n_zaero_in=n_zaero) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -1301,7 +1328,7 @@ subroutine init_state use ice_blocks, only: block, get_block, nx_block, ny_block use ice_domain, only: nblocks, blocks_ice - use ice_domain_size, only: ncat, nilyr, nslyr, n_aero, nfsd + use ice_domain_size, only: ncat, nilyr, nslyr, n_iso, n_aero, nfsd use ice_flux, only: sst, Tf, Tair, salinz, Tmltz use ice_grid, only: tmask, ULON, TLAT use ice_state, only: trcr_depend, aicen, trcrn, vicen, vsnon, & @@ -1322,11 +1349,11 @@ subroutine init_state heat_capacity ! from icepack integer (kind=int_kind) :: ntrcr - logical (kind=log_kind) :: tr_iage, tr_FY, tr_lvl, tr_aero, tr_fsd + logical (kind=log_kind) :: tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_fsd logical (kind=log_kind) :: tr_pond_cesm, tr_pond_lvl, tr_pond_topo integer (kind=int_kind) :: nt_Tsfc, nt_sice, nt_qice, nt_qsno, nt_iage, nt_FY - integer (kind=int_kind) :: nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, nt_aero - integer (kind=int_kind) :: nt_fsd + integer (kind=int_kind) :: nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd + integer (kind=int_kind) :: nt_isosno, nt_isoice, nt_aero, nt_fsd type (block) :: & this_block ! 
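The abort_flag = 31 check above ties the two halves of the isotope namelist interface together: tr_iso switches the tracers on, while n_iso reserves slots for them in the tracer array, and enabling one without the other is a configuration error. A reduced sketch of the read-and-validate flow (only three tracer_nml variables are declared, so this would parse only a correspondingly trimmed ice_in; the real group carries many more entries):

    ! Trimmed sketch of the tracer_nml read/validate logic in input_data.
    program demo_tracer_nml
      implicit none
      logical :: tr_iso, restart_iso
      integer :: n_iso, nu_nml, ios

      namelist /tracer_nml/ tr_iso, restart_iso, n_iso

      tr_iso      = .false.   ! defaults, as set before the namelist read
      restart_iso = .false.
      n_iso       = 0

      open (newunit=nu_nml, file='ice_in', status='old', action='read')
      read (nu_nml, nml=tracer_nml, iostat=ios)
      close (nu_nml)
      if (ios /= 0) stop 'error reading tracer_nml'

      ! mirror of the abort_flag = 31 check: enabled but not allocated
      if (tr_iso .and. n_iso == 0) stop 'isotopes activated but n_iso = 0'
      print *, 'tr_iso =', tr_iso, '  n_iso =', n_iso
    end program demo_tracer_nml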
block information for current block @@ -1338,12 +1365,14 @@ subroutine init_state call icepack_query_parameters(heat_capacity_out=heat_capacity) call icepack_query_tracer_sizes(ntrcr_out=ntrcr) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & - tr_lvl_out=tr_lvl, tr_aero_out=tr_aero, tr_fsd_out=tr_fsd, & + tr_lvl_out=tr_lvl, tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_fsd_out=tr_fsd, & tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, tr_pond_topo_out=tr_pond_topo) call icepack_query_tracer_indices(nt_Tsfc_out=nt_Tsfc, nt_sice_out=nt_sice, & nt_qice_out=nt_qice, nt_qsno_out=nt_qsno, nt_iage_out=nt_iage, nt_fy_out=nt_fy, & nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, & - nt_ipnd_out=nt_ipnd, nt_aero_out=nt_aero, nt_fsd_out=nt_fsd) + nt_ipnd_out=nt_ipnd, nt_aero_out=nt_aero, nt_fsd_out=nt_fsd, & + nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) + call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -1423,6 +1452,12 @@ subroutine init_state trcr_depend(nt_fsd + it - 1) = 0 ! area-weighted floe size distribution enddo endif + if (tr_iso) then ! isotopes + do it = 1, n_iso + trcr_depend(nt_isosno+it-1) = 2 ! snow + trcr_depend(nt_isoice+it-1) = 1 ! ice + enddo + endif if (tr_aero) then ! volume-weighted aerosols do it = 1, n_aero trcr_depend(nt_aero+(it-1)*4 ) = 2 ! snow diff --git a/cicecore/cicedynB/general/ice_step_mod.F90 b/cicecore/cicedynB/general/ice_step_mod.F90 index b8d796710..e389adc87 100644 --- a/cicecore/cicedynB/general/ice_step_mod.F90 +++ b/cicecore/cicedynB/general/ice_step_mod.F90 @@ -27,7 +27,7 @@ module ice_step_mod use icepack_intfc, only: icepack_biogeochemistry, icepack_load_ocean_bio_array use icepack_intfc, only: icepack_max_algae, icepack_max_nbtrcr, icepack_max_don use icepack_intfc, only: icepack_max_doc, icepack_max_dic, icepack_max_aero - use icepack_intfc, only: icepack_max_fe + use icepack_intfc, only: icepack_max_fe, icepack_max_iso use icepack_intfc, only: icepack_query_parameters use icepack_intfc, only: icepack_query_tracer_flags, icepack_query_tracer_sizes use icepack_intfc, only: icepack_query_tracer_indices @@ -161,7 +161,7 @@ subroutine step_therm1 (dt, iblk) use ice_blocks, only: block, get_block, nx_block, ny_block use ice_calendar, only: yday use ice_domain, only: blocks_ice - use ice_domain_size, only: ncat, nilyr, nslyr, n_aero + use ice_domain_size, only: ncat, nilyr, nslyr, n_iso, n_aero use ice_flux, only: frzmlt, sst, Tf, strocnxT, strocnyT, rside, fbot, Tbot, Tsnice, & meltsn, melttn, meltbn, congeln, snoicen, uatm, vatm, fside, & wind, rhoa, potT, Qa, zlvl, strax, stray, flatn, fsensn, fsurfn, fcondtopn, & @@ -170,7 +170,8 @@ subroutine step_therm1 (dt, iblk) flat, fswabs, flwout, evap, evaps, evapi, Tref, Qref, Uref, fresh, fsalt, fhocn, & fswthru, meltt, melts, meltb, congel, snoice, & flatn_f, fsensn_f, fsurfn_f, fcondtopn_f - use ice_flux_bgc, only: dsnown, faero_atm, faero_ocn + use ice_flux_bgc, only: dsnown, faero_atm, faero_ocn, fiso_atm, fiso_ocn, & + Qa_iso, Qref_iso, fiso_evap, HDO_ocn, H2_16O_ocn, H2_18O_ocn use ice_grid, only: lmask_n, lmask_s, tmask use ice_state, only: aice, aicen, aice_init, aicen_init, vicen_init, & vice, vicen, vsno, vsnon, trcrn, uvel, vvel, vsnon_init @@ -198,10 +199,11 @@ subroutine step_therm1 (dt, iblk) integer (kind=int_kind) :: & ntrcr, nt_apnd, nt_hpnd, nt_ipnd, nt_alvl, nt_vlvl, nt_Tsfc, & - nt_iage, nt_FY, nt_qice, nt_sice, nt_aero, nt_qsno 
+ nt_iage, nt_FY, nt_qice, nt_sice, nt_aero, nt_qsno, & + nt_isosno, nt_isoice logical (kind=log_kind) :: & - tr_iage, tr_FY, tr_aero, tr_pond, tr_pond_cesm, & + tr_iage, tr_FY, tr_iso, tr_aero, tr_pond, tr_pond_cesm, & tr_pond_lvl, tr_pond_topo, calc_Tsfc real (kind=dbl_kind) :: & @@ -210,6 +212,9 @@ subroutine step_therm1 (dt, iblk) real (kind=dbl_kind), dimension(n_aero,2,ncat) :: & aerosno, aeroice ! kg/m^2 + real (kind=dbl_kind), dimension(n_iso,ncat) :: & + isosno, isoice ! kg/m^2 + type (block) :: & this_block ! block information for current block @@ -219,7 +224,7 @@ subroutine step_therm1 (dt, iblk) call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc) call icepack_query_tracer_sizes(ntrcr_out=ntrcr) call icepack_query_tracer_flags( & - tr_iage_out=tr_iage, tr_FY_out=tr_FY, & + tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_iso_out=tr_iso, & tr_aero_out=tr_aero, tr_pond_out=tr_pond, tr_pond_cesm_out=tr_pond_cesm, & tr_pond_lvl_out=tr_pond_lvl, tr_pond_topo_out=tr_pond_topo) call icepack_query_tracer_indices( & @@ -227,7 +232,8 @@ subroutine step_therm1 (dt, iblk) nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, nt_Tsfc_out=nt_Tsfc, & nt_iage_out=nt_iage, nt_FY_out=nt_FY, & nt_qice_out=nt_qice, nt_sice_out=nt_sice, & - nt_aero_out=nt_aero, nt_qsno_out=nt_qsno) + nt_aero_out=nt_aero, nt_qsno_out=nt_qsno, & + nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -236,6 +242,8 @@ subroutine step_therm1 (dt, iblk) prescribed_ice = .false. #endif + isosno (:,:) = c0 + isoice (:,:) = c0 aerosno(:,:,:) = c0 aeroice(:,:,:) = c0 @@ -270,8 +278,16 @@ subroutine step_therm1 (dt, iblk) do j = jlo, jhi do i = ilo, ihi - if (tr_aero) then - ! trcrn(nt_aero) has units kg/m^3 + if (tr_iso) then ! trcrn(nt_iso*) has units kg/m^3 + do n=1,ncat + do k=1,n_iso + isosno(k,n) = trcrn(i,j,nt_isosno+k-1,n,iblk) * vsnon_init(i,j,n,iblk) + isoice(k,n) = trcrn(i,j,nt_isoice+k-1,n,iblk) * vicen_init(i,j,n,iblk) + enddo + enddo + endif ! tr_iso + + if (tr_aero) then ! 
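The isotope tracers make a unit round trip through the vertical thermodynamics: trcrn stores concentrations (kg/m^3), so step_therm1 multiplies by the category snow and ice volumes on the way in, advances the conserved masses (kg/m^2), and divides the volumes back out afterwards, just as it already does for aerosols. A scalar sketch of that round trip (the values and the elided thermo update are illustrative only):

    ! Sketch of the kg/m^3 <-> kg/m^2 round trip used for isosno/isoice.
    program demo_tracer_roundtrip
      implicit none
      integer, parameter :: dbl_kind = kind(1.d0)
      real (kind=dbl_kind), parameter :: puny = 1.0e-11_dbl_kind
      real (kind=dbl_kind) :: trcr, vsnon, isosno

      trcr  = 2.0e-3_dbl_kind   ! tracer concentration (kg/m^3)
      vsnon = 0.25_dbl_kind     ! snow volume per unit area (m)

      isosno = trcr * vsnon     ! to conserved units (kg/m^2) before thermo

      ! ... thermodynamics would update isosno and vsnon here ...

      if (vsnon > puny) trcr = isosno / vsnon  ! back to kg/m^3 afterwards
      print *, 'concentration (kg/m^3):', trcr
    end program demo_tracer_roundtrip

The puny guard matches the vicen/vsnon > puny tests above and avoids dividing by a vanished volume.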
trcrn(nt_aero) has units kg/m^3 do n=1,ncat do k=1,n_aero aerosno (k,:,n) = & @@ -311,15 +327,19 @@ subroutine step_therm1 (dt, iblk) FY = trcrn (i,j,nt_FY ,:,iblk), & aerosno = aerosno (:,:,:), & aeroice = aeroice (:,:,:), & + isosno = isosno (:,:), & + isoice = isoice (:,:), & uatm = uatm (i,j, iblk), & vatm = vatm (i,j, iblk), & wind = wind (i,j, iblk), & zlvl = zlvl (i,j, iblk), & Qa = Qa (i,j, iblk), & + Qa_iso = Qa_iso (i,j,:,iblk), & rhoa = rhoa (i,j, iblk), & Tair = Tair (i,j, iblk), & Tref = Tref (i,j, iblk), & Qref = Qref (i,j, iblk), & + Qref_iso = Qref_iso (i,j,:,iblk), & Uref = Uref (i,j, iblk), & Cdn_atm_ratio= Cdn_atm_ratio(i,j, iblk), & Cdn_ocn = Cdn_ocn (i,j, iblk), & @@ -389,6 +409,12 @@ subroutine step_therm1 (dt, iblk) fcondtopn_f = fcondtopn_f (i,j,:,iblk), & faero_atm = faero_atm (i,j,1:n_aero,iblk), & faero_ocn = faero_ocn (i,j,1:n_aero,iblk), & + fiso_atm = fiso_atm (i,j,:,iblk), & + fiso_ocn = fiso_ocn (i,j,:,iblk), & + fiso_evap = fiso_evap (i,j,:,iblk), & + HDO_ocn = HDO_ocn (i,j, iblk), & + H2_16O_ocn = H2_16O_ocn (i,j, iblk), & + H2_18O_ocn = H2_18O_ocn (i,j, iblk), & dhsn = dhsn (i,j,:,iblk), & ffracn = ffracn (i,j,:,iblk), & meltt = meltt (i,j, iblk), & @@ -408,6 +434,19 @@ subroutine step_therm1 (dt, iblk) frz_onset = frz_onset (i,j, iblk), & yday=yday, prescribed_ice=prescribed_ice) + if (tr_iso) then + do n = 1, ncat + if (vicen(i,j,n,iblk) > puny) & + isoice(:,n) = isoice(:,n)/vicen(i,j,n,iblk) + if (vsnon(i,j,n,iblk) > puny) & + isosno(:,n) = isosno(:,n)/vsnon(i,j,n,iblk) + do k = 1, n_iso + trcrn(i,j,nt_isosno+k-1,n,iblk) = isosno(k,n) + trcrn(i,j,nt_isoice+k-1,n,iblk) = isoice(k,n) + enddo + enddo + endif ! tr_iso + if (tr_aero) then do n = 1, ncat if (vicen(i,j,n,iblk) > puny) & @@ -452,7 +491,8 @@ subroutine step_therm2 (dt, iblk) use ice_flux, only: fresh, frain, fpond, frzmlt, frazil, frz_onset, & update_ocn_f, fsalt, Tf, sss, salinz, fhocn, rside, fside, & meltl, frazil_diag - use ice_flux_bgc, only: flux_bio, faero_ocn + use ice_flux_bgc, only: flux_bio, faero_ocn, & + fiso_ocn, HDO_ocn, H2_16O_ocn, H2_18O_ocn use ice_grid, only: tmask use ice_state, only: aice, aicen, aice0, trcr_depend, & aicen_init, vicen_init, trcrn, vicen, vsnon, & @@ -550,7 +590,12 @@ subroutine step_therm2 (dt, iblk) ocean_bio = ocean_bio (i,j,1:nbtrcr,iblk), & frazil_diag= frazil_diag(i,j,iblk), & frz_onset = frz_onset (i,j, iblk), & - yday = yday, nfsd=nfsd, & + yday = yday, & + fiso_ocn = fiso_ocn (i,j,:,iblk), & + HDO_ocn = HDO_ocn (i,j, iblk), & + H2_16O_ocn = H2_16O_ocn(i,j, iblk), & + H2_18O_ocn = H2_18O_ocn(i,j, iblk), & + nfsd = nfsd, & wave_sig_ht= wave_sig_ht(i,j,iblk), & wave_spectrum = wave_spectrum(i,j,:,iblk), & wavefreq = wavefreq(:), & @@ -820,7 +865,7 @@ subroutine step_dyn_ridge (dt, ndtd, iblk) dvirdgdt, opening, fpond, fresh, fhocn, & aparticn, krdgn, aredistn, vredistn, dardg1ndt, dardg2ndt, & dvirdgndt, araftn, vraftn, fsalt - use ice_flux_bgc, only: flux_bio, faero_ocn + use ice_flux_bgc, only: flux_bio, faero_ocn, fiso_ocn use ice_grid, only: tmask use ice_state, only: trcrn, vsnon, aicen, vicen, & aice, aice0, trcr_depend, n_trcr_strata, & @@ -897,6 +942,7 @@ subroutine step_dyn_ridge (dt, ndtd, iblk) fresh = fresh (i,j, iblk), & fhocn = fhocn (i,j, iblk), & faero_ocn = faero_ocn(i,j,:,iblk), & + fiso_ocn = fiso_ocn (i,j,:,iblk), & aparticn = aparticn (i,j,:,iblk), & krdgn = krdgn (i,j,:,iblk), & aredistn = aredistn (i,j,:,iblk), & diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90 
b/cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90 index 3dce5a42e..884ee6331 100644 --- a/cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90 +++ b/cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90 @@ -100,19 +100,19 @@ module ice_boundary ! !----------------------------------------------------------------------- - integer (int_kind) :: & + integer (int_kind), public :: & bufSizeSend, &! max buffer size for send messages bufSizeRecv ! max buffer size for recv messages - integer (int_kind), dimension(:,:), allocatable :: & + integer (int_kind), dimension(:,:), allocatable, public :: & bufSendI4, &! buffer for use to send in 2D i4 halo updates bufRecvI4 ! buffer for use to recv in 2D i4 halo updates - real (real_kind), dimension(:,:), allocatable :: & + real (real_kind), dimension(:,:), allocatable, public :: & bufSendR4, &! buffer for use to send in 2D r4 halo updates bufRecvR4 ! buffer for use to recv in 2D r4 halo updates - real (dbl_kind), dimension(:,:), allocatable :: & + real (dbl_kind), dimension(:,:), allocatable, public :: & bufSendR8, &! buffer for use to send in 2D r8 halo updates bufRecvR8 ! buffer for use to recv in 2D r8 halo updates @@ -122,13 +122,13 @@ module ice_boundary ! !----------------------------------------------------------------------- - integer (int_kind), dimension(:,:), allocatable :: & + integer (int_kind), dimension(:,:), allocatable, public :: & bufTripoleI4 - real (real_kind), dimension(:,:), allocatable :: & + real (real_kind), dimension(:,:), allocatable, public :: & bufTripoleR4 - real (dbl_kind), dimension(:,:), allocatable :: & + real (dbl_kind), dimension(:,:), allocatable, public :: & bufTripoleR8 !*********************************************************************** diff --git a/cicecore/cicedynB/infrastructure/ice_blocks.F90 b/cicecore/cicedynB/infrastructure/ice_blocks.F90 index b95ad6acb..5177dd047 100644 --- a/cicecore/cicedynB/infrastructure/ice_blocks.F90 +++ b/cicecore/cicedynB/infrastructure/ice_blocks.F90 @@ -89,15 +89,15 @@ module ice_blocks ! !----------------------------------------------------------------------- - type (block), dimension(:), allocatable :: & + type (block), dimension(:), allocatable, public :: & all_blocks ! block information for all blocks in domain - integer (int_kind), dimension(:,:),allocatable :: & + integer (int_kind), dimension(:,:),allocatable, public :: & all_blocks_ij ! block index stored in Cartesian order ! useful for determining block index ! of neighbor blocks - integer (int_kind), dimension(:,:), allocatable, target :: & + integer (int_kind), dimension(:,:), allocatable, target, public :: & i_global, &! global i index for each point in each block j_global ! global j index for each point in each block @@ -157,10 +157,10 @@ subroutine create_blocks(nx_global, ny_global, ew_boundary_type, & ! !---------------------------------------------------------------------- - allocate(all_blocks(nblocks_tot)) - allocate(i_global(nx_block,nblocks_tot), & - j_global(ny_block,nblocks_tot)) - allocate(all_blocks_ij(nblocks_x,nblocks_y)) + if (.not.allocated(all_blocks)) allocate(all_blocks(nblocks_tot)) + if (.not.allocated(i_global)) allocate(i_global(nx_block,nblocks_tot)) + if (.not.allocated(j_global)) allocate(j_global(ny_block,nblocks_tot)) + if (.not.allocated(all_blocks_ij)) allocate(all_blocks_ij(nblocks_x,nblocks_y)) !---------------------------------------------------------------------- ! 
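The create_blocks change at the end of the ice_blocks.F90 hunk above makes the allocations idempotent: a second call, for instance from a driver that re-enters grid initialization, now skips the allocate instead of aborting with an already-allocated error. The guard pattern in isolation (the array and routine names are stand-ins):

    ! Sketch of the allocate-once guard added in create_blocks.
    program demo_alloc_guard
      implicit none
      integer, dimension(:), allocatable :: all_blocks_demo  ! stand-in array

      call create_demo (10)
      call create_demo (10)   ! second call is now safe: allocate is skipped
      print *, size(all_blocks_demo)

    contains

      subroutine create_demo (n)
        integer, intent(in) :: n
        ! an unguarded allocate would abort here on the second call
        if (.not. allocated(all_blocks_demo)) allocate (all_blocks_demo(n))
      end subroutine create_demo

    end program demo_alloc_guard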
diff --git a/cicecore/cicedynB/infrastructure/ice_domain.F90 b/cicecore/cicedynB/infrastructure/ice_domain.F90 index 51a9eaa69..3be2449f7 100644 --- a/cicecore/cicedynB/infrastructure/ice_domain.F90 +++ b/cicecore/cicedynB/infrastructure/ice_domain.F90 @@ -188,6 +188,7 @@ subroutine init_domain_blocks ( (dble(nx_global-1)/dble(block_size_x + 1)) * & (dble(ny_global-1)/dble(block_size_y + 1)) ) & / dble(nprocs)) + max_blocks=max(1,max_blocks) write(nu_diag,'(/,a52,i6,/)') & '(ice_domain): max_block < 1: max_block estimated to ',max_blocks endif diff --git a/cicecore/cicedynB/infrastructure/ice_restoring.F90 b/cicecore/cicedynB/infrastructure/ice_restoring.F90 index 1ef7b9531..09db9c273 100644 --- a/cicecore/cicedynB/infrastructure/ice_restoring.F90 +++ b/cicecore/cicedynB/infrastructure/ice_restoring.F90 @@ -32,12 +32,12 @@ module ice_restoring ! state of the ice for each category !----------------------------------------------------------------- - real (kind=dbl_kind), dimension (:,:,:,:), allocatable :: & + real (kind=dbl_kind), dimension (:,:,:,:), allocatable, public :: & aicen_rest , & ! concentration of ice vicen_rest , & ! volume per unit area of ice (m) vsnon_rest ! volume per unit area of snow (m) - real (kind=dbl_kind), dimension (:,:,:,:,:), allocatable :: & + real (kind=dbl_kind), dimension (:,:,:,:,:), allocatable, public :: & trcrn_rest ! tracers !======================================================================= diff --git a/cicecore/cicedynB/infrastructure/io/io_binary/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_binary/ice_restart.F90 index 9e9150b6c..8ecfeb6f1 100644 --- a/cicecore/cicedynB/infrastructure/io/io_binary/ice_restart.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_binary/ice_restart.F90 @@ -15,10 +15,11 @@ module ice_restart use ice_fileunits, only: nu_diag, nu_rst_pointer use ice_fileunits, only: nu_dump, nu_dump_eap, nu_dump_FY, nu_dump_age use ice_fileunits, only: nu_dump_lvl, nu_dump_pond, nu_dump_hbrine - use ice_fileunits, only: nu_dump_bgc, nu_dump_aero, nu_dump_fsd + use ice_fileunits, only: nu_dump_bgc, nu_dump_aero, nu_dump_fsd, nu_dump_iso use ice_fileunits, only: nu_restart, nu_restart_eap, nu_restart_FY, nu_restart_age use ice_fileunits, only: nu_restart_lvl, nu_restart_pond, nu_restart_hbrine use ice_fileunits, only: nu_restart_bgc, nu_restart_aero, nu_restart_fsd + use ice_fileunits, only: nu_restart_iso use ice_exit, only: abort_ice use icepack_intfc, only: icepack_query_parameters use icepack_intfc, only: icepack_query_tracer_sizes @@ -52,7 +53,7 @@ subroutine init_restart_read(ice_ic) logical (kind=log_kind) :: & solve_zsal, tr_fsd, & - tr_iage, tr_FY, tr_lvl, tr_aero, tr_pond_cesm, & + tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_pond_cesm, & tr_pond_topo, tr_pond_lvl, tr_brine character(len=char_len_long) :: & @@ -77,7 +78,7 @@ subroutine init_restart_read(ice_ic) nbtrcr_out=nbtrcr) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_lvl_out=tr_lvl, tr_fsd_out=tr_fsd, & - tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & + tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, tr_brine_out=tr_brine) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -320,6 +321,26 @@ subroutine init_restart_read(ice_ic) endif endif + if (tr_iso) then + if (my_task == master_task) then + n = index(filename0,trim(restart_file)) + if (n == 0) call abort_ice(subname//'ERROR: iso 
restart: filename discrepancy') + string1 = trim(filename0(1:n-1)) + string2 = trim(filename0(n+lenstr(restart_file):lenstr(filename0))) + write(filename,'(a,a,a,a)') & + string1(1:lenstr(string1)), & + restart_file(1:lenstr(restart_file)),'.iso', & + string2(1:lenstr(string2)) + if (restart_ext) then + call ice_open_ext(nu_restart_iso,filename,0) + else + call ice_open(nu_restart_iso,filename,0) + endif + read (nu_restart_iso) iignore,rignore,rignore + write(nu_diag,*) 'Reading ',filename(1:lenstr(filename)) + endif + endif + if (tr_aero) then if (my_task == master_task) then n = index(filename0,trim(restart_file)) @@ -366,7 +387,7 @@ subroutine init_restart_write(filename_spec) logical (kind=log_kind) :: & solve_zsal, tr_fsd, & - tr_iage, tr_FY, tr_lvl, tr_aero, tr_pond_cesm, & + tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_pond_cesm, & tr_pond_topo, tr_pond_lvl, tr_brine integer (kind=int_kind) :: & @@ -383,7 +404,7 @@ subroutine init_restart_write(filename_spec) nbtrcr_out=nbtrcr) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_lvl_out=tr_lvl, tr_fsd_out=tr_fsd, & - tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & + tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, tr_brine_out=tr_brine) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -618,6 +639,26 @@ subroutine init_restart_write(filename_spec) endif endif + if (tr_iso) then + + write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & + restart_dir(1:lenstr(restart_dir)), & + restart_file(1:lenstr(restart_file)),'.iso.', & + iyear,'-',month,'-',mday,'-',sec + + if (restart_ext) then + call ice_open_ext(nu_dump_iso,filename,0) + else + call ice_open(nu_dump_iso,filename,0) + endif + + if (my_task == master_task) then + write(nu_dump_iso) istep1,time,time_forc + write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) + endif + + endif + if (tr_aero) then write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & @@ -767,7 +808,7 @@ subroutine final_restart() logical (kind=log_kind) :: & solve_zsal, & - tr_iage, tr_FY, tr_lvl, tr_aero, tr_pond_cesm, & + tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_pond_cesm, & tr_pond_topo, tr_pond_lvl, tr_brine integer (kind=int_kind) :: & @@ -781,7 +822,7 @@ subroutine final_restart() nbtrcr_out=nbtrcr) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_lvl_out=tr_lvl, & - tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & + tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, tr_brine_out=tr_brine) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -790,6 +831,7 @@ subroutine final_restart() if (my_task == master_task) then close(nu_dump) + if (tr_iso) close(nu_dump_iso) if (tr_aero) close(nu_dump_aero) if (tr_iage) close(nu_dump_age) if (tr_FY) close(nu_dump_FY) diff --git a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 index ecabcc089..d4decf6f7 100644 --- a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 @@ -113,7 +113,7 @@ subroutine init_restart_write(filename_spec) time, time_forc, year_init use ice_communicate, only: my_task, master_task use ice_domain_size, only: nx_global, ny_global, ncat, nilyr, 
nslyr, & - n_aero, nblyr, n_zaero, n_algae, n_doc, & + n_iso, n_aero, nblyr, n_zaero, n_algae, n_doc, & n_dic, n_don, n_fed, n_fep, nfsd use ice_arrays_column, only: oceanmixed_ice use ice_dyn_shared, only: kdyn @@ -124,13 +124,13 @@ subroutine init_restart_write(filename_spec) logical (kind=log_kind) :: & solve_zsal, skl_bgc, z_tracers, tr_fsd, & - tr_iage, tr_FY, tr_lvl, tr_aero, tr_pond_cesm, & + tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_pond_cesm, & tr_pond_topo, tr_pond_lvl, tr_brine, & tr_bgc_N, tr_bgc_C, tr_bgc_Nit, & tr_bgc_Sil, tr_bgc_DMS, & - tr_bgc_chl, tr_bgc_Am, & + tr_bgc_chl, tr_bgc_Am, & tr_bgc_PON, tr_bgc_DON, & - tr_zaero, tr_bgc_Fe, & + tr_zaero, tr_bgc_Fe, & tr_bgc_hum integer (kind=int_kind) :: & @@ -160,7 +160,7 @@ subroutine init_restart_write(filename_spec) nbtrcr_out=nbtrcr) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_lvl_out=tr_lvl, tr_fsd_out=tr_fsd, & - tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & + tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, tr_brine_out=tr_brine, & tr_bgc_N_out=tr_bgc_N, tr_bgc_C_out=tr_bgc_C, tr_bgc_Nit_out=tr_bgc_Nit, & tr_bgc_Sil_out=tr_bgc_Sil, tr_bgc_DMS_out=tr_bgc_DMS, & @@ -470,6 +470,14 @@ subroutine init_restart_write(filename_spec) enddo endif + if (tr_iso) then + do k=1,n_iso + write(nchar,'(i3.3)') k + call define_rest_field(ncid,'isosno'//trim(nchar),dims) + call define_rest_field(ncid,'isoice'//trim(nchar),dims) + enddo + endif + if (tr_aero) then do k=1,n_aero write(nchar,'(i3.3)') k diff --git a/cicecore/cicedynB/infrastructure/io/io_pio/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_pio/ice_restart.F90 index 8dc9e94a9..d673c7f7a 100644 --- a/cicecore/cicedynB/infrastructure/io/io_pio/ice_restart.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_pio/ice_restart.F90 @@ -125,7 +125,7 @@ subroutine init_restart_write(filename_spec) time, time_forc, year_init use ice_communicate, only: my_task, master_task use ice_domain_size, only: nx_global, ny_global, ncat, nilyr, nslyr, & - n_aero, nblyr, n_zaero, n_algae, n_doc, & + n_iso, n_aero, nblyr, n_zaero, n_algae, n_doc, & n_dic, n_don, n_fed, n_fep use ice_dyn_shared, only: kdyn use ice_arrays_column, only: oceanmixed_ice @@ -134,7 +134,7 @@ subroutine init_restart_write(filename_spec) solve_zsal, skl_bgc, z_tracers logical (kind=log_kind) :: & - tr_iage, tr_FY, tr_lvl, tr_aero, tr_pond_cesm, & + tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_pond_cesm, & tr_pond_topo, tr_pond_lvl, tr_brine, & tr_bgc_N, tr_bgc_C, tr_bgc_Nit, & tr_bgc_Sil, tr_bgc_DMS, & @@ -171,7 +171,7 @@ subroutine init_restart_write(filename_spec) call icepack_query_tracer_sizes(nbtrcr_out=nbtrcr) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_lvl_out=tr_lvl, & - tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & + tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, tr_brine_out=tr_brine, & tr_bgc_N_out=tr_bgc_N, tr_bgc_C_out=tr_bgc_C, tr_bgc_Nit_out=tr_bgc_Nit, & tr_bgc_Sil_out=tr_bgc_Sil, tr_bgc_DMS_out=tr_bgc_DMS, & @@ -473,6 +473,14 @@ subroutine init_restart_write(filename_spec) call define_rest_field(File,'qsno'//trim(nchar),dims) enddo + if (tr_iso) then + do k=1,n_iso + write(nchar,'(i3.3)') k + call define_rest_field(File,'isosno'//nchar, dims) + call define_rest_field(File,'isoice'//nchar, dims) + enddo + endif + if (tr_aero) then do k=1,n_aero 
write(nchar,'(i3.3)') k diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 index 8dc9e94a9..d673c7f7a 100644 --- a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 @@ -125,7 +125,7 @@ subroutine init_restart_write(filename_spec) time, time_forc, year_init use ice_communicate, only: my_task, master_task use ice_domain_size, only: nx_global, ny_global, ncat, nilyr, nslyr, & - n_aero, nblyr, n_zaero, n_algae, n_doc, & + n_iso, n_aero, nblyr, n_zaero, n_algae, n_doc, & n_dic, n_don, n_fed, n_fep use ice_dyn_shared, only: kdyn use ice_arrays_column, only: oceanmixed_ice @@ -134,7 +134,7 @@ subroutine init_restart_write(filename_spec) solve_zsal, skl_bgc, z_tracers logical (kind=log_kind) :: & - tr_iage, tr_FY, tr_lvl, tr_aero, tr_pond_cesm, & + tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_pond_cesm, & tr_pond_topo, tr_pond_lvl, tr_brine, & tr_bgc_N, tr_bgc_C, tr_bgc_Nit, & tr_bgc_Sil, tr_bgc_DMS, & @@ -171,7 +171,7 @@ subroutine init_restart_write(filename_spec) call icepack_query_tracer_sizes(nbtrcr_out=nbtrcr) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_lvl_out=tr_lvl, & - tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & + tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, tr_brine_out=tr_brine, & tr_bgc_N_out=tr_bgc_N, tr_bgc_C_out=tr_bgc_C, tr_bgc_Nit_out=tr_bgc_Nit, & tr_bgc_Sil_out=tr_bgc_Sil, tr_bgc_DMS_out=tr_bgc_DMS, & @@ -473,6 +473,14 @@ subroutine init_restart_write(filename_spec) call define_rest_field(File,'qsno'//trim(nchar),dims) enddo + if (tr_iso) then + do k=1,n_iso + write(nchar,'(i3.3)') k + call define_rest_field(File,'isosno'//nchar, dims) + call define_rest_field(File,'isoice'//nchar, dims) + enddo + endif + if (tr_aero) then do k=1,n_aero write(nchar,'(i3.3)') k diff --git a/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 b/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 index b5d2608a3..b72745e30 100644 --- a/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 +++ b/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 @@ -77,7 +77,7 @@ subroutine cice_init(mpicom_ice) use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & get_forcing_atmo, get_forcing_ocn, get_wave_spec use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & - faero_default, faero_optics, alloc_forcing_bgc + faero_default, faero_optics, alloc_forcing_bgc, fiso_default use ice_grid, only: init_grid1, init_grid2, alloc_grid use ice_history, only: init_hist, accum_hist use ice_restart_shared, only: restart, runtype @@ -95,7 +95,7 @@ subroutine cice_init(mpicom_ice) mpicom_ice ! communicator for sequential ccsm logical(kind=log_kind) :: tr_aero, tr_zaero, skl_bgc, z_tracers, & - tr_fsd, wave_spec + tr_iso, tr_fsd, wave_spec character(len=*), parameter :: subname = '(cice_init)' call init_communicate(mpicom_ice) ! initial setup for message passing @@ -122,7 +122,6 @@ subroutine cice_init(mpicom_ice) call ice_timer_start(timer_total) ! start timing entire run call init_grid2 ! grid variables call init_zbgc ! vertical biogeochemistry initialization - call init_calendar ! initialize some calendar stuff call init_hist (dt) ! initialize output history file @@ -134,6 +133,7 @@ subroutine cice_init(mpicom_ice) endif call init_coupler_flux ! initialize fluxes exchanged with coupler + #ifdef popcice call sst_sss ! 
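The isosno/isoice restart fields registered in the netCDF and PIO writers above are numbered with an i3.3 integer edit descriptor, producing one field pair per isotope tracer (isosno001, isoice001, ...). A small sketch of that naming scheme (n_iso = 3 is an arbitrary illustrative count):

    ! Sketch of the numbered restart-field naming used for the iso tracers.
    program demo_field_names
      implicit none
      integer, parameter :: n_iso = 3   ! illustrative tracer count
      integer :: k
      character (len=3) :: nchar

      do k = 1, n_iso
        write (nchar,'(i3.3)') k        ! 001, 002, 003
        print *, 'isosno'//nchar//'  isoice'//nchar
      enddo
    end program demo_field_names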
POP data for CICE initialization #endif @@ -181,6 +181,7 @@ subroutine cice_init(mpicom_ice) call init_history_dyn ! initialize dynamic history variables call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_zaero_out=tr_zaero) + call icepack_query_tracer_flags(tr_iso_out=tr_iso) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(trim(subname), & file=__FILE__,line= __LINE__) @@ -211,6 +212,8 @@ subroutine cice_init(mpicom_ice) call get_forcing_atmo ! atmospheric forcing from data call get_forcing_ocn(dt) ! ocean forcing from data + ! isotopes + if (tr_iso) call fiso_default ! default values ! aerosols ! if (tr_aero) call faero_data ! data file ! if (tr_zaero) call fzaero_data ! data file (gx1) @@ -239,20 +242,21 @@ subroutine init_restart use ice_calendar, only: time, calendar use ice_constants, only: c0 use ice_domain, only: nblocks - use ice_domain_size, only: ncat, n_aero, nfsd + use ice_domain_size, only: ncat, n_iso, n_aero, nfsd use ice_dyn_eap, only: read_restart_eap use ice_dyn_shared, only: kdyn use ice_grid, only: tmask use ice_init, only: ice_ic use ice_init_column, only: init_age, init_FY, init_lvl, & init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & - init_aerosol, init_hbrine, init_bgc, init_fsd + init_isotope, init_aerosol, init_hbrine, init_bgc, init_fsd use ice_restart_column, only: restart_age, read_restart_age, & restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & restart_pond_cesm, read_restart_pond_cesm, & restart_pond_lvl, read_restart_pond_lvl, & restart_pond_topo, read_restart_pond_topo, & restart_fsd, read_restart_fsd, & + restart_iso, read_restart_iso, & restart_aero, read_restart_aero, & restart_hbrine, read_restart_hbrine, & restart_zsal, restart_bgc @@ -265,13 +269,13 @@ subroutine init_restart iblk ! block index logical(kind=log_kind) :: & tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & - tr_pond_topo, tr_fsd, tr_aero, tr_brine, & + tr_pond_topo, tr_fsd, tr_iso, tr_aero, tr_brine, & skl_bgc, z_tracers, solve_zsal integer(kind=int_kind) :: & ntrcr integer(kind=int_kind) :: & nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, & - nt_iage, nt_FY, nt_aero, nt_fsd + nt_iage, nt_FY, nt_aero, nt_fsd, nt_isosno, nt_isoice character(len=*), parameter :: subname = '(init_restart)' @@ -285,10 +289,11 @@ subroutine init_restart call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & - tr_fsd_out=tr_fsd) + tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, nt_ipnd_out=nt_ipnd, & - nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_aero_out=nt_aero, nt_fsd_out=nt_fsd) + nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_aero_out=nt_aero, nt_fsd_out=nt_fsd, & + nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -393,6 +398,20 @@ subroutine init_restart call init_fsd(trcrn(:,:,nt_fsd:nt_fsd+nfsd-1,:,:)) endif endif + + ! isotopes + if (tr_iso) then + if (trim(runtype) == 'continue') restart_iso = .true. + if (restart_iso) then + call read_restart_iso + else + do iblk = 1, nblocks + call init_isotope(trcrn(:,:,nt_isosno:nt_isosno+n_iso-1,:,iblk), & + trcrn(:,:,nt_isoice:nt_isoice+n_iso-1,:,iblk)) + enddo ! 
iblk + endif + endif + if (tr_aero) then ! ice aerosol if (trim(runtype) == 'continue') restart_aero = .true. if (restart_aero) then diff --git a/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 b/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 index 26d40a431..09cffa0c7 100644 --- a/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 +++ b/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 @@ -22,7 +22,7 @@ module CICE_RunMod use ice_constants, only: field_loc_center, field_type_scalar use ice_exit, only: abort_ice use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted - use icepack_intfc, only: icepack_max_aero + use icepack_intfc, only: icepack_max_iso, icepack_max_aero use icepack_intfc, only: icepack_query_parameters use icepack_intfc, only: icepack_query_tracer_flags, icepack_query_tracer_sizes @@ -48,12 +48,12 @@ subroutine CICE_Run use ice_forcing, only: get_forcing_atmo, get_forcing_ocn, & get_wave_spec use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & - faero_default + fiso_default, faero_default use ice_flux, only: init_flux_atm, init_flux_ocn use ice_timers, only: ice_timer_start, ice_timer_stop, & timer_couple, timer_step logical (kind=log_kind) :: & - tr_aero, tr_zaero, skl_bgc, z_tracers, wave_spec, tr_fsd + tr_iso, tr_aero, tr_zaero, skl_bgc, z_tracers, wave_spec, tr_fsd character(len=*), parameter :: subname = '(CICE_Run)' !-------------------------------------------------------------------- @@ -65,7 +65,8 @@ subroutine CICE_Run call icepack_query_parameters(skl_bgc_out=skl_bgc, & z_tracers_out=z_tracers, & wave_spec_out=wave_spec) - call icepack_query_tracer_flags(tr_aero_out=tr_aero, & + call icepack_query_tracer_flags(tr_iso_out=tr_iso, & + tr_aero_out=tr_aero, & tr_zaero_out=tr_zaero, & tr_fsd_out=tr_fsd) call icepack_warnings_flush(nu_diag) @@ -95,6 +96,8 @@ subroutine CICE_Run call get_forcing_atmo ! atmospheric forcing from data call get_forcing_ocn(dt) ! ocean forcing from data + ! isotopes + if (tr_iso) call fiso_default ! default values ! aerosols ! if (tr_aero) call faero_data ! data file ! if (tr_zaero) call fzaero_data ! data file (gx1) @@ -150,7 +153,7 @@ subroutine ice_step use ice_restart_column, only: write_restart_age, write_restart_FY, & write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & write_restart_pond_topo, write_restart_aero, write_restart_fsd, & - write_restart_bgc, write_restart_hbrine + write_restart_iso, write_restart_bgc, write_restart_hbrine use ice_restart_driver, only: dumpfile use ice_restoring, only: restore_ice, ice_HaloRestore use ice_step_mod, only: prep_radiation, step_therm1, step_therm2, & @@ -172,7 +175,7 @@ subroutine ice_step logical (kind=log_kind) :: & tr_iage, tr_FY, tr_lvl, tr_fsd, & - tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_aero, & + tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec character(len=*), parameter :: subname = '(ice_step)' @@ -183,7 +186,7 @@ subroutine ice_step call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & - tr_fsd_out=tr_fsd) + tr_iso_out=tr_iso, tr_fsd_out=tr_fsd) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -236,7 +239,7 @@ subroutine ice_step if (.not.prescribed_ice) & call step_therm2 (dt, iblk) ! 
ice thickness distribution thermo - endif + endif ! ktherm > 0 enddo ! iblk !$OMP END PARALLEL DO @@ -335,6 +338,7 @@ subroutine ice_step if (tr_pond_lvl) call write_restart_pond_lvl if (tr_pond_topo) call write_restart_pond_topo if (tr_fsd) call write_restart_fsd + if (tr_iso) call write_restart_iso if (tr_aero) call write_restart_aero if (solve_zsal .or. skl_bgc .or. z_tracers) & call write_restart_bgc @@ -369,7 +373,8 @@ subroutine coupling_prep (iblk) swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & fsens, flat, fswabs, flwout, evap, Tref, Qref, & scale_fluxes, frzmlt_init, frzmlt, Uref, wind - use ice_flux_bgc, only: faero_ocn, fzsal_ai, fzsal_g_ai, flux_bio, flux_bio_ai, & + use ice_flux_bgc, only: faero_ocn, fiso_ocn, Qref_iso, fiso_evap, & + fzsal_ai, fzsal_g_ai, flux_bio, flux_bio_ai, & fnit, fsil, famm, fdmsp, fdms, fhum, fdust, falgalN, & fdoc, fdic, fdon, ffep, ffed, bgcflux_ice_to_ocn use ice_grid, only: tmask @@ -552,7 +557,8 @@ subroutine coupling_prep (iblk) !----------------------------------------------------------------- call scale_fluxes (nx_block, ny_block, & - tmask (:,:,iblk), nbtrcr, icepack_max_aero, & + tmask (:,:,iblk), nbtrcr, & + icepack_max_aero, & aice (:,:,iblk), Tf (:,:,iblk), & Tair (:,:,iblk), Qa (:,:,iblk), & strairxT (:,:,iblk), strairyT(:,:,iblk), & @@ -566,7 +572,10 @@ subroutine coupling_prep (iblk) alvdr (:,:,iblk), alidr (:,:,iblk), & alvdf (:,:,iblk), alidf (:,:,iblk), & fzsal (:,:,iblk), fzsal_g (:,:,iblk), & - flux_bio(:,:,1:nbtrcr,iblk), & + flux_bio (:,:,1:nbtrcr,iblk), & + Qref_iso =Qref_iso (:,:,:,iblk), & + fiso_evap=fiso_evap(:,:,:,iblk), & + fiso_ocn =fiso_ocn (:,:,:,iblk), & Uref=Uref(:,:,iblk), wind=wind(:,:,iblk) ) !----------------------------------------------------------------- diff --git a/cicecore/drivers/mct/cesm1/CICE_RunMod.F90_debug b/cicecore/drivers/mct/cesm1/CICE_RunMod.F90_debug deleted file mode 100644 index 4f8b0a352..000000000 --- a/cicecore/drivers/mct/cesm1/CICE_RunMod.F90_debug +++ /dev/null @@ -1,696 +0,0 @@ -!======================================================================= -! -! Main driver for time stepping of CICE. -! -! authors Elizabeth C. Hunke, LANL -! Philip W. Jones, LANL -! William H. Lipscomb, LANL -! -! 2006 ECH: moved exit timeLoop to prevent execution of unnecessary timestep -! 2006 ECH: Streamlined for efficiency -! 2006 ECH: Converted to free source form (F90) -! 2007 BPB: Modified Delta-Eddington shortwave interface -! 2008 ECH: moved ESMF code to its own driver - - module CICE_RunMod - - use ice_kinds_mod - use perf_mod, only : t_startf, t_stopf, t_barrierf - use ice_fileunits, only: nu_diag - use ice_arrays_column, only: oceanmixed_ice - use ice_constants, only: c0, c1 - use ice_constants, only: field_loc_center, field_type_scalar - use ice_exit, only: abort_ice - use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted - use icepack_intfc, only: icepack_max_aero - use icepack_intfc, only: icepack_query_parameters - use icepack_intfc, only: icepack_query_tracer_flags, icepack_query_tracer_numbers - - implicit none - private - public :: CICE_Run, ice_step - -!======================================================================= - - contains - -!======================================================================= -! -! This is the main driver routine for advancing CICE forward in time. -! -! author Elizabeth C. Hunke, LANL -! Philip W. Jones, LANL -! William H. 
Lipscomb, LANL - - subroutine CICE_Run - - use ice_calendar, only: istep, istep1, time, dt, stop_now, calendar - use ice_forcing, only: get_forcing_atmo, get_forcing_ocn, atm_data_type - use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & - faero_default - use ice_flux, only: init_flux_atm, init_flux_ocn - use ice_timers, only: ice_timer_start, ice_timer_stop, & - timer_couple, timer_step - - logical (kind=log_kind) :: & - tr_aero, tr_zaero, skl_bgc, z_tracers - character(len=*), parameter :: subname = '(CICE_Run)' - - !-------------------------------------------------------------------- - ! initialize error code and step timer - !-------------------------------------------------------------------- - - call ice_timer_start(timer_step) ! start timing entire run - - call icepack_query_parameters(skl_bgc_out=skl_bgc, z_tracers_out=z_tracers) - call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_zaero_out=tr_zaero) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - - !-------------------------------------------------------------------- - ! timestep loop - !-------------------------------------------------------------------- - -! timeLoop: do - - istep = istep + 1 ! update time step counters - istep1 = istep1 + 1 - time = time + dt ! determine the time and date - - call ice_timer_start(timer_couple) ! atm/ocn coupling - -#ifndef coupled -#ifndef CESMCOUPLED - call get_forcing_atmo ! atmospheric forcing from data - call get_forcing_ocn(dt) ! ocean forcing from data - - ! aerosols - ! if (tr_aero) call faero_data ! data file - ! if (tr_zaero) call fzaero_data ! data file (gx1) - if (tr_aero .or. tr_zaero) call faero_default ! default values - - if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry -#endif -#endif - if (z_tracers) call get_atm_bgc ! biogeochemistry - - call init_flux_atm ! Initialize atmosphere fluxes sent to coupler - call init_flux_ocn ! initialize ocean fluxes sent to coupler - - call calendar(time) ! at the end of the timestep - - call ice_timer_stop(timer_couple) ! atm/ocn coupling - - call ice_step - -! if (stop_now >= 1) exit timeLoop -! enddo timeLoop - - !-------------------------------------------------------------------- - ! end of timestep loop - !-------------------------------------------------------------------- - - call ice_timer_stop(timer_step) ! end timestepping loop timer - - end subroutine CICE_Run - -!======================================================================= -! -! Calls drivers for physics components, some initialization, and output -! -! author Elizabeth C. Hunke, LANL -! William H. 
Lipscomb, LANL - - subroutine ice_step - - use ice_boundary, only: ice_HaloUpdate - use ice_calendar, only: dt, dt_dyn, ndtd, diagfreq, write_restart, istep - use ice_calendar, only: idate, sec - use ice_diagnostics, only: init_mass_diags, runtime_diags - use ice_diagnostics_bgc, only: hbrine_diags, zsal_diags, bgc_diags - use ice_domain, only: halo_info, nblocks - use ice_dyn_eap, only: write_restart_eap - use ice_dyn_shared, only: kdyn, kridge - use ice_flux, only: scale_factor, init_history_therm, & - daidtt, daidtd, dvidtt, dvidtd, dagedtt, dagedtd - use ice_history, only: accum_hist - use ice_history_bgc, only: init_history_bgc - use ice_restart, only: final_restart - use ice_restart_column, only: write_restart_age, write_restart_FY, & - write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & - write_restart_pond_topo, write_restart_aero, & - write_restart_bgc, write_restart_hbrine - use ice_restart_driver, only: dumpfile - use ice_restoring, only: restore_ice, ice_HaloRestore - use ice_step_mod, only: prep_radiation, step_therm1, step_therm2, & - update_state, step_dyn_horiz, step_dyn_ridge, step_radiation, & - biogeochemistry, save_init - use ice_timers, only: ice_timer_start, ice_timer_stop, & - timer_diags, timer_column, timer_thermo, timer_bound, & - timer_hist, timer_readwrite - use ice_communicate, only: MPI_COMM_ICE - use ice_prescribed_mod - - integer (kind=int_kind) :: & - iblk , & ! block index - k , & ! dynamics supercycling index - ktherm ! thermodynamics is off when ktherm = -1 - - real (kind=dbl_kind) :: & - offset ! d(age)/dt time offset - - logical (kind=log_kind) :: & - tr_iage, tr_FY, tr_lvl, & - tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_aero, & - calc_Tsfc, skl_bgc, solve_zsal, z_tracers - - character(len=*), parameter :: subname = '(ice_step)' - - character (len=char_len) :: plabeld - - plabeld = 'beginning time step' - do iblk = 1, nblocks - call debug_ice (iblk, plabeld) - enddo - - call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc, skl_bgc_out=skl_bgc, & - solve_zsal_out=solve_zsal, z_tracers_out=z_tracers, ktherm_out=ktherm) - call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & - tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & - tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - - !----------------------------------------------------------------- - ! restoring on grid boundaries - !----------------------------------------------------------------- - - if (restore_ice) call ice_HaloRestore - - !----------------------------------------------------------------- - ! initialize diagnostics and save initial state values - !----------------------------------------------------------------- - - call ice_timer_start(timer_diags) ! diagnostics/history - call init_mass_diags ! diagnostics per timestep - call init_history_therm - call init_history_bgc - call ice_timer_stop(timer_diags) ! diagnostics/history - - if (prescribed_ice) then ! read prescribed ice - call t_barrierf('cice_run_presc_BARRIER',MPI_COMM_ICE) - call t_startf ('cice_run_presc') - call ice_prescribed_run(idate, sec) - call t_stopf ('cice_run_presc') - endif - - call save_init - - call ice_timer_start(timer_column) ! column physics - call ice_timer_start(timer_thermo) ! 
thermodynamics - - !$OMP PARALLEL DO PRIVATE(iblk) - do iblk = 1, nblocks - - if (ktherm >= 0) then - - !----------------------------------------------------------------- - ! scale radiation fields - !----------------------------------------------------------------- - - if (calc_Tsfc) call prep_radiation (iblk) - - plabeld = 'post prep_radiation' - call debug_ice (iblk, plabeld) - - !----------------------------------------------------------------- - ! thermodynamics and biogeochemistry - !----------------------------------------------------------------- - - call step_therm1 (dt, iblk) ! vertical thermodynamics - - plabeld = 'post step_therm1' - call debug_ice (iblk, plabeld) - - call biogeochemistry (dt, iblk) ! biogeochemistry - - plabeld = 'post biogeochemistry' - call debug_ice (iblk, plabeld) - - if (.not.prescribed_ice) & - call step_therm2 (dt, iblk) ! ice thickness distribution thermo - - plabeld = 'post step_therm2' - call debug_ice (iblk, plabeld) - - endif - - enddo ! iblk - !$OMP END PARALLEL DO - - ! clean up, update tendency diagnostics - offset = dt - call update_state (dt, daidtt, dvidtt, dagedtt, offset) - - call ice_timer_stop(timer_thermo) ! thermodynamics - call ice_timer_stop(timer_column) ! column physics - - !----------------------------------------------------------------- - ! dynamics, transport, ridging - !----------------------------------------------------------------- - - if (.not.prescribed_ice) then - do k = 1, ndtd - - ! momentum, stress, transport - call step_dyn_horiz (dt_dyn) - - plabeld = 'post step_dyn_horiz' - call debug_ice (iblk, plabeld) - - ! ridging - !$OMP PARALLEL DO PRIVATE(iblk) - do iblk = 1, nblocks - if (kridge > 0) call step_dyn_ridge (dt_dyn, ndtd, iblk) - enddo - !$OMP END PARALLEL DO - - ! clean up, update tendency diagnostics - offset = c0 - call update_state (dt_dyn, daidtd, dvidtd, dagedtd, offset) - - enddo - - plabeld = 'post dynamics' - do iblk = 1, nblocks - call debug_ice (iblk, plabeld) - enddo - - endif - - !----------------------------------------------------------------- - ! albedo, shortwave radiation - !----------------------------------------------------------------- - - call ice_timer_start(timer_column) ! column physics - call ice_timer_start(timer_thermo) ! thermodynamics - - !$OMP PARALLEL DO PRIVATE(iblk) - do iblk = 1, nblocks - - if (ktherm >= 0) call step_radiation (dt, iblk) - - plabeld = 'post step_radiation' - call debug_ice (iblk, plabeld) - - !----------------------------------------------------------------- - ! get ready for coupling and the next time step - !----------------------------------------------------------------- - - call coupling_prep (iblk) - - plabeld = 'post coupling_prep' - call debug_ice (iblk, plabeld) - - enddo ! iblk - !$OMP END PARALLEL DO - - call ice_timer_start(timer_bound) - call ice_HaloUpdate (scale_factor, halo_info, & - field_loc_center, field_type_scalar) - call ice_timer_stop(timer_bound) - - call ice_timer_stop(timer_thermo) ! thermodynamics - call ice_timer_stop(timer_column) ! column physics - - !----------------------------------------------------------------- - ! write data - !----------------------------------------------------------------- - - call ice_timer_start(timer_diags) ! diagnostics - if (mod(istep,diagfreq) == 0) then - call runtime_diags(dt) ! log file - if (solve_zsal) call zsal_diags - if (skl_bgc .or. z_tracers) call bgc_diags - if (tr_brine) call hbrine_diags - endif - call ice_timer_stop(timer_diags) ! diagnostics - - call ice_timer_start(timer_hist) ! 
history - call accum_hist (dt) ! history file - call ice_timer_stop(timer_hist) ! history - - call ice_timer_start(timer_readwrite) ! reading/writing - if (write_restart == 1) then - call dumpfile ! core variables for restarting - if (tr_iage) call write_restart_age - if (tr_FY) call write_restart_FY - if (tr_lvl) call write_restart_lvl - if (tr_pond_cesm) call write_restart_pond_cesm - if (tr_pond_lvl) call write_restart_pond_lvl - if (tr_pond_topo) call write_restart_pond_topo - if (tr_aero) call write_restart_aero - if (solve_zsal .or. skl_bgc .or. z_tracers) & - call write_restart_bgc - if (tr_brine) call write_restart_hbrine - if (kdyn == 2) call write_restart_eap - call final_restart - endif - - call ice_timer_stop(timer_readwrite) ! reading/writing - - end subroutine ice_step - -!======================================================================= -! -! Prepare for coupling -! -! authors: Elizabeth C. Hunke, LANL - - subroutine coupling_prep (iblk) - - use ice_arrays_column, only: alvdfn, alidfn, alvdrn, alidrn, & - albicen, albsnon, albpndn, apeffn, fzsal_g, fzsal, snowfracn - use ice_blocks, only: nx_block, ny_block, get_block, block - use ice_domain, only: blocks_ice - use ice_calendar, only: dt, nstreams - use ice_domain_size, only: ncat - use ice_flux, only: alvdf, alidf, alvdr, alidr, albice, albsno, & - albpnd, albcnt, apeff_ai, fpond, fresh, l_mpond_fresh, & - alvdf_ai, alidf_ai, alvdr_ai, alidr_ai, fhocn_ai, & - fresh_ai, fsalt_ai, fsalt, & - fswthru_ai, fhocn, fswthru, scale_factor, snowfrac, & - swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & - fsens, flat, fswabs, flwout, evap, Tref, Qref, & - scale_fluxes, frzmlt_init, frzmlt, Uref, wind, fsurfn_f, flatn_f - use ice_flux_bgc, only: faero_ocn, fzsal_ai, fzsal_g_ai, flux_bio, flux_bio_ai, & - fnit, fsil, famm, fdmsp, fdms, fhum, fdust, falgalN, & - fdoc, fdic, fdon, ffep, ffed, bgcflux_ice_to_ocn - use ice_grid, only: tmask - use ice_state, only: aicen, aice, aice_init - use ice_step_mod, only: ocean_mixed_layer - use ice_timers, only: timer_couple, ice_timer_start, ice_timer_stop - - integer (kind=int_kind), intent(in) :: & - iblk ! block index - - ! local variables - - integer (kind=int_kind) :: & - ilo,ihi,jlo,jhi, & ! beginning and end of physical domain - n , & ! thickness category index - i,j , & ! horizontal indices - k , & ! tracer index - nbtrcr ! - - type (block) :: & - this_block ! block information for current block - - logical (kind=log_kind) :: & - skl_bgc , & ! - calc_Tsfc ! - - real (kind=dbl_kind) :: & - cszn , & ! counter for history averaging - puny , & ! - rhofresh , & ! - netsw ! flag for shortwave radiation presence - - character(len=*), parameter :: subname = '(coupling_prep)' - - !----------------------------------------------------------------- - - call icepack_query_parameters(puny_out=puny, rhofresh_out=rhofresh) - call icepack_query_parameters(skl_bgc_out=skl_bgc) - call icepack_query_tracer_numbers(nbtrcr_out=nbtrcr) - call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - - !----------------------------------------------------------------- - ! Save current value of frzmlt for diagnostics. - ! Update mixed layer with heat and radiation from ice. 
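The restart block above is the pattern every driver in this patch extends (adding write_restart_iso beside the other tracer writers): the core dump always happens, tracer files are written only when their flag is active, and final_restart closes the stream. A stubbed sketch of that gating, not the CICE I/O API:

      program restart_gating
        implicit none
        logical :: tr_iage
        integer :: write_restart
        tr_iage = .true.
        write_restart = 1
        if (write_restart == 1) then
           call dumpfile                        ! core state, always written
           if (tr_iage) call write_restart_age  ! tracer file only if the tracer is on
           call final_restart                   ! close the restart stream
        endif
      contains
        subroutine dumpfile
          print *, 'core restart fields'
        end subroutine dumpfile
        subroutine write_restart_age
          print *, 'age tracer restart'
        end subroutine write_restart_age
        subroutine final_restart
          print *, 'restart closed'
        end subroutine final_restart
      end program restart_gating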
- !----------------------------------------------------------------- - - do j = 1, ny_block - do i = 1, nx_block - frzmlt_init (i,j,iblk) = frzmlt(i,j,iblk) - enddo - enddo - - call ice_timer_start(timer_couple,iblk) ! atm/ocn coupling - - if (oceanmixed_ice) & - call ocean_mixed_layer (dt,iblk) ! ocean surface fluxes and sst - - !----------------------------------------------------------------- - ! Aggregate albedos - !----------------------------------------------------------------- - - do j = 1, ny_block - do i = 1, nx_block - alvdf(i,j,iblk) = c0 - alidf(i,j,iblk) = c0 - alvdr(i,j,iblk) = c0 - alidr(i,j,iblk) = c0 - - albice(i,j,iblk) = c0 - albsno(i,j,iblk) = c0 - albpnd(i,j,iblk) = c0 - apeff_ai(i,j,iblk) = c0 - snowfrac(i,j,iblk) = c0 - - ! for history averaging - cszn = c0 - netsw = swvdr(i,j,iblk)+swidr(i,j,iblk)+swvdf(i,j,iblk)+swidf(i,j,iblk) - if (netsw > puny) cszn = c1 - do n = 1, nstreams - albcnt(i,j,iblk,n) = albcnt(i,j,iblk,n) + cszn - enddo - enddo - enddo - - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi - - do n = 1, ncat - do j = jlo, jhi - do i = ilo, ihi - if (aicen(i,j,n,iblk) > puny) then - - alvdf(i,j,iblk) = alvdf(i,j,iblk) & - + alvdfn(i,j,n,iblk)*aicen(i,j,n,iblk) - alidf(i,j,iblk) = alidf(i,j,iblk) & - + alidfn(i,j,n,iblk)*aicen(i,j,n,iblk) - alvdr(i,j,iblk) = alvdr(i,j,iblk) & - + alvdrn(i,j,n,iblk)*aicen(i,j,n,iblk) - alidr(i,j,iblk) = alidr(i,j,iblk) & - + alidrn(i,j,n,iblk)*aicen(i,j,n,iblk) - - netsw = swvdr(i,j,iblk) + swidr(i,j,iblk) & - + swvdf(i,j,iblk) + swidf(i,j,iblk) - if (netsw > puny) then ! sun above horizon - albice(i,j,iblk) = albice(i,j,iblk) & - + albicen(i,j,n,iblk)*aicen(i,j,n,iblk) - albsno(i,j,iblk) = albsno(i,j,iblk) & - + albsnon(i,j,n,iblk)*aicen(i,j,n,iblk) - albpnd(i,j,iblk) = albpnd(i,j,iblk) & - + albpndn(i,j,n,iblk)*aicen(i,j,n,iblk) - endif - - apeff_ai(i,j,iblk) = apeff_ai(i,j,iblk) & ! for history - + apeffn(i,j,n,iblk)*aicen(i,j,n,iblk) - snowfrac(i,j,iblk) = snowfrac(i,j,iblk) & ! for history - + snowfracn(i,j,n,iblk)*aicen(i,j,n,iblk) - - endif ! aicen > puny - enddo - enddo - enddo - - do j = 1, ny_block - do i = 1, nx_block - - !----------------------------------------------------------------- - ! reduce fresh by fpond for coupling - !----------------------------------------------------------------- - - if (l_mpond_fresh) then - fpond(i,j,iblk) = fpond(i,j,iblk) * rhofresh/dt - fresh(i,j,iblk) = fresh(i,j,iblk) - fpond(i,j,iblk) - endif - - !---------------------------------------------------------------- - ! Store grid box mean albedos and fluxes before scaling by aice - !---------------------------------------------------------------- - - alvdf_ai (i,j,iblk) = alvdf (i,j,iblk) - alidf_ai (i,j,iblk) = alidf (i,j,iblk) - alvdr_ai (i,j,iblk) = alvdr (i,j,iblk) - alidr_ai (i,j,iblk) = alidr (i,j,iblk) - fresh_ai (i,j,iblk) = fresh (i,j,iblk) - fsalt_ai (i,j,iblk) = fsalt (i,j,iblk) - fhocn_ai (i,j,iblk) = fhocn (i,j,iblk) - fswthru_ai(i,j,iblk) = fswthru(i,j,iblk) - fzsal_ai (i,j,iblk) = fzsal (i,j,iblk) - fzsal_g_ai(i,j,iblk) = fzsal_g(i,j,iblk) - - if (nbtrcr > 0) then - do k = 1, nbtrcr - flux_bio_ai (i,j,k,iblk) = flux_bio (i,j,k,iblk) - enddo - endif - - !----------------------------------------------------------------- - ! 
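The category loops above reduce per-category albedos to grid-box values as an ice-area-weighted sum, alvdf = sum over n of alvdfn(n)*aicen(n), skipping categories with negligible area. A one-point, three-category illustration:

      program albedo_aggregate
        implicit none
        integer, parameter :: ncat = 3
        real, parameter :: puny = 1.0e-11
        real :: aicen(ncat)  = (/ 0.2, 0.3, 0.1 /)   ! category ice areas
        real :: alvdfn(ncat) = (/ 0.7, 0.6, 0.5 /)   ! per-category albedos
        real :: alvdf
        integer :: n
        alvdf = 0.0
        do n = 1, ncat
           if (aicen(n) > puny) alvdf = alvdf + alvdfn(n)*aicen(n)
        enddo
        print *, 'area-weighted albedo sum =', alvdf
        ! dividing by the total ice area (as the later "divide fluxes by ice
        ! area" step does for coupled fields) recovers an ice-area mean
      end program albedo_aggregate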
Save net shortwave for scaling factor in scale_factor - !----------------------------------------------------------------- - scale_factor(i,j,iblk) = & - swvdr(i,j,iblk)*(c1 - alvdr_ai(i,j,iblk)) & - + swvdf(i,j,iblk)*(c1 - alvdf_ai(i,j,iblk)) & - + swidr(i,j,iblk)*(c1 - alidr_ai(i,j,iblk)) & - + swidf(i,j,iblk)*(c1 - alidf_ai(i,j,iblk)) - - enddo - enddo - - !----------------------------------------------------------------- - ! Divide fluxes by ice area - ! - the CESM coupler assumes fluxes are per unit ice area - ! - also needed for global budget in diagnostics - !----------------------------------------------------------------- - - call scale_fluxes (nx_block, ny_block, & - tmask (:,:,iblk), nbtrcr, icepack_max_aero, & - aice (:,:,iblk), Tf (:,:,iblk), & - Tair (:,:,iblk), Qa (:,:,iblk), & - strairxT (:,:,iblk), strairyT(:,:,iblk), & - fsens (:,:,iblk), flat (:,:,iblk), & - fswabs (:,:,iblk), flwout (:,:,iblk), & - evap (:,:,iblk), & - Tref (:,:,iblk), Qref (:,:,iblk), & - fresh (:,:,iblk), fsalt (:,:,iblk), & - fhocn (:,:,iblk), fswthru (:,:,iblk), & - faero_ocn(:,:,:,iblk), & - alvdr (:,:,iblk), alidr (:,:,iblk), & - alvdf (:,:,iblk), alidf (:,:,iblk), & - fzsal (:,:,iblk), fzsal_g (:,:,iblk), & - flux_bio(:,:,1:nbtrcr,iblk), & - Uref=Uref(:,:,iblk), wind=wind(:,:,iblk) ) - - !----------------------------------------------------------------- - ! Define ice-ocean bgc fluxes - !----------------------------------------------------------------- - - if (nbtrcr > 0 .or. skl_bgc) then - call bgcflux_ice_to_ocn (nx_block, ny_block, & - flux_bio(:,:,1:nbtrcr,iblk), & - fnit(:,:,iblk), fsil(:,:,iblk), & - famm(:,:,iblk), fdmsp(:,:,iblk), & - fdms(:,:,iblk), fhum(:,:,iblk), & - fdust(:,:,iblk), falgalN(:,:,:,iblk), & - fdoc(:,:,:,iblk), fdic(:,:,:,iblk), & - fdon(:,:,:,iblk), ffep(:,:,:,iblk), & - ffed(:,:,:,iblk)) - endif - -!echmod - comment this out for efficiency, if .not. calc_Tsfc - if (.not. calc_Tsfc) then - - !--------------------------------------------------------------- - ! If surface fluxes were provided, conserve these fluxes at ice - ! free points by passing to ocean. - !--------------------------------------------------------------- - - call sfcflux_to_ocn & - (nx_block, ny_block, & - tmask (:,:,iblk), aice_init(:,:,iblk), & - fsurfn_f (:,:,:,iblk), flatn_f(:,:,:,iblk), & - fresh (:,:,iblk), fhocn (:,:,iblk)) - endif -!echmod - - call ice_timer_stop(timer_couple,iblk) ! atm/ocn coupling - - end subroutine coupling_prep - -!======================================================================= -! -! If surface heat fluxes are provided to CICE instead of CICE calculating -! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can -! be provided at points which do not have ice. (This is could be due to -! the heat fluxes being calculated on a lower resolution grid or the -! heat fluxes not recalculated at every CICE timestep.) At ice free points, -! conserve energy and water by passing these fluxes to the ocean. -! -! author: A. McLaren, Met Office - - subroutine sfcflux_to_ocn(nx_block, ny_block, & - tmask, aice, & - fsurfn_f, flatn_f, & - fresh, fhocn) - - use ice_domain_size, only: ncat - - integer (kind=int_kind), intent(in) :: & - nx_block, ny_block ! block dimensions - - logical (kind=log_kind), dimension (nx_block,ny_block), intent(in) :: & - tmask ! land/boundary mask, thickness (T-cell) - - real (kind=dbl_kind), dimension(nx_block,ny_block), intent(in):: & - aice ! 
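The scale_factor expression above is the net shortwave absorbed by the ice-ocean column: each of the four radiation bands (visible/near-IR, direct/diffuse) weighted by one minus its grid-box albedo. With illustrative numbers:

      program net_shortwave
        implicit none
        real :: swvdr, swvdf, swidr, swidf          ! downwelling SW by band, W/m^2
        real :: alvdr_ai, alvdf_ai, alidr_ai, alidf_ai
        real :: scale_factor
        swvdr = 100.; swvdf = 80.; swidr = 60.; swidf = 40.
        alvdr_ai = 0.6; alvdf_ai = 0.6; alidr_ai = 0.4; alidf_ai = 0.4
        scale_factor = swvdr*(1.-alvdr_ai) + swvdf*(1.-alvdf_ai) &
                     + swidr*(1.-alidr_ai) + swidf*(1.-alidf_ai)
        print *, 'net absorbed shortwave =', scale_factor, 'W/m^2'
      end program net_shortwave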
initial ice concentration - - real (kind=dbl_kind), dimension(nx_block,ny_block,ncat), intent(in) :: & - fsurfn_f, & ! net surface heat flux (provided as forcing) - flatn_f ! latent heat flux (provided as forcing) - - real (kind=dbl_kind), dimension(nx_block,ny_block), intent(inout):: & - fresh , & ! fresh water flux to ocean (kg/m2/s) - fhocn ! actual ocn/ice heat flx (W/m**2) - -#ifdef CICE_IN_NEMO - - ! local variables - integer (kind=int_kind) :: & - i, j, n ! horizontal indices - - real (kind=dbl_kind) :: & - puny, & ! - rLsub ! 1/Lsub - - character(len=*), parameter :: subname = '(sfcflux_to_ocn)' - - call icepack_query_parameters(puny_out=puny) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - rLsub = c1 / Lsub - - do n = 1, ncat - do j = 1, ny_block - do i = 1, nx_block - if (tmask(i,j) .and. aice(i,j) <= puny) then - fhocn(i,j) = fhocn(i,j) & - + fsurfn_f(i,j,n) + flatn_f(i,j,n) - fresh(i,j) = fresh(i,j) & - + flatn_f(i,j,n) * rLsub - endif - enddo ! i - enddo ! j - enddo ! n - -#endif - - end subroutine sfcflux_to_ocn - -!======================================================================= - - end module CICE_RunMod - -!======================================================================= diff --git a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 index cb3e7bb98..b72745e30 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 @@ -77,7 +77,7 @@ subroutine cice_init(mpicom_ice) use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & get_forcing_atmo, get_forcing_ocn, get_wave_spec use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & - faero_default, faero_optics, alloc_forcing_bgc + faero_default, faero_optics, alloc_forcing_bgc, fiso_default use ice_grid, only: init_grid1, init_grid2, alloc_grid use ice_history, only: init_hist, accum_hist use ice_restart_shared, only: restart, runtype @@ -95,7 +95,7 @@ subroutine cice_init(mpicom_ice) mpicom_ice ! communicator for sequential ccsm logical(kind=log_kind) :: tr_aero, tr_zaero, skl_bgc, z_tracers, & - tr_fsd, wave_spec + tr_iso, tr_fsd, wave_spec character(len=*), parameter :: subname = '(cice_init)' call init_communicate(mpicom_ice) ! initial setup for message passing @@ -122,7 +122,6 @@ subroutine cice_init(mpicom_ice) call ice_timer_start(timer_total) ! start timing entire run call init_grid2 ! grid variables call init_zbgc ! vertical biogeochemistry initialization - call init_calendar ! initialize some calendar stuff call init_hist (dt) ! initialize output history file @@ -134,6 +133,7 @@ subroutine cice_init(mpicom_ice) endif call init_coupler_flux ! initialize fluxes exchanged with coupler + #ifdef popcice call sst_sss ! POP data for CICE initialization #endif @@ -181,6 +181,7 @@ subroutine cice_init(mpicom_ice) call init_history_dyn ! initialize dynamic history variables call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_zaero_out=tr_zaero) + call icepack_query_tracer_flags(tr_iso_out=tr_iso) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(trim(subname), & file=__FILE__,line= __LINE__) @@ -194,6 +195,11 @@ subroutine cice_init(mpicom_ice) if (trim(runtype) == 'continue' .or. restart) & call init_shortwave ! initialize radiative transfer +! istep = istep + 1 ! update time step counters +! istep1 = istep1 + 1 +! time = time + dt ! determine the time and date +! 
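sfcflux_to_ocn above conserves energy and water at ice-free points by redirecting the prescribed category fluxes to the ocean, converting the latent heat flux to a water flux via 1/Lsub. A single-point sketch; the Lsub value is a typical latent heat of sublimation assumed here for illustration, whereas the real code queries it from Icepack:

      program sfcflux_conserve
        implicit none
        real, parameter :: Lsub = 2.835e6     ! J/kg, assumed constant
        real :: aice, puny, fsurfn_f, flatn_f, fhocn, fresh
        aice = 0.0; puny = 1.0e-11
        fsurfn_f = -15.0; flatn_f = -5.0      ! W/m^2, forcing at an ice-free point
        fhocn = 0.0; fresh = 0.0
        if (aice <= puny) then
           fhocn = fhocn + fsurfn_f + flatn_f ! heat goes to the ocean instead
           fresh = fresh + flatn_f / Lsub     ! kg/m^2/s water equivalent
        endif
        print *, 'fhocn =', fhocn, ' fresh =', fresh
      end program sfcflux_conserve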
call calendar(time) ! at the end of the first timestep + !-------------------------------------------------------------------- ! coupler communication or forcing data initialization !-------------------------------------------------------------------- @@ -206,6 +212,8 @@ subroutine cice_init(mpicom_ice) call get_forcing_atmo ! atmospheric forcing from data call get_forcing_ocn(dt) ! ocean forcing from data + ! isotopes + if (tr_iso) call fiso_default ! default values ! aerosols ! if (tr_aero) call faero_data ! data file ! if (tr_zaero) call fzaero_data ! data file (gx1) @@ -234,20 +242,21 @@ subroutine init_restart use ice_calendar, only: time, calendar use ice_constants, only: c0 use ice_domain, only: nblocks - use ice_domain_size, only: ncat, n_aero, nfsd + use ice_domain_size, only: ncat, n_iso, n_aero, nfsd use ice_dyn_eap, only: read_restart_eap use ice_dyn_shared, only: kdyn use ice_grid, only: tmask use ice_init, only: ice_ic use ice_init_column, only: init_age, init_FY, init_lvl, & init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & - init_aerosol, init_hbrine, init_bgc, init_fsd + init_isotope, init_aerosol, init_hbrine, init_bgc, init_fsd use ice_restart_column, only: restart_age, read_restart_age, & restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & restart_pond_cesm, read_restart_pond_cesm, & restart_pond_lvl, read_restart_pond_lvl, & restart_pond_topo, read_restart_pond_topo, & restart_fsd, read_restart_fsd, & + restart_iso, read_restart_iso, & restart_aero, read_restart_aero, & restart_hbrine, read_restart_hbrine, & restart_zsal, restart_bgc @@ -260,13 +269,13 @@ subroutine init_restart iblk ! block index logical(kind=log_kind) :: & tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & - tr_pond_topo, tr_fsd, tr_aero, tr_brine, & + tr_pond_topo, tr_fsd, tr_iso, tr_aero, tr_brine, & skl_bgc, z_tracers, solve_zsal integer(kind=int_kind) :: & ntrcr integer(kind=int_kind) :: & nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, & - nt_iage, nt_FY, nt_aero, nt_fsd + nt_iage, nt_FY, nt_aero, nt_fsd, nt_isosno, nt_isoice character(len=*), parameter :: subname = '(init_restart)' @@ -280,10 +289,11 @@ subroutine init_restart call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & - tr_fsd_out=tr_fsd) + tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, nt_ipnd_out=nt_ipnd, & - nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_aero_out=nt_aero, nt_fsd_out=nt_fsd) + nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_aero_out=nt_aero, nt_fsd_out=nt_fsd, & + nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -388,6 +398,20 @@ subroutine init_restart call init_fsd(trcrn(:,:,nt_fsd:nt_fsd+nfsd-1,:,:)) endif endif + + ! isotopes + if (tr_iso) then + if (trim(runtype) == 'continue') restart_iso = .true. + if (restart_iso) then + call read_restart_iso + else + do iblk = 1, nblocks + call init_isotope(trcrn(:,:,nt_isosno:nt_isosno+n_iso-1,:,iblk), & + trcrn(:,:,nt_isoice:nt_isoice+n_iso-1,:,iblk)) + enddo ! iblk + endif + endif + if (tr_aero) then ! ice aerosol if (trim(runtype) == 'continue') restart_aero = .true. 
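The isotope restart branch added here mirrors the aerosol one: continue runs force restart_iso = .true., and otherwise init_isotope fills the tracer arrays block by block. The slice trcrn(:,:,nt_isosno:nt_isosno+n_iso-1,:,iblk) selects the n_iso snow-isotope tracers starting at index nt_isosno. A minimal index demonstration (the contiguous snow-then-ice layout below is assumed only for illustration):

      program tracer_slices
        implicit none
        integer, parameter :: ntrcr = 10, n_iso = 3
        integer :: nt_isosno, nt_isoice
        real :: trcrn(ntrcr)
        nt_isosno = 4                     ! first snow-isotope tracer index
        nt_isoice = nt_isosno + n_iso     ! ice isotopes assumed to follow
        trcrn = 0.0
        trcrn(nt_isosno:nt_isosno+n_iso-1) = 1.0   ! initialize snow isotopes
        trcrn(nt_isoice:nt_isoice+n_iso-1) = 2.0   ! initialize ice isotopes
        print *, trcrn
      end program tracer_slices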
if (restart_aero) then diff --git a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 index 26d40a431..09cffa0c7 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 @@ -22,7 +22,7 @@ module CICE_RunMod use ice_constants, only: field_loc_center, field_type_scalar use ice_exit, only: abort_ice use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted - use icepack_intfc, only: icepack_max_aero + use icepack_intfc, only: icepack_max_iso, icepack_max_aero use icepack_intfc, only: icepack_query_parameters use icepack_intfc, only: icepack_query_tracer_flags, icepack_query_tracer_sizes @@ -48,12 +48,12 @@ subroutine CICE_Run use ice_forcing, only: get_forcing_atmo, get_forcing_ocn, & get_wave_spec use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & - faero_default + fiso_default, faero_default use ice_flux, only: init_flux_atm, init_flux_ocn use ice_timers, only: ice_timer_start, ice_timer_stop, & timer_couple, timer_step logical (kind=log_kind) :: & - tr_aero, tr_zaero, skl_bgc, z_tracers, wave_spec, tr_fsd + tr_iso, tr_aero, tr_zaero, skl_bgc, z_tracers, wave_spec, tr_fsd character(len=*), parameter :: subname = '(CICE_Run)' !-------------------------------------------------------------------- @@ -65,7 +65,8 @@ subroutine CICE_Run call icepack_query_parameters(skl_bgc_out=skl_bgc, & z_tracers_out=z_tracers, & wave_spec_out=wave_spec) - call icepack_query_tracer_flags(tr_aero_out=tr_aero, & + call icepack_query_tracer_flags(tr_iso_out=tr_iso, & + tr_aero_out=tr_aero, & tr_zaero_out=tr_zaero, & tr_fsd_out=tr_fsd) call icepack_warnings_flush(nu_diag) @@ -95,6 +96,8 @@ subroutine CICE_Run call get_forcing_atmo ! atmospheric forcing from data call get_forcing_ocn(dt) ! ocean forcing from data + ! isotopes + if (tr_iso) call fiso_default ! default values ! aerosols ! if (tr_aero) call faero_data ! data file ! if (tr_zaero) call fzaero_data ! data file (gx1) @@ -150,7 +153,7 @@ subroutine ice_step use ice_restart_column, only: write_restart_age, write_restart_FY, & write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & write_restart_pond_topo, write_restart_aero, write_restart_fsd, & - write_restart_bgc, write_restart_hbrine + write_restart_iso, write_restart_bgc, write_restart_hbrine use ice_restart_driver, only: dumpfile use ice_restoring, only: restore_ice, ice_HaloRestore use ice_step_mod, only: prep_radiation, step_therm1, step_therm2, & @@ -172,7 +175,7 @@ subroutine ice_step logical (kind=log_kind) :: & tr_iage, tr_FY, tr_lvl, tr_fsd, & - tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_aero, & + tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec character(len=*), parameter :: subname = '(ice_step)' @@ -183,7 +186,7 @@ subroutine ice_step call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & - tr_fsd_out=tr_fsd) + tr_iso_out=tr_iso, tr_fsd_out=tr_fsd) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -236,7 +239,7 @@ subroutine ice_step if (.not.prescribed_ice) & call step_therm2 (dt, iblk) ! ice thickness distribution thermo - endif + endif ! ktherm > 0 enddo ! 
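The CICE_Run changes above wire isotopes into the forcing sequence exactly as aerosols already are: query the tracer flag once per step, then fill default forcing values only when the tracer is active (the data-file variants stay commented out). A stubbed sketch of that gating:

      program forcing_hooks
        implicit none
        logical :: tr_iso, tr_aero, tr_zaero
        tr_iso = .true.; tr_aero = .false.; tr_zaero = .false.
        if (tr_iso) call fiso_default_stub           ! isotope default forcing
        if (tr_aero .or. tr_zaero) call faero_stub   ! aerosol default forcing (skipped here)
      contains
        subroutine fiso_default_stub
          print *, 'fiso defaults set'
        end subroutine fiso_default_stub
        subroutine faero_stub
          print *, 'faero defaults set'
        end subroutine faero_stub
      end program forcing_hooks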
iblk !$OMP END PARALLEL DO @@ -335,6 +338,7 @@ subroutine ice_step if (tr_pond_lvl) call write_restart_pond_lvl if (tr_pond_topo) call write_restart_pond_topo if (tr_fsd) call write_restart_fsd + if (tr_iso) call write_restart_iso if (tr_aero) call write_restart_aero if (solve_zsal .or. skl_bgc .or. z_tracers) & call write_restart_bgc @@ -369,7 +373,8 @@ subroutine coupling_prep (iblk) swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & fsens, flat, fswabs, flwout, evap, Tref, Qref, & scale_fluxes, frzmlt_init, frzmlt, Uref, wind - use ice_flux_bgc, only: faero_ocn, fzsal_ai, fzsal_g_ai, flux_bio, flux_bio_ai, & + use ice_flux_bgc, only: faero_ocn, fiso_ocn, Qref_iso, fiso_evap, & + fzsal_ai, fzsal_g_ai, flux_bio, flux_bio_ai, & fnit, fsil, famm, fdmsp, fdms, fhum, fdust, falgalN, & fdoc, fdic, fdon, ffep, ffed, bgcflux_ice_to_ocn use ice_grid, only: tmask @@ -552,7 +557,8 @@ subroutine coupling_prep (iblk) !----------------------------------------------------------------- call scale_fluxes (nx_block, ny_block, & - tmask (:,:,iblk), nbtrcr, icepack_max_aero, & + tmask (:,:,iblk), nbtrcr, & + icepack_max_aero, & aice (:,:,iblk), Tf (:,:,iblk), & Tair (:,:,iblk), Qa (:,:,iblk), & strairxT (:,:,iblk), strairyT(:,:,iblk), & fsens (:,:,iblk), flat (:,:,iblk), & fswabs (:,:,iblk), flwout (:,:,iblk), & evap (:,:,iblk), & Tref (:,:,iblk), Qref (:,:,iblk), & fresh (:,:,iblk), fsalt (:,:,iblk), & fhocn (:,:,iblk), fswthru (:,:,iblk), & faero_ocn(:,:,:,iblk), & alvdr (:,:,iblk), alidr (:,:,iblk), & alvdf (:,:,iblk), alidf (:,:,iblk), & fzsal (:,:,iblk), fzsal_g (:,:,iblk), & - flux_bio(:,:,1:nbtrcr,iblk), & + flux_bio (:,:,1:nbtrcr,iblk), & + Qref_iso =Qref_iso (:,:,:,iblk), & + fiso_evap=fiso_evap(:,:,:,iblk), & + fiso_ocn =fiso_ocn (:,:,:,iblk), & Uref=Uref(:,:,iblk), wind=wind(:,:,iblk) ) !----------------------------------------------------------------- diff --git a/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 b/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 index fd9449efd..986189f96 100644 --- a/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 @@ -16,8 +16,8 @@ module CICE_InitMod use ice_exit, only: abort_ice use ice_fileunits, only: init_fileunits, nu_diag use icepack_intfc, only: icepack_aggregate - use icepack_intfc, only: icepack_init_fsd_bounds, icepack_init_wave use icepack_intfc, only: icepack_init_itd, icepack_init_itd_hist + use icepack_intfc, only: icepack_init_fsd_bounds, icepack_init_wave use icepack_intfc, only: icepack_configure use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc, only: icepack_query_parameters, icepack_query_tracer_flags, & @@ -49,6 +49,7 @@ subroutine CICE_Initialize(mpi_comm) !-------------------------------------------------------------------- ! model initialization !-------------------------------------------------------------------- + if (present(mpi_comm)) then call cice_init(mpi_comm) else @@ -81,7 +82,7 @@ subroutine cice_init(mpi_comm) use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & get_forcing_atmo, get_forcing_ocn, get_wave_spec use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & - faero_default, faero_optics, alloc_forcing_bgc + faero_default, faero_optics, alloc_forcing_bgc, fiso_default use ice_grid, only: init_grid1, init_grid2, alloc_grid use ice_history, only: init_hist, accum_hist use ice_restart_shared, only: restart, runtype @@ -94,11 +95,14 @@ subroutine cice_init(mpi_comm) #ifdef popcice use drv_forcing, only: sst_sss #endif + integer (kind=int_kind), optional, intent(in) :: & - mpi_comm ! communicator for sequential ccsm + mpi_comm !
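The scale_fluxes call above passes the new isotope fields (Qref_iso, fiso_evap, fiso_ocn) by keyword, which suggests they are optional dummies: drivers that never pass them keep the old interface unchanged. A minimal sketch of that present()-guarded design, with illustrative names:

      program optional_args
        implicit none
        real :: q(2)
        q = (/ 1.0, 2.0 /)
        call scale_stub()             ! old-style call, no isotope fields
        call scale_stub(Qref_iso=q)   ! new-style call passes them by keyword
      contains
        subroutine scale_stub(Qref_iso)
          real, intent(inout), optional :: Qref_iso(:)
          if (present(Qref_iso)) then
             Qref_iso = Qref_iso * 0.5          ! scale only when supplied
             print *, 'scaled isotope field:', Qref_iso
          else
             print *, 'no isotope fields passed'
          endif
        end subroutine scale_stub
      end program optional_args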
communicator for sequential ccsm + logical(kind=log_kind) :: tr_aero, tr_zaero, skl_bgc, z_tracers, & - tr_fsd, wave_spec + tr_iso, tr_fsd, wave_spec character(len=*), parameter :: subname = '(cice_init)' + if (present(mpi_comm)) then call init_communicate(mpi_comm) ! initial setup for message passing else @@ -130,7 +134,6 @@ subroutine cice_init(mpi_comm) call ice_timer_start(timer_total) ! start timing entire run call init_grid2 ! grid variables call init_zbgc ! vertical biogeochemistry initialization - call init_calendar ! initialize some calendar stuff call init_hist (dt) ! initialize output history file @@ -142,6 +145,7 @@ subroutine cice_init(mpi_comm) endif call init_coupler_flux ! initialize fluxes exchanged with coupler + #ifdef popcice call sst_sss ! POP data for CICE initialization #endif @@ -189,6 +193,7 @@ subroutine cice_init(mpi_comm) call init_history_dyn ! initialize dynamic history variables call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_zaero_out=tr_zaero) + call icepack_query_tracer_flags(tr_iso_out=tr_iso) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(trim(subname), & file=__FILE__,line= __LINE__) @@ -220,6 +225,9 @@ subroutine cice_init(mpi_comm) #ifndef CICE_DMI call get_forcing_ocn(dt) ! ocean forcing from data #endif + + ! isotopes + if (tr_iso) call fiso_default ! default values ! aerosols ! if (tr_aero) call faero_data ! data file ! if (tr_zaero) call fzaero_data ! data file (gx1) @@ -248,20 +256,21 @@ subroutine init_restart use ice_calendar, only: time, calendar use ice_constants, only: c0 use ice_domain, only: nblocks - use ice_domain_size, only: ncat, n_aero, nfsd + use ice_domain_size, only: ncat, n_iso, n_aero, nfsd use ice_dyn_eap, only: read_restart_eap use ice_dyn_shared, only: kdyn use ice_grid, only: tmask use ice_init, only: ice_ic use ice_init_column, only: init_age, init_FY, init_lvl, & init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & - init_aerosol, init_hbrine, init_bgc, init_fsd + init_isotope, init_aerosol, init_hbrine, init_bgc, init_fsd use ice_restart_column, only: restart_age, read_restart_age, & restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & restart_pond_cesm, read_restart_pond_cesm, & restart_pond_lvl, read_restart_pond_lvl, & restart_pond_topo, read_restart_pond_topo, & restart_fsd, read_restart_fsd, & + restart_iso, read_restart_iso, & restart_aero, read_restart_aero, & restart_hbrine, read_restart_hbrine, & restart_zsal, restart_bgc @@ -274,13 +283,13 @@ subroutine init_restart iblk ! 
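The DMI driver wraps its data-derived forcing in CPP guards so a coupled build can skip it, as in the CICE_DMI hunks nearby. A tiny .F90-style example of the same mechanism (requires preprocessing, e.g. building with -cpp and optionally -DCICE_DMI):

      program cpp_guard
        implicit none
      #ifdef CICE_DMI
        print *, 'coupled: ocean forcing arrives from the driver'
      #else
        print *, 'standalone: get_forcing_ocn(dt) reads forcing data'
      #endif
      end program cpp_guard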
block index logical(kind=log_kind) :: & tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & - tr_pond_topo, tr_fsd, tr_aero, tr_brine, & + tr_pond_topo, tr_fsd, tr_iso, tr_aero, tr_brine, & skl_bgc, z_tracers, solve_zsal integer(kind=int_kind) :: & ntrcr integer(kind=int_kind) :: & nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, & - nt_iage, nt_FY, nt_aero, nt_fsd + nt_iage, nt_FY, nt_aero, nt_fsd, nt_isosno, nt_isoice character(len=*), parameter :: subname = '(init_restart)' @@ -294,10 +303,11 @@ subroutine init_restart call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & - tr_fsd_out=tr_fsd) + tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, nt_ipnd_out=nt_ipnd, & - nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_aero_out=nt_aero, nt_fsd_out=nt_fsd) + nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_aero_out=nt_aero, nt_fsd_out=nt_fsd, & + nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -402,6 +412,20 @@ subroutine init_restart call init_fsd(trcrn(:,:,nt_fsd:nt_fsd+nfsd-1,:,:)) endif endif + + ! isotopes + if (tr_iso) then + if (trim(runtype) == 'continue') restart_iso = .true. + if (restart_iso) then + call read_restart_iso + else + do iblk = 1, nblocks + call init_isotope(trcrn(:,:,nt_isosno:nt_isosno+n_iso-1,:,iblk), & + trcrn(:,:,nt_isoice:nt_isoice+n_iso-1,:,iblk)) + enddo ! iblk + endif + endif + if (tr_aero) then ! ice aerosol if (trim(runtype) == 'continue') restart_aero = .true. 
if (restart_aero) then diff --git a/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 b/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 index 11587cd83..ad575f714 100644 --- a/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 @@ -21,7 +21,7 @@ module CICE_RunMod use ice_constants, only: field_loc_center, field_type_scalar use ice_exit, only: abort_ice use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted - use icepack_intfc, only: icepack_max_aero + use icepack_intfc, only: icepack_max_iso, icepack_max_aero use icepack_intfc, only: icepack_query_parameters use icepack_intfc, only: icepack_query_tracer_flags, icepack_query_tracer_sizes @@ -47,12 +47,12 @@ subroutine CICE_Run use ice_forcing, only: get_forcing_atmo, get_forcing_ocn, & get_wave_spec use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & - faero_default + fiso_default, faero_default use ice_flux, only: init_flux_atm, init_flux_ocn use ice_timers, only: ice_timer_start, ice_timer_stop, & timer_couple, timer_step logical (kind=log_kind) :: & - tr_aero, tr_zaero, skl_bgc, z_tracers, wave_spec, tr_fsd + tr_iso, tr_aero, tr_zaero, skl_bgc, z_tracers, wave_spec, tr_fsd character(len=*), parameter :: subname = '(CICE_Run)' !-------------------------------------------------------------------- @@ -64,7 +64,8 @@ subroutine CICE_Run call icepack_query_parameters(skl_bgc_out=skl_bgc, & z_tracers_out=z_tracers, & wave_spec_out=wave_spec) - call icepack_query_tracer_flags(tr_aero_out=tr_aero, & + call icepack_query_tracer_flags(tr_iso_out=tr_iso, & + tr_aero_out=tr_aero, & tr_zaero_out=tr_zaero, & tr_fsd_out=tr_fsd) call icepack_warnings_flush(nu_diag) @@ -103,6 +104,9 @@ subroutine CICE_Run #ifndef CICE_DMI call get_forcing_ocn(dt) ! ocean forcing from data #endif + + ! isotopes + if (tr_iso) call fiso_default ! default values ! aerosols ! if (tr_aero) call faero_data ! data file ! if (tr_zaero) call fzaero_data ! data file (gx1) @@ -155,7 +159,7 @@ subroutine ice_step use ice_restart_column, only: write_restart_age, write_restart_FY, & write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & write_restart_pond_topo, write_restart_aero, write_restart_fsd, & - write_restart_bgc, write_restart_hbrine + write_restart_iso, write_restart_bgc, write_restart_hbrine use ice_restart_driver, only: dumpfile use ice_restoring, only: restore_ice, ice_HaloRestore use ice_step_mod, only: prep_radiation, step_therm1, step_therm2, & @@ -175,7 +179,7 @@ subroutine ice_step logical (kind=log_kind) :: & tr_iage, tr_FY, tr_lvl, tr_fsd, & - tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_aero, & + tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec character(len=*), parameter :: subname = '(ice_step)' @@ -186,7 +190,7 @@ subroutine ice_step call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & - tr_fsd_out=tr_fsd) + tr_iso_out=tr_iso, tr_fsd_out=tr_fsd) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -231,7 +235,7 @@ subroutine ice_step call biogeochemistry (dt, iblk) ! biogeochemistry call step_therm2 (dt, iblk) ! ice thickness distribution thermo - endif + endif ! ktherm > 0 enddo ! 
iblk !$OMP END PARALLEL DO @@ -326,6 +330,7 @@ subroutine ice_step if (tr_pond_lvl) call write_restart_pond_lvl if (tr_pond_topo) call write_restart_pond_topo if (tr_fsd) call write_restart_fsd + if (tr_iso) call write_restart_iso if (tr_aero) call write_restart_aero if (solve_zsal .or. skl_bgc .or. z_tracers) & call write_restart_bgc @@ -360,7 +365,8 @@ subroutine coupling_prep (iblk) swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & fsens, flat, fswabs, flwout, evap, Tref, Qref, & scale_fluxes, frzmlt_init, frzmlt - use ice_flux_bgc, only: faero_ocn, fzsal_ai, fzsal_g_ai, flux_bio, flux_bio_ai + use ice_flux_bgc, only: faero_ocn, fiso_ocn, Qref_iso, fiso_evap, & + fzsal_ai, fzsal_g_ai, flux_bio, flux_bio_ai use ice_grid, only: tmask use ice_state, only: aicen, aice #ifdef CICE_IN_NEMO @@ -539,7 +545,8 @@ subroutine coupling_prep (iblk) !----------------------------------------------------------------- call scale_fluxes (nx_block, ny_block, & - tmask (:,:,iblk), nbtrcr, icepack_max_aero, & + tmask (:,:,iblk), nbtrcr, & + icepack_max_aero, & aice (:,:,iblk), Tf (:,:,iblk), & Tair (:,:,iblk), Qa (:,:,iblk), & strairxT (:,:,iblk), strairyT(:,:,iblk), & @@ -553,7 +560,10 @@ subroutine coupling_prep (iblk) alvdr (:,:,iblk), alidr (:,:,iblk), & alvdf (:,:,iblk), alidf (:,:,iblk), & fzsal (:,:,iblk), fzsal_g (:,:,iblk), & - flux_bio(:,:,1:nbtrcr,iblk)) + flux_bio (:,:,1:nbtrcr,iblk), & + Qref_iso =Qref_iso (:,:,:,iblk), & + fiso_evap=fiso_evap(:,:,:,iblk), & + fiso_ocn =fiso_ocn (:,:,:,iblk)) #ifdef CICE_IN_NEMO !echmod - comment this out for efficiency, if .not. calc_Tsfc diff --git a/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90_debug b/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90_debug deleted file mode 100644 index 5de6b1cfd..000000000 --- a/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90_debug +++ /dev/null @@ -1,686 +0,0 @@ -!======================================================================= -! -! Main driver for time stepping of CICE. -! -! authors Elizabeth C. Hunke, LANL -! Philip W. Jones, LANL -! William H. Lipscomb, LANL -! -! 2006 ECH: moved exit timeLoop to prevent execution of unnecessary timestep -! 2006 ECH: Streamlined for efficiency -! 2006 ECH: Converted to free source form (F90) -! 2007 BPB: Modified Delta-Eddington shortwave interface -! 2008 ECH: moved ESMF code to its own driver - - module CICE_RunMod - - use ice_kinds_mod - use ice_fileunits, only: nu_diag - use ice_arrays_column, only: oceanmixed_ice - use ice_constants, only: c0, c1 - use ice_constants, only: field_loc_center, field_type_scalar - use ice_exit, only: abort_ice - use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted - use icepack_intfc, only: icepack_max_aero - use icepack_intfc, only: icepack_query_parameters - use icepack_intfc, only: icepack_query_tracer_flags, icepack_query_tracer_numbers - - implicit none - private - public :: CICE_Run, ice_step - -!======================================================================= - - contains - -!======================================================================= -! -! This is the main driver routine for advancing CICE forward in time. -! -! author Elizabeth C. Hunke, LANL -! Philip W. Jones, LANL -! William H. 
Lipscomb, LANL - - subroutine CICE_Run - - use ice_calendar, only: istep, istep1, time, dt, stop_now, calendar - use ice_forcing, only: get_forcing_atmo, get_forcing_ocn, & - get_wave_spec - use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & - faero_default - use ice_flux, only: init_flux_atm, init_flux_ocn - use ice_timers, only: ice_timer_start, ice_timer_stop, & - timer_couple, timer_step - logical (kind=log_kind) :: & - tr_aero, tr_zaero, skl_bgc, z_tracers, wave_spec, tr_fsd - character(len=*), parameter :: subname = '(CICE_Run)' - - !-------------------------------------------------------------------- - ! initialize error code and step timer - !-------------------------------------------------------------------- - - call ice_timer_start(timer_step) ! start timing entire run - - call icepack_query_parameters(skl_bgc_out=skl_bgc, & - z_tracers_out=z_tracers, & - wave_spec_out=wave_spec) - call icepack_query_tracer_flags(tr_aero_out=tr_aero, & - tr_zaero_out=tr_zaero, & - tr_fsd_out=tr_fsd) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - -#ifndef CICE_IN_NEMO - !-------------------------------------------------------------------- - ! timestep loop - !-------------------------------------------------------------------- - - timeLoop: do -#endif - - call ice_step - - istep = istep + 1 ! update time step counters - istep1 = istep1 + 1 - time = time + dt ! determine the time and date - - call calendar(time) ! at the end of the timestep - -#ifndef CICE_IN_NEMO - if (stop_now >= 1) exit timeLoop -#endif - - call ice_timer_start(timer_couple) ! atm/ocn coupling - -#ifndef coupled -#ifndef CESMCOUPLED -! for now, wave_spectrum is constant in time -! if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice - call get_forcing_atmo ! atmospheric forcing from data - call get_forcing_ocn(dt) ! ocean forcing from data - - ! aerosols - ! if (tr_aero) call faero_data ! data file - ! if (tr_zaero) call fzaero_data ! data file (gx1) - if (tr_aero .or. tr_zaero) call faero_default ! default values - - if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry -#endif -#endif - if (z_tracers) call get_atm_bgc ! biogeochemistry - - call init_flux_atm ! Initialize atmosphere fluxes sent to coupler - call init_flux_ocn ! initialize ocean fluxes sent to coupler - - call ice_timer_stop(timer_couple) ! atm/ocn coupling - -#ifndef CICE_IN_NEMO - enddo timeLoop -#endif - - !-------------------------------------------------------------------- - ! end of timestep loop - !-------------------------------------------------------------------- - - call ice_timer_stop(timer_step) ! end timestepping loop timer - - end subroutine CICE_Run - -!======================================================================= -! -! Calls drivers for physics components, some initialization, and output -! -! author Elizabeth C. Hunke, LANL -! William H. 
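The _debug driver being deleted here contains the canonical CICE time loop, which survives unchanged in CICE_RunMod.F90: step the model, advance the counters and clock, and exit once the stop signal is set. A self-contained sketch with a stand-in stop condition:

      program time_loop
        implicit none
        integer :: istep, stop_now
        real :: time, dt
        istep = 0; time = 0.0; dt = 3600.0; stop_now = 0
        timeLoop: do
           call ice_step_stub()
           istep = istep + 1
           time  = time + dt              ! calendar(time) would update the date
           if (istep >= 3) stop_now = 1   ! stand-in for the model's stop signal
           if (stop_now >= 1) exit timeLoop
        enddo timeLoop
        print *, 'stopped at istep =', istep, ' time =', time
      contains
        subroutine ice_step_stub
          print *, 'ice_step'
        end subroutine ice_step_stub
      end program time_loop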
Lipscomb, LANL - - subroutine ice_step - - use ice_boundary, only: ice_HaloUpdate - use ice_calendar, only: dt, dt_dyn, ndtd, diagfreq, write_restart, istep - use ice_diagnostics, only: init_mass_diags, runtime_diags - use ice_diagnostics_bgc, only: hbrine_diags, zsal_diags, bgc_diags - use ice_domain, only: halo_info, nblocks - use ice_dyn_eap, only: write_restart_eap - use ice_dyn_shared, only: kdyn, kridge - use ice_flux, only: scale_factor, init_history_therm, & - daidtt, daidtd, dvidtt, dvidtd, dagedtt, dagedtd - use ice_history, only: accum_hist - use ice_history_bgc, only: init_history_bgc - use ice_restart, only: final_restart - use ice_restart_column, only: write_restart_age, write_restart_FY, & - write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & - write_restart_pond_topo, write_restart_aero, write_restart_fsd, & - write_restart_bgc, write_restart_hbrine - use ice_restart_driver, only: dumpfile - use ice_restoring, only: restore_ice, ice_HaloRestore - use ice_step_mod, only: prep_radiation, step_therm1, step_therm2, & - update_state, step_dyn_horiz, step_dyn_ridge, step_radiation, & - biogeochemistry, save_init, step_dyn_wave - use ice_timers, only: ice_timer_start, ice_timer_stop, & - timer_diags, timer_column, timer_thermo, timer_bound, & - timer_hist, timer_readwrite - - integer (kind=int_kind) :: & - iblk , & ! block index - k , & ! dynamics supercycling index - ktherm ! thermodynamics is off when ktherm = -1 - - real (kind=dbl_kind) :: & - offset ! d(age)/dt time offset - - logical (kind=log_kind) :: & - tr_iage, tr_FY, tr_lvl, tr_fsd, & - tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_aero, & - calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec - - character(len=*), parameter :: subname = '(ice_step)' - - character (len=char_len) :: plabeld - - plabeld = 'beginning time step' - do iblk = 1, nblocks - call debug_ice (iblk, plabeld) - enddo - - call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc, skl_bgc_out=skl_bgc, & - solve_zsal_out=solve_zsal, z_tracers_out=z_tracers, ktherm_out=ktherm, & - wave_spec_out=wave_spec) - call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & - tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & - tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & - tr_fsd_out=tr_fsd) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - - !----------------------------------------------------------------- - ! restoring on grid boundaries - !----------------------------------------------------------------- - - if (restore_ice) call ice_HaloRestore - - !----------------------------------------------------------------- - ! initialize diagnostics and save initial state values - !----------------------------------------------------------------- - - call ice_timer_start(timer_diags) ! diagnostics/history - call init_mass_diags ! diagnostics per timestep - call init_history_therm - call init_history_bgc - call ice_timer_stop(timer_diags) ! diagnostics/history - - call ice_timer_start(timer_column) ! column physics - call ice_timer_start(timer_thermo) ! thermodynamics - - call save_init - - !$OMP PARALLEL DO PRIVATE(iblk) - do iblk = 1, nblocks - - if (ktherm >= 0) then - - !----------------------------------------------------------------- - ! 
scale radiation fields - !----------------------------------------------------------------- - - if (calc_Tsfc) call prep_radiation (iblk) - - plabeld = 'post prep_radiation' - call debug_ice (iblk, plabeld) - - !----------------------------------------------------------------- - ! thermodynamics and biogeochemistry - !----------------------------------------------------------------- - - call step_therm1 (dt, iblk) ! vertical thermodynamics - - plabeld = 'post step_therm1' - call debug_ice (iblk, plabeld) - - call biogeochemistry (dt, iblk) ! biogeochemistry - - plabeld = 'post biogeochemistry' - call debug_ice (iblk, plabeld) - - call step_therm2 (dt, iblk) ! ice thickness distribution thermo - - plabeld = 'post step_therm2' - call debug_ice (iblk, plabeld) - - endif - - enddo ! iblk - !$OMP END PARALLEL DO - - ! clean up, update tendency diagnostics - offset = dt - call update_state (dt, daidtt, dvidtt, dagedtt, offset) - - call ice_timer_stop(timer_thermo) ! thermodynamics - call ice_timer_stop(timer_column) ! column physics - - !----------------------------------------------------------------- - ! dynamics, transport, ridging - !----------------------------------------------------------------- - - ! wave fracture of the floe size distribution - ! note this is called outside of the dynamics subcycling loop - if (tr_fsd .and. wave_spec) call step_dyn_wave(dt) - - do k = 1, ndtd - - ! momentum, stress, transport - call step_dyn_horiz (dt_dyn) - - plabeld = 'post step_dyn_horiz' - call debug_ice (iblk, plabeld) - - ! ridging - !$OMP PARALLEL DO PRIVATE(iblk) - do iblk = 1, nblocks - if (kridge > 0) call step_dyn_ridge (dt_dyn, ndtd, iblk) - enddo - !$OMP END PARALLEL DO - - ! clean up, update tendency diagnostics - offset = c0 - call update_state (dt_dyn, daidtd, dvidtd, dagedtd, offset) - - enddo - - plabeld = 'post dynamics' - do iblk = 1, nblocks - call debug_ice (iblk, plabeld) - enddo - - !----------------------------------------------------------------- - ! albedo, shortwave radiation - !----------------------------------------------------------------- - - call ice_timer_start(timer_column) ! column physics - call ice_timer_start(timer_thermo) ! thermodynamics - -!MHRI: CHECK THIS OMP - !$OMP PARALLEL DO PRIVATE(iblk) - do iblk = 1, nblocks - - if (ktherm >= 0) call step_radiation (dt, iblk) - - plabeld = 'post step_radiation' - call debug_ice (iblk, plabeld) - - !----------------------------------------------------------------- - ! get ready for coupling and the next time step - !----------------------------------------------------------------- - - call coupling_prep (iblk) - - plabeld = 'post coupling_prep' - call debug_ice (iblk, plabeld) - - enddo ! iblk - !$OMP END PARALLEL DO - - call ice_timer_start(timer_bound) - call ice_HaloUpdate (scale_factor, halo_info, & - field_loc_center, field_type_scalar) - call ice_timer_stop(timer_bound) - - call ice_timer_stop(timer_thermo) ! thermodynamics - call ice_timer_stop(timer_column) ! column physics - - !----------------------------------------------------------------- - ! write data - !----------------------------------------------------------------- - - call ice_timer_start(timer_diags) ! diagnostics - if (mod(istep,diagfreq) == 0) then - call runtime_diags(dt) ! log file - if (solve_zsal) call zsal_diags - if (skl_bgc .or. z_tracers) call bgc_diags - if (tr_brine) call hbrine_diags - endif - call ice_timer_stop(timer_diags) ! diagnostics - - call ice_timer_start(timer_hist) ! history - call accum_hist (dt) ! 
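The plabeld/debug_ice pairs threaded through this deleted file show its purpose: a label is updated before each phase so per-block state dumps can be attributed to the phase that produced them. A stubbed sketch of the pattern (debug_ice below is a stand-in print, not the real diagnostic dump):

      program debug_labels
        implicit none
        integer :: iblk, nblocks
        character(len=64) :: plabeld
        nblocks = 2
        plabeld = 'post step_therm1'
        do iblk = 1, nblocks
           call debug_ice_stub(iblk, plabeld)
        enddo
      contains
        subroutine debug_ice_stub(iblk, label)
          integer, intent(in) :: iblk
          character(len=*), intent(in) :: label
          print *, trim(label), ' block', iblk   ! real debug_ice dumps point data
        end subroutine debug_ice_stub
      end program debug_labels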
history file - call ice_timer_stop(timer_hist) ! history - - call ice_timer_start(timer_readwrite) ! reading/writing - if (write_restart == 1) then - call dumpfile ! core variables for restarting - if (tr_iage) call write_restart_age - if (tr_FY) call write_restart_FY - if (tr_lvl) call write_restart_lvl - if (tr_pond_cesm) call write_restart_pond_cesm - if (tr_pond_lvl) call write_restart_pond_lvl - if (tr_pond_topo) call write_restart_pond_topo - if (tr_fsd) call write_restart_fsd - if (tr_aero) call write_restart_aero - if (solve_zsal .or. skl_bgc .or. z_tracers) & - call write_restart_bgc - if (tr_brine) call write_restart_hbrine - if (kdyn == 2) call write_restart_eap - call final_restart - endif - - call ice_timer_stop(timer_readwrite) ! reading/writing - - end subroutine ice_step - -!======================================================================= -! -! Prepare for coupling -! -! authors: Elizabeth C. Hunke, LANL - - subroutine coupling_prep (iblk) - - use ice_arrays_column, only: alvdfn, alidfn, alvdrn, alidrn, & - albicen, albsnon, albpndn, apeffn, fzsal_g, fzsal, snowfracn - use ice_blocks, only: nx_block, ny_block, get_block, block - use ice_domain, only: blocks_ice - use ice_calendar, only: dt, nstreams - use ice_domain_size, only: ncat - use ice_flux, only: alvdf, alidf, alvdr, alidr, albice, albsno, & - albpnd, albcnt, apeff_ai, fpond, fresh, l_mpond_fresh, & - alvdf_ai, alidf_ai, alvdr_ai, alidr_ai, fhocn_ai, & - fresh_ai, fsalt_ai, fsalt, & - fswthru_ai, fhocn, fswthru, scale_factor, snowfrac, & - swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & - fsens, flat, fswabs, flwout, evap, Tref, Qref, & - scale_fluxes, frzmlt_init, frzmlt - use ice_flux_bgc, only: faero_ocn, fzsal_ai, fzsal_g_ai, flux_bio, flux_bio_ai - use ice_grid, only: tmask - use ice_state, only: aicen, aice -#ifdef CICE_IN_NEMO - use ice_state, only: aice_init - use ice_flux, only: flatn_f, fsurfn_f -#endif - use ice_step_mod, only: ocean_mixed_layer - use ice_timers, only: timer_couple, ice_timer_start, ice_timer_stop - - integer (kind=int_kind), intent(in) :: & - iblk ! block index - - ! local variables - - integer (kind=int_kind) :: & - ilo,ihi,jlo,jhi, & ! beginning and end of physical domain - n , & ! thickness category index - i,j , & ! horizontal indices - k , & ! tracer index - nbtrcr ! - - type (block) :: & - this_block ! block information for current block - - logical (kind=log_kind) :: & - calc_Tsfc ! - - real (kind=dbl_kind) :: & - cszn , & ! counter for history averaging - puny , & ! - rhofresh , & ! - netsw ! flag for shortwave radiation presence - - character(len=*), parameter :: subname = '(coupling_prep)' - - call icepack_query_parameters(puny_out=puny, rhofresh_out=rhofresh) - call icepack_query_tracer_numbers(nbtrcr_out=nbtrcr) - call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - - !----------------------------------------------------------------- - ! Save current value of frzmlt for diagnostics. - ! Update mixed layer with heat and radiation from ice. - !----------------------------------------------------------------- - - do j = 1, ny_block - do i = 1, nx_block - frzmlt_init (i,j,iblk) = frzmlt(i,j,iblk) - enddo - enddo - - call ice_timer_start(timer_couple) ! atm/ocn coupling - - if (oceanmixed_ice) & - call ocean_mixed_layer (dt,iblk) ! 
ocean surface fluxes and sst - - !----------------------------------------------------------------- - ! Aggregate albedos - !----------------------------------------------------------------- - - do j = 1, ny_block - do i = 1, nx_block - alvdf(i,j,iblk) = c0 - alidf(i,j,iblk) = c0 - alvdr(i,j,iblk) = c0 - alidr(i,j,iblk) = c0 - - albice(i,j,iblk) = c0 - albsno(i,j,iblk) = c0 - albpnd(i,j,iblk) = c0 - apeff_ai(i,j,iblk) = c0 - snowfrac(i,j,iblk) = c0 - - ! for history averaging - cszn = c0 - netsw = swvdr(i,j,iblk)+swidr(i,j,iblk)+swvdf(i,j,iblk)+swidf(i,j,iblk) - if (netsw > puny) cszn = c1 - do n = 1, nstreams - albcnt(i,j,iblk,n) = albcnt(i,j,iblk,n) + cszn - enddo - enddo - enddo - - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi - - do n = 1, ncat - do j = jlo, jhi - do i = ilo, ihi - if (aicen(i,j,n,iblk) > puny) then - - alvdf(i,j,iblk) = alvdf(i,j,iblk) & - + alvdfn(i,j,n,iblk)*aicen(i,j,n,iblk) - alidf(i,j,iblk) = alidf(i,j,iblk) & - + alidfn(i,j,n,iblk)*aicen(i,j,n,iblk) - alvdr(i,j,iblk) = alvdr(i,j,iblk) & - + alvdrn(i,j,n,iblk)*aicen(i,j,n,iblk) - alidr(i,j,iblk) = alidr(i,j,iblk) & - + alidrn(i,j,n,iblk)*aicen(i,j,n,iblk) - - netsw = swvdr(i,j,iblk) + swidr(i,j,iblk) & - + swvdf(i,j,iblk) + swidf(i,j,iblk) - if (netsw > puny) then ! sun above horizon - albice(i,j,iblk) = albice(i,j,iblk) & - + albicen(i,j,n,iblk)*aicen(i,j,n,iblk) - albsno(i,j,iblk) = albsno(i,j,iblk) & - + albsnon(i,j,n,iblk)*aicen(i,j,n,iblk) - albpnd(i,j,iblk) = albpnd(i,j,iblk) & - + albpndn(i,j,n,iblk)*aicen(i,j,n,iblk) - endif - - apeff_ai(i,j,iblk) = apeff_ai(i,j,iblk) & ! for history - + apeffn(i,j,n,iblk)*aicen(i,j,n,iblk) - snowfrac(i,j,iblk) = snowfrac(i,j,iblk) & ! for history - + snowfracn(i,j,n,iblk)*aicen(i,j,n,iblk) - - endif ! aicen > puny - enddo - enddo - enddo - - do j = 1, ny_block - do i = 1, nx_block - - !----------------------------------------------------------------- - ! reduce fresh by fpond for coupling - !----------------------------------------------------------------- - - if (l_mpond_fresh) then - fpond(i,j,iblk) = fpond(i,j,iblk) * rhofresh/dt - fresh(i,j,iblk) = fresh(i,j,iblk) - fpond(i,j,iblk) - endif - - !---------------------------------------------------------------- - ! Store grid box mean albedos and fluxes before scaling by aice - !---------------------------------------------------------------- - - alvdf_ai (i,j,iblk) = alvdf (i,j,iblk) - alidf_ai (i,j,iblk) = alidf (i,j,iblk) - alvdr_ai (i,j,iblk) = alvdr (i,j,iblk) - alidr_ai (i,j,iblk) = alidr (i,j,iblk) - fresh_ai (i,j,iblk) = fresh (i,j,iblk) - fsalt_ai (i,j,iblk) = fsalt (i,j,iblk) - fhocn_ai (i,j,iblk) = fhocn (i,j,iblk) - fswthru_ai(i,j,iblk) = fswthru(i,j,iblk) - fzsal_ai (i,j,iblk) = fzsal (i,j,iblk) - fzsal_g_ai(i,j,iblk) = fzsal_g(i,j,iblk) - - if (nbtrcr > 0) then - do k = 1, nbtrcr - flux_bio_ai (i,j,k,iblk) = flux_bio (i,j,k,iblk) - enddo - endif - - !----------------------------------------------------------------- - ! Save net shortwave for scaling factor in scale_factor - !----------------------------------------------------------------- - scale_factor(i,j,iblk) = & - swvdr(i,j,iblk)*(c1 - alvdr_ai(i,j,iblk)) & - + swvdf(i,j,iblk)*(c1 - alvdf_ai(i,j,iblk)) & - + swidr(i,j,iblk)*(c1 - alidr_ai(i,j,iblk)) & - + swidf(i,j,iblk)*(c1 - alidf_ai(i,j,iblk)) - - enddo - enddo - - !----------------------------------------------------------------- - ! Divide fluxes by ice area - ! 
- the CESM coupler assumes fluxes are per unit ice area - ! - also needed for global budget in diagnostics - !----------------------------------------------------------------- - - call scale_fluxes (nx_block, ny_block, & - tmask (:,:,iblk), nbtrcr, icepack_max_aero, & - aice (:,:,iblk), Tf (:,:,iblk), & - Tair (:,:,iblk), Qa (:,:,iblk), & - strairxT (:,:,iblk), strairyT(:,:,iblk), & - fsens (:,:,iblk), flat (:,:,iblk), & - fswabs (:,:,iblk), flwout (:,:,iblk), & - evap (:,:,iblk), & - Tref (:,:,iblk), Qref (:,:,iblk), & - fresh (:,:,iblk), fsalt (:,:,iblk), & - fhocn (:,:,iblk), fswthru (:,:,iblk), & - faero_ocn(:,:,:,iblk), & - alvdr (:,:,iblk), alidr (:,:,iblk), & - alvdf (:,:,iblk), alidf (:,:,iblk), & - fzsal (:,:,iblk), fzsal_g (:,:,iblk), & - flux_bio(:,:,1:nbtrcr,iblk)) - -#ifdef CICE_IN_NEMO -!echmod - comment this out for efficiency, if .not. calc_Tsfc - if (.not. calc_Tsfc) then - - !--------------------------------------------------------------- - ! If surface fluxes were provided, conserve these fluxes at ice - ! free points by passing to ocean. - !--------------------------------------------------------------- - - call sfcflux_to_ocn & - (nx_block, ny_block, & - tmask (:,:,iblk), aice_init(:,:,iblk), & - fsurfn_f (:,:,:,iblk), flatn_f(:,:,:,iblk), & - fresh (:,:,iblk), fhocn (:,:,iblk)) - endif -!echmod -#endif - call ice_timer_stop(timer_couple) ! atm/ocn coupling - - end subroutine coupling_prep - -#ifdef CICE_IN_NEMO - -!======================================================================= -! -! If surface heat fluxes are provided to CICE instead of CICE calculating -! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can -! be provided at points which do not have ice. (This is could be due to -! the heat fluxes being calculated on a lower resolution grid or the -! heat fluxes not recalculated at every CICE timestep.) At ice free points, -! conserve energy and water by passing these fluxes to the ocean. -! -! author: A. McLaren, Met Office - - subroutine sfcflux_to_ocn(nx_block, ny_block, & - tmask, aice, & - fsurfn_f, flatn_f, & - fresh, fhocn) - - use ice_domain_size, only: ncat - - integer (kind=int_kind), intent(in) :: & - nx_block, ny_block ! block dimensions - - logical (kind=log_kind), dimension (nx_block,ny_block), intent(in) :: & - tmask ! land/boundary mask, thickness (T-cell) - - real (kind=dbl_kind), dimension(nx_block,ny_block), intent(in):: & - aice ! initial ice concentration - - real (kind=dbl_kind), dimension(nx_block,ny_block,ncat), intent(in) :: & - fsurfn_f, & ! net surface heat flux (provided as forcing) - flatn_f ! latent heat flux (provided as forcing) - - real (kind=dbl_kind), dimension(nx_block,ny_block), intent(inout):: & - fresh , & ! fresh water flux to ocean (kg/m2/s) - fhocn ! actual ocn/ice heat flx (W/m**2) - - - ! local variables - integer (kind=int_kind) :: & - i, j, n ! horizontal indices - - real (kind=dbl_kind) :: & - puny, & ! - rLsub ! 1/Lsub - - character(len=*), parameter :: subname = '(sfcflux_to_ocn)' - - call icepack_query_parameters(puny_out=puny) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - rLsub = c1 / Lsub - - do n = 1, ncat - do j = 1, ny_block - do i = 1, nx_block - if (tmask(i,j) .and. aice(i,j) <= puny) then - fhocn(i,j) = fhocn(i,j) & - + fsurfn_f(i,j,n) + flatn_f(i,j,n) - fresh(i,j) = fresh(i,j) & - + flatn_f(i,j,n) * rLsub - endif - enddo ! i - enddo ! j - enddo ! 
n - - - end subroutine sfcflux_to_ocn - -#endif - -!======================================================================= - - end module CICE_RunMod - -!======================================================================= diff --git a/cicecore/drivers/standalone/cice/CICE_InitMod.F90 b/cicecore/drivers/standalone/cice/CICE_InitMod.F90 index 0b61433a3..59bbca31c 100644 --- a/cicecore/drivers/standalone/cice/CICE_InitMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_InitMod.F90 @@ -77,7 +77,7 @@ subroutine cice_init use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & get_forcing_atmo, get_forcing_ocn, get_wave_spec use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & - faero_default, faero_optics, alloc_forcing_bgc + faero_default, faero_optics, alloc_forcing_bgc, fiso_default use ice_grid, only: init_grid1, init_grid2, alloc_grid use ice_history, only: init_hist, accum_hist use ice_restart_shared, only: restart, runtype @@ -92,7 +92,7 @@ subroutine cice_init #endif logical(kind=log_kind) :: tr_aero, tr_zaero, skl_bgc, z_tracers, & - tr_fsd, wave_spec + tr_iso, tr_fsd, wave_spec character(len=*), parameter :: subname = '(cice_init)' call init_communicate ! initial setup for message passing @@ -122,7 +122,6 @@ subroutine cice_init call ice_timer_start(timer_total) ! start timing entire run call init_grid2 ! grid variables call init_zbgc ! vertical biogeochemistry initialization - call init_calendar ! initialize some calendar stuff call init_hist (dt) ! initialize output history file @@ -134,6 +133,7 @@ subroutine cice_init endif call init_coupler_flux ! initialize fluxes exchanged with coupler + #ifdef popcice call sst_sss ! POP data for CICE initialization #endif @@ -181,6 +181,7 @@ subroutine cice_init call init_history_dyn ! initialize dynamic history variables call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_zaero_out=tr_zaero) + call icepack_query_tracer_flags(tr_iso_out=tr_iso) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(trim(subname), & file=__FILE__,line= __LINE__) @@ -211,6 +212,8 @@ subroutine cice_init call get_forcing_atmo ! atmospheric forcing from data call get_forcing_ocn(dt) ! ocean forcing from data + ! isotopes + if (tr_iso) call fiso_default ! default values ! aerosols ! if (tr_aero) call faero_data ! data file ! if (tr_zaero) call fzaero_data ! data file (gx1) @@ -239,20 +242,21 @@ subroutine init_restart use ice_calendar, only: time, calendar use ice_constants, only: c0 use ice_domain, only: nblocks - use ice_domain_size, only: ncat, n_aero, nfsd + use ice_domain_size, only: ncat, n_iso, n_aero, nfsd use ice_dyn_eap, only: read_restart_eap use ice_dyn_shared, only: kdyn use ice_grid, only: tmask use ice_init, only: ice_ic use ice_init_column, only: init_age, init_FY, init_lvl, & init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & - init_aerosol, init_hbrine, init_bgc, init_fsd + init_isotope, init_aerosol, init_hbrine, init_bgc, init_fsd use ice_restart_column, only: restart_age, read_restart_age, & restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & restart_pond_cesm, read_restart_pond_cesm, & restart_pond_lvl, read_restart_pond_lvl, & restart_pond_topo, read_restart_pond_topo, & restart_fsd, read_restart_fsd, & + restart_iso, read_restart_iso, & restart_aero, read_restart_aero, & restart_hbrine, read_restart_hbrine, & restart_zsal, restart_bgc @@ -265,13 +269,13 @@ subroutine init_restart iblk ! 
block index logical(kind=log_kind) :: & tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & - tr_pond_topo, tr_fsd, tr_aero, tr_brine, & + tr_pond_topo, tr_fsd, tr_iso, tr_aero, tr_brine, & skl_bgc, z_tracers, solve_zsal integer(kind=int_kind) :: & ntrcr integer(kind=int_kind) :: & nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, & - nt_iage, nt_FY, nt_aero, nt_fsd + nt_iage, nt_FY, nt_aero, nt_fsd, nt_isosno, nt_isoice character(len=*), parameter :: subname = '(init_restart)' @@ -285,10 +289,11 @@ subroutine init_restart call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & - tr_fsd_out=tr_fsd) + tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, nt_ipnd_out=nt_ipnd, & - nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_aero_out=nt_aero, nt_fsd_out=nt_fsd) + nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_aero_out=nt_aero, nt_fsd_out=nt_fsd, & + nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -393,6 +398,20 @@ subroutine init_restart call init_fsd(trcrn(:,:,nt_fsd:nt_fsd+nfsd-1,:,:)) endif endif + + ! isotopes + if (tr_iso) then + if (trim(runtype) == 'continue') restart_iso = .true. + if (restart_iso) then + call read_restart_iso + else + do iblk = 1, nblocks + call init_isotope(trcrn(:,:,nt_isosno:nt_isosno+n_iso-1,:,iblk), & + trcrn(:,:,nt_isoice:nt_isoice+n_iso-1,:,iblk)) + enddo ! iblk + endif + endif + if (tr_aero) then ! ice aerosol if (trim(runtype) == 'continue') restart_aero = .true. 
if (restart_aero) then diff --git a/cicecore/drivers/standalone/cice/CICE_RunMod.F90 b/cicecore/drivers/standalone/cice/CICE_RunMod.F90 index ad974475b..7645c43f3 100644 --- a/cicecore/drivers/standalone/cice/CICE_RunMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_RunMod.F90 @@ -21,7 +21,7 @@ module CICE_RunMod use ice_constants, only: field_loc_center, field_type_scalar use ice_exit, only: abort_ice use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted - use icepack_intfc, only: icepack_max_aero + use icepack_intfc, only: icepack_max_iso, icepack_max_aero use icepack_intfc, only: icepack_query_parameters use icepack_intfc, only: icepack_query_tracer_flags, icepack_query_tracer_sizes @@ -47,12 +47,12 @@ subroutine CICE_Run use ice_forcing, only: get_forcing_atmo, get_forcing_ocn, & get_wave_spec use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & - faero_default + fiso_default, faero_default use ice_flux, only: init_flux_atm, init_flux_ocn use ice_timers, only: ice_timer_start, ice_timer_stop, & timer_couple, timer_step logical (kind=log_kind) :: & - tr_aero, tr_zaero, skl_bgc, z_tracers, wave_spec, tr_fsd + tr_iso, tr_aero, tr_zaero, skl_bgc, z_tracers, wave_spec, tr_fsd character(len=*), parameter :: subname = '(CICE_Run)' !-------------------------------------------------------------------- @@ -64,7 +64,8 @@ subroutine CICE_Run call icepack_query_parameters(skl_bgc_out=skl_bgc, & z_tracers_out=z_tracers, & wave_spec_out=wave_spec) - call icepack_query_tracer_flags(tr_aero_out=tr_aero, & + call icepack_query_tracer_flags(tr_iso_out=tr_iso, & + tr_aero_out=tr_aero, & tr_zaero_out=tr_zaero, & tr_fsd_out=tr_fsd) call icepack_warnings_flush(nu_diag) @@ -100,6 +101,8 @@ subroutine CICE_Run call get_forcing_atmo ! atmospheric forcing from data call get_forcing_ocn(dt) ! ocean forcing from data + ! isotopes + if (tr_iso) call fiso_default ! default values ! aerosols ! if (tr_aero) call faero_data ! data file ! if (tr_zaero) call fzaero_data ! data file (gx1) @@ -151,7 +154,7 @@ subroutine ice_step use ice_restart_column, only: write_restart_age, write_restart_FY, & write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & write_restart_pond_topo, write_restart_aero, write_restart_fsd, & - write_restart_bgc, write_restart_hbrine + write_restart_iso, write_restart_bgc, write_restart_hbrine use ice_restart_driver, only: dumpfile use ice_restoring, only: restore_ice, ice_HaloRestore use ice_step_mod, only: prep_radiation, step_therm1, step_therm2, & @@ -171,7 +174,7 @@ subroutine ice_step logical (kind=log_kind) :: & tr_iage, tr_FY, tr_lvl, tr_fsd, & - tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_aero, & + tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec character(len=*), parameter :: subname = '(ice_step)' @@ -182,7 +185,7 @@ subroutine ice_step call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & - tr_fsd_out=tr_fsd) + tr_iso_out=tr_iso, tr_fsd_out=tr_fsd) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -227,7 +230,7 @@ subroutine ice_step call biogeochemistry (dt, iblk) ! biogeochemistry call step_therm2 (dt, iblk) ! ice thickness distribution thermo - endif + endif ! ktherm > 0 enddo ! 
iblk !$OMP END PARALLEL DO @@ -322,6 +325,7 @@ subroutine ice_step if (tr_pond_lvl) call write_restart_pond_lvl if (tr_pond_topo) call write_restart_pond_topo if (tr_fsd) call write_restart_fsd + if (tr_iso) call write_restart_iso if (tr_aero) call write_restart_aero if (solve_zsal .or. skl_bgc .or. z_tracers) & call write_restart_bgc @@ -356,7 +360,8 @@ subroutine coupling_prep (iblk) swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & fsens, flat, fswabs, flwout, evap, Tref, Qref, & scale_fluxes, frzmlt_init, frzmlt - use ice_flux_bgc, only: faero_ocn, fzsal_ai, fzsal_g_ai, flux_bio, flux_bio_ai + use ice_flux_bgc, only: faero_ocn, fiso_ocn, Qref_iso, fiso_evap, & + fzsal_ai, fzsal_g_ai, flux_bio, flux_bio_ai use ice_grid, only: tmask use ice_state, only: aicen, aice #ifdef CICE_IN_NEMO @@ -535,7 +540,8 @@ subroutine coupling_prep (iblk) !----------------------------------------------------------------- call scale_fluxes (nx_block, ny_block, & - tmask (:,:,iblk), nbtrcr, icepack_max_aero, & + tmask (:,:,iblk), nbtrcr, & + icepack_max_aero, & aice (:,:,iblk), Tf (:,:,iblk), & Tair (:,:,iblk), Qa (:,:,iblk), & strairxT (:,:,iblk), strairyT(:,:,iblk), & @@ -549,7 +555,10 @@ subroutine coupling_prep (iblk) alvdr (:,:,iblk), alidr (:,:,iblk), & alvdf (:,:,iblk), alidf (:,:,iblk), & fzsal (:,:,iblk), fzsal_g (:,:,iblk), & - flux_bio(:,:,1:nbtrcr,iblk)) + flux_bio (:,:,1:nbtrcr,iblk), & + Qref_iso =Qref_iso (:,:,:,iblk), & + fiso_evap=fiso_evap(:,:,:,iblk), & + fiso_ocn =fiso_ocn (:,:,:,iblk)) #ifdef CICE_IN_NEMO !echmod - comment this out for efficiency, if .not. calc_Tsfc diff --git a/cicecore/drivers/standalone/cice/CICE_RunMod.F90_debug b/cicecore/drivers/standalone/cice/CICE_RunMod.F90_debug index 5de6b1cfd..7ca555433 100644 --- a/cicecore/drivers/standalone/cice/CICE_RunMod.F90_debug +++ b/cicecore/drivers/standalone/cice/CICE_RunMod.F90_debug @@ -21,9 +21,9 @@ use ice_constants, only: field_loc_center, field_type_scalar use ice_exit, only: abort_ice use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted - use icepack_intfc, only: icepack_max_aero + use icepack_intfc, only: icepack_max_iso, icepack_max_aero use icepack_intfc, only: icepack_query_parameters - use icepack_intfc, only: icepack_query_tracer_flags, icepack_query_tracer_numbers + use icepack_intfc, only: icepack_query_tracer_flags, icepack_query_tracer_sizes implicit none private @@ -47,12 +47,12 @@ use ice_forcing, only: get_forcing_atmo, get_forcing_ocn, & get_wave_spec use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & - faero_default + fiso_default, faero_default use ice_flux, only: init_flux_atm, init_flux_ocn use ice_timers, only: ice_timer_start, ice_timer_stop, & timer_couple, timer_step logical (kind=log_kind) :: & - tr_aero, tr_zaero, skl_bgc, z_tracers, wave_spec, tr_fsd + tr_iso, tr_aero, tr_zaero, skl_bgc, z_tracers, wave_spec, tr_fsd character(len=*), parameter :: subname = '(CICE_Run)' !-------------------------------------------------------------------- @@ -64,7 +64,8 @@ call icepack_query_parameters(skl_bgc_out=skl_bgc, & z_tracers_out=z_tracers, & wave_spec_out=wave_spec) - call icepack_query_tracer_flags(tr_aero_out=tr_aero, & + call icepack_query_tracer_flags(tr_iso_out=tr_iso, & + tr_aero_out=tr_aero, & tr_zaero_out=tr_zaero, & tr_fsd_out=tr_fsd) call icepack_warnings_flush(nu_diag) @@ -100,6 +101,8 @@ call get_forcing_atmo ! atmospheric forcing from data call get_forcing_ocn(dt) ! ocean forcing from data + ! isotopes + if (tr_iso) call fiso_default ! default values ! 
aerosols ! if (tr_aero) call faero_data ! data file ! if (tr_zaero) call fzaero_data ! data file (gx1) @@ -151,7 +154,7 @@ use ice_restart_column, only: write_restart_age, write_restart_FY, & write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & write_restart_pond_topo, write_restart_aero, write_restart_fsd, & - write_restart_bgc, write_restart_hbrine + write_restart_iso, write_restart_bgc, write_restart_hbrine use ice_restart_driver, only: dumpfile use ice_restoring, only: restore_ice, ice_HaloRestore use ice_step_mod, only: prep_radiation, step_therm1, step_therm2, & @@ -171,7 +174,7 @@ logical (kind=log_kind) :: & tr_iage, tr_FY, tr_lvl, tr_fsd, & - tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_aero, & + tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec character(len=*), parameter :: subname = '(ice_step)' @@ -189,7 +192,7 @@ call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & - tr_fsd_out=tr_fsd) + tr_iso_out=tr_iso, tr_fsd_out=tr_fsd) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -273,8 +276,10 @@ ! momentum, stress, transport call step_dyn_horiz (dt_dyn) + do iblk = 1, nblocks plabeld = 'post step_dyn_horiz' call debug_ice (iblk, plabeld) + enddo ! iblk ! ridging !$OMP PARALLEL DO PRIVATE(iblk) @@ -283,6 +288,11 @@ enddo !$OMP END PARALLEL DO + do iblk = 1, nblocks + plabeld = 'post step_dyn_ridge' + call debug_ice (iblk, plabeld) + enddo ! iblk + ! clean up, update tendency diagnostics offset = c0 call update_state (dt_dyn, daidtd, dvidtd, dagedtd, offset) @@ -357,6 +367,7 @@ if (tr_pond_lvl) call write_restart_pond_lvl if (tr_pond_topo) call write_restart_pond_topo if (tr_fsd) call write_restart_fsd + if (tr_iso) call write_restart_iso if (tr_aero) call write_restart_aero if (solve_zsal .or. skl_bgc .or. 
z_tracers) & call write_restart_bgc @@ -391,7 +402,8 @@ swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & fsens, flat, fswabs, flwout, evap, Tref, Qref, & scale_fluxes, frzmlt_init, frzmlt - use ice_flux_bgc, only: faero_ocn, fzsal_ai, fzsal_g_ai, flux_bio, flux_bio_ai + use ice_flux_bgc, only: faero_ocn, fiso_ocn, Qref_iso, fiso_evap, & + fzsal_ai, fzsal_g_ai, flux_bio, flux_bio_ai use ice_grid, only: tmask use ice_state, only: aicen, aice #ifdef CICE_IN_NEMO @@ -428,7 +440,7 @@ character(len=*), parameter :: subname = '(coupling_prep)' call icepack_query_parameters(puny_out=puny, rhofresh_out=rhofresh) - call icepack_query_tracer_numbers(nbtrcr_out=nbtrcr) + call icepack_query_tracer_sizes(nbtrcr_out=nbtrcr) call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -570,7 +582,8 @@ !----------------------------------------------------------------- call scale_fluxes (nx_block, ny_block, & - tmask (:,:,iblk), nbtrcr, icepack_max_aero, & + tmask (:,:,iblk), nbtrcr, & + icepack_max_aero, & aice (:,:,iblk), Tf (:,:,iblk), & Tair (:,:,iblk), Qa (:,:,iblk), & strairxT (:,:,iblk), strairyT(:,:,iblk), & @@ -584,7 +597,10 @@ alvdr (:,:,iblk), alidr (:,:,iblk), & alvdf (:,:,iblk), alidf (:,:,iblk), & fzsal (:,:,iblk), fzsal_g (:,:,iblk), & - flux_bio(:,:,1:nbtrcr,iblk)) + flux_bio (:,:,1:nbtrcr,iblk), & + Qref_iso =Qref_iso (:,:,:,iblk), & + fiso_evap=fiso_evap(:,:,:,iblk), & + fiso_ocn =fiso_ocn (:,:,:,iblk)) #ifdef CICE_IN_NEMO !echmod - comment this out for efficiency, if .not. calc_Tsfc diff --git a/cicecore/shared/ice_distribution.F90 b/cicecore/shared/ice_distribution.F90 index 4af95ae1f..8c5808820 100644 --- a/cicecore/shared/ice_distribution.F90 +++ b/cicecore/shared/ice_distribution.F90 @@ -118,8 +118,7 @@ function create_distribution(dist_type, nprocs, work_per_block) case('spacecurve') - create_distribution = create_distrb_spacecurve(nprocs, & - work_per_block) + create_distribution = create_distrb_spacecurve(nprocs, work_per_block) case default @@ -364,7 +363,7 @@ subroutine ice_distributionDestroy(distribution) ! !---------------------------------------------------------------------- - distribution%nprocs = 0 + distribution%nprocs = 0 distribution%communicator = 0 distribution%numLocalBlocks = 0 @@ -377,6 +376,9 @@ subroutine ice_distributionDestroy(distribution) deallocate(distribution%blockLocation, stat=istat) deallocate(distribution%blockLocalID , stat=istat) deallocate(distribution%blockGlobalID, stat=istat) + deallocate(distribution%blockCnt , stat=istat) + deallocate(distribution%blockindex , stat=istat) + !----------------------------------------------------------------------- @@ -611,6 +613,12 @@ function create_distrb_cart(nprocs, workPerBlock) result(newDistrb) return endif + allocate (newDistrb%blockCnt(nprocs)) + newDistrb%blockCnt(:) = 0 + + allocate(newDistrb%blockIndex(nprocs,max_blocks)) + newDistrb%blockIndex(:,:) = 0 + !---------------------------------------------------------------------- ! ! distribute blocks linearly across processors in each direction @@ -640,6 +648,8 @@ function create_distrb_cart(nprocs, workPerBlock) result(newDistrb) localID = localID + 1 newDistrb%blockLocation(globalID) = processor newDistrb%blockLocalID (globalID) = localID + newDistrb%blockCnt(processor) = newDistrb%blockCnt(processor) + 1 + newDistrb%blockIndex(processor,localID) = globalID else ! 
no work - eliminate block from distribution newDistrb%blockLocation(globalID) = 0 newDistrb%blockLocalID (globalID) = 0 @@ -966,6 +976,12 @@ function create_distrb_rake(nprocs, workPerBlock) result(newDistrb) return endif + allocate (newDistrb%blockCnt(nprocs)) + newDistrb%blockCnt(:) = 0 + + allocate(newDistrb%blockIndex(nprocs,max_blocks)) + newDistrb%blockIndex(:,:) = 0 + allocate(procTmp(nprocs), stat=istat) if (istat > 0) then call abort_ice( & @@ -981,11 +997,13 @@ function create_distrb_rake(nprocs, workPerBlock) result(newDistrb) if (pid > 0) then procTmp(pid) = procTmp(pid) + 1 newDistrb%blockLocalID (n) = procTmp(pid) + newDistrb%blockIndex(pid,procTmp(pid)) = n else newDistrb%blockLocalID (n) = 0 endif end do + newDistrb%blockCnt(:) = procTmp(:) newDistrb%numLocalBlocks = procTmp(my_task+1) if (minval(procTmp) < 1) then @@ -2146,6 +2164,12 @@ function create_distrb_spacecurve(nprocs,work_per_block) dist%blockLocation=0 dist%blockLocalID =0 + allocate (dist%blockCnt(nprocs)) + dist%blockCnt(:) = 0 + + allocate(dist%blockIndex(nprocs,max_blocks)) + dist%blockIndex(:,:) = 0 + !---------------------------------------------------------------------- ! Create the array to hold the SFC and indices into it !---------------------------------------------------------------------- @@ -2281,12 +2305,14 @@ function create_distrb_spacecurve(nprocs,work_per_block) if(pid>0) then proc_tmp(pid) = proc_tmp(pid) + 1 dist%blockLocalID(n) = proc_tmp(pid) + dist%blockIndex(pid,proc_tmp(pid)) = n else dist%blockLocalID(n) = 0 endif enddo dist%numLocalBlocks = proc_tmp(my_task+1) + dist%blockCnt(:) = proc_tmp(:) if (dist%numLocalBlocks > 0) then allocate (dist%blockGlobalID(dist%numLocalBlocks)) diff --git a/cicecore/shared/ice_domain_size.F90 b/cicecore/shared/ice_domain_size.F90 index 6f7a73aa1..56381b986 100644 --- a/cicecore/shared/ice_domain_size.F90 +++ b/cicecore/shared/ice_domain_size.F90 @@ -32,10 +32,11 @@ module ice_domain_size nfsd , & ! number of floe size categories nilyr , & ! number of ice layers per category nslyr , & ! number of snow layers per category - nblyr , & ! number of bio/brine layers per category + nblyr , & ! number of bio/brine layers per category + n_iso , & ! number of isotopes in use n_aero , & ! number of aerosols in use - n_zaero , & ! number of z aerosols in use - n_algae , & ! number of algae in use + n_zaero , & ! number of z aerosols in use + n_algae , & ! number of algae in use n_doc , & ! number of DOC pools in use n_dic , & ! number of DIC pools in use n_don , & ! number of DON pools in use diff --git a/cicecore/shared/ice_fileunits.F90 b/cicecore/shared/ice_fileunits.F90 index 00f7acaef..4c91fdb2a 100644 --- a/cicecore/shared/ice_fileunits.F90 +++ b/cicecore/shared/ice_fileunits.F90 @@ -54,6 +54,8 @@ module ice_fileunits nu_restart_pond,& ! restart input file for melt pond tracer nu_dump_fsd , & ! dump file for restarting floe size distribution nu_restart_fsd, & ! restart input file for floe size distribution + nu_dump_iso , & ! dump file for restarting isotope tracers + nu_restart_iso, & ! restart input file for isotope tracers nu_dump_aero , & ! dump file for restarting aerosol tracer nu_restart_aero,& ! restart input file for aerosol tracer nu_dump_bgc , & ! dump file for restarting bgc @@ -106,7 +108,7 @@ subroutine init_fileunits character(len=*),parameter :: subname='(init_fileunits)' - allocate(ice_IOUnitsInUse(ice_IOUnitsMaxUnit)) + if (.not.allocated(ice_IOUnitsInUse)) allocate(ice_IOUnitsInUse(ice_IOUnitsMaxUnit)) ice_IOUnitsInUse = .false. 
ice_IOUnitsInUse(ice_stdin) = .true. ! reserve unit 5 @@ -130,6 +132,8 @@ subroutine init_fileunits call get_fileunit(nu_restart_pond) call get_fileunit(nu_dump_fsd) call get_fileunit(nu_restart_fsd) + call get_fileunit(nu_dump_iso) + call get_fileunit(nu_restart_iso) call get_fileunit(nu_dump_aero) call get_fileunit(nu_restart_aero) call get_fileunit(nu_dump_bgc) @@ -217,6 +221,8 @@ subroutine release_all_fileunits call release_fileunit(nu_restart_pond) call release_fileunit(nu_dump_fsd) call release_fileunit(nu_restart_fsd) + call release_fileunit(nu_dump_iso) + call release_fileunit(nu_restart_iso) call release_fileunit(nu_dump_aero) call release_fileunit(nu_restart_aero) call release_fileunit(nu_dump_bgc) diff --git a/cicecore/shared/ice_init_column.F90 b/cicecore/shared/ice_init_column.F90 index 19deb0159..fbcc8413b 100644 --- a/cicecore/shared/ice_init_column.F90 +++ b/cicecore/shared/ice_init_column.F90 @@ -45,7 +45,7 @@ module ice_init_column init_age, init_FY, init_lvl, init_fsd, & init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & init_aerosol, init_bgc, init_hbrine, init_zbgc, input_zbgc, & - count_tracers + count_tracers, init_isotope ! namelist parameters needed locally @@ -671,6 +671,21 @@ end subroutine init_fsd !======================================================================= +! Initialize isotope tracers (call prior to reading restart data) + + subroutine init_isotope(isosno, isoice) + + real(kind=dbl_kind), dimension(:,:,:,:), intent(out) :: & + isosno, isoice + character(len=*),parameter :: subname='(init_isotope)' + + isosno(:,:,:,:) = c0 + isoice(:,:,:,:) = c0 + + end subroutine init_isotope + +!======================================================================= + ! Initialize ice aerosol tracer (call prior to reading restart data) subroutine init_aerosol(aero) @@ -1731,7 +1746,7 @@ end subroutine input_zbgc subroutine count_tracers - use ice_domain_size, only: nilyr, nslyr, nblyr, nfsd, & + use ice_domain_size, only: nilyr, nslyr, nblyr, nfsd, n_iso, & n_aero, n_zaero, n_algae, n_doc, n_dic, n_don, n_fed, n_fep ! local variables @@ -1743,10 +1758,10 @@ subroutine count_tracers integer (kind=int_kind) :: ntrcr logical (kind=log_kind) :: tr_iage, tr_FY, tr_lvl, tr_pond, tr_aero, tr_fsd - logical (kind=log_kind) :: tr_pond_cesm, tr_pond_lvl, tr_pond_topo + logical (kind=log_kind) :: tr_iso, tr_pond_cesm, tr_pond_lvl, tr_pond_topo integer (kind=int_kind) :: nt_Tsfc, nt_sice, nt_qice, nt_qsno, nt_iage, nt_FY integer (kind=int_kind) :: nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, nt_aero - integer (kind=int_kind) :: nt_fsd + integer (kind=int_kind) :: nt_fsd, nt_isosno, nt_isoice integer (kind=int_kind) :: & nbtrcr, nbtrcr_sw, & @@ -1829,6 +1844,7 @@ subroutine count_tracers tr_lvl_out=tr_lvl, tr_aero_out=tr_aero, tr_pond_out=tr_pond, & tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_fsd_out=tr_fsd, & + tr_iso_out=tr_iso, & tr_bgc_Nit_out=tr_bgc_Nit, tr_bgc_Am_out =tr_bgc_Am, tr_bgc_Sil_out=tr_bgc_Sil, & tr_bgc_DMS_out=tr_bgc_DMS, tr_bgc_PON_out=tr_bgc_PON, & tr_bgc_N_out =tr_bgc_N, tr_bgc_C_out =tr_bgc_C, tr_bgc_chl_out=tr_bgc_chl, & @@ -1897,13 +1913,22 @@ subroutine count_tracers ntrcr = ntrcr + nfsd endif + nt_isosno = 0 + nt_isoice = 0 + if (tr_iso) then + nt_isosno = ntrcr + 1 ! isotopes in snow + ntrcr = ntrcr + n_iso + nt_isoice = ntrcr + 1 ! isotopes in ice + ntrcr = ntrcr + n_iso + endif + nt_aero = 0 if (tr_aero) then nt_aero = ntrcr + 1 ntrcr = ntrcr + 4*n_aero ! 
4 dEdd layers, n_aero species else !tcx, modify code so we don't have to reset n_aero here - n_aero = 0 + n_aero = 0 !echmod - this is not getting set correctly (overwritten later?) endif !----------------------------------------------------------------- @@ -2178,6 +2203,8 @@ subroutine count_tracers if (nt_hpnd <= 0) nt_hpnd = ntrcr if (nt_ipnd <= 0) nt_ipnd = ntrcr if (nt_fsd <= 0) nt_fsd = ntrcr + if (nt_isosno<= 0) nt_isosno= ntrcr + if (nt_isoice<= 0) nt_isoice= ntrcr if (nt_aero <= 0) nt_aero = ntrcr if (nt_fbri <= 0) nt_fbri = ntrcr if (nt_bgc_S <= 0) nt_bgc_S = ntrcr @@ -2201,7 +2228,7 @@ subroutine count_tracers nt_qice_in=nt_qice, nt_qsno_in=nt_qsno, nt_iage_in=nt_iage, nt_fy_in=nt_fy, & nt_alvl_in=nt_alvl, nt_vlvl_in=nt_vlvl, nt_apnd_in=nt_apnd, nt_hpnd_in=nt_hpnd, & nt_ipnd_in=nt_ipnd, nt_fsd_in=nt_fsd, nt_aero_in=nt_aero, & - nt_fbri_in=nt_fbri, & + nt_isosno_in=nt_isosno, nt_isoice_in=nt_isoice, nt_fbri_in=nt_fbri, & nt_bgc_Nit_in=nt_bgc_Nit, nt_bgc_Am_in=nt_bgc_Am, nt_bgc_Sil_in=nt_bgc_Sil, & nt_bgc_DMS_in=nt_bgc_DMS, nt_bgc_PON_in=nt_bgc_PON, nt_bgc_S_in=nt_bgc_S, & nt_bgc_N_in=nt_bgc_N, nt_bgc_chl_in=nt_bgc_chl, & diff --git a/cicecore/shared/ice_restart_column.F90 b/cicecore/shared/ice_restart_column.F90 index 34055a751..e830dd50b 100644 --- a/cicecore/shared/ice_restart_column.F90 +++ b/cicecore/shared/ice_restart_column.F90 @@ -33,6 +33,7 @@ module ice_restart_column write_restart_pond_lvl, read_restart_pond_lvl, & write_restart_pond_topo, read_restart_pond_topo, & write_restart_fsd, read_restart_fsd, & + write_restart_iso, read_restart_iso, & write_restart_aero, read_restart_aero, & write_restart_bgc, read_restart_bgc, & write_restart_hbrine, read_restart_hbrine @@ -45,6 +46,7 @@ module ice_restart_column restart_pond_lvl , & ! if .true., read meltponds restart file restart_pond_topo, & ! if .true., read meltponds restart file restart_fsd , & ! if .true., read floe size restart file + restart_iso , & ! if .true., read isotope tracer restart file restart_aero , & ! if .true., read aerosol tracer restart file restart_zsal , & ! if .true., read Salinity from restart file restart_hbrine , & ! if .true., read hbrine from restart file @@ -551,6 +553,89 @@ end subroutine read_restart_fsd !======================================================================= + +! Dumps all values needed for restarting isotope tracers +! author Elizabeth C. Hunke, LANL + + subroutine write_restart_iso() + + use ice_domain_size, only: n_iso + use ice_fileunits, only: nu_dump_iso + use ice_state, only: trcrn + + ! local variables + + logical (kind=log_kind) :: diag + integer (kind=int_kind) :: nt_isosno, nt_isoice, k + character*3 ck + character(len=*),parameter :: subname='(write_restart_iso)' + + call icepack_query_tracer_indices(nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + diag = .true. + + !----------------------------------------------------------------- + + do k = 1, n_iso + write(ck,'(i3.3)') k + call write_restart_field(nu_dump_iso,0, trcrn(:,:,nt_isosno+k-1,:,:), & + 'ruf8','isosno'//trim(ck),ncat,diag) + enddo + + do k = 1, n_iso + write(ck,'(i3.3)') k + call write_restart_field(nu_dump_iso,0, trcrn(:,:,nt_isoice+k-1,:,:), & + 'ruf8','isoice'//trim(ck),ncat,diag) + enddo + + end subroutine write_restart_iso + +!======================================================================= +
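! [Editor's note] Sketch only, not part of this patch: the restart field names
! above are built from the tracer name plus a zero-padded i3.3 species suffix,
! so isotope species k is stored as 'isosno001', 'isoice002', and so on.  The
! program name below is hypothetical; n_iso = 3 mirrors set_nml.isotope:
!
!   program iso_restart_names
!     implicit none
!     integer, parameter :: n_iso = 3
!     integer :: k
!     character(len=3) :: ck
!     do k = 1, n_iso
!        write(ck,'(i3.3)') k                      ! e.g. k=2 gives ck='002'
!        print *, 'isosno'//trim(ck)//'  isoice'//trim(ck)
!     end do
!   end program iso_restart_names
!
! read_restart_iso below rebuilds the same names with an identical
! write(ck,'(i3.3)') loop, which is what keeps dump and restart in sync.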
+! Reads all values needed to restart isotope tracers +! author Elizabeth C. Hunke, LANL + + subroutine read_restart_iso() + + use ice_domain_size, only: n_iso + use ice_fileunits, only: nu_restart_iso + use ice_state, only: trcrn + + ! local variables + + logical (kind=log_kind) :: & + diag + integer (kind=int_kind) :: nt_isosno, nt_isoice, k + character*3 ck + character(len=*),parameter :: subname='(read_restart_iso)' + + call icepack_query_tracer_indices(nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + diag = .true. + + do k = 1, n_iso + write(ck,'(i3.3)') k + call read_restart_field(nu_restart_iso,0,trcrn(:,:,nt_isosno+k-1,:,:), & + 'ruf8','isosno'//trim(ck),ncat,diag, & + field_type=field_type_scalar,field_loc=field_loc_center) + enddo + + do k = 1, n_iso + write(ck,'(i3.3)') k + call read_restart_field(nu_restart_iso,0,trcrn(:,:,nt_isoice+k-1,:,:), & + 'ruf8','isoice'//trim(ck),ncat,diag, & + field_type=field_type_scalar,field_loc=field_loc_center) + enddo + + end subroutine read_restart_iso + +!======================================================================= + ! Dumps all values needed for restarting ! ! authors Elizabeth Hunke, LANL (original version) diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 000000000..e7f6a6e2f --- /dev/null +++ b/codecov.yml @@ -0,0 +1,6 @@ +coverage: + range: "20...100" + round: down + precision: 2 + +comment: false diff --git a/configuration/scripts/cice.batch.csh b/configuration/scripts/cice.batch.csh index 0450d2647..e8ec14cab 100755 --- a/configuration/scripts/cice.batch.csh +++ b/configuration/scripts/cice.batch.csh @@ -119,7 +119,7 @@ else if (${ICE_MACHINE} =~ cori*) then @ nthrds2 = ${nthrds} * 2 cat >> ${jobfile} << EOFB #SBATCH -J ${ICE_CASENAME} -###SBATCH -A ${acct} +#SBATCH -A ${acct} #SBATCH --qos ${queue} #SBATCH --time ${batchtime} #SBATCH --nodes ${nnodes} diff --git a/configuration/scripts/cice.build b/configuration/scripts/cice.build index 61d639e09..2534bfa7e 100755 --- a/configuration/scripts/cice.build +++ b/configuration/scripts/cice.build @@ -1,7 +1,7 @@ #! /bin/csh -f #==================================== -# If the cice binary is passed as an argument and the file exists, +# If the cice binary is passed via the --exe argument and the file exists, # copy it into the run directory and don't build the model. set dohelp = 0 diff --git a/configuration/scripts/cice.settings b/configuration/scripts/cice.settings index 8bb860916..7d9bce65c 100755 --- a/configuration/scripts/cice.settings +++ b/configuration/scripts/cice.settings @@ -40,4 +40,5 @@ if (${ICE_NTASKS} == 1) setenv ICE_COMMDIR serial ### Specialty code setenv ICE_BLDDEBUG false # build debug flags +setenv ICE_CODECOV false # build code coverage flags diff --git a/configuration/scripts/cice.test.setup.csh b/configuration/scripts/cice.test.setup.csh index 16fd84a69..535a2ac06 100755 --- a/configuration/scripts/cice.test.setup.csh +++ b/configuration/scripts/cice.test.setup.csh @@ -40,6 +40,13 @@ if ( ! -f ${ICE_RUNDIR}/cice ) then exit 99 endif +# Initialize test results and reset test results for rerun +mv -f ${ICE_CASEDIR}/test_output ${ICE_CASEDIR}/test_output.prev +echo "#---" >! 
${ICE_CASEDIR}/test_output +cat ${ICE_CASEDIR}/test_output.prev | grep -i "${ICE_TESTNAME} build" >> ${ICE_CASEDIR}/test_output +echo "PEND ${ICE_TESTNAME} run" >> ${ICE_CASEDIR}/test_output +rm -f ${ICE_CASEDIR}/test_output.prev + EOF2 if ( -f ${ICE_SCRIPTS}/tests/test_${ICE_TEST}.script) then diff --git a/configuration/scripts/ice_in b/configuration/scripts/ice_in index 406e8ec91..dcfedf772 100644 --- a/configuration/scripts/ice_in +++ b/configuration/scripts/ice_in @@ -63,6 +63,7 @@ / &tracer_nml + n_iso = 0 n_aero = 1 n_zaero = 0 n_algae = 0 @@ -83,6 +84,8 @@ restart_pond_topo = .false. tr_pond_lvl = .true. restart_pond_lvl = .false. + tr_iso = .false. + restart_iso = .false. tr_aero = .false. restart_aero = .false. tr_fsd = .false. @@ -93,6 +96,7 @@ kitd = 1 ktherm = 2 conduct = 'bubbly' + ksno = 0.3d0 a_rapid_mode = 0.5e-3 Rac_rapid_mode = 10.0 aspect_rapid_mode = 1.0 @@ -529,9 +533,12 @@ / &icefields_bgc_nml + f_fiso_atm = 'x' + f_fiso_ocn = 'x' + f_iso = 'x' f_faero_atm = 'x' f_faero_ocn = 'x' - f_aero = 'x' + f_aero = 'x' f_fbio = 'm' f_fbio_ai = 'm' f_zaero = 'x' diff --git a/configuration/scripts/machines/Macros.gaffney_gnu b/configuration/scripts/machines/Macros.gaffney_gnu index b2f178247..0d13560de 100644 --- a/configuration/scripts/machines/Macros.gaffney_gnu +++ b/configuration/scripts/machines/Macros.gaffney_gnu @@ -4,7 +4,7 @@ CPP := ftn -E CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} -CFLAGS := -c -O2 +CFLAGS := -c FIXEDFLAGS := -ffixed-line-length-132 FREEFLAGS := -ffree-form @@ -12,9 +12,21 @@ FFLAGS := -fconvert=big-endian -fbacktrace -ffree-line-length-none FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) - FFLAGS += -O0 -g -fcheck=bounds -finit-real=nan -fimplicit-none -ffpe-trap=invalid,zero,overflow -else - FFLAGS += -O2 + FFLAGS += -O0 -g -fcheck=bounds -finit-real=nan -fimplicit-none -ffpe-trap=invalid,zero,overflow + CFLAGS += -O0 +endif + +ifeq ($(ICE_CODECOV), true) + FFLAGS += -O0 -g -fprofile-arcs -ftest-coverage + CFLAGS += -O0 -g -coverage + LDFLAGS += -g -ftest-coverage -fprofile-arcs +endif + +ifneq ($(ICE_BLDDEBUG), true) +ifneq ($(ICE_CODECOV), true) + FFLAGS += -O2 + CFLAGS += -O2 +endif endif SCC := gcc diff --git a/configuration/scripts/machines/Macros.gordon_gnu b/configuration/scripts/machines/Macros.gordon_gnu index 131f539c1..2e80f7364 100644 --- a/configuration/scripts/machines/Macros.gordon_gnu +++ b/configuration/scripts/machines/Macros.gordon_gnu @@ -4,7 +4,7 @@ CPP := ftn -E CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} -CFLAGS := -c -O2 +CFLAGS := -c FIXEDFLAGS := -ffixed-line-length-132 FREEFLAGS := -ffree-form @@ -12,9 +12,21 @@ FFLAGS := -fconvert=big-endian -fbacktrace -ffree-line-length-none FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) - FFLAGS += -O0 -g -fcheck=bounds -finit-real=nan -fimplicit-none -ffpe-trap=invalid,zero,overflow -else - FFLAGS += -O2 + FFLAGS += -O0 -g -fcheck=bounds -finit-real=nan -fimplicit-none -ffpe-trap=invalid,zero,overflow + CFLAGS += -O0 +endif + +ifeq ($(ICE_CODECOV), true) + FFLAGS += -O0 -g -fprofile-arcs -ftest-coverage + CFLAGS += -O0 -g -coverage + LDFLAGS += -g -ftest-coverage -fprofile-arcs +endif + +ifneq ($(ICE_BLDDEBUG), true) +ifneq ($(ICE_CODECOV), true) + FFLAGS += -O2 + CFLAGS += -O2 +endif endif SCC := cc diff --git a/configuration/scripts/machines/Macros.izumi_gnu b/configuration/scripts/machines/Macros.izumi_gnu index 6526ac767..cdc6620f4 100644 --- a/configuration/scripts/machines/Macros.izumi_gnu +++ b/configuration/scripts/machines/Macros.izumi_gnu @@ -4,7 +4,7 @@ CPP := 
/usr/bin/cpp CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} -CFLAGS := -c -O2 +CFLAGS := -c FIXEDFLAGS := -ffixed-line-length-132 FREEFLAGS := -ffree-form @@ -13,8 +13,20 @@ FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) FFLAGS += -O0 -g -fcheck=bounds -finit-real=nan -fimplicit-none -ffpe-trap=invalid,zero,overflow -else + CFLAGS += -O0 +endif + +ifeq ($(ICE_CODECOV), true) + FFLAGS += -O0 -g -fprofile-arcs -ftest-coverage + CFLAGS += -O0 -g -coverage + LDFLAGS += -g -ftest-coverage -fprofile-arcs +endif + +ifneq ($(ICE_BLDDEBUG), true) +ifneq ($(ICE_CODECOV), true) FFLAGS += -O2 + CFLAGS += -O2 +endif endif SCC := gcc diff --git a/configuration/scripts/machines/Macros.onyx_gnu b/configuration/scripts/machines/Macros.onyx_gnu index 14784e625..d423cd9ab 100644 --- a/configuration/scripts/machines/Macros.onyx_gnu +++ b/configuration/scripts/machines/Macros.onyx_gnu @@ -4,7 +4,7 @@ CPP := ftn -E CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} -CFLAGS := -c -O2 +CFLAGS := -c FIXEDFLAGS := -ffixed-line-length-132 FREEFLAGS := -ffree-form @@ -12,9 +12,21 @@ FFLAGS := -fconvert=big-endian -fbacktrace -ffree-line-length-none FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) - FFLAGS += -O0 -g -fcheck=bounds -finit-real=nan -fimplicit-none -ffpe-trap=invalid,zero,overflow -else - FFLAGS += -O2 + FFLAGS += -O0 -g -fcheck=bounds -finit-real=nan -fimplicit-none -ffpe-trap=invalid,zero,overflow + CFLAGS += -O0 +endif + +ifeq ($(ICE_CODECOV), true) + FFLAGS += -O0 -g -fprofile-arcs -ftest-coverage + CFLAGS += -O0 -g -coverage + LDFLAGS += -g -ftest-coverage -fprofile-arcs +endif + +ifneq ($(ICE_BLDDEBUG), true) +ifneq ($(ICE_CODECOV), true) + FFLAGS += -O2 + CFLAGS += -O2 +endif endif SCC := cc diff --git a/configuration/scripts/machines/Macros.travisCI_gnu b/configuration/scripts/machines/Macros.travisCI_gnu index 66fb30a07..aa7b12c05 100644 --- a/configuration/scripts/machines/Macros.travisCI_gnu +++ b/configuration/scripts/machines/Macros.travisCI_gnu @@ -4,7 +4,7 @@ CPP := cpp CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} -CFLAGS := -c -O2 +CFLAGS := -c FIXEDFLAGS := -ffixed-line-length-132 FREEFLAGS := -ffree-form @@ -12,9 +12,21 @@ FFLAGS := -fconvert=big-endian -fbacktrace -ffree-line-length-none FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) - FFLAGS += -O0 -g -fcheck=bounds -finit-real=nan -fimplicit-none -ffpe-trap=invalid,zero,overflow -else - FFLAGS += -O2 + FFLAGS += -O0 -g -fcheck=bounds -finit-real=nan -fimplicit-none -ffpe-trap=invalid,zero,overflow + CFLAGS += -O0 +endif + +ifeq ($(ICE_CODECOV), true) + FFLAGS += -O0 -g -fprofile-arcs -ftest-coverage + CFLAGS += -O0 -g -coverage + LDFLAGS += -g -ftest-coverage -fprofile-arcs +endif + +ifneq ($(ICE_BLDDEBUG), true) +ifneq ($(ICE_CODECOV), true) + FFLAGS += -O2 + CFLAGS += -O2 +endif endif SCC := gcc diff --git a/configuration/scripts/machines/env.badger_intel b/configuration/scripts/machines/env.badger_intel index 3e7bb4f8c..8fe69148b 100755 --- a/configuration/scripts/machines/env.badger_intel +++ b/configuration/scripts/machines/env.badger_intel @@ -37,7 +37,8 @@ setenv ICE_MACHINE_WKDIR /net/scratch3/$user/CICE_RUNS setenv ICE_MACHINE_INPUTDATA /usr/projects/climate/eclare/DATA/Consortium setenv ICE_MACHINE_BASELINE /net/scratch3/$user/CICE_BASELINE setenv ICE_MACHINE_SUBMIT "sbatch " -setenv ICE_MACHINE_ACCT e3sm +#setenv ICE_MACHINE_ACCT e3sm +setenv ICE_MACHINE_ACCT climatehilat setenv ICE_MACHINE_QUEUE "default" setenv ICE_MACHINE_TPNODE 16 setenv ICE_MACHINE_BLDTHRDS 1 diff --git a/configuration/scripts/options/set_nml.isotope 
b/configuration/scripts/options/set_nml.isotope new file mode 100644 index 000000000..b3042ee5f --- /dev/null +++ b/configuration/scripts/options/set_nml.isotope @@ -0,0 +1,2 @@ +n_iso = 3 +tr_iso = .true. diff --git a/configuration/scripts/tests/base_suite.ts b/configuration/scripts/tests/base_suite.ts index 4b0d35e5a..fad8b22f3 100755 --- a/configuration/scripts/tests/base_suite.ts +++ b/configuration/scripts/tests/base_suite.ts @@ -45,3 +45,6 @@ smoke gx3 4x2 fsd1,diag24,run5day,debug smoke gx3 8x2 fsd12,diag24,run5day,short restart gx3 4x2 fsd12,debug,short smoke gx3 8x2 fsd12ww3,diag24,run1day,medium +smoke gx3 4x1 isotope,debug +restart gx3 8x2 isotope + diff --git a/configuration/scripts/tests/nothread_suite.ts b/configuration/scripts/tests/nothread_suite.ts index 5db402d34..49f834a98 100644 --- a/configuration/scripts/tests/nothread_suite.ts +++ b/configuration/scripts/tests/nothread_suite.ts @@ -26,6 +26,12 @@ restart gx3 20x1 alt02,debug,short restart gx3 24x1 alt03,debug,short smoke gx3 24x1 alt04,debug,short smoke gx3 32x1 alt05,debug,short +restart gx3 16x1 isotope +smoke gx3 6x1 isotope,debug +smoke gx3 8x1 fsd1,diag24,run5day,debug +smoke gx3 16x1 fsd12,diag24,run5day,short +restart gx3 12x1 fsd12,debug,short +smoke gx3 20x1 fsd12ww3,diag24,run1day,medium restart gbox128 8x1 short restart gbox128 16x1 boxdyn,short @@ -37,6 +43,12 @@ smoke gbox128 24x1 boxrestore,short,debug restart gbox80 1x1 box2001 smoke gbox80 1x1 boxslotcyl +smoke gx3 16x1 jra55_gx3_2008,medium,run90day +restart gx3 12x1 jra55_gx3,short +#tcraig, hangs nodes intermittently on izumi +#smoke gx1 24x1 jra55_gx1_2008,medium,run90day +#restart gx1 24x1 jra55_gx1,short + smoke gx3 16x1 bgcz smoke gx3 16x1 bgcz,debug smoke gx3 24x1 bgcskl,debug diff --git a/configuration/scripts/tests/report_results.csh b/configuration/scripts/tests/report_results.csh index 711dc3e3d..4a64deff8 100755 --- a/configuration/scripts/tests/report_results.csh +++ b/configuration/scripts/tests/report_results.csh @@ -1,5 +1,19 @@ #!/bin/csh -f +if ($#argv == 0) then + echo "${0}: Running results.csh" + ./results.csh >& /dev/null +else if ($#argv == 1) then + if ("$argv[1]" =~ "-n") then + #continue + else + echo "$0 usage:" + echo "$0 [-n]" + echo " -n : do NOT run results.csh (by default it does)" + exit -1 + endif +endif + if (! 
-e results.log) then echo " " echo "${0}: ERROR results.log does not exist, try running results.csh" @@ -25,6 +39,7 @@ set hash = `grep "#hash = " results.log | cut -c 9-` set shhash = `grep "#hshs = " results.log | cut -c 9-` set hashuser = `grep "#hshu = " results.log | cut -c 9-` set hashdate = `grep "#hshd = " results.log | cut -c 9-` +set testsuites = `grep "#suit = " results.log | cut -c 9-` set cdat = `grep "#date = " results.log | cut -c 9-` set ctim = `grep "#time = " results.log | cut -c 9-` set user = `grep "#user = " results.log | cut -c 9-` @@ -42,6 +57,7 @@ set compilers = `grep -v "#" results.log | grep ${mach}_ | cut -d "_" -f 2 | sor #echo "debug ${shhash}" #echo "debug ${hashuser}" #echo "debug ${hashdate}" +#echo "debug ${testsuites}" #echo "debug ${cdat}" #echo "debug ${ctim}" #echo "debug ${user}" @@ -79,12 +95,21 @@ unset noglob foreach compiler ( ${compilers} ) - set ofile = "${shhash}.${mach}.${compiler}.${xcdat}.${xctim}" - set outfile = "${wikiname}/${tsubdir}/${ofile}.md" + set cnt = 0 + set found = 1 + while ($found == 1) + set ofile = "${shhash}.${mach}.${compiler}.${xcdat}.${xctim}.$cnt" + set outfile = "${wikiname}/${tsubdir}/${ofile}.md" + if (-e ${outfile}) then + @ cnt = $cnt + 1 + else + set found = 0 + endif + end + mkdir -p ${wikiname}/${tsubdir} echo "${0}: writing to ${outfile}" - if (-e ${outfile}) rm -f ${outfile} cat >! ${outfile} << EOF @@ -103,7 +128,7 @@ EOF foreach case ( ${cases} ) if ( ${case} =~ *_${compiler}_* ) then -# check thata case results are meaningful +# check that case results are meaningful set fbuild = `grep " ${case} " results.log | grep " build" | cut -c 1-4` set frun = `grep " ${case} " results.log | grep " run" | cut -c 1-4` set ftest = `grep " ${case} " results.log | grep " test" | cut -c 1-4` diff --git a/configuration/scripts/tests/test_logbfb.script b/configuration/scripts/tests/test_logbfb.script index fbce5d918..d8e594e81 100644 --- a/configuration/scripts/tests/test_logbfb.script +++ b/configuration/scripts/tests/test_logbfb.script @@ -4,11 +4,6 @@ # This is identical to a smoke test, but triggers bfbcompare with log files instead of restarts # cice.run returns -1 if run did not complete successfully -mv -f ${ICE_CASEDIR}/test_output ${ICE_CASEDIR}/test_output.prev -cat ${ICE_CASEDIR}/test_output.prev | grep -iv "${ICE_TESTNAME} run" >! ${ICE_CASEDIR}/test_output -rm -f ${ICE_CASEDIR}/test_output.prev -echo "RUN ${ICE_TESTNAME} run " >> ${ICE_CASEDIR}/test_output - ./cice.run set res="$status" diff --git a/configuration/scripts/tests/test_restart.script b/configuration/scripts/tests/test_restart.script index 20953b1e1..59729b361 100644 --- a/configuration/scripts/tests/test_restart.script +++ b/configuration/scripts/tests/test_restart.script @@ -7,14 +7,6 @@ cp ice_in ice_in.0 ${ICE_CASEDIR}/casescripts/parse_namelist.sh ice_in ${ICE_CASEDIR}/casescripts/test_nml.restart1 cp ice_in ice_in.1 -mv -f ${ICE_CASEDIR}/test_output ${ICE_CASEDIR}/test_output.prev -cat ${ICE_CASEDIR}/test_output.prev | grep -iv "${ICE_TESTNAME} run" >! ${ICE_CASEDIR}/test_output -mv -f ${ICE_CASEDIR}/test_output ${ICE_CASEDIR}/test_output.prev -cat ${ICE_CASEDIR}/test_output.prev | grep -iv "${ICE_TESTNAME} test" >! 
${ICE_CASEDIR}/test_output -rm -f ${ICE_CASEDIR}/test_output.prev -echo "RUN ${ICE_TESTNAME} run " >> ${ICE_CASEDIR}/test_output -echo "PEND ${ICE_TESTNAME} test " >> ${ICE_CASEDIR}/test_output - ./cice.run set res="$status" @@ -27,8 +19,6 @@ if ( $res != 0 ) then echo "FAIL ${ICE_TESTNAME} run" >> ${ICE_CASEDIR}/test_output echo "FAIL ${ICE_TESTNAME} test " >> ${ICE_CASEDIR}/test_output exit 99 -else - echo "PASS ${ICE_TESTNAME} initialrun" >> ${ICE_CASEDIR}/test_output endif # Prepend 'base_' to the final restart file to save for comparison diff --git a/configuration/scripts/tests/test_smoke.script b/configuration/scripts/tests/test_smoke.script index 42a963b47..f39f7cb4a 100644 --- a/configuration/scripts/tests/test_smoke.script +++ b/configuration/scripts/tests/test_smoke.script @@ -3,11 +3,6 @@ # Run the CICE model # cice.run returns -1 if run did not complete successfully -mv -f ${ICE_CASEDIR}/test_output ${ICE_CASEDIR}/test_output.prev -cat ${ICE_CASEDIR}/test_output.prev | grep -iv "${ICE_TESTNAME} run" >! ${ICE_CASEDIR}/test_output -rm -f ${ICE_CASEDIR}/test_output.prev -echo "RUN ${ICE_TESTNAME} run " >> ${ICE_CASEDIR}/test_output - ./cice.run set res="$status" diff --git a/doc/source/developer_guide/dg_other.rst b/doc/source/developer_guide/dg_other.rst index 2a46186bd..7853cb66b 100644 --- a/doc/source/developer_guide/dg_other.rst +++ b/doc/source/developer_guide/dg_other.rst @@ -132,8 +132,8 @@ tracer. A number of optional tracers are available in the code, including ice age, first-year ice area, melt pond area and volume, brine height, -aerosols, and level ice area and volume (from which ridged ice -quantities are derived). Salinity, enthalpies, age, aerosols, level-ice +aerosols, water isotopes, and level ice area and volume (from which ridged ice +quantities are derived). Salinity, enthalpies, age, aerosols, isotopes, level-ice volume, brine height and most melt pond quantities are volume-weighted tracers, while first-year area, pond area, and level-ice area are area-weighted tracers. Biogeochemistry tracers in the skeletal layer are area-weighted, diff --git a/doc/source/science_guide/sg_tracers.rst b/doc/source/science_guide/sg_tracers.rst index bc55a47f9..bbd18eb1f 100644 --- a/doc/source/science_guide/sg_tracers.rst +++ b/doc/source/science_guide/sg_tracers.rst @@ -87,6 +87,7 @@ is not in use. " ", " ", "apnd", "nt_vpnd", " " " ", " ", "apnd", "nt_ipnd", " " "tr_aero", "n_aero", "vice, vsno", "nt_aero"," " + "tr_iso", "n_iso", "vice, vsno", "nt_iso"," " "tr_brine", " ", "vice", "nt_fbri", " " "tr_fsd","nfsd","aice","nt_fsd"," " "solve_zsal", "n_trzs", "fbri or (a,v)ice", "nt_bgc_S", " " @@ -114,5 +115,4 @@ Users may add any number of additional tracers that are transported conservative provided that the dependency ``trcr_depend`` is defined appropriately. See Section :ref:`addtrcr` for guidance on adding tracers. -Please see the `Icepack documentation `_ for additional information about tracers that depend on other tracers, the floe size distribution, age of the ice, aerosols, -brine height, and the sea ice ecosystem. +Please see the `Icepack documentation `_ for additional information about tracers that depend on other tracers, the floe size distribution, age of the ice, aerosols, water isotopes, brine height, and the sea ice ecosystem. 
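[Editor's note] To make the tracer-index bookkeeping described above concrete, here is a minimal standalone Fortran sketch (not part of the patch) of how ``count_tracers`` appends the isotope entries to the tracer array; the program name and the starting count of ten tracers are hypothetical, while ``n_iso = 3`` mirrors **set_nml.isotope**::

    program iso_index_demo
      implicit none
      integer, parameter :: n_iso = 3       ! as in set_nml.isotope
      integer :: ntrcr, nt_isosno, nt_isoice, k
      ntrcr = 10                            ! hypothetical: tracers already defined
      nt_isosno = ntrcr + 1                 ! first snow-isotope slot
      ntrcr     = ntrcr + n_iso
      nt_isoice = ntrcr + 1                 ! first ice-isotope slot
      ntrcr     = ntrcr + n_iso
      do k = 1, n_iso
         print '(a,i0,a,i0,a,i0)', 'isotope ', k, ': snow tracer ', &
               nt_isosno+k-1, ', ice tracer ', nt_isoice+k-1
      end do
    end program iso_index_demo

With ten pre-existing tracers this prints snow tracers 11-13 and ice tracers 14-16, the same ``trcrn`` slices that ``init_isotope`` and the isotope restart routines operate on.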
diff --git a/doc/source/user_guide/ug_case_settings.rst b/doc/source/user_guide/ug_case_settings.rst index 411b22fb8..b3088963d 100755 --- a/doc/source/user_guide/ug_case_settings.rst +++ b/doc/source/user_guide/ug_case_settings.rst @@ -206,6 +206,7 @@ Table of namelist options "","*tracer_nml*", "", "", "" "","", "", "**Tracers**", "" "","``n_aero``", "integer", "number of aerosol tracers", "1" + "","``n_iso``", "integer", "number of isotope tracers", "1" "","``n_zaero``", "0,1,2,3,4,5,6", "number of z aerosol tracers in use", "0" "","``n_algae``", "0,1,2,3", "number of algal tracers", "0" "","``n_doc``", "0,1,2,3", "number of dissolved organic carbon", "0" @@ -227,6 +228,8 @@ Table of namelist options "","``restart_pond_lvl``", "true/false", "restart tracer values from file", "" "\*","``tr_aero``", "true/false", "aerosols", "" "","``restart_aero``", "true/false", "restart tracer values from file", "" + "\*","``tr_iso``", "true/false", "isotopes", "" + "","``restart_iso``", "true/false", "restart tracer values from file", "" "\*","``tr_fsd``", "true/false", "floe size distribution", "" "","``restart_fsd``", "true/false", "restart floe size distribution values from file", "" "","", "", "", "" @@ -240,6 +243,7 @@ Table of namelist options "","", "``-1``", "thermodynamics disabled", "" "\*","``conduct``", "``Maykut71``", "conductivity :cite:`Maykut71`", "" "","", "``bubbly``", "conductivity :cite:`Pringle07`", "" + "\*","``ksno``", "real", "snow thermal conductivity (W/m/K)", "0.3" "\*","``a_rapid_mode``", "real", "brine channel diameter", "0.5x10 :math:`^{-3}` m" "\*","``Rac_rapid_mode``", "real", "critical Rayleigh number", "10" "\*","``aspect_rapid_mode``", "real", "brine convection aspect ratio", "1" diff --git a/doc/source/user_guide/ug_testing.rst b/doc/source/user_guide/ug_testing.rst index 48679577c..52621d612 100644 --- a/doc/source/user_guide/ug_testing.rst +++ b/doc/source/user_guide/ug_testing.rst @@ -380,6 +380,9 @@ following options are valid for suites, ``--report`` This is only used by ``--suite`` and when set, invokes a script that sends the test results to the results page when all tests are complete. Please see :ref:`testreporting` for more information. +``--codecov`` + When invoked, code coverage diagnostics are generated. This will modify the build and reduce optimization. The results will be uploaded to the **codecov.io** website via the **report_codecov.csh** script. General use is not recommended; this is mainly used as a diagnostic to periodically assess test coverage. Please see :ref:`codecoverage` for more information. + ``--setup-only`` This is only used by ``--suite`` and when set, just creates the suite testcases. It does not build or submit them to run. By default, the suites do ``--setup-build-submit``. @@ -645,7 +648,11 @@ To post results, once a test suite is complete, run ``results.csh`` and ./results.csh ./report_results.csh -The reporting can also be automated by adding ``--report`` to ``cice.setup`` +``report_results.csh`` will run ``results.csh`` automatically by default, but +we recommend running it manually first to verify results before publishing +them. ``report_results.csh -n`` will turn off automatic running of ``results.csh``. 
+ +The reporting can also be automated in a test suite by adding ``--report`` to ``cice.setup`` :: ./cice.setup --suite base_suite --mach conrad --env cray --testid v01a --report @@ -653,6 +660,55 @@ The reporting can also be automated by adding ``--report`` to ``cice.setup`` With ``--report``, the suite will create all the tests, build and submit them, wait for all runs to be complete, and run the results and report_results scripts. +.. _codecoverage: + +Code Coverage Testing +------------------------------ + +The ``--codecov`` feature in **cice.setup** provides a method to diagnose code coverage. +This argument turns on special compiler flags including reduced optimization and then +invokes the gcov tool. +This option is currently only available with the gnu compiler and on a few systems. + +Because codecov.io does not support git submodule analysis right now, a customized +repository has to be created to test CICE with Icepack integrated directly. The repository +https://github.com/apcraig/Test_CICE_Icepack serves as the current default test repository. +In general, to set up the code coverage test in CICE, the current CICE master has +to be copied into the Test_CICE_Icepack repository, and then the code coverage tool can +be run on that repository. A sample script to do that would be:: + + git clone https://github.com/cice-consortium/cice cice.master --recursive + + git clone https://github.com/apcraig/test_cice_icepack + cd test_cice_icepack + git rm -r * + cp -p -r ../cice.master/* . + git add . + git commit -m "update to current cice master" + git push origin master + + ./cice.setup --suite first_suite,base_suite,travis_suite,decomp_suite,reprosum_suite,quick_suite -m gordon -e gnu --codecov --testid cc01 + +To use, submit a full test suite using an updated Test_CICE_Icepack version +and the gnu compiler with the ``--codecov`` argument. +The test suite will run and then a report will be generated and uploaded to +the `codecov.io site `_ by the +**report_codecov.csh** script. + +This is a special diagnostic test and does not constitute proper model testing. +General use is not recommended; this is mainly used as a diagnostic to periodically +assess test coverage. The interaction with codecov.io is not always robust and +can be tricky to manage. Some constraints are that the output generated at runtime +is copied into the directory where compilation took place. That means each +test should be compiled separately. Tests that invoke multiple runs +(such as exact restart and the decomp test) will only save coverage information +for the last run, so some coverage information may be lost. The gcov tool can +be a little slow to run on large test suites, and the codecov.io bash uploader +(that runs gcov and uploads the data to codecov.io) is constantly evolving. +Finally, gcov requires that the diagnostic output be copied into the git sandbox for +analysis. These constraints are handled by the current scripts, but may change +in the future. + .. 
_compliance: diff --git a/icepack b/icepack index edb8c3459..1ae044604 160000 --- a/icepack +++ b/icepack @@ -1 +1 @@ -Subproject commit edb8c3459359f22af20d39d7defe97c4a8b2a419 +Subproject commit 1ae044604498b8d268df6c577556d22d2baa7758 From 183218aa6c54efafa20829624f6c7d1d34530b3c Mon Sep 17 00:00:00 2001 From: Mariana Vertenstein Date: Thu, 23 Apr 2020 17:43:35 -0600 Subject: [PATCH 02/90] updated orbital calculations needed for cesm --- .../drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 222 +++++++++++++++--- 1 file changed, 191 insertions(+), 31 deletions(-) diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index 29cd34320..49218ffe3 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -15,11 +15,11 @@ module ice_comp_nuopc use NUOPC_Model , only : model_label_SetRunClock => label_SetRunClock use NUOPC_Model , only : model_label_Finalize => label_Finalize use NUOPC_Model , only : NUOPC_ModelGet, SetVM - use shr_kind_mod , only : r8 => shr_kind_r8, cl=>shr_kind_cl, cs=>shr_kind_cs + use shr_kind_mod , only : r8 => shr_kind_r8, cl=>shr_kind_cl, cs=>shr_kind_cs use shr_sys_mod , only : shr_sys_abort, shr_sys_flush use shr_file_mod , only : shr_file_getlogunit, shr_file_setlogunit use shr_string_mod , only : shr_string_listGetNum - use shr_orb_mod , only : shr_orb_decl + use shr_orb_mod , only : shr_orb_decl, shr_orb_params, SHR_ORB_UNDEF_REAL, SHR_ORB_UNDEF_INT use shr_const_mod use shr_cal_mod , only : shr_cal_noleap, shr_cal_gregorian, shr_cal_ymd2date use ice_constants , only : ice_init_constants @@ -71,6 +71,7 @@ module ice_comp_nuopc private :: ModelAdvance private :: ModelSetRunClock private :: ModelFinalize + private :: ice_orbital_init ! only for cesm character(len=CL) :: flds_scalar_name = '' integer :: flds_scalar_num = 0 @@ -78,6 +79,17 @@ module ice_comp_nuopc integer :: flds_scalar_index_ny = 0 integer :: flds_scalar_index_nextsw_cday = 0 + character(len=CL) :: orb_mode ! attribute - orbital mode + integer :: orb_iyear ! attribute - orbital year + integer :: orb_iyear_align ! attribute - associated with model year + real(R8) :: orb_obliq ! attribute - obliquity in degrees + real(R8) :: orb_mvelp ! attribute - moving vernal equinox longitude + real(R8) :: orb_eccen ! attribute and update- orbital eccentricity + + character(len=*) , parameter :: orb_fixed_year = 'fixed_year' + character(len=*) , parameter :: orb_variable_year = 'variable_year' + character(len=*) , parameter :: orb_fixed_parameters = 'fixed_parameters' + integer , parameter :: dbug = 10 integer , parameter :: debug_import = 0 ! internal debug level integer , parameter :: debug_export = 0 ! internal debug level @@ -346,31 +358,32 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) call ice_init_constants(omega_in=SHR_CONST_OMEGA, radius_in=SHR_CONST_REARTH, & spval_dbl_in=SHR_CONST_SPVAL) + call icepack_init_parameters( & - secday_in = SHR_CONST_CDAY, & - rhoi_in = SHR_CONST_RHOICE, & - rhow_in = SHR_CONST_RHOSW, & - cp_air_in = SHR_CONST_CPDAIR, & - cp_ice_in = SHR_CONST_CPICE, & - cp_ocn_in = SHR_CONST_CPSW, & - gravit_in = SHR_CONST_G, & - rhofresh_in = SHR_CONST_RHOFW, & - zvir_in = SHR_CONST_ZVIR, & - vonkar_in = SHR_CONST_KARMAN, & - cp_wv_in = SHR_CONST_CPWV, & - stefan_boltzmann_in = SHR_CONST_STEBOL, & - Tffresh_in= SHR_CONST_TKFRZ, & - Lsub_in = SHR_CONST_LATSUB, & - Lvap_in = SHR_CONST_LATVAP, & -! Lfresh_in = SHR_CONST_LATICE, & ! 
computed in init_parameters as Lsub-Lvap - Timelt_in = SHR_CONST_TKFRZ-SHR_CONST_TKFRZ, & - Tsmelt_in = SHR_CONST_TKFRZ-SHR_CONST_TKFRZ, & - ice_ref_salinity_in = SHR_CONST_ICE_REF_SAL, & - depressT_in = 0.054_dbl_kind, & - Tocnfrz_in= -34.0_dbl_kind*0.054_dbl_kind, & - pi_in = SHR_CONST_PI, & - snowpatch_in = 0.005_dbl_kind, & - dragio_in = 0.00962_dbl_kind) + secday_in = SHR_CONST_CDAY, & + rhoi_in = SHR_CONST_RHOICE, & + rhow_in = SHR_CONST_RHOSW, & + cp_air_in = SHR_CONST_CPDAIR, & + cp_ice_in = SHR_CONST_CPICE, & + cp_ocn_in = SHR_CONST_CPSW, & + gravit_in = SHR_CONST_G, & + rhofresh_in = SHR_CONST_RHOFW, & + zvir_in = SHR_CONST_ZVIR, & + vonkar_in = SHR_CONST_KARMAN, & + cp_wv_in = SHR_CONST_CPWV, & + stefan_boltzmann_in = SHR_CONST_STEBOL, & + Tffresh_in = SHR_CONST_TKFRZ, & + Lsub_in = SHR_CONST_LATSUB, & + Lvap_in = SHR_CONST_LATVAP, & + !Lfresh_in = SHR_CONST_LATICE, & ! computed in init_parameters as Lsub-Lvap + Timelt_in = SHR_CONST_TKFRZ-SHR_CONST_TKFRZ, & + Tsmelt_in = SHR_CONST_TKFRZ-SHR_CONST_TKFRZ, & + ice_ref_salinity_in = SHR_CONST_ICE_REF_SAL, & + depressT_in = 0.054_dbl_kind, & + Tocnfrz_in = -34.0_dbl_kind*0.054_dbl_kind, & + pi_in = SHR_CONST_PI, & + snowpatch_in = 0.005_dbl_kind, & + dragio_in = 0.00962_dbl_kind) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -382,6 +395,10 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) ! Get orbital values ! Note that these values are obtained in a call to init_orbit in ice_shortwave.F90 ! if CESMCOUPLED is not defined +#ifdef CESMCOUPLED + call ice_orbital_init(gcomp, clock, nu_diag, my_task==master_task, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return +#else call NUOPC_CompAttributeGet(gcomp, name='orb_eccen', value=cvalue, isPresent=isPresent, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return if (isPresent) then @@ -403,11 +420,11 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) read(cvalue,*) mvelpp end if - call icepack_init_orbit(eccen_in=eccen, mvelpp_in=mvelpp, & - lambm0_in=lambm0, obliqr_in=obliqr) + call icepack_init_orbit(eccen_in=eccen, mvelpp_in=mvelpp, lambm0_in=lambm0, obliqr_in=obliqr) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) +#endif ! Determine runtype and possibly nextsw_cday call NUOPC_CompAttributeGet(gcomp, name='start_type', value=cvalue, isPresent=isPresent, rc=rc) @@ -429,7 +446,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) ! In the nuopc version it will be easier to assume that on startup - nextsw_cday is just the current time ! TODO (mvertens, 2019-03-21): need to get the perpetual run working - + if (trim(runtype) /= 'initial') then ! Set nextsw_cday to -1 (this will skip an orbital calculation on initialization) nextsw_cday = -1.0_r8 @@ -441,7 +458,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) end if else ! This would be the NEMS branch - ! Note that in NEMS - nextsw_cday is not needed in ice_orbital.F90 and what is needed is + ! Note that in NEMS - nextsw_cday is not needed in ice_orbital.F90 and what is needed is ! simply a CPP variable declaration of NEMSCOUPLED runtype = 'initial' ! 
determined from the namelist in ice_init if CESMCOUPLED is not defined @@ -876,7 +893,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) flds_scalar_name=flds_scalar_name, flds_scalar_num=flds_scalar_num, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return -#ifdef CESMCOUPLED +#ifdef CESMCOUPLED !----------------------------------------------------------------- ! Prescribed ice initialization - first get compid !----------------------------------------------------------------- @@ -1024,6 +1041,10 @@ subroutine ModelAdvance(gcomp, rc) !-------------------------------- ! Obtain orbital values !-------------------------------- +#ifdef CESMCOUPLED + call ice_orbital_init(gcomp, clock, nu_diag, my_task==master_task, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return +#else call NUOPC_CompAttributeGet(gcomp, name='orb_eccen', value=cvalue, isPresent=isPresent, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return if (isPresent) then @@ -1050,6 +1071,7 @@ subroutine ModelAdvance(gcomp, rc) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) +#endif !-------------------------------- ! check that cice internal time is in sync with master clock before timestep update @@ -1349,4 +1371,142 @@ subroutine ModelFinalize(gcomp, rc) end subroutine ModelFinalize + !=============================================================================== + + subroutine ice_orbital_init(gcomp, clock, logunit, mastertask, rc) + + !---------------------------------------------------------- + ! Initialize orbital related values for cesm coupled + !---------------------------------------------------------- + + ! input/output variables + type(ESMF_GridComp) :: gcomp + type(ESMF_Clock) , intent(in) :: clock + integer , intent(in) :: logunit + logical , intent(in) :: mastertask + integer , intent(out) :: rc ! output error + + ! local variables + real(r8) :: eccen, obliqr, lambm0, mvelpp + character(len=CL) :: msgstr ! temporary + character(len=CL) :: cvalue ! temporary + type(ESMF_Time) :: CurrTime ! current time + integer :: year ! model year at current time + integer :: orb_year ! orbital year for current orbital computation + logical :: lprint + logical :: first_time = .true. + character(len=*) , parameter :: subname = "(cice_orbital_init)" + !------------------------------------------------------------------------------- + + rc = ESMF_SUCCESS + + if (first_time) then + + ! Determine orbital attributes from input + call NUOPC_CompAttributeGet(gcomp, name='orb_mode', value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) orb_mode + call NUOPC_CompAttributeGet(gcomp, name="orb_iyear", value=cvalue, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) orb_iyear + call NUOPC_CompAttributeGet(gcomp, name="orb_iyear_align", value=cvalue, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) orb_iyear_align + call NUOPC_CompAttributeGet(gcomp, name="orb_obliq", value=cvalue, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) orb_obliq + call NUOPC_CompAttributeGet(gcomp, name="orb_eccen", value=cvalue, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) orb_eccen + call NUOPC_CompAttributeGet(gcomp, name="orb_mvelp", value=cvalue, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) orb_mvelp + + ! 
Error checks + if (trim(orb_mode) == trim(orb_fixed_year)) then + orb_obliq = SHR_ORB_UNDEF_REAL + orb_eccen = SHR_ORB_UNDEF_REAL + orb_mvelp = SHR_ORB_UNDEF_REAL + if (orb_iyear == SHR_ORB_UNDEF_INT) then + if (mastertask) then + write(logunit,*) trim(subname),' ERROR: invalid settings orb_mode =',trim(orb_mode) + write(logunit,*) trim(subname),' ERROR: fixed_year settings = ',orb_iyear + write (msgstr, *) ' ERROR: invalid settings for orb_mode '//trim(orb_mode) + end if + call ESMF_LogSetError(ESMF_RC_NOT_VALID, msg=msgstr, line=__LINE__, file=__FILE__, rcToReturn=rc) + return ! bail out + endif + elseif (trim(orb_mode) == trim(orb_variable_year)) then + orb_obliq = SHR_ORB_UNDEF_REAL + orb_eccen = SHR_ORB_UNDEF_REAL + orb_mvelp = SHR_ORB_UNDEF_REAL + if (orb_iyear == SHR_ORB_UNDEF_INT .or. orb_iyear_align == SHR_ORB_UNDEF_INT) then + if (mastertask) then + write(logunit,*) trim(subname),' ERROR: invalid settings orb_mode =',trim(orb_mode) + write(logunit,*) trim(subname),' ERROR: variable_year settings = ',orb_iyear, orb_iyear_align + write (msgstr, *) subname//' ERROR: invalid settings for orb_mode '//trim(orb_mode) + end if + call ESMF_LogSetError(ESMF_RC_NOT_VALID, msg=msgstr, line=__LINE__, file=__FILE__, rcToReturn=rc) + return ! bail out + endif + elseif (trim(orb_mode) == trim(orb_fixed_parameters)) then + !-- force orb_iyear to undef to make sure shr_orb_params works properly + orb_iyear = SHR_ORB_UNDEF_INT + orb_iyear_align = SHR_ORB_UNDEF_INT + if (orb_eccen == SHR_ORB_UNDEF_REAL .or. & + orb_obliq == SHR_ORB_UNDEF_REAL .or. & + orb_mvelp == SHR_ORB_UNDEF_REAL) then + if (mastertask) then + write(logunit,*) trim(subname),' ERROR: invalid settings orb_mode =',trim(orb_mode) + write(logunit,*) trim(subname),' ERROR: orb_eccen = ',orb_eccen + write(logunit,*) trim(subname),' ERROR: orb_obliq = ',orb_obliq + write(logunit,*) trim(subname),' ERROR: orb_mvelp = ',orb_mvelp + write (msgstr, *) subname//' ERROR: invalid settings for orb_mode '//trim(orb_mode) + end if + call ESMF_LogSetError(ESMF_RC_NOT_VALID, msg=msgstr, line=__LINE__, file=__FILE__, rcToReturn=rc) + return ! bail out + endif + else + write (msgstr, *) subname//' ERROR: invalid orb_mode '//trim(orb_mode) + call ESMF_LogSetError(ESMF_RC_NOT_VALID, msg=msgstr, line=__LINE__, file=__FILE__, rcToReturn=rc) + rc = ESMF_FAILURE + return ! bail out + endif + end if + + if (trim(orb_mode) == trim(orb_variable_year)) then + call ESMF_ClockGet(clock, CurrTime=CurrTime, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + call ESMF_TimeGet(CurrTime, yy=year, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + orb_year = orb_iyear + (year - orb_iyear_align) + lprint = mastertask + else + orb_year = orb_iyear + if (first_time) then + lprint = mastertask + else + lprint = .false. + end if + end if + + eccen = orb_eccen + call shr_orb_params(orb_year, eccen, orb_obliq, orb_mvelp, obliqr, lambm0, mvelpp, lprint) + + if ( eccen == SHR_ORB_UNDEF_REAL .or. obliqr == SHR_ORB_UNDEF_REAL .or. & + mvelpp == SHR_ORB_UNDEF_REAL .or. lambm0 == SHR_ORB_UNDEF_REAL) then + write (msgstr, *) subname//' ERROR: orb params incorrect' + call ESMF_LogSetError(ESMF_RC_NOT_VALID, msg=msgstr, line=__LINE__, file=__FILE__, rcToReturn=rc) + return ! bail out + endif + + call icepack_init_orbit(eccen_in=eccen, mvelpp_in=mvelpp, lambm0_in=lambm0, obliqr_in=obliqr) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + first_time = .false. 
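+         ! A minimal worked example (hypothetical attribute values): with
+         ! orb_mode = 'variable_year', orb_iyear = 1850 and orb_iyear_align = 1,
+         ! a model year of 150 gives
+         !    orb_year = orb_iyear + (year - orb_iyear_align) = 1850 + 149 = 1999
+         ! so shr_orb_params re-derives eccen, obliqr, lambm0 and mvelpp as the
+         ! model year advances; 'fixed_year' instead passes the same orb_iyear
+         ! on every call, so the orbital parameters do not drift.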
+ + end subroutine ice_orbital_init + end module ice_comp_nuopc From 10e7c203d9492eca34787e284edd8e0159f75e6e Mon Sep 17 00:00:00 2001 From: Mariana Vertenstein Date: Wed, 29 Apr 2020 16:36:09 -0600 Subject: [PATCH 03/90] fixed problems in updated orbital calculations needed for cesm --- cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 | 2 +- cicecore/drivers/nuopc/cmeps/ice_import_export.F90 | 8 ++------ cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 | 2 +- 3 files changed, 4 insertions(+), 8 deletions(-) diff --git a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 index 09cffa0c7..f5e7de02f 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 @@ -374,7 +374,7 @@ subroutine coupling_prep (iblk) fsens, flat, fswabs, flwout, evap, Tref, Qref, & scale_fluxes, frzmlt_init, frzmlt, Uref, wind use ice_flux_bgc, only: faero_ocn, fiso_ocn, Qref_iso, fiso_evap, & - fzsal_ai, fzsal_g_ai, flux_bio, flux_bio_ai + fzsal_ai, fzsal_g_ai, flux_bio, flux_bio_ai, & fnit, fsil, famm, fdmsp, fdms, fhum, fdust, falgalN, & fdoc, fdic, fdon, ffep, ffed, bgcflux_ice_to_ocn use ice_grid, only: tmask diff --git a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 index 0fe2510aa..5e423fbb6 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 @@ -1039,7 +1039,6 @@ subroutine ice_export( exportState, rc ) lmask=tmask, ifrac=ailohi, ungridded_index=2, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return endif -#endif ! ------ ! optional short wave penetration to ocean ice category @@ -1056,11 +1055,12 @@ subroutine ice_export( exportState, rc ) ! penetrative shortwave by category ! Note: no need zero out pass-through fields over land for benefit of x2oacc fields in cpl hist files since ! the export state has been zeroed out at the beginning - call state_setexport(exportState, 'mean_sw_pen_to_ocn_ifrac_n', input=aicen_init, index=n, & + call state_setexport(exportState, 'mean_sw_pen_to_ocn_ifrac_n', input=fswthrun_ai, index=n, & lmask=tmask, ifrac=ailohi, ungridded_index=n, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end do end if +#endif end subroutine ice_export @@ -1488,10 +1488,6 @@ subroutine state_setexport_4d_input(state, fldname, input, index, lmask, ifrac, if (geomtype == ESMF_GEOMTYPE_MESH) then - if (present(ungridded_index)) then - write(6,*)'DEBUG: fldname = ',trim(fldname),' has ungridded index= ',ungridded_index - end if - ! get field pointer if (present(ungridded_index)) then call state_getfldptr(state, trim(fldname), dataPtr2d, rc) diff --git a/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 b/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 index f597015f3..4c3876f6c 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 @@ -49,7 +49,7 @@ module ice_prescribed_mod ! !PUBLIC DATA MEMBERS: logical(kind=log_kind), public :: prescribed_ice ! true if prescribed ice - integer(SHR_KIND_IN),parameter :: nFilesMaximum = 400 ! max number of files + integer(kind=int_kind),parameter :: nFilesMaximum = 400 ! max number of files integer(kind=int_kind) :: stream_year_first ! first year in stream to use integer(kind=int_kind) :: stream_year_last ! last year in stream to use integer(kind=int_kind) :: model_year_align ! 
align stream_year_first From ce8e5a97d051dd9ff4715f6eec6829271d774836 Mon Sep 17 00:00:00 2001 From: apcraig Date: Sat, 9 May 2020 21:29:22 -0600 Subject: [PATCH 04/90] update CICE6 to support coupling with UFS --- cicecore/cicedynB/general/ice_init.F90 | 4 +- cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 | 2 +- cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 | 4 + .../drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 206 ++++++++++++------ .../drivers/nuopc/cmeps/ice_import_export.F90 | 72 +++++- .../nuopc/cmeps/ice_prescribed_mod.F90 | 11 +- .../drivers/nuopc/cmeps/ice_shr_methods.F90 | 80 +++---- configuration/scripts/Makefile | 23 +- configuration/scripts/cice.build | 15 +- forapps/ufs/comp_ice.backend.clean | 42 ++++ forapps/ufs/comp_ice.backend.libcice | 142 ++++++++++++ 11 files changed, 479 insertions(+), 122 deletions(-) create mode 100755 forapps/ufs/comp_ice.backend.clean create mode 100755 forapps/ufs/comp_ice.backend.libcice diff --git a/cicecore/cicedynB/general/ice_init.F90 b/cicecore/cicedynB/general/ice_init.F90 index ffb070644..6ffe3d05c 100644 --- a/cicecore/cicedynB/general/ice_init.F90 +++ b/cicecore/cicedynB/general/ice_init.F90 @@ -276,7 +276,7 @@ subroutine input_data kmt_file = 'unknown_kmt_file' version_name = 'unknown_version_name' ncat = 0 ! number of ice thickness categories - nfsd = 0 ! number of floe size categories (1 = default) + nfsd = 1 ! number of floe size categories (1 = default) nilyr = 0 ! number of vertical ice layers nslyr = 0 ! number of vertical snow layers nblyr = 0 ! number of bio layers @@ -748,7 +748,7 @@ subroutine input_data ice_ic /= 'none' .and. ice_ic /= 'default') then if (my_task == master_task) then write(nu_diag,*) subname//' ERROR: runtype, restart, ice_ic are inconsistent:' - write(nu_diag,*) subname//' ERROR: runtype=',trim(runtype), 'restart=',restart, 'ice_ic=',trim(ice_ic) + write(nu_diag,*) subname//' ERROR: runtype=',trim(runtype), ' restart=',restart, ' ice_ic=',trim(ice_ic) write(nu_diag,*) subname//' ERROR: Please review user guide' endif abort_flag = 1 diff --git a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 index b72745e30..16e4216e6 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 @@ -204,9 +204,9 @@ subroutine cice_init(mpicom_ice) ! coupler communication or forcing data initialization !-------------------------------------------------------------------- +#ifndef coupled call init_forcing_atmo ! initialize atmospheric forcing (standalone) -#ifndef coupled #ifndef CESMCOUPLED if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice call get_forcing_atmo ! atmospheric forcing from data diff --git a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 index f5e7de02f..aed00a9a0 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 @@ -15,7 +15,9 @@ module CICE_RunMod use ice_kinds_mod +#ifdef CESMCOUPLED use perf_mod, only : t_startf, t_stopf, t_barrierf +#endif use ice_fileunits, only: nu_diag use ice_arrays_column, only: oceanmixed_ice use ice_constants, only: c0, c1 @@ -207,12 +209,14 @@ subroutine ice_step call init_history_bgc call ice_timer_stop(timer_diags) ! diagnostics/history +#ifdef CESMCOUPLED if (prescribed_ice) then ! 
read prescribed ice call t_barrierf('cice_run_presc_BARRIER',MPI_COMM_ICE) call t_startf ('cice_run_presc') call ice_prescribed_run(idate, sec) call t_stopf ('cice_run_presc') endif +#endif call save_init diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index 49218ffe3..e4c2a3802 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -15,13 +15,12 @@ module ice_comp_nuopc use NUOPC_Model , only : model_label_SetRunClock => label_SetRunClock use NUOPC_Model , only : model_label_Finalize => label_Finalize use NUOPC_Model , only : NUOPC_ModelGet, SetVM - use shr_kind_mod , only : r8 => shr_kind_r8, cl=>shr_kind_cl, cs=>shr_kind_cs - use shr_sys_mod , only : shr_sys_abort, shr_sys_flush +#ifdef CESMCOUPLED use shr_file_mod , only : shr_file_getlogunit, shr_file_setlogunit - use shr_string_mod , only : shr_string_listGetNum use shr_orb_mod , only : shr_orb_decl, shr_orb_params, SHR_ORB_UNDEF_REAL, SHR_ORB_UNDEF_INT use shr_const_mod - use shr_cal_mod , only : shr_cal_noleap, shr_cal_gregorian, shr_cal_ymd2date + use shr_cal_mod , only : shr_cal_noleap, shr_cal_gregorian +#endif use ice_constants , only : ice_init_constants use ice_shr_methods , only : chkerr, state_setscalar, state_getscalar, state_diagnose, alarmInit use ice_shr_methods , only : set_component_logging, get_component_instance @@ -38,15 +37,17 @@ module ice_comp_nuopc use ice_calendar , only : force_restart_now, write_ic use ice_calendar , only : idate, mday, time, month, daycal, time2sec, year_init use ice_calendar , only : sec, dt, calendar, calendar_type, nextsw_cday, istep - use ice_kinds_mod , only : dbl_kind, int_kind, char_len + use ice_kinds_mod , only : dbl_kind, int_kind, char_len, char_len_long use ice_scam , only : scmlat, scmlon, single_column - use ice_fileunits , only : nu_diag, nu_diag_set, inst_index, inst_name, inst_suffix, release_all_fileunits + use ice_fileunits , only : nu_diag, nu_diag_set, inst_index, inst_name, inst_suffix, release_all_fileunits, flush_fileunit use ice_restart_shared , only : runid, runtype, restart_dir, restart_file use ice_history , only : accum_hist #if (defined NEWCODE) use ice_history_shared , only : model_doi_url ! TODO: add this functionality #endif +#ifdef CESMCOUPLED use ice_prescribed_mod , only : ice_prescribed_init +#endif #if (defined NEWCODE) use ice_atmo , only : flux_convergence_tolerance, flux_convergence_max_iteration use ice_atmo , only : use_coldair_outbreak_mod @@ -55,12 +56,15 @@ module ice_comp_nuopc use CICE_RunMod , only : CICE_Run use ice_exit , only : abort_ice use icepack_intfc , only : icepack_warnings_flush, icepack_warnings_aborted - use icepack_intfc , only : icepack_init_orbit, icepack_init_parameters + use icepack_intfc , only : icepack_init_orbit, icepack_init_parameters, icepack_query_orbit use icepack_intfc , only : icepack_query_tracer_flags, icepack_query_parameters +#ifdef CESMCOUPLED use perf_mod , only : t_startf, t_stopf, t_barrierf +#endif use ice_timers implicit none + private public :: SetServices public :: SetVM @@ -71,20 +75,22 @@ module ice_comp_nuopc private :: ModelAdvance private :: ModelSetRunClock private :: ModelFinalize +#ifdef CESMCOUPLED private :: ice_orbital_init ! 
only for cesm +#endif - character(len=CL) :: flds_scalar_name = '' + character(len=char_len_long) :: flds_scalar_name = '' integer :: flds_scalar_num = 0 integer :: flds_scalar_index_nx = 0 integer :: flds_scalar_index_ny = 0 integer :: flds_scalar_index_nextsw_cday = 0 - character(len=CL) :: orb_mode ! attribute - orbital mode + character(len=char_len_long) :: orb_mode ! attribute - orbital mode integer :: orb_iyear ! attribute - orbital year integer :: orb_iyear_align ! attribute - associated with model year - real(R8) :: orb_obliq ! attribute - obliquity in degrees - real(R8) :: orb_mvelp ! attribute - moving vernal equinox longitude - real(R8) :: orb_eccen ! attribute and update- orbital eccentricity + real(dbl_kind) :: orb_obliq ! attribute - obliquity in degrees + real(dbl_kind) :: orb_mvelp ! attribute - moving vernal equinox longitude + real(dbl_kind) :: orb_eccen ! attribute and update- orbital eccentricity character(len=*) , parameter :: orb_fixed_year = 'fixed_year' character(len=*) , parameter :: orb_variable_year = 'variable_year' @@ -182,8 +188,8 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) integer, intent(out) :: rc ! Local variables - character(len=CL) :: cvalue - character(len=CL) :: logmsg + character(len=char_len_long) :: cvalue + character(len=char_len_long) :: logmsg logical :: isPresent, isSet character(len=*), parameter :: subname=trim(modName)//':(InitializeAdvertise) ' !-------------------------------- @@ -195,7 +201,7 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) call ESMF_LogWrite(trim(subname)//' flds_scalar_name = '//trim(flds_scalar_name), ESMF_LOGMSG_INFO) if (ChkErr(rc,__LINE__,u_FILE_u)) return else - call shr_sys_abort(subname//'Need to set attribute ScalarFieldName') + call abort_ice(subname//'Need to set attribute ScalarFieldName') endif call NUOPC_CompAttributeGet(gcomp, name="ScalarFieldCount", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) @@ -206,7 +212,7 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) call ESMF_LogWrite(trim(subname)//' flds_scalar_num = '//trim(logmsg), ESMF_LOGMSG_INFO) if (ChkErr(rc,__LINE__,u_FILE_u)) return else - call shr_sys_abort(subname//'Need to set attribute ScalarFieldCount') + call abort_ice(subname//'Need to set attribute ScalarFieldCount') endif call NUOPC_CompAttributeGet(gcomp, name="ScalarFieldIdxGridNX", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) @@ -217,7 +223,7 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) call ESMF_LogWrite(trim(subname)//' : flds_scalar_index_nx = '//trim(logmsg), ESMF_LOGMSG_INFO) if (ChkErr(rc,__LINE__,u_FILE_u)) return else - call shr_sys_abort(subname//'Need to set attribute ScalarFieldIdxGridNX') + call abort_ice(subname//'Need to set attribute ScalarFieldIdxGridNX') endif call NUOPC_CompAttributeGet(gcomp, name="ScalarFieldIdxGridNY", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) @@ -228,7 +234,7 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) call ESMF_LogWrite(trim(subname)//' : flds_scalar_index_ny = '//trim(logmsg), ESMF_LOGMSG_INFO) if (ChkErr(rc,__LINE__,u_FILE_u)) return else - call shr_sys_abort(subname//'Need to set attribute ScalarFieldIdxGridNY') + call abort_ice(subname//'Need to set attribute ScalarFieldIdxGridNY') endif call NUOPC_CompAttributeGet(gcomp, name="ScalarFieldIdxNextSwCday", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) @@ -239,7 +245,7 @@ subroutine 
InitializeAdvertise(gcomp, importState, exportState, clock, rc) call ESMF_LogWrite(trim(subname)//' : flds_scalar_index_nextsw_cday = '//trim(logmsg), ESMF_LOGMSG_INFO) if (ChkErr(rc,__LINE__,u_FILE_u)) return else - call shr_sys_abort(subname//'Need to set attribute ScalarFieldIdxNextSwCday') + call abort_ice(subname//'Need to set attribute ScalarFieldIdxNextSwCday') endif call ice_advertise_fields(gcomp, importState, exportState, flds_scalar_name, rc) @@ -263,15 +269,15 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) type(ESMF_Mesh) :: Emesh, EmeshTemp integer :: spatialDim integer :: numOwnedElements - real(R8), pointer :: ownedElemCoords(:) - real(r8), pointer :: lat(:), latMesh(:) - real(r8), pointer :: lon(:), lonMesh(:) + real(dbl_kind), pointer :: ownedElemCoords(:) + real(dbl_kind), pointer :: lat(:), latMesh(:) + real(dbl_kind), pointer :: lon(:), lonMesh(:) integer , allocatable :: gindex_ice(:) integer , allocatable :: gindex_elim(:) integer , allocatable :: gindex(:) integer :: globalID character(ESMF_MAXSTR) :: cvalue - real(r8) :: eccen, obliqr, lambm0, mvelpp + real(dbl_kind) :: eccen, obliqr, lambm0, mvelpp character(len=char_len) :: tfrz_option character(ESMF_MAXSTR) :: convCIM, purpComp type(ESMF_VM) :: vm @@ -295,7 +301,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) integer :: dtime ! time step integer :: lmpicom integer :: shrlogunit ! original log unit - character(len=cs) :: starttype ! infodata start type + character(len=char_len) :: starttype ! infodata start type integer :: lsize ! local size of coupling array character(len=512) :: diro character(len=512) :: logfile @@ -307,8 +313,8 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain type(block) :: this_block ! block information for current block integer :: compid ! component id - character(len=CL) :: tempc1,tempc2 - real(R8) :: diff_lon + character(len=char_len_long) :: tempc1,tempc2 + real(dbl_kind) :: diff_lon integer :: npes integer :: num_elim_global integer :: num_elim_local @@ -350,12 +356,15 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) ! start cice timers !---------------------------------------------------------------------------- +#ifdef CESMCOUPLED call t_startf ('cice_init_total') +#endif !---------------------------------------------------------------------------- ! Initialize constants !---------------------------------------------------------------------------- +#ifdef CESMCOUPLED call ice_init_constants(omega_in=SHR_CONST_OMEGA, radius_in=SHR_CONST_REARTH, & spval_dbl_in=SHR_CONST_SPVAL) @@ -387,6 +396,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) +#endif !---------------------------------------------------------------------------- ! Determine attributes - also needed in realize phase to get grid information @@ -399,23 +409,29 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) call ice_orbital_init(gcomp, clock, nu_diag, my_task==master_task, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return #else + ! 
Start with icepack values then update with values defined in configure file if they exist + call icepack_query_orbit(eccen_out=eccen, mvelpp_out=mvelpp, lambm0_out=lambm0, obliqr_out=obliqr) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + call NUOPC_CompAttributeGet(gcomp, name='orb_eccen', value=cvalue, isPresent=isPresent, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return + !if (ChkErr(rc,__LINE__,u_FILE_u)) return if (isPresent) then read(cvalue,*) eccen end if call NUOPC_CompAttributeGet(gcomp, name='orb_obliqr', value=cvalue, isPresent=isPresent, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return + !if (ChkErr(rc,__LINE__,u_FILE_u)) return if (isPresent) then read(cvalue,*) obliqr end if call NUOPC_CompAttributeGet(gcomp, name='orb_lambm0', value=cvalue, isPresent=isPresent, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return + !if (ChkErr(rc,__LINE__,u_FILE_u)) return if (isPresent) then read(cvalue,*) lambm0 end if call NUOPC_CompAttributeGet(gcomp, name='orb_mvelpp', value=cvalue, isPresent=isPresent, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return + !if (ChkErr(rc,__LINE__,u_FILE_u)) return if (isPresent) then read(cvalue,*) mvelpp end if @@ -438,7 +454,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) else if (trim(starttype) == trim('branch')) then runtype = "continue" else - call shr_sys_abort( subname//' ERROR: unknown starttype' ) + call abort_ice( subname//' ERROR: unknown starttype' ) end if ! Note that in the mct version the atm was initialized first so that nextsw_cday could be passed to the other @@ -449,7 +465,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) if (trim(runtype) /= 'initial') then ! Set nextsw_cday to -1 (this will skip an orbital calculation on initialization - nextsw_cday = -1.0_r8 + nextsw_cday = -1.0_dbl_kind else call ESMF_ClockGet( clock, currTime=currTime, rc=rc ) if (ChkErr(rc,__LINE__,u_FILE_u)) return @@ -464,14 +480,15 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) runtype = 'initial' ! determined from the namelist in ice_init if CESMCOUPLED is not defined end if + single_column = .false. +#ifdef CESMCOUPLED ! Determine single column info call NUOPC_CompAttributeGet(gcomp, name='single_column', value=cvalue, isPresent=isPresent, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return if (isPresent) then read(cvalue,*) single_column - else - single_column = .false. end if +#endif if (single_column) then ! Must have these attributes present call NUOPC_CompAttributeGet(gcomp, name='scmlon', value=cvalue, rc=rc) @@ -484,14 +501,15 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) ! Determine runid call NUOPC_CompAttributeGet(gcomp, name='case_name', value=cvalue, isPresent=isPresent, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return if (isPresent) then read(cvalue,*) runid else runid = 'unknown' ! read in from the namelist in ice_init.F90 if CESMCOUPLED is not defined end if +#ifdef CESMCOUPLED ! Determine tfreeze_option, flux convertence before call to cice_init + ! tcx, what is going on here? if not present, set it? if present, ignore it? call NUOPC_CompAttributeGet(gcomp, name="tfreeze_option", value=tfrz_option, isPresent=isPresent, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return if (.not. 
isPresent) then @@ -501,6 +519,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) +#endif #if (defined NEWCODE) call NUOPC_CompAttributeGet(gcomp, name="flux_convergence", value=cvalue, isPresent=isPresent, rc=rc) @@ -508,7 +527,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) if (isPresent) then read(cvalue,*) flux_convergence_tolerance else - flux_convergence_tolerance = 0._r8 + flux_convergence_tolerance = 0._dbl_kind end if call NUOPC_CompAttributeGet(gcomp, name="flux_max_iteration", value=cvalue, isPresent=isPresent, rc=rc) @@ -537,19 +556,19 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) call ESMF_TimeGet( currTime, yy=yy, mm=mm, dd=dd, s=curr_tod, rc=rc ) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call shr_cal_ymd2date(yy,mm,dd,curr_ymd) + call ice_cal_ymd2date(yy,mm,dd,curr_ymd) call ESMF_TimeGet( startTime, yy=yy, mm=mm, dd=dd, s=start_tod, rc=rc ) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call shr_cal_ymd2date(yy,mm,dd,start_ymd) + call ice_cal_ymd2date(yy,mm,dd,start_ymd) call ESMF_TimeGet( stopTime, yy=yy, mm=mm, dd=dd, s=stop_tod, rc=rc ) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call shr_cal_ymd2date(yy,mm,dd,stop_ymd) + call ice_cal_ymd2date(yy,mm,dd,stop_ymd) call ESMF_TimeGet( refTime, yy=yy, mm=mm, dd=dd, s=ref_tod, rc=rc ) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call shr_cal_ymd2date(yy,mm,dd,ref_ymd) + call ice_cal_ymd2date(yy,mm,dd,ref_ymd) call ESMF_TimeIntervalGet( timeStep, s=dtime, rc=rc ) if (ChkErr(rc,__LINE__,u_FILE_u)) return @@ -558,13 +577,15 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) call ESMF_TimeGet( currTime, calkindflag=esmf_caltype, rc=rc ) if (ChkErr(rc,__LINE__,u_FILE_u)) return +#ifdef CESMCOUPLED if (esmf_caltype == ESMF_CALKIND_NOLEAP) then calendar_type = shr_cal_noleap else if (esmf_caltype == ESMF_CALKIND_GREGORIAN) then calendar_type = shr_cal_gregorian else - call shr_sys_abort( subname//'ERROR:: bad calendar for ESMF' ) + call abort_ice( subname//'ERROR:: bad calendar for ESMF' ) end if +#endif !---------------------------------------------------------------------------- ! Set cice logging @@ -572,11 +593,15 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) ! Note that sets the nu_diag module variable in ice_fileunits ! Set the nu_diag_set flag so it's not reset later +#ifdef CESMCOUPLED call set_component_logging(gcomp, my_task==master_task, nu_diag, shrlogunit, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return nu_diag_set = .true. +#endif +#ifdef CESMCOUPLED call shr_file_setLogUnit (shrlogunit) +#endif !---------------------------------------------------------------------------- ! Initialize cice @@ -585,9 +610,13 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) ! Note that cice_init also sets time manager info as well as mpi communicator info, ! including master_task and my_task +#ifdef CESMCOUPLED call t_startf ('cice_init') +#endif call cice_init( lmpicom ) +#ifdef CESMCOUPLED call t_stopf ('cice_init') +#endif !---------------------------------------------------------------------------- ! 
reset shr logging to my log file @@ -647,7 +676,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) write(nu_diag,*) trim(subname),' ERROR curr_ymd,year_init =',curr_ymd,year_init write(nu_diag,*) trim(subname),' ERROR idate lt zero',idate end if - call shr_sys_abort(subname//' :: ERROR idate lt zero') + call abort_ice(subname//' :: ERROR idate lt zero') endif iyear = (idate/10000) ! integer year of basedate month = (idate-iyear*10000)/100 ! integer month of basedate @@ -658,12 +687,15 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) write(nu_diag,*) trim(subname),' cice year_init = ',year_init write(nu_diag,*) trim(subname),' cice start date = ',idate write(nu_diag,*) trim(subname),' cice start ymds = ',iyear,month,mday,start_tod + write(nu_diag,*) trim(subname),' cice calendar_type = ',trim(calendar_type) endif - if (calendar_type /= "GREGORIAN") then - call time2sec(iyear-year_init,month,mday,time) - else + if (calendar_type == "GREGORIAN" .or. & + calendar_type == "Gregorian" .or. & + calendar_type == "gregorian") then call time2sec(iyear-(year_init-1),month,mday,time) + else + call time2sec(iyear-year_init,month,mday,time) endif time = time+start_tod end if @@ -867,16 +899,16 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) ! error check differences between internally generated lons and those read in do n = 1,lsize diff_lon = abs(lonMesh(n) - lon(n)) - if ( (diff_lon > 1.e2 .and. abs(diff_lon - 360_r8) > 1.e-1) .or.& - (diff_lon > 1.e-3 .and. diff_lon < 1._r8) ) then + if ( (diff_lon > 1.e2 .and. abs(diff_lon - 360_dbl_kind) > 1.e-1) .or.& + (diff_lon > 1.e-3 .and. diff_lon < 1._dbl_kind) ) then !write(6,100)n,lonMesh(n),lon(n), diff_lon 100 format('ERROR: CICE n, lonmesh(n), lon(n), diff_lon = ',i6,2(f21.13,3x),d21.5) - !call shr_sys_abort() + !call abort_ice() end if if (abs(latMesh(n) - lat(n)) > 1.e-1) then !write(6,101)n,latMesh(n),lat(n), abs(latMesh(n)-lat(n)) 101 format('ERROR: CICE n, latmesh(n), lat(n), diff_lat = ',i6,2(f21.13,3x),d21.5) - !call shr_sys_abort() + !call abort_ice() end if end do @@ -952,12 +984,14 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) +#ifdef CESMCOUPLED call t_stopf ('cice_init_total') +#endif deallocate(gindex_ice) deallocate(gindex) - call shr_sys_flush(nu_diag) + call flush_fileunit(nu_diag) end subroutine InitializeRealize @@ -980,7 +1014,7 @@ subroutine ModelAdvance(gcomp, rc) type(ESMF_Time) :: nextTime type(ESMF_State) :: importState, exportState character(ESMF_MAXSTR) :: cvalue - real(r8) :: eccen, obliqr, lambm0, mvelpp + real(dbl_kind) :: eccen, obliqr, lambm0, mvelpp integer :: shrlogunit ! original log unit integer :: k,n ! index logical :: stop_now ! .true. ==> stop at the end of this run phase @@ -994,8 +1028,8 @@ subroutine ModelAdvance(gcomp, rc) integer :: mon_sync ! Sync current month integer :: day_sync ! Sync current day integer :: tod_sync ! Sync current time of day (sec) - character(CL) :: restart_date - character(CL) :: restart_filename + character(char_len_long) :: restart_date + character(char_len_long) :: restart_filename logical :: isPresent character(*) , parameter :: F00 = "('(ice_comp_nuopc) ',2a,i8,d21.14)" character(len=*),parameter :: subname=trim(modName)//':(ModelAdvance) ' @@ -1009,15 +1043,19 @@ subroutine ModelAdvance(gcomp, rc) !-------------------------------- call ice_timer_start(timer_total) ! 
time entire run +#ifdef CESMCOUPLED call t_barrierf('cice_run_total_BARRIER',mpi_comm_ice) call t_startf ('cice_run_total') +#endif !-------------------------------- ! Reset shr logging to my log file !-------------------------------- +#ifdef CESMCOUPLED call shr_file_getLogUnit (shrlogunit) call shr_file_setLogUnit (nu_diag) +#endif !-------------------------------- ! Query the Component for its clock, importState and exportState @@ -1045,23 +1083,30 @@ subroutine ModelAdvance(gcomp, rc) call ice_orbital_init(gcomp, clock, nu_diag, my_task==master_task, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return #else + ! Start with icepack values then update with values defined in configure file if they exist + ! tcx, This should be identical with initialization, why do it again? Get rid of it + call icepack_query_orbit(eccen_out=eccen, mvelpp_out=mvelpp, lambm0_out=lambm0, obliqr_out=obliqr) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + call NUOPC_CompAttributeGet(gcomp, name='orb_eccen', value=cvalue, isPresent=isPresent, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return + !if (ChkErr(rc,__LINE__,u_FILE_u)) return if (isPresent) then read(cvalue,*) eccen end if call NUOPC_CompAttributeGet(gcomp, name='orb_obliqr', value=cvalue, isPresent=isPresent, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return + !if (ChkErr(rc,__LINE__,u_FILE_u)) return if (isPresent) then read(cvalue,*) obliqr end if call NUOPC_CompAttributeGet(gcomp, name='orb_lambm0', value=cvalue, isPresent=isPresent, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return + !if (ChkErr(rc,__LINE__,u_FILE_u)) return if (isPresent) then read(cvalue,*) lambm0 end if call NUOPC_CompAttributeGet(gcomp, name='orb_mvelpp', value=cvalue, isPresent=isPresent, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return + !if (ChkErr(rc,__LINE__,u_FILE_u)) return if (isPresent) then read(cvalue,*) mvelpp end if @@ -1086,7 +1131,7 @@ subroutine ModelAdvance(gcomp, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return call ESMF_TimeGet( currTime, yy=yr_sync, mm=mon_sync, dd=day_sync, s=tod_sync, rc=rc ) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call shr_cal_ymd2date(yr_sync, mon_sync, day_sync, ymd_sync) + call ice_cal_ymd2date(yr_sync, mon_sync, day_sync, ymd_sync) ! error check if ( (ymd /= ymd_sync) .or. (tod /= tod_sync) ) then @@ -1130,15 +1175,19 @@ subroutine ModelAdvance(gcomp, rc) ! Unpack import state !-------------------------------- +#ifdef CESMCOUPLED call t_barrierf('cice_run_import_BARRIER',mpi_comm_ice) call t_startf ('cice_run_import') call ice_timer_start(timer_cplrecv) +#endif call ice_import(importState, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return +#ifdef CESMCOUPLED call ice_timer_stop(timer_cplrecv) call t_stopf ('cice_run_import') +#endif ! write Debug output if (debug_import > 0 .and. my_task==master_task) then @@ -1161,15 +1210,19 @@ subroutine ModelAdvance(gcomp, rc) ! Create export state !-------------------------------- +#ifdef CESMCOUPLED call t_barrierf('cice_run_export_BARRIER',mpi_comm_ice) call t_startf ('cice_run_export') call ice_timer_start(timer_cplsend) +#endif call ice_export(exportState, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return +#ifdef CESMCOUPLED call ice_timer_stop(timer_cplsend) call t_stopf ('cice_run_export') +#endif if (debug_export > 0 .and. 
my_task==master_task) then call State_fldDebug(exportState, flds_scalar_name, 'cice_export:', & @@ -1177,8 +1230,10 @@ subroutine ModelAdvance(gcomp, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end if +#ifdef CESMCOUPLED ! reset shr logging to my original values call shr_file_setLogUnit (shrlogunit) +#endif !-------------------------------- ! stop timers and print timer info @@ -1202,7 +1257,9 @@ subroutine ModelAdvance(gcomp, rc) stop_now = .false. endif +#ifdef CESMCOUPLED call t_stopf ('cice_run_total') +#endif ! Need to stop this at the end of every run phase in a coupled run. call ice_timer_stop(timer_total) @@ -1373,6 +1430,7 @@ end subroutine ModelFinalize !=============================================================================== +#ifdef CESMCOUPLED subroutine ice_orbital_init(gcomp, clock, logunit, mastertask, rc) !---------------------------------------------------------- @@ -1387,9 +1445,9 @@ subroutine ice_orbital_init(gcomp, clock, logunit, mastertask, rc) integer , intent(out) :: rc ! output error ! local variables - real(r8) :: eccen, obliqr, lambm0, mvelpp - character(len=CL) :: msgstr ! temporary - character(len=CL) :: cvalue ! temporary + real(dbl_kind) :: eccen, obliqr, lambm0, mvelpp + character(len=char_len_long) :: msgstr ! temporary + character(len=char_len_long) :: cvalue ! temporary type(ESMF_Time) :: CurrTime ! current time integer :: year ! model year at current time integer :: orb_year ! orbital year for current orbital computation @@ -1508,5 +1566,31 @@ subroutine ice_orbital_init(gcomp, clock, logunit, mastertask, rc) first_time = .false. end subroutine ice_orbital_init +#endif + !=============================================================================== + + subroutine ice_cal_ymd2date(year, month, day, date) + + implicit none + + ! !INPUT/OUTPUT PARAMETERS: + + integer,intent(in ) :: year,month,day ! calendar year,month,day + integer,intent(out) :: date ! coded (yyyymmdd) calendar date + + !--- local --- + character(*),parameter :: subName = "(ice_cal_ymd2date)" + + !------------------------------------------------------------------------------- + ! NOTE: + ! this calendar has a year zero (but no day or month zero) + !------------------------------------------------------------------------------- + + date = abs(year)*10000 + month*100 + day ! 
coded calendar date + if (year < 0) date = -date + + end subroutine ice_cal_ymd2date + + !=============================================================================== end module ice_comp_nuopc diff --git a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 index 5e423fbb6..b253c0123 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 @@ -3,10 +3,10 @@ module ice_import_export use ESMF use NUOPC use NUOPC_Model - use shr_sys_mod , only : shr_sys_abort, shr_sys_flush +#ifdef CESMCOUPLED use shr_frz_mod , only : shr_frz_freezetemp - use shr_kind_mod , only : r8 => shr_kind_r8, cl=>shr_kind_cl, cs=>shr_kind_cs - use ice_kinds_mod , only : int_kind, dbl_kind, char_len_long, log_kind +#endif + use ice_kinds_mod , only : int_kind, dbl_kind, char_len, log_kind use ice_constants , only : c0, c1, spval_dbl use ice_constants , only : field_loc_center, field_type_scalar, field_type_vector use ice_blocks , only : block, get_block, nx_block, ny_block @@ -23,7 +23,7 @@ module ice_import_export use ice_flux , only : fresh, fsalt, zlvl, uatm, vatm, potT, Tair, Qa use ice_flux , only : rhoa, swvdr, swvdf, swidr, swidf, flw, frain use ice_flux , only : fsnow, uocn, vocn, sst, ss_tltx, ss_tlty, frzmlt - use ice_flux , only : sss, tf, wind, fsw + use ice_flux , only : sss, Tf, wind, fsw #if (defined NEWCODE) use ice_flux , only : faero_atm, faero_ocn use ice_flux , only : fiso_atm, fiso_ocn, fiso_rain, fiso_evap @@ -33,13 +33,16 @@ module ice_import_export use ice_grid , only : tlon, tlat, tarea, tmask, anglet, hm, ocn_gridcell_frac use ice_grid , only : grid_type, t2ugrid_vector use ice_boundary , only : ice_HaloUpdate - use ice_fileunits , only : nu_diag + use ice_fileunits , only : nu_diag, flush_fileunit use ice_communicate , only : my_task, master_task, MPI_COMM_ICE use ice_prescribed_mod , only : prescribed_ice use ice_shr_methods , only : chkerr, state_reset use icepack_intfc , only : icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc , only : icepack_query_parameters, icepack_query_tracer_flags + use icepack_intfc , only : icepack_liquidus_temperature +#ifdef CESMCOUPLED use perf_mod , only : t_startf, t_stopf, t_barrierf +#endif implicit none public @@ -107,8 +110,8 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam ! local variables integer :: n - character(CS) :: stdname - character(CS) :: cvalue + character(char_len) :: stdname + character(char_len) :: cvalue logical :: flds_wiso ! use case logical :: flds_i2o_per_cat ! .true. => select per ice thickness category character(len=*), parameter :: subname='(ice_import_export:ice_advertise_fields)' @@ -117,6 +120,9 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam rc = ESMF_SUCCESS if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) + flds_wiso = .false. + flds_i2o_per_cat = .false. 
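+    ! Outside CESMCOUPLED these defaults stand: no water-isotope fields and no
+    ! per-thickness-category ice-to-ocean fields are advertised. The CESM
+    ! branch below overrides them from the driver attributes 'flds_wiso' and
+    ! 'flds_i2o_per_cat'.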
+#ifdef CESMCOUPLED call NUOPC_CompAttributeGet(gcomp, name='flds_wiso', value=cvalue, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return read(cvalue,*) flds_wiso @@ -127,6 +133,7 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam if (ChkErr(rc,__LINE__,u_FILE_u)) return read(cvalue,*) send_i2x_per_cat call ESMF_LogWrite('flds_i2o_per_cat = '// trim(cvalue), ESMF_LOGMSG_INFO) +#endif #endif !----------------- @@ -154,7 +161,7 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam call fldlist_add(fldsToIce_num, fldsToIce, 'inst_spec_humid_height_lowest' ) call fldlist_add(fldsToIce_num, fldsToIce, 'inst_temp_height_lowest' ) call fldlist_add(fldsToIce_num, fldsToIce, 'Sa_ptem' ) - call fldlist_add(fldsToIce_num, fldsToIce, 'air_density_height_lowest' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'air_density_height_lowest' ) call fldlist_add(fldsToIce_num, fldsToIce, 'mean_down_sw_vis_dir_flx' ) call fldlist_add(fldsToIce_num, fldsToIce, 'mean_down_sw_ir_dir_flx' ) call fldlist_add(fldsToIce_num, fldsToIce, 'mean_down_sw_vis_dif_flx' ) @@ -163,6 +170,7 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam call fldlist_add(fldsToIce_num, fldsToIce, 'mean_prec_rate' ) call fldlist_add(fldsToIce_num, fldsToIce, 'mean_fprec_rate' ) +#ifdef CESMCOUPLED ! from atm - black carbon deposition fluxes (3) call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_bcph', ungridded_lbound=1, ungridded_ubound=3) @@ -171,6 +179,7 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam ! from - atm dry dust deposition frluxes (4 sizes) call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_dstdry', ungridded_lbound=1, ungridded_ubound=4) +#endif do n = 1,fldsToIce_num call NUOPC_Advertise(importState, standardName=fldsToIce(n)%stdname, & @@ -231,9 +240,11 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam call fldlist_add(fldsFrIce_num , fldsFrIce, 'mean_salt_rate' ) call fldlist_add(fldsFrIce_num , fldsFrIce, 'stress_on_ocn_ice_zonal' ) call fldlist_add(fldsFrIce_num , fldsFrIce, 'stress_on_ocn_ice_merid' ) +#ifdef CESMCOUPLED call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_bcpho' ) call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_bcphi' ) call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_flxdst' ) +#endif if (flds_wiso) then call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_fresh_water_to_ocean_rate_wiso', & ungridded_lbound=1, ungridded_ubound=3) @@ -388,6 +399,7 @@ subroutine ice_import( importState, rc ) call state_getimport(importState, 'inst_height_lowest', output=aflds, index=3, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return +!tcx errr.... this needs to be fixed in the dictionary!!! call state_getimport(importState, 'Sa_ptem', output=aflds, index=4, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return @@ -431,9 +443,13 @@ subroutine ice_import( importState, rc ) ! perform a halo update if (.not.prescribed_ice) then +#ifdef CESMCOUPLED call t_startf ('cice_imp_halo') +#endif call ice_HaloUpdate(aflds, halo_info, field_loc_center, field_type_scalar) +#ifdef CESMCOUPLED call t_stopf ('cice_imp_halo') +#endif endif ! 
now fill in the ice internal data types @@ -485,9 +501,13 @@ subroutine ice_import( importState, rc ) if (.not.prescribed_ice) then +#ifdef CESMCOUPLED call t_startf ('cice_imp_halo') +#endif call ice_HaloUpdate(aflds, halo_info, field_loc_center, field_type_vector) +#ifdef CESMCOUPLED call t_stopf ('cice_imp_halo') +#endif endif !$OMP PARALLEL DO PRIVATE(iblk,i,j) @@ -600,7 +620,9 @@ subroutine ice_import( importState, rc ) ! interpolate across the pole) ! use ANGLET which is on the T grid ! +#ifdef CESMCOUPLED call t_startf ('cice_imp_ocn') +#endif !$OMP PARALLEL DO PRIVATE(iblk,i,j,workx,worky) do iblk = 1, nblocks @@ -624,33 +646,47 @@ subroutine ice_import( importState, rc ) sst(i,j,iblk) = sst(i,j,iblk) - Tffresh ! sea sfc temp (C) sss(i,j,iblk) = max(sss(i,j,iblk),c0) +#ifndef CESMCOUPLED +!tcx should this be icepack_sea_freezing_temperature? + Tf (i,j,iblk) = icepack_liquidus_temperature(sss(i,j,iblk)) +#endif enddo enddo - ! Use shr_frz_mod for this +#ifdef CESMCOUPLED + ! Use shr_frz_mod for this, overwrite Tf computed above Tf(:,:,iblk) = shr_frz_freezetemp(sss(:,:,iblk)) +#endif enddo !$OMP END PARALLEL DO +#ifdef CESMCOUPLED call t_stopf ('cice_imp_ocn') +#endif ! Interpolate ocean dynamics variables from T-cell centers to ! U-cell centers. if (.not.prescribed_ice) then +#ifdef CESMCOUPLED call t_startf ('cice_imp_t2u') +#endif call t2ugrid_vector(uocn) call t2ugrid_vector(vocn) call t2ugrid_vector(ss_tltx) call t2ugrid_vector(ss_tlty) +#ifdef CESMCOUPLED call t_stopf ('cice_imp_t2u') +#endif end if ! Atmosphere variables are needed in T cell centers in ! subroutine stability and are interpolated to the U grid ! later as necessary. +#ifdef CESMCOUPLED call t_startf ('cice_imp_atm') +#endif !$OMP PARALLEL DO PRIVATE(iblk,i,j,workx,worky) do iblk = 1, nblocks do j = 1, ny_block @@ -671,7 +707,9 @@ subroutine ice_import( importState, rc ) enddo enddo !$OMP END PARALLEL DO +#ifdef CESMCOUPLED call t_stopf ('cice_imp_atm') +#endif end subroutine ice_import @@ -787,7 +825,7 @@ subroutine ice_export( exportState, rc ) if (tmask(i,j,iblk) .and. ailohi(i,j,iblk) < c0 ) then write(nu_diag,*) & ' (ice) send: ERROR ailohi < 0.0 ',i,j,ailohi(i,j,iblk) - call shr_sys_flush(nu_diag) + call flush_fileunit(nu_diag) endif end do end do @@ -1083,7 +1121,7 @@ subroutine fldlist_add(num, fldlist, stdname, ungridded_lbound, ungridded_ubound num = num + 1 if (num > fldsMax) then - call shr_sys_abort(trim(subname)//": ERROR num > fldsMax "//trim(stdname)) + call abort_ice(trim(subname)//": ERROR num > fldsMax "//trim(stdname)) endif fldlist(num)%stdname = trim(stdname) @@ -1270,6 +1308,9 @@ subroutine state_getimport_4d_output(state, fldname, output, index, do_sum, ungr rc = ESMF_SUCCESS + ! check that fieldname exists + if (.not. State_FldChk(state, trim(fldname))) return + if (geomtype == ESMF_GEOMTYPE_MESH) then ! get field pointer @@ -1381,6 +1422,9 @@ subroutine state_getimport_3d_output(state, fldname, output, do_sum, ungridded_i rc = ESMF_SUCCESS + ! check that fieldname exists + if (.not. State_FldChk(state, trim(fldname))) return + if (geomtype == ESMF_GEOMTYPE_MESH) then ! get field pointer @@ -1486,6 +1530,9 @@ subroutine state_setexport_4d_input(state, fldname, input, index, lmask, ifrac, rc = ESMF_SUCCESS + ! check that fieldname exists + if (.not. State_FldChk(state, trim(fldname))) return + if (geomtype == ESMF_GEOMTYPE_MESH) then ! get field pointer @@ -1600,6 +1647,9 @@ subroutine state_setexport_3d_input(state, fldname, input, lmask, ifrac, ungridd rc = ESMF_SUCCESS + ! 
check that fieldname exists + if (.not. State_FldChk(state, trim(fldname))) return + if (geomtype == ESMF_GEOMTYPE_MESH) then ! get field pointer diff --git a/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 b/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 index 4c3876f6c..dd56ac441 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 @@ -1,6 +1,15 @@ module ice_prescribed_mod -#ifdef CESMCOUPLED +#ifndef CESMCOUPLED + + use ice_kinds_mod + + implicit none + private ! except + + logical(kind=log_kind), parameter, public :: prescribed_ice = .false. ! true if prescribed ice + +#else ! !DESCRIPTION: ! The prescribed ice model reads in ice concentration data from a netCDF diff --git a/cicecore/drivers/nuopc/cmeps/ice_shr_methods.F90 b/cicecore/drivers/nuopc/cmeps/ice_shr_methods.F90 index 24a4226e5..323cba9a4 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_shr_methods.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_shr_methods.F90 @@ -20,9 +20,11 @@ module ice_shr_methods use ESMF , only : ESMF_VM, ESMF_VMGet, ESMF_VMBroadcast, ESMF_VMGetCurrent use NUOPC , only : NUOPC_CompAttributeGet use NUOPC_Model , only : NUOPC_ModelGet - use shr_kind_mod , only : r8 => shr_kind_r8, cl=>shr_kind_cl, cs=>shr_kind_cs - use shr_sys_mod , only : shr_sys_abort + use ice_kinds_mod, only : r8 => dbl_kind, cl=>char_len_long, cs=>char_len + use ice_exit , only : abort_ice +#ifdef CESMCOUPLED use shr_file_mod , only : shr_file_setlogunit, shr_file_getLogUnit +#endif implicit none private @@ -89,9 +91,11 @@ subroutine memcheck(string, level, mastertask) character(len=*), parameter :: subname='(memcheck)' !----------------------------------------------------------------------- +#ifdef CESMCOUPLED if ((mastertask .and. memdebug_level > level) .or. memdebug_level > level+1) then ierr = GPTLprint_memusage(string) endif +#endif end subroutine memcheck @@ -160,7 +164,9 @@ subroutine set_component_logging(gcomp, mastertask, logunit, shrlogunit, rc) logUnit = 6 endif +#ifdef CESMCOUPLED call shr_file_setLogUnit (logunit) +#endif end subroutine set_component_logging @@ -710,10 +716,10 @@ subroutine alarmInit( clock, alarm, option, & case (optDate) if (.not. present(opt_ymd)) then - call shr_sys_abort(subname//trim(option)//' requires opt_ymd') + call abort_ice(subname//trim(option)//' requires opt_ymd') end if if (lymd < 0 .or. ltod < 0) then - call shr_sys_abort(subname//trim(option)//'opt_ymd, opt_tod invalid') + call abort_ice(subname//trim(option)//'opt_ymd, opt_tod invalid') end if call ESMF_TimeIntervalSet(AlarmInterval, yy=9999, rc=rc) if (chkerr(rc,__LINE__,u_FILE_u)) return @@ -723,13 +729,13 @@ subroutine alarmInit( clock, alarm, option, & case (optIfdays0) if (.not. 
present(opt_ymd)) then - call shr_sys_abort(subname//trim(option)//' requires opt_ymd') + call abort_ice(subname//trim(option)//' requires opt_ymd') end if if (.not.present(opt_n)) then - call shr_sys_abort(subname//trim(option)//' requires opt_n') + call abort_ice(subname//trim(option)//' requires opt_n') end if if (opt_n <= 0) then - call shr_sys_abort(subname//trim(option)//' invalid opt_n') + call abort_ice(subname//trim(option)//' invalid opt_n') end if call ESMF_TimeIntervalSet(AlarmInterval, mm=1, rc=rc) if (chkerr(rc,__LINE__,u_FILE_u)) return @@ -739,10 +745,10 @@ subroutine alarmInit( clock, alarm, option, & case (optNSteps) if (.not.present(opt_n)) then - call shr_sys_abort(subname//trim(option)//' requires opt_n') + call abort_ice(subname//trim(option)//' requires opt_n') end if if (opt_n <= 0) then - call shr_sys_abort(subname//trim(option)//' invalid opt_n') + call abort_ice(subname//trim(option)//' invalid opt_n') end if call ESMF_ClockGet(clock, TimeStep=AlarmInterval, rc=rc) if (chkerr(rc,__LINE__,u_FILE_u)) return @@ -750,8 +756,8 @@ subroutine alarmInit( clock, alarm, option, & update_nextalarm = .true. case (optNStep) - if (.not.present(opt_n)) call shr_sys_abort(subname//trim(option)//' requires opt_n') - if (opt_n <= 0) call shr_sys_abort(subname//trim(option)//' invalid opt_n') + if (.not.present(opt_n)) call abort_ice(subname//trim(option)//' requires opt_n') + if (opt_n <= 0) call abort_ice(subname//trim(option)//' invalid opt_n') call ESMF_ClockGet(clock, TimeStep=AlarmInterval, rc=rc) if (chkerr(rc,__LINE__,u_FILE_u)) return AlarmInterval = AlarmInterval * opt_n @@ -759,10 +765,10 @@ subroutine alarmInit( clock, alarm, option, & case (optNSeconds) if (.not.present(opt_n)) then - call shr_sys_abort(subname//trim(option)//' requires opt_n') + call abort_ice(subname//trim(option)//' requires opt_n') end if if (opt_n <= 0) then - call shr_sys_abort(subname//trim(option)//' invalid opt_n') + call abort_ice(subname//trim(option)//' invalid opt_n') end if call ESMF_TimeIntervalSet(AlarmInterval, s=1, rc=rc) if (chkerr(rc,__LINE__,u_FILE_u)) return @@ -771,10 +777,10 @@ subroutine alarmInit( clock, alarm, option, & case (optNSecond) if (.not.present(opt_n)) then - call shr_sys_abort(subname//trim(option)//' requires opt_n') + call abort_ice(subname//trim(option)//' requires opt_n') end if if (opt_n <= 0) then - call shr_sys_abort(subname//trim(option)//' invalid opt_n') + call abort_ice(subname//trim(option)//' invalid opt_n') end if call ESMF_TimeIntervalSet(AlarmInterval, s=1, rc=rc) if (chkerr(rc,__LINE__,u_FILE_u)) return @@ -784,20 +790,20 @@ subroutine alarmInit( clock, alarm, option, & case (optNMinutes) call ESMF_TimeIntervalSet(AlarmInterval, s=60, rc=rc) if (.not.present(opt_n)) then - call shr_sys_abort(subname//trim(option)//' requires opt_n') + call abort_ice(subname//trim(option)//' requires opt_n') end if if (opt_n <= 0) then - call shr_sys_abort(subname//trim(option)//' invalid opt_n') + call abort_ice(subname//trim(option)//' invalid opt_n') end if AlarmInterval = AlarmInterval * opt_n update_nextalarm = .true. 
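+      ! Each interval case below follows the same pattern as optNMinutes above:
+      ! build a base ESMF_TimeInterval (e.g. s=60 for minutes), require a
+      ! positive opt_n, then scale the interval by opt_n; so optNMinutes with
+      ! opt_n = 30 yields an alarm every 1800 s. The singular and plural
+      ! variants (optNMinute/optNMinutes) are functionally identical.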
case (optNMinute) if (.not.present(opt_n)) then - call shr_sys_abort(subname//trim(option)//' requires opt_n') + call abort_ice(subname//trim(option)//' requires opt_n') end if if (opt_n <= 0) then - call shr_sys_abort(subname//trim(option)//' invalid opt_n') + call abort_ice(subname//trim(option)//' invalid opt_n') end if call ESMF_TimeIntervalSet(AlarmInterval, s=60, rc=rc) if (chkerr(rc,__LINE__,u_FILE_u)) return @@ -806,10 +812,10 @@ subroutine alarmInit( clock, alarm, option, & case (optNHours) if (.not.present(opt_n)) then - call shr_sys_abort(subname//trim(option)//' requires opt_n') + call abort_ice(subname//trim(option)//' requires opt_n') end if if (opt_n <= 0) then - call shr_sys_abort(subname//trim(option)//' invalid opt_n') + call abort_ice(subname//trim(option)//' invalid opt_n') end if call ESMF_TimeIntervalSet(AlarmInterval, s=3600, rc=rc) if (chkerr(rc,__LINE__,u_FILE_u)) return @@ -818,10 +824,10 @@ subroutine alarmInit( clock, alarm, option, & case (optNHour) if (.not.present(opt_n)) then - call shr_sys_abort(subname//trim(option)//' requires opt_n') + call abort_ice(subname//trim(option)//' requires opt_n') end if if (opt_n <= 0) then - call shr_sys_abort(subname//trim(option)//' invalid opt_n') + call abort_ice(subname//trim(option)//' invalid opt_n') end if call ESMF_TimeIntervalSet(AlarmInterval, s=3600, rc=rc) if (chkerr(rc,__LINE__,u_FILE_u)) return @@ -830,10 +836,10 @@ subroutine alarmInit( clock, alarm, option, & case (optNDays) if (.not.present(opt_n)) then - call shr_sys_abort(subname//trim(option)//' requires opt_n') + call abort_ice(subname//trim(option)//' requires opt_n') end if if (opt_n <= 0) then - call shr_sys_abort(subname//trim(option)//' invalid opt_n') + call abort_ice(subname//trim(option)//' invalid opt_n') end if call ESMF_TimeIntervalSet(AlarmInterval, d=1, rc=rc) if (chkerr(rc,__LINE__,u_FILE_u)) return @@ -842,10 +848,10 @@ subroutine alarmInit( clock, alarm, option, & case (optNDay) if (.not.present(opt_n)) then - call shr_sys_abort(subname//trim(option)//' requires opt_n') + call abort_ice(subname//trim(option)//' requires opt_n') end if if (opt_n <= 0) then - call shr_sys_abort(subname//trim(option)//' invalid opt_n') + call abort_ice(subname//trim(option)//' invalid opt_n') end if call ESMF_TimeIntervalSet(AlarmInterval, d=1, rc=rc) if (chkerr(rc,__LINE__,u_FILE_u)) return @@ -854,10 +860,10 @@ subroutine alarmInit( clock, alarm, option, & case (optNMonths) if (.not.present(opt_n)) then - call shr_sys_abort(subname//trim(option)//' requires opt_n') + call abort_ice(subname//trim(option)//' requires opt_n') end if if (opt_n <= 0) then - call shr_sys_abort(subname//trim(option)//' invalid opt_n') + call abort_ice(subname//trim(option)//' invalid opt_n') end if call ESMF_TimeIntervalSet(AlarmInterval, mm=1, rc=rc) if (chkerr(rc,__LINE__,u_FILE_u)) return @@ -866,10 +872,10 @@ subroutine alarmInit( clock, alarm, option, & case (optNMonth) if (.not.present(opt_n)) then - call shr_sys_abort(subname//trim(option)//' requires opt_n') + call abort_ice(subname//trim(option)//' requires opt_n') end if if (opt_n <= 0) then - call shr_sys_abort(subname//trim(option)//' invalid opt_n') + call abort_ice(subname//trim(option)//' invalid opt_n') end if call ESMF_TimeIntervalSet(AlarmInterval, mm=1, rc=rc) if (chkerr(rc,__LINE__,u_FILE_u)) return @@ -885,10 +891,10 @@ subroutine alarmInit( clock, alarm, option, & case (optNYears) if (.not.present(opt_n)) then - call shr_sys_abort(subname//trim(option)//' requires opt_n') + call 
abort_ice(subname//trim(option)//' requires opt_n') end if if (opt_n <= 0) then - call shr_sys_abort(subname//trim(option)//' invalid opt_n') + call abort_ice(subname//trim(option)//' invalid opt_n') end if call ESMF_TimeIntervalSet(AlarmInterval, yy=1, rc=rc) if (chkerr(rc,__LINE__,u_FILE_u)) return @@ -897,10 +903,10 @@ subroutine alarmInit( clock, alarm, option, & case (optNYear) if (.not.present(opt_n)) then - call shr_sys_abort(subname//trim(option)//' requires opt_n') + call abort_ice(subname//trim(option)//' requires opt_n') end if if (opt_n <= 0) then - call shr_sys_abort(subname//trim(option)//' invalid opt_n') + call abort_ice(subname//trim(option)//' invalid opt_n') end if call ESMF_TimeIntervalSet(AlarmInterval, yy=1, rc=rc) if (chkerr(rc,__LINE__,u_FILE_u)) return @@ -915,7 +921,7 @@ subroutine alarmInit( clock, alarm, option, & update_nextalarm = .true. case default - call shr_sys_abort(subname//'unknown option '//trim(option)) + call abort_ice(subname//'unknown option '//trim(option)) end select @@ -964,7 +970,7 @@ subroutine timeInit( Time, ymd, cal, tod, rc) rc = ESMF_SUCCESS if ( (ymd < 0) .or. (tod < 0) .or. (tod > SecPerDay) )then - call shr_sys_abort( subname//'ERROR yymmdd is a negative number or time-of-day out of bounds' ) + call abort_ice( subname//'ERROR yymmdd is a negative number or time-of-day out of bounds' ) end if tdate = abs(date) diff --git a/configuration/scripts/Makefile b/configuration/scripts/Makefile index 88a5030d1..7b39d5c8d 100644 --- a/configuration/scripts/Makefile +++ b/configuration/scripts/Makefile @@ -16,7 +16,8 @@ # # Usage examples: # % gmake -j 8 VPFILE=Filepath EXEC=${ICE_RUNDIR}/cice \ -# -f ${ICE_CASEDIR}/Makefile MACFILE=${ICE_CASEDIR}/Macros.conrad_intel +# -f ${ICE_CASEDIR}/Makefile MACFILE=${ICE_CASEDIR}/Macros.conrad_intel \ +# DEPFILE=${ICE_CASEDIR}/makdep.c cice #------------------------------------------------------------------------------- #------------------------------------------------------------------------------- @@ -25,6 +26,7 @@ EXEC := a.out MACFILE := NONE +DEPFILE := NONE MODEL := NONE VPFILE := NONE VPATH := . @@ -33,6 +35,13 @@ SRCS := NONE # dependency generator DEPGEN := ./makdep +OBJS_DEPGEN := $(DEPFILE) + +ifneq ($(ESMFMKFILE),) + -include $(ESMFMKFILE) + INCLDIR += $(ESMF_F90COMPILEPATHS) + SLIBS += $(ESMF_F90LINKPATHS) $(ESMF_F90LINKRPATHS) $(ESMF_F90ESMFLINKLIBS) +endif ifneq ($(VPATH),.) # this variable was specified on cmd line or in an env var @@ -59,14 +68,14 @@ endif OBJS := $(addsuffix .o, $(sort $(basename $(notdir $(SRCS))))) DEPS := $(addsuffix .d, $(sort $(basename $(notdir $(SRCS))))) INCS := $(patsubst %,-I%, $(VPATH) ) -OBJS_DEPGEN := $(addprefix $(ICE_CASEDIR)/,$(addsuffix .c, $(notdir $(DEPGEN)))) MODDIR:= -I. 
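
# The DEPFILE/OBJS_DEPGEN change above lets callers name the makdep.c source
# explicitly on the make command line, and the ESMFMKFILE block makes the
# Makefile ESMF-aware: esmf.mk, which an ESMF installation exports through
# the ESMFMKFILE environment variable, defines ESMF_F90COMPILEPATHS,
# ESMF_F90LINKPATHS, ESMF_F90LINKRPATHS and ESMF_F90ESMFLINKLIBS, so INCLDIR
# and SLIBS are augmented only when ESMF is present. A minimal stand-alone
# sketch of the same convention, assuming the standard esmf.mk variable set
# (app.F90 is a placeholder source file; recipe lines start with a hard tab):

ifneq ($(ESMFMKFILE),)
  -include $(ESMFMKFILE)
endif

app: app.o
	$(ESMF_F90LINKER) $(ESMF_F90LINKOPTS) $(ESMF_F90LINKPATHS) \
	$(ESMF_F90LINKRPATHS) -o $@ app.o $(ESMF_F90ESMFLINKLIBS)

app.o: app.F90
	$(ESMF_F90COMPILER) -c $(ESMF_F90COMPILEOPTS) $(ESMF_F90COMPILEPATHS) $<
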
RM := rm +AR := ar .SUFFIXES: .SUFFIXES: .F90 .F .c .o -.PHONY: all cice targets target db_files db_flags clean realclean +.PHONY: all cice libcice targets target db_files db_flags clean realclean all: $(EXEC) cice: $(EXEC) @@ -83,7 +92,7 @@ cice: $(EXEC) targets: @echo " " - @echo "Supported Makefile Targets are: cice, makdep, depends, clean, realclean, targets, db_files, db_flags" + @echo "Supported Makefile Targets are: cice, libcice, makdep, depends, clean, realclean, targets, db_files, db_flags" target: targets db_files: @@ -100,6 +109,7 @@ db_files: @echo "* ULIBS := $(ULIBS)" @echo "* SLIBS := $(SLIBS)" @echo "* INCLDIR := $(INCLDIR)" + @echo "* DEPFILE := $(DEPFILE)" @echo "* OBJS_DEPGEN := $(OBJS_DEPGEN)" db_flags: @echo " " @@ -112,6 +122,7 @@ db_flags: @echo "* .c.o := $(CC) $(CFLAGS) $(CPPDEFS) $(INCLDIR)" @echo "* .F.o := $(FC) -c $(FFLAGS) $(FIXEDFLAGS) $(CPPDEFS) $(INCLDIR)" @echo "* .F90.o := $(FC) -c $(FFLAGS) $(FREEFLAGS) $(CPPDEFS) $(MODDIR) $(INCLDIR)" + @echo "* libcice := $(AR) -r $(EXEC) " @echo "* $(notdir $(EXEC)) := $(LD) $(LDFLAGS) $(ULIBS) $(SLIBS)" #------------------------------------------------------------------------------- @@ -130,6 +141,10 @@ $(DEPGEN): $(OBJS_DEPGEN) $(EXEC): $(OBJS) $(LD) -o $(EXEC) $(LDFLAGS) $(OBJS) $(ULIBS) $(SLIBS) +libcice: $(OBJS) + @ echo "$(AR) -r $(EXEC) $(OBJS)" + $(AR) -r $(EXEC) $(OBJS) + .c.o: $(CC) $(CFLAGS) $(CPPDEFS) $(INCLDIR) $< diff --git a/configuration/scripts/cice.build b/configuration/scripts/cice.build index 2534bfa7e..eaa920ac4 100755 --- a/configuration/scripts/cice.build +++ b/configuration/scripts/cice.build @@ -147,7 +147,8 @@ endif if (${directmake} == 1) then echo "make ${target}" ${ICE_MACHINE_MAKE} -j ${ICE_MACHINE_BLDTHRDS} VPFILE=Filepath EXEC=${ICE_RUNDIR}/cice \ - -f ${ICE_CASEDIR}/Makefile MACFILE=${ICE_CASEDIR}/Macros.${ICE_MACHCOMP} ${target} + -f ${ICE_CASEDIR}/Makefile MACFILE=${ICE_CASEDIR}/Macros.${ICE_MACHCOMP} \ + DEPFILE=${ICE_CASEDIR}/makdep.c ${target} set bldstat = ${status} if (${bldstat} != 0) then echo "${0}: targeted make FAILED" @@ -172,10 +173,12 @@ if (${ICE_CLEANBUILD} == 'true') then echo "gmake clean" if (${quiet} == "true") then ${ICE_MACHINE_MAKE} VPFILE=Filepath EXEC=${ICE_RUNDIR}/cice \ - -f ${ICE_CASEDIR}/Makefile MACFILE=${ICE_CASEDIR}/Macros.${ICE_MACHCOMP} clean >& ${ICE_BLDLOG_FILE} + -f ${ICE_CASEDIR}/Makefile MACFILE=${ICE_CASEDIR}/Macros.${ICE_MACHCOMP} \ + DEPFILE=${ICE_CASEDIR}/makdep.c clean >& ${ICE_BLDLOG_FILE} else ${ICE_MACHINE_MAKE} VPFILE=Filepath EXEC=${ICE_RUNDIR}/cice \ - -f ${ICE_CASEDIR}/Makefile MACFILE=${ICE_CASEDIR}/Macros.${ICE_MACHCOMP} clean |& tee ${ICE_BLDLOG_FILE} + -f ${ICE_CASEDIR}/Makefile MACFILE=${ICE_CASEDIR}/Macros.${ICE_MACHCOMP} \ + DEPFILE=${ICE_CASEDIR}/makdep.c clean |& tee ${ICE_BLDLOG_FILE} endif endif @@ -183,11 +186,13 @@ echo "gmake cice" if (${quiet} == "true") then echo " quiet mode on... 
patience" ${ICE_MACHINE_MAKE} -j ${ICE_MACHINE_BLDTHRDS} VPFILE=Filepath EXEC=${ICE_RUNDIR}/cice \ - -f ${ICE_CASEDIR}/Makefile MACFILE=${ICE_CASEDIR}/Macros.${ICE_MACHCOMP} >& ${ICE_BLDLOG_FILE} + -f ${ICE_CASEDIR}/Makefile MACFILE=${ICE_CASEDIR}/Macros.${ICE_MACHCOMP} \ + DEPFILE=${ICE_CASEDIR}/makdep.c cice >& ${ICE_BLDLOG_FILE} set bldstat = ${status} else ${ICE_MACHINE_MAKE} -j ${ICE_MACHINE_BLDTHRDS} VPFILE=Filepath EXEC=${ICE_RUNDIR}/cice \ - -f ${ICE_CASEDIR}/Makefile MACFILE=${ICE_CASEDIR}/Macros.${ICE_MACHCOMP} |& tee ${ICE_BLDLOG_FILE} + -f ${ICE_CASEDIR}/Makefile MACFILE=${ICE_CASEDIR}/Macros.${ICE_MACHCOMP} \ + DEPFILE=${ICE_CASEDIR}/makdep.c cice |& tee ${ICE_BLDLOG_FILE} set bldstat = ${status} endif diff --git a/forapps/ufs/comp_ice.backend.clean b/forapps/ufs/comp_ice.backend.clean new file mode 100755 index 000000000..7eef2ed1a --- /dev/null +++ b/forapps/ufs/comp_ice.backend.clean @@ -0,0 +1,42 @@ +#! /bin/csh -f + +### Expect to find the following environment variables set on entry: +# SITE +# SYSTEM_USERDIR +# SRCDIR +# EXEDIR + +setenv OBJDIR $EXEDIR/compile ; if !(-d $OBJDIR) mkdir -p $OBJDIR + +if (${SITE} =~ cheyenne*) then + setenv ARCH cheyenne_intel +#else if (${SITE} =~ Orion*) then +# setenv ARCH orion_intel +#else if (${SITE} =~ hera*) then +# setenv ARCH hera_intel +else + echo "CICE6 ${0}: ERROR in ARCH setup, ${hname}" + exit -2 +endif + +echo "CICE6 ${0}: ARCH = $ARCH" + +cd $OBJDIR + +setenv MAKENAME gmake +setenv MAKETHRDS 1 +setenv MAKEFILE ${SRCDIR}/configuration/scripts/Makefile +setenv MACROSFILE ${SRCDIR}/configuration/scripts/machines/Macros.$ARCH + +echo "CICE6 ${0}: EXEDIR = ${EXEDIR}" +echo "CICE6 ${0}: OBJDIR = ${OBJDIR}" +echo "CICE6 ${0}: MAKEFILE = ${MAKEFILE}" +echo "CICE6 ${0}: MACROSFILE = ${MACROSFILE}" +echo "CICE6 ${0}: ESMFMKFILE = ${ESMFMKFILE}" + +#clean +${MAKENAME} EXEC=${OBJDIR}/libcice6.a \ + -f ${MAKEFILE} MACFILE=${MACROSFILE} clean + +#clean install +rm -r -f ${BINDIR} diff --git a/forapps/ufs/comp_ice.backend.libcice b/forapps/ufs/comp_ice.backend.libcice new file mode 100755 index 000000000..eb1b8a4e7 --- /dev/null +++ b/forapps/ufs/comp_ice.backend.libcice @@ -0,0 +1,142 @@ +#! 
/bin/csh -f + +### Expect to find the following environment variables set on entry: +# SITE +# SYSTEM_USERDIR +# SRCDIR +# EXEDIR + +### local variable that begin with ICE_ are needed in the Macros file +# ICE_COMMDIR +# ICE_BLDDEBUG +# ICE_THREADED +# ICE_CPPDEFS + +setenv OBJDIR $EXEDIR/compile ; if !(-d $OBJDIR) mkdir -p $OBJDIR + +setenv THRD no # set to yes for OpenMP threading + +if (${SITE} =~ cheyenne*) then + setenv ARCH cheyenne_intel +#else if (${SITE} =~ Orion*) then +# setenv ARCH orion_intel +#else if (${SITE} =~ hera*) then +# setenv ARCH hera_intel +else + echo "CICE6 ${0}: ERROR in ARCH setup, ${hname}" + exit -2 +endif + +echo "CICE6 ${0}: ARCH = $ARCH" + +cd $OBJDIR + +setenv SHRDIR csm_share # location of CCSM shared code +setenv DRVDIR nuopc/cmeps + +#if ($NTASK == 1) then +# setenv ICE_COMMDIR serial +#else + setenv ICE_COMMDIR mpi +#endif + +if ($THRD == 'yes') then + setenv ICE_THREADED true +else + setenv ICE_THREADED false +endif + +if ($?ICE_CPPDEFS) then + setenv ICE_CPPDEFS "${ICE_CPPDEFS} -Dcoupled" +else + setenv ICE_CPPDEFS "-Dcoupled" +endif + +if !($?IO_TYPE) then + setenv IO_TYPE netcdf4 # set to none if netcdf library is unavailable +endif +if ($IO_TYPE == 'netcdf3' || $IO_TYPE == 'netcdf4') then + setenv IODIR io_netcdf + setenv ICE_CPPDEFS "${ICE_CPPDEFS} -Dncdf" +else if ($IO_TYPE == 'pio') then + setenv IODIR io_pio + setenv ICE_CPPDEFS "${ICE_CPPDEFS} -Dncdf" +else + setenv IODIR io_binary +endif + +# Build in debug mode. If DEBUG=Y, enable DEBUG compilation. This +# flag is set in ${ROOTDIR}/coupledFV3_MOM6_CICE_debug.appBuilder file. +if (! $?DEBUG) then + setenv ICE_BLDDEBUG true +else + if ($DEBUG != "Y") then + setenv ICE_BLDDEBUG false + endif +endif +echo "CICE6 ${0}: DEBUG = ${ICE_BLDDEBUG}" + +### List of source code directories (in order of importance). +cat >! 
Filepath << EOF +${SRCDIR}/cicecore/drivers/${DRVDIR} +${SRCDIR}/cicecore/cicedynB/dynamics +${SRCDIR}/cicecore/cicedynB/general +${SRCDIR}/cicecore/cicedynB/analysis +${SRCDIR}/cicecore/cicedynB/infrastructure +${SRCDIR}/cicecore/cicedynB/infrastructure/io/${IODIR} +${SRCDIR}/cicecore/cicedynB/infrastructure/comm/${ICE_COMMDIR} +${SRCDIR}/cicecore/shared +${SRCDIR}/icepack/columnphysics +${SRCDIR}/$SHRDIR +EOF + +setenv MAKENAME gmake +setenv MAKETHRDS 1 +setenv MAKEFILE ${SRCDIR}/configuration/scripts/Makefile +setenv MACROSFILE ${SRCDIR}/configuration/scripts/machines/Macros.$ARCH +setenv DEPFILE ${SRCDIR}/configuration/scripts/makdep.c + +echo "CICE6 ${0}: EXEDIR = ${EXEDIR}" +echo "CICE6 ${0}: OBJDIR = ${OBJDIR}" +echo "CICE6 ${0}: MAKEFILE = ${MAKEFILE}" +echo "CICE6 ${0}: MACROSFILE = ${MACROSFILE}" +echo "CICE6 ${0}: DEPFILE = ${DEPFILE}" +echo "CICE6 ${0}: ESMFMKFILE = ${ESMFMKFILE}" + +#diagnostics +#${MAKENAME} -j ${MAKETHRDS} VPFILE=Filepath EXEC=${OBJDIR}/cice \ +# -f ${MAKEFILE} MACFILE=${MACROSFILE} DEPFILE=${DEPFILE} db_files +#${MAKENAME} -j ${MAKETHRDS} VPFILE=Filepath EXEC=${OBJDIR}/cice \ +# -f ${MAKEFILE} MACFILE=${MACROSFILE} DEPFILE=${DEPFILE} db_flags + +#clean +#${MAKENAME} VPFILE=Filepath EXEC=${OBJDIR}/cice \ +# -f ${MAKEFILE} MACFILE=${MACROSFILE} DEPFILE=${DEPFILE} clean + +#needed to trigger a failed build to rest of system +rm ${BINDIR}/cice6.mk + +#build lib (includes dependencies) +${MAKENAME} -j ${MAKETHRDS} VPFILE=Filepath EXEC=${OBJDIR}/libcice6.a \ + -f ${MAKEFILE} MACFILE=${MACROSFILE} DEPFILE=${DEPFILE} libcice + +if ($status != 0) then + echo "CICE6 ${0}: gmake failed, exiting" + exit -2 +endif + +#install +mkdir -p ${BINDIR} +cp -f ${OBJDIR}/libcice6.a ${BINDIR}/ +cp -f ${OBJDIR}/ice_comp_nuopc.mod ${BINDIR}/ + +cat >! 
${BINDIR}/cice6.mk << EOF +# ESMF self-describing build dependency makefile fragment + +ESMF_DEP_FRONT = ice_comp_nuopc +ESMF_DEP_INCPATH = ${BINDIR} +ESMF_DEP_CMPL_OBJS = +ESMF_DEP_LINK_OBJS = ${BINDIR}/libcice6.a + +EOF + From 53715eaffa8b6a543dbb126fe922c5232626fbe8 Mon Sep 17 00:00:00 2001 From: Mariana Vertenstein Date: Sun, 24 May 2020 18:06:06 -0600 Subject: [PATCH 05/90] put in changes so that both ufsatm and cesm requirements for potential temperature and density are satisfied --- .../drivers/nuopc/cmeps/ice_import_export.F90 | 132 +++++++++++------- 1 file changed, 84 insertions(+), 48 deletions(-) diff --git a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 index b253c0123..083283895 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 @@ -3,9 +3,6 @@ module ice_import_export use ESMF use NUOPC use NUOPC_Model -#ifdef CESMCOUPLED - use shr_frz_mod , only : shr_frz_freezetemp -#endif use ice_kinds_mod , only : int_kind, dbl_kind, char_len, log_kind use ice_constants , only : c0, c1, spval_dbl use ice_constants , only : field_loc_center, field_type_scalar, field_type_vector @@ -19,16 +16,14 @@ module ice_import_export #if (defined NEWCODE) use ice_flux , only : fswthruvdr, fswthruvdf, fswthruidr, fswthruidf use ice_flux , only : send_i2x_per_cat, fswthrun_ai + use ice_flux , only : faero_atm, faero_ocn + use ice_flux , only : fiso_atm, fiso_ocn, fiso_rain, fiso_evap + use ice_flux , only : Qa_iso, Qref_iso, HDO_ocn, H2_18O_ocn, H2_16O_ocn #endif use ice_flux , only : fresh, fsalt, zlvl, uatm, vatm, potT, Tair, Qa use ice_flux , only : rhoa, swvdr, swvdf, swidr, swidf, flw, frain use ice_flux , only : fsnow, uocn, vocn, sst, ss_tltx, ss_tlty, frzmlt use ice_flux , only : sss, Tf, wind, fsw -#if (defined NEWCODE) - use ice_flux , only : faero_atm, faero_ocn - use ice_flux , only : fiso_atm, fiso_ocn, fiso_rain, fiso_evap - use ice_flux , only : Qa_iso, Qref_iso, HDO_ocn, H2_18O_ocn, H2_16O_ocn -#endif use ice_state , only : vice, vsno, aice, aicen_init, trcr use ice_grid , only : tlon, tlat, tarea, tmask, anglet, hm, ocn_gridcell_frac use ice_grid , only : grid_type, t2ugrid_vector @@ -41,6 +36,7 @@ module ice_import_export use icepack_intfc , only : icepack_query_parameters, icepack_query_tracer_flags use icepack_intfc , only : icepack_liquidus_temperature #ifdef CESMCOUPLED + use shr_frz_mod , only : shr_frz_freezetemp use perf_mod , only : t_startf, t_stopf, t_barrierf #endif @@ -127,7 +123,6 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam if (ChkErr(rc,__LINE__,u_FILE_u)) return read(cvalue,*) flds_wiso call ESMF_LogWrite('flds_wiso = '// trim(cvalue), ESMF_LOGMSG_INFO) - #if (defined NEWCODE) call NUOPC_CompAttributeGet(gcomp, name='flds_i2o_per_cat', value=cvalue, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return @@ -149,7 +144,7 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam call fldlist_add(fldsToIce_num, fldsToIce, 's_surf' ) call fldlist_add(fldsToIce_num, fldsToIce, 'ocn_current_zonal' ) call fldlist_add(fldsToIce_num, fldsToIce, 'ocn_current_merid' ) - call fldlist_add(fldsToIce_num, fldsToIce, 'freezing_melting_potential' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'freezing_melting_potential') if (flds_wiso) then call fldlist_add(fldsToIce_num, fldsToIce, 'So_roce_wiso', ungridded_lbound=1, ungridded_ubound=3) end if @@ -160,8 +155,7 @@ subroutine ice_advertise_fields(gcomp, 
importState, exportState, flds_scalar_nam call fldlist_add(fldsToIce_num, fldsToIce, 'inst_merid_wind_height_lowest' ) call fldlist_add(fldsToIce_num, fldsToIce, 'inst_spec_humid_height_lowest' ) call fldlist_add(fldsToIce_num, fldsToIce, 'inst_temp_height_lowest' ) - call fldlist_add(fldsToIce_num, fldsToIce, 'Sa_ptem' ) - call fldlist_add(fldsToIce_num, fldsToIce, 'air_density_height_lowest' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'inst_pres_height_lowest' ) call fldlist_add(fldsToIce_num, fldsToIce, 'mean_down_sw_vis_dir_flx' ) call fldlist_add(fldsToIce_num, fldsToIce, 'mean_down_sw_ir_dir_flx' ) call fldlist_add(fldsToIce_num, fldsToIce, 'mean_down_sw_vis_dif_flx' ) @@ -169,6 +163,8 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam call fldlist_add(fldsToIce_num, fldsToIce, 'mean_down_lw_flx' ) call fldlist_add(fldsToIce_num, fldsToIce, 'mean_prec_rate' ) call fldlist_add(fldsToIce_num, fldsToIce, 'mean_fprec_rate' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'Sa_ptem' ) !cesm + call fldlist_add(fldsToIce_num, fldsToIce, 'air_density_height_lowest' ) !cesm #ifdef CESMCOUPLED ! from atm - black carbon deposition fluxes (3) @@ -348,7 +344,7 @@ subroutine ice_import( importState, rc ) integer , intent(out) :: rc ! local variables - integer,parameter :: nflds=15 + integer,parameter :: nflds=16 integer,parameter :: nfldv=6 integer :: i, j, iblk, n integer :: ilo, ihi, jlo, jhi !beginning and end of physical domain @@ -357,6 +353,7 @@ subroutine ice_import( importState, rc ) real (kind=dbl_kind) :: workx, worky real (kind=dbl_kind) :: MIN_RAIN_TEMP, MAX_SNOW_TEMP real (kind=dbl_kind) :: tffresh + real (kind=dbl_kind) :: inst_pres_height_lowest character(len=*), parameter :: subname = 'ice_import' !----------------------------------------------------- @@ -394,50 +391,56 @@ subroutine ice_import( importState, rc ) call state_getimport(importState, 's_surf', output=aflds, index=2, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - ! import ocean states + ! import atm states call state_getimport(importState, 'inst_height_lowest', output=aflds, index=3, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return -!tcx errr.... this needs to be fixed in the dictionary!!! - call state_getimport(importState, 'Sa_ptem', output=aflds, index=4, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - - call state_getimport(importState, 'inst_temp_height_lowest', output=aflds, index=5, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (State_FldChk(importState, 'Sa_ptem') .and. 
State_fldchk(importState, 'air_density_height_lowest')) then + call state_getimport(importState, 'Sa_ptem', output=aflds, index=4, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'air_density_height_lowest', output=aflds, index=5, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else if (State_FldChk(importState, 'inst_pres_height_lowest')) then + call state_getimport(importState, 'inst_pres_height_lowest', output=aflds, index=6, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call abort_ice(trim(subname)//& + ": ERROR either Sa_ptem and air_density_height_lowest OR inst_pres_height_lowest must be in import state") + end if - call state_getimport(importState, 'inst_spec_humid_height_lowest', output=aflds, index=6, rc=rc) + call state_getimport(importState, 'inst_temp_height_lowest', output=aflds, index=7, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'air_density_height_lowest', output=aflds, index=7, rc=rc) + call state_getimport(importState, 'inst_spec_humid_height_lowest', output=aflds, index=8, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! import ocn/ice fluxes - call state_getimport(importState, 'freezing_melting_potential', output=aflds, index=8, rc=rc) + call state_getimport(importState, 'freezing_melting_potential', output=aflds, index=9, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! import atm fluxes - call state_getimport(importState, 'mean_down_sw_vis_dir_flx', output=aflds, index=9, rc=rc) + call state_getimport(importState, 'mean_down_sw_vis_dir_flx', output=aflds, index=10, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'mean_down_sw_ir_dir_flx', output=aflds, index=10, rc=rc) + call state_getimport(importState, 'mean_down_sw_ir_dir_flx', output=aflds, index=11, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'mean_down_sw_vis_dif_flx', output=aflds, index=11, rc=rc) + call state_getimport(importState, 'mean_down_sw_vis_dif_flx', output=aflds, index=12, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'mean_down_sw_ir_dif_flx', output=aflds, index=12, rc=rc) + call state_getimport(importState, 'mean_down_sw_ir_dif_flx', output=aflds, index=13, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'mean_down_lw_flx', output=aflds, index=13, rc=rc) + call state_getimport(importState, 'mean_down_lw_flx', output=aflds, index=14, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'mean_prec_rate', output=aflds, index=14, rc=rc) + call state_getimport(importState, 'mean_prec_rate', output=aflds, index=15, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'mean_fprec_rate', output=aflds, index=15, rc=rc) + call state_getimport(importState, 'mean_fprec_rate', output=aflds, index=16, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! 
perform a halo update @@ -458,26 +461,59 @@ subroutine ice_import( importState, rc ) do iblk = 1, nblocks do j = 1,ny_block do i = 1,nx_block - sst (i,j,iblk) = aflds(i,j, 1,iblk) - sss (i,j,iblk) = aflds(i,j, 2,iblk) - zlvl (i,j,iblk) = aflds(i,j, 3,iblk) - potT (i,j,iblk) = aflds(i,j, 4,iblk) - Tair (i,j,iblk) = aflds(i,j, 5,iblk) - Qa (i,j,iblk) = aflds(i,j, 6,iblk) - rhoa (i,j,iblk) = aflds(i,j, 7,iblk) - frzmlt (i,j,iblk) = aflds(i,j, 8,iblk) - swvdr(i,j,iblk) = aflds(i,j, 9,iblk) - swidr(i,j,iblk) = aflds(i,j,10,iblk) - swvdf(i,j,iblk) = aflds(i,j,11,iblk) - swidf(i,j,iblk) = aflds(i,j,12,iblk) - flw (i,j,iblk) = aflds(i,j,13,iblk) - frain(i,j,iblk) = aflds(i,j,14,iblk) - fsnow(i,j,iblk) = aflds(i,j,15,iblk) - enddo !i - enddo !j - enddo !iblk + sst (i,j,iblk) = aflds(i,j, 1,iblk) + sss (i,j,iblk) = aflds(i,j, 2,iblk) + zlvl (i,j,iblk) = aflds(i,j, 3,iblk) + ! see below for 4,5,6 + Tair (i,j,iblk) = aflds(i,j, 7,iblk) + Qa (i,j,iblk) = aflds(i,j, 8,iblk) + frzmlt (i,j,iblk) = aflds(i,j, 9,iblk) + swvdr(i,j,iblk) = aflds(i,j,10,iblk) + swidr(i,j,iblk) = aflds(i,j,11,iblk) + swvdf(i,j,iblk) = aflds(i,j,12,iblk) + swidf(i,j,iblk) = aflds(i,j,13,iblk) + flw (i,j,iblk) = aflds(i,j,14,iblk) + frain(i,j,iblk) = aflds(i,j,15,iblk) + fsnow(i,j,iblk) = aflds(i,j,16,iblk) + end do + end do + end do !$OMP END PARALLEL DO + if ( State_fldChk(importState, 'Sa_ptem') .and. State_fldchk(importState,'air_density_height_lowest')) then + !$OMP PARALLEL DO PRIVATE(iblk,i,j) + do iblk = 1, nblocks + do j = 1,ny_block + do i = 1,nx_block + potT (i,j,iblk) = aflds(i,j, 4,iblk) + rhoa (i,j,iblk) = aflds(i,j, 5,iblk) + end do + end do + end do + !$OMP END PARALLEL DO + else if (State_fldChk(importState, 'inst_pres_height_lowest')) then + !$OMP PARALLEL DO PRIVATE(iblk,i,j) + do iblk = 1, nblocks + do j = 1,ny_block + do i = 1,nx_block + inst_pres_height_lowest = aflds(i,j,6,iblk) + if (inst_pres_height_lowest > 0.0_ESMF_KIND_R8) then + potT (i,j,iblk) = Tair(i,j,iblk) * (100000._ESMF_KIND_R8/inst_pres_height_lowest)**0.286_ESMF_KIND_R8 + else + potT (i,j,iblk) = 0.0_ESMF_KIND_R8 + end if + if (Tair(i,j,iblk) /= 0._ESMF_KIND_R8) then + rhoa(i,j,iblk) = inst_pres_height_lowest / & + (287.058_ESMF_KIND_R8*(1._ESMF_KIND_R8+0.608_ESMF_KIND_R8*Qa(i,j,iblk))*Tair(i,j,iblk)) + else + rhoa(i,j,iblk) = 0._ESMF_KIND_R8 + endif + end do !i + end do !j + end do !iblk + !$OMP END PARALLEL DO + end if + deallocate(aflds) allocate(aflds(nx_block,ny_block,nfldv,nblocks)) aflds = c0 From 3bb36945c210c5927a8cdd0e2b4cfaaaedb56be8 Mon Sep 17 00:00:00 2001 From: Denise Worthen Date: Fri, 5 Jun 2020 13:32:44 -0400 Subject: [PATCH 06/90] Convergence on ustar for CICE. (#452) (#5) * Add atmiter_conv to CICE * Add documentation * trigger build the docs Co-authored-by: David A. 
Bailey --- cicecore/cicedynB/general/ice_init.F90 | 11 ++++++++--- configuration/scripts/ice_in | 1 + doc/source/cice_index.rst | 1 + doc/source/user_guide/ug_case_settings.rst | 3 ++- 4 files changed, 12 insertions(+), 4 deletions(-) diff --git a/cicecore/cicedynB/general/ice_init.F90 b/cicecore/cicedynB/general/ice_init.F90 index e7820d0b7..289b70a9f 100644 --- a/cicecore/cicedynB/general/ice_init.F90 +++ b/cicecore/cicedynB/general/ice_init.F90 @@ -117,7 +117,7 @@ subroutine input_data ahmax, R_ice, R_pnd, R_snw, dT_mlt, rsnw_mlt, emissivity, & mu_rdg, hs0, dpscale, rfracmin, rfracmax, pndaspect, hs1, hp1, & a_rapid_mode, Rac_rapid_mode, aspect_rapid_mode, dSdt_slow_mode, & - phi_c_slow_mode, phi_i_mushy, kalg + phi_c_slow_mode, phi_i_mushy, kalg, atmiter_conv integer (kind=int_kind) :: ktherm, kstrength, krdg_partic, krdg_redist, natmiter, & kitd, kcatbound @@ -204,7 +204,8 @@ subroutine input_data namelist /forcing_nml/ & formdrag, atmbndy, calc_strair, calc_Tsfc, & - highfreq, natmiter, ustar_min, emissivity, & + highfreq, natmiter, atmiter_conv, & + ustar_min, emissivity, & fbot_xfer_type, update_ocn_f, l_mpond_fresh, tfrz_option, & oceanmixed_ice, restore_ice, restore_ocn, trestore, & precip_units, default_season, wave_spec_type,nfreq, & @@ -357,6 +358,7 @@ subroutine input_data formdrag = .false. ! calculate form drag highfreq = .false. ! calculate high frequency RASM coupling natmiter = 5 ! number of iterations for atm boundary layer calcs + atmiter_conv = c0 ! ustar convergence criteria precip_units = 'mks' ! 'mm_per_month' or ! 'mm_per_sec' = 'mks' = kg/m^2 s tfrz_option = 'mushy' ! freezing temp formulation @@ -631,6 +633,7 @@ subroutine input_data call broadcast_scalar(formdrag, master_task) call broadcast_scalar(highfreq, master_task) call broadcast_scalar(natmiter, master_task) + call broadcast_scalar(atmiter_conv, master_task) call broadcast_scalar(update_ocn_f, master_task) call broadcast_scalar(l_mpond_fresh, master_task) call broadcast_scalar(ustar_min, master_task) @@ -1154,6 +1157,7 @@ subroutine input_data write(nu_diag,1010) ' formdrag = ', formdrag write(nu_diag,1010) ' highfreq = ', highfreq write(nu_diag,1020) ' natmiter = ', natmiter + write(nu_diag,1005) ' atmiter_conv = ', atmiter_conv write(nu_diag,1010) ' calc_strair = ', calc_strair write(nu_diag,1010) ' calc_Tsfc = ', calc_Tsfc @@ -1305,7 +1309,8 @@ subroutine input_data endif call icepack_init_parameters(ustar_min_in=ustar_min, albicev_in=albicev, albicei_in=albicei, & - albsnowv_in=albsnowv, albsnowi_in=albsnowi, natmiter_in=natmiter, emissivity_in=emissivity, & + albsnowv_in=albsnowv, albsnowi_in=albsnowi, natmiter_in=natmiter, atmiter_conv_in=atmiter_conv, & + emissivity_in=emissivity, & ahmax_in=ahmax, shortwave_in=shortwave, albedo_type_in=albedo_type, R_ice_in=R_ice, R_pnd_in=R_pnd, & R_snw_in=R_snw, dT_mlt_in=dT_mlt, rsnw_mlt_in=rsnw_mlt, & kstrength_in=kstrength, krdg_partic_in=krdg_partic, krdg_redist_in=krdg_redist, mu_rdg_in=mu_rdg, & diff --git a/configuration/scripts/ice_in b/configuration/scripts/ice_in index ec984397a..d64960c6f 100644 --- a/configuration/scripts/ice_in +++ b/configuration/scripts/ice_in @@ -167,6 +167,7 @@ calc_Tsfc = .true. highfreq = .false. natmiter = 5 + atmiter_conv = 0.0d0 ustar_min = 0.0005 emissivity = 0.95 fbot_xfer_type = 'constant' diff --git a/doc/source/cice_index.rst b/doc/source/cice_index.rst index b78ac356d..e2920c839 100644 --- a/doc/source/cice_index.rst +++ b/doc/source/cice_index.rst @@ -61,6 +61,7 @@ either Celsius or Kelvin units). 
"ardgn", "fractional area of ridged ice", "" "aspect_rapid_mode", ":math:`\bullet` brine convection aspect ratio", "1" "astar", "e-folding scale for participation function", "0.05" + "atmiter_conv", ":math:`\bullet` convergence criteria for ustar", "0.00" "atm_data_dir", ":math:`\bullet` directory for atmospheric forcing data", "" "atm_data_format", ":math:`\bullet` format of atmospheric forcing files", "" "atm_data_type", ":math:`\bullet` type of atmospheric forcing", "" diff --git a/doc/source/user_guide/ug_case_settings.rst b/doc/source/user_guide/ug_case_settings.rst index fe93cca4c..9d6e6f906 100644 --- a/doc/source/user_guide/ug_case_settings.rst +++ b/doc/source/user_guide/ug_case_settings.rst @@ -332,7 +332,8 @@ Table of namelist options "\*","``calc_strair``", "true", "calculate wind stress and speed", "" "","", "false", "read wind stress and speed from files", "" "\*","``highfreq``", "true/false", "high-frequency atmo coupling", "" - "\*","``natmiter``", "integer", "number of atmo boundary layer iterations", "" + "\*","``natmiter``", "integer", "number of atmo boundary layer iterations", "5" + "\*","``atmiter_conv``", "real", "convergence criteria for ustar", "0.0" "\*","``calc_Tsfc``", "true/false", "calculate surface temperature", "``.true.``" "\*","``default_season``","``winter``", "Sets initial values of forcing and is overwritten if forcing is read in.", "" "\*","``precip_units``", "``mks``", "liquid precipitation data units", "" From e70d1abcbeb4351195a2b81c6ce3f623c936426c Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Mon, 22 Jun 2020 14:58:13 -0600 Subject: [PATCH 07/90] update icepack submodule --- icepack | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/icepack b/icepack index 1ae044604..2b27a78aa 160000 --- a/icepack +++ b/icepack @@ -1 +1 @@ -Subproject commit 1ae044604498b8d268df6c577556d22d2baa7758 +Subproject commit 2b27a78aaecb3635d14b94464d918a67df750ff0 From 308a1d4f6a1d2e8d9b78f51599eef77a2662feea Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Mon, 22 Jun 2020 15:05:32 -0600 Subject: [PATCH 08/90] Revert "update icepack submodule" This reverts commit e70d1abcbeb4351195a2b81c6ce3f623c936426c. 
--- icepack | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/icepack b/icepack index 2b27a78aa..1ae044604 160000 --- a/icepack +++ b/icepack @@ -1 +1 @@ -Subproject commit 2b27a78aaecb3635d14b94464d918a67df750ff0 +Subproject commit 1ae044604498b8d268df6c577556d22d2baa7758 From 089f60faaa33b66fe878e932b8a20ab81b6f5beb Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Thu, 25 Jun 2020 15:18:56 +0000 Subject: [PATCH 09/90] update comp_ice.backend with temporary ice_timers fix --- forapps/ufs/comp_ice.backend.libcice | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/forapps/ufs/comp_ice.backend.libcice b/forapps/ufs/comp_ice.backend.libcice index eb1b8a4e7..ca718548a 100755 --- a/forapps/ufs/comp_ice.backend.libcice +++ b/forapps/ufs/comp_ice.backend.libcice @@ -18,10 +18,10 @@ setenv THRD no # set to yes for OpenMP threading if (${SITE} =~ cheyenne*) then setenv ARCH cheyenne_intel -#else if (${SITE} =~ Orion*) then -# setenv ARCH orion_intel -#else if (${SITE} =~ hera*) then -# setenv ARCH hera_intel +else if (${SITE} =~ Orion*) then + setenv ARCH orion_intel +else if (${SITE} =~ hera*) then + setenv ARCH hera_intel else echo "CICE6 ${0}: ERROR in ARCH setup, ${hname}" exit -2 @@ -129,6 +129,7 @@ endif mkdir -p ${BINDIR} cp -f ${OBJDIR}/libcice6.a ${BINDIR}/ cp -f ${OBJDIR}/ice_comp_nuopc.mod ${BINDIR}/ +cp -f ${OBJDIR}/ice_timers.mod ${BINDIR}/ cat >! ${BINDIR}/cice6.mk << EOF # ESMF self-describing build dependency makefile fragment From ad03424248118ad304290c30a1454ea591df4f0a Mon Sep 17 00:00:00 2001 From: David Bailey Date: Wed, 1 Jul 2020 12:52:00 -0600 Subject: [PATCH 10/90] Fix threading problem in init_bgc --- cicecore/shared/ice_init_column.F90 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cicecore/shared/ice_init_column.F90 b/cicecore/shared/ice_init_column.F90 index b41e71aa1..1a4791291 100644 --- a/cicecore/shared/ice_init_column.F90 +++ b/cicecore/shared/ice_init_column.F90 @@ -865,7 +865,7 @@ subroutine init_bgc() endif ! .not. restart - !$OMP PARALLEL DO PRIVATE(iblk,i,j,n,ilo,ihi,jlo,jhi,this_block) + !$OMP PARALLEL DO PRIVATE(iblk,i,j,k,n,ilo,ihi,jlo,jhi,this_block) do iblk = 1, nblocks this_block = get_block(blocks_ice(iblk),iblk) From 73e77746d8204c181a311be8e51c6b3edec75dea Mon Sep 17 00:00:00 2001 From: David Bailey Date: Wed, 1 Jul 2020 14:41:27 -0600 Subject: [PATCH 11/90] Fix additional OMP problems --- cicecore/shared/ice_init_column.F90 | 8 ++++++-- cicecore/shared/ice_restart_column.F90 | 6 +++++- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/cicecore/shared/ice_init_column.F90 b/cicecore/shared/ice_init_column.F90 index 1a4791291..9e4838087 100644 --- a/cicecore/shared/ice_init_column.F90 +++ b/cicecore/shared/ice_init_column.F90 @@ -785,7 +785,7 @@ subroutine init_bgc() if (solve_zsal) then ! default values - !$OMP PARALLEL DO PRIVATE(iblk,i,j,n,ilo,ihi,jlo,jhi,this_block) + !$OMP PARALLEL DO PRIVATE(iblk,i,j,k,n,ilo,ihi,jlo,jhi,this_block,trcrn_bgc) do iblk = 1, nblocks this_block = get_block(blocks_ice(iblk),iblk) @@ -816,6 +816,7 @@ subroutine init_bgc() enddo ! i enddo ! j enddo ! iblk + !$OMP END PARALLEL DO call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -855,6 +856,7 @@ subroutine init_bgc() enddo ! j enddo ! 
iblk + !$OMP END PARALLEL DO call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -865,7 +867,7 @@ subroutine init_bgc() endif ! .not. restart - !$OMP PARALLEL DO PRIVATE(iblk,i,j,k,n,ilo,ihi,jlo,jhi,this_block) + !$OMP PARALLEL DO PRIVATE(iblk,i,j,k,n,ilo,ihi,jlo,jhi,this_block,sicen,trcrn_bgc) do iblk = 1, nblocks this_block = get_block(blocks_ice(iblk),iblk) @@ -900,6 +902,7 @@ subroutine init_bgc() enddo ! j enddo ! iblk + !$OMP END PARALLEL DO call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -925,6 +928,7 @@ subroutine init_bgc() enddo ! i enddo ! j enddo ! iblk + !$OMP END PARALLEL DO call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & diff --git a/cicecore/shared/ice_restart_column.F90 b/cicecore/shared/ice_restart_column.F90 index e830dd50b..e819b1098 100644 --- a/cicecore/shared/ice_restart_column.F90 +++ b/cicecore/shared/ice_restart_column.F90 @@ -809,6 +809,7 @@ subroutine read_restart_hbrine() enddo ! i enddo ! j enddo ! iblk + !$OMP END PARALLEL DO end subroutine read_restart_hbrine @@ -868,6 +869,7 @@ subroutine write_restart_hbrine() enddo ! i enddo ! j enddo ! iblk + !$OMP END PARALLEL DO call write_restart_field(nu_dump_hbrine,0,trcrn(:,:,nt_fbri,:,:),'ruf8', & 'fbrn',ncat,diag) @@ -997,6 +999,7 @@ subroutine write_restart_bgc() enddo enddo enddo + !$OMP END PARALLEL DO call write_restart_field(nu_dump_bgc,0,Rayleigh_real,'ruf8','Rayleigh',1,diag) @@ -1411,7 +1414,8 @@ subroutine read_restart_bgc() endif enddo enddo - enddo + enddo ! iblk + !$OMP END PARALLEL DO endif ! restart_zsal !----------------------------------------------------------------- From 46fcfbaaba0161c63c44ecf7f7449df027f97281 Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Thu, 2 Jul 2020 05:24:54 -0600 Subject: [PATCH 12/90] changes for coldstart running --- .../cicedynB/analysis/ice_history_shared.F90 | 2 - .../drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 362 +++++++----------- .../drivers/nuopc/cmeps/ice_import_export.F90 | 29 +- 3 files changed, 147 insertions(+), 246 deletions(-) diff --git a/cicecore/cicedynB/analysis/ice_history_shared.F90 b/cicecore/cicedynB/analysis/ice_history_shared.F90 index b5f2226fa..ce177ad1e 100644 --- a/cicecore/cicedynB/analysis/ice_history_shared.F90 +++ b/cicecore/cicedynB/analysis/ice_history_shared.F90 @@ -672,9 +672,7 @@ subroutine construct_filename(ncfile,suffix,ns) iday = mday isec = sec - dt -#ifdef CESMCOUPLED if (write_ic) isec = sec -#endif ! 
construct filename if (write_ic) then write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index e4c2a3802..fca4974b7 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -15,12 +15,6 @@ module ice_comp_nuopc use NUOPC_Model , only : model_label_SetRunClock => label_SetRunClock use NUOPC_Model , only : model_label_Finalize => label_Finalize use NUOPC_Model , only : NUOPC_ModelGet, SetVM -#ifdef CESMCOUPLED - use shr_file_mod , only : shr_file_getlogunit, shr_file_setlogunit - use shr_orb_mod , only : shr_orb_decl, shr_orb_params, SHR_ORB_UNDEF_REAL, SHR_ORB_UNDEF_INT - use shr_const_mod - use shr_cal_mod , only : shr_cal_noleap, shr_cal_gregorian -#endif use ice_constants , only : ice_init_constants use ice_shr_methods , only : chkerr, state_setscalar, state_getscalar, state_diagnose, alarmInit use ice_shr_methods , only : set_component_logging, get_component_instance @@ -42,26 +36,20 @@ module ice_comp_nuopc use ice_fileunits , only : nu_diag, nu_diag_set, inst_index, inst_name, inst_suffix, release_all_fileunits, flush_fileunit use ice_restart_shared , only : runid, runtype, restart_dir, restart_file use ice_history , only : accum_hist -#if (defined NEWCODE) - use ice_history_shared , only : model_doi_url ! TODO: add this functionality -#endif -#ifdef CESMCOUPLED - use ice_prescribed_mod , only : ice_prescribed_init -#endif -#if (defined NEWCODE) - use ice_atmo , only : flux_convergence_tolerance, flux_convergence_max_iteration - use ice_atmo , only : use_coldair_outbreak_mod -#endif use CICE_InitMod , only : CICE_Init use CICE_RunMod , only : CICE_Run use ice_exit , only : abort_ice use icepack_intfc , only : icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc , only : icepack_init_orbit, icepack_init_parameters, icepack_query_orbit use icepack_intfc , only : icepack_query_tracer_flags, icepack_query_parameters + use ice_timers #ifdef CESMCOUPLED + use shr_file_mod , only : shr_file_getlogunit, shr_file_setlogunit + use shr_orb_mod , only : shr_orb_decl, shr_orb_params, SHR_ORB_UNDEF_REAL, SHR_ORB_UNDEF_INT + use shr_const_mod + use ice_prescribed_mod , only : ice_prescribed_init use perf_mod , only : t_startf, t_stopf, t_barrierf #endif - use ice_timers implicit none private @@ -75,32 +63,33 @@ module ice_comp_nuopc private :: ModelAdvance private :: ModelSetRunClock private :: ModelFinalize -#ifdef CESMCOUPLED - private :: ice_orbital_init ! only for cesm -#endif + private :: ice_orbital_init ! only valid for cesm character(len=char_len_long) :: flds_scalar_name = '' - integer :: flds_scalar_num = 0 - integer :: flds_scalar_index_nx = 0 - integer :: flds_scalar_index_ny = 0 - integer :: flds_scalar_index_nextsw_cday = 0 + integer :: flds_scalar_num = 0 + integer :: flds_scalar_index_nx = 0 + integer :: flds_scalar_index_ny = 0 + integer :: flds_scalar_index_nextsw_cday = 0 character(len=char_len_long) :: orb_mode ! attribute - orbital mode - integer :: orb_iyear ! attribute - orbital year - integer :: orb_iyear_align ! attribute - associated with model year - real(dbl_kind) :: orb_obliq ! attribute - obliquity in degrees - real(dbl_kind) :: orb_mvelp ! attribute - moving vernal equinox longitude - real(dbl_kind) :: orb_eccen ! attribute and update- orbital eccentricity + integer :: orb_iyear ! attribute - orbital year + integer :: orb_iyear_align ! 
attribute - associated with model year + real(dbl_kind) :: orb_obliq ! attribute - obliquity in degrees + real(dbl_kind) :: orb_mvelp ! attribute - moving vernal equinox longitude + real(dbl_kind) :: orb_eccen ! attribute and update- orbital eccentricity character(len=*) , parameter :: orb_fixed_year = 'fixed_year' character(len=*) , parameter :: orb_variable_year = 'variable_year' character(len=*) , parameter :: orb_fixed_parameters = 'fixed_parameters' - integer , parameter :: dbug = 10 - integer , parameter :: debug_import = 0 ! internal debug level - integer , parameter :: debug_export = 0 ! internal debug level - character(*), parameter :: modName = "(ice_comp_nuopc)" - character(*), parameter :: u_FILE_u = & + character(len=*),parameter :: shr_cal_noleap = 'NO_LEAP' + character(len=*),parameter :: shr_cal_gregorian = 'GREGORIAN' + + integer , parameter :: dbug = 10 + integer , parameter :: debug_import = 0 ! internal debug level + integer , parameter :: debug_export = 0 ! internal debug level + character(*), parameter :: modName = "(ice_comp_nuopc)" + character(*), parameter :: u_FILE_u = & __FILE__ !======================================================================= @@ -244,8 +233,6 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) write(logmsg,*) flds_scalar_index_nextsw_cday call ESMF_LogWrite(trim(subname)//' : flds_scalar_index_nextsw_cday = '//trim(logmsg), ESMF_LOGMSG_INFO) if (ChkErr(rc,__LINE__,u_FILE_u)) return - else - call abort_ice(subname//'Need to set attribute ScalarFieldIdxNextSwCday') endif call ice_advertise_fields(gcomp, importState, exportState, flds_scalar_name, rc) @@ -265,6 +252,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) integer, intent(out) :: rc ! Local variables + real(dbl_kind) :: eccen, obliqr, lambm0, mvelpp type(ESMF_DistGrid) :: distGrid type(ESMF_Mesh) :: Emesh, EmeshTemp integer :: spatialDim @@ -277,7 +265,6 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) integer , allocatable :: gindex(:) integer :: globalID character(ESMF_MAXSTR) :: cvalue - real(dbl_kind) :: eccen, obliqr, lambm0, mvelpp character(len=char_len) :: tfrz_option character(ESMF_MAXSTR) :: convCIM, purpComp type(ESMF_VM) :: vm @@ -356,9 +343,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) ! start cice timers !---------------------------------------------------------------------------- -#ifdef CESMCOUPLED call t_startf ('cice_init_total') -#endif !---------------------------------------------------------------------------- ! Initialize constants @@ -403,44 +388,8 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) !---------------------------------------------------------------------------- ! Get orbital values - ! Note that these values are obtained in a call to init_orbit in ice_shortwave.F90 - ! if CESMCOUPLED is not defined -#ifdef CESMCOUPLED call ice_orbital_init(gcomp, clock, nu_diag, my_task==master_task, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return -#else - ! 
Start with icepack values then update with values defined in configure file if they exist - call icepack_query_orbit(eccen_out=eccen, mvelpp_out=mvelpp, lambm0_out=lambm0, obliqr_out=obliqr) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - - call NUOPC_CompAttributeGet(gcomp, name='orb_eccen', value=cvalue, isPresent=isPresent, rc=rc) - !if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent) then - read(cvalue,*) eccen - end if - call NUOPC_CompAttributeGet(gcomp, name='orb_obliqr', value=cvalue, isPresent=isPresent, rc=rc) - !if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent) then - read(cvalue,*) obliqr - end if - call NUOPC_CompAttributeGet(gcomp, name='orb_lambm0', value=cvalue, isPresent=isPresent, rc=rc) - !if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent) then - read(cvalue,*) lambm0 - end if - call NUOPC_CompAttributeGet(gcomp, name='orb_mvelpp', value=cvalue, isPresent=isPresent, rc=rc) - !if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent) then - read(cvalue,*) mvelpp - end if - - call icepack_init_orbit(eccen_in=eccen, mvelpp_in=mvelpp, lambm0_in=lambm0, obliqr_in=obliqr) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) -#endif ! Determine runtype and possibly nextsw_cday call NUOPC_CompAttributeGet(gcomp, name='start_type', value=cvalue, isPresent=isPresent, rc=rc) @@ -473,22 +422,17 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end if else - ! This would be the NEMS branch - ! Note that in NEMS - nextsw_cday is not needed in ice_orbital.F90 and what is needed is - ! simply a CPP variable declaratino of NEMSCOUPLED - runtype = 'initial' ! determined from the namelist in ice_init if CESMCOUPLED is not defined end if single_column = .false. -#ifdef CESMCOUPLED - ! Determine single column info - call NUOPC_CompAttributeGet(gcomp, name='single_column', value=cvalue, isPresent=isPresent, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent) then - read(cvalue,*) single_column - end if -#endif + ! Determine single column info - only valid for cesm + !call NUOPC_CompAttributeGet(gcomp, name='single_column', value=cvalue, isPresent=isPresent, rc=rc) + !if (ChkErr(rc,__LINE__,u_FILE_u)) return + !if (isPresent) then + ! read(cvalue,*) single_column + !end if + if (single_column) then ! Must have these attributes present call NUOPC_CompAttributeGet(gcomp, name='scmlon', value=cvalue, rc=rc) @@ -507,46 +451,6 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) runid = 'unknown' ! read in from the namelist in ice_init.F90 if CESMCOUPLED is not defined end if -#ifdef CESMCOUPLED - ! Determine tfreeze_option, flux convertence before call to cice_init - ! tcx, what is going on here? if not present, set it? if present, ignore it? - call NUOPC_CompAttributeGet(gcomp, name="tfreeze_option", value=tfrz_option, isPresent=isPresent, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (.not. isPresent) then - tfrz_option = 'linear_salt' ! TODO: is this right? This must be the same as mom is using for the calculation. 
- end if - call icepack_init_parameters(tfrz_option_in=tfrz_option) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) -#endif - -#if (defined NEWCODE) - call NUOPC_CompAttributeGet(gcomp, name="flux_convergence", value=cvalue, isPresent=isPresent, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent) then - read(cvalue,*) flux_convergence_tolerance - else - flux_convergence_tolerance = 0._dbl_kind - end if - - call NUOPC_CompAttributeGet(gcomp, name="flux_max_iteration", value=cvalue, isPresent=isPresent, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent) then - read(cvalue,*) flux_convergence_max_iteration - else - flux_convergence_max_iteration = 5 - end if - - call NUOPC_CompAttributeGet(gcomp, name="coldair_outbreak_mod", value=cvalue, isPresent=isPresent, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent) then - read(cvalue,*) use_coldair_outbreak_mod - else - use_coldair_outbreak_mod = .false. - end if -#endif - ! Get clock information before call to cice_init call ESMF_ClockGet( clock, & @@ -576,8 +480,6 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) call ESMF_TimeGet( currTime, calkindflag=esmf_caltype, rc=rc ) if (ChkErr(rc,__LINE__,u_FILE_u)) return - -#ifdef CESMCOUPLED if (esmf_caltype == ESMF_CALKIND_NOLEAP) then calendar_type = shr_cal_noleap else if (esmf_caltype == ESMF_CALKIND_GREGORIAN) then @@ -585,7 +487,6 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) else call abort_ice( subname//'ERROR:: bad calendar for ESMF' ) end if -#endif !---------------------------------------------------------------------------- ! Set cice logging @@ -594,15 +495,16 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) ! Set the nu_diag_set flag so it's not reset later #ifdef CESMCOUPLED - call set_component_logging(gcomp, my_task==master_task, nu_diag, shrlogunit, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (my_task == master_task) then + call NUOPC_CompAttributeGet(gcomp, name="diro", value=diro, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + call NUOPC_CompAttributeGet(gcomp, name="logfile", value=logfile, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + open(newunit=nu_diag, file=trim(diro)//"/"//trim(logfile)) + end if nu_diag_set = .true. #endif -#ifdef CESMCOUPLED - call shr_file_setLogUnit (shrlogunit) -#endif - !---------------------------------------------------------------------------- ! Initialize cice !---------------------------------------------------------------------------- @@ -610,13 +512,9 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) ! Note that cice_init also sets time manager info as well as mpi communicator info, ! including master_task and my_task -#ifdef CESMCOUPLED call t_startf ('cice_init') -#endif call cice_init( lmpicom ) -#ifdef CESMCOUPLED call t_stopf ('cice_init') -#endif !---------------------------------------------------------------------------- ! 
reset shr logging to my log file @@ -637,10 +535,6 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) write(nu_diag,*) trim(subname),' inst_name = ',trim(inst_name) write(nu_diag,*) trim(subname),' inst_index = ',inst_index write(nu_diag,*) trim(subname),' inst_suffix = ',trim(inst_suffix) -#if (defined NEWCODE) - write(nu_diag,*) trim(subname),' flux_convergence = ', flux_convergence_tolerance - write(nu_diag,*) trim(subname),' flux_convergence_max_iteration = ', flux_convergence_max_iteration -#endif endif !--------------------------------------------------------------------------- @@ -968,25 +862,9 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return endif -#ifdef USE_ESMF_METADATA - convCIM = "CIM" - purpComp = "Model Component Simulation Description" - call ESMF_AttributeAdd(comp, convention=convCIM, purpose=purpComp, rc=rc) - call ESMF_AttributeSet(comp, "ShortName", "CICE", convention=convCIM, purpose=purpComp, rc=rc) - call ESMF_AttributeSet(comp, "LongName", "CICE Model", convention=convCIM, purpose=purpComp, rc=rc) - call ESMF_AttributeSet(comp, "Description", "CICE5", convention=convCIM, purpose=purpComp, rc=rc) - call ESMF_AttributeSet(comp, "ReleaseDate", "TBD", convention=convCIM, purpose=purpComp, rc=rc) - call ESMF_AttributeSet(comp, "ModelType", "Sea Ice", convention=convCIM, purpose=purpComp, rc=rc) - call ESMF_AttributeSet(comp, "Name", "David Bailey", convention=convCIM, purpose=purpComp, rc=rc) - call ESMF_AttributeSet(comp, "EmailAddress", "dbailey@ucar.edu", convention=convCIM, purpose=purpComp, rc=rc) - call ESMF_AttributeSet(comp, "ResponsiblePartyRole", "contact", convention=convCIM, purpose=purpComp, rc=rc) -#endif - if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) -#ifdef CESMCOUPLED call t_stopf ('cice_init_total') -#endif deallocate(gindex_ice) deallocate(gindex) @@ -1030,7 +908,7 @@ subroutine ModelAdvance(gcomp, rc) integer :: tod_sync ! Sync current time of day (sec) character(char_len_long) :: restart_date character(char_len_long) :: restart_filename - logical :: isPresent + logical :: isPresent, isSet character(*) , parameter :: F00 = "('(ice_comp_nuopc) ',2a,i8,d21.14)" character(len=*),parameter :: subname=trim(modName)//':(ModelAdvance) ' !-------------------------------- @@ -1043,19 +921,15 @@ subroutine ModelAdvance(gcomp, rc) !-------------------------------- call ice_timer_start(timer_total) ! time entire run -#ifdef CESMCOUPLED call t_barrierf('cice_run_total_BARRIER',mpi_comm_ice) call t_startf ('cice_run_total') -#endif !-------------------------------- ! Reset shr logging to my log file !-------------------------------- -#ifdef CESMCOUPLED call shr_file_getLogUnit (shrlogunit) call shr_file_setLogUnit (nu_diag) -#endif !-------------------------------- ! Query the Component for its clock, importState and exportState @@ -1068,10 +942,18 @@ subroutine ModelAdvance(gcomp, rc) ! Determine time of next atmospheric shortwave calculation !-------------------------------- - call State_GetScalar(importState, flds_scalar_index_nextsw_cday, nextsw_cday, & - flds_scalar_name, flds_scalar_num, rc) + call NUOPC_CompAttributeGet(gcomp, name="ScalarFieldIdxNextSwCday", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - + if (isPresent .and. 
isSet) then + call State_GetScalar(importState, flds_scalar_index_nextsw_cday, nextsw_cday, & + flds_scalar_name, flds_scalar_num, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call ESMF_ClockGetNextTime(clock, nextTime, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ESMF_TimeGet(nextTime, dayOfYear_r8=nextsw_cday, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if if (my_task == master_task) then write(nu_diag,F00) trim(subname),' cice istep, nextsw_cday = ',istep, nextsw_cday end if @@ -1079,44 +961,8 @@ subroutine ModelAdvance(gcomp, rc) !-------------------------------- ! Obtain orbital values !-------------------------------- -#ifdef CESMCOUPLED call ice_orbital_init(gcomp, clock, nu_diag, my_task==master_task, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return -#else - ! Start with icepack values then update with values defined in configure file if they exist - ! tcx, This should be identical with initialization, why do it again? Get rid of it - call icepack_query_orbit(eccen_out=eccen, mvelpp_out=mvelpp, lambm0_out=lambm0, obliqr_out=obliqr) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - - call NUOPC_CompAttributeGet(gcomp, name='orb_eccen', value=cvalue, isPresent=isPresent, rc=rc) - !if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent) then - read(cvalue,*) eccen - end if - call NUOPC_CompAttributeGet(gcomp, name='orb_obliqr', value=cvalue, isPresent=isPresent, rc=rc) - !if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent) then - read(cvalue,*) obliqr - end if - call NUOPC_CompAttributeGet(gcomp, name='orb_lambm0', value=cvalue, isPresent=isPresent, rc=rc) - !if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent) then - read(cvalue,*) lambm0 - end if - call NUOPC_CompAttributeGet(gcomp, name='orb_mvelpp', value=cvalue, isPresent=isPresent, rc=rc) - !if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent) then - read(cvalue,*) mvelpp - end if - - call icepack_init_orbit(eccen_in=eccen, mvelpp_in=mvelpp, & - lambm0_in=lambm0, obliqr_in=obliqr) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) -#endif !-------------------------------- ! check that cice internal time is in sync with master clock before timestep update @@ -1175,19 +1021,11 @@ subroutine ModelAdvance(gcomp, rc) ! Unpack import state !-------------------------------- -#ifdef CESMCOUPLED call t_barrierf('cice_run_import_BARRIER',mpi_comm_ice) call t_startf ('cice_run_import') - call ice_timer_start(timer_cplrecv) -#endif - call ice_import(importState, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - -#ifdef CESMCOUPLED - call ice_timer_stop(timer_cplrecv) call t_stopf ('cice_run_import') -#endif ! write Debug output if (debug_import > 0 .and. my_task==master_task) then @@ -1200,29 +1038,17 @@ subroutine ModelAdvance(gcomp, rc) ! Advance cice and timestep update !-------------------------------- -!tcraig if (force_restart_now) then -! call CICE_Run(restart_filename=restart_filename) -! else - call CICE_Run() -! end if + call CICE_Run() !-------------------------------- ! 
Create export state !-------------------------------- -#ifdef CESMCOUPLED call t_barrierf('cice_run_export_BARRIER',mpi_comm_ice) call t_startf ('cice_run_export') - call ice_timer_start(timer_cplsend) -#endif - call ice_export(exportState, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - -#ifdef CESMCOUPLED - call ice_timer_stop(timer_cplsend) call t_stopf ('cice_run_export') -#endif if (debug_export > 0 .and. my_task==master_task) then call State_fldDebug(exportState, flds_scalar_name, 'cice_export:', & @@ -1230,10 +1056,8 @@ subroutine ModelAdvance(gcomp, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end if -#ifdef CESMCOUPLED ! reset shr logging to my original values call shr_file_setLogUnit (shrlogunit) -#endif !-------------------------------- ! stop timers and print timer info @@ -1257,9 +1081,7 @@ subroutine ModelAdvance(gcomp, rc) stop_now = .false. endif -#ifdef CESMCOUPLED call t_stopf ('cice_run_total') -#endif ! Need to stop this at the end of every run phase in a coupled run. call ice_timer_stop(timer_total) @@ -1333,7 +1155,7 @@ subroutine ModelSetRunClock(gcomp, rc) call ESMF_GridCompGet(gcomp, name=name, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call ESMF_LogWrite(subname//'setting alarms for' // trim(name), ESMF_LOGMSG_INFO) + call ESMF_LogWrite(subname//'setting alarms for ' // trim(name), ESMF_LOGMSG_INFO) !---------------- ! Restart alarm @@ -1564,8 +1386,64 @@ subroutine ice_orbital_init(gcomp, clock, logunit, mastertask, rc) file=__FILE__, line=__LINE__) first_time = .false. + end subroutine ice_orbital_init + +#else + + subroutine ice_orbital_init(gcomp, clock, logunit, mastertask, rc) + + ! dummy input/output arguments + type(ESMF_GridComp) :: gcomp + type(ESMF_Clock) , intent(in) :: clock + integer , intent(in) :: logunit + logical , intent(in) :: mastertask + integer , intent(out) :: rc ! output error + + ! local variables + real(dbl_kind) :: eccen, obliqr, lambm0, mvelpp + logical :: isPresent, isSet + character(ESMF_MAXSTR) :: cvalue + logical :: first_time = .true. + character(len=*) , parameter :: subname = "(cice_orbital_init)" + !-------------------------------- + + rc = ESMF_SUCCESS + + if (first_time) then + ! Start with icepack values then update with values defined in configure file if they exist + ! tcx, This should be identical with initialization, why do it again? Get rid of it + call icepack_query_orbit(eccen_out=eccen, mvelpp_out=mvelpp, lambm0_out=lambm0, obliqr_out=obliqr) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + ! call NUOPC_CompAttributeGet(gcomp, name='orb_eccen', value=cvalue, isPresent=isPresent, rc=rc) + ! if (isPresent) then + ! read(cvalue,*) eccen + ! end if + ! call NUOPC_CompAttributeGet(gcomp, name='orb_obliqr', value=cvalue, isPresent=isPresent, rc=rc) + ! if (isPresent) then + ! read(cvalue,*) obliqr + ! end if + ! call NUOPC_CompAttributeGet(gcomp, name='orb_lambm0', value=cvalue, isPresent=isPresent, rc=rc) + ! if (isPresent) then + ! read(cvalue,*) lambm0 + ! end if + ! call NUOPC_CompAttributeGet(gcomp, name='orb_mvelpp', value=cvalue, isPresent=isPresent, rc=rc) + ! if (isPresent) then + ! read(cvalue,*) mvelpp + ! end if + + ! call icepack_init_orbit(eccen_in=eccen, mvelpp_in=mvelpp, lambm0_in=lambm0, obliqr_in=obliqr) + ! call icepack_warnings_flush(nu_diag) + ! if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + ! file=__FILE__, line=__LINE__) + + first_time = .false. 
+ end if end subroutine ice_orbital_init + #endif !=============================================================================== @@ -1593,4 +1471,28 @@ end subroutine ice_cal_ymd2date !=============================================================================== +#ifndef CESMCOUPLED + subroutine shr_file_setLogUnit(nunit) + integer, intent(in) :: nunit + ! do nothing for this stub - its just here to replace + ! having cppdefs in the main program + end subroutine shr_file_setLogUnit + subroutine shr_file_getLogUnit(nunit) + integer, intent(in) :: nunit + ! do nothing for this stub - its just here to replace + ! having cppdefs in the main program + end subroutine shr_file_getLogUnit + + subroutine t_startf(string) + character(len=*) :: string + end subroutine t_startf + subroutine t_stopf(string) + character(len=*) :: string + end subroutine t_stopf + subroutine t_barrierf(string, comm) + character(len=*) :: string + integer:: comm + end subroutine t_barrierf +#endif + end module ice_comp_nuopc diff --git a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 index 083283895..4cceaa9ca 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 @@ -190,19 +190,19 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam call fldlist_add(fldsFrIce_num, fldsFrIce, trim(flds_scalar_name)) ! ice states - call fldlist_add(fldsFrIce_num, fldsFrIce, 'ice_mask' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'ice_fraction' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'sea_ice_temperature' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_ice_volume' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_snow_volume' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_tref' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_qref' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_snowh' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_u10' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_vis_dir_albedo' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_ir_dir_albedo' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_vis_dif_albedo' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_ir_dif_albedo' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'ice_mask' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'ice_fraction' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'sea_ice_surface_temperature' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_ice_volume' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_snow_volume' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_tref' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_qref' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_snowh' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_u10' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_vis_dir_albedo' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_ir_dir_albedo' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_vis_dif_albedo' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_ir_dif_albedo' ) #if (defined NEWCODE) if (send_i2x_per_cat) then call fldlist_add(fldsFrIce_num, fldsFrIce, 'ice_fraction_n', & @@ -908,7 +908,8 @@ subroutine ice_export( exportState, rc ) ! ---- ! 
surface temperature of ice covered portion (degK) - call state_setexport(exportState, 'sea_ice_temperature', input=Tsrf , lmask=tmask, ifrac=ailohi, rc=rc) + !call state_setexport(exportState, 'sea_ice_temperature', input=Tsrf , lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'sea_ice_surface_temperature', input=Tsrf , lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! albedo vis dir From c76233609c7e90d77618bf54fbf65e50604d851b Mon Sep 17 00:00:00 2001 From: David Bailey Date: Thu, 2 Jul 2020 11:36:49 -0600 Subject: [PATCH 13/90] Move the forapps directory --- .../scripts/forapps}/ufs/comp_ice.backend.clean | 0 .../scripts/forapps}/ufs/comp_ice.backend.libcice | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename {forapps => configuration/scripts/forapps}/ufs/comp_ice.backend.clean (100%) rename {forapps => configuration/scripts/forapps}/ufs/comp_ice.backend.libcice (100%) diff --git a/forapps/ufs/comp_ice.backend.clean b/configuration/scripts/forapps/ufs/comp_ice.backend.clean similarity index 100% rename from forapps/ufs/comp_ice.backend.clean rename to configuration/scripts/forapps/ufs/comp_ice.backend.clean diff --git a/forapps/ufs/comp_ice.backend.libcice b/configuration/scripts/forapps/ufs/comp_ice.backend.libcice similarity index 100% rename from forapps/ufs/comp_ice.backend.libcice rename to configuration/scripts/forapps/ufs/comp_ice.backend.libcice From 6bccf71a499b0fa558e75d44c79159ac988f6b3c Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Thu, 2 Jul 2020 13:19:25 -0600 Subject: [PATCH 14/90] remove cesmcoupled ifdefs --- .../infrastructure/ice_read_write.F90 | 30 ++++++++++++------- .../infrastructure/ice_restart_driver.F90 | 8 ----- .../io/io_netcdf/ice_restart.F90 | 3 -- 3 files changed, 19 insertions(+), 22 deletions(-) diff --git a/cicecore/cicedynB/infrastructure/ice_read_write.F90 b/cicecore/cicedynB/infrastructure/ice_read_write.F90 index f497db49b..4fa115ee3 100644 --- a/cicecore/cicedynB/infrastructure/ice_read_write.F90 +++ b/cicecore/cicedynB/infrastructure/ice_read_write.F90 @@ -1106,6 +1106,7 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & integer (kind=int_kind) :: & varid , & ! variable id status ! status output from netcdf routines +! status , & ! status output from netcdf routines ! ndim, nvar, & ! sizes of netcdf file ! id, & ! dimension index ! dimlen ! dimension size @@ -1113,7 +1114,7 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & real (kind=dbl_kind) :: & amin, amax, asum ! min, max values and sum of input array -! character (char_len) :: & +! character (char_len) :: & ! dimname ! dimension name real (kind=dbl_kind), dimension(:,:), allocatable :: & @@ -1279,6 +1280,7 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & n, & ! ncat index varid , & ! variable id status ! status output from netcdf routines +! status , & ! status output from netcdf routines ! ndim, nvar, & ! sizes of netcdf file ! id, & ! dimension index ! dimlen ! size of dimension @@ -1286,7 +1288,7 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & real (kind=dbl_kind) :: & amin, amax, asum ! min, max values and sum of input array -! character (char_len) :: & +! character (char_len) :: & ! dimname ! dimension name real (kind=dbl_kind), dimension(:,:,:), allocatable :: & @@ -1364,7 +1366,7 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & if (my_task==master_task .and. diag) then ! write(nu_diag,*) & -! 'ice_read_nc_xyz, fid= ',fid, ', nrec = ',nrec, & +! 
'ice_read_nc_xyz, fid= ',fid, ', nrec = ',nrec, & ! ', varname = ',trim(varname) ! status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) ! write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar @@ -1835,6 +1837,7 @@ subroutine ice_write_nc_xy(fid, nrec, varid, work, diag, & ! netCDF file diagnostics: integer (kind=int_kind) :: & status ! status output from netcdf routines +! status, & ! status output from netcdf routines ! ndim, nvar, & ! sizes of netcdf file ! id, & ! dimension index ! dimlen ! size of dimension @@ -1844,7 +1847,8 @@ subroutine ice_write_nc_xy(fid, nrec, varid, work, diag, & character (char_len) :: & lvarname ! variable name -! dimname ! dimension name +! lvarname, & ! variable name +! dimname ! dimension name real (kind=dbl_kind), dimension(:,:), allocatable :: & work_g1 @@ -1955,16 +1959,18 @@ subroutine ice_write_nc_xyz(fid, nrec, varid, work, diag, & integer (kind=int_kind) :: & n, & ! ncat index status ! status output from netcdf routines -! ndim, nvar, & ! sizes of netcdf file -! id, & ! dimension index -! dimlen ! size of dimension +! status, & ! status output from netcdf routines +! ndim, nvar, & ! sizes of netcdf file +! id, & ! dimension index +! dimlen ! size of dimension real (kind=dbl_kind) :: & amin, amax, asum ! min, max values and sum of input array character (char_len) :: & lvarname ! variable name -! dimname ! dimension name +! lvarname, & ! variable name +! dimname ! dimension name real (kind=dbl_kind), dimension(:,:,:), allocatable :: & work_g1 @@ -2081,6 +2087,7 @@ subroutine ice_read_global_nc (fid, nrec, varname, work_g, diag) integer (kind=int_kind) :: & varid, & ! netcdf id for field status ! status output from netcdf routines +! status, & ! status output from netcdf routines ! ndim, nvar, & ! sizes of netcdf file ! id, & ! dimension index ! dimlen ! size of dimension @@ -2088,9 +2095,9 @@ subroutine ice_read_global_nc (fid, nrec, varname, work_g, diag) real (kind=dbl_kind) :: & amin, amax, asum ! min, max values and sum of input array -! character (char_len) :: & +! character (char_len) :: & ! dimname ! dimension name -! + #ifdef ORCA_GRID real (kind=dbl_kind), dimension(:,:), allocatable :: & work_g3 @@ -2232,6 +2239,7 @@ subroutine ice_read_nc_uv(fid, nrec, nzlev, varname, work, diag, & integer (kind=int_kind) :: & varid , & ! variable id status ! status output from netcdf routines +! status, & ! status output from netcdf routines ! ndim, nvar, & ! sizes of netcdf file ! id, & ! dimension index ! dimlen ! size of dimension @@ -2239,7 +2247,7 @@ subroutine ice_read_nc_uv(fid, nrec, nzlev, varname, work, diag, & real (kind=dbl_kind) :: & amin, amax, asum ! min, max values and sum of input array -! character (char_len) :: & +! character (char_len) :: & ! dimname ! 
dimension name real (kind=dbl_kind), dimension(:,:), allocatable :: & diff --git a/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 b/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 index d3829b9c4..25bb6f5f8 100644 --- a/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 +++ b/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 @@ -58,9 +58,7 @@ subroutine dumpfile(filename_spec) stressp_1, stressp_2, stressp_3, stressp_4, & stressm_1, stressm_2, stressm_3, stressm_4, & stress12_1, stress12_2, stress12_3, stress12_4 -#ifdef CESMCOUPLED use ice_flux, only: coszen -#endif use ice_state, only: aicen, vicen, vsnon, trcrn, uvel, vvel character(len=char_len_long), intent(in), optional :: filename_spec @@ -132,9 +130,7 @@ subroutine dumpfile(filename_spec) !----------------------------------------------------------------- ! radiation fields !----------------------------------------------------------------- -#ifdef CESMCOUPLED call write_restart_field(nu_dump,0,coszen,'ruf8','coszen',1,diag) -#endif call write_restart_field(nu_dump,0,scale_factor,'ruf8','scale_factor',1,diag) call write_restart_field(nu_dump,0,swvdr,'ruf8','swvdr',1,diag) @@ -209,9 +205,7 @@ subroutine restartfile (ice_ic) stressp_1, stressp_2, stressp_3, stressp_4, & stressm_1, stressm_2, stressm_3, stressm_4, & stress12_1, stress12_2, stress12_3, stress12_4 -#ifdef CESMCOUPLED use ice_flux, only: coszen -#endif use ice_grid, only: tmask, grid_type use ice_state, only: trcr_depend, aice, vice, vsno, trcr, & aice0, aicen, vicen, vsnon, trcrn, aice_init, uvel, vvel, & @@ -310,11 +304,9 @@ subroutine restartfile (ice_ic) if (my_task == master_task) & write(nu_diag,*) 'radiation fields' -#ifdef CESMCOUPLED call read_restart_field(nu_restart,0,coszen,'ruf8', & ! 'coszen',1,diag, field_loc_center, field_type_scalar) 'coszen',1,diag) -#endif call read_restart_field(nu_restart,0,scale_factor,'ruf8', & 'scale_factor',1,diag, field_loc_center, field_type_scalar) call read_restart_field(nu_restart,0,swvdr,'ruf8', & diff --git a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 index d4decf6f7..214fc356b 100644 --- a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 @@ -84,7 +84,6 @@ subroutine init_restart_read(ice_ic) endif endif ! 
use namelist values if use_restart_time = F - write(nu_diag,*) 'Restart read at istep=',istep0,time,time_forc endif call broadcast_scalar(istep0,master_task) @@ -228,9 +227,7 @@ subroutine init_restart_write(filename_spec) call define_rest_field(ncid,'uvel',dims) call define_rest_field(ncid,'vvel',dims) -#ifdef CESMCOUPLED call define_rest_field(ncid,'coszen',dims) -#endif call define_rest_field(ncid,'scale_factor',dims) call define_rest_field(ncid,'swvdr',dims) call define_rest_field(ncid,'swvdf',dims) From 902e8833b3c8c40f0d12fd81b38eb792ca739f0e Mon Sep 17 00:00:00 2001 From: David Bailey Date: Thu, 2 Jul 2020 15:27:55 -0600 Subject: [PATCH 15/90] Fix logging issues for NUOPC --- cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 | 8 ++------ cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 16 +++++++++++----- 2 files changed, 13 insertions(+), 11 deletions(-) diff --git a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 index 16e4216e6..3dcd8fb2f 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 @@ -57,7 +57,7 @@ end subroutine CICE_Initialize ! ! Initialize CICE model. - subroutine cice_init(mpicom_ice) + subroutine cice_init use ice_arrays_column, only: hin_max, c_hi_range, alloc_arrays_column use ice_arrays_column, only: floe_rad_l, floe_rad_c, & @@ -66,7 +66,7 @@ subroutine cice_init(mpicom_ice) use ice_flux_bgc, only: alloc_flux_bgc use ice_calendar, only: dt, dt_dyn, time, istep, istep1, write_ic, & init_calendar, calendar - use ice_communicate, only: init_communicate, my_task, master_task + use ice_communicate, only: my_task, master_task use ice_diagnostics, only: init_diags use ice_domain, only: init_domain_blocks use ice_domain_size, only: ncat, nfsd @@ -91,14 +91,10 @@ subroutine cice_init(mpicom_ice) use drv_forcing, only: sst_sss #endif - integer (kind=int_kind), optional, intent(in) :: & - mpicom_ice ! communicator for sequential ccsm - logical(kind=log_kind) :: tr_aero, tr_zaero, skl_bgc, z_tracers, & tr_iso, tr_fsd, wave_spec character(len=*), parameter :: subname = '(cice_init)' - call init_communicate(mpicom_ice) ! initial setup for message passing call init_fileunits ! unit numbers call icepack_configure() ! initialize icepack diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index e4c2a3802..81fb1a308 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -62,6 +62,7 @@ module ice_comp_nuopc use perf_mod , only : t_startf, t_stopf, t_barrierf #endif use ice_timers + use ice_communicate, only: init_communicate implicit none private @@ -328,6 +329,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) integer(int_kind) :: ktherm character(*), parameter :: F00 = "('(ice_comp_nuopc) ',2a,1x,d21.14)" character(len=*), parameter :: subname=trim(modName)//':(InitializeRealize) ' + logical :: mastertask !-------------------------------- rc = ESMF_SUCCESS @@ -406,7 +408,9 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) ! Note that these values are obtained in a call to init_orbit in ice_shortwave.F90 ! if CESMCOUPLED is not defined #ifdef CESMCOUPLED - call ice_orbital_init(gcomp, clock, nu_diag, my_task==master_task, rc) + mastertask = .false. + if (my_task == master_task) mastertask = .true. + call ice_orbital_init(gcomp, clock, nu_diag, mastertask, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return #else ! 
Start with icepack values then update with values defined in configure file if they exist @@ -593,11 +597,13 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) ! Note that sets the nu_diag module variable in ice_fileunits ! Set the nu_diag_set flag so it's not reset later -#ifdef CESMCOUPLED - call set_component_logging(gcomp, my_task==master_task, nu_diag, shrlogunit, rc) + call init_communicate(lmpicom) ! initial setup for message passing + + mastertask = .false. + if (my_task == master_task) mastertask = .true. + call set_component_logging(gcomp, mastertask, nu_diag, shrlogunit, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return nu_diag_set = .true. -#endif #ifdef CESMCOUPLED call shr_file_setLogUnit (shrlogunit) @@ -613,7 +619,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) #ifdef CESMCOUPLED call t_startf ('cice_init') #endif - call cice_init( lmpicom ) + call cice_init #ifdef CESMCOUPLED call t_stopf ('cice_init') #endif From b4afd2e55df3a66db133775f6ceb7bd412317fab Mon Sep 17 00:00:00 2001 From: Mariana Vertenstein Date: Sat, 4 Jul 2020 11:59:19 -0600 Subject: [PATCH 16/90] removal of many cpp-ifdefs --- cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 | 84 ++--- cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 | 50 +-- .../drivers/nuopc/cmeps/cice_wrapper_mod.F90 | 36 +++ .../drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 287 ++++++++---------- .../drivers/nuopc/cmeps/ice_import_export.F90 | 97 +++--- .../nuopc/cmeps/ice_prescribed_mod.F90 | 11 + 6 files changed, 249 insertions(+), 316 deletions(-) create mode 100644 cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 diff --git a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 index 16e4216e6..2ae6f87fe 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 @@ -25,40 +25,21 @@ module CICE_InitMod implicit none private - public :: CICE_Initialize, cice_init + public :: cice_init !======================================================================= contains -!======================================================================= - -! Initialize the basic state, grid and all necessary parameters for -! running the CICE model. Return the initial state in routine -! export state. -! Note: This initialization driver is designed for standalone and -! CESM-coupled applications. For other -! applications (e.g., standalone CAM), this driver would be -! replaced by a different driver that calls subroutine cice_init, -! where most of the work is done. - - subroutine CICE_Initialize - - character(len=*), parameter :: subname='(CICE_Initialize)' - !-------------------------------------------------------------------- - ! model initialization - !-------------------------------------------------------------------- - - call cice_init - - end subroutine CICE_Initialize - !======================================================================= ! ! Initialize CICE model. subroutine cice_init(mpicom_ice) + ! Initialize the basic state, grid and all necessary parameters for + ! running the CICE model. 
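+ !
+ ! Under the NUOPC/CMEPS cap the MPI communicator is set up by the
+ ! driver: ice_comp_nuopc calls init_communicate(lmpicom) from
+ ! InitializeRealize before this routine runs, so a sketch of the
+ ! expected driver-side call order is:
+ !   call init_communicate(lmpicom)  ! message passing setup (driver side)
+ !   call cice_init()                ! then the initialization below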
+ use ice_arrays_column, only: hin_max, c_hi_range, alloc_arrays_column use ice_arrays_column, only: floe_rad_l, floe_rad_c, & floe_binwidth, c_fsd_range @@ -66,7 +47,7 @@ subroutine cice_init(mpicom_ice) use ice_flux_bgc, only: alloc_flux_bgc use ice_calendar, only: dt, dt_dyn, time, istep, istep1, write_ic, & init_calendar, calendar - use ice_communicate, only: init_communicate, my_task, master_task + use ice_communicate, only: my_task, master_task use ice_diagnostics, only: init_diags use ice_domain, only: init_domain_blocks use ice_domain_size, only: ncat, nfsd @@ -74,8 +55,7 @@ subroutine cice_init(mpicom_ice) use ice_dyn_shared, only: kdyn, init_evp, alloc_dyn_shared use ice_flux, only: init_coupler_flux, init_history_therm, & init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux - use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & - get_forcing_atmo, get_forcing_ocn, get_wave_spec + use ice_forcing, only: init_forcing_ocn use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & faero_default, faero_optics, alloc_forcing_bgc, fiso_default use ice_grid, only: init_grid1, init_grid2, alloc_grid @@ -87,9 +67,6 @@ subroutine cice_init(mpicom_ice) use ice_restoring, only: ice_HaloRestore_init use ice_timers, only: timer_total, init_ice_timers, ice_timer_start use ice_transport_driver, only: init_transport -#ifdef popcice - use drv_forcing, only: sst_sss -#endif integer (kind=int_kind), optional, intent(in) :: & mpicom_ice ! communicator for sequential ccsm @@ -98,7 +75,6 @@ subroutine cice_init(mpicom_ice) tr_iso, tr_fsd, wave_spec character(len=*), parameter :: subname = '(cice_init)' - call init_communicate(mpicom_ice) ! initial setup for message passing call init_fileunits ! unit numbers call icepack_configure() ! initialize icepack @@ -133,10 +109,6 @@ subroutine cice_init(mpicom_ice) endif call init_coupler_flux ! initialize fluxes exchanged with coupler - -#ifdef popcice - call sst_sss ! POP data for CICE initialization -#endif call init_thermo_vertical ! initialize vertical thermodynamics call icepack_init_itd(ncat=ncat, hin_max=hin_max) ! ice thickness distribution @@ -162,7 +134,9 @@ subroutine cice_init(mpicom_ice) call calendar(time) ! determine the initial date + ! TODO: - why is this being called when you are using CMEPS? call init_forcing_ocn(dt) ! initialize sss and sst from data + call init_state ! initialize the ice state call init_transport ! initialize horizontal transport call ice_HaloRestore_init ! restored boundary conditions @@ -186,51 +160,31 @@ subroutine cice_init(mpicom_ice) if (icepack_warnings_aborted()) call abort_ice(trim(subname), & file=__FILE__,line= __LINE__) - if (tr_aero .or. tr_zaero) call faero_optics !initialize aerosol optical - !property tables + if (tr_aero .or. tr_zaero) then + call faero_optics !initialize aerosol optical property tables + end if ! Initialize shortwave components using swdn from previous timestep ! if restarting. These components will be scaled to current forcing ! in prep_radiation. - if (trim(runtype) == 'continue' .or. restart) & - call init_shortwave ! initialize radiative transfer - -! istep = istep + 1 ! update time step counters -! istep1 = istep1 + 1 -! time = time + dt ! determine the time and date -! call calendar(time) ! at the end of the first timestep - !-------------------------------------------------------------------- - ! coupler communication or forcing data initialization - !-------------------------------------------------------------------- - -#ifndef coupled - call init_forcing_atmo ! 
initialize atmospheric forcing (standalone) + if (trim(runtype) == 'continue' .or. restart) then + call init_shortwave ! initialize radiative transfer + end if -#ifndef CESMCOUPLED - if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice - call get_forcing_atmo ! atmospheric forcing from data - call get_forcing_ocn(dt) ! ocean forcing from data + !-------------------------------------------------------------------- + ! coupler communication or forcing data initialization + !-------------------------------------------------------------------- - ! isotopes - if (tr_iso) call fiso_default ! default values - ! aerosols - ! if (tr_aero) call faero_data ! data file - ! if (tr_zaero) call fzaero_data ! data file (gx1) - if (tr_aero .or. tr_zaero) call faero_default ! default values - if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry -#endif -#endif if (z_tracers) call get_atm_bgc ! biogeochemistry - if (runtype == 'initial' .and. .not. restart) & + if (runtype == 'initial' .and. .not. restart) then call init_shortwave ! initialize radiative transfer using current swdn + end if call init_flux_atm ! initialize atmosphere fluxes sent to coupler call init_flux_ocn ! initialize ocean fluxes sent to coupler -! if (write_ic) call accum_hist(dt) ! write initial conditions - end subroutine cice_init !======================================================================= diff --git a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 index aed00a9a0..486c36dcc 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 @@ -15,9 +15,7 @@ module CICE_RunMod use ice_kinds_mod -#ifdef CESMCOUPLED - use perf_mod, only : t_startf, t_stopf, t_barrierf -#endif + use cice_wrapper_mod, only : t_startf, t_stopf, t_barrierf use ice_fileunits, only: nu_diag use ice_arrays_column, only: oceanmixed_ice use ice_constants, only: c0, c1 @@ -79,48 +77,22 @@ subroutine CICE_Run ! timestep loop !-------------------------------------------------------------------- -! timeLoop: do - -! call ice_step - - istep = istep + 1 ! update time step counters - istep1 = istep1 + 1 - time = time + dt ! determine the time and date - -! call calendar(time) ! at the end of the timestep + istep = istep + 1 ! update time step counters + istep1 = istep1 + 1 + time = time + dt ! determine the time and date - call ice_timer_start(timer_couple) ! atm/ocn coupling - -#ifndef coupled -#ifndef CESMCOUPLED -! for now, wave_spectrum is constant in time -! if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice - call get_forcing_atmo ! atmospheric forcing from data - call get_forcing_ocn(dt) ! ocean forcing from data - - ! isotopes - if (tr_iso) call fiso_default ! default values - ! aerosols - ! if (tr_aero) call faero_data ! data file - ! if (tr_zaero) call fzaero_data ! data file (gx1) - if (tr_aero .or. tr_zaero) call faero_default ! default values - - if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry -#endif -#endif - if (z_tracers) call get_atm_bgc ! biogeochemistry + call ice_timer_start(timer_couple) ! atm/ocn coupling - call init_flux_atm ! Initialize atmosphere fluxes sent to coupler - call init_flux_ocn ! initialize ocean fluxes sent to coupler + if (z_tracers) call get_atm_bgc ! biogeochemistry - call calendar(time) ! at the end of the timestep + call init_flux_atm ! Initialize atmosphere fluxes sent to coupler + call init_flux_ocn ! 
initialize ocean fluxes sent to coupler - call ice_timer_stop(timer_couple) ! atm/ocn coupling + call calendar(time) ! at the end of the timestep - call ice_step + call ice_timer_stop(timer_couple) ! atm/ocn coupling -! if (stop_now >= 1) exit timeLoop -! enddo timeLoop + call ice_step !-------------------------------------------------------------------- ! end of timestep loop diff --git a/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 b/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 new file mode 100644 index 000000000..e350e9a52 --- /dev/null +++ b/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 @@ -0,0 +1,36 @@ +module cice_wrapper_mod + +#ifdef CESMCOUPLED + use perf_mod, only : t_startf, t_stopf, t_barrierf +#endif + + +contains + +#ifndef CESMCOUPLED + ! These are just stub routines put in place to remove + + subroutine shr_file_setLogUnit(nunit) + integer, intent(in) :: nunit + ! do nothing for this stub - its just here to replace + ! having cppdefs in the main program + end subroutine shr_file_setLogUnit + subroutine shr_file_getLogUnit(nunit) + integer, intent(in) :: nunit + ! do nothing for this stub - its just here to replace + ! having cppdefs in the main program + end subroutine shr_file_getLogUnit + + subroutine t_startf(string) + character(len=*) :: string + end subroutine t_startf + subroutine t_stopf(string) + character(len=*) :: string + end subroutine t_stopf + subroutine t_barrierf(string, comm) + character(len=*) :: string + integer:: comm + end subroutine t_barrierf +#endif + +end module cice_wrapper_mod diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index fca4974b7..c3947cb98 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -27,7 +27,7 @@ module ice_comp_nuopc use ice_blocks , only : nblocks_tot, get_block_parameter use ice_distribution , only : ice_distributiongetblockloc use ice_grid , only : tlon, tlat, hm, tarea, ULON, ULAT - use ice_communicate , only : my_task, master_task, mpi_comm_ice + use ice_communicate , only : init_communicate, my_task, master_task, mpi_comm_ice use ice_calendar , only : force_restart_now, write_ic use ice_calendar , only : idate, mday, time, month, daycal, time2sec, year_init use ice_calendar , only : sec, dt, calendar, calendar_type, nextsw_cday, istep @@ -36,19 +36,19 @@ module ice_comp_nuopc use ice_fileunits , only : nu_diag, nu_diag_set, inst_index, inst_name, inst_suffix, release_all_fileunits, flush_fileunit use ice_restart_shared , only : runid, runtype, restart_dir, restart_file use ice_history , only : accum_hist - use CICE_InitMod , only : CICE_Init - use CICE_RunMod , only : CICE_Run + use CICE_InitMod , only : cice_init + use CICE_RunMod , only : cice_run use ice_exit , only : abort_ice use icepack_intfc , only : icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc , only : icepack_init_orbit, icepack_init_parameters, icepack_query_orbit use icepack_intfc , only : icepack_query_tracer_flags, icepack_query_parameters use ice_timers + use cice_wrapper_mod , only : t_startf, t_stopf, t_barrierf + use cice_wrapper_mod , only : shr_file_getlogunit, shr_file_setlogunit #ifdef CESMCOUPLED - use shr_file_mod , only : shr_file_getlogunit, shr_file_setlogunit - use shr_orb_mod , only : shr_orb_decl, shr_orb_params, SHR_ORB_UNDEF_REAL, SHR_ORB_UNDEF_INT use shr_const_mod + use shr_orb_mod , only : shr_orb_decl, shr_orb_params, SHR_ORB_UNDEF_REAL, SHR_ORB_UNDEF_INT use 
ice_prescribed_mod , only : ice_prescribed_init - use perf_mod , only : t_startf, t_stopf, t_barrierf #endif implicit none @@ -179,7 +179,7 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) ! Local variables character(len=char_len_long) :: cvalue character(len=char_len_long) :: logmsg - logical :: isPresent, isSet + logical :: isPresent, isSet character(len=*), parameter :: subname=trim(modName)//':(InitializeAdvertise) ' !-------------------------------- @@ -252,69 +252,69 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) integer, intent(out) :: rc ! Local variables - real(dbl_kind) :: eccen, obliqr, lambm0, mvelpp - type(ESMF_DistGrid) :: distGrid - type(ESMF_Mesh) :: Emesh, EmeshTemp - integer :: spatialDim - integer :: numOwnedElements - real(dbl_kind), pointer :: ownedElemCoords(:) - real(dbl_kind), pointer :: lat(:), latMesh(:) - real(dbl_kind), pointer :: lon(:), lonMesh(:) - integer , allocatable :: gindex_ice(:) - integer , allocatable :: gindex_elim(:) - integer , allocatable :: gindex(:) - integer :: globalID - character(ESMF_MAXSTR) :: cvalue - character(len=char_len) :: tfrz_option - character(ESMF_MAXSTR) :: convCIM, purpComp - type(ESMF_VM) :: vm - type(ESMF_Time) :: currTime ! Current time - type(ESMF_Time) :: startTime ! Start time - type(ESMF_Time) :: stopTime ! Stop time - type(ESMF_Time) :: refTime ! Ref time - type(ESMF_TimeInterval) :: timeStep ! Model timestep - type(ESMF_Calendar) :: esmf_calendar ! esmf calendar - type(ESMF_CalKind_Flag) :: esmf_caltype ! esmf calendar type - integer :: start_ymd ! Start date (YYYYMMDD) - integer :: start_tod ! start time of day (s) - integer :: curr_ymd ! Current date (YYYYMMDD) - integer :: curr_tod ! Current time of day (s) - integer :: stop_ymd ! stop date (YYYYMMDD) - integer :: stop_tod ! stop time of day (sec) - integer :: ref_ymd ! Reference date (YYYYMMDD) - integer :: ref_tod ! reference time of day (s) - integer :: yy,mm,dd ! Temporaries for time query - integer :: iyear ! yyyy - integer :: dtime ! time step - integer :: lmpicom - integer :: shrlogunit ! original log unit - character(len=char_len) :: starttype ! infodata start type - integer :: lsize ! local size of coupling array - character(len=512) :: diro - character(len=512) :: logfile - logical :: isPresent - integer :: localPet - integer :: n,c,g,i,j,m ! indices - integer :: iblk, jblk ! indices - integer :: ig, jg ! indices - integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain - type(block) :: this_block ! block information for current block - integer :: compid ! component id + real(dbl_kind) :: eccen, obliqr, lambm0, mvelpp + type(ESMF_DistGrid) :: distGrid + type(ESMF_Mesh) :: Emesh, EmeshTemp + integer :: spatialDim + integer :: numOwnedElements + real(dbl_kind), pointer :: ownedElemCoords(:) + real(dbl_kind), pointer :: lat(:), latMesh(:) + real(dbl_kind), pointer :: lon(:), lonMesh(:) + integer , allocatable :: gindex_ice(:) + integer , allocatable :: gindex_elim(:) + integer , allocatable :: gindex(:) + integer :: globalID + character(ESMF_MAXSTR) :: cvalue + character(len=char_len) :: tfrz_option + character(ESMF_MAXSTR) :: convCIM, purpComp + type(ESMF_VM) :: vm + type(ESMF_Time) :: currTime ! Current time + type(ESMF_Time) :: startTime ! Start time + type(ESMF_Time) :: stopTime ! Stop time + type(ESMF_Time) :: refTime ! Ref time + type(ESMF_TimeInterval) :: timeStep ! Model timestep + type(ESMF_Calendar) :: esmf_calendar ! esmf calendar + type(ESMF_CalKind_Flag) :: esmf_caltype ! 
esmf calendar type + integer :: start_ymd ! Start date (YYYYMMDD) + integer :: start_tod ! start time of day (s) + integer :: curr_ymd ! Current date (YYYYMMDD) + integer :: curr_tod ! Current time of day (s) + integer :: stop_ymd ! stop date (YYYYMMDD) + integer :: stop_tod ! stop time of day (sec) + integer :: ref_ymd ! Reference date (YYYYMMDD) + integer :: ref_tod ! reference time of day (s) + integer :: yy,mm,dd ! Temporaries for time query + integer :: iyear ! yyyy + integer :: dtime ! time step + integer :: lmpicom + integer :: shrlogunit ! original log unit + character(len=char_len) :: starttype ! infodata start type + integer :: lsize ! local size of coupling array + logical :: isPresent + logical :: isSet + integer :: localPet + integer :: n,c,g,i,j,m ! indices + integer :: iblk, jblk ! indices + integer :: ig, jg ! indices + integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + type(block) :: this_block ! block information for current block + integer :: compid ! component id character(len=char_len_long) :: tempc1,tempc2 - real(dbl_kind) :: diff_lon - integer :: npes - integer :: num_elim_global - integer :: num_elim_local - integer :: num_elim - integer :: num_ice - integer :: num_elim_gcells ! local number of eliminated gridcells - integer :: num_elim_blocks ! local number of eliminated blocks - integer :: num_total_blocks - integer :: my_elim_start, my_elim_end - real(dbl_kind) :: rad_to_deg - integer(int_kind) :: ktherm - character(*), parameter :: F00 = "('(ice_comp_nuopc) ',2a,1x,d21.14)" - character(len=*), parameter :: subname=trim(modName)//':(InitializeRealize) ' + real(dbl_kind) :: diff_lon + integer :: npes + integer :: num_elim_global + integer :: num_elim_local + integer :: num_elim + integer :: num_ice + integer :: num_elim_gcells ! local number of eliminated gridcells + integer :: num_elim_blocks ! local number of eliminated blocks + integer :: num_total_blocks + integer :: my_elim_start, my_elim_end + real(dbl_kind) :: rad_to_deg + integer(int_kind) :: ktherm + character(len=char_len_long) :: diag_filename = 'unset' + character(len=*), parameter :: F00 = "('(ice_comp_nuopc) ',2a,1x,d21.14)" + character(len=*), parameter :: subname=trim(modName)//':(InitializeRealize) ' !-------------------------------- rc = ESMF_SUCCESS @@ -378,6 +378,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) pi_in = SHR_CONST_PI, & snowpatch_in = 0.005_dbl_kind, & dragio_in = 0.00962_dbl_kind) + call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -406,12 +407,8 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) call abort_ice( subname//' ERROR: unknown starttype' ) end if - ! Note that in the mct version the atm was initialized first so that nextsw_cday could be passed to the other - ! components - this assumed that cam or datm was ALWAYS initialized first. - ! In the nuopc version it will be easier to assume that on startup - nextsw_cday is just the current time - + ! We assume here that on startup - nextsw_cday is just the current time ! TOOD (mvertens, 2019-03-21): need to get the perpetual run working - if (trim(runtype) /= 'initial') then ! Set nextsw_cday to -1 (this will skip an orbital calculation on initialization nextsw_cday = -1.0_dbl_kind @@ -425,34 +422,33 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) runtype = 'initial' ! 
determined from the namelist in ice_init if CESMCOUPLED is not defined end if - single_column = .false. - ! Determine single column info - only valid for cesm - !call NUOPC_CompAttributeGet(gcomp, name='single_column', value=cvalue, isPresent=isPresent, rc=rc) - !if (ChkErr(rc,__LINE__,u_FILE_u)) return - !if (isPresent) then - ! read(cvalue,*) single_column - !end if - - if (single_column) then - ! Must have these attributes present - call NUOPC_CompAttributeGet(gcomp, name='scmlon', value=cvalue, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - read(cvalue,*) scmlon - call NUOPC_CompAttributeGet(gcomp, name='scmlat', value=cvalue, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - read(cvalue,*) scmlat + ! Determine if single column + call NUOPC_CompAttributeGet(gcomp, name='single_column', value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) then + read(cvalue,*) single_column + if (single_column) then + call NUOPC_CompAttributeGet(gcomp, name='scmlon', value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scmlon + call NUOPC_CompAttributeGet(gcomp, name='scmlat', value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scmlat + end if + else + single_column = .false. end if ! Determine runid - call NUOPC_CompAttributeGet(gcomp, name='case_name', value=cvalue, isPresent=isPresent, rc=rc) - if (isPresent) then + call NUOPC_CompAttributeGet(gcomp, name='case_name', value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) + if (isPresent .and. isSet) then read(cvalue,*) runid else - runid = 'unknown' ! read in from the namelist in ice_init.F90 if CESMCOUPLED is not defined + ! read in from the namelist in ice_init.F90 if this is not an attribute passed from the driver + runid = 'unknown' end if ! Get clock information before call to cice_init - call ESMF_ClockGet( clock, & currTime=currTime, startTime=startTime, stopTime=stopTime, refTime=RefTime, & timeStep=timeStep, rc=rc) @@ -488,22 +484,35 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) call abort_ice( subname//'ERROR:: bad calendar for ESMF' ) end if + !---------------------------------------------------------------------------- + ! Initialize cice communicators + !---------------------------------------------------------------------------- + + call init_communicate(lmpicom) ! initial setup for message passing + !---------------------------------------------------------------------------- ! Set cice logging !---------------------------------------------------------------------------- + ! Note - this must be done AFTER the communicators are set ! Note that sets the nu_diag module variable in ice_fileunits ! Set the nu_diag_set flag so it's not reset later -#ifdef CESMCOUPLED if (my_task == master_task) then - call NUOPC_CompAttributeGet(gcomp, name="diro", value=diro, rc=rc) + call NUOPC_CompAttributeGet(gcomp, name="diro", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) if (chkerr(rc,__LINE__,u_FILE_u)) return - call NUOPC_CompAttributeGet(gcomp, name="logfile", value=logfile, rc=rc) + if (isPresent .and. isSet) then + diag_filename = trim(cvalue) + end if + call NUOPC_CompAttributeGet(gcomp, name="logfile", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) if (chkerr(rc,__LINE__,u_FILE_u)) return - open(newunit=nu_diag, file=trim(diro)//"/"//trim(logfile)) + if (isPresent .and. 
isSet) then + diag_filename = trim(diag_filename) // '/' // trim(cvalue) + end if + if (trim(diag_filename) /= 'unset') then + open(newunit=nu_diag, file=trim(diag_filename)) + nu_diag_set = .true. + end if end if - nu_diag_set = .true. -#endif !---------------------------------------------------------------------------- ! Initialize cice @@ -513,7 +522,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) ! including master_task and my_task call t_startf ('cice_init') - call cice_init( lmpicom ) + call cice_init() call t_stopf ('cice_init') !---------------------------------------------------------------------------- @@ -819,18 +828,18 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) flds_scalar_name=flds_scalar_name, flds_scalar_num=flds_scalar_num, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return -#ifdef CESMCOUPLED !----------------------------------------------------------------- ! Prescribed ice initialization - first get compid !----------------------------------------------------------------- - call NUOPC_CompAttributeGet(gcomp, name='MCTID', value=cvalue, rc=rc) + call NUOPC_CompAttributeGet(gcomp, name='MCTID', value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - read(cvalue,*) compid ! convert from string to integer - - ! Having this if-defd means that MCT does not need to be build in a NEMS configuration + if (isPresent and isSet) then + read(cvalue,*) compid ! convert from string to integer + else + compid = 0 + end if call ice_prescribed_init(lmpicom, compid, gindex_ice) -#endif !----------------------------------------------------------------- ! Create cice export state @@ -847,7 +856,6 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! TODO (mvertens, 2018-12-21): fill in iceberg_prognostic as .false. - if (debug_export > 0 .and. my_task==master_task) then call State_fldDebug(exportState, flds_scalar_name, 'cice_export:', & idate, sec, nu_diag, rc=rc) @@ -1267,14 +1275,14 @@ subroutine ice_orbital_init(gcomp, clock, logunit, mastertask, rc) integer , intent(out) :: rc ! output error ! local variables - real(dbl_kind) :: eccen, obliqr, lambm0, mvelpp + real(dbl_kind) :: eccen, obliqr, lambm0, mvelpp character(len=char_len_long) :: msgstr ! temporary character(len=char_len_long) :: cvalue ! temporary - type(ESMF_Time) :: CurrTime ! current time - integer :: year ! model year at current time - integer :: orb_year ! orbital year for current orbital computation - logical :: lprint - logical :: first_time = .true. + type(ESMF_Time) :: CurrTime ! current time + integer :: year ! model year at current time + integer :: orb_year ! orbital year for current orbital computation + logical :: lprint + logical :: first_time = .true. character(len=*) , parameter :: subname = "(cice_orbital_init)" !------------------------------------------------------------------------------- @@ -1388,7 +1396,7 @@ subroutine ice_orbital_init(gcomp, clock, logunit, mastertask, rc) first_time = .false. end subroutine ice_orbital_init -#else +#else subroutine ice_orbital_init(gcomp, clock, logunit, mastertask, rc) @@ -1401,12 +1409,10 @@ subroutine ice_orbital_init(gcomp, clock, logunit, mastertask, rc) ! local variables real(dbl_kind) :: eccen, obliqr, lambm0, mvelpp - logical :: isPresent, isSet - character(ESMF_MAXSTR) :: cvalue logical :: first_time = .true. 
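     ! Note: the inline initialization gives first_time an implicit SAVE
     ! attribute (standard Fortran), so it persists across calls and the
     ! guard in the body runs the icepack orbital query exactly once:
     !   if (first_time) then
     !      call icepack_query_orbit(eccen_out=eccen, mvelpp_out=mvelpp, &
     !           lambm0_out=lambm0, obliqr_out=obliqr)
     !      first_time = .false.
     !   end if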
character(len=*) , parameter :: subname = "(cice_orbital_init)" !-------------------------------- - + rc = ESMF_SUCCESS if (first_time) then @@ -1417,28 +1423,6 @@ subroutine ice_orbital_init(gcomp, clock, logunit, mastertask, rc) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - ! call NUOPC_CompAttributeGet(gcomp, name='orb_eccen', value=cvalue, isPresent=isPresent, rc=rc) - ! if (isPresent) then - ! read(cvalue,*) eccen - ! end if - ! call NUOPC_CompAttributeGet(gcomp, name='orb_obliqr', value=cvalue, isPresent=isPresent, rc=rc) - ! if (isPresent) then - ! read(cvalue,*) obliqr - ! end if - ! call NUOPC_CompAttributeGet(gcomp, name='orb_lambm0', value=cvalue, isPresent=isPresent, rc=rc) - ! if (isPresent) then - ! read(cvalue,*) lambm0 - ! end if - ! call NUOPC_CompAttributeGet(gcomp, name='orb_mvelpp', value=cvalue, isPresent=isPresent, rc=rc) - ! if (isPresent) then - ! read(cvalue,*) mvelpp - ! end if - - ! call icepack_init_orbit(eccen_in=eccen, mvelpp_in=mvelpp, lambm0_in=lambm0, obliqr_in=obliqr) - ! call icepack_warnings_flush(nu_diag) - ! if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - ! file=__FILE__, line=__LINE__) - first_time = .false. end if @@ -1471,28 +1455,5 @@ end subroutine ice_cal_ymd2date !=============================================================================== -#ifndef CESMCOUPLED - subroutine shr_file_setLogUnit(nunit) - integer, intent(in) :: nunit - ! do nothing for this stub - its just here to replace - ! having cppdefs in the main program - end subroutine shr_file_setLogUnit - subroutine shr_file_getLogUnit(nunit) - integer, intent(in) :: nunit - ! do nothing for this stub - its just here to replace - ! having cppdefs in the main program - end subroutine shr_file_getLogUnit - - subroutine t_startf(string) - character(len=*) :: string - end subroutine t_startf - subroutine t_stopf(string) - character(len=*) :: string - end subroutine t_stopf - subroutine t_barrierf(string, comm) - character(len=*) :: string - integer:: comm - end subroutine t_barrierf -#endif end module ice_comp_nuopc diff --git a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 index 4cceaa9ca..da022ddcf 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 @@ -35,9 +35,9 @@ module ice_import_export use icepack_intfc , only : icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc , only : icepack_query_parameters, icepack_query_tracer_flags use icepack_intfc , only : icepack_liquidus_temperature + use ice_wrapper_mod , only : t_startf, t_stopf, t_barrierf #ifdef CESMCOUPLED use shr_frz_mod , only : shr_frz_freezetemp - use perf_mod , only : t_startf, t_stopf, t_barrierf #endif implicit none @@ -105,30 +105,35 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam integer , intent(out) :: rc ! local variables - integer :: n + integer :: n character(char_len) :: stdname character(char_len) :: cvalue - logical :: flds_wiso ! use case - logical :: flds_i2o_per_cat ! .true. => select per ice thickness category + logical :: flds_wiso ! use case + logical :: flds_i2o_per_cat ! .true. 
=> select per ice thickness category + logical :: isPresent, isSet character(len=*), parameter :: subname='(ice_import_export:ice_advertise_fields)' !------------------------------------------------------------------------------- rc = ESMF_SUCCESS if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) + ! Determine if the following attributes are sent by the driver and if so read them in flds_wiso = .false. - flds_i2o_per_cat = .false. -#ifdef CESMCOUPLED - call NUOPC_CompAttributeGet(gcomp, name='flds_wiso', value=cvalue, rc=rc) + call NUOPC_CompAttributeGet(gcomp, name='flds_wiso', value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - read(cvalue,*) flds_wiso - call ESMF_LogWrite('flds_wiso = '// trim(cvalue), ESMF_LOGMSG_INFO) + if (isPresent .and. isSet) then + read(cvalue,*) flds_wiso + call ESMF_LogWrite('flds_wiso = '// trim(cvalue), ESMF_LOGMSG_INFO) + end if + #if (defined NEWCODE) - call NUOPC_CompAttributeGet(gcomp, name='flds_i2o_per_cat', value=cvalue, rc=rc) + flds_i2o_per_cat = .false. + call NUOPC_CompAttributeGet(gcomp, name='flds_i2o_per_cat', value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - read(cvalue,*) send_i2x_per_cat - call ESMF_LogWrite('flds_i2o_per_cat = '// trim(cvalue), ESMF_LOGMSG_INFO) -#endif + if (isPresent .and. isSet) then + read(cvalue,*) send_i2x_per_cat + call ESMF_LogWrite('flds_i2o_per_cat = '// trim(cvalue), ESMF_LOGMSG_INFO) + end if #endif !----------------- @@ -166,16 +171,14 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam call fldlist_add(fldsToIce_num, fldsToIce, 'Sa_ptem' ) !cesm call fldlist_add(fldsToIce_num, fldsToIce, 'air_density_height_lowest' ) !cesm -#ifdef CESMCOUPLED + ! the folloing are advertised but might not be connected if they are not present + ! in the cmeps esmFldsExchange_xxx_mod.F90 that is model specific ! from atm - black carbon deposition fluxes (3) call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_bcph', ungridded_lbound=1, ungridded_ubound=3) - ! from atm - wet dust deposition frluxes (4 sizes) call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_dstwet', ungridded_lbound=1, ungridded_ubound=4) - ! from - atm dry dust deposition frluxes (4 sizes) call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_dstdry', ungridded_lbound=1, ungridded_ubound=4) -#endif do n = 1,fldsToIce_num call NUOPC_Advertise(importState, standardName=fldsToIce(n)%stdname, & @@ -203,7 +206,10 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_ir_dir_albedo' ) call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_vis_dif_albedo' ) call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_ir_dif_albedo' ) + #if (defined NEWCODE) + ! the following are advertised but might not be connected if they are not present + ! 
in the cmeps esmFldsExchange_xxx_mod.F90 that is model specific if (send_i2x_per_cat) then call fldlist_add(fldsFrIce_num, fldsFrIce, 'ice_fraction_n', & ungridded_lbound=1, ungridded_ubound=ncat) @@ -226,6 +232,7 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_sw_pen_to_ocn_vis_dif_flx' ) call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_sw_pen_to_ocn_ir_dir_flx' ) call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_sw_pen_to_ocn_ir_dif_flx' ) + #if (defined NEWCODE) if (send_i2x_per_cat) then call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_sw_pen_to_ocn_ifrac_n', & @@ -236,11 +243,13 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam call fldlist_add(fldsFrIce_num , fldsFrIce, 'mean_salt_rate' ) call fldlist_add(fldsFrIce_num , fldsFrIce, 'stress_on_ocn_ice_zonal' ) call fldlist_add(fldsFrIce_num , fldsFrIce, 'stress_on_ocn_ice_merid' ) -#ifdef CESMCOUPLED + + ! the following are advertised but might not be connected if they are not present + ! in the cmeps esmFldsExchange_xxx_mod.F90 that is model specific call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_bcpho' ) call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_bcphi' ) call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_flxdst' ) -#endif + if (flds_wiso) then call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_fresh_water_to_ocean_rate_wiso', & ungridded_lbound=1, ungridded_ubound=3) @@ -446,13 +455,9 @@ subroutine ice_import( importState, rc ) ! perform a halo update if (.not.prescribed_ice) then -#ifdef CESMCOUPLED call t_startf ('cice_imp_halo') -#endif call ice_HaloUpdate(aflds, halo_info, field_loc_center, field_type_scalar) -#ifdef CESMCOUPLED call t_stopf ('cice_imp_halo') -#endif endif ! now fill in the ice internal data types @@ -537,13 +542,9 @@ subroutine ice_import( importState, rc ) if (.not.prescribed_ice) then -#ifdef CESMCOUPLED call t_startf ('cice_imp_halo') -#endif call ice_HaloUpdate(aflds, halo_info, field_loc_center, field_type_vector) -#ifdef CESMCOUPLED call t_stopf ('cice_imp_halo') -#endif endif !$OMP PARALLEL DO PRIVATE(iblk,i,j) @@ -656,9 +657,8 @@ subroutine ice_import( importState, rc ) ! interpolate across the pole) ! use ANGLET which is on the T grid ! -#ifdef CESMCOUPLED call t_startf ('cice_imp_ocn') -#endif + !$OMP PARALLEL DO PRIVATE(iblk,i,j,workx,worky) do iblk = 1, nblocks @@ -667,14 +667,16 @@ subroutine ice_import( importState, rc ) ! ocean workx = uocn (i,j,iblk) ! currents, m/s worky = vocn (i,j,iblk) - uocn(i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) & ! convert to POP grid + + uocn(i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) & ! rotate to align with model i,j + worky*sin(ANGLET(i,j,iblk)) vocn(i,j,iblk) = worky*cos(ANGLET(i,j,iblk)) & - workx*sin(ANGLET(i,j,iblk)) workx = ss_tltx (i,j,iblk) ! sea sfc tilt, m/m worky = ss_tlty (i,j,iblk) - ss_tltx(i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) & ! convert to POP grid + + ss_tltx(i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) & ! rotate to align with model i,j + worky*sin(ANGLET(i,j,iblk)) ss_tlty(i,j,iblk) = worky*cos(ANGLET(i,j,iblk)) & - workx*sin(ANGLET(i,j,iblk)) @@ -682,47 +684,46 @@ subroutine ice_import( importState, rc ) sst(i,j,iblk) = sst(i,j,iblk) - Tffresh ! sea sfc temp (C) sss(i,j,iblk) = max(sss(i,j,iblk),c0) -#ifndef CESMCOUPLED -!tcx should this be icepack_sea_freezing_temperature? - Tf (i,j,iblk) = icepack_liquidus_temperature(sss(i,j,iblk)) -#endif + enddo enddo + end do #ifdef CESMCOUPLED - ! 
Use shr_frz_mod for this, overwrite Tf computed above - Tf(:,:,iblk) = shr_frz_freezetemp(sss(:,:,iblk)) + ! Use shr_frz_mod for this + Tf(:,:,iblk) = shr_frz_freezetemp(sss(:,:,iblk)) +#else + !$OMP PARALLEL DO PRIVATE(iblk,i,j,workx,worky) + do iblk = 1, nblocks + do j = 1,ny_block + do i = 1,nx_block + !TODO: tcx should this be icepack_sea_freezing_temperature? + Tf (i,j,iblk) = icepack_liquidus_temperature(sss(i,j,iblk)) + end do + end do + end do + !$OMP END PARALLEL DO #endif - enddo - !$OMP END PARALLEL DO -#ifdef CESMCOUPLED call t_stopf ('cice_imp_ocn') -#endif ! Interpolate ocean dynamics variables from T-cell centers to ! U-cell centers. if (.not.prescribed_ice) then -#ifdef CESMCOUPLED call t_startf ('cice_imp_t2u') -#endif call t2ugrid_vector(uocn) call t2ugrid_vector(vocn) call t2ugrid_vector(ss_tltx) call t2ugrid_vector(ss_tlty) -#ifdef CESMCOUPLED call t_stopf ('cice_imp_t2u') -#endif end if ! Atmosphere variables are needed in T cell centers in ! subroutine stability and are interpolated to the U grid ! later as necessary. -#ifdef CESMCOUPLED call t_startf ('cice_imp_atm') -#endif !$OMP PARALLEL DO PRIVATE(iblk,i,j,workx,worky) do iblk = 1, nblocks do j = 1, ny_block @@ -743,9 +744,7 @@ subroutine ice_import( importState, rc ) enddo enddo !$OMP END PARALLEL DO -#ifdef CESMCOUPLED call t_stopf ('cice_imp_atm') -#endif end subroutine ice_import diff --git a/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 b/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 index dd56ac441..85b4177fd 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 @@ -95,6 +95,8 @@ module ice_prescribed_mod contains !=============================================================================== +#ifdef CESM_COUPLED + subroutine ice_prescribed_init(mpicom, compid, gindex) use shr_pio_mod, only : shr_pio_getiotype, shr_pio_getiosys, shr_pio_getioformat @@ -647,6 +649,15 @@ subroutine ice_prescribed_set_domain( lsize, mpicom, gsmap_i, dom_i ) end subroutine ice_prescribed_set_domain +#else + ! This is a stub routine for now + subroutine ice_prescribed_init(mpicom, compid, gindex) + integer(kind=int_kind), intent(in) :: mpicom + integer(kind=int_kind), intent(in) :: compid + integer(kind=int_kind), intent(in) :: gindex(:) + ! 
do nothing + end subroutine ice_prescribed_init + #endif end module ice_prescribed_mod From 3a1b88bffa5abe4741be4effb9f13fcbfe07b189 Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Sat, 4 Jul 2020 13:25:48 -0600 Subject: [PATCH 17/90] fix compile errors --- cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 4 ++-- cicecore/drivers/nuopc/cmeps/ice_import_export.F90 | 8 ++++---- cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 | 5 ++++- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index c3947cb98..53c57f721 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -48,8 +48,8 @@ module ice_comp_nuopc #ifdef CESMCOUPLED use shr_const_mod use shr_orb_mod , only : shr_orb_decl, shr_orb_params, SHR_ORB_UNDEF_REAL, SHR_ORB_UNDEF_INT - use ice_prescribed_mod , only : ice_prescribed_init #endif + use ice_prescribed_mod , only : ice_prescribed_init implicit none private @@ -834,7 +834,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) call NUOPC_CompAttributeGet(gcomp, name='MCTID', value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent and isSet) then + if (isPresent .and. isSet) then read(cvalue,*) compid ! convert from string to integer else compid = 0 diff --git a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 index da022ddcf..9adb868db 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 @@ -35,7 +35,7 @@ module ice_import_export use icepack_intfc , only : icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc , only : icepack_query_parameters, icepack_query_tracer_flags use icepack_intfc , only : icepack_liquidus_temperature - use ice_wrapper_mod , only : t_startf, t_stopf, t_barrierf + use cice_wrapper_mod , only : t_startf, t_stopf, t_barrierf #ifdef CESMCOUPLED use shr_frz_mod , only : shr_frz_freezetemp #endif @@ -171,7 +171,7 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam call fldlist_add(fldsToIce_num, fldsToIce, 'Sa_ptem' ) !cesm call fldlist_add(fldsToIce_num, fldsToIce, 'air_density_height_lowest' ) !cesm - ! the folloing are advertised but might not be connected if they are not present + ! the following are advertised but might not be connected if they are not present ! in the cmeps esmFldsExchange_xxx_mod.F90 that is model specific ! from atm - black carbon deposition fluxes (3) call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_bcph', ungridded_lbound=1, ungridded_ubound=3) @@ -253,8 +253,8 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam if (flds_wiso) then call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_fresh_water_to_ocean_rate_wiso', & ungridded_lbound=1, ungridded_ubound=3) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_evap_rate_atm_into_ice_wiso', & - ungridded_lbound=1, ungridded_ubound=3) + !call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_evap_rate_atm_into_ice_wiso', & + ! 
ungridded_lbound=1, ungridded_ubound=3) call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_qref_wiso', & ungridded_lbound=1, ungridded_ubound=3) end if diff --git a/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 b/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 index 85b4177fd..4104b70b4 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 @@ -7,6 +7,9 @@ module ice_prescribed_mod implicit none private ! except + ! MEMBER FUNCTIONS: + public :: ice_prescribed_init ! initialize input data stream + logical(kind=log_kind), parameter, public :: prescribed_ice = .false. ! true if prescribed ice #else @@ -90,6 +93,7 @@ module ice_prescribed_mod ! , rcpidepressT = rcpi*depressT & ! param for finding T(z) from q (J/m^3) ! , rLfidepressT = rLfi*depressT ! param for heat capacity (J deg/m^3) ! ! heat capacity of sea ice, rhoi*C=rcpi+rLfidepressT*salinity/T^2 +#endif !======================================================================= contains @@ -657,7 +661,6 @@ subroutine ice_prescribed_init(mpicom, compid, gindex) integer(kind=int_kind), intent(in) :: gindex(:) ! do nothing end subroutine ice_prescribed_init - #endif end module ice_prescribed_mod From 41855fde3b5a463b20455cc4bfb8a5af6a16436f Mon Sep 17 00:00:00 2001 From: Mariana Vertenstein Date: Sat, 4 Jul 2020 14:29:12 -0600 Subject: [PATCH 18/90] fixes to get cesm working --- .../drivers/nuopc/cmeps/cice_wrapper_mod.F90 | 7 +- .../nuopc/cmeps/ice_prescribed_mod.F90 | 159 ++++++------------ 2 files changed, 56 insertions(+), 110 deletions(-) diff --git a/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 b/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 index e350e9a52..0da2ed491 100644 --- a/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 +++ b/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 @@ -1,13 +1,12 @@ module cice_wrapper_mod #ifdef CESMCOUPLED - use perf_mod, only : t_startf, t_stopf, t_barrierf -#endif - + use perf_mod , only : t_startf, t_stopf, t_barrierf + use shr_file_mod , only : shr_file_getlogunit, shr_file_setlogunit +#else contains -#ifndef CESMCOUPLED ! These are just stub routines put in place to remove subroutine shr_file_setLogUnit(nunit) diff --git a/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 b/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 index 4104b70b4..78ea39b4e 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 @@ -1,5 +1,12 @@ module ice_prescribed_mod + ! !DESCRIPTION: + ! The prescribed ice model reads in ice concentration data from a netCDF + ! file. Ice thickness, temperature, the ice temperature profile are + ! prescribed. Air/ice fluxes are computed to get surface temperature, + ! Ice/ocean fluxes are set to zero, and ice dynamics are not calculated. + ! Regridding and data cycling capabilities are included. + #ifndef CESMCOUPLED use ice_kinds_mod @@ -7,22 +14,21 @@ module ice_prescribed_mod implicit none private ! except - ! MEMBER FUNCTIONS: public :: ice_prescribed_init ! initialize input data stream - logical(kind=log_kind), parameter, public :: prescribed_ice = .false. ! true if prescribed ice -#else +contains + ! This is a stub routine for now + subroutine ice_prescribed_init(mpicom, compid, gindex) + integer(kind=int_kind), intent(in) :: mpicom + integer(kind=int_kind), intent(in) :: compid + integer(kind=int_kind), intent(in) :: gindex(:) + ! do nothing + end subroutine ice_prescribed_init - ! !DESCRIPTION: - ! 
The prescribed ice model reads in ice concentration data from a netCDF - ! file. Ice thickness, temperature, the ice temperature profile are - ! prescribed. Air/ice fluxes are computed to get surface temperature, - ! Ice/ocean fluxes are set to zero, and ice dynamics are not calculated. - ! Regridding and data cycling capabilities are included. +#else - ! !USES: - use shr_nl_mod, only : shr_nl_find_group_name + use shr_nl_mod , only : shr_nl_find_group_name use shr_strdata_mod use shr_dmodel_mod use shr_string_mod @@ -31,24 +37,23 @@ module ice_prescribed_mod use shr_mct_mod use mct_mod use pio - use ice_broadcast - use ice_communicate , only : my_task, master_task, MPI_COMM_ICE + use ice_communicate , only : my_task, master_task, MPI_COMM_ICE use ice_kinds_mod use ice_fileunits - use ice_exit , only : abort_ice - use ice_domain_size , only : nx_global, ny_global, ncat, nilyr, nslyr, max_blocks + use ice_exit , only : abort_ice + use ice_domain_size , only : nx_global, ny_global, ncat, nilyr, nslyr, max_blocks use ice_constants - use ice_blocks , only : nx_block, ny_block, block, get_block - use ice_domain , only : nblocks, distrb_info, blocks_ice - use ice_grid , only : TLAT, TLON, hm, tmask, tarea, grid_type, ocn_gridcell_frac - use ice_calendar , only : idate, sec, calendar_type - use ice_arrays_column, only : hin_max + use ice_blocks , only : nx_block, ny_block, block, get_block + use ice_domain , only : nblocks, distrb_info, blocks_ice + use ice_grid , only : TLAT, TLON, hm, tmask, tarea, grid_type, ocn_gridcell_frac + use ice_calendar , only : idate, sec, calendar_type + use ice_arrays_column , only : hin_max use ice_read_write - use ice_exit, only: abort_ice - use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted - use icepack_intfc, only: icepack_query_tracer_indices, icepack_query_tracer_sizes - use icepack_intfc, only: icepack_query_parameters + use ice_exit , only: abort_ice + use icepack_intfc , only: icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc , only: icepack_query_tracer_indices, icepack_query_tracer_sizes + use icepack_intfc , only: icepack_query_parameters implicit none private ! except @@ -59,59 +64,38 @@ module ice_prescribed_mod public :: ice_prescribed_phys ! set prescribed ice state and fluxes ! !PUBLIC DATA MEMBERS: - logical(kind=log_kind), public :: prescribed_ice ! true if prescribed ice - + logical(kind=log_kind), public :: prescribed_ice ! true if prescribed ice integer(kind=int_kind),parameter :: nFilesMaximum = 400 ! max number of files - integer(kind=int_kind) :: stream_year_first ! first year in stream to use - integer(kind=int_kind) :: stream_year_last ! last year in stream to use - integer(kind=int_kind) :: model_year_align ! align stream_year_first - ! with this model year - - character(len=char_len_long) :: stream_fldVarName - character(len=char_len_long) :: stream_fldFileName(nFilesMaximum) - character(len=char_len_long) :: stream_domTvarName - character(len=char_len_long) :: stream_domXvarName - character(len=char_len_long) :: stream_domYvarName - character(len=char_len_long) :: stream_domAreaName - character(len=char_len_long) :: stream_domMaskName - character(len=char_len_long) :: stream_domFileName - character(len=char_len_long) :: stream_mapread - logical(kind=log_kind) :: prescribed_ice_fill ! true if data fill required - - type(shr_strdata_type) :: sdat ! prescribed data stream - character(len=char_len_long) :: fldList ! list of fields in data stream - real(kind=dbl_kind),allocatable :: ice_cov(:,:,:) ! 
ice cover - -! real (kind=dbl_kind), parameter :: & -! cp_sno = 0.0_dbl_kind & ! specific heat of snow (J/kg/K) -! , rLfi = Lfresh*rhoi & ! latent heat of fusion ice (J/m^3) -! , rLfs = Lfresh*rhos & ! latent heat of fusion snow (J/m^3) -! , rLvi = Lvap*rhoi & ! latent heat of vapor*rhoice (J/m^3) -! , rLvs = Lvap*rhos & ! latent heat of vapor*rhosno (J/m^3) -! , rcpi = cp_ice*rhoi & ! heat capacity of fresh ice (J/m^3) -! , rcps = cp_sno*rhos & ! heat capacity of snow (J/m^3) -! , rcpidepressT = rcpi*depressT & ! param for finding T(z) from q (J/m^3) -! , rLfidepressT = rLfi*depressT ! param for heat capacity (J deg/m^3) -! ! heat capacity of sea ice, rhoi*C=rcpi+rLfidepressT*salinity/T^2 -#endif + integer(kind=int_kind) :: stream_year_first ! first year in stream to use + integer(kind=int_kind) :: stream_year_last ! last year in stream to use + integer(kind=int_kind) :: model_year_align ! align stream_year_first with this model year + character(len=char_len_long) :: stream_fldVarName + character(len=char_len_long) :: stream_fldFileName(nFilesMaximum) + character(len=char_len_long) :: stream_domTvarName + character(len=char_len_long) :: stream_domXvarName + character(len=char_len_long) :: stream_domYvarName + character(len=char_len_long) :: stream_domAreaName + character(len=char_len_long) :: stream_domMaskName + character(len=char_len_long) :: stream_domFileName + character(len=char_len_long) :: stream_mapread + logical(kind=log_kind) :: prescribed_ice_fill ! true if data fill required + type(shr_strdata_type) :: sdat ! prescribed data stream + character(len=char_len_long) :: fldList ! list of fields in data stream + real(kind=dbl_kind),allocatable :: ice_cov(:,:,:) ! ice cover -!======================================================================= contains -!=============================================================================== - -#ifdef CESM_COUPLED subroutine ice_prescribed_init(mpicom, compid, gindex) - use shr_pio_mod, only : shr_pio_getiotype, shr_pio_getiosys, shr_pio_getioformat - ! !DESCRIPTION: ! Prescribed ice initialization - needed to ! work with new shr_strdata module derived type - ! !INPUT/OUTPUT PARAMETERS: + use shr_pio_mod, only : shr_pio_getiotype, shr_pio_getiosys, shr_pio_getioformat + implicit none include 'mpif.h' + ! !nput/output parameters: integer(kind=int_kind), intent(in) :: mpicom integer(kind=int_kind), intent(in) :: compid integer(kind=int_kind), intent(in) :: gindex(:) @@ -263,7 +247,6 @@ subroutine ice_prescribed_init(mpicom, compid, gindex) end subroutine ice_prescribed_init !======================================================================= - subroutine ice_prescribed_run(mDateIn, secIn) ! !DESCRIPTION: @@ -335,25 +318,12 @@ subroutine ice_prescribed_run(mDateIn, secIn) end subroutine ice_prescribed_run !=============================================================================== - !BOP =========================================================================== - ! - ! !IROUTINE: ice_prescribed_phys -- set prescribed ice state and fluxes - ! - ! !DESCRIPTION: - ! - ! Set prescribed ice state using input ice concentration; - ! set surface ice temperature to atmospheric value; use - ! linear temperature gradient in ice to ocean temperature. - ! - ! !REVISION HISTORY: - ! 2005-May-23 - J. Schramm - Updated with data models - ! 2004-July - J. Schramm - Modified to allow variable snow cover - ! 2001-May - B. P. Briegleb - Original version - ! - ! 
!INTERFACE: ------------------------------------------------------------------ - subroutine ice_prescribed_phys + ! Set prescribed ice state using input ice concentration; + ! set surface ice temperature to atmospheric value; use + ! linear temperature gradient in ice to ocean temperature. + ! !USES: use ice_flux use ice_state @@ -395,20 +365,6 @@ subroutine ice_prescribed_phys if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - !----------------------------------------------------------------- - ! Initialize ice state - !----------------------------------------------------------------- - - ! TODO - can we now get rid of the following??? - - ! aicen(:,:,:,:) = c0 - ! vicen(:,:,:,:) = c0 - ! eicen(:,:,:,:) = c0 - - ! do nc=1,ncat - ! trcrn(:,:,nt_Tsfc,nc,:) = Tf(:,:,:) - ! enddo - !----------------------------------------------------------------- ! Set ice cover over land to zero, not sure if this should be ! be done earier, before time/spatial interp?????? @@ -554,7 +510,6 @@ subroutine ice_prescribed_phys end subroutine ice_prescribed_phys !=============================================================================== - subroutine ice_prescribed_set_domain( lsize, mpicom, gsmap_i, dom_i ) ! Arguments @@ -653,14 +608,6 @@ subroutine ice_prescribed_set_domain( lsize, mpicom, gsmap_i, dom_i ) end subroutine ice_prescribed_set_domain -#else - ! This is a stub routine for now - subroutine ice_prescribed_init(mpicom, compid, gindex) - integer(kind=int_kind), intent(in) :: mpicom - integer(kind=int_kind), intent(in) :: compid - integer(kind=int_kind), intent(in) :: gindex(:) - ! do nothing - end subroutine ice_prescribed_init #endif end module ice_prescribed_mod From 30a81cce323bfd6b742e9cb6be00c8708b4f8bde Mon Sep 17 00:00:00 2001 From: Mariana Vertenstein Date: Tue, 7 Jul 2020 12:17:21 -0600 Subject: [PATCH 19/90] fixed white space issue --- .../infrastructure/ice_read_write.F90 | 30 +++++++------------ 1 file changed, 11 insertions(+), 19 deletions(-) diff --git a/cicecore/cicedynB/infrastructure/ice_read_write.F90 b/cicecore/cicedynB/infrastructure/ice_read_write.F90 index 4fa115ee3..f497db49b 100644 --- a/cicecore/cicedynB/infrastructure/ice_read_write.F90 +++ b/cicecore/cicedynB/infrastructure/ice_read_write.F90 @@ -1106,7 +1106,6 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & integer (kind=int_kind) :: & varid , & ! variable id status ! status output from netcdf routines -! status , & ! status output from netcdf routines ! ndim, nvar, & ! sizes of netcdf file ! id, & ! dimension index ! dimlen ! dimension size @@ -1114,7 +1113,7 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & real (kind=dbl_kind) :: & amin, amax, asum ! min, max values and sum of input array -! character (char_len) :: & +! character (char_len) :: & ! dimname ! dimension name real (kind=dbl_kind), dimension(:,:), allocatable :: & @@ -1280,7 +1279,6 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & n, & ! ncat index varid , & ! variable id status ! status output from netcdf routines -! status , & ! status output from netcdf routines ! ndim, nvar, & ! sizes of netcdf file ! id, & ! dimension index ! dimlen ! size of dimension @@ -1288,7 +1286,7 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & real (kind=dbl_kind) :: & amin, amax, asum ! min, max values and sum of input array -! character (char_len) :: & +! character (char_len) :: & ! dimname ! 
dimension name real (kind=dbl_kind), dimension(:,:,:), allocatable :: & @@ -1366,7 +1364,7 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & if (my_task==master_task .and. diag) then ! write(nu_diag,*) & -! 'ice_read_nc_xyz, fid= ',fid, ', nrec = ',nrec, & +! 'ice_read_nc_xyz, fid= ',fid, ', nrec = ',nrec, & ! ', varname = ',trim(varname) ! status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) ! write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar @@ -1837,7 +1835,6 @@ subroutine ice_write_nc_xy(fid, nrec, varid, work, diag, & ! netCDF file diagnostics: integer (kind=int_kind) :: & status ! status output from netcdf routines -! status, & ! status output from netcdf routines ! ndim, nvar, & ! sizes of netcdf file ! id, & ! dimension index ! dimlen ! size of dimension @@ -1847,8 +1844,7 @@ subroutine ice_write_nc_xy(fid, nrec, varid, work, diag, & character (char_len) :: & lvarname ! variable name -! lvarname, & ! variable name -! dimname ! dimension name +! dimname ! dimension name real (kind=dbl_kind), dimension(:,:), allocatable :: & work_g1 @@ -1959,18 +1955,16 @@ subroutine ice_write_nc_xyz(fid, nrec, varid, work, diag, & integer (kind=int_kind) :: & n, & ! ncat index status ! status output from netcdf routines -! status, & ! status output from netcdf routines -! ndim, nvar, & ! sizes of netcdf file -! id, & ! dimension index -! dimlen ! size of dimension +! ndim, nvar, & ! sizes of netcdf file +! id, & ! dimension index +! dimlen ! size of dimension real (kind=dbl_kind) :: & amin, amax, asum ! min, max values and sum of input array character (char_len) :: & lvarname ! variable name -! lvarname, & ! variable name -! dimname ! dimension name +! dimname ! dimension name real (kind=dbl_kind), dimension(:,:,:), allocatable :: & work_g1 @@ -2087,7 +2081,6 @@ subroutine ice_read_global_nc (fid, nrec, varname, work_g, diag) integer (kind=int_kind) :: & varid, & ! netcdf id for field status ! status output from netcdf routines -! status, & ! status output from netcdf routines ! ndim, nvar, & ! sizes of netcdf file ! id, & ! dimension index ! dimlen ! size of dimension @@ -2095,9 +2088,9 @@ subroutine ice_read_global_nc (fid, nrec, varname, work_g, diag) real (kind=dbl_kind) :: & amin, amax, asum ! min, max values and sum of input array -! character (char_len) :: & +! character (char_len) :: & ! dimname ! dimension name - +! #ifdef ORCA_GRID real (kind=dbl_kind), dimension(:,:), allocatable :: & work_g3 @@ -2239,7 +2232,6 @@ subroutine ice_read_nc_uv(fid, nrec, nzlev, varname, work, diag, & integer (kind=int_kind) :: & varid , & ! variable id status ! status output from netcdf routines -! status, & ! status output from netcdf routines ! ndim, nvar, & ! sizes of netcdf file ! id, & ! dimension index ! dimlen ! size of dimension @@ -2247,7 +2239,7 @@ subroutine ice_read_nc_uv(fid, nrec, nzlev, varname, work, diag, & real (kind=dbl_kind) :: & amin, amax, asum ! min, max values and sum of input array -! character (char_len) :: & +! character (char_len) :: & ! dimname ! 
dimension name real (kind=dbl_kind), dimension(:,:), allocatable :: & From 27dd3b7e6003b8aac0977d3a057e9a61ed7305d4 Mon Sep 17 00:00:00 2001 From: David Bailey Date: Tue, 7 Jul 2020 12:40:33 -0600 Subject: [PATCH 20/90] Add restart_coszen namelist option --- cicecore/cicedynB/general/ice_init.F90 | 8 ++++++-- .../infrastructure/ice_restart_driver.F90 | 15 +++------------ .../infrastructure/io/io_netcdf/ice_restart.F90 | 7 ++----- .../infrastructure/io/io_pio2/ice_restart.F90 | 8 +++----- cicecore/shared/ice_restart_shared.F90 | 1 + configuration/scripts/ice_in | 1 + doc/source/cice_index.rst | 1 + doc/source/user_guide/ug_case_settings.rst | 1 + doc/source/user_guide/ug_implementation.rst | 4 ++++ 9 files changed, 22 insertions(+), 24 deletions(-) diff --git a/cicecore/cicedynB/general/ice_init.F90 b/cicecore/cicedynB/general/ice_init.F90 index f43c08793..91c5d539d 100644 --- a/cicecore/cicedynB/general/ice_init.F90 +++ b/cicecore/cicedynB/general/ice_init.F90 @@ -73,7 +73,7 @@ subroutine input_data restart_pond_cesm, restart_pond_lvl, restart_pond_topo, restart_aero, & restart_fsd, restart_iso use ice_restart_shared, only: & - restart, restart_ext, restart_dir, restart_file, pointer_file, & + restart, restart_ext, restart_coszen, restart_dir, restart_file, pointer_file, & runid, runtype, use_restart_time, restart_format, lcdf64 use ice_history_shared, only: hist_avg, history_dir, history_file, & incond_dir, incond_file, version_name, & @@ -149,7 +149,8 @@ subroutine input_data dt, npt, ndtd, numin, & runtype, runid, bfbflag, numax, & ice_ic, restart, restart_dir, restart_file, & - restart_ext, use_restart_time, restart_format, lcdf64, & + restart_ext, restart_coszen, use_restart_time, restart_format, & + lcdf64, & pointer_file, dumpfreq, dumpfreq_n, dump_last, & diagfreq, diag_type, diag_file, history_format,& print_global, print_points, latpnt, lonpnt, & @@ -269,6 +270,7 @@ subroutine input_data restart_dir = './' ! write to executable dir for default restart_file = 'iced' ! restart file name prefix restart_ext = .false. ! if true, read/write ghost cells + restart_coszen = .false. ! if true, read/write coszen use_restart_time = .true. ! if true, use time info written in file pointer_file = 'ice.restart_file' restart_format = 'default' ! 
restart file format @@ -563,6 +565,7 @@ subroutine input_data call broadcast_scalar(restart, master_task) call broadcast_scalar(restart_dir, master_task) call broadcast_scalar(restart_ext, master_task) + call broadcast_scalar(restart_coszen, master_task) call broadcast_scalar(use_restart_time, master_task) call broadcast_scalar(restart_format, master_task) call broadcast_scalar(lcdf64, master_task) @@ -1458,6 +1461,7 @@ subroutine input_data write(nu_diag,*) ' restart_dir = ', & trim(restart_dir) write(nu_diag,*) ' restart_ext = ', restart_ext + write(nu_diag,*) ' restart_coszen = ', restart_coszen write(nu_diag,*) ' restart_format = ', & trim(restart_format) write(nu_diag,*) ' lcdf64 = ', & diff --git a/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 b/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 index d3829b9c4..7eb7c020d 100644 --- a/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 +++ b/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 @@ -23,7 +23,7 @@ module ice_restart_driver field_loc_center, field_loc_NEcorner, & field_type_scalar, field_type_vector use ice_restart_shared, only: restart_dir, pointer_file, & - runid, use_restart_time, lenstr + runid, use_restart_time, lenstr, restart_coszen use ice_restart use ice_exit, only: abort_ice use ice_fileunits, only: nu_diag, nu_rst_pointer, nu_restart, nu_dump @@ -58,9 +58,7 @@ subroutine dumpfile(filename_spec) stressp_1, stressp_2, stressp_3, stressp_4, & stressm_1, stressm_2, stressm_3, stressm_4, & stress12_1, stress12_2, stress12_3, stress12_4 -#ifdef CESMCOUPLED use ice_flux, only: coszen -#endif use ice_state, only: aicen, vicen, vsnon, trcrn, uvel, vvel character(len=char_len_long), intent(in), optional :: filename_spec @@ -132,9 +130,7 @@ subroutine dumpfile(filename_spec) !----------------------------------------------------------------- ! radiation fields !----------------------------------------------------------------- -#ifdef CESMCOUPLED - call write_restart_field(nu_dump,0,coszen,'ruf8','coszen',1,diag) -#endif + if (restart_coszen) call write_restart_field(nu_dump,0,coszen,'ruf8','coszen',1,diag) call write_restart_field(nu_dump,0,scale_factor,'ruf8','scale_factor',1,diag) call write_restart_field(nu_dump,0,swvdr,'ruf8','swvdr',1,diag) @@ -209,9 +205,7 @@ subroutine restartfile (ice_ic) stressp_1, stressp_2, stressp_3, stressp_4, & stressm_1, stressm_2, stressm_3, stressm_4, & stress12_1, stress12_2, stress12_3, stress12_4 -#ifdef CESMCOUPLED use ice_flux, only: coszen -#endif use ice_grid, only: tmask, grid_type use ice_state, only: trcr_depend, aice, vice, vsno, trcr, & aice0, aicen, vicen, vsnon, trcrn, aice_init, uvel, vvel, & @@ -310,11 +304,8 @@ subroutine restartfile (ice_ic) if (my_task == master_task) & write(nu_diag,*) 'radiation fields' -#ifdef CESMCOUPLED - call read_restart_field(nu_restart,0,coszen,'ruf8', & -! 
'coszen',1,diag, field_loc_center, field_type_scalar) + if (restart_coszen) call read_restart_field(nu_restart,0,coszen,'ruf8', & 'coszen',1,diag) -#endif call read_restart_field(nu_restart,0,scale_factor,'ruf8', & 'scale_factor',1,diag, field_loc_center, field_type_scalar) call read_restart_field(nu_restart,0,swvdr,'ruf8', & diff --git a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 index d4decf6f7..d3cf954a0 100644 --- a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 @@ -11,7 +11,7 @@ module ice_restart use netcdf use ice_restart_shared, only: & restart_ext, restart_dir, restart_file, pointer_file, & - runid, use_restart_time, lcdf64, lenstr + runid, use_restart_time, lcdf64, lenstr, restart_coszen use ice_fileunits, only: nu_diag, nu_rst_pointer use ice_exit, only: abort_ice use icepack_intfc, only: icepack_query_parameters @@ -227,10 +227,7 @@ subroutine init_restart_write(filename_spec) call define_rest_field(ncid,'uvel',dims) call define_rest_field(ncid,'vvel',dims) - -#ifdef CESMCOUPLED - call define_rest_field(ncid,'coszen',dims) -#endif + if (restart_coszen) call define_rest_field(ncid,'coszen',dims) call define_rest_field(ncid,'scale_factor',dims) call define_rest_field(ncid,'swvdr',dims) call define_rest_field(ncid,'swvdf',dims) diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 index 5bb880dc5..b11dcf0d0 100644 --- a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 @@ -11,7 +11,8 @@ module ice_restart use ice_kinds_mod use ice_restart_shared, only: & restart, restart_ext, restart_dir, restart_file, pointer_file, & - runid, runtype, use_restart_time, restart_format, lcdf64, lenstr + runid, runtype, use_restart_time, restart_format, lcdf64, lenstr, & + restart_coszen use ice_pio use pio use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted @@ -245,10 +246,7 @@ subroutine init_restart_write(filename_spec) call define_rest_field(File,'uvel',dims) call define_rest_field(File,'vvel',dims) - -#ifdef CESMCOUPLED - call define_rest_field(File,'coszen',dims) -#endif + if (restart_coszen) call define_rest_field(File,'coszen',dims) call define_rest_field(File,'scale_factor',dims) call define_rest_field(File,'swvdr',dims) call define_rest_field(File,'swvdf',dims) diff --git a/cicecore/shared/ice_restart_shared.F90 b/cicecore/shared/ice_restart_shared.F90 index 82b82c5ce..6578ef3ad 100644 --- a/cicecore/shared/ice_restart_shared.F90 +++ b/cicecore/shared/ice_restart_shared.F90 @@ -11,6 +11,7 @@ module ice_restart_shared logical (kind=log_kind), public :: & restart , & ! if true, initialize using restart file instead of defaults restart_ext, & ! if true, read/write extended grid (with ghost cells) + restart_coszen, & ! if true, read/write coszen use_restart_time ! if true, use time written in core restart file character (len=char_len), public :: & diff --git a/configuration/scripts/ice_in b/configuration/scripts/ice_in index 9d35b4366..54663f86c 100644 --- a/configuration/scripts/ice_in +++ b/configuration/scripts/ice_in @@ -10,6 +10,7 @@ ice_ic = './restart/iced_gx3_v5.nc' restart = .true. restart_ext = .false. + restart_coszen = .false. use_restart_time = .true. restart_format = 'default' lcdf64 = .false. 
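For reference, a minimal setup_nml fragment exercising the new option might look like the sketch below; aside from restart_coszen itself, the values shown are simply the illustrative defaults from ice_in above, not further changes made by this patch:

   &setup_nml
      ice_ic           = './restart/iced_gx3_v5.nc'
      restart          = .true.
      restart_ext      = .false.
      restart_coszen   = .true.    ! read/write coszen in restart files
      use_restart_time = .true.
      restart_format   = 'default'
   /

With restart_coszen = .true., the conditional write_restart_field/read_restart_field calls and the define_rest_field calls added above are exercised regardless of whether the model was built with CESMCOUPLED, which is the point of replacing the cpp guard with a namelist flag.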
diff --git a/doc/source/cice_index.rst b/doc/source/cice_index.rst index 1884d03f1..1d3baca38 100644 --- a/doc/source/cice_index.rst +++ b/doc/source/cice_index.rst @@ -523,6 +523,7 @@ either Celsius or Kelvin units). "restart_format", ":math:`\bullet` restart file format", "" "restart_[tracer]", ":math:`\bullet` if true, read tracer restart file", "" "restart_ext", ":math:`\bullet` if true, read/write halo cells in restart file", "" + "restart_coszen", ":math:`\bullet` if true, read/write coszen in restart file", "" "restore_bgc", ":math:`\bullet` if true, restore nitrate/silicate to data", "" "restore_ice", ":math:`\bullet` if true, restore ice state along lateral boundaries", "" "restore_ocn", ":math:`\bullet` restore sst to data", "" diff --git a/doc/source/user_guide/ug_case_settings.rst b/doc/source/user_guide/ug_case_settings.rst index 84d3633b1..5512841a2 100644 --- a/doc/source/user_guide/ug_case_settings.rst +++ b/doc/source/user_guide/ug_case_settings.rst @@ -148,6 +148,7 @@ setup_nml "``restart``", "logical", "initialize using restart file", "``.false.``" "``restart_dir``", "string", "path to restart directory", "'./'" "``restart_ext``", "logical", "read/write halo cells in restart files", "``.false.``" + "``restart_coszen``", "logical", "read/write coszen in restart files", "``.false.``" "``restart_file``", "string", "output file prefix for restart dump", "'iced'" "``restart_format``", "``default``", "read/write restart file with default format", "``default``" "", "``pio_pnetcdf``", "read/write restart files with pnetcdf in pio", "" diff --git a/doc/source/user_guide/ug_implementation.rst b/doc/source/user_guide/ug_implementation.rst index b7d9c0f47..44d4ef1d6 100644 --- a/doc/source/user_guide/ug_implementation.rst +++ b/doc/source/user_guide/ug_implementation.rst @@ -569,6 +569,10 @@ An additional namelist option, ``restart_ext`` specifies whether halo cells are included in the restart files. This option is useful for tripole and regional grids, but can not be used with PIO. +An additional namelist option, ``restart_coszen`` specifies whether the +cosine of the zenith angle is included in the restart files. This is mainly +used in coupled models. + MPI is initialized in *init\_communicate* for both coupled and stand-alone MPI runs. 
The ice component communicates with a flux coupler or other climate components via external routines that handle the From f92bef336c79186daac226af143dccbd20348e95 Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Tue, 7 Jul 2020 15:20:45 -0400 Subject: [PATCH 21/90] update icepack submodule --- icepack | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/icepack b/icepack index 09a5e19f0..b1e41d9f1 160000 --- a/icepack +++ b/icepack @@ -1 +1 @@ -Subproject commit 09a5e19f006f62f60f6b940a4385feb47451368e +Subproject commit b1e41d9f12a59390aacdb933889c3c4a87c9e8d2 From 8ff4ee0d72eeab8211b71a94af61669adf1550ef Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Tue, 7 Jul 2020 15:46:57 -0400 Subject: [PATCH 22/90] change Orion to orion in backend remove duplicate print lines from ice_transport_driver --- cicecore/cicedynB/dynamics/ice_transport_driver.F90 | 6 ------ configuration/scripts/forapps/ufs/comp_ice.backend.libcice | 2 +- 2 files changed, 1 insertion(+), 7 deletions(-) diff --git a/cicecore/cicedynB/dynamics/ice_transport_driver.F90 b/cicecore/cicedynB/dynamics/ice_transport_driver.F90 index a496402f0..c500e1631 100644 --- a/cicecore/cicedynB/dynamics/ice_transport_driver.F90 +++ b/cicecore/cicedynB/dynamics/ice_transport_driver.F90 @@ -204,12 +204,6 @@ subroutine init_transport if (nt-k==nt_isoice) & write(nu_diag,1000) 'nt_isoice ',nt,depend(nt),tracer_type(nt),& has_dependents(nt) - if (nt-k==nt_isosno) & - write(nu_diag,*) 'nt_isosno',nt,depend(nt),tracer_type(nt),& - has_dependents(nt) - if (nt-k==nt_isoice) & - write(nu_diag,*) 'nt_isoice',nt,depend(nt),tracer_type(nt),& - has_dependents(nt) if (nt-k==nt_bgc_Nit) & write(nu_diag,1000) 'nt_bgc_Nit ',nt,depend(nt),tracer_type(nt),& has_dependents(nt) diff --git a/configuration/scripts/forapps/ufs/comp_ice.backend.libcice b/configuration/scripts/forapps/ufs/comp_ice.backend.libcice index ca718548a..886e0a3ff 100755 --- a/configuration/scripts/forapps/ufs/comp_ice.backend.libcice +++ b/configuration/scripts/forapps/ufs/comp_ice.backend.libcice @@ -18,7 +18,7 @@ setenv THRD no # set to yes for OpenMP threading if (${SITE} =~ cheyenne*) then setenv ARCH cheyenne_intel -else if (${SITE} =~ Orion*) then +else if (${SITE} =~ orion*) then setenv ARCH orion_intel else if (${SITE} =~ hera*) then setenv ARCH hera_intel From 916c6af35222368562697a365a658e8e1bebe955 Mon Sep 17 00:00:00 2001 From: Denise Worthen Date: Tue, 14 Jul 2020 08:18:33 -0400 Subject: [PATCH 23/90] add -link_mpi=dbg to debug flags (#8) --- configuration/scripts/machines/Macros.cheyenne_intel | 2 +- configuration/scripts/machines/Macros.hera_intel | 2 +- configuration/scripts/machines/Macros.orion_intel | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/configuration/scripts/machines/Macros.cheyenne_intel b/configuration/scripts/machines/Macros.cheyenne_intel index 902224766..243295487 100644 --- a/configuration/scripts/machines/Macros.cheyenne_intel +++ b/configuration/scripts/machines/Macros.cheyenne_intel @@ -12,7 +12,7 @@ FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -trace FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) - FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created + FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created -link_mpi=dbg else FFLAGS += -O2 endif diff --git a/configuration/scripts/machines/Macros.hera_intel b/configuration/scripts/machines/Macros.hera_intel index 519e3a5ba..230f43e70 100644 --- 
a/configuration/scripts/machines/Macros.hera_intel +++ b/configuration/scripts/machines/Macros.hera_intel @@ -12,7 +12,7 @@ FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -trace FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) - FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created + FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created -link_mpi=dbg else FFLAGS += -O2 endif diff --git a/configuration/scripts/machines/Macros.orion_intel b/configuration/scripts/machines/Macros.orion_intel index aae839f4e..6dffdd0a2 100644 --- a/configuration/scripts/machines/Macros.orion_intel +++ b/configuration/scripts/machines/Macros.orion_intel @@ -12,7 +12,7 @@ FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -trace FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) - FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created + FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created -link_mpi=dbg else FFLAGS += -O2 endif From 8f37bfc6425cc66ff274f8fe3abc6e0de0ef0f08 Mon Sep 17 00:00:00 2001 From: Minsuk Ji <57227195+MinsukJi-NOAA@users.noreply.github.com> Date: Fri, 17 Jul 2020 09:05:06 -0400 Subject: [PATCH 24/90] cice6 compile (#6) * enable debug build. fix to remove errors * fix an error in comp_ice.backend.libcice * change Orion to orion for machine identification --- configuration/scripts/forapps/ufs/comp_ice.backend.clean | 8 ++++---- .../scripts/forapps/ufs/comp_ice.backend.libcice | 6 ++++-- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/configuration/scripts/forapps/ufs/comp_ice.backend.clean b/configuration/scripts/forapps/ufs/comp_ice.backend.clean index 7eef2ed1a..823f1f586 100755 --- a/configuration/scripts/forapps/ufs/comp_ice.backend.clean +++ b/configuration/scripts/forapps/ufs/comp_ice.backend.clean @@ -10,10 +10,10 @@ setenv OBJDIR $EXEDIR/compile ; if !(-d $OBJDIR) mkdir -p $OBJDIR if (${SITE} =~ cheyenne*) then setenv ARCH cheyenne_intel -#else if (${SITE} =~ Orion*) then -# setenv ARCH orion_intel -#else if (${SITE} =~ hera*) then -# setenv ARCH hera_intel +else if (${SITE} =~ orion*) then + setenv ARCH orion_intel +else if (${SITE} =~ hera*) then + setenv ARCH hera_intel else echo "CICE6 ${0}: ERROR in ARCH setup, ${hname}" exit -2 diff --git a/configuration/scripts/forapps/ufs/comp_ice.backend.libcice b/configuration/scripts/forapps/ufs/comp_ice.backend.libcice index 886e0a3ff..a408cc7d2 100755 --- a/configuration/scripts/forapps/ufs/comp_ice.backend.libcice +++ b/configuration/scripts/forapps/ufs/comp_ice.backend.libcice @@ -68,9 +68,11 @@ endif # Build in debug mode. If DEBUG=Y, enable DEBUG compilation. This # flag is set in ${ROOTDIR}/coupledFV3_MOM6_CICE_debug.appBuilder file. if (! $?DEBUG) then - setenv ICE_BLDDEBUG true + setenv ICE_BLDDEBUG false else - if ($DEBUG != "Y") then + if ($DEBUG == "Y") then + setenv ICE_BLDDEBUG true + else setenv ICE_BLDDEBUG false endif endif From bdf1a1f6cd44c595e57c275f69efcf28a069b06a Mon Sep 17 00:00:00 2001 From: Denise Worthen Date: Wed, 12 Aug 2020 15:55:21 -0400 Subject: [PATCH 25/90] changes for consistency w/ current emc-cice5 (#13) Update to emc/develop fork to current CICE consortium Co-authored-by: David A. 
Bailey Co-authored-by: Tony Craig Co-authored-by: Elizabeth Hunke Co-authored-by: Mariana Vertenstein Co-authored-by: apcraig Co-authored-by: Philippe Blain --- README.md | 10 +- cice.setup | 19 +- cicecore/cicedynB/dynamics/ice_dyn_shared.F90 | 39 ++-- cicecore/cicedynB/general/ice_flux.F90 | 46 ++++- cicecore/cicedynB/general/ice_forcing.F90 | 91 +++------ cicecore/cicedynB/general/ice_forcing_bgc.F90 | 24 +-- cicecore/cicedynB/general/ice_init.F90 | 97 ++++++---- cicecore/cicedynB/general/ice_step_mod.F90 | 55 +++++- .../comm/mpi/ice_communicate.F90 | 7 + .../comm/mpi/ice_gather_scatter.F90 | 69 +++---- .../infrastructure/comm/mpi/ice_reprosum.F90 | 4 +- .../comm/serial/ice_communicate.F90 | 80 +------- .../comm/serial/ice_reprosum.F90 | 4 +- .../cicedynB/infrastructure/ice_domain.F90 | 24 ++- cicecore/cicedynB/infrastructure/ice_grid.F90 | 34 +++- .../infrastructure/ice_read_write.F90 | 173 +++++++++--------- .../io/io_netcdf/ice_history_write.F90 | 11 +- .../io/io_netcdf/ice_restart.F90 | 39 ++++ .../io/io_pio2/ice_history_write.F90 | 6 - .../infrastructure/io/io_pio2/ice_restart.F90 | 2 +- cicecore/drivers/direct/hadgem3/CICE.F90 | 2 - .../drivers/direct/hadgem3/CICE_FinalMod.F90 | 5 +- .../drivers/direct/hadgem3/CICE_InitMod.F90 | 30 ++- .../drivers/direct/hadgem3/CICE_RunMod.F90 | 20 +- cicecore/drivers/mct/cesm1/CICE_FinalMod.F90 | 5 +- cicecore/drivers/mct/cesm1/CICE_InitMod.F90 | 32 ++-- cicecore/drivers/mct/cesm1/CICE_RunMod.F90 | 23 +-- .../drivers/nuopc/cmeps/CICE_FinalMod.F90 | 5 +- cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 | 8 +- .../drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 48 ++++- .../drivers/nuopc/cmeps/ice_import_export.F90 | 75 ++++---- cicecore/drivers/nuopc/dmi/CICE.F90 | 2 - cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 | 6 - cicecore/drivers/standalone/cice/CICE.F90 | 2 - .../drivers/standalone/cice/CICE_FinalMod.F90 | 2 - .../drivers/standalone/cice/CICE_InitMod.F90 | 10 - .../drivers/standalone/cice/CICE_RunMod.F90 | 15 +- .../standalone/cice/CICE_RunMod.F90_debug | 4 - cicecore/shared/ice_arrays_column.F90 | 8 + cicecore/shared/ice_init_column.F90 | 14 +- configuration/scripts/cice.build | 4 +- .../forapps/ufs/comp_ice.backend.libcice | 4 +- configuration/scripts/ice_in | 9 +- configuration/scripts/options/set_nml.alt03 | 3 + configuration/scripts/options/set_nml.alt04 | 3 + configuration/scripts/parse_namelist.sh | 27 ++- configuration/scripts/parse_settings.sh | 28 +++ configuration/scripts/tests/cice.lcov.csh | 2 +- .../scripts/tests/cice_test_codecov.csh | 4 +- configuration/scripts/tests/io_suite.ts | 5 +- .../scripts/tests/report_results.csh | 2 +- doc/source/cice_index.rst | 5 + doc/source/intro/citing.rst | 26 ++- doc/source/user_guide/ug_case_settings.rst | 65 ++++++- doc/source/user_guide/ug_implementation.rst | 6 + doc/source/user_guide/ug_running.rst | 37 +++- doc/source/user_guide/ug_testing.rst | 6 +- icepack | 2 +- 58 files changed, 827 insertions(+), 561 deletions(-) diff --git a/README.md b/README.md index 0c5940a7a..a584e8ac9 100644 --- a/README.md +++ b/README.md @@ -11,21 +11,19 @@ CICE is a computationally efficient model for simulating the growth, melting, an This repository contains the files and code needed to run the CICE sea ice numerical model starting with version 6. CICE is maintained by the CICE Consortium. Versions prior to v6 are found in the [CICE-svn-trunk repository](https://github.com/CICE-Consortium/CICE-svn-trunk). 
-CICE consists of a top level driver and dynamical core plus the [Icepack column physics code][icepack], which is included in CICE as a Git submodule. Because Icepack is a submodule of CICE, Icepack and CICE development are handled independently with respect to the GitHub repositories even though development and testing may be done together.
+CICE consists of a top level driver and dynamical core plus the [Icepack][icepack] column physics code, which is included in CICE as a Git submodule. Because Icepack is a submodule of CICE, Icepack and CICE development are handled independently with respect to the GitHub repositories even though development and testing may be done together.
 
 [icepack]: https://github.com/CICE-Consortium/Icepack
 
-The first point of contact with the CICE Consortium is the [Consortium Community Forum][forum].
+The first point of contact with the CICE Consortium is the Consortium Community [Forum][forum].
 This forum is monitored by Consortium members and also opened to the whole community.
 Please do not use our issue tracker for general support questions.
 
-[doc-resources]: https://github.com/CICE-Consortium/About-Us/wiki/Resource-Index#model-documentation
-[doc-running]: https://cice-consortium-cice.readthedocs.io/en/master/user_guide/ug_running.html
 [forum]: https://xenforo.cgd.ucar.edu/cesm/forums/cice-consortium.146/
 
 If you expect to make any changes to the code, we recommend that you first fork both the CICE and Icepack repositories.
 In order to incorporate your developments into the Consortium code it is imperative you follow the guidance for Pull Requests and requisite testing.
-Head over to our [Contribution guide][contributing] to learn more about how you can help improve CICE.
+Head over to our [Contributing][contributing] guide to learn more about how you can help improve CICE.
 
 [contributing]: https://github.com/CICE-Consortium/About-Us/wiki/Contributing
 
@@ -34,7 +32,7 @@ Head over to our [Contribution guide][contributing] to learn more about how you
 
 Information about the CICE model
 
-* **CICE Version Index**: https://github.com/CICE-Consortium/CICE/wiki/CICE-Version-Index
+* **CICE Release Table**: https://github.com/CICE-Consortium/CICE/wiki/CICE-Release-Table
 
    Numbered CICE releases since version 6 with associated documentation and DOIs.
 
diff --git a/cice.setup b/cice.setup
index 43fdd836c..3efe94827 100755
--- a/cice.setup
+++ b/cice.setup
@@ -40,6 +40,7 @@ set suitebuild  = true
 set suitereuse  = true
 set suiterun    = false
 set suitesubmit = true
+set ignoreuserset = false
 
 if ($#argv < 1) then
   set helpheader = 1
@@ -98,6 +99,7 @@ DESCRIPTION
      --acct        : account number for the batch submission
      --grid, -g    : grid, grid (default = ${grid})
      --set, -s     : case option setting(s), comma separated (default = " ")
+     --ignore-user-set: ignore ~/.cice_set if it exists
      --queue       : queue for the batch submission
 
    For testing
@@ -112,7 +114,7 @@ DESCRIPTION
      --diff        : generate comparison against another case
      --report      : automatically post results when tests are complete
      --coverage    : generate and report test coverage metrics when tests are complete,
-                     requires GNU compiler (ie. normally ``--env gnu``)
+                     requires GNU compiler (ie. 
normally --env gnu) --setup-only : for suite, setup testcases, no build, no submission --setup-build : for suite, setup and build testcases, no submission --setup-build-run : for suite, setup, build, and run interactively @@ -263,6 +265,10 @@ while (1) set suitesubmit = true shift argv + else if ("$option" == "--ignore-user-set") then + set ignoreuserset = true + shift argv + # arguments with settings else shift argv @@ -412,6 +418,17 @@ set vers = ${ICE_VERSION} set shhash = `echo ${hash} | cut -c 1-10` if ( ${dosuite} == 0 ) then + # grab user defined default sets + if ("${ignoreuserset}" == "false" && -e ~/.cice_set) then + set setsu1 = `cat ~/.cice_set` + # get rid of spaces if they exist! + set setsuser = `echo ${setsu1} | sed 's/ //g'` + if ( ${sets} == "" ) then + set sets = "${setsuser}" + else + set sets = "${setsuser},${sets}" + endif + endif set teststring = "${test} ${grid} ${pesx} ${sets}" if ( $bfbcomp != ${spval} ) then if ( ${sets} == "" ) then diff --git a/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 b/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 index df50dd99e..c3dc83a24 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 @@ -35,10 +35,11 @@ module ice_dyn_shared ndte ! number of subcycles: ndte=dt/dte character (len=char_len), public :: & - coriolis ! 'constant', 'zero', or 'latitude' + coriolis , & ! 'constant', 'zero', or 'latitude' + ssh_stress ! 'geostrophic' or 'coupled' logical (kind=log_kind), public :: & - revised_evp ! if true, use revised evp procedure + revised_evp ! if true, use revised evp procedure integer (kind=int_kind), public :: & kevp_kernel ! 0 = 2D org version @@ -475,9 +476,7 @@ subroutine dyn_prep2 (nx_block, ny_block, & integer (kind=int_kind) :: & i, j, ij -#ifdef coupled real (kind=dbl_kind) :: gravit -#endif logical (kind=log_kind), dimension(nx_block,ny_block) :: & iceumask_old ! old-time iceumask @@ -577,12 +576,12 @@ subroutine dyn_prep2 (nx_block, ny_block, & ! Define variables for momentum equation !----------------------------------------------------------------- -#ifdef coupled - call icepack_query_parameters(gravit_out=gravit) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) -#endif + if (trim(ssh_stress) == 'coupled') then + call icepack_query_parameters(gravit_out=gravit) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + endif do ij = 1, icellu i = indxui(ij) @@ -597,14 +596,18 @@ subroutine dyn_prep2 (nx_block, ny_block, & watery(i,j) = vocn(i,j)*cosw + uocn(i,j)*sinw*sign(c1,fm(i,j)) ! combine tilt with wind stress -#ifndef coupled - ! calculate tilt from geostrophic currents if needed - strtltx(i,j) = -fm(i,j)*vocn(i,j) - strtlty(i,j) = fm(i,j)*uocn(i,j) -#else - strtltx(i,j) = -gravit*umass(i,j)*ss_tltx(i,j) - strtlty(i,j) = -gravit*umass(i,j)*ss_tlty(i,j) -#endif + if (trim(ssh_stress) == 'geostrophic') then + ! 
calculate tilt from geostrophic currents if needed + strtltx(i,j) = -fm(i,j)*vocn(i,j) + strtlty(i,j) = fm(i,j)*uocn(i,j) + elseif (trim(ssh_stress) == 'coupled') then + strtltx(i,j) = -gravit*umass(i,j)*ss_tltx(i,j) + strtlty(i,j) = -gravit*umass(i,j)*ss_tlty(i,j) + else + call abort_ice(subname//' ERROR: unknown ssh_stress='//trim(ssh_stress), & + file=__FILE__, line=__LINE__) + endif + forcex(i,j) = strairx(i,j) + strtltx(i,j) forcey(i,j) = strairy(i,j) + strtlty(i,j) enddo diff --git a/cicecore/cicedynB/general/ice_flux.F90 b/cicecore/cicedynB/general/ice_flux.F90 index 6b16edb77..97b726fdb 100644 --- a/cicecore/cicedynB/general/ice_flux.F90 +++ b/cicecore/cicedynB/general/ice_flux.F90 @@ -217,7 +217,11 @@ module ice_flux fresh , & ! fresh water flux to ocean (kg/m^2/s) fsalt , & ! salt flux to ocean (kg/m^2/s) fhocn , & ! net heat flux to ocean (W/m^2) - fswthru ! shortwave penetrating to ocean (W/m^2) + fswthru , & ! shortwave penetrating to ocean (W/m^2) + fswthru_vdr , & ! vis dir shortwave penetrating to ocean (W/m^2) + fswthru_vdf , & ! vis dif shortwave penetrating to ocean (W/m^2) + fswthru_idr , & ! nir dir shortwave penetrating to ocean (W/m^2) + fswthru_idf ! nir dif shortwave penetrating to ocean (W/m^2) ! internal @@ -307,6 +311,11 @@ module ice_flux fresh_da, & ! fresh water flux to ocean due to data assim (kg/m^2/s) fsalt_da ! salt flux to ocean due to data assimilation(kg/m^2/s) + real (kind=dbl_kind), dimension (:,:,:,:), allocatable, public :: & + fswthrun_ai ! per-category fswthru * ai (W/m^2) + + logical (kind=log_kind), public :: send_i2x_per_cat = .false. + !----------------------------------------------------------------- ! internal !----------------------------------------------------------------- @@ -438,6 +447,10 @@ subroutine alloc_flux fsalt (nx_block,ny_block,max_blocks), & ! salt flux to ocean (kg/m^2/s) fhocn (nx_block,ny_block,max_blocks), & ! net heat flux to ocean (W/m^2) fswthru (nx_block,ny_block,max_blocks), & ! shortwave penetrating to ocean (W/m^2) + fswthru_vdr (nx_block,ny_block,max_blocks), & ! vis dir shortwave penetrating to ocean (W/m^2) + fswthru_vdf (nx_block,ny_block,max_blocks), & ! vis dif shortwave penetrating to ocean (W/m^2) + fswthru_idr (nx_block,ny_block,max_blocks), & ! nir dir shortwave penetrating to ocean (W/m^2) + fswthru_idf (nx_block,ny_block,max_blocks), & ! nir dif shortwave penetrating to ocean (W/m^2) scale_factor (nx_block,ny_block,max_blocks), & ! scaling factor for shortwave components strairx_ocn(nx_block,ny_block,max_blocks), & ! stress on ocean by air, x-direction strairy_ocn(nx_block,ny_block,max_blocks), & ! stress on ocean by air, y-direction @@ -684,6 +697,10 @@ subroutine init_coupler_flux fpond (:,:,:) = c0 fhocn (:,:,:) = c0 fswthru (:,:,:) = c0 + fswthru_vdr (:,:,:) = c0 + fswthru_vdf (:,:,:) = c0 + fswthru_idr (:,:,:) = c0 + fswthru_idf (:,:,:) = c0 fresh_da(:,:,:) = c0 ! data assimilation fsalt_da(:,:,:) = c0 flux_bio (:,:,:,:) = c0 ! bgc @@ -701,6 +718,11 @@ subroutine init_coupler_flux ffep (:,:,:,:)= c0 ffed (:,:,:,:)= c0 + if (send_i2x_per_cat) then + allocate(fswthrun_ai(nx_block,ny_block,ncat,max_blocks)) + fswthrun_ai(:,:,:,:) = c0 + endif + !----------------------------------------------------------------- ! 
derived or computed fields !----------------------------------------------------------------- @@ -783,6 +805,10 @@ subroutine init_flux_ocn fpond (:,:,:) = c0 fhocn (:,:,:) = c0 fswthru (:,:,:) = c0 + fswthru_vdr (:,:,:) = c0 + fswthru_vdf (:,:,:) = c0 + fswthru_idr (:,:,:) = c0 + fswthru_idf (:,:,:) = c0 faero_ocn (:,:,:,:) = c0 fiso_ocn (:,:,:,:) = c0 @@ -790,6 +816,10 @@ subroutine init_flux_ocn H2_16O_ocn (:,:,:) = c0 H2_18O_ocn (:,:,:) = c0 + if (send_i2x_per_cat) then + fswthrun_ai(:,:,:,:) = c0 + endif + end subroutine init_flux_ocn !======================================================================= @@ -978,6 +1008,8 @@ subroutine scale_fluxes (nx_block, ny_block, & Tref, Qref, & fresh, fsalt, & fhocn, fswthru, & + fswthru_vdr, fswthru_vdf, & + fswthru_idr, fswthru_idf, & faero_ocn, & alvdr, alidr, & alvdf, alidf, & @@ -1022,6 +1054,10 @@ subroutine scale_fluxes (nx_block, ny_block, & fsalt , & ! salt flux to ocean (kg/m2/s) fhocn , & ! actual ocn/ice heat flx (W/m**2) fswthru , & ! sw radiation through ice bot (W/m**2) + fswthru_vdr , & ! vis dir sw radiation through ice bot (W/m**2) + fswthru_vdf , & ! vis dif sw radiation through ice bot (W/m**2) + fswthru_idr , & ! nir dir sw radiation through ice bot (W/m**2) + fswthru_idf , & ! nir dif sw radiation through ice bot (W/m**2) alvdr , & ! visible, direct (fraction) alidr , & ! near-ir, direct (fraction) alvdf , & ! visible, diffuse (fraction) @@ -1090,6 +1126,10 @@ subroutine scale_fluxes (nx_block, ny_block, & fsalt (i,j) = fsalt (i,j) * ar fhocn (i,j) = fhocn (i,j) * ar fswthru (i,j) = fswthru (i,j) * ar + fswthru_vdr (i,j) = fswthru_vdr (i,j) * ar + fswthru_vdf (i,j) = fswthru_vdf (i,j) * ar + fswthru_idr (i,j) = fswthru_idr (i,j) * ar + fswthru_idf (i,j) = fswthru_idf (i,j) * ar alvdr (i,j) = alvdr (i,j) * ar alidr (i,j) = alidr (i,j) * ar alvdf (i,j) = alvdf (i,j) * ar @@ -1118,6 +1158,10 @@ subroutine scale_fluxes (nx_block, ny_block, & fsalt (i,j) = c0 fhocn (i,j) = c0 fswthru (i,j) = c0 + fswthru_vdr (i,j) = c0 + fswthru_vdf (i,j) = c0 + fswthru_idr (i,j) = c0 + fswthru_idf (i,j) = c0 alvdr (i,j) = c0 ! zero out albedo where ice is absent alidr (i,j) = c0 alvdf (i,j) = c0 diff --git a/cicecore/cicedynB/general/ice_forcing.F90 b/cicecore/cicedynB/general/ice_forcing.F90 index 66a7d9ef3..4c88037ed 100755 --- a/cicecore/cicedynB/general/ice_forcing.F90 +++ b/cicecore/cicedynB/general/ice_forcing.F90 @@ -1,3 +1,6 @@ +#ifdef ncdf +#define USE_NETCDF +#endif !======================================================================= ! ! Reads and interpolates forcing data for atmosphere and ocean quantities. @@ -300,9 +303,6 @@ subroutine init_forcing_ocn(dt) use ice_domain, only: nblocks use ice_domain_size, only: max_blocks use ice_flux, only: sss, sst, Tf -#ifdef ncdf - use netcdf -#endif real (kind=dbl_kind), intent(in) :: & dt ! time step @@ -866,7 +866,6 @@ subroutine read_data_nc (flag, recd, yr, ixm, ixx, ixp, & character(len=*), parameter :: subname = '(read_data_nc)' -#ifdef ncdf integer (kind=int_kind) :: & nrec , & ! record number to read n2, n4 , & ! like ixm and ixp, but @@ -967,9 +966,6 @@ subroutine read_data_nc (flag, recd, yr, ixm, ixx, ixp, & call ice_timer_stop(timer_readwrite) ! reading/writing -#else - field_data = c0 ! to satisfy intent(out) attribute -#endif end subroutine read_data_nc !======================================================================= @@ -1007,7 +1003,6 @@ subroutine read_data_nc_hycom (flag, recd, & intent(out) :: & field_data ! 2 values needed for interpolation -#ifdef ncdf ! 
local variables integer (kind=int_kind) :: & fid ! file id for netCDF routines @@ -1040,11 +1035,6 @@ subroutine read_data_nc_hycom (flag, recd, & call ice_timer_stop(timer_readwrite) ! reading/writing -#else - field_data = c0 ! to satisfy intent(out) attribute - write(*,*)'ERROR: CICE not compiled with NetCDF' - stop -#endif end subroutine read_data_nc_hycom !======================================================================= @@ -3342,9 +3332,6 @@ subroutine oned_data use ice_flux, only: uatm, vatm, Tair, fsw, fsnow, Qa, rhoa, frain -#ifdef ncdf - use netcdf - ! local parameters character (char_len_long) :: & @@ -3402,7 +3389,7 @@ subroutine oned_data Temp = work Tair(:,:,:) = Temp - if (my_task == master_task) status = nf90_close(fid) + call ice_close_nc(fid) ! hourly solar data beginning Jan 1, 1989, 01:00 met_file = fsw_file @@ -3412,7 +3399,7 @@ subroutine oned_data call ice_read_nc(fid,istep1,fieldname,work,diag) fsw(:,:,:) = work - if (my_task == master_task) status = nf90_close(fid) + call ice_close_nc(fid) ! hourly interpolated monthly data beginning Jan 1, 1989, 01:00 met_file = humid_file @@ -3426,7 +3413,7 @@ subroutine oned_data call ice_read_nc(fid,istep1,fieldname,work,diag) fsnow(:,:,:) = work - if (my_task == master_task) status = nf90_close(fid) + call ice_close_nc(fid) !------------------------------------------------------------------- ! Find specific humidity using Hyland-Wexler formulation @@ -3447,8 +3434,6 @@ subroutine oned_data cldf (:,:,:) = p25 ! cloud fraction frain(:,:,:) = c0 ! this is available in hourlymet_rh file -#endif - end subroutine oned_data !======================================================================= @@ -3648,7 +3633,7 @@ subroutine ocn_data_ncar_init use ice_blocks, only: nx_block, ny_block use ice_domain_size, only: max_blocks -#ifdef ncdf +#ifdef USE_NETCDF use netcdf #endif @@ -3664,7 +3649,6 @@ subroutine ocn_data_ncar_init 'T', 'S', 'hblt', 'U', 'V', & 'dhdx', 'dhdy', 'qdp' / -#ifdef ncdf integer (kind=int_kind) :: & fid , & ! file id dimid ! dimension id @@ -3673,7 +3657,6 @@ subroutine ocn_data_ncar_init status , & ! status flag nlat , & ! number of longitudes of data nlon ! number of latitudes of data -#endif real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & work1 @@ -3701,7 +3684,7 @@ subroutine ocn_data_ncar_init endif ! master_task if (trim(ocn_data_format) == 'nc') then -#ifdef ncdf +#ifdef USE_NETCDF if (my_task == master_task) then call ice_open_nc(sst_file, fid) @@ -3741,7 +3724,10 @@ subroutine ocn_data_ncar_init enddo ! month loop enddo ! field loop - if (my_task == master_task) status = nf90_close(fid) + if (my_task == master_task) call ice_close_nc(fid) +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined for '//trim(sst_file), & + file=__FILE__, line=__LINE__) #endif else ! binary format @@ -3803,11 +3789,11 @@ subroutine ocn_data_ncar_init_3D use ice_domain_size, only: max_blocks use ice_grid, only: to_ugrid, ANGLET use ice_read_write, only: ice_read_nc_uv -#ifdef ncdf +#ifdef USE_NETCDF use netcdf #endif -#ifdef ncdf +#ifdef USE_NETCDF integer (kind=int_kind) :: & n , & ! field index m , & ! month index @@ -3856,7 +3842,7 @@ subroutine ocn_data_ncar_init_3D endif ! master_task if (trim(ocn_data_format) == 'nc') then -#ifdef ncdf +#ifdef USE_NETCDF if (my_task == master_task) then call ice_open_nc(sst_file, fid) @@ -3902,7 +3888,7 @@ subroutine ocn_data_ncar_init_3D enddo ! month loop enddo ! 
field loop - if (my_task == master_task) status = nf90_close(fid) + if (my_task == master_task) call ice_close_nc(fid) ! Rotate vector quantities and shift to U-grid do n=4,6,2 @@ -3923,6 +3909,9 @@ subroutine ocn_data_ncar_init_3D enddo ! month loop enddo ! field loop +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) #endif else ! binary format @@ -4327,9 +4316,6 @@ subroutine ocn_data_hycom_init use ice_blocks, only: nx_block, ny_block use ice_domain, only: nblocks use ice_flux, only: sss, sst, Tf -#ifdef ncdf - use netcdf -#endif integer (kind=int_kind) :: & i, j, iblk , & ! horizontal indices @@ -4611,7 +4597,6 @@ subroutine read_data_nc_point (flag, recd, yr, ixm, ixx, ixp, & character(len=*), parameter :: subname = '(read_data_nc_point)' -#ifdef ncdf integer (kind=int_kind) :: & nrec , & ! record number to read n2, n4 , & ! like ixm and ixp, but @@ -4723,9 +4708,6 @@ subroutine read_data_nc_point (flag, recd, yr, ixm, ixx, ixp, & call ice_timer_stop(timer_readwrite) ! reading/writing -#else - field_data = c0 ! to satisfy intent(out) attribute -#endif end subroutine read_data_nc_point !======================================================================= @@ -4779,13 +4761,9 @@ subroutine ISPOL_data ! use ice_flux, only: uatm, vatm, Tair, fsw, Qa, rhoa, & frain, fsnow, flw -#ifdef ncdf - use netcdf -#endif !local parameters -#ifdef ncdf character (char_len_long) :: & met_file, & ! netcdf filename fieldname ! field name in netcdf file @@ -4822,7 +4800,6 @@ subroutine ISPOL_data sec1hr ! number of seconds in 1 hour logical (kind=log_kind) :: read1 -#endif integer (kind=int_kind) :: & recnum , & ! record number @@ -4830,7 +4807,6 @@ subroutine ISPOL_data character(len=*), parameter :: subname = '(ISPOL_data)' -#ifdef ncdf call icepack_query_parameters(secday_out=secday) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -4965,14 +4941,6 @@ subroutine ISPOL_data flw(:,:,:) = c1intp * flw_data_p(1) & + c2intp * flw_data_p(2) endif !nc -#else - - uatm(:,:,:) = c0 !wind velocity (m/s) - vatm(:,:,:) = c0 - fsw(:,:,:) = c0 - fsnow (:,:,:) = c0 - -#endif !flw given cldf and Tair calculated in prepare_forcing @@ -5015,11 +4983,7 @@ subroutine ocn_data_ispol_init ! use ice_gather_scatter use ice_read_write -#ifdef ncdf - use netcdf -#endif -#ifdef ncdf integer (kind=int_kind) :: & n , & ! field index m ! month index @@ -5038,7 +5002,6 @@ subroutine ocn_data_ispol_init integer (kind=int_kind) :: & status ! status flag -#endif character(len=*), parameter :: subname = '(ocn_data_ispol_init)' @@ -5058,7 +5021,6 @@ subroutine ocn_data_ispol_init endif ! master_task if (trim(ocn_data_format) == 'nc') then -#ifdef ncdf if (my_task == master_task) then call ice_open_nc(sst_file, fid) endif ! master_task @@ -5078,8 +5040,7 @@ subroutine ocn_data_ispol_init enddo ! month loop enddo ! field loop - if (my_task == master_task) status = nf90_close(fid) -#endif + if (my_task == master_task) call ice_close_nc(fid) else ! binary format call abort_ice (error_message=subname//'new ocean forcing is netcdf only', & @@ -5188,9 +5149,6 @@ subroutine get_wave_spec use ice_constants, only: c0 use ice_domain_size, only: nfreq use ice_timers, only: ice_timer_start, ice_timer_stop, timer_fsd -#ifdef ncdf - use netcdf -#endif ! local variables integer (kind=int_kind) :: & @@ -5228,16 +5186,19 @@ subroutine get_wave_spec ! 
read more realistic data from a file if ((trim(wave_spec_type) == 'constant').OR.(trim(wave_spec_type) == 'random')) then if (trim(wave_spec_file(1:4)) == 'unkn') then - call abort_ice (subname//'ERROR: wave_spec_file '//trim(wave_spec_file)) + call abort_ice (subname//'ERROR: wave_spec_file '//trim(wave_spec_file), & + file=__FILE__, line=__LINE__) else -#ifdef ncdf +#ifdef USE_NETCDF call ice_open_nc(wave_spec_file,fid) call ice_read_nc_xyf (fid, 1, 'efreq', wave_spectrum(:,:,:,:), dbug, & field_loc_center, field_type_scalar) call ice_close_nc(fid) #else - write (nu_diag,*) "wave spectrum file not available, requires ncdf" + write (nu_diag,*) "wave spectrum file not available, requires cpp USE_NETCDF" write (nu_diag,*) "wave spectrum file not available, using default profile" + call abort_ice (subname//'ERROR: wave_spec_file '//trim(wave_spec_file), & + file=__FILE__, line=__LINE__) #endif endif endif diff --git a/cicecore/cicedynB/general/ice_forcing_bgc.F90 b/cicecore/cicedynB/general/ice_forcing_bgc.F90 index 4eedcfb80..e5ef851fa 100644 --- a/cicecore/cicedynB/general/ice_forcing_bgc.F90 +++ b/cicecore/cicedynB/general/ice_forcing_bgc.F90 @@ -1,3 +1,6 @@ +#ifdef ncdf +#define USE_NETCDF +#endif !======================================================================= ! ! Reads and interpolates forcing data for biogeochemistry @@ -587,7 +590,6 @@ subroutine faero_data use ice_flux_bgc, only: faero_atm use ice_forcing, only: interp_coeff_monthly, read_clim_data_nc, interpolate_data -#ifdef ncdf ! local parameters real (kind=dbl_kind), dimension(:,:,:,:), allocatable, & @@ -672,7 +674,6 @@ subroutine faero_data where (faero_atm(:,:,:,:) > 1.e20) faero_atm(:,:,:,:) = c0 deallocate( aero1_data, aero2_data, aero3_data ) -#endif end subroutine faero_data @@ -688,7 +689,6 @@ subroutine fzaero_data use ice_flux_bgc, only: faero_atm use ice_forcing, only: interp_coeff_monthly, read_clim_data_nc, interpolate_data -#ifdef ncdf ! local parameters real (kind=dbl_kind), dimension(:,:,:,:), allocatable, & @@ -766,7 +766,6 @@ subroutine fzaero_data where (faero_atm(:,:,nlt_zaero(1),:) > 1.e20) faero_atm(:,:,nlt_zaero(1),:) = c0 deallocate( aero_data ) -#endif end subroutine fzaero_data @@ -780,10 +779,6 @@ subroutine init_bgc_data (fed1,fep1) use ice_read_write, only: ice_open_nc, ice_read_nc, ice_close_nc -#ifdef ncdf - use netcdf -#endif - real (kind=dbl_kind), dimension(nx_block, ny_block, max_blocks), intent(inout) :: & fed1, & ! first dissolved iron pool (nM) fep1 ! first particulate iron pool (nM) @@ -868,7 +863,7 @@ subroutine faero_optics gaer_bc_tab, & ! BC aerosol asymmetry parameter (cos(theta)) bcenh ! BC absorption enhancement facto -#ifdef ncdf +#ifdef USE_NETCDF use netcdf #endif @@ -876,7 +871,6 @@ subroutine faero_optics logical (kind=log_kind) :: modal_aero -#ifdef ncdf integer (kind=int_kind) :: & varid , & ! variable id status , & ! status output from netcdf routines @@ -891,7 +885,6 @@ subroutine faero_optics character (char_len_long) :: & optics_file, & ! netcdf filename fieldname ! field name in netcdf file -#endif character(len=*), parameter :: subname = '(faero_optics)' @@ -968,8 +961,8 @@ subroutine faero_optics if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) -#ifdef ncdf if (modal_aero) then +#ifdef USE_NETCDF optics_file = & '/usr/projects/climate/njeffery/DATA/CAM/snicar/snicar_optics_5bnd_mam_c140303.nc' @@ -1004,12 +997,11 @@ subroutine faero_optics call broadcast_array(bcenh(n,:,k), master_task) enddo enddo - endif ! 
modal_aero #else - if (modal_aero) then - call abort_ice(subname//'ERROR: netcdf required for modal_aero') - endif + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) #endif + endif ! modal_aero end subroutine faero_optics diff --git a/cicecore/cicedynB/general/ice_init.F90 b/cicecore/cicedynB/general/ice_init.F90 index 91c5d539d..d3b096eb3 100644 --- a/cicecore/cicedynB/general/ice_init.F90 +++ b/cicecore/cicedynB/general/ice_init.F90 @@ -59,7 +59,7 @@ subroutine input_data use ice_broadcast, only: broadcast_scalar, broadcast_array use ice_diagnostics, only: diag_file, print_global, print_points, latpnt, lonpnt - use ice_domain, only: close_boundaries, ns_boundary_type + use ice_domain, only: close_boundaries, ns_boundary_type, orca_halogrid use ice_domain_size, only: ncat, nilyr, nslyr, nblyr, nfsd, nfreq, & n_iso, n_aero, n_zaero, n_algae, & n_doc, n_dic, n_don, n_fed, n_fep, & @@ -92,12 +92,13 @@ subroutine input_data use ice_arrays_column, only: bgc_data_dir, fe_data_type use ice_grid, only: grid_file, gridcpl_file, kmt_file, & bathymetry_file, use_bathymetry, & + bathymetry_format, & grid_type, grid_format, & dxrect, dyrect use ice_dyn_shared, only: ndte, kdyn, revised_evp, yield_curve, & kevp_kernel, & basalstress, k1, k2, alphab, threshold_hw, & - Ktens, e_ratio, coriolis, & + Ktens, e_ratio, coriolis, ssh_stress, & kridge, ktransport, brlx, arlx use ice_transport_driver, only: advection, conserv_check use ice_restoring, only: restore_ice @@ -117,7 +118,8 @@ subroutine input_data ahmax, R_ice, R_pnd, R_snw, dT_mlt, rsnw_mlt, emissivity, & mu_rdg, hs0, dpscale, rfracmin, rfracmax, pndaspect, hs1, hp1, & a_rapid_mode, Rac_rapid_mode, aspect_rapid_mode, dSdt_slow_mode, & - phi_c_slow_mode, phi_i_mushy, kalg, atmiter_conv, Pstar, Cstar + phi_c_slow_mode, phi_i_mushy, kalg, atmiter_conv, Pstar, Cstar, & + sw_frac, sw_dtemp integer (kind=int_kind) :: ktherm, kstrength, krdg_partic, krdg_redist, natmiter, & kitd, kcatbound @@ -125,7 +127,8 @@ subroutine input_data character (len=char_len) :: shortwave, albedo_type, conduct, fbot_xfer_type, & tfrz_option, frzpnd, atmbndy, wave_spec_type - logical (kind=log_kind) :: calc_Tsfc, formdrag, highfreq, calc_strair, wave_spec + logical (kind=log_kind) :: calc_Tsfc, formdrag, highfreq, calc_strair, wave_spec, & + sw_redist logical (kind=log_kind) :: tr_iage, tr_FY, tr_lvl, tr_pond logical (kind=log_kind) :: tr_iso, tr_aero, tr_fsd @@ -149,8 +152,7 @@ subroutine input_data dt, npt, ndtd, numin, & runtype, runid, bfbflag, numax, & ice_ic, restart, restart_dir, restart_file, & - restart_ext, restart_coszen, use_restart_time, restart_format, & - lcdf64, & + restart_ext, use_restart_time, restart_format, lcdf64, & pointer_file, dumpfreq, dumpfreq_n, dump_last, & diagfreq, diag_type, diag_file, history_format,& print_global, print_points, latpnt, lonpnt, & @@ -161,10 +163,10 @@ subroutine input_data namelist /grid_nml/ & grid_format, grid_type, grid_file, kmt_file, & - bathymetry_file, use_bathymetry, nfsd, & + bathymetry_file, use_bathymetry, nfsd, bathymetry_format, & ncat, nilyr, nslyr, nblyr, & kcatbound, gridcpl_file, dxrect, dyrect, & - close_boundaries + close_boundaries, orca_halogrid namelist /tracer_nml/ & tr_iage, restart_age, & @@ -182,12 +184,13 @@ subroutine input_data namelist /thermo_nml/ & kitd, ktherm, conduct, ksno, & a_rapid_mode, Rac_rapid_mode, aspect_rapid_mode, & - dSdt_slow_mode, phi_c_slow_mode, phi_i_mushy + dSdt_slow_mode, phi_c_slow_mode, phi_i_mushy, & + sw_redist, sw_frac, 
sw_dtemp namelist /dynamics_nml/ & kdyn, ndte, revised_evp, yield_curve, & kevp_kernel, & - brlx, arlx, & + brlx, arlx, ssh_stress, & advection, coriolis, kridge, ktransport, & kstrength, krdg_partic, krdg_redist, mu_rdg, & e_ratio, Ktens, Cf, basalstress, & @@ -213,7 +216,7 @@ subroutine input_data oceanmixed_ice, restore_ice, restore_ocn, trestore, & precip_units, default_season, wave_spec_type,nfreq, & atm_data_type, ocn_data_type, bgc_data_type, fe_data_type, & - ice_data_type, wave_spec_file, & + ice_data_type, wave_spec_file, restart_coszen, & fyear_init, ycycle, & atm_data_dir, ocn_data_dir, bgc_data_dir, & atm_data_format, ocn_data_format, rotate_wind, & @@ -280,8 +283,10 @@ subroutine input_data grid_type = 'rectangular' ! define rectangular grid internally grid_file = 'unknown_grid_file' gridcpl_file = 'unknown_gridcpl_file' - bathymetry_file = 'unknown_bathymetry_file' - use_bathymetry = .false. + orca_halogrid = .false. ! orca haloed grid + bathymetry_file = 'unknown_bathymetry_file' + bathymetry_format = 'default' + use_bathymetry = .false. kmt_file = 'unknown_kmt_file' version_name = 'unknown_version_name' ncat = 0 ! number of ice thickness categories @@ -325,6 +330,7 @@ subroutine input_data ktherm = 1 ! -1 = OFF, 0 = 0-layer, 1 = BL99, 2 = mushy thermo conduct = 'bubbly' ! 'MU71' or 'bubbly' (Pringle et al 2007) coriolis = 'latitude' ! latitude dependent, or 'constant' + ssh_stress = 'geostrophic' ! 'geostrophic' or 'coupled' kridge = 1 ! -1 = off, 1 = on ktransport = 1 ! -1 = off, 1 = on calc_Tsfc = .true. ! calculate surface temperature @@ -436,6 +442,11 @@ subroutine input_data phi_c_slow_mode = 0.05_dbl_kind ! critical liquid fraction porosity cutoff phi_i_mushy = 0.85_dbl_kind ! liquid fraction of congelation ice + ! shortwave redistribution in the thermodynamics + sw_redist = .false. + sw_frac = 0.9_dbl_kind + sw_dtemp = 0.02_dbl_kind + !----------------------------------------------------------------- ! read from input file !----------------------------------------------------------------- @@ -578,7 +589,9 @@ subroutine input_data call broadcast_scalar(grid_type, master_task) call broadcast_scalar(grid_file, master_task) call broadcast_scalar(gridcpl_file, master_task) + call broadcast_scalar(orca_halogrid, master_task) call broadcast_scalar(bathymetry_file, master_task) + call broadcast_scalar(bathymetry_format, master_task) call broadcast_scalar(use_bathymetry, master_task) call broadcast_scalar(kmt_file, master_task) call broadcast_scalar(kitd, master_task) @@ -612,6 +625,7 @@ subroutine input_data call broadcast_scalar(albedo_type, master_task) call broadcast_scalar(ktherm, master_task) call broadcast_scalar(coriolis, master_task) + call broadcast_scalar(ssh_stress, master_task) call broadcast_scalar(kridge, master_task) call broadcast_scalar(ktransport, master_task) call broadcast_scalar(conduct, master_task) @@ -717,6 +731,9 @@ subroutine input_data call broadcast_scalar(dSdt_slow_mode, master_task) call broadcast_scalar(phi_c_slow_mode, master_task) call broadcast_scalar(phi_i_mushy, master_task) + call broadcast_scalar(sw_redist, master_task) + call broadcast_scalar(sw_frac, master_task) + call broadcast_scalar(sw_dtemp, master_task) #ifdef CESMCOUPLED pointer_file = trim(pointer_file) // trim(inst_suffix) @@ -778,16 +795,6 @@ subroutine input_data abort_list = trim(abort_list)//":1" endif -#ifndef ncdf - if (grid_format /= 'bin' .or. atm_data_format /= 'bin' .or. 
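!=======================================================================
! [editor's note, not part of the patch] The defaults above introduce
! the new shortwave redistribution knobs.  Below is a namelist input
! fragment that enables them for BL99 thermodynamics (ktherm=1), the
! combination the consistency warning added just below asks for; the
! numeric values shown are simply the defaults set above:

   &thermo_nml
      ktherm    = 1
      conduct   = 'bubbly'
      sw_redist = .true.
      sw_frac   = 0.9
      sw_dtemp  = 0.02
   /
!=======================================================================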
ocn_data_format /= 'bin') then - if (my_task == master_task) then - write(nu_diag,*) subname//' ERROR: ncdf CPP flag unset, data formats must be bin' - write(nu_diag,*) subname//' ERROR: check grid_format, atm_data_format, ocn_data_format or set ncdf CPP' - endif - abort_list = trim(abort_list)//":2" - endif -#endif - if (advection /= 'remap' .and. advection /= 'upwind' .and. advection /= 'none') then if (my_task == master_task) write(nu_diag,*) subname//' ERROR: invalid advection=',trim(advection) abort_list = trim(abort_list)//":3" @@ -970,6 +977,12 @@ subroutine input_data endif endif !tcraig + if (ktherm == 1 .and. .not.sw_redist) then + if (my_task == master_task) then + write(nu_diag,*) subname//' WARNING: ktherm = 1 and sw_redist = ',sw_redist + write(nu_diag,*) subname//' WARNING: For consistency, set sw_redist = .true.' + endif + endif if (formdrag) then if (trim(atmbndy) == 'constant') then @@ -1083,6 +1096,7 @@ subroutine input_data tmpstr2 = ' bathymetric input data is not used' endif write(nu_diag,1012) ' use_bathymetry = ', use_bathymetry,trim(tmpstr2) + write(nu_diag,*) ' bathymetry_format= ', trim(bathymetry_format) endif write(nu_diag,1022) ' nilyr = ', nilyr, ' number of ice layers (equal thickness)' write(nu_diag,1022) ' nslyr = ', nslyr, ' number of snow layers (equal thickness)' @@ -1153,6 +1167,13 @@ subroutine input_data endif write(nu_diag,*) 'coriolis = ',trim(coriolis),trim(tmpstr2) + if (trim(ssh_stress) == 'geostrophic') then + tmpstr2 = ': from ocean velocity' + elseif (trim(ssh_stress) == 'coupled') then + tmpstr2 = ': from coupled sea surface height gradients' + endif + write(nu_diag,*) 'ssh_stress = ',trim(ssh_stress),trim(tmpstr2) + if (ktransport == 1) then tmpstr2 = ' transport enabled' if (trim(advection) == 'remap') then @@ -1177,8 +1198,8 @@ subroutine input_data write(nu_diag,1007) ' k2 = ', k2, ' free parameter for landfast ice' write(nu_diag,1007) ' alphab = ', alphab, ' factor for landfast ice' write(nu_diag,1007) ' threshold_hw = ', threshold_hw, ' max water depth for grounding ice' - write(nu_diag,1007) ' Ktens = ', Ktens, ' tensile strength factor' endif + write(nu_diag,1007) ' Ktens = ', Ktens, ' tensile strength factor' endif ! 
kdyn enabled write(nu_diag,*) ' ' @@ -1240,6 +1261,9 @@ subroutine input_data write(nu_diag,1007) ' ksno = ', ksno,' snow thermal conductivity' if (ktherm == 1) & write(nu_diag,*) 'conduct = ', trim(conduct),' ice thermal conductivity' + write(nu_diag,1012) ' sw_redist = ', sw_redist,' redistribute internal shortwave to surface' + write(nu_diag,1002) ' sw_frac = ', sw_frac,' fraction redistributed' + write(nu_diag,1002) ' sw_dtemp = ', sw_dtemp,' temperature difference from freezing to redistribute' if (ktherm == 2) then write(nu_diag,1002) ' a_rapid_mode = ', a_rapid_mode,' brine channel diameter' write(nu_diag,1007) ' Rac_rapid_mode = ', Rac_rapid_mode,' critical Rayleigh number' @@ -1291,12 +1315,12 @@ subroutine input_data write(nu_diag,1012) ' calc_strair = ', calc_strair,' calculate wind stress and speed' write(nu_diag,1012) ' rotate_wind = ', rotate_wind,' rotate wind/stress to computational grid' write(nu_diag,1012) ' formdrag = ', formdrag,' use form drag parameterization' - if (trim(atmbndy) == 'constant') then + if (trim(atmbndy) == 'default') then tmpstr2 = ': stability-based boundary layer' write(nu_diag,1012) ' highfreq = ', highfreq,' high-frequency atmospheric coupling' write(nu_diag,1022) ' natmiter = ', natmiter,' number of atmo boundary layer iterations' write(nu_diag,1006) ' atmiter_conv = ', atmiter_conv,' convergence criterion for ustar' - elseif (trim(atmbndy) == 'default') then + elseif (trim(atmbndy) == 'constant') then tmpstr2 = ': boundary layer uses bulk transfer coefficients' endif write(nu_diag,*) 'atmbndy = ', trim(atmbndy),trim(tmpstr2) @@ -1310,6 +1334,11 @@ subroutine input_data tmpstr2 = ' ocean mixed layer calculation (SST) disabled' endif write(nu_diag,1012) ' oceanmixed_ice = ', oceanmixed_ice,trim(tmpstr2) + if (oceanmixed_ice) then + write(nu_diag,*) ' WARNING: ocean mixed layer ON' + write(nu_diag,*) ' WARNING: will impact ocean forcing interaction' + write(nu_diag,*) ' WARNING: coupled forcing will be modified by mixed layer routine' + endif if (trim(tfrz_option) == 'minus1p8') then tmpstr2 = ': constant ocean freezing temperature (-1.8C)' elseif (trim(tfrz_option) == 'linear_salt') then @@ -1486,6 +1515,8 @@ subroutine input_data endif write(nu_diag,1010) ' close_boundaries = ', & close_boundaries + write(nu_diag,1010) ' orca_halogrid = ', & + orca_halogrid write(nu_diag,1010) ' conserv_check = ', conserv_check @@ -1538,17 +1569,6 @@ subroutine input_data if (restore_ice .or. restore_ocn) & write(nu_diag,1020) ' trestore = ', trestore -#ifdef coupled - if( oceanmixed_ice ) then - write(nu_diag,*) subname//' WARNING ** WARNING ** WARNING ** WARNING ' - write(nu_diag,*) subname//' WARNING: coupled CPP and oceanmixed_ice namelist are BOTH ON' - write(nu_diag,*) subname//' WARNING: Ocean data received from coupler will' - write(nu_diag,*) subname//' WARNING: be altered by mixed layer routine!' 
- write(nu_diag,*) subname//' WARNING ** WARNING ** WARNING ** WARNING ' - write(nu_diag,*) ' ' - endif -#endif - write(nu_diag,*) ' ' write(nu_diag,'(a30,2f8.2)') 'Diagnostic point 1: lat, lon =', & latpnt(1), lonpnt(1) @@ -1630,7 +1650,8 @@ subroutine input_data wave_spec_type_in = wave_spec_type, & wave_spec_in=wave_spec, nfreq_in=nfreq, & tfrz_option_in=tfrz_option, kalg_in=kalg, fbot_xfer_type_in=fbot_xfer_type, & - Pstar_in=Pstar, Cstar_in=Cstar) + Pstar_in=Pstar, Cstar_in=Cstar, & + sw_redist_in=sw_redist, sw_frac_in=sw_frac, sw_dtemp_in=sw_dtemp) call icepack_init_tracer_flags(tr_iage_in=tr_iage, tr_FY_in=tr_FY, & tr_lvl_in=tr_lvl, tr_iso_in=tr_iso, tr_aero_in=tr_aero, & tr_fsd_in=tr_fsd, tr_pond_in=tr_pond, & diff --git a/cicecore/cicedynB/general/ice_step_mod.F90 b/cicecore/cicedynB/general/ice_step_mod.F90 index 2f1a1c75b..7a2493d58 100644 --- a/cicecore/cicedynB/general/ice_step_mod.F90 +++ b/cicecore/cicedynB/general/ice_step_mod.F90 @@ -78,7 +78,8 @@ subroutine prep_radiation (iblk) use ice_flux, only: scale_factor, swvdr, swvdf, swidr, swidf, & alvdr_ai, alvdf_ai, alidr_ai, alidf_ai, & alvdr_init, alvdf_init, alidr_init, alidf_init - use ice_arrays_column, only: fswsfcn, fswintn, fswthrun, & + use ice_arrays_column, only: fswsfcn, fswintn, & + fswthrun, fswthrun_vdr, fswthrun_vdf, fswthrun_idr, fswthrun_idf, & fswpenln, Sswabsn, Iswabsn use ice_state, only: aice, aicen use ice_timers, only: ice_timer_start, ice_timer_stop, timer_sw @@ -130,7 +131,12 @@ subroutine prep_radiation (iblk) alvdr_ai = alvdr_ai(i,j, iblk), alvdf_ai = alvdf_ai(i,j, iblk), & alidr_ai = alidr_ai(i,j, iblk), alidf_ai = alidf_ai(i,j, iblk), & fswsfcn = fswsfcn (i,j, :,iblk), fswintn = fswintn (i,j, :,iblk), & - fswthrun = fswthrun(i,j, :,iblk), fswpenln = fswpenln(i,j,:,:,iblk), & + fswthrun = fswthrun(i,j, :,iblk), & + fswthrun_vdr = fswthrun_vdr(i,j, :,iblk), & + fswthrun_vdf = fswthrun_vdf(i,j, :,iblk), & + fswthrun_idr = fswthrun_idr(i,j, :,iblk), & + fswthrun_idf = fswthrun_idf(i,j, :,iblk), & + fswpenln = fswpenln(i,j,:,:,iblk), & Sswabsn = Sswabsn (i,j,:,:,iblk), Iswabsn = Iswabsn (i,j,:,:,iblk)) enddo ! i @@ -157,7 +163,8 @@ subroutine step_therm1 (dt, iblk) Cdn_ocn, Cdn_ocn_skin, Cdn_ocn_floe, Cdn_ocn_keel, Cdn_atm_ratio, & Cdn_atm, Cdn_atm_skin, Cdn_atm_floe, Cdn_atm_rdg, Cdn_atm_pond, & hfreebd, hdraft, hridge, distrdg, hkeel, dkeel, lfloe, dfloe, & - fswsfcn, fswintn, fswthrun, Sswabsn, Iswabsn + fswsfcn, fswintn, Sswabsn, Iswabsn, & + fswthrun, fswthrun_vdr, fswthrun_vdf, fswthrun_idr, fswthrun_idf use ice_blocks, only: block, get_block, nx_block, ny_block use ice_calendar, only: yday use ice_domain, only: blocks_ice @@ -168,8 +175,10 @@ subroutine step_therm1 (dt, iblk) flw, fsnow, fpond, sss, mlt_onset, frz_onset, fcondbotn, fcondbot, & frain, Tair, strairxT, strairyT, fsurf, fcondtop, fsens, & flat, fswabs, flwout, evap, evaps, evapi, Tref, Qref, Uref, fresh, fsalt, fhocn, & - fswthru, meltt, melts, meltb, congel, snoice, & - flatn_f, fsensn_f, fsurfn_f, fcondtopn_f + fswthru, fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & + meltt, melts, meltb, congel, snoice, & + flatn_f, fsensn_f, fsurfn_f, fcondtopn_f, & + send_i2x_per_cat, fswthrun_ai use ice_flux_bgc, only: dsnown, faero_atm, faero_ocn, fiso_atm, fiso_ocn, & Qa_iso, Qref_iso, fiso_evap, HDO_ocn, H2_16O_ocn, H2_18O_ocn use ice_grid, only: lmask_n, lmask_s, tmask @@ -302,7 +311,8 @@ subroutine step_therm1 (dt, iblk) enddo endif ! 
tr_aero - if (tmask(i,j,iblk)) & + if (tmask(i,j,iblk)) then + call icepack_step_therm1(dt=dt, ncat=ncat, & nilyr=nilyr, nslyr=nslyr, & aicen_init = aicen_init (i,j,:,iblk), & @@ -389,6 +399,10 @@ subroutine step_therm1 (dt, iblk) fswsfcn = fswsfcn (i,j,:,iblk), & fswintn = fswintn (i,j,:,iblk), & fswthrun = fswthrun (i,j,:,iblk), & + fswthrun_vdr = fswthrun_vdr (i,j,:,iblk),& + fswthrun_vdf = fswthrun_vdf (i,j,:,iblk),& + fswthrun_idr = fswthrun_idr (i,j,:,iblk),& + fswthrun_idf = fswthrun_idf (i,j,:,iblk),& fswabs = fswabs (i,j, iblk), & flwout = flwout (i,j, iblk), & Sswabsn = Sswabsn (i,j,:,:,iblk), & @@ -405,6 +419,10 @@ subroutine step_therm1 (dt, iblk) fsalt = fsalt (i,j, iblk), & fhocn = fhocn (i,j, iblk), & fswthru = fswthru (i,j, iblk), & + fswthru_vdr = fswthru_vdr (i,j, iblk),& + fswthru_vdf = fswthru_vdf (i,j, iblk),& + fswthru_idr = fswthru_idr (i,j, iblk),& + fswthru_idf = fswthru_idf (i,j, iblk),& flatn_f = flatn_f (i,j,:,iblk), & fsensn_f = fsensn_f (i,j,:,iblk), & fsurfn_f = fsurfn_f (i,j,:,iblk), & @@ -436,6 +454,21 @@ subroutine step_therm1 (dt, iblk) frz_onset = frz_onset (i,j, iblk), & yday=yday, prescribed_ice=prescribed_ice) + !----------------------------------------------------------------- + ! handle per-category i2x fields, no merging + !----------------------------------------------------------------- + + if (send_i2x_per_cat) then + do n = 1, ncat + ! TODO (mvertens, 2018-12-22): do we need to add the band separated quantities + ! for MOM6 here also? + + fswthrun_ai(i,j,n,iblk) = fswthrun(i,j,n,iblk)*aicen_init(i,j,n,iblk) + enddo ! ncat + endif + + endif + if (tr_iso) then do n = 1, ncat if (vicen(i,j,n,iblk) > puny) & @@ -985,7 +1018,8 @@ end subroutine step_dyn_ridge subroutine step_radiation (dt, iblk) use ice_arrays_column, only: ffracn, dhsn, & - fswsfcn, fswintn, fswthrun, fswpenln, Sswabsn, Iswabsn, & + fswsfcn, fswintn, fswpenln, Sswabsn, Iswabsn, & + fswthrun, fswthrun_vdr, fswthrun_vdf, fswthrun_idr, fswthrun_idf, & albicen, albsnon, albpndn, & alvdrn, alidrn, alvdfn, alidfn, apeffn, trcrn_sw, snowfracn, & kaer_tab, waer_tab, gaer_tab, kaer_bc_tab, waer_bc_tab, & @@ -1122,7 +1156,12 @@ subroutine step_radiation (dt, iblk) alvdrn =alvdrn (i,j,: ,iblk), alvdfn =alvdfn (i,j,: ,iblk), & alidrn =alidrn (i,j,: ,iblk), alidfn =alidfn (i,j,: ,iblk), & fswsfcn =fswsfcn (i,j,: ,iblk), fswintn =fswintn (i,j,: ,iblk), & - fswthrun =fswthrun (i,j,: ,iblk), fswpenln=fswpenln(i,j,:,:,iblk), & + fswthrun =fswthrun (i,j,: ,iblk), & + fswthrun_vdr =fswthrun_vdr (i,j,: ,iblk), & + fswthrun_vdf =fswthrun_vdf (i,j,: ,iblk), & + fswthrun_idr =fswthrun_idr (i,j,: ,iblk), & + fswthrun_idf =fswthrun_idf (i,j,: ,iblk), & + fswpenln=fswpenln(i,j,:,:,iblk), & Sswabsn =Sswabsn (i,j,:,:,iblk), Iswabsn =Iswabsn (i,j,:,:,iblk), & albicen =albicen (i,j,: ,iblk), albsnon =albsnon (i,j,: ,iblk), & albpndn =albpndn (i,j,: ,iblk), apeffn =apeffn (i,j,: ,iblk), & diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_communicate.F90 b/cicecore/cicedynB/infrastructure/comm/mpi/ice_communicate.F90 index d574ebdfe..a7d186083 100644 --- a/cicecore/cicedynB/infrastructure/comm/mpi/ice_communicate.F90 +++ b/cicecore/cicedynB/infrastructure/comm/mpi/ice_communicate.F90 @@ -45,6 +45,9 @@ module ice_communicate mpitagHalo = 1, &! MPI tags for various mpitag_gs = 1000 ! communication patterns + logical (log_kind), public :: & + add_mpi_barriers = .false. ! 
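!=======================================================================
! [editor's note, not part of the patch] add_mpi_barriers, declared
! above, replaces the old compile-time gather_scatter_barrier cpp in
! the ice_gather_scatter.F90 hunks below.  The ice_barrier routine
! itself is not shown in this patch; in the MPI build it presumably
! reduces to the barrier the old cpp guard called directly (sketch),
! while the serial build would make it a no-op:

      subroutine ice_barrier()
         integer (int_kind) :: ierr
         call MPI_BARRIER(MPI_COMM_ICE, ierr)   ! sync all tasks in the ice communicator
      end subroutine ice_barrier

! so every former "#ifdef gather_scatter_barrier" block becomes a cheap
! runtime test:
!
!      if (add_mpi_barriers) then
!         call ice_barrier()
!      endif
!=======================================================================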
turn on mpi barriers for throttling + !*********************************************************************** contains @@ -98,7 +101,11 @@ subroutine init_communicate(mpicom) master_task = 0 call MPI_COMM_RANK (MPI_COMM_ICE, my_task, ierr) +#if (defined NO_R16) + mpiR16 = MPI_REAL8 +#else mpiR16 = MPI_REAL16 +#endif mpiR8 = MPI_REAL8 mpiR4 = MPI_REAL4 diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_gather_scatter.F90 b/cicecore/cicedynB/infrastructure/comm/mpi/ice_gather_scatter.F90 index ba6476904..010a5c8c4 100644 --- a/cicecore/cicedynB/infrastructure/comm/mpi/ice_gather_scatter.F90 +++ b/cicecore/cicedynB/infrastructure/comm/mpi/ice_gather_scatter.F90 @@ -16,7 +16,8 @@ module ice_gather_scatter use mpi ! MPI Fortran module use ice_kinds_mod - use ice_communicate, only: my_task, mpiR8, mpiR4, mpitag_gs, MPI_COMM_ICE + use ice_communicate, only: my_task, mpiR8, mpiR4, mpitag_gs, MPI_COMM_ICE, & + ice_barrier, add_mpi_barriers use ice_constants, only: spval_dbl, c0, & field_loc_center, field_loc_NEcorner, field_loc_Nface, field_loc_Eface, & field_loc_noupdate, & @@ -233,9 +234,9 @@ subroutine gather_global_dbl(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) endif -#ifdef gather_scatter_barrier - call MPI_BARRIER(MPI_COMM_ICE, ierr) -#endif + if (add_mpi_barriers) then + call ice_barrier() + endif !----------------------------------------------------------------------- @@ -400,9 +401,9 @@ subroutine gather_global_real(ARRAY_G, ARRAY, dst_task, src_dist) endif -#ifdef gather_scatter_barrier - call MPI_BARRIER(MPI_COMM_ICE, ierr) -#endif + if (add_mpi_barriers) then + call ice_barrier() + endif !----------------------------------------------------------------------- @@ -567,9 +568,9 @@ subroutine gather_global_int(ARRAY_G, ARRAY, dst_task, src_dist) endif -#ifdef gather_scatter_barrier - call MPI_BARRIER(MPI_COMM_ICE, ierr) -#endif + if (add_mpi_barriers) then + call ice_barrier() + endif !----------------------------------------------------------------------- @@ -961,9 +962,9 @@ subroutine gather_global_ext_dbl(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) endif -#ifdef gather_scatter_barrier - call MPI_BARRIER(MPI_COMM_ICE, ierr) -#endif + if (add_mpi_barriers) then + call ice_barrier() + endif !----------------------------------------------------------------------- @@ -1284,9 +1285,9 @@ subroutine gather_global_ext_int(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) endif -#ifdef gather_scatter_barrier - call MPI_BARRIER(MPI_COMM_ICE, ierr) -#endif + if (add_mpi_barriers) then + call ice_barrier() + endif !----------------------------------------------------------------------- @@ -1607,9 +1608,9 @@ subroutine gather_global_ext_log(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) endif -#ifdef gather_scatter_barrier - call MPI_BARRIER(MPI_COMM_ICE, ierr) -#endif + if (add_mpi_barriers) then + call ice_barrier() + endif !----------------------------------------------------------------------- @@ -1983,9 +1984,9 @@ subroutine scatter_global_dbl(ARRAY, ARRAY_G, src_task, dst_dist, & enddo endif -#ifdef gather_scatter_barrier - call MPI_BARRIER(MPI_COMM_ICE, ierr) -#endif + if (add_mpi_barriers) then + call ice_barrier() + endif !----------------------------------------------------------------------- @@ -2372,9 +2373,9 @@ subroutine scatter_global_real(ARRAY, ARRAY_G, src_task, dst_dist, & enddo endif -#ifdef gather_scatter_barrier - call MPI_BARRIER(MPI_COMM_ICE, ierr) -#endif + if (add_mpi_barriers) then + call ice_barrier() + endif 
!----------------------------------------------------------------------- @@ -2761,9 +2762,9 @@ subroutine scatter_global_int(ARRAY, ARRAY_G, src_task, dst_dist, & enddo endif -#ifdef gather_scatter_barrier - call MPI_BARRIER(MPI_COMM_ICE, ierr) -#endif + if (add_mpi_barriers) then + call ice_barrier() + endif !----------------------------------------------------------------------- @@ -3093,9 +3094,9 @@ subroutine scatter_global_ext(ARRAY, ARRAY_G, src_task, dst_dist) deallocate(rcv_request, rcv_status) endif -#ifdef gather_scatter_barrier - call MPI_BARRIER(MPI_COMM_ICE, ierr) -#endif + if (add_mpi_barriers) then + call ice_barrier() + endif !----------------------------------------------------------------------- @@ -3379,9 +3380,9 @@ subroutine scatter_global_stress(ARRAY, ARRAY_G1, ARRAY_G2, & deallocate(rcv_request, rcv_status) endif -#ifdef gather_scatter_barrier - call MPI_BARRIER(MPI_COMM_ICE, ierr) -#endif + if (add_mpi_barriers) then + call ice_barrier() + endif !----------------------------------------------------------------------- diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_reprosum.F90 b/cicecore/cicedynB/infrastructure/comm/mpi/ice_reprosum.F90 index f85109339..27f66f712 100644 --- a/cicecore/cicedynB/infrastructure/comm/mpi/ice_reprosum.F90 +++ b/cicecore/cicedynB/infrastructure/comm/mpi/ice_reprosum.F90 @@ -39,7 +39,7 @@ MODULE ice_reprosum #ifndef SERIAL_REMOVE_MPI use mpi ! MPI Fortran module #endif -#if ( defined noI8 ) +#if defined (NO_I8) ! Workaround for when shr_kind_i8 is not supported. use ice_kinds_mod, only: r8 => dbl_kind, i8 => int_kind #else @@ -1032,7 +1032,7 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & #ifdef SERIAL_REMOVE_MPI i8_arr_gsum_level = i8_arr_lsum_level #else -#if ( defined noI8 ) +#if defined (NO_I8) ! Workaround for when i8 is not supported. ! if (detailed_timing) call xicex_timer_start("repro_sum_allr_i4") call mpi_allreduce (i8_arr_lsum_level, i8_arr_gsum_level, & diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_communicate.F90 b/cicecore/cicedynB/infrastructure/comm/serial/ice_communicate.F90 index 2468f485b..c9df264dd 100644 --- a/cicecore/cicedynB/infrastructure/comm/serial/ice_communicate.F90 +++ b/cicecore/cicedynB/infrastructure/comm/serial/ice_communicate.F90 @@ -27,6 +27,9 @@ module ice_communicate my_task, &! MPI task number for this task master_task ! task number of master task + logical (log_kind), public :: & + add_mpi_barriers = .false. ! turn on mpi barriers for throttling + !*********************************************************************** contains @@ -43,12 +46,6 @@ subroutine init_communicate ! !----------------------------------------------------------------------- -#ifdef coupled - use mpi ! MPI Fortran module - - integer (int_kind) :: ierr ! MPI error flag -#endif - character(len=*), parameter :: subname = '(init_communicate)' !----------------------------------------------------------------------- @@ -58,27 +55,9 @@ subroutine init_communicate ! !----------------------------------------------------------------------- -#ifdef coupled - call MPI_INIT(ierr) - call MPI_COMM_RANK (MPI_COMM_ICE, my_task, ierr) -#else my_task = 0 -#endif - master_task = 0 -#ifdef coupled -!----------------------------------------------------------------------- -! -! On some 64-bit machines where real_kind and dbl_kind are -! identical, the MPI implementation uses MPI_REAL for both. -! In these cases, set MPI_DBL to MPI_REAL. -! 
-!----------------------------------------------------------------------- - - MPI_DBL = MPI_DOUBLE_PRECISION - -#endif !----------------------------------------------------------------------- end subroutine init_communicate @@ -136,11 +115,6 @@ subroutine create_communicator(new_comm, num_procs) ! this routine should be called from init_domain1 when the ! domain configuration (e.g. nprocs_btrop) has been determined -#ifdef coupled - - use mpi ! MPI Fortran module - -#endif ! !INPUT PARAMETERS: integer (int_kind), intent(in) :: & @@ -151,54 +125,8 @@ subroutine create_communicator(new_comm, num_procs) integer (int_kind), intent(out) :: & new_comm ! new communicator for this distribution -#ifdef coupled -!----------------------------------------------------------------------- -! -! local variables -! -!----------------------------------------------------------------------- - - integer (int_kind) :: & - MPI_GROUP_ICE, &! group of processors assigned to ice - MPI_GROUP_NEW ! group of processors assigned to new dist - - integer (int_kind) :: & - ierr ! error flag for MPI comms - - integer (int_kind), dimension(3) :: & - range ! range of tasks assigned to new dist - ! (assumed 0,num_procs-1) - - character(len=*), parameter :: subname = '(create_communicator)' - -!----------------------------------------------------------------------- -! -! determine group of processes assigned to distribution -! -!----------------------------------------------------------------------- - - call MPI_COMM_GROUP (MPI_COMM_ICE, MPI_GROUP_ICE, ierr) - - range(1) = 0 - range(2) = num_procs-1 - range(3) = 1 - -!----------------------------------------------------------------------- -! -! create subroup and communicator for new distribution -! note: MPI_COMM_CREATE must be called by all procs in MPI_COMM_ICE -! -!----------------------------------------------------------------------- - - call MPI_GROUP_RANGE_INCL(MPI_GROUP_ICE, 1, range, & - MPI_GROUP_NEW, ierr) - - call MPI_COMM_CREATE (MPI_COMM_ICE, MPI_GROUP_NEW, & - new_comm, ierr) - -#else new_comm = MPI_COMM_ICE -#endif + !----------------------------------------------------------------------- end subroutine create_communicator diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_reprosum.F90 b/cicecore/cicedynB/infrastructure/comm/serial/ice_reprosum.F90 index ec852e2c3..1e4307535 100644 --- a/cicecore/cicedynB/infrastructure/comm/serial/ice_reprosum.F90 +++ b/cicecore/cicedynB/infrastructure/comm/serial/ice_reprosum.F90 @@ -40,7 +40,7 @@ MODULE ice_reprosum #ifndef SERIAL_REMOVE_MPI use mpi ! MPI Fortran module #endif -#if ( defined noI8 ) +#if defined (NO_I8) ! Workaround for when shr_kind_i8 is not supported. use ice_kinds_mod, only: r8 => dbl_kind, i8 => int_kind #else @@ -1033,7 +1033,7 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & #ifdef SERIAL_REMOVE_MPI i8_arr_gsum_level = i8_arr_lsum_level #else -#if ( defined noI8 ) +#if defined (NO_I8) ! Workaround for when i8 is not supported. ! 
if (detailed_timing) call xicex_timer_start("repro_sum_allr_i4") call mpi_allreduce (i8_arr_lsum_level, i8_arr_gsum_level, & diff --git a/cicecore/cicedynB/infrastructure/ice_domain.F90 b/cicecore/cicedynB/infrastructure/ice_domain.F90 index 3916039b5..cc57ea585 100644 --- a/cicecore/cicedynB/infrastructure/ice_domain.F90 +++ b/cicecore/cicedynB/infrastructure/ice_domain.F90 @@ -1,3 +1,6 @@ +#ifdef ncdf +#define USE_NETCDF +#endif !||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||| module ice_domain @@ -14,7 +17,8 @@ module ice_domain use ice_kinds_mod use ice_constants, only: shlat, nhlat - use ice_communicate, only: my_task, master_task, get_num_procs + use ice_communicate, only: my_task, master_task, get_num_procs, & + add_mpi_barriers use ice_broadcast, only: broadcast_scalar, broadcast_array use ice_blocks, only: block, get_block, create_blocks, nghost, & nblocks_x, nblocks_y, nblocks_tot, nx_block, ny_block @@ -26,7 +30,7 @@ module ice_domain use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc, only: icepack_query_parameters -#ifdef ncdf +#ifdef USE_NETCDF use netcdf #endif @@ -58,7 +62,8 @@ module ice_domain logical (kind=log_kind), public :: & maskhalo_dyn , & ! if true, use masked halo updates for dynamics maskhalo_remap , & ! if true, use masked halo updates for transport - maskhalo_bound ! if true, use masked halo updates for bound_state + maskhalo_bound , & ! if true, use masked halo updates for bound_state + orca_halogrid ! if true, input fields are haloed as defined by orca grid !----------------------------------------------------------------------- ! @@ -128,7 +133,8 @@ subroutine init_domain_blocks ns_boundary_type, & maskhalo_dyn, & maskhalo_remap, & - maskhalo_bound + maskhalo_bound, & + add_mpi_barriers !---------------------------------------------------------------------- ! @@ -146,6 +152,7 @@ subroutine init_domain_blocks maskhalo_dyn = .false. ! if true, use masked halos for dynamics maskhalo_remap = .false. ! if true, use masked halos for transport maskhalo_bound = .false. ! if true, use masked halos for bound_state + add_mpi_barriers = .false. ! if true, throttle communication max_blocks = -1 ! max number of blocks per processor block_size_x = -1 ! size of block in first horiz dimension block_size_y = -1 ! size of block in second horiz dimension @@ -182,6 +189,7 @@ subroutine init_domain_blocks call broadcast_scalar(maskhalo_dyn, master_task) call broadcast_scalar(maskhalo_remap, master_task) call broadcast_scalar(maskhalo_bound, master_task) + call broadcast_scalar(add_mpi_barriers, master_task) if (my_task == master_task) then if (max_blocks < 1) then max_blocks=int( & @@ -259,6 +267,7 @@ subroutine init_domain_blocks write(nu_diag,'(a,l6)') ' maskhalo_dyn = ', maskhalo_dyn write(nu_diag,'(a,l6)') ' maskhalo_remap = ', maskhalo_remap write(nu_diag,'(a,l6)') ' maskhalo_bound = ', maskhalo_bound + write(nu_diag,'(a,l6)') ' add_mpi_barriers = ', add_mpi_barriers write(nu_diag,'(a,2i6)') ' block_size_x,_y = ', block_size_x, block_size_y write(nu_diag,'(a,i6)') ' max_blocks = ', max_blocks write(nu_diag,'(a,i6,/)')' Number of ghost cells = ', nghost @@ -303,7 +312,7 @@ subroutine init_domain_distribution(KMTG,ULATG) i,j,n ,&! dummy loop indices ig,jg ,&! global indices work_unit ,&! size of quantized work unit -#ifdef ncdf +#ifdef USE_NETCDF fid ,&! file id varid ,&! var id status ,&! 
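!=======================================================================
! [editor's note, not part of the patch] With add_mpi_barriers now read
! from domain_nml (hunks above), a namelist input fragment enabling the
! throttling looks like this; only entries visible in this patch are
! shown, other domain_nml settings are omitted:

   &domain_nml
      maskhalo_dyn     = .false.
      maskhalo_remap   = .false.
      maskhalo_bound   = .false.
      add_mpi_barriers = .true.    ! throttle gather/scatter traffic
   /
!=======================================================================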
netcdf return code @@ -439,7 +448,7 @@ subroutine init_domain_distribution(KMTG,ULATG) allocate(wght(nx_global,ny_global)) if (my_task == master_task) then ! cannot use ice_read_write due to circular dependency -#ifdef ncdf +#ifdef USE_NETCDF write(nu_diag,*) 'read ',trim(distribution_wght_file),minval(wght),maxval(wght) status = nf90_open(distribution_wght_file, NF90_NOWRITE, fid) if (status /= nf90_noerr) then @@ -449,7 +458,8 @@ subroutine init_domain_distribution(KMTG,ULATG) status = nf90_get_var(fid, varid, wght) status = nf90_close(fid) #else - call abort_ice (subname//'ERROR: distribution_wght file needs ncdf cpp ') + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) #endif endif call broadcast_array(wght, master_task) diff --git a/cicecore/cicedynB/infrastructure/ice_grid.F90 b/cicecore/cicedynB/infrastructure/ice_grid.F90 index f4b5fef6e..34b37cf29 100644 --- a/cicecore/cicedynB/infrastructure/ice_grid.F90 +++ b/cicecore/cicedynB/infrastructure/ice_grid.F90 @@ -1,3 +1,6 @@ +#ifdef ncdf +#define USE_NETCDF +#endif !======================================================================= ! Spatial grids, masks, and boundary conditions @@ -45,6 +48,7 @@ module ice_grid grid_file , & ! input file for POP grid info kmt_file , & ! input file for POP grid info bathymetry_file, & ! input bathymetry for basalstress + bathymetry_format, & ! bathymetry file format (default or pop) grid_spacing , & ! default of 30.e3m or set by user in namelist grid_type ! current options are rectangular (default), ! displaced_pole, tripole, regional @@ -541,11 +545,14 @@ subroutine init_grid2 ! bathymetry !----------------------------------------------------------------- -#ifdef RASM_MODS - call get_bathymetry_popfile -#else - call get_bathymetry -#endif + if (trim(bathymetry_format) == 'default') then + call get_bathymetry + elseif (trim(bathymetry_format) == 'pop') then + call get_bathymetry_popfile + else + call abort_ice(subname//'ERROR: bathymetry_format value must be default or pop', & + file=__FILE__, line=__LINE__) + endif !---------------------------------------------------------------- ! Corner coordinates for CF compliant history files @@ -713,13 +720,14 @@ end subroutine popgrid subroutine popgrid_nc -#ifdef ncdf use ice_blocks, only: nx_block, ny_block use ice_constants, only: c0, c1, & field_loc_center, field_loc_NEcorner, & field_type_scalar, field_type_angle use ice_domain_size, only: max_blocks +#ifdef USE_NETCDF use netcdf +#endif integer (kind=int_kind) :: & i, j, iblk, & @@ -752,6 +760,7 @@ subroutine popgrid_nc character(len=*), parameter :: subname = '(popgrid_nc)' +#ifdef USE_NETCDF call icepack_query_parameters(pi_out=pi) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -866,7 +875,11 @@ subroutine popgrid_nc call ice_close_nc(fid_grid) call ice_close_nc(fid_kmt) endif +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) #endif + end subroutine popgrid_nc #ifdef CESMCOUPLED @@ -879,13 +892,14 @@ end subroutine popgrid_nc subroutine latlongrid -#ifdef ncdf ! 
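!=======================================================================
! [editor's note, not part of the patch] The init_grid2 hunk above turns
! the old compile-time "#ifdef RASM_MODS" bathymetry choice into the
! runtime bathymetry_format namelist option ('default' or 'pop'; any
! other value aborts).  A grid_nml fragment exercising the POP reader;
! the bathymetry file name is hypothetical:

   &grid_nml
      grid_format       = 'nc'
      grid_type         = 'displaced_pole'
      bathymetry_file   = 'bathy_pop.nc'
      bathymetry_format = 'pop'
      use_bathymetry    = .true.
      orca_halogrid     = .false.
   /
!=======================================================================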
use ice_boundary use ice_domain_size use ice_scam, only : scmlat, scmlon, single_column use ice_constants, only: c0, c1, p5, p25, & field_loc_center, field_type_scalar, radius +#ifdef USE_NETCDF use netcdf +#endif integer (kind=int_kind) :: & i, j, iblk @@ -927,6 +941,7 @@ subroutine latlongrid character(len=*), parameter :: subname = '(lonlatgrid)' +#ifdef USE_NETCDF !----------------------------------------------------------------- ! - kmt file is actually clm fractional land file ! - Determine consistency of dimensions @@ -1139,6 +1154,9 @@ subroutine latlongrid !$OMP END PARALLEL DO call makemask +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) #endif end subroutine latlongrid @@ -2510,11 +2528,9 @@ subroutine read_basalstress_bathy character(len=*), parameter :: subname = '(read_basalstress_bathy)' if (my_task == master_task) then - write (nu_diag,*) ' ' write (nu_diag,*) 'Bathymetry file: ', trim(bathymetry_file) call icepack_warnings_flush(nu_diag) - endif call ice_open_nc(bathymetry_file,fid_init) diff --git a/cicecore/cicedynB/infrastructure/ice_read_write.F90 b/cicecore/cicedynB/infrastructure/ice_read_write.F90 index f497db49b..87d0813cc 100644 --- a/cicecore/cicedynB/infrastructure/ice_read_write.F90 +++ b/cicecore/cicedynB/infrastructure/ice_read_write.F90 @@ -1,3 +1,6 @@ +#ifdef ncdf +#define USE_NETCDF +#endif !======================================================================= ! Routines for opening, reading and writing external files @@ -15,13 +18,13 @@ module ice_read_write field_loc_noupdate, field_type_noupdate use ice_communicate, only: my_task, master_task use ice_broadcast, only: broadcast_scalar - use ice_domain, only: distrb_info + use ice_domain, only: distrb_info, orca_halogrid use ice_domain_size, only: max_blocks, nx_global, ny_global, ncat use ice_blocks, only: nx_block, ny_block, nghost use ice_exit, only: abort_ice use ice_fileunits, only: nu_diag -#ifdef ncdf +#ifdef USE_NETCDF use netcdf #endif @@ -1044,7 +1047,7 @@ subroutine ice_open_nc(filename, fid) character(len=*), parameter :: subname = '(ice_open_nc)' -#ifdef ncdf +#ifdef USE_NETCDF integer (kind=int_kind) :: & status ! status variable from netCDF routine @@ -1058,6 +1061,8 @@ subroutine ice_open_nc(filename, fid) endif ! my_task = master_task #else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined for '//trim(filename), & + file=__FILE__, line=__LINE__) fid = -999 ! to satisfy intent(out) attribute #endif end subroutine ice_open_nc @@ -1101,7 +1106,7 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & character(len=*), parameter :: subname = '(ice_read_nc_xy)' -#ifdef ncdf +#ifdef USE_NETCDF ! netCDF file diagnostics: integer (kind=int_kind) :: & varid , & ! variable id @@ -1121,18 +1126,17 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & integer (kind=int_kind) :: nx, ny -#ifdef ORCA_GRID real (kind=dbl_kind), dimension(:,:), allocatable :: & work_g2 - if (.not. present(restart_ext)) then + if (orca_halogrid .and. .not. present(restart_ext)) then if (my_task == master_task) then allocate(work_g2(nx_global+2,ny_global+1)) else allocate(work_g2(1,1)) ! to save memory endif + work_g2(:,:) = c0 endif -#endif nx = nx_global ny = ny_global @@ -1166,22 +1170,16 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & ! 
Read global array !-------------------------------------------------------------- -#ifndef ORCA_GRID - status = nf90_get_var( fid, varid, work_g1, & - start=(/1,1,nrec/), & - count=(/nx,ny,1/) ) -#else - if (.not. present(restart_ext)) then + if (orca_halogrid .and. .not. present(restart_ext)) then status = nf90_get_var( fid, varid, work_g2, & start=(/1,1,nrec/), & count=(/nx_global+2,ny_global+1,1/) ) work_g1 = work_g2(2:nx_global+1,1:ny_global) else status = nf90_get_var( fid, varid, work_g1, & - start=(/1,1,nrec/), & - count=(/nx,ny,1/) ) + start=(/1,1,nrec/), & + count=(/nx,ny,1/) ) endif -#endif endif ! my_task = master_task @@ -1225,11 +1223,11 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & endif deallocate(work_g1) -#ifdef ORCA_GRID - if (.not. present(restart_ext)) deallocate(work_g2) -#endif + if (orca_halogrid .and. .not. present(restart_ext)) deallocate(work_g2) #else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work = c0 ! to satisfy intent(out) attribute #endif end subroutine ice_read_nc_xy @@ -1273,7 +1271,7 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & character(len=*), parameter :: subname = '(ice_read_nc_xyz)' -#ifdef ncdf +#ifdef USE_NETCDF ! netCDF file diagnostics: integer (kind=int_kind) :: & n, & ! ncat index @@ -1294,18 +1292,17 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & integer (kind=int_kind) :: nx, ny -#ifdef ORCA_GRID real (kind=dbl_kind), dimension(:,:,:), allocatable :: & work_g2 - if (.not. present(restart_ext)) then + if (orca_halogrid .and. .not. present(restart_ext)) then if (my_task == master_task) then allocate(work_g2(nx_global+2,ny_global+1,ncat)) else allocate(work_g2(1,1,ncat)) ! to save memory endif + work_g2(:,:,:) = c0 endif -#endif nx = nx_global ny = ny_global @@ -1339,12 +1336,7 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & ! Read global array !-------------------------------------------------------------- -#ifndef ORCA_GRID - status = nf90_get_var( fid, varid, work_g1, & - start=(/1,1,1,nrec/), & - count=(/nx,ny,ncat,1/) ) -#else - if (.not. present(restart_ext)) then + if (orca_halogrid .and. .not. present(restart_ext)) then status = nf90_get_var( fid, varid, work_g2, & start=(/1,1,1,nrec/), & count=(/nx_global+2,ny_global+1,ncat,1/) ) @@ -1354,7 +1346,6 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & start=(/1,1,1,nrec/), & count=(/nx,ny,ncat,1/) ) endif -#endif endif ! my_task = master_task @@ -1407,11 +1398,11 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & endif deallocate(work_g1) -#ifdef ORCA_GRID - if (.not. present(restart_ext)) deallocate(work_g2) -#endif + if (orca_halogrid .and. .not. present(restart_ext)) deallocate(work_g2) #else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work = c0 ! to satisfy intent(out) attribute #endif end subroutine ice_read_nc_xyz @@ -1458,7 +1449,6 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & ! local variables -#ifdef ncdf ! netCDF file diagnostics: integer (kind=int_kind) :: & varid, & ! variable id @@ -1480,18 +1470,20 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & integer (kind=int_kind) :: nx, ny -#ifdef ORCA_GRID + character(len=*), parameter :: subname = '(ice_read_nc_xyf)' + +#ifdef USE_NETCDF real (kind=dbl_kind), dimension(:,:,:), allocatable :: & work_g2 - if (.not. present(restart_ext)) then + if (orca_halogrid .and. .not. 
present(restart_ext)) then if (my_task == master_task) then allocate(work_g2(nx_global+2,ny_global+1,nfreq)) else allocate(work_g2(1,1,nfreq)) ! to save memory endif + work_g2(:,:,:) = c0 endif -#endif nx = nx_global ny = ny_global @@ -1526,13 +1518,7 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & ! Read global array !-------------------------------------------------------------- -#ifndef ORCA_GRID - status = nf90_get_var( fid, varid, work_g1, & - start=(/1,1,1,nrec/), & - count=(/nx,ny,nfreq,1/) ) -#else - print *, 'restart_ext',restart_ext - if (.not. present(restart_ext)) then + if (orca_halogrid .and. .not. present(restart_ext)) then status = nf90_get_var( fid, varid, work_g2, & start=(/1,1,1,nrec/), & count=(/nx_global+2,ny_global+1,nfreq,1/) ) @@ -1542,8 +1528,6 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & start=(/1,1,1,nrec/), & count=(/nx,ny,nfreq,1/) ) endif - print *, 'fid',fid ,' varid',varid -#endif status = nf90_get_att(fid, varid, "missing_value", missingvalue) endif ! my_task = master_task @@ -1601,11 +1585,11 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & where (work > 1.0e+30_dbl_kind) work = c0 deallocate(work_g1) -#ifdef ORCA_GRID - if (.not. present(restart_ext)) deallocate(work_g2) -#endif + if (orca_halogrid .and. .not. present(restart_ext)) deallocate(work_g2) #else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work = c0 ! to satisfy intent(out) attribute #endif @@ -1640,7 +1624,7 @@ subroutine ice_read_nc_point(fid, nrec, varname, work, diag, & character(len=*), parameter :: subname = '(ice_read_nc_point)' -#ifdef ncdf +#ifdef USE_NETCDF ! netCDF file diagnostics: integer (kind=int_kind) :: & varid, & ! netcdf id for field @@ -1699,6 +1683,8 @@ subroutine ice_read_nc_point(fid, nrec, varname, work, diag, & work = workg(1) #else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work = c0 ! to satisfy intent(out) attribute #endif end subroutine ice_read_nc_point @@ -1731,7 +1717,7 @@ subroutine ice_read_nc_z(fid, nrec, varname, work, diag, & ! local variables -#ifdef ncdf +#ifdef USE_NETCDF real (kind=dbl_kind), dimension(:), allocatable :: & work_z @@ -1749,7 +1735,7 @@ subroutine ice_read_nc_z(fid, nrec, varname, work, diag, & character(len=*), parameter :: subname = '(ice_read_nc_z)' -#ifdef ncdf +#ifdef USE_NETCDF allocate(work_z(nilyr)) @@ -1795,6 +1781,8 @@ subroutine ice_read_nc_z(fid, nrec, varname, work, diag, & deallocate(work_z) #else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work = c0 ! to satisfy intent(out) attribute #endif end subroutine ice_read_nc_z @@ -1831,7 +1819,7 @@ subroutine ice_write_nc_xy(fid, nrec, varid, work, diag, & character(len=*), parameter :: subname = '(ice_read_nc_xy)' -#ifdef ncdf +#ifdef USE_NETCDF ! netCDF file diagnostics: integer (kind=int_kind) :: & status ! status output from netcdf routines @@ -1915,7 +1903,11 @@ subroutine ice_write_nc_xy(fid, nrec, varid, work, diag, & deallocate(work_g1) +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) #endif + end subroutine ice_write_nc_xy !======================================================================= @@ -1950,7 +1942,7 @@ subroutine ice_write_nc_xyz(fid, nrec, varid, work, diag, & character(len=*), parameter :: subname = '(ice_read_nc_xyz)' -#ifdef ncdf +#ifdef USE_NETCDF ! 
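!=======================================================================
! [editor's note, not part of the patch] The reader hunks in this file
! all share the runtime orca_halogrid branch that replaces the old
! ORCA_GRID cpp.  The oversized buffer reflects the ORCA (NEMO) grid
! layout, which, as far as this editor can tell, carries one wrap
! column on each side plus a north-fold row on top; the branch is
! skipped when the optional restart_ext argument is supplied.
! Condensed pattern:

      if (orca_halogrid .and. .not. present(restart_ext)) then
         allocate(work_g2(nx_global+2,ny_global+1))       ! field plus halo
         status = nf90_get_var(fid, varid, work_g2, &
                  start=(/1,1,nrec/), count=(/nx_global+2,ny_global+1,1/))
         work_g1 = work_g2(2:nx_global+1,1:ny_global)     ! keep the interior
         deallocate(work_g2)
      endif
!=======================================================================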
netCDF file diagnostics: integer (kind=int_kind) :: & n, & ! ncat index @@ -2045,7 +2037,11 @@ subroutine ice_write_nc_xyz(fid, nrec, varid, work, diag, & deallocate(work_g1) +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) #endif + end subroutine ice_write_nc_xyz !======================================================================= @@ -2076,7 +2072,7 @@ subroutine ice_read_global_nc (fid, nrec, varname, work_g, diag) character(len=*), parameter :: subname = '(ice_read_global_nc)' -#ifdef ncdf +#ifdef USE_NETCDF ! netCDF file diagnostics: integer (kind=int_kind) :: & varid, & ! netcdf id for field @@ -2091,18 +2087,18 @@ subroutine ice_read_global_nc (fid, nrec, varname, work_g, diag) ! character (char_len) :: & ! dimname ! dimension name ! -#ifdef ORCA_GRID real (kind=dbl_kind), dimension(:,:), allocatable :: & work_g3 - if (my_task == master_task) then - allocate(work_g3(nx_global+2,ny_global+1)) - else - allocate(work_g3(1,1)) ! to save memory - endif + if (orca_halogrid) then + if (my_task == master_task) then + allocate(work_g3(nx_global+2,ny_global+1)) + else + allocate(work_g3(1,1)) ! to save memory + endif + work_g3(:,:) = c0 + endif - work_g3(:,:) = c0 -#endif work_g(:,:) = c0 if (my_task == master_task) then @@ -2121,16 +2117,16 @@ subroutine ice_read_global_nc (fid, nrec, varname, work_g, diag) ! Read global array !-------------------------------------------------------------- -#ifndef ORCA_GRID - status = nf90_get_var( fid, varid, work_g, & - start=(/1,1,nrec/), & - count=(/nx_global,ny_global,1/) ) -#else - status = nf90_get_var( fid, varid, work_g3, & - start=(/1,1,nrec/), & - count=(/nx_global+2,ny_global+1,1/) ) - work_g=work_g3(2:nx_global+1,1:ny_global) -#endif + if (orca_halogrid) then + status = nf90_get_var( fid, varid, work_g3, & + start=(/1,1,nrec/), & + count=(/nx_global+2,ny_global+1,1/) ) + work_g=work_g3(2:nx_global+1,1:ny_global) + else + status = nf90_get_var( fid, varid, work_g, & + start=(/1,1,nrec/), & + count=(/nx_global,ny_global,1/) ) + endif endif ! my_task = master_task !------------------------------------------------------------------- @@ -2153,13 +2149,14 @@ subroutine ice_read_global_nc (fid, nrec, varname, work_g, diag) write(nu_diag,*) 'min, max, sum = ', amin, amax, asum, trim(varname) endif -#ifdef ORCA_GRID - deallocate(work_g3) -#endif + if (orca_halogrid) deallocate(work_g3) #else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work_g = c0 ! to satisfy intent(out) attribute #endif + end subroutine ice_read_global_nc !======================================================================= @@ -2176,13 +2173,16 @@ subroutine ice_close_nc(fid) character(len=*), parameter :: subname = '(ice_close_nc)' -#ifdef ncdf +#ifdef USE_NETCDF integer (kind=int_kind) :: & status ! status variable from netCDF routine if (my_task == master_task) then status = nf90_close(fid) endif +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) #endif end subroutine ice_close_nc @@ -2227,7 +2227,7 @@ subroutine ice_read_nc_uv(fid, nrec, nzlev, varname, work, diag, & character(len=*), parameter :: subname = '(ice_read_nc_uv)' -#ifdef ncdf +#ifdef USE_NETCDF ! netCDF file diagnostics: integer (kind=int_kind) :: & varid , & ! 
variable id @@ -2318,8 +2318,11 @@ subroutine ice_read_nc_uv(fid, nrec, nzlev, varname, work, diag, & deallocate(work_g1) #else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work = c0 ! to satisfy intent(out) attribute #endif + end subroutine ice_read_nc_uv !======================================================================= @@ -2350,7 +2353,7 @@ subroutine ice_read_vec_nc (fid, nrec, varname, work_g, diag) character(len=*), parameter :: subname = '(ice_read_vec_nc)' -#ifdef ncdf +#ifdef USE_NETCDF ! netCDF file diagnostics: integer (kind=int_kind) :: & varid, & ! netcdf id for field @@ -2393,9 +2396,11 @@ subroutine ice_read_vec_nc (fid, nrec, varname, work_g, diag) endif #else - write(*,*) 'ERROR: ncdf not defined during compilation' + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work_g = c0 ! to satisfy intent(out) attribute #endif + end subroutine ice_read_vec_nc !======================================================================= @@ -2411,7 +2416,7 @@ subroutine ice_get_ncvarsize(fid,varname,recsize) ! local variables -#ifdef ncdf +#ifdef USE_NETCDF integer (kind=int_kind) :: & ndims, i, status character (char_len) :: & @@ -2419,7 +2424,7 @@ subroutine ice_get_ncvarsize(fid,varname,recsize) #endif character(len=*), parameter :: subname = '(ice_get_ncvarsize)' -#ifdef ncdf +#ifdef USE_NETCDF if (my_task == master_task) then status=nf90_inquire(fid, nDimensions = nDims) if (status /= nf90_noerr) then @@ -2437,9 +2442,11 @@ subroutine ice_get_ncvarsize(fid,varname,recsize) endif endif #else - write(*,*) 'ERROR: ncdf not defined during compilation' + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) recsize = 0 ! to satisfy intent(out) attribute #endif + end subroutine ice_get_ncvarsize !======================================================================= diff --git a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 index 5b6aa0dd8..b3024302e 100644 --- a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 @@ -1,3 +1,6 @@ +#ifdef ncdf +#define USE_NETCDF +#endif !======================================================================= ! ! Writes history in netCDF format @@ -41,7 +44,6 @@ module ice_history_write subroutine ice_write_hist (ns) use ice_kinds_mod -#ifdef ncdf use ice_arrays_column, only: hin_max, floe_rad_c use ice_blocks, only: nx_block, ny_block use ice_broadcast, only: broadcast_scalar @@ -56,6 +58,7 @@ subroutine ice_write_hist (ns) lont_bounds, latt_bounds, lonu_bounds, latu_bounds use ice_history_shared use ice_restart_shared, only: runid, lcdf64 +#ifdef USE_NETCDF use netcdf #endif @@ -63,7 +66,6 @@ subroutine ice_write_hist (ns) ! 
local variables -#ifdef ncdf real (kind=dbl_kind), dimension(:,:), allocatable :: work_g1 real (kind=real_kind), dimension(:,:), allocatable :: work_gr real (kind=real_kind), dimension(:,:,:), allocatable :: work_gr3 @@ -120,6 +122,7 @@ subroutine ice_write_hist (ns) character(len=*), parameter :: subname = '(ice_write_hist)' +#ifdef USE_NETCDF call icepack_query_parameters(secday_out=secday, rad_to_deg_out=rad_to_deg) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -1571,6 +1574,10 @@ subroutine ice_write_hist (ns) write(nu_diag,*) ' ' write(nu_diag,*) 'Finished writing ',trim(ncfile(ns)) endif + +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) #endif end subroutine ice_write_hist diff --git a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 index 8bb09398e..53c7dac60 100644 --- a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 @@ -1,3 +1,6 @@ +#ifdef ncdf +#define USE_NETCDF +#endif !======================================================================= ! Read and write ice model restart files using netCDF or binary @@ -8,7 +11,9 @@ module ice_restart use ice_broadcast use ice_kinds_mod +#ifdef USE_NETCDF use netcdf +#endif use ice_restart_shared, only: & restart_ext, restart_dir, restart_file, pointer_file, & runid, use_restart_time, lcdf64, lenstr, restart_coszen @@ -52,6 +57,7 @@ subroutine init_restart_read(ice_ic) character(len=*), parameter :: subname = '(init_restart_read)' +#ifdef USE_NETCDF if (present(ice_ic)) then filename = trim(ice_ic) else @@ -97,6 +103,10 @@ subroutine init_restart_read(ice_ic) if (trim(runid) == 'bering') then npt = npt - istep0 endif +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined for '//trim(ice_ic), & + file=__FILE__, line=__LINE__) +#endif end subroutine init_restart_read @@ -153,6 +163,7 @@ subroutine init_restart_write(filename_spec) character(len=*), parameter :: subname = '(init_restart_write)' +#ifdef USE_NETCDF call icepack_query_parameters( & solve_zsal_out=solve_zsal, skl_bgc_out=skl_bgc, z_tracers_out=z_tracers) call icepack_query_tracer_sizes( & @@ -619,6 +630,11 @@ subroutine init_restart_write(filename_spec) write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif ! 
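!=======================================================================
! [editor's note, not part of the patch] In io_netcdf/ice_restart.F90
! even the module-level "use netcdf" is now wrapped, so the file
! compiles when the library is absent and every entry point fails
! loudly through abort_ice instead.  The header pattern, condensed
! from the hunk above:

      module ice_restart
      use ice_broadcast
      use ice_kinds_mod
#ifdef USE_NETCDF
      use netcdf     ! guarded so the source still compiles without netCDF
#endif
!=======================================================================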
master_task +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined for '//trim(filename_spec), & + file=__FILE__, line=__LINE__) +#endif + end subroutine init_restart_write !======================================================================= @@ -661,6 +677,7 @@ subroutine read_restart_field(nu,nrec,work,atype,vname,ndim3, & character(len=*), parameter :: subname = '(read_restart_field)' +#ifdef USE_NETCDF if (present(field_loc)) then if (ndim3 == ncat) then if (restart_ext) then @@ -699,6 +716,11 @@ subroutine read_restart_field(nu,nrec,work,atype,vname,ndim3, & endif endif +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) +#endif + end subroutine read_restart_field !======================================================================= @@ -740,6 +762,7 @@ subroutine write_restart_field(nu,nrec,work,atype,vname,ndim3,diag) character(len=*), parameter :: subname = '(write_restart_field)' +#ifdef USE_NETCDF status = nf90_inq_varid(ncid,trim(vname),varid) if (ndim3 == ncat) then if (restart_ext) then @@ -758,6 +781,11 @@ subroutine write_restart_field(nu,nrec,work,atype,vname,ndim3,diag) write(nu_diag,*) 'ndim3 not supported',ndim3 endif +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) +#endif + end subroutine write_restart_field !======================================================================= @@ -774,11 +802,17 @@ subroutine final_restart() character(len=*), parameter :: subname = '(final_restart)' +#ifdef USE_NETCDF status = nf90_close(ncid) if (my_task == master_task) & write(nu_diag,*) 'Restart read/written ',istep1,time,time_forc +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) +#endif + end subroutine final_restart !======================================================================= @@ -799,7 +833,12 @@ subroutine define_rest_field(ncid, vname, dims) character(len=*), parameter :: subname = '(define_rest_field)' +#ifdef USE_NETCDF status = nf90_def_var(ncid,trim(vname),nf90_double,dims,varid) +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) +#endif end subroutine define_rest_field diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 index d030b439b..7e16f2591 100644 --- a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 @@ -39,7 +39,6 @@ module ice_history_write subroutine ice_write_hist (ns) -#ifdef ncdf use ice_blocks, only: nx_block, ny_block use ice_broadcast, only: broadcast_scalar use ice_calendar, only: time, sec, idate, idate0, write_ic, & @@ -55,8 +54,6 @@ subroutine ice_write_hist (ns) use ice_history_shared use ice_arrays_column, only: hin_max, floe_rad_c use ice_restart_shared, only: runid, lcdf64 - use netcdf -#endif use ice_pio use pio @@ -64,7 +61,6 @@ subroutine ice_write_hist (ns) ! 
local variables -#ifdef ncdf integer (kind=int_kind) :: i,j,k,ic,n,nn, & ncid,status,imtid,jmtid,kmtidi,kmtids,kmtidb, cmtid,timid, & length,nvertexid,ivertex,kmtida,fmtid @@ -1300,8 +1296,6 @@ subroutine ice_write_hist (ns) write(nu_diag,*) 'Finished writing ',trim(ncfile(ns)) endif -#endif - end subroutine ice_write_hist !======================================================================= diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 index b11dcf0d0..eb703abcd 100644 --- a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 @@ -662,7 +662,7 @@ subroutine read_restart_field(nu,nrec,work,atype,vname,ndim3,diag, & use ice_global_reductions, only: global_minval, global_maxval, global_sum integer (kind=int_kind), intent(in) :: & - nu , & ! unit number (not used for netcdf) + nu , & ! unit number ndim3 , & ! third dimension nrec ! record number (0 for sequential access) diff --git a/cicecore/drivers/direct/hadgem3/CICE.F90 b/cicecore/drivers/direct/hadgem3/CICE.F90 index b38c1aa29..72bf1b747 100644 --- a/cicecore/drivers/direct/hadgem3/CICE.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE.F90 @@ -17,7 +17,6 @@ ! https://github.com/CICE-Consortium ! !======================================================================= -#ifndef popcice ! ! Main driver routine for CICE. Initializes and steps through the model. ! This program should be compiled if CICE is run as a separate executable, @@ -56,7 +55,6 @@ program icemodel end program icemodel -#endif !======================================================================= ! ! Wrapper for the print_state debugging routine. diff --git a/cicecore/drivers/direct/hadgem3/CICE_FinalMod.F90 b/cicecore/drivers/direct/hadgem3/CICE_FinalMod.F90 index 6b5a53abe..397950023 100644 --- a/cicecore/drivers/direct/hadgem3/CICE_FinalMod.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE_FinalMod.F90 @@ -58,9 +58,8 @@ subroutine CICE_Finalize ! quit MPI !------------------------------------------------------------------- -#ifndef coupled - call end_run ! quit MPI -#endif +! standalone +! call end_run ! quit MPI end subroutine CICE_Finalize diff --git a/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 b/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 index b208bcbef..dc41ff9fd 100644 --- a/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 @@ -87,9 +87,6 @@ subroutine cice_init use ice_restoring, only: ice_HaloRestore_init use ice_timers, only: timer_total, init_ice_timers, ice_timer_start use ice_transport_driver, only: init_transport -#ifdef popcice - use drv_forcing, only: sst_sss -#endif logical(kind=log_kind) :: tr_aero, tr_zaero, skl_bgc, z_tracers, & tr_fsd, wave_spec @@ -131,9 +128,6 @@ subroutine cice_init endif call init_coupler_flux ! initialize fluxes exchanged with coupler -#ifdef popcice - call sst_sss ! POP data for CICE initialization -#endif call init_thermo_vertical ! initialize vertical thermodynamics call icepack_init_itd(ncat=ncat, hin_max=hin_max) ! ice thickness distribution @@ -202,19 +196,17 @@ subroutine cice_init call init_forcing_atmo ! initialize atmospheric forcing (standalone) #endif -#ifndef coupled -#ifndef CESMCOUPLED - if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice - call get_forcing_atmo ! atmospheric forcing from data - call get_forcing_ocn(dt) ! ocean forcing from data - - ! aerosols - ! 
if (tr_aero) call faero_data ! data file - ! if (tr_zaero) call fzaero_data ! data file (gx1) - if (tr_aero .or. tr_zaero) call faero_default ! default values - if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry -#endif -#endif +! standalone +! if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice +! call get_forcing_atmo ! atmospheric forcing from data +! call get_forcing_ocn(dt) ! ocean forcing from data + +! ! aerosols +! ! if (tr_aero) call faero_data ! data file +! ! if (tr_zaero) call fzaero_data ! data file (gx1) +! if (tr_aero .or. tr_zaero) call faero_default ! default values +! if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry + if (z_tracers) call get_atm_bgc ! biogeochemistry if (runtype == 'initial' .and. .not. restart) & diff --git a/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 b/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 index 90af92122..e43b4a24d 100644 --- a/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 @@ -92,21 +92,19 @@ subroutine CICE_Run call ice_timer_start(timer_couple) ! atm/ocn coupling -#ifndef coupled -#ifndef CESMCOUPLED +! standalone ! for now, wave_spectrum is constant in time ! if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice - call get_forcing_atmo ! atmospheric forcing from data - call get_forcing_ocn(dt) ! ocean forcing from data +! call get_forcing_atmo ! atmospheric forcing from data +! call get_forcing_ocn(dt) ! ocean forcing from data - ! aerosols - ! if (tr_aero) call faero_data ! data file - ! if (tr_zaero) call fzaero_data ! data file (gx1) - if (tr_aero .or. tr_zaero) call faero_default ! default values +! ! aerosols +! ! if (tr_aero) call faero_data ! data file +! ! if (tr_zaero) call fzaero_data ! data file (gx1) +! if (tr_aero .or. tr_zaero) call faero_default ! default values + +! if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry - if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry -#endif -#endif if (z_tracers) call get_atm_bgc ! biogeochemistry call init_flux_atm ! initialize atmosphere fluxes sent to coupler diff --git a/cicecore/drivers/mct/cesm1/CICE_FinalMod.F90 b/cicecore/drivers/mct/cesm1/CICE_FinalMod.F90 index c2331e4e5..943787498 100644 --- a/cicecore/drivers/mct/cesm1/CICE_FinalMod.F90 +++ b/cicecore/drivers/mct/cesm1/CICE_FinalMod.F90 @@ -55,9 +55,8 @@ subroutine CICE_Finalize ! quit MPI !------------------------------------------------------------------- -#ifndef coupled - call end_run ! quit MPI -#endif +! standalone +! call end_run ! quit MPI end subroutine CICE_Finalize diff --git a/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 b/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 index b72745e30..80bb2570e 100644 --- a/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 +++ b/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 @@ -87,9 +87,6 @@ subroutine cice_init(mpicom_ice) use ice_restoring, only: ice_HaloRestore_init use ice_timers, only: timer_total, init_ice_timers, ice_timer_start use ice_transport_driver, only: init_transport -#ifdef popcice - use drv_forcing, only: sst_sss -#endif integer (kind=int_kind), optional, intent(in) :: & mpicom_ice ! communicator for sequential ccsm @@ -134,9 +131,6 @@ subroutine cice_init(mpicom_ice) call init_coupler_flux ! initialize fluxes exchanged with coupler -#ifdef popcice - call sst_sss ! POP data for CICE initialization -#endif call init_thermo_vertical ! initialize vertical thermodynamics call icepack_init_itd(ncat=ncat, hin_max=hin_max) ! 
ice thickness distribution @@ -206,21 +200,19 @@ subroutine cice_init(mpicom_ice) call init_forcing_atmo ! initialize atmospheric forcing (standalone) -#ifndef coupled -#ifndef CESMCOUPLED - if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice - call get_forcing_atmo ! atmospheric forcing from data - call get_forcing_ocn(dt) ! ocean forcing from data +! for standalone +! if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice +! call get_forcing_atmo ! atmospheric forcing from data +! call get_forcing_ocn(dt) ! ocean forcing from data + +! ! isotopes +! if (tr_iso) call fiso_default ! default values +! ! aerosols +! ! if (tr_aero) call faero_data ! data file +! ! if (tr_zaero) call fzaero_data ! data file (gx1) +! if (tr_aero .or. tr_zaero) call faero_default ! default values +! if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry - ! isotopes - if (tr_iso) call fiso_default ! default values - ! aerosols - ! if (tr_aero) call faero_data ! data file - ! if (tr_zaero) call fzaero_data ! data file (gx1) - if (tr_aero .or. tr_zaero) call faero_default ! default values - if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry -#endif -#endif if (z_tracers) call get_atm_bgc ! biogeochemistry if (runtype == 'initial' .and. .not. restart) & diff --git a/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 b/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 index f5e7de02f..ee217712b 100644 --- a/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 +++ b/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 @@ -89,23 +89,20 @@ subroutine CICE_Run call ice_timer_start(timer_couple) ! atm/ocn coupling -#ifndef coupled -#ifndef CESMCOUPLED +! for standalone ! for now, wave_spectrum is constant in time ! if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice - call get_forcing_atmo ! atmospheric forcing from data - call get_forcing_ocn(dt) ! ocean forcing from data +! call get_forcing_atmo ! atmospheric forcing from data +! call get_forcing_ocn(dt) ! ocean forcing from data - ! isotopes - if (tr_iso) call fiso_default ! default values - ! aerosols - ! if (tr_aero) call faero_data ! data file - ! if (tr_zaero) call fzaero_data ! data file (gx1) - if (tr_aero .or. tr_zaero) call faero_default ! default values +! ! isotopes +! if (tr_iso) call fiso_default ! default values +! ! aerosols +! ! if (tr_aero) call faero_data ! data file +! ! if (tr_zaero) call fzaero_data ! data file (gx1) +! if (tr_aero .or. tr_zaero) call faero_default ! default values +! if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry - if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry -#endif -#endif if (z_tracers) call get_atm_bgc ! biogeochemistry call init_flux_atm ! Initialize atmosphere fluxes sent to coupler diff --git a/cicecore/drivers/nuopc/cmeps/CICE_FinalMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_FinalMod.F90 index c2331e4e5..943787498 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_FinalMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_FinalMod.F90 @@ -55,9 +55,8 @@ subroutine CICE_Finalize ! quit MPI !------------------------------------------------------------------- -#ifndef coupled - call end_run ! quit MPI -#endif +! standalone +! call end_run ! 
quit MPI end subroutine CICE_Finalize diff --git a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 index 486c36dcc..644ef72fa 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 @@ -346,6 +346,7 @@ subroutine coupling_prep (iblk) alvdf_ai, alidf_ai, alvdr_ai, alidr_ai, fhocn_ai, & fresh_ai, fsalt_ai, fsalt, & fswthru_ai, fhocn, fswthru, scale_factor, snowfrac, & + fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & fsens, flat, fswabs, flwout, evap, Tref, Qref, & scale_fluxes, frzmlt_init, frzmlt, Uref, wind @@ -543,7 +544,12 @@ subroutine coupling_prep (iblk) evap (:,:,iblk), & Tref (:,:,iblk), Qref (:,:,iblk), & fresh (:,:,iblk), fsalt (:,:,iblk), & - fhocn (:,:,iblk), fswthru (:,:,iblk), & + fhocn (:,:,iblk), & + fswthru (:,:,iblk), & + fswthru_vdr(:,:,iblk), & + fswthru_vdf(:,:,iblk), & + fswthru_idr(:,:,iblk), & + fswthru_idf(:,:,iblk), & faero_ocn(:,:,:,iblk), & alvdr (:,:,iblk), alidr (:,:,iblk), & alvdf (:,:,iblk), alidf (:,:,iblk), & diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index aff4b5099..da3d95369 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -86,7 +86,7 @@ module ice_comp_nuopc character(len=*),parameter :: shr_cal_noleap = 'NO_LEAP' character(len=*),parameter :: shr_cal_gregorian = 'GREGORIAN' - integer , parameter :: dbug = 10 + integer :: dbug = 0 integer , parameter :: debug_import = 0 ! internal debug level integer , parameter :: debug_export = 0 ! internal debug level character(*), parameter :: modName = "(ice_comp_nuopc)" @@ -236,6 +236,14 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return endif + call NUOPC_CompAttributeGet(gcomp, name='dbug_flag', value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) then + read(cvalue,*) dbug + end if + write(logmsg,'(i6)') dbug + call ESMF_LogWrite('CICE_cap: dbug = '//trim(logmsg), ESMF_LOGMSG_INFO) + call ice_advertise_fields(gcomp, importState, exportState, flds_scalar_name, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return @@ -539,6 +547,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) !---------------------------------------------------------------------------- call icepack_query_parameters(ktherm_out=ktherm) + call icepack_query_parameters(tfrz_option_out=tfrz_option) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -602,6 +611,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) write(nu_diag,*) trim(subname),' cice calendar_type = ',trim(calendar_type) endif +#ifdef CESMCOUPLED if (calendar_type == "GREGORIAN" .or. & calendar_type == "Gregorian" .or. & calendar_type == "gregorian") then @@ -609,6 +619,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) else call time2sec(iyear-year_init,month,mday,time) endif +#endif time = time+start_tod end if @@ -874,8 +885,8 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) ! 
diagnostics !-------------------------------- - if (dbug > 1) then - call State_diagnose(exportState,subname//':ES',rc=rc) + if (dbug > 0) then + call state_diagnose(exportState,subname//':ES',rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return endif @@ -905,8 +916,10 @@ subroutine ModelAdvance(gcomp, rc) ! Local variables type(ESMF_Clock) :: clock type(ESMF_Alarm) :: alarm + type(ESMF_Time) :: startTime type(ESMF_Time) :: currTime type(ESMF_Time) :: nextTime + type(ESMF_TimeInterval) :: timeStep type(ESMF_State) :: importState, exportState character(ESMF_MAXSTR) :: cvalue real(dbl_kind) :: eccen, obliqr, lambm0, mvelpp @@ -928,11 +941,31 @@ subroutine ModelAdvance(gcomp, rc) logical :: isPresent, isSet character(*) , parameter :: F00 = "('(ice_comp_nuopc) ',2a,i8,d21.14)" character(len=*),parameter :: subname=trim(modName)//':(ModelAdvance) ' + character(char_len_long) :: msgString !-------------------------------- rc = ESMF_SUCCESS if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) + ! query the Component for its clock, importState and exportState + call ESMF_GridCompGet(gcomp, clock=clock, importState=importState, & + exportState=exportState, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call ESMF_ClockPrint(clock, options="currTime", & + preString="------>Advancing ICE from: ", unit=msgString, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ESMF_LogWrite(subname//trim(msgString), ESMF_LOGMSG_INFO) + + call ESMF_ClockGet(clock, startTime=startTime, currTime=currTime, & + timeStep=timeStep, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call ESMF_TimePrint(currTime + timeStep, & + preString="--------------------------------> to: ", unit=msgString, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ESMF_LogWrite(trim(msgString), ESMF_LOGMSG_INFO) + !-------------------------------- ! Turn on timers !-------------------------------- @@ -1050,6 +1083,10 @@ subroutine ModelAdvance(gcomp, rc) idate, sec, nu_diag, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end if + if (dbug > 0) then + call state_diagnose(importState,subname//':IS',rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if !-------------------------------- ! Advance cice and timestep update @@ -1067,11 +1104,16 @@ subroutine ModelAdvance(gcomp, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return call t_stopf ('cice_run_export') + ! write Debug output if (debug_export > 0 .and. my_task==master_task) then call State_fldDebug(exportState, flds_scalar_name, 'cice_export:', & idate, sec, nu_diag, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end if + if (dbug > 0) then + call state_diagnose(exportState,subname//':ES',rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if ! 
reset shr logging to my original values call shr_file_setLogUnit (shrlogunit) diff --git a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 index 9adb868db..b32085143 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 @@ -13,13 +13,11 @@ module ice_import_export use ice_flux , only : strairxt, strairyt, strocnxt, strocnyt use ice_flux , only : alvdr, alidr, alvdf, alidf, Tref, Qref, Uref use ice_flux , only : flat, fsens, flwout, evap, fswabs, fhocn, fswthru -#if (defined NEWCODE) - use ice_flux , only : fswthruvdr, fswthruvdf, fswthruidr, fswthruidf + use ice_flux , only : fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf use ice_flux , only : send_i2x_per_cat, fswthrun_ai - use ice_flux , only : faero_atm, faero_ocn - use ice_flux , only : fiso_atm, fiso_ocn, fiso_rain, fiso_evap - use ice_flux , only : Qa_iso, Qref_iso, HDO_ocn, H2_18O_ocn, H2_16O_ocn -#endif + use ice_flux_bgc , only : faero_atm, faero_ocn + use ice_flux_bgc , only : fiso_atm, fiso_ocn, fiso_evap + use ice_flux_bgc , only : Qa_iso, Qref_iso, HDO_ocn, H2_18O_ocn, H2_16O_ocn use ice_flux , only : fresh, fsalt, zlvl, uatm, vatm, potT, Tair, Qa use ice_flux , only : rhoa, swvdr, swvdf, swidr, swidf, flw, frain use ice_flux , only : fsnow, uocn, vocn, sst, ss_tltx, ss_tlty, frzmlt @@ -35,6 +33,7 @@ module ice_import_export use icepack_intfc , only : icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc , only : icepack_query_parameters, icepack_query_tracer_flags use icepack_intfc , only : icepack_liquidus_temperature + use icepack_intfc , only : icepack_sea_freezing_temperature use cice_wrapper_mod , only : t_startf, t_stopf, t_barrierf #ifdef CESMCOUPLED use shr_frz_mod , only : shr_frz_freezetemp @@ -87,7 +86,7 @@ module ice_import_export type (fld_list_type) :: fldsFrIce(fldsMax) type(ESMF_GeomType_Flag) :: geomtype - integer , parameter :: dbug = 10 ! i/o debug messages + integer , parameter :: io_dbug = 10 ! i/o debug messages character(*), parameter :: u_FILE_u = & __FILE__ @@ -115,7 +114,7 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam !------------------------------------------------------------------------------- rc = ESMF_SUCCESS - if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) + if (io_dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) ! Determine if the following attributes are sent by the driver and if so read them in flds_wiso = .false. @@ -126,7 +125,6 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam call ESMF_LogWrite('flds_wiso = '// trim(cvalue), ESMF_LOGMSG_INFO) end if -#if (defined NEWCODE) flds_i2o_per_cat = .false. call NUOPC_CompAttributeGet(gcomp, name='flds_i2o_per_cat', value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return @@ -134,7 +132,6 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam read(cvalue,*) send_i2x_per_cat call ESMF_LogWrite('flds_i2o_per_cat = '// trim(cvalue), ESMF_LOGMSG_INFO) end if -#endif !----------------- ! advertise import fields @@ -207,14 +204,12 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_vis_dif_albedo' ) call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_ir_dif_albedo' ) -#if (defined NEWCODE) ! 
the following are advertised but might not be connected if they are not present ! in the cmeps esmFldsExchange_xxx_mod.F90 that is model specific if (send_i2x_per_cat) then call fldlist_add(fldsFrIce_num, fldsFrIce, 'ice_fraction_n', & ungridded_lbound=1, ungridded_ubound=ncat) end if -#endif ! ice/atm fluxes computed by ice call fldlist_add(fldsFrIce_num, fldsFrIce, 'stress_on_air_ice_zonal' ) @@ -233,12 +228,10 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_sw_pen_to_ocn_ir_dir_flx' ) call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_sw_pen_to_ocn_ir_dif_flx' ) -#if (defined NEWCODE) if (send_i2x_per_cat) then call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_sw_pen_to_ocn_ifrac_n', & ungridded_lbound=1, ungridded_ubound=ncat) end if -#endif call fldlist_add(fldsFrIce_num , fldsFrIce, 'mean_fresh_water_to_ocean_rate' ) call fldlist_add(fldsFrIce_num , fldsFrIce, 'mean_salt_rate' ) call fldlist_add(fldsFrIce_num , fldsFrIce, 'stress_on_ocn_ice_zonal' ) @@ -253,8 +246,8 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam if (flds_wiso) then call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_fresh_water_to_ocean_rate_wiso', & ungridded_lbound=1, ungridded_ubound=3) - !call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_evap_rate_atm_into_ice_wiso', & - ! ungridded_lbound=1, ungridded_ubound=3) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_evap_rate_atm_into_ice_wiso', & + ungridded_lbound=1, ungridded_ubound=3) call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_qref_wiso', & ungridded_lbound=1, ungridded_ubound=3) end if @@ -265,7 +258,7 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam if (ChkErr(rc,__LINE__,u_FILE_u)) return enddo - if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) + if (io_dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) end subroutine ice_advertise_fields @@ -361,12 +354,22 @@ subroutine ice_import( importState, rc ) real (kind=dbl_kind),allocatable :: aflds(:,:,:,:) real (kind=dbl_kind) :: workx, worky real (kind=dbl_kind) :: MIN_RAIN_TEMP, MAX_SNOW_TEMP - real (kind=dbl_kind) :: tffresh + real (kind=dbl_kind) :: Tffresh real (kind=dbl_kind) :: inst_pres_height_lowest + character(len=char_len) :: tfrz_option + integer(int_kind) :: ktherm character(len=*), parameter :: subname = 'ice_import' + character(len=1024) :: msgString !----------------------------------------------------- call icepack_query_parameters(Tffresh_out=Tffresh) + call icepack_query_parameters(tfrz_option_out=tfrz_option) + call icepack_query_parameters(ktherm_out=ktherm) + if (io_dbug > 5) then + write(msgString,'(A,i8)')trim(subname)//' tfrz_option = ' & + // trim(tfrz_option)//', ktherm = ',ktherm + call ESMF_LogWrite(trim(msgString), ESMF_LOGMSG_INFO) + end if ! call icepack_query_parameters(tfrz_option_out=tfrz_option, & ! modal_aero_out=modal_aero, z_tracers_out=z_tracers, skl_bgc_out=skl_bgc, & ! Tffresh_out=Tffresh) @@ -568,7 +571,6 @@ subroutine ice_import( importState, rc ) ! Get aerosols from mediator !------------------------------------------------------- -#if (defined NEWCODE) if (State_FldChk(importState, 'Faxa_bcph')) then ! the following indices are based on what the atmosphere is sending ! 
bcphidry ungridded_index=1 @@ -604,7 +606,6 @@ subroutine ice_import( importState, rc ) call state_getimport(importState, 'Faxa_dstdry', output=faero_atm, index=3, do_sum=.true., ungridded_index=4, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end if -#endif !------------------------------------------------------- ! Water isotopes from the mediator @@ -614,7 +615,6 @@ subroutine ice_import( importState, rc ) ! 18O => ungridded_index=2 ! HDO => ungridded_index=3 -#if (defined NEWCODE) if (State_FldChk(importState, 'shum_wiso')) then call state_getimport(importState, 'inst_spec_humid_height_lowest_wiso', output=Qa_iso, index=1, ungridded_index=3, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return @@ -623,12 +623,12 @@ subroutine ice_import( importState, rc ) call state_getimport(importState, 'inst_spec_humid_height_lowest_wiso', output=Qa_iso, index=3, ungridded_index=2, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=1, ungridded_index=3, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=2, ungridded_index=1, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=3, ungridded_index=2, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return +! call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=1, ungridded_index=3, rc=rc) +! if (ChkErr(rc,__LINE__,u_FILE_u)) return +! call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=2, ungridded_index=1, rc=rc) +! if (ChkErr(rc,__LINE__,u_FILE_u)) return +! call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=3, ungridded_index=2, rc=rc) +! if (ChkErr(rc,__LINE__,u_FILE_u)) return call state_getimport(importState, 'mean_fprec_rate_wiso', output=fiso_atm, index=1, ungridded_index=3, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return @@ -644,7 +644,6 @@ subroutine ice_import( importState, rc ) call state_getimport(importState, 'So_roce_wiso', output=H2_18O_ocn, ungridded_index=2, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end if -#endif !----------------------------------------------------------------- ! rotate zonal/meridional vectors to local coordinates @@ -697,8 +696,7 @@ subroutine ice_import( importState, rc ) do iblk = 1, nblocks do j = 1,ny_block do i = 1,nx_block - !TODO: tcx should this be icepack_sea_freezing_temperature? - Tf (i,j,iblk) = icepack_liquidus_temperature(sss(i,j,iblk)) + Tf(i,j,iblk) = icepack_sea_freezing_temperature(sss(i,j,iblk)) end do end do end do @@ -773,12 +771,12 @@ subroutine ice_export( exportState, rc ) real (kind=dbl_kind) :: tauyo (nx_block,ny_block,max_blocks) ! ice/ocean stress real (kind=dbl_kind) :: ailohi(nx_block,ny_block,max_blocks) ! fractional ice area real (kind=dbl_kind), allocatable :: tempfld(:,:,:) - real (kind=dbl_kind) :: tffresh + real (kind=dbl_kind) :: Tffresh character(len=*),parameter :: subname = 'ice_export' !----------------------------------------------------- rc = ESMF_SUCCESS - if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) + if (io_dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) call icepack_query_parameters(Tffresh_out=Tffresh) ! call icepack_query_parameters(tfrz_option_out=tfrz_option, & @@ -907,7 +905,6 @@ subroutine ice_export( exportState, rc ) ! ---- ! 
surface temperature of ice covered portion (degK) - !call state_setexport(exportState, 'sea_ice_temperature', input=Tsrf , lmask=tmask, ifrac=ailohi, rc=rc) call state_setexport(exportState, 'sea_ice_surface_temperature', input=Tsrf , lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return @@ -1005,23 +1002,21 @@ subroutine ice_export( exportState, rc ) call state_setexport(exportState, 'mean_sw_pen_to_ocn' , input=fswthru, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return -#if (defined NEWCODE) ! flux of vis dir shortwave through ice to ocean - call state_setexport(exportState, 'mean_sw_pen_to_ocn_vis_dir_flx' , input=fswthruvdr, lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'mean_sw_pen_to_ocn_vis_dir_flx' , input=fswthru_vdr, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! flux of vis dif shortwave through ice to ocean - call state_setexport(exportState, 'mean_sw_pen_to_ocn_vis_dif_flx' , input=fswthruvdf, lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'mean_sw_pen_to_ocn_vis_dif_flx' , input=fswthru_vdf, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! flux of ir dir shortwave through ice to ocean - call state_setexport(exportState, 'mean_sw_pen_to_ocn_ir_dir_flx' , input=fswthruidr, lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'mean_sw_pen_to_ocn_ir_dir_flx' , input=fswthru_idr, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! flux of ir dif shortwave through ice to ocean - call state_setexport(exportState, 'mean_sw_pen_to_ocn_ir_dif_flx' , input=fswthruidf, lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'mean_sw_pen_to_ocn_ir_dif_flx' , input=fswthru_idf, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return -#endif ! heat exchange with ocean call state_setexport(exportState, 'net_heat_flx_to_ocn' , input=fhocn, lmask=tmask, ifrac=ailohi, rc=rc) @@ -1043,7 +1038,6 @@ subroutine ice_export( exportState, rc ) call state_setexport(exportState, 'stress_on_ocn_ice_merid' , input=tauyo, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return -#if (defined NEWCODE) ! ------ ! optional aerosol fluxes to ocean ! ------ @@ -1134,7 +1128,6 @@ subroutine ice_export( exportState, rc ) if (ChkErr(rc,__LINE__,u_FILE_u)) return end do end if -#endif end subroutine ice_export diff --git a/cicecore/drivers/nuopc/dmi/CICE.F90 b/cicecore/drivers/nuopc/dmi/CICE.F90 index 56dffc6b7..ec1963d38 100644 --- a/cicecore/drivers/nuopc/dmi/CICE.F90 +++ b/cicecore/drivers/nuopc/dmi/CICE.F90 @@ -17,7 +17,6 @@ ! https://github.com/CICE-Consortium ! !======================================================================= -#ifndef popcice ! ! Main driver routine for CICE. Initializes and steps through the model. ! This program should be compiled if CICE is run as a separate executable, @@ -57,7 +56,6 @@ program icemodel end program icemodel -#endif !======================================================================= ! ! Wrapper for the print_state debugging routine. 
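A functional change worth highlighting in the import path above: ice_import now computes the freezing temperature Tf with icepack_sea_freezing_temperature, which respects the tfrz_option namelist, rather than icepack_liquidus_temperature, which always assumes the mushy liquidus. A minimal standalone sketch of the new interface follows; it assumes an Icepack build is linked, and the salinity value is illustrative only.

program tfrz_sketch
   ! Sketch only: exercises the freezing-point call now used in ice_import.
   ! Assumes icepack_intfc is provided by a linked Icepack build.
   use icepack_intfc, only: icepack_sea_freezing_temperature
   implicit none
   integer, parameter :: dbl_kind = selected_real_kind(13)
   real (kind=dbl_kind) :: sss, Tf
   sss = 34.0_dbl_kind                          ! sea surface salinity (ppt), illustrative
   Tf  = icepack_sea_freezing_temperature(sss)  ! honors the tfrz_option setting
   write(*,*) 'freezing temperature (degC): ', Tf
end program tfrz_sketch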
diff --git a/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 b/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 index adafb3d36..4e236bb11 100644 --- a/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 @@ -92,9 +92,6 @@ subroutine cice_init(mpi_comm) use ice_restoring, only: ice_HaloRestore_init use ice_timers, only: timer_total, init_ice_timers, ice_timer_start use ice_transport_driver, only: init_transport -#ifdef popcice - use drv_forcing, only: sst_sss -#endif integer (kind=int_kind), optional, intent(in) :: & mpi_comm ! communicator for sequential ccsm @@ -146,9 +143,6 @@ subroutine cice_init(mpi_comm) call init_coupler_flux ! initialize fluxes exchanged with coupler -#ifdef popcice - call sst_sss ! POP data for CICE initialization -#endif call init_thermo_vertical ! initialize vertical thermodynamics call icepack_init_itd(ncat=ncat, hin_max=hin_max) ! ice thickness distribution diff --git a/cicecore/drivers/standalone/cice/CICE.F90 b/cicecore/drivers/standalone/cice/CICE.F90 index 56dffc6b7..ec1963d38 100644 --- a/cicecore/drivers/standalone/cice/CICE.F90 +++ b/cicecore/drivers/standalone/cice/CICE.F90 @@ -17,7 +17,6 @@ ! https://github.com/CICE-Consortium ! !======================================================================= -#ifndef popcice ! ! Main driver routine for CICE. Initializes and steps through the model. ! This program should be compiled if CICE is run as a separate executable, @@ -57,7 +56,6 @@ program icemodel end program icemodel -#endif !======================================================================= ! ! Wrapper for the print_state debugging routine. diff --git a/cicecore/drivers/standalone/cice/CICE_FinalMod.F90 b/cicecore/drivers/standalone/cice/CICE_FinalMod.F90 index 0cd1ff177..dd0ca0b20 100644 --- a/cicecore/drivers/standalone/cice/CICE_FinalMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_FinalMod.F90 @@ -65,9 +65,7 @@ subroutine CICE_Finalize ! quit MPI !------------------------------------------------------------------- -#ifndef coupled call end_run ! quit MPI -#endif end subroutine CICE_Finalize diff --git a/cicecore/drivers/standalone/cice/CICE_InitMod.F90 b/cicecore/drivers/standalone/cice/CICE_InitMod.F90 index 59bbca31c..0a8614eb2 100644 --- a/cicecore/drivers/standalone/cice/CICE_InitMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_InitMod.F90 @@ -87,9 +87,6 @@ subroutine cice_init use ice_restoring, only: ice_HaloRestore_init use ice_timers, only: timer_total, init_ice_timers, ice_timer_start use ice_transport_driver, only: init_transport -#ifdef popcice - use drv_forcing, only: sst_sss -#endif logical(kind=log_kind) :: tr_aero, tr_zaero, skl_bgc, z_tracers, & tr_iso, tr_fsd, wave_spec @@ -134,9 +131,6 @@ subroutine cice_init call init_coupler_flux ! initialize fluxes exchanged with coupler -#ifdef popcice - call sst_sss ! POP data for CICE initialization -#endif call init_thermo_vertical ! initialize vertical thermodynamics call icepack_init_itd(ncat=ncat, hin_max=hin_max) ! ice thickness distribution @@ -206,8 +200,6 @@ subroutine cice_init call init_forcing_atmo ! initialize atmospheric forcing (standalone) -#ifndef coupled -#ifndef CESMCOUPLED if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice call get_forcing_atmo ! atmospheric forcing from data call get_forcing_ocn(dt) ! ocean forcing from data @@ -219,8 +211,6 @@ subroutine cice_init ! if (tr_zaero) call fzaero_data ! data file (gx1) if (tr_aero .or. tr_zaero) call faero_default ! default values if (skl_bgc .or. 
z_tracers) call get_forcing_bgc ! biogeochemistry -#endif -#endif if (z_tracers) call get_atm_bgc ! biogeochemistry if (runtype == 'initial' .and. .not. restart) & diff --git a/cicecore/drivers/standalone/cice/CICE_RunMod.F90 b/cicecore/drivers/standalone/cice/CICE_RunMod.F90 index 7645c43f3..b45db2514 100644 --- a/cicecore/drivers/standalone/cice/CICE_RunMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_RunMod.F90 @@ -94,8 +94,6 @@ subroutine CICE_Run call ice_timer_start(timer_couple) ! atm/ocn coupling -#ifndef coupled -#ifndef CESMCOUPLED ! for now, wave_spectrum is constant in time ! if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice call get_forcing_atmo ! atmospheric forcing from data @@ -109,8 +107,6 @@ subroutine CICE_Run if (tr_aero .or. tr_zaero) call faero_default ! default values if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry -#endif -#endif if (z_tracers) call get_atm_bgc ! biogeochemistry call init_flux_atm ! Initialize atmosphere fluxes sent to coupler @@ -356,7 +352,9 @@ subroutine coupling_prep (iblk) albpnd, albcnt, apeff_ai, fpond, fresh, l_mpond_fresh, & alvdf_ai, alidf_ai, alvdr_ai, alidr_ai, fhocn_ai, & fresh_ai, fsalt_ai, fsalt, & - fswthru_ai, fhocn, fswthru, scale_factor, snowfrac, & + fswthru_ai, fhocn, & + fswthru, fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & + scale_factor, snowfrac, & swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & fsens, flat, fswabs, flwout, evap, Tref, Qref, & scale_fluxes, frzmlt_init, frzmlt @@ -550,7 +548,12 @@ subroutine coupling_prep (iblk) evap (:,:,iblk), & Tref (:,:,iblk), Qref (:,:,iblk), & fresh (:,:,iblk), fsalt (:,:,iblk), & - fhocn (:,:,iblk), fswthru (:,:,iblk), & + fhocn (:,:,iblk), & + fswthru (:,:,iblk), & + fswthru_vdr (:,:,iblk), & + fswthru_vdf (:,:,iblk), & + fswthru_idr (:,:,iblk), & + fswthru_idf (:,:,iblk), & faero_ocn(:,:,:,iblk), & alvdr (:,:,iblk), alidr (:,:,iblk), & alvdf (:,:,iblk), alidf (:,:,iblk), & diff --git a/cicecore/drivers/standalone/cice/CICE_RunMod.F90_debug b/cicecore/drivers/standalone/cice/CICE_RunMod.F90_debug index 7ca555433..c7ae7601f 100644 --- a/cicecore/drivers/standalone/cice/CICE_RunMod.F90_debug +++ b/cicecore/drivers/standalone/cice/CICE_RunMod.F90_debug @@ -94,8 +94,6 @@ call ice_timer_start(timer_couple) ! atm/ocn coupling -#ifndef coupled -#ifndef CESMCOUPLED ! for now, wave_spectrum is constant in time ! if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice call get_forcing_atmo ! atmospheric forcing from data @@ -109,8 +107,6 @@ if (tr_aero .or. tr_zaero) call faero_default ! default values if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry -#endif -#endif if (z_tracers) call get_atm_bgc ! biogeochemistry call init_flux_atm ! Initialize atmosphere fluxes sent to coupler diff --git a/cicecore/shared/ice_arrays_column.F90 b/cicecore/shared/ice_arrays_column.F90 index 64c4de612..06efd6e94 100644 --- a/cicecore/shared/ice_arrays_column.F90 +++ b/cicecore/shared/ice_arrays_column.F90 @@ -106,6 +106,10 @@ module ice_arrays_column public :: & fswsfcn , & ! SW absorbed at ice/snow surface (W m-2) fswthrun , & ! SW through ice to ocean (W/m^2) + fswthrun_vdr , & ! vis dir SW through ice to ocean (W/m^2) + fswthrun_vdf , & ! vis dif SW through ice to ocean (W/m^2) + fswthrun_idr , & ! nir dir SW through ice to ocean (W/m^2) + fswthrun_idf , & ! nir dif SW through ice to ocean (W/m^2) fswintn ! 
SW absorbed in ice interior, below surface (W m-2) real (kind=dbl_kind), dimension (:,:,:,:,:), allocatable, & @@ -359,6 +363,10 @@ subroutine alloc_arrays_column snowfracn (nx_block,ny_block,ncat,max_blocks), & ! Category snow fraction used in radiation fswsfcn (nx_block,ny_block,ncat,max_blocks), & ! SW absorbed at ice/snow surface (W m-2) fswthrun (nx_block,ny_block,ncat,max_blocks), & ! SW through ice to ocean (W/m^2) + fswthrun_vdr (nx_block,ny_block,ncat,max_blocks), & ! vis dir SW through ice to ocean (W/m^2) + fswthrun_vdf (nx_block,ny_block,ncat,max_blocks), & ! vis dif SW through ice to ocean (W/m^2) + fswthrun_idr (nx_block,ny_block,ncat,max_blocks), & ! nir dir SW through ice to ocean (W/m^2) + fswthrun_idf (nx_block,ny_block,ncat,max_blocks), & ! nir dif SW through ice to ocean (W/m^2) fswintn (nx_block,ny_block,ncat,max_blocks), & ! SW absorbed in ice interior, below surface (W m-2) first_ice_real & (nx_block,ny_block,ncat,max_blocks), & ! .true. = c1, .false. = c0 diff --git a/cicecore/shared/ice_init_column.F90 b/cicecore/shared/ice_init_column.F90 index 9e4838087..0370a0d7e 100644 --- a/cicecore/shared/ice_init_column.F90 +++ b/cicecore/shared/ice_init_column.F90 @@ -181,7 +181,8 @@ end subroutine init_thermo_vertical subroutine init_shortwave use ice_arrays_column, only: fswpenln, Iswabsn, Sswabsn, albicen, & - albsnon, alvdrn, alidrn, alvdfn, alidfn, fswsfcn, fswthrun, & + albsnon, alvdrn, alidrn, alvdfn, alidfn, fswsfcn, & + fswthrun, fswthrun_vdr, fswthrun_vdf, fswthrun_idr, fswthrun_idf, & fswintn, albpndn, apeffn, trcrn_sw, dhsn, ffracn, snowfracn, & kaer_tab, waer_tab, gaer_tab, kaer_bc_tab, waer_bc_tab, gaer_bc_tab, bcenh, & swgrid, igrid @@ -304,6 +305,10 @@ subroutine init_shortwave fswsfcn(i,j,n,iblk) = c0 fswintn(i,j,n,iblk) = c0 fswthrun(i,j,n,iblk) = c0 + fswthrun_vdr(i,j,n,iblk) = c0 + fswthrun_vdf(i,j,n,iblk) = c0 + fswthrun_idr(i,j,n,iblk) = c0 + fswthrun_idf(i,j,n,iblk) = c0 enddo ! 
ncat enddo @@ -363,7 +368,12 @@ subroutine init_shortwave alvdrn=alvdrn(i,j,:,iblk), alvdfn=alvdfn(i,j,:,iblk), & alidrn=alidrn(i,j,:,iblk), alidfn=alidfn(i,j,:,iblk), & fswsfcn=fswsfcn(i,j,:,iblk), fswintn=fswintn(i,j,:,iblk), & - fswthrun=fswthrun(i,j,:,iblk), fswpenln=fswpenln(i,j,:,:,iblk), & + fswthrun=fswthrun(i,j,:,iblk), & + fswthrun_vdr=fswthrun_vdr(i,j,:,iblk), & + fswthrun_vdf=fswthrun_vdf(i,j,:,iblk), & + fswthrun_idr=fswthrun_idr(i,j,:,iblk), & + fswthrun_idf=fswthrun_idf(i,j,:,iblk), & + fswpenln=fswpenln(i,j,:,:,iblk), & Sswabsn=Sswabsn(i,j,:,:,iblk), Iswabsn=Iswabsn(i,j,:,:,iblk), & albicen=albicen(i,j,:,iblk), albsnon=albsnon(i,j,:,iblk), & albpndn=albpndn(i,j,:,iblk), apeffn=apeffn(i,j,:,iblk), & diff --git a/configuration/scripts/cice.build b/configuration/scripts/cice.build index b51484201..b9aed44fe 100755 --- a/configuration/scripts/cice.build +++ b/configuration/scripts/cice.build @@ -117,10 +117,10 @@ cd ${ICE_OBJDIR} if (${ICE_IOTYPE} == 'netcdf') then set IODIR = io_netcdf - setenv ICE_CPPDEFS "${ICE_CPPDEFS} -Dncdf" + setenv ICE_CPPDEFS "${ICE_CPPDEFS} -DUSE_NETCDF" else if (${ICE_IOTYPE} =~ pio*) then set IODIR = io_pio2 - setenv ICE_CPPDEFS "${ICE_CPPDEFS} -Dncdf" + setenv ICE_CPPDEFS "${ICE_CPPDEFS} -DUSE_NETCDF" else set IODIR = io_binary endif diff --git a/configuration/scripts/forapps/ufs/comp_ice.backend.libcice b/configuration/scripts/forapps/ufs/comp_ice.backend.libcice index a408cc7d2..ea38e048b 100755 --- a/configuration/scripts/forapps/ufs/comp_ice.backend.libcice +++ b/configuration/scripts/forapps/ufs/comp_ice.backend.libcice @@ -57,10 +57,10 @@ if !($?IO_TYPE) then endif if ($IO_TYPE == 'netcdf3' || $IO_TYPE == 'netcdf4') then setenv IODIR io_netcdf - setenv ICE_CPPDEFS "${ICE_CPPDEFS} -Dncdf" + setenv ICE_CPPDEFS "${ICE_CPPDEFS} -DUSE_NETCDF" else if ($IO_TYPE == 'pio') then setenv IODIR io_pio - setenv ICE_CPPDEFS "${ICE_CPPDEFS} -Dncdf" + setenv ICE_CPPDEFS "${ICE_CPPDEFS} -DUSE_NETCDF" else setenv IODIR io_binary endif diff --git a/configuration/scripts/ice_in b/configuration/scripts/ice_in index 54663f86c..a26579df1 100644 --- a/configuration/scripts/ice_in +++ b/configuration/scripts/ice_in @@ -10,7 +10,6 @@ ice_ic = './restart/iced_gx3_v5.nc' restart = .true. restart_ext = .false. - restart_coszen = .false. use_restart_time = .true. restart_format = 'default' lcdf64 = .false. @@ -53,6 +52,7 @@ grid_file = 'grid' kmt_file = 'kmt' bathymetry_file = 'unknown_bathymetry_file' + bathymetry_format = 'default' use_bathymetry = .false. gridcpl_file = 'unknown_gridcpl_file' kcatbound = 0 @@ -64,6 +64,7 @@ nilyr = 7 nslyr = 1 nblyr = 7 + orca_halogrid = .false. / &tracer_nml @@ -107,6 +108,9 @@ dSdt_slow_mode = -5.0e-8 phi_c_slow_mode = 0.05 phi_i_mushy = 0.85 + sw_redist = .false. + sw_frac = 0.9d0 + sw_dtemp = 0.02d0 / &dynamics_nml @@ -134,6 +138,7 @@ coriolis = 'latitude' kridge = 1 ktransport = 1 + ssh_stress = 'geostrophic' / &shortwave_nml @@ -182,6 +187,7 @@ wave_spec_type = 'none' wave_spec_file = 'unknown_wave_spec_file' nfreq = 25 + restart_coszen = .false. restore_ice = .false. restore_ocn = .false. trestore = 90 @@ -217,6 +223,7 @@ maskhalo_dyn = .false. maskhalo_remap = .false. maskhalo_bound = .false. + add_mpi_barriers = .false. / &zbgc_nml diff --git a/configuration/scripts/options/set_nml.alt03 b/configuration/scripts/options/set_nml.alt03 index 43681ab9d..f82491d9d 100644 --- a/configuration/scripts/options/set_nml.alt03 +++ b/configuration/scripts/options/set_nml.alt03 @@ -14,6 +14,9 @@ tr_aero = .true. calc_Tsfc = .false. 
kdyn = 2 ktherm = 1 +sw_redist = .true. +sw_frac = 0.9d0 +sw_dtemp = 0.02d0 tfrz_option = 'linear_salt' revised_evp = .false. Ktens = 0. diff --git a/configuration/scripts/options/set_nml.alt04 b/configuration/scripts/options/set_nml.alt04 index 786decae6..937704294 100644 --- a/configuration/scripts/options/set_nml.alt04 +++ b/configuration/scripts/options/set_nml.alt04 @@ -13,6 +13,9 @@ tr_pond_lvl = .true. tr_aero = .true. kitd = 0 ktherm = 1 +sw_redist = .true. +sw_frac = 0.9d0 +sw_dtemp = 0.02d0 conduct = 'MU71' kdyn = 1 kevp_kernel = 102 diff --git a/configuration/scripts/parse_namelist.sh b/configuration/scripts/parse_namelist.sh index c94420f6e..ea539a2d0 100755 --- a/configuration/scripts/parse_namelist.sh +++ b/configuration/scripts/parse_namelist.sh @@ -5,12 +5,15 @@ if [[ "$#" -ne 2 ]]; then exit -1 fi +scriptname=`basename "$0"` filename=$1 filemods=$2 #echo "$0 $1 $2" echo "running parse_namelist.sh" foundstring="FoundSTRING" +vnamearray=() +valuearray=() while read -r line do @@ -24,17 +27,39 @@ do value=`echo $line | sed "s|^[[:space:]]*\([^[:space:]]*\)[[:space:]]*=[[:space:]]*\([^[:space:]]*\).*$|\2|g"` # echo "$line $vname $value" + found=${foundstring} + for i in "${!vnamearray[@]}"; do + if [[ "${found}" == "${foundstring}" ]]; then + vn=${vnamearray[$i]} + vv=${valuearray[$i]} +# echo "names/values $i ${vname} ${vn} ${value} ${vv}" + if [[ "$vname" == "$vn" ]]; then + found=$i + if [[ "$value" != "${vv}" ]]; then +# echo "names/values $i ${vname} ${vn} ${value} ${vv}" + echo "${scriptname} WARNING: re-overriding $vname from ${vv} to ${value}" + fi + fi + fi + done + #sed -i 's|\(^\s*'"$vname"'\s*\=\s*\)\(.*$\)|\1'"$value"'|g' $filename cp ${filename} ${filename}.check sed -i.sedbak -e 's|\(^[[:space:]]*'"$vname"'[[:space:]]*=[[:space:]]*\)\(.*$\)|\1'"$foundstring"'|g' ${filename}.check grep -q ${foundstring} ${filename}.check if [ $? 
-eq 0 ]; then sed -i.sedbak -e 's|\(^[[:space:]]*'"$vname"'[[:space:]]*=[[:space:]]*\)\(.*$\)|\1'"$value"'|g' ${filename} + if [[ "${found}" == "${foundstring}" ]]; then + vnamearray+=($vname) + valuearray+=($value) + else + valuearray[$found]=${value} + fi if [[ -e "${filename}.sedbak" ]]; then rm ${filename}.sedbak fi else - echo "$0 ERROR: parsing error for ${vname}" + echo "${scriptname} ERROR: parsing error for ${vname}" exit -99 fi rm ${filename}.check ${filename}.check.sedbak diff --git a/configuration/scripts/parse_settings.sh b/configuration/scripts/parse_settings.sh index f797dbebe..d6ed31c15 100755 --- a/configuration/scripts/parse_settings.sh +++ b/configuration/scripts/parse_settings.sh @@ -5,11 +5,15 @@ if [[ "$#" -ne 2 ]]; then exit -1 fi +scriptname=`basename "$0"` filename=$1 filemods=$2 #echo "$0 $1 $2" echo "running parse_settings.sh" +foundstring="FoundSTRING" +vnamearray=() +valuearray=() while read -r line do @@ -23,8 +27,32 @@ do value=`echo $line | sed "s|\(^[[:space:]]*set[^[:space:]]*\)[[:space:]][[:space:]]*\([^[:space:]]*\)[[:space:]][[:space:]]*\([^[:space:]]*\).*$|\3|g"` # echo "$line $vname $value" + found=${foundstring} + for i in "${!vnamearray[@]}"; do + if [[ "${found}" == "${foundstring}" ]]; then + vn=${vnamearray[$i]} + vv=${valuearray[$i]} +# echo "names/values $i ${vname} ${vn} ${value} ${vv}" + if [[ "$vname" == "$vn" ]]; then + found=$i + if [[ "$value" != "${vv}" ]]; then +# echo "names/values $i ${vname} ${vn} ${value} ${vv}" + echo "${scriptname} WARNING: re-overriding $vname from ${vv} to ${value}" + fi + fi + fi + done + #sed -i 's|\(^\s*set.* '"$vname"' \)[^#]*\(#*.*$\)|\1 '"$value"' \2|g' $filename sed -i.sedbak -e 's|\(^[[:space:]]*set.* '"$vname"' \)[^#]*\(#*.*$\)|\1 '"$value"' \2|g' $filename + + if [[ "${found}" == "${foundstring}" ]]; then + vnamearray+=($vname) + valuearray+=($value) + else + valuearray[$found]=${value} + fi + if [[ -e "${filename}.sedbak" ]]; then rm ${filename}.sedbak fi diff --git a/configuration/scripts/tests/cice.lcov.csh b/configuration/scripts/tests/cice.lcov.csh index 8107778d9..5772833d1 100644 --- a/configuration/scripts/tests/cice.lcov.csh +++ b/configuration/scripts/tests/cice.lcov.csh @@ -9,7 +9,7 @@ set lcovhtmldir = lcov_cice_${report_name} genhtml -o ./${lcovhtmldir} --precision 2 -t "${report_name}" total.info rm -r -f ${lcovrepo} -git clone https://github.com/apcraig/${lcovrepo} +git clone --depth=1 https://github.com/apcraig/${lcovrepo} cp -p -r ${lcovhtmldir} ${lcovrepo}/ cd ${lcovrepo} diff --git a/configuration/scripts/tests/cice_test_codecov.csh b/configuration/scripts/tests/cice_test_codecov.csh index be9399f1b..d9a69e898 100755 --- a/configuration/scripts/tests/cice_test_codecov.csh +++ b/configuration/scripts/tests/cice_test_codecov.csh @@ -29,7 +29,7 @@ cd ${testdir} # Check out current cice master echo " " echo "*** checkout current cice master ***" -git clone https://github.com/cice-consortium/cice cice.master.${date} --recursive +git clone --depth=1 https://github.com/cice-consortium/cice cice.master.${date} --recursive cd cice.master.${date} set hash = `git rev-parse --short HEAD ` cd ../ @@ -40,7 +40,7 @@ cd ../ # This also copies in all dot file at the root that do not start with .g (ie. 
.git*) echo " " echo "*** checkout current test_cice_master ***" -git clone https://github.com/apcraig/test_cice_icepack test_cice_icepack.${date} +git clone --depth=1 https://github.com/apcraig/test_cice_icepack test_cice_icepack.${date} cd test_cice_icepack.${date} echo " " echo "*** remove current files and copy in cice master files ***" diff --git a/configuration/scripts/tests/io_suite.ts b/configuration/scripts/tests/io_suite.ts index c1edec292..3e98642e9 100755 --- a/configuration/scripts/tests/io_suite.ts +++ b/configuration/scripts/tests/io_suite.ts @@ -1,8 +1,9 @@ # Test Grid PEs Sets BFB-compare +# some iobinary configurations fail due to bathymetry netcdf file requirement, remove them restart gx3 8x4 debug,histall,iobinary,precision8 -restart gx3 12x2 alt01,histall,iobinary +#restart gx3 12x2 alt01,histall,iobinary restart gx3 16x2 alt02,histall,iobinary,precision8 -restart gx3 4x2 alt03,histall,iobinary +#restart gx3 4x2 alt03,histall,iobinary restart gx3 8x4 alt04,histall,iobinary,precision8 restart gx3 4x4 alt05,histall,iobinary restart gx3 32x1 bgcz,histall,iobinary,precision8 diff --git a/configuration/scripts/tests/report_results.csh b/configuration/scripts/tests/report_results.csh index e3f8eed70..2eb3731d5 100755 --- a/configuration/scripts/tests/report_results.csh +++ b/configuration/scripts/tests/report_results.csh @@ -25,7 +25,7 @@ set wikirepo = "https://github.com/CICE-Consortium/Test-Results.wiki.git" set wikiname = Test-Results.wiki rm -r -f ${wikiname} -git clone ${wikirepo} ${wikiname} +git clone --depth=1 ${wikirepo} ${wikiname} if ($status != 0) then echo " " echo "${0}: ERROR git clone failed" diff --git a/doc/source/cice_index.rst b/doc/source/cice_index.rst index 1d3baca38..229fa92d5 100644 --- a/doc/source/cice_index.rst +++ b/doc/source/cice_index.rst @@ -29,6 +29,7 @@ either Celsius or Kelvin units). "a4Df", "history field accumulations, 4D categories, fsd", "" "a_min", "minimum area concentration for computing velocity", "0.001" "a_rapid_mode", ":math:`{\bullet}` brine channel diameter", "" + "add_mpi_barriers", ":math:`\bullet` turns on MPI barriers for communication throttling", "" "advection", ":math:`\bullet` type of advection algorithm used (‘remap’ or ‘upwind’)", "remap" "afsd(n)", "floe size distribution (in category n)", "" "ahmax", ":math:`\bullet` thickness above which ice albedo is constant", "0.3m" @@ -252,6 +253,10 @@ either Celsius or Kelvin units). 
"fswint", "shortwave absorbed in ice interior", "W/m\ :math:`^2`" "fswpenl", "shortwave penetrating through ice layers", "W/m\ :math:`^2`" "fswthru", "shortwave penetrating to ocean", "W/m\ :math:`^2`" + "fswthru_vdr", "visible direct shortwave penetrating to ocean", "W/m\ :math:`^2`" + "fswthru_vdf", "visible diffuse shortwave penetrating to ocean", "W/m\ :math:`^2`" + "fswthru_idr", "near IR direct shortwave penetrating to ocean", "W/m\ :math:`^2`" + "fswthru_idf", "near IR diffuse shortwave penetrating to ocean", "W/m\ :math:`^2`" "fswthru_ai", "grid-box-mean shortwave penetrating to ocean (fswthru)", "W/m\ :math:`^2`" "fyear", "current data year", "" "fyear_final", "last data year", "" diff --git a/doc/source/intro/citing.rst b/doc/source/intro/citing.rst index 8f4e142c8..c128bc4e6 100644 --- a/doc/source/intro/citing.rst +++ b/doc/source/intro/citing.rst @@ -5,14 +5,26 @@ Citing the CICE code ==================== -If you use the CICE code, please cite the version you are using with the CICE -Digital Object Identifier (DOI): +Each individual release has its own Digital Object Identifier (DOI), +e.g. CICE v6.1.2 has DOI 10.5281/zenodo.3888653. All versions of +this lineage (e.g. CICE6) can be cited by using the DOI +10.5281/zenodo.1205674 (https://zenodo.org/record/1205674). This DOI +represents all v6 releases, and will always resolve to the latest one. +More information can be found by following the DOI link to zenodo. -DOI:10.5281/zenodo.1205674 (https://zenodo.org/record/1205674) +If you use CICE, please cite the version number of the code you +are using or modifying. -This DOI can be used to cite all CICE versions and the URL will default to the most recent version. -However, each released version of CICE will also receive its own, unique DOI that can be -used for citations as well. +If using code from the CICE-Consortium repository ``master`` branch +that includes modifications +that have not yet been released with a version number, then in +addition to the most recent version number, the hash at time of +download can be cited, determined by executing the command ``git log`` +in your clone. -Please also make the CICE Consortium aware of any publications and model use. +A hash can also be cited for your own modifications, once they have +been committed to a repository branch. + +Please also make the CICE Consortium aware of any publications and +model use. diff --git a/doc/source/user_guide/ug_case_settings.rst b/doc/source/user_guide/ug_case_settings.rst index 5512841a2..550162515 100644 --- a/doc/source/user_guide/ug_case_settings.rst +++ b/doc/source/user_guide/ug_case_settings.rst @@ -8,7 +8,51 @@ Case Settings There are two important files that define the case, **cice.settings** and **ice_in**. **cice.settings** is a list of env variables that define many values used to setup, build and run the case. **ice_in** is the input namelist file -for CICE. Variables in both files are described below. +for CICE. Variables in both files are described below. In addition, the first +table lists available preprocessor macros to activate or deactivate various +features when compiling. + +.. _tabcpps: + +Table of C Preprocessor (CPP) Macros +--------------------------------------------------- + +The CICE model supports a number of C Preprocessor (CPP) Macros. These +can be turned on during compilation to activate different pieces of source +code. The main purpose is to introduce build-time code modifications to +include or exclude certain libraries or Fortran language features. 
More information
+can be found in :ref:`cicecpps`.  The following CPPs are available.
+
+.. csv-table:: **CPP Macros**
+   :header: "CPP name", "description"
+   :widths: 15, 60
+
+   "",""
+   "**General Macros**", ""
+   "CESM1_PIO", "Provide backwards compatible support for PIO interfaces/version released with CESM1 in about 2010"
+   "ESMF_INTERFACE", "Turns on ESMF support in a subset of driver code.  Also USE_ESMF_LIB and USE_ESMF_METADATA"
+   "FORTRANUNDERSCORE", "Used in ice_shr_reprosum86.c to support Fortran-C interfaces.  This should generally be turned on at all times.  There are other CPPs (FORTRANDOUBLEUNDERSCORE, FORTRANCAPS, etc) in ice_shr_reprosum.c that are generally not used in CICE but could be useful if problems arise in the Fortran-C interfaces"
+   "GPTL", "Turns on GPTL initialization if needed for PIO"
+   "key_oasis3", "Leverages Oasis CPPs to define the local MPI communicator"
+   "key_oasis3mct", "Leverages Oasis CPPs to define the local MPI communicator"
+   "key_oasis4", "Leverages Oasis CPPs to define the local MPI communicator"
+   "key_iomput", "Leverages Oasis CPPs to define the local MPI communicator"
+   "NO_F2003", "Turns off some Fortran 2003 features"
+   "NO_I8", "Converts integer*8 to integer*4. This could have adverse effects for certain algorithms including the ddpdd implementation associated with the ``bfbflag``"
+   "NO_R16", "Converts real*16 to real*8. This could have adverse effects for certain algorithms including the lsum16 implementation associated with the ``bfbflag``"
+   "USE_NETCDF", "Turns on netcdf code.  This is normally on and is needed for released configurations.  An older value, ncdf, is still supported"
+   "",""
+   "**Application Macros**", ""
+   "CESMCOUPLED", "Turns on code changes for the CESM coupled application "
+   "CICE_IN_NEMO", "Turns on code changes for coupling in the NEMO ocean model"
+   "CICE_DMI", "Turns on code changes for the DMI coupled model application"
+   "ICE_DA", "Turns on code changes in the hadgem driver"
+   "RASM_MODS", "Turns on code changes for the RASM coupled application"
+   "",""
+   "**Library Macros**", ""
+   "_OPENMP", "Automatically defined when compiling with OpenMP "
+   "_OPENACC", "Automatically defined when compiling with OpenACC "
+
 .. _tabsettings:

@@ -37,7 +81,7 @@ can be modified as needed.
"ICE_RSTDIR", "string", "unused", "${ICE_RUNDIR}/restart" "ICE_HSTDIR", "string", "unused", "${ICE_RUNDIR}/history" "ICE_LOGDIR", "string", "log directory", "${ICE_CASEDIR}/logs" - "ICE_DRVOPT", "string", "unused", "cice" + "ICE_DRVOPT", "string", "unused", "standalone/cice" "ICE_IOTYPE", "string", "I/O format", "set by cice.setup" " ", "netcdf", "serial netCDF" " ", "pio", "parallel netCDF" @@ -126,7 +170,7 @@ setup_nml "``histfreq_n``", "integer array", "frequency history output is written with ``histfreq``", "1,1,1,1,1" "``history_dir``", "string", "path to history output directory", "'./'" "``history_file``", "string", "output file for history", "'iceh'" - "``history_format``", "``default``", "read/write restart files in default format", "``default``" + "``history_format``", "``default``", "read/write history files in default format", "``default``" "", "``pio_pnetcdf``", "read/write restart files with pnetcdf in pio", "" "``history_precision``", "integer", "history file precision: 4 or 8 byte", "4" "``ice_ic``", "``default``", "latitude and sst dependent initial condition", "``default``" @@ -148,7 +192,6 @@ setup_nml "``restart``", "logical", "initialize using restart file", "``.false.``" "``restart_dir``", "string", "path to restart directory", "'./'" "``restart_ext``", "logical", "read/write halo cells in restart files", "``.false.``" - "``restart_coszen``", "logical", "read/write coszen in restart files", "``.false.``" "``restart_file``", "string", "output file prefix for restart dump", "'iced'" "``restart_format``", "``default``", "read/write restart file with default format", "``default``" "", "``pio_pnetcdf``", "read/write restart files with pnetcdf in pio", "" @@ -171,6 +214,8 @@ grid_nml "", "", "", "" "``bathymetry_file``", "string", "name of bathymetry file to be read", "‘unknown_bathymetry_file’" + "``bathymetry_format``", "``default``", "NetCDF depth field", "‘default’" + "", "``pop``", "pop thickness file in cm in ascii format", "" "``close_boundaries``", "logical", "set land on edges of grid", "``.false.``" "``dxrect``", "real", "x-direction grid spacing for rectangular grid in cm", "0.0" "``dyrect``", "real", "y-direction grid spacing for rectangular grid in cm", "0.0" @@ -193,6 +238,7 @@ grid_nml "``nfsd``", "integer", "number of floe size categories", "1" "``nilyr``", "integer", "number of vertical layers in ice", "0" "``nslyr``", "integer", "number of vertical layers in snow", "0" + "``orca_halogrid``", "logical", "use orca haloed grid for data/grid read", "``.false.``" "``use_bathymetry``", "logical", "use read in bathymetry file for basalstress option", "``.false.``" "", "", "", "" @@ -204,6 +250,7 @@ domain_nml :widths: 15, 15, 30, 15 "", "", "", "" + "``add_mpi_barriers``", "logical", "throttle communication", "``.false.``" "``block_size_x``", "integer", "block size in x direction", "-1" "``block_size_y``", "integer", "block size in y direction", "-1" "``distribution_type``", "``cartesian``", "2D cartesian block distribution method", "``cartesian``" @@ -297,6 +344,9 @@ thermo_nml "``phi_c_slow_mode``", ":math:`0<\phi_c < 1`", "critical liquid fraction", "0.05" "``phi_i_mushy``", ":math:`0<\phi_i < 1`", "solid fraction at lower boundary", "0.85" "``Rac_rapid_mode``", "real", "critical Rayleigh number", "10.0" + "``sw_redist``", "logical", "redistribute internal shortwave to surface", "``.false.``" + "``sw_frac``", "real", "fraction redistributed", "0.9" + "``sw_dtemp``", "real", "temperature difference from melt to start redistributing", "0.02" "", "", "", "" 
dynamics_nml
@@ -315,9 +365,9 @@ dynamics_nml
   "``basalstress``", "logical", "use basal stress parameterization for landfast ice", "``.false.``"
   "``Cf``", "real", "ratio of ridging work to PE change in ridging", "17.0"
   "``coriolis``", "``constant``", "constant coriolis value = 1.46e-4", "``latitude``"
-  "``Cstar``", "real", "constant in Hibler strength formula", "20"
   "", "``latitude``", "coriolis variable by latitude", ""
   "", "``zero``", "zero coriolis", ""
+  "``Cstar``", "real", "constant in Hibler strength formula", "20"
   "``e_ratio``", "real", "EVP ellipse aspect ratio", "2.0"
   "``kdyn``", "``-1``", "dynamics algorithm OFF", "1"
   "", "``0``", "dynamics OFF", ""
@@ -342,6 +392,8 @@ dynamics_nml
   "``ndte``", "integer", "number of EVP subcycles", "120"
   "``Pstar``", "real", "constant in Hibler strength formula (N/m\ :math:`^2`)", "2.75e4"
   "``revised_evp``", "logical", "use revised EVP formulation", "``.false.``"
+  "``ssh_stress``", "``coupled``", "computed from coupled sea surface height gradient", "``geostrophic``"
+  "", "``geostrophic``", "computed from ocean velocity", ""
   "``threshold_hw``", "real", "Max water depth for grounding (see :cite:`Amundrud04`)", "30."
   "``yield_curve``", "``ellipse``", "elliptical yield curve", "``ellipse``"
   "", "", "", ""
@@ -452,6 +504,7 @@ forcing_nml
   "", "``mm_per_month``", "", ""
   "", "``mm_per_sec``", "(same as MKS units)", ""
   "", "``m_per_sec``", "", ""
+  "``restart_coszen``", "logical", "read/write coszen in restart files", "``.false.``"
   "``restore_ocn``", "logical", "restore sst to data", "``.false.``"
   "``restore_ice``", "logical", "restore ice state along lateral boundaries", "``.false.``"
   "``tfrz_option``", "``linear_salt``", "linear functino of salinity (ktherm=1)", "``mushy``"
@@ -651,5 +704,3 @@ icefields_nml
   "", "``md``", "*e.g.,* write both monthly and daily files", ""
   "", "", "", ""
-
-
diff --git a/doc/source/user_guide/ug_implementation.rst b/doc/source/user_guide/ug_implementation.rst
index 44d4ef1d6..cbfe37b0c 100644
--- a/doc/source/user_guide/ug_implementation.rst
+++ b/doc/source/user_guide/ug_implementation.rst
@@ -181,6 +181,12 @@ that prints out the variable ``blkmask`` to the history file and which
 labels the blocks in the grid decomposition according to ``blkmask =
 my_task + iblk/100``.
 
+The namelist ``add_mpi_barriers`` can be set to ``.true.`` to help
+throttle communication for communication-intensive configurations.  This
+may slow the code down a bit.  These barriers have been added to
+a few select locations, but it's possible others may be needed.  As a general
+rule, ``add_mpi_barriers`` should be ``.false.``.
+
 *************
 Tripole grids
 *************
diff --git a/doc/source/user_guide/ug_running.rst b/doc/source/user_guide/ug_running.rst
index 8befee9cb..957cfc4fc 100644
--- a/doc/source/user_guide/ug_running.rst
+++ b/doc/source/user_guide/ug_running.rst
@@ -14,9 +14,10 @@ Software Requirements
 
 To run stand-alone, CICE requires
 
+- bash and csh
 - gmake (GNU Make)
 - Fortran and C compilers (Intel, PGI, GNU, Cray, and NAG have been tested)
-- NetCDF
+- NetCDF (this is actually optional but required to test out-of-the-box configurations)
 - MPI (this is actually optional but without it you can only run on 1 processor)
 
 Below are lists of software versions that the Consortium has tested at some point.  There is no
@@ -202,7 +203,10 @@ specifies the compilation environment associated with the machine.  This should
 specifies the grid.  This is a string and for the current CICE driver, gx1, gx3, and tx1 are supported. 
(default = gx3)
 
 ``--set``, ``-s`` SET1,SET2,SET3
-  specifies the optional settings for the case.  The settings for ``--suite`` are defined in the suite file.  Multiple settings can be specified by providing a comma deliminated set of values without spaces between settings.  The available settings are in **configurations/scripts/options** and ``cice.setup --help`` will also list them.  These settings files can change either the namelist values or overall case settings (such as the debug flag).
+  specifies the optional settings for the case.  The settings for ``--suite`` are defined in the suite file.  Multiple settings can be specified by providing a comma-delimited set of values without spaces between settings.  The available settings are in **configurations/scripts/options** and ``cice.setup --help`` will also list them.  These settings files can change either the namelist values or overall case settings (such as the debug flag).  For cases and tests (not suites), settings defined in **~/.cice_set** (if it exists) will be included in the --set options.  This behaviour can be overridden with the ``--ignore-user-set`` command line option.
+
+``--ignore-user-set``
+  ignores settings defined in **~/.cice_set** (if it exists) for cases and tests.  **~/.cice_set** is always ignored for test suites.
 
 For CICE, when setting up cases, the ``--case`` and ``--mach`` must be specified.
 It's also recommended that ``--env`` be set explicitly as well.
@@ -228,7 +232,13 @@ settings (options), the set_env.setting and set_nml.setting will be used to
 change the defaults.  This is done as part of the ``cice.setup`` and the
 modifications are resolved in the **cice.settings** and **ice_in** file placed in
 the case directory.  If multiple options are chosen that conflict, then the last
-option chosen takes precedent.  Not all options are compatible with each other.
+option chosen takes precedence.  Not all options are compatible with each other.
+
+Settings defined in **~/.cice_set** (if it exists) will be included in the ``--set``
+options.  This behaviour can be overridden with the ``--ignore-user-set`` command
+line option.  The format of the **~/.cice_set** file is identical to the
+``--set`` option, a single comma-delimited line of options.  Settings on the
+command line will take precedence over settings defined in **~/.cice_set**.
 
 Some of the options are
 
@@ -350,6 +360,25 @@ automatically clean the prior build.  If incremental builds are desired to
 save time during development, the ``ICE_CLEANBUILD`` setting in
 **cice.settings** should be modified.
 
+.. _cicecpps:
+
+C Preprocessor (CPP) Macros
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+There are a number of C Preprocessor (CPP) macros supported in the CICE model.  These
+allow certain coding features like NetCDF, MPI, or specific Fortran features to be
+excluded or included during the compile.
+
+The CPPs are defined by the `CPPDEFS` variable in the Makefile.  They are defined
+by passing -D[CPP] to the C and Fortran compilers (e.g., -DUSE_NETCDF) and this
+is what needs to be set in the `CPPDEFS` variable.  The value of `ICE_CPPDEFS` in
+**cice.settings** is copied into the Makefile `CPPDEFS` variable, as are settings
+hardwired into the **Macros.[machine]_[environment]** file.
+
+In general, ``-DFORTRANUNDERSCORE`` should always be set to support the Fortran/C
+interfaces in **ice_shr_reprosum.c**.  In addition, if NetCDF is used, ``-DUSE_NETCDF``
+should also be defined.  A list of available CPPs can be found in
+:ref:`tabcpps`.
 
 .. 
_porting: @@ -453,7 +482,7 @@ the **env.[machine]** file. The easiest way to change a user's default is to create a file in your home directory called **.cice\_proj** and add your preferred account name to the first line. There is also an option (``--acct``) in **cice.setup** to define the account number. -The order of precedent is **cice.setup** command line option, +The order of precedence is **cice.setup** command line option, **.cice\_proj** setting, and then value in the **env.[machine]** file. .. _queue: diff --git a/doc/source/user_guide/ug_testing.rst b/doc/source/user_guide/ug_testing.rst index 8f8fe9441..5369efe5f 100644 --- a/doc/source/user_guide/ug_testing.rst +++ b/doc/source/user_guide/ug_testing.rst @@ -56,6 +56,8 @@ For individual tests, the following command line options can be set ``--set`` SET1,SET2,SET3 (see :ref:`case_options`) +``--ignore-user-set`` (see :ref:`case_options`) + ``--acct`` ACCOUNT (see :ref:`case_options`) ``--grid`` GRID (see :ref:`case_options`) @@ -312,7 +314,7 @@ If a user adds ``--set`` to the suite, all tests in that suite will add that opt ./cice.setup --suite base_suite,decomp_suite --mach wolf --env gnu --testid myid -s debug -The option settings defined in the suite have precendent over the command line +The option settings defined in the suite have precendence over the command line values if there are conflicts. The predefined test suites are defined under **configuration/scripts/tests** and @@ -459,7 +461,7 @@ Test Suite Examples ./results.csh If there are conflicts between the ``--set`` options in the suite and on the command line, - the suite will take precedent. + the suite will take precedence. 5) **Multiple test suites from a single command line** diff --git a/icepack b/icepack index b1e41d9f1..4c42a82e3 160000 --- a/icepack +++ b/icepack @@ -1 +1 @@ -Subproject commit b1e41d9f12a59390aacdb933889c3c4a87c9e8d2 +Subproject commit 4c42a82e3d92f191a9c52bca3831e8d242e2e4c0 From d81a834d815f5df625d819fc72d333a6f114ce69 Mon Sep 17 00:00:00 2001 From: Denise Worthen Date: Thu, 13 Aug 2020 09:40:18 -0400 Subject: [PATCH 26/90] Fixcommit (#14) Align commit history between emc/develop and cice-consortium/master From 285985c089319010dab260b6c335a96911dbad9a Mon Sep 17 00:00:00 2001 From: Denise Worthen Date: Mon, 31 Aug 2020 12:53:02 -0400 Subject: [PATCH 27/90] Update CICE6 for integration to S2S * add wcoss_dell_p3 compiler macro * update to icepack w/ debug fix * replace SITE with MACHINE_ID * update compile scripts --- .gitmodules | 2 +- .../dynamics/ice_transport_driver.F90 | 142 +++++++++--------- cicecore/cicedynB/general/ice_init.F90 | 12 +- cicecore/cicedynB/general/ice_step_mod.F90 | 14 +- cicecore/shared/ice_init_column.F90 | 22 ++- cicecore/version.txt | 2 +- .../forapps/ufs/comp_ice.backend.clean | 10 +- .../forapps/ufs/comp_ice.backend.libcice | 10 +- .../scripts/machines/Macros.hera_intel | 12 +- .../scripts/machines/Macros.orion_intel | 12 +- .../machines/Macros.wcoss_dell_p3_intel | 49 ++++++ configuration/scripts/options/set_nml.alt04 | 2 +- configuration/scripts/tests/QC/cice.t-test.py | 7 +- doc/source/cice_index.rst | 2 +- doc/source/conf.py | 4 +- doc/source/developer_guide/dg_dynamics.rst | 5 +- doc/source/science_guide/sg_horiztrans.rst | 2 +- doc/source/user_guide/ug_case_settings.rst | 4 +- doc/source/user_guide/ug_testing.rst | 2 +- icepack | 2 +- 20 files changed, 194 insertions(+), 123 deletions(-) create mode 100644 configuration/scripts/machines/Macros.wcoss_dell_p3_intel diff --git a/.gitmodules b/.gitmodules index 
22e452f35..8a773d230 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +1,3 @@ [submodule "icepack"] path = icepack - url = https://github.com/cice-consortium/Icepack + url = https://github.com/NOAA-EMC/Icepack diff --git a/cicecore/cicedynB/dynamics/ice_transport_driver.F90 b/cicecore/cicedynB/dynamics/ice_transport_driver.F90 index c500e1631..82e04dc71 100644 --- a/cicecore/cicedynB/dynamics/ice_transport_driver.F90 +++ b/cicecore/cicedynB/dynamics/ice_transport_driver.F90 @@ -1,6 +1,7 @@ !======================================================================= ! -! Drivers for remapping and upwind ice transport +!deprecate upwind Drivers for remapping and upwind ice transport +! Drivers for incremental remapping ice transport ! ! authors: Elizabeth C. Hunke and William H. Lipscomb, LANL ! @@ -9,6 +10,7 @@ ! 2006: Incorporated remap transport driver and renamed from ! ice_transport_upwind. ! 2011: ECH moved edgearea arrays into ice_transport_remap.F90 +! 2020: deprecated upwind transport module ice_transport_driver @@ -28,12 +30,13 @@ module ice_transport_driver implicit none private - public :: init_transport, transport_remap, transport_upwind + public :: init_transport, transport_remap!deprecate upwind:, transport_upwind character (len=char_len), public :: & advection ! type of advection scheme used - ! 'upwind' => 1st order donor cell scheme +!deprecate upwind ! 'upwind' => 1st order donor cell scheme ! 'remap' => remapping scheme + ! 'none' => advection off (ktransport = -1 also turns it off) logical, parameter :: & ! if true, prescribe area flux across each edge l_fixed_area = .false. @@ -69,8 +72,9 @@ module ice_transport_driver !======================================================================= ! ! This subroutine is a wrapper for init_remap, which initializes the -! remapping transport scheme. If the model is run with upwind -! transport, no initializations are necessary. +! remapping transport scheme. +!deprecate upwind If the model is run with upwind +!deprecate upwind! transport, no initializations are necessary. ! ! authors William H. Lipscomb, LANL @@ -680,11 +684,12 @@ subroutine transport_remap (dt) end subroutine transport_remap !======================================================================= -! +!deprecate upwind! ! Computes the transport equations for one timestep using upwind. Sets ! several fields into a work array and passes it to upwind routine. - subroutine transport_upwind (dt) +!deprecate upwind + subroutine transport_upwind_deprecated (dt) use ice_boundary, only: ice_HaloUpdate use ice_blocks, only: nx_block, ny_block, block, get_block, nx_block, ny_block @@ -769,52 +774,52 @@ subroutine transport_upwind (dt) field_loc_Nface, field_type_vector) call ice_timer_stop(timer_bound) - !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block) - do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi - +!deprecate upwind !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block) +!deprecate upwind do iblk = 1, nblocks +!deprecate upwind this_block = get_block(blocks_ice(iblk),iblk) +!deprecate upwind ilo = this_block%ilo +!deprecate upwind ihi = this_block%ihi +!deprecate upwind jlo = this_block%jlo +!deprecate upwind jhi = this_block%jhi !----------------------------------------------------------------- ! 
fill work arrays with fields to be advected !----------------------------------------------------------------- - call state_to_work (nx_block, ny_block, & - ntrcr, & - narr, trcr_depend, & - aicen (:,:, :,iblk), trcrn (:,:,:,:,iblk), & - vicen (:,:, :,iblk), vsnon (:,:, :,iblk), & - aice0 (:,:, iblk), works (:,:, :,iblk)) +!deprecate upwind +!deprecate upwind call state_to_work (nx_block, ny_block, & +!deprecate upwind ntrcr, & +!deprecate upwind narr, trcr_depend, & +!deprecate upwind aicen (:,:, :,iblk), trcrn (:,:,:,:,iblk), & +!deprecate upwind vicen (:,:, :,iblk), vsnon (:,:, :,iblk), & +!deprecate upwind aice0 (:,:, iblk), works (:,:, :,iblk)) !----------------------------------------------------------------- ! advect !----------------------------------------------------------------- - call upwind_field (nx_block, ny_block, & - ilo, ihi, jlo, jhi, & - dt, & - narr, works(:,:,:,iblk), & - uee(:,:,iblk), vnn (:,:,iblk), & - HTE(:,:,iblk), HTN (:,:,iblk), & - tarea(:,:,iblk)) +!deprecate upwind call upwind_field (nx_block, ny_block, & +!deprecate upwind ilo, ihi, jlo, jhi, & +!deprecate upwind dt, & +!deprecate upwind narr, works(:,:,:,iblk), & +!deprecate upwind uee(:,:,iblk), vnn (:,:,iblk), & +!deprecate upwind HTE(:,:,iblk), HTN (:,:,iblk), & +!deprecate upwind tarea(:,:,iblk)) !----------------------------------------------------------------- ! convert work arrays back to state variables !----------------------------------------------------------------- - call work_to_state (nx_block, ny_block, & - ntrcr, narr, & - trcr_depend(:), trcr_base(:,:), & - n_trcr_strata(:), nt_strata(:,:), & - aicen(:,:, :,iblk), trcrn (:,:,:,:,iblk), & - vicen(:,:, :,iblk), vsnon (:,:, :,iblk), & - aice0(:,:, iblk), works (:,:, :,iblk)) +!deprecate upwind call work_to_state (nx_block, ny_block, & +!deprecate upwind ntrcr, narr, & +!deprecate upwind trcr_depend(:), trcr_base(:,:), & +!deprecate upwind n_trcr_strata(:), nt_strata(:,:), & +!deprecate upwind aicen(:,:, :,iblk), trcrn (:,:,:,:,iblk), & +!deprecate upwind vicen(:,:, :,iblk), vsnon (:,:, :,iblk), & +!deprecate upwind aice0(:,:, iblk), works (:,:, :,iblk)) - enddo ! iblk - !$OMP END PARALLEL DO +!deprecate upwind enddo ! iblk +!deprecate upwind !$OMP END PARALLEL DO deallocate (works) @@ -832,7 +837,8 @@ subroutine transport_upwind (dt) call ice_timer_stop(timer_advect) ! advection - end subroutine transport_upwind + end subroutine transport_upwind_deprecated +!deprecate upwind !======================================================================= ! The next few subroutines (through check_monotonicity) are called @@ -1455,12 +1461,12 @@ subroutine check_monotonicity (nx_block, ny_block, & end subroutine check_monotonicity !======================================================================= -! The remaining subroutines are called by transport_upwind. +!deprecate upwind! The remaining subroutines are called by transport_upwind. !======================================================================= ! ! 
Fill work array with state variables in preparation for upwind transport - - subroutine state_to_work (nx_block, ny_block, & +!deprecate upwind + subroutine state_to_work_deprecated (nx_block, ny_block, & ntrcr, & narr, trcr_depend, & aicen, trcrn, & @@ -1601,13 +1607,13 @@ subroutine state_to_work (nx_block, ny_block, & if (narr /= narrays) write(nu_diag,*) & "Wrong number of arrays in transport bound call" - end subroutine state_to_work + end subroutine state_to_work_deprecated !======================================================================= ! ! Convert work array back to state variables - - subroutine work_to_state (nx_block, ny_block, & +!deprecate upwind + subroutine work_to_state_deprecated (nx_block, ny_block, & ntrcr, narr, & trcr_depend, & trcr_base, & @@ -1715,13 +1721,13 @@ subroutine work_to_state (nx_block, ny_block, & if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - end subroutine work_to_state + end subroutine work_to_state_deprecated !======================================================================= ! ! upwind transport algorithm - - subroutine upwind_field (nx_block, ny_block, & +!deprecate upwind + subroutine upwind_field_deprecated (nx_block, ny_block, & ilo, ihi, jlo, jhi, & dt, & narrays, phi, & @@ -1764,26 +1770,26 @@ subroutine upwind_field (nx_block, ny_block, & do n = 1, narrays - do j = 1, jhi - do i = 1, ihi - worka(i,j)= & - upwind(phi(i,j,n),phi(i+1,j,n),uee(i,j),HTE(i,j),dt) - workb(i,j)= & - upwind(phi(i,j,n),phi(i,j+1,n),vnn(i,j),HTN(i,j),dt) - enddo - enddo - - do j = jlo, jhi - do i = ilo, ihi - phi(i,j,n) = phi(i,j,n) - ( worka(i,j)-worka(i-1,j) & - + workb(i,j)-workb(i,j-1) ) & - / tarea(i,j) - enddo - enddo +!deprecate upwind do j = 1, jhi +!deprecate upwind do i = 1, ihi +!deprecate upwind worka(i,j)= & +!deprecate upwind upwind(phi(i,j,n),phi(i+1,j,n),uee(i,j),HTE(i,j),dt) +!deprecate upwind workb(i,j)= & +!deprecate upwind upwind(phi(i,j,n),phi(i,j+1,n),vnn(i,j),HTN(i,j),dt) +!deprecate upwind enddo +!deprecate upwind enddo + +!deprecate upwind do j = jlo, jhi +!deprecate upwind do i = ilo, ihi +!deprecate upwind phi(i,j,n) = phi(i,j,n) - ( worka(i,j)-worka(i-1,j) & +!deprecate upwind + workb(i,j)-workb(i,j-1) ) & +!deprecate upwind / tarea(i,j) +!deprecate upwind enddo +!deprecate upwind enddo enddo ! narrays - end subroutine upwind_field + end subroutine upwind_field_deprecated !======================================================================= @@ -1791,13 +1797,13 @@ end subroutine upwind_field ! Define upwind function !------------------------------------------------------------------- - real(kind=dbl_kind) function upwind(y1,y2,a,h,dt) +!deprecate upwind real(kind=dbl_kind) function upwind(y1,y2,a,h,dt) - real(kind=dbl_kind), intent(in) :: y1,y2,a,h,dt +!deprecate upwind real(kind=dbl_kind), intent(in) :: y1,y2,a,h,dt - upwind = p5*dt*h*((a+abs(a))*y1+(a-abs(a))*y2) +!deprecate upwind upwind = p5*dt*h*((a+abs(a))*y1+(a-abs(a))*y2) - end function upwind +!deprecate upwind end function upwind !======================================================================= diff --git a/cicecore/cicedynB/general/ice_init.F90 b/cicecore/cicedynB/general/ice_init.F90 index d3b096eb3..f2eaae17d 100644 --- a/cicecore/cicedynB/general/ice_init.F90 +++ b/cicecore/cicedynB/general/ice_init.F90 @@ -795,7 +795,11 @@ subroutine input_data abort_list = trim(abort_list)//":1" endif - if (advection /= 'remap' .and. advection /= 'upwind' .and. 
advection /= 'none') then +!deprecate upwind if (advection /= 'remap' .and. advection /= 'upwind' .and. advection /= 'none') then + if (advection /= 'remap' .and. advection /= 'none') then + if (trim(advection) == 'upwind') then + if (my_task == master_task) write(nu_diag,*) subname//' ERROR: upwind advection has been deprecated' + endif if (my_task == master_task) write(nu_diag,*) subname//' ERROR: invalid advection=',trim(advection) abort_list = trim(abort_list)//":3" endif @@ -1178,8 +1182,10 @@ subroutine input_data tmpstr2 = ' transport enabled' if (trim(advection) == 'remap') then tmpstr2 = ': linear remapping advection' - elseif (trim(advection) == 'upwind') then - tmpstr2 = ': donor cell (upwind) advection' +!deprecate upwind elseif (trim(advection) == 'upwind') then +!deprecate upwind tmpstr2 = ': donor cell (upwind) advection' + elseif (trim(advection) == 'none') then + tmpstr2 = ': advection off' endif write(nu_diag,*) 'advection = ', trim(advection),trim(tmpstr2) else diff --git a/cicecore/cicedynB/general/ice_step_mod.F90 b/cicecore/cicedynB/general/ice_step_mod.F90 index 7a2493d58..77d0ad492 100644 --- a/cicecore/cicedynB/general/ice_step_mod.F90 +++ b/cicecore/cicedynB/general/ice_step_mod.F90 @@ -852,7 +852,8 @@ subroutine step_dyn_horiz (dt) use ice_dyn_eap, only: eap use ice_dyn_shared, only: kdyn, ktransport use ice_flux, only: init_history_dyn - use ice_transport_driver, only: advection, transport_upwind, transport_remap +!deprecate upwind use ice_transport_driver, only: advection, transport_upwind, transport_remap + use ice_transport_driver, only: advection, transport_remap real (kind=dbl_kind), intent(in) :: & dt ! dynamics time step @@ -872,12 +873,13 @@ subroutine step_dyn_horiz (dt) ! Horizontal ice transport !----------------------------------------------------------------- - if (ktransport > 0) then - if (advection == 'upwind') then - call transport_upwind (dt) ! upwind - else +!deprecate upwind if (ktransport > 0) then + if (ktransport > 0 .and. advection == 'remap') then +!deprecate upwind if (advection == 'upwind') then +!deprecate upwind call transport_upwind (dt) ! upwind +!deprecate upwind else call transport_remap (dt) ! incremental remapping - endif +!deprecate upwind endif endif end subroutine step_dyn_horiz diff --git a/cicecore/shared/ice_init_column.F90 b/cicecore/shared/ice_init_column.F90 index 0370a0d7e..b3937c0cd 100644 --- a/cicecore/shared/ice_init_column.F90 +++ b/cicecore/shared/ice_init_column.F90 @@ -877,7 +877,7 @@ subroutine init_bgc() endif ! .not. restart - !$OMP PARALLEL DO PRIVATE(iblk,i,j,k,n,ilo,ihi,jlo,jhi,this_block,sicen,trcrn_bgc) + !$OMP PARALLEL DO PRIVATE(iblk,i,j,n,ilo,ihi,jlo,jhi,this_block) do iblk = 1, nblocks this_block = get_block(blocks_ice(iblk),iblk) @@ -889,15 +889,6 @@ subroutine init_bgc() do j = jlo, jhi do i = ilo, ihi - do n = 1, ncat - do k = 1, nilyr - sicen(k,n) = trcrn(i,j,nt_sice+k-1,n,iblk) - enddo - do k = ntrcr_o+1, ntrcr - trcrn_bgc(k-ntrcr_o,n) = trcrn(i,j,k,n,iblk) - enddo - enddo - call icepack_load_ocean_bio_array(max_nbtrcr=icepack_max_nbtrcr, & max_algae=icepack_max_algae, max_don=icepack_max_don, & max_doc=icepack_max_doc, max_fe=icepack_max_fe, & @@ -919,7 +910,7 @@ subroutine init_bgc() file=__FILE__, line=__LINE__) if (.not. 
restart_bgc) then - !$OMP PARALLEL DO PRIVATE(iblk,i,j,n,ilo,ihi,jlo,jhi,this_block) + !$OMP PARALLEL DO PRIVATE(iblk,i,j,k,n,ilo,ihi,jlo,jhi,this_block,sicen,trcrn_bgc) do iblk = 1, nblocks this_block = get_block(blocks_ice(iblk),iblk) @@ -930,7 +921,14 @@ subroutine init_bgc() do j = jlo, jhi do i = ilo, ihi - + do n = 1, ncat + do k = 1, nilyr + sicen(k,n) = trcrn(i,j,nt_sice+k-1,n,iblk) + enddo + do k = ntrcr_o+1, ntrcr + trcrn_bgc(k-ntrcr_o,n) = trcrn(i,j,k,n,iblk) + enddo + enddo call icepack_init_bgc(ncat=ncat, nblyr=nblyr, nilyr=nilyr, ntrcr_o=ntrcr_o, & cgrid=cgrid, igrid=igrid, ntrcr=ntrcr, nbtrcr=nbtrcr, & sicen=sicen(:,:), trcrn=trcrn_bgc(:,:), sss=sss(i,j, iblk), & diff --git a/cicecore/version.txt b/cicecore/version.txt index 43f856223..83a606cb9 100644 --- a/cicecore/version.txt +++ b/cicecore/version.txt @@ -1 +1 @@ -CICE 6.1.2 +CICE 6.1.3 diff --git a/configuration/scripts/forapps/ufs/comp_ice.backend.clean b/configuration/scripts/forapps/ufs/comp_ice.backend.clean index 823f1f586..af6cfe9ab 100755 --- a/configuration/scripts/forapps/ufs/comp_ice.backend.clean +++ b/configuration/scripts/forapps/ufs/comp_ice.backend.clean @@ -1,19 +1,21 @@ #! /bin/csh -f ### Expect to find the following environment variables set on entry: -# SITE +# MACHINE_ID # SYSTEM_USERDIR # SRCDIR # EXEDIR setenv OBJDIR $EXEDIR/compile ; if !(-d $OBJDIR) mkdir -p $OBJDIR -if (${SITE} =~ cheyenne*) then +if (${MACHINE_ID} =~ cheyenne*) then setenv ARCH cheyenne_intel -else if (${SITE} =~ orion*) then +else if (${MACHINE_ID} =~ orion*) then setenv ARCH orion_intel -else if (${SITE} =~ hera*) then +else if (${MACHINE_ID} =~ hera*) then setenv ARCH hera_intel +else if (${MACHINE_ID} =~ wcoss*) then + setenv ARCH wcoss_dell_p3_intel else echo "CICE6 ${0}: ERROR in ARCH setup, ${hname}" exit -2 diff --git a/configuration/scripts/forapps/ufs/comp_ice.backend.libcice b/configuration/scripts/forapps/ufs/comp_ice.backend.libcice index ea38e048b..1b5b142a5 100755 --- a/configuration/scripts/forapps/ufs/comp_ice.backend.libcice +++ b/configuration/scripts/forapps/ufs/comp_ice.backend.libcice @@ -1,7 +1,7 @@ #! 
/bin/csh -f ### Expect to find the following environment variables set on entry: -# SITE +# MACHINE_ID # SYSTEM_USERDIR # SRCDIR # EXEDIR @@ -16,12 +16,14 @@ setenv OBJDIR $EXEDIR/compile ; if !(-d $OBJDIR) mkdir -p $OBJDIR setenv THRD no # set to yes for OpenMP threading -if (${SITE} =~ cheyenne*) then +if (${MACHINE_ID} =~ cheyenne*) then setenv ARCH cheyenne_intel -else if (${SITE} =~ orion*) then +else if (${MACHINE_ID} =~ orion*) then setenv ARCH orion_intel -else if (${SITE} =~ hera*) then +else if (${MACHINE_ID} =~ hera*) then setenv ARCH hera_intel +else if (${MACHINE_ID} =~ wcoss*) then + setenv ARCH wcoss_dell_p3_intel else echo "CICE6 ${0}: ERROR in ARCH setup, ${hname}" exit -2 diff --git a/configuration/scripts/machines/Macros.hera_intel b/configuration/scripts/machines/Macros.hera_intel index 230f43e70..caad25ead 100644 --- a/configuration/scripts/machines/Macros.hera_intel +++ b/configuration/scripts/machines/Macros.hera_intel @@ -4,11 +4,11 @@ CPP := fpp CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} -CFLAGS := -c -O2 -fp-model precise -xHost +CFLAGS := -c -O2 -fp-model precise -xHost FIXEDFLAGS := -132 FREEFLAGS := -FR -FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -traceback -align array64byte -xHost +FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -traceback -align array64byte -xHost FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) @@ -48,9 +48,9 @@ INCLDIR := $(INCLDIR) -I$(INC_NETCDF) #SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff -L$(LIB_PNETCDF) -lpnetcdf -lgptl SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff -ifeq ($(ICE_THREADED), true) - LDFLAGS += -qopenmp - CFLAGS += -qopenmp - FFLAGS += -qopenmp +ifeq ($(ICE_THREADED), true) + LDFLAGS += -qopenmp + CFLAGS += -qopenmp + FFLAGS += -qopenmp endif diff --git a/configuration/scripts/machines/Macros.orion_intel b/configuration/scripts/machines/Macros.orion_intel index 6dffdd0a2..fa6745e03 100644 --- a/configuration/scripts/machines/Macros.orion_intel +++ b/configuration/scripts/machines/Macros.orion_intel @@ -4,11 +4,11 @@ CPP := fpp CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} -CFLAGS := -c -O2 -fp-model precise -xHost +CFLAGS := -c -O2 -fp-model precise -xHost FIXEDFLAGS := -132 FREEFLAGS := -FR -FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -traceback -align array64byte -xHost +FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -traceback -align array64byte -xHost FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) @@ -48,9 +48,9 @@ INCLDIR := $(INCLDIR) -I$(INC_NETCDF) #SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff -L$(LIB_PNETCDF) -lpnetcdf -lgptl SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff -ifeq ($(ICE_THREADED), true) - LDFLAGS += -qopenmp - CFLAGS += -qopenmp - FFLAGS += -qopenmp +ifeq ($(ICE_THREADED), true) + LDFLAGS += -qopenmp + CFLAGS += -qopenmp + FFLAGS += -qopenmp endif diff --git a/configuration/scripts/machines/Macros.wcoss_dell_p3_intel b/configuration/scripts/machines/Macros.wcoss_dell_p3_intel new file mode 100644 index 000000000..a835be424 --- /dev/null +++ b/configuration/scripts/machines/Macros.wcoss_dell_p3_intel @@ -0,0 +1,49 @@ +#============================================================================== +# Makefile macros for wcoss phase3 machine, intel compiler +#============================================================================== + +CPP := fpp +CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} +CFLAGS := -c -O2 -fp-model precise -xHost + +FIXEDFLAGS := -132 +FREEFLAGS := -FR +FFLAGS := -fp-model precise 
-convert big_endian -assume byterecl -ftz -traceback -align array64byte -xHost +FFLAGS_NOOPT:= -O0 + +ifeq ($(ICE_BLDDEBUG), true) + FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created -link_mpi=dbg +else + FFLAGS += -O2 +endif + +SCC := icc +SFC := ifort +MPICC := mpiicc +MPIFC := mpiifort + +ifeq ($(ICE_COMMDIR), mpi) + FC := $(MPIFC) + CC := $(MPICC) +else + FC := $(SFC) + CC := $(SCC) +endif +LD:= $(FC) + +NETCDF_PATH := $(NETCDF) + +PIO_CONFIG_OPTS:= --enable-filesystem-hints=gpfs + +INC_NETCDF := $(NETCDF_PATH)/include +LIB_NETCDF := $(NETCDF_PATH)/lib + +INCLDIR := $(INCLDIR) -I$(INC_NETCDF) +#SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff -L$(LIB_PNETCDF) -lpnetcdf -lgptl +SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff + +ifeq ($(ICE_THREADED), true) + LDFLAGS += -qopenmp + CFLAGS += -qopenmp + FFLAGS += -qopenmp +endif diff --git a/configuration/scripts/options/set_nml.alt04 b/configuration/scripts/options/set_nml.alt04 index 937704294..e3689fe82 100644 --- a/configuration/scripts/options/set_nml.alt04 +++ b/configuration/scripts/options/set_nml.alt04 @@ -22,7 +22,7 @@ kevp_kernel = 102 fbot_xfer_type = 'Cdn_ocn' shortwave = 'dEdd' formdrag = .true. -advection = 'upwind' +advection = 'remap' kstrength = 0 krdg_partic = 0 krdg_redist = 0 diff --git a/configuration/scripts/tests/QC/cice.t-test.py b/configuration/scripts/tests/QC/cice.t-test.py index 86938d8e8..987175245 100755 --- a/configuration/scripts/tests/QC/cice.t-test.py +++ b/configuration/scripts/tests/QC/cice.t-test.py @@ -448,7 +448,12 @@ def plot_data(data, lat, lon, units, case, plot_type): # Make some room at the bottom of the figure, and create a colorbar fig.subplots_adjust(bottom=0.2) cbar_ax = fig.add_axes([0.11,0.1,0.8,0.05]) - cb = plt.colorbar(sc, cax=cbar_ax, orientation="horizontal", format="%.2f") + if '\n- ' in case: + # If making a difference plot, use scientific notation for colorbar + cb = plt.colorbar(sc, cax=cbar_ax, orientation="horizontal", format="%.2e") + else: + # If plotting non-difference data, do not use scientific notation for colorbar + cb = plt.colorbar(sc, cax=cbar_ax, orientation="horizontal", format="%.2f") cb.set_label(units, x=1.0) outfile = 'ice_thickness_{}.png'.format(case.replace('\n- ','_minus_')) diff --git a/doc/source/cice_index.rst b/doc/source/cice_index.rst index 229fa92d5..1fb73c2d7 100644 --- a/doc/source/cice_index.rst +++ b/doc/source/cice_index.rst @@ -578,7 +578,7 @@ either Celsius or Kelvin units). "strairx(y)", "stress on ice by air in the x(y)-direction (centered in U cell)", "N/m\ :math:`^2`" "strairx(y)T", "stress on ice by air, x(y)-direction (centered in T cell)", "N/m\ :math:`^2`" "strax(y)", "wind stress components from data", "N/m\ :math:`^2`" - "strength", "ice strength (pressure)", "N/m" + "strength", "ice strength", "N/m" "stress12", "internal ice stress, :math:`\sigma_{12}`", "N/m" "stressm", "internal ice stress, :math:`\sigma_{11}-\sigma_{22}`", "N/m" "stressp", "internal ice stress, :math:`\sigma_{11}+\sigma_{22}`", "N/m" diff --git a/doc/source/conf.py b/doc/source/conf.py index 840ef4a44..8d0df9777 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -62,9 +62,9 @@ # built documents. # # The short X.Y version. -version = u'6.1.2' +version = u'6.1.3' # The full version, including alpha/beta/rc tags. -version = u'6.1.2' +version = u'6.1.3' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
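The colorbar logic added to **cice.t-test.py** earlier in this patch is easier to
see in isolation.  Below is a minimal, self-contained sketch of the same
branching (hypothetical data and case names; it assumes only matplotlib and
numpy, not the rest of the QC script)::

    import matplotlib.pyplot as plt
    import numpy as np

    case = 'run_a\n- run_b'               # difference plots join the two case names with '\n- '
    data = np.random.rand(10, 10) * 1e-4  # hypothetical small-magnitude difference field

    fig, ax = plt.subplots()
    sc = ax.pcolormesh(data)
    fig.subplots_adjust(bottom=0.2)
    cbar_ax = fig.add_axes([0.11, 0.1, 0.8, 0.05])

    if '\n- ' in case:
        # difference fields can be near machine precision, so scientific
        # notation keeps the colorbar tick labels readable
        cb = plt.colorbar(sc, cax=cbar_ax, orientation="horizontal", format="%.2e")
    else:
        # absolute fields keep the original fixed-point labels
        cb = plt.colorbar(sc, cax=cbar_ax, orientation="horizontal", format="%.2f")
    cb.set_label('m', x=1.0)
    fig.savefig('ice_thickness_example.png')
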
diff --git a/doc/source/developer_guide/dg_dynamics.rst b/doc/source/developer_guide/dg_dynamics.rst index 0a48513dc..3551763b5 100644 --- a/doc/source/developer_guide/dg_dynamics.rst +++ b/doc/source/developer_guide/dg_dynamics.rst @@ -50,8 +50,9 @@ abort if set. To override the abort, use value 102 for testing. Transport ----------------- -The transport (advection) methods are found in **cicecore/cicedynB/dynamics/**. Two methods are supported, -upwind and remap. These are set in namelist via the advection variable. +The transport (advection) methods are found in **cicecore/cicedynB/dynamics/**. Only the incremental +remapping method is supported at this time, and is set in namelist via the ``advection`` variable. +Transport can be turned off by setting ``advection = none`` or ``ktransport = -1``. Infrastructure diff --git a/doc/source/science_guide/sg_horiztrans.rst b/doc/source/science_guide/sg_horiztrans.rst index bafb4c72f..33b37564e 100644 --- a/doc/source/science_guide/sg_horiztrans.rst +++ b/doc/source/science_guide/sg_horiztrans.rst @@ -33,7 +33,7 @@ introductory comments in **ice\_transport\_remap.F90**. Prognostic equations for ice and/or snow density may be included in future model versions but have not yet been implemented. -Two transport schemes are available: upwind and the incremental +One transport scheme is available, the incremental remapping scheme of :cite:`Dukowicz00` as modified for sea ice by :cite:`Lipscomb04`. The remapping scheme has several desirable features: diff --git a/doc/source/user_guide/ug_case_settings.rst b/doc/source/user_guide/ug_case_settings.rst index 550162515..032c8b529 100644 --- a/doc/source/user_guide/ug_case_settings.rst +++ b/doc/source/user_guide/ug_case_settings.rst @@ -358,13 +358,13 @@ dynamics_nml "", "", "", "" "``advection``", "``remap``", "linear remapping advection scheme", "``remap``" - "", "``upwind``", "donor cell advection", "" + "", "``none``", "advection off", "" "``alphab``", "real", ":math:`\alpha_{b}` factor in :cite:`Lemieux16`", "20.0" "``arlx``", "real", "revised_evp value", "300.0" "``brlx``", "real", "revised_evp value", "300.0" "``basalstress``", "logical", "use basal stress parameterization for landfast ice", "``.false.``" "``Cf``", "real", "ratio of ridging work to PE change in ridging", "17.0" - "``coriolis``", "``constant``", "constant coriolis value = 1.46e-4", "``latitude``" + "``coriolis``", "``constant``", "constant coriolis value = 1.46e-4 s\ :math:`^{-1}`", "``latitude``" "", "``latitude``", "coriolis variable by latitude", "" "", "``zero``", "zero coriolis", "" "``Cstar``", "real", "constant in Hibler strength formula", "20" diff --git a/doc/source/user_guide/ug_testing.rst b/doc/source/user_guide/ug_testing.rst index 5369efe5f..d7e4a9fa4 100644 --- a/doc/source/user_guide/ug_testing.rst +++ b/doc/source/user_guide/ug_testing.rst @@ -228,7 +228,7 @@ boundary around the entire domain. 
It includes the following namelist modificat - ``dxrect``: ``16.e5`` cm - ``dyrect``: ``16.e5`` cm - ``ktherm``: ``-1`` (disables thermodynamics) -- ``coriolis``: ``zero`` (zero coriolis force) +- ``coriolis``: ``constant`` (``f=1.46e-4`` s\ :math:`^{-1}`) - ``ice_data_type`` : ``box2001`` (special ice concentration initialization) - ``atm_data_type`` : ``box2001`` (special atmospheric and ocean forcing) diff --git a/icepack b/icepack index 4c42a82e3..3b1ac0187 160000 --- a/icepack +++ b/icepack @@ -1 +1 @@ -Subproject commit 4c42a82e3d92f191a9c52bca3831e8d242e2e4c0 +Subproject commit 3b1ac0187ce30fbb950276b979376b041ca4467b From ac617cde36db5b41029d2c2523b0fb52c711897b Mon Sep 17 00:00:00 2001 From: Minsuk Ji <57227195+MinsukJi-NOAA@users.noreply.github.com> Date: Thu, 8 Oct 2020 07:13:14 -0400 Subject: [PATCH 28/90] Support TACC stampede (#19) --- .../forapps/ufs/comp_ice.backend.clean | 2 + .../forapps/ufs/comp_ice.backend.libcice | 2 + .../scripts/machines/Macros.stampede_intel | 56 +++++++++++++++++++ 3 files changed, 60 insertions(+) create mode 100644 configuration/scripts/machines/Macros.stampede_intel diff --git a/configuration/scripts/forapps/ufs/comp_ice.backend.clean b/configuration/scripts/forapps/ufs/comp_ice.backend.clean index af6cfe9ab..d75d381b4 100755 --- a/configuration/scripts/forapps/ufs/comp_ice.backend.clean +++ b/configuration/scripts/forapps/ufs/comp_ice.backend.clean @@ -16,6 +16,8 @@ else if (${MACHINE_ID} =~ hera*) then setenv ARCH hera_intel else if (${MACHINE_ID} =~ wcoss*) then setenv ARCH wcoss_dell_p3_intel +else if (${MACHINE_ID} =~ stampede*) then + setenv ARCH stampede_intel else echo "CICE6 ${0}: ERROR in ARCH setup, ${hname}" exit -2 diff --git a/configuration/scripts/forapps/ufs/comp_ice.backend.libcice b/configuration/scripts/forapps/ufs/comp_ice.backend.libcice index 1b5b142a5..47985bef2 100755 --- a/configuration/scripts/forapps/ufs/comp_ice.backend.libcice +++ b/configuration/scripts/forapps/ufs/comp_ice.backend.libcice @@ -24,6 +24,8 @@ else if (${MACHINE_ID} =~ hera*) then setenv ARCH hera_intel else if (${MACHINE_ID} =~ wcoss*) then setenv ARCH wcoss_dell_p3_intel +else if (${MACHINE_ID} =~ stampede*) then + setenv ARCH stampede_intel else echo "CICE6 ${0}: ERROR in ARCH setup, ${hname}" exit -2 diff --git a/configuration/scripts/machines/Macros.stampede_intel b/configuration/scripts/machines/Macros.stampede_intel new file mode 100644 index 000000000..14bbc7a4a --- /dev/null +++ b/configuration/scripts/machines/Macros.stampede_intel @@ -0,0 +1,56 @@ +#============================================================================== +# Makefile macros for TACC stampede, intel compiler +#============================================================================== + +CPP := fpp +CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} +CFLAGS := -c -O2 -fp-model precise -xHost + +FIXEDFLAGS := -132 +FREEFLAGS := -FR +FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -traceback -align array64byte -xHost +FFLAGS_NOOPT:= -O0 + +ifeq ($(ICE_BLDDEBUG), true) + FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created -link_mpi=dbg +else + FFLAGS += -O2 +endif + +SCC := icc +SFC := ifort +MPICC := mpiicc +MPIFC := mpiifort + +ifeq ($(ICE_COMMDIR), mpi) + FC := $(MPIFC) + CC := $(MPICC) +else + FC := $(SFC) + CC := $(SCC) +endif +LD:= $(FC) + +NETCDF_PATH := $(NETCDF_ROOT) + +PIO_CONFIG_OPTS:= --enable-filesystem-hints=gpfs + +#PNETCDF_PATH := $(PNETCDF) +#PNETCDF_PATH := 
/glade/u/apps/ch/opt/pio/2.2/mpt/2.15f/intel/17.0.1/lib + +INC_NETCDF := $(NETCDF_PATH)/include +LIB_NETCDF := $(NETCDF_PATH)/lib + +#LIB_PNETCDF := $(PNETCDF_PATH)/lib +#LIB_MPI := $(IMPILIBDIR) + +INCLDIR := $(INCLDIR) -I$(INC_NETCDF) +#SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff -L$(LIB_PNETCDF) -lpnetcdf -lgptl +SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff + +ifeq ($(ICE_THREADED), true) + LDFLAGS += -qopenmp + CFLAGS += -qopenmp + FFLAGS += -qopenmp +endif + From 1e4f42bcd2a11cac6fda8b2a49a7af45a41c459f Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Fri, 30 Oct 2020 11:13:17 -0400 Subject: [PATCH 29/90] update icepack --- .gitmodules | 2 +- icepack | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitmodules b/.gitmodules index 8a773d230..c095cc774 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +1,3 @@ [submodule "icepack"] path = icepack - url = https://github.com/NOAA-EMC/Icepack + url = https://github.com/DeniseWorthen/Icepack diff --git a/icepack b/icepack index 3b1ac0187..f11d17a01 160000 --- a/icepack +++ b/icepack @@ -1 +1 @@ -Subproject commit 3b1ac0187ce30fbb950276b979376b041ca4467b +Subproject commit f11d17a01472ce2ddfb77dbbf8ef8432114aa1ba From 41afe74306043b904ec6529f1d0bc2ec89293feb Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Fri, 30 Oct 2020 17:47:51 +0000 Subject: [PATCH 30/90] add ice_dyn_vp module to CICE_InitMod --- cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 | 29 ++++++++++--------- 1 file changed, 15 insertions(+), 14 deletions(-) diff --git a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 index cb70c9b4a..b37d73f65 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 @@ -53,6 +53,7 @@ subroutine cice_init use ice_domain_size, only: ncat, nfsd use ice_dyn_eap, only: init_eap, alloc_dyn_eap use ice_dyn_shared, only: kdyn, init_dyn, alloc_dyn_shared + use ice_dyn_vp, only: init_vp use ice_flux, only: init_coupler_flux, init_history_therm, & init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux use ice_forcing, only: init_forcing_ocn @@ -162,8 +163,8 @@ subroutine cice_init call faero_optics !initialize aerosol optical property tables end if - ! Initialize shortwave components using swdn from previous timestep - ! if restarting. These components will be scaled to current forcing + ! Initialize shortwave components using swdn from previous timestep + ! if restarting. These components will be scaled to current forcing ! in prep_radiation. if (trim(runtype) == 'continue' .or. restart) then @@ -250,7 +251,7 @@ subroutine init_restart if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - if (trim(runtype) == 'continue') then + if (trim(runtype) == 'continue') then ! start from core restart file call restartfile() ! given by pointer in ice_in call calendar(time) ! update time parameters @@ -261,17 +262,17 @@ subroutine init_restart ! call restartfile_v4 (ice_ic) ! CICE v4.1 binary restart file !!! uncomment if EAP restart data exists ! if (kdyn == 2) call read_restart_eap - endif + endif ! tracers - ! ice age tracer - if (tr_iage) then + ! ice age tracer + if (tr_iage) then if (trim(runtype) == 'continue') & restart_age = .true. if (restart_age) then call read_restart_age else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_age(trcrn(:,:,nt_iage,:,iblk)) enddo ! 
iblk endif @@ -282,7 +283,7 @@ subroutine init_restart if (restart_FY) then call read_restart_FY else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_FY(trcrn(:,:,nt_FY,:,iblk)) enddo ! iblk endif @@ -293,7 +294,7 @@ subroutine init_restart if (restart_lvl) then call read_restart_lvl else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_lvl(iblk,trcrn(:,:,nt_alvl,:,iblk), & trcrn(:,:,nt_vlvl,:,iblk)) enddo ! iblk @@ -306,7 +307,7 @@ subroutine init_restart if (restart_pond_cesm) then call read_restart_pond_cesm else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_cesm(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk)) enddo ! iblk @@ -319,7 +320,7 @@ subroutine init_restart if (restart_pond_lvl) then call read_restart_pond_lvl else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_lvl(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk), & trcrn(:,:,nt_ipnd,:,iblk), & @@ -334,7 +335,7 @@ subroutine init_restart if (restart_pond_topo) then call read_restart_pond_topo else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_topo(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk), & trcrn(:,:,nt_ipnd,:,iblk)) @@ -357,7 +358,7 @@ subroutine init_restart if (restart_iso) then call read_restart_iso else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_isotope(trcrn(:,:,nt_isosno:nt_isosno+n_iso-1,:,iblk), & trcrn(:,:,nt_isoice:nt_isoice+n_iso-1,:,iblk)) enddo ! iblk @@ -369,7 +370,7 @@ subroutine init_restart if (restart_aero) then call read_restart_aero else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_aerosol(trcrn(:,:,nt_aero:nt_aero+4*n_aero-1,:,iblk)) enddo ! iblk endif ! .not. restart_aero From 2a0f3329b3dbea40b70a03830ce1cb6e86d6d8b0 Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Tue, 10 Nov 2020 10:29:03 -0500 Subject: [PATCH 31/90] update gitmodules, update icepack --- .gitmodules | 2 +- icepack | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitmodules b/.gitmodules index c095cc774..8a773d230 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +1,3 @@ [submodule "icepack"] path = icepack - url = https://github.com/DeniseWorthen/Icepack + url = https://github.com/NOAA-EMC/Icepack diff --git a/icepack b/icepack index f11d17a01..db2a47789 160000 --- a/icepack +++ b/icepack @@ -1 +1 @@ -Subproject commit f11d17a01472ce2ddfb77dbbf8ef8432114aa1ba +Subproject commit db2a4778970ae340b6bdd62eb03f60cd37a13f75 From f773ef3892615da4b4af26b4be3e57c9f29b9343 Mon Sep 17 00:00:00 2001 From: Denise Worthen Date: Tue, 10 Nov 2020 10:37:11 -0500 Subject: [PATCH 32/90] Update CICE to consortium master (#23) updates include: * deprecate upwind advection (CICE-Consortium#508) * add implicit VP solver (CICE-Consortium#491) --- cicecore/cicedynB/dynamics/ice_dyn_eap.F90 | 44 +- cicecore/cicedynB/dynamics/ice_dyn_evp.F90 | 115 +- cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 | 4 +- cicecore/cicedynB/dynamics/ice_dyn_shared.F90 | 280 +- cicecore/cicedynB/dynamics/ice_dyn_vp.F90 | 3689 +++++++++++++++++ cicecore/cicedynB/general/ice_forcing.F90 | 14 +- cicecore/cicedynB/general/ice_init.F90 | 150 +- cicecore/cicedynB/general/ice_step_mod.F90 | 4 +- .../comm/mpi/ice_global_reductions.F90 | 72 +- .../comm/serial/ice_global_reductions.F90 | 72 +- cicecore/cicedynB/infrastructure/ice_grid.F90 | 1 + .../drivers/direct/hadgem3/CICE_InitMod.F90 | 10 +- .../drivers/direct/hadgem3/CICE_RunMod.F90 | 5 + cicecore/drivers/mct/cesm1/CICE_InitMod.F90 | 10 +- cicecore/drivers/mct/cesm1/CICE_RunMod.F90 | 5 + 
cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 | 38 +- cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 | 9 +- cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 | 5 + .../drivers/standalone/cice/CICE_InitMod.F90 | 10 +- .../drivers/standalone/cice/CICE_RunMod.F90 | 3 +- .../standalone/cice/CICE_RunMod.F90_debug | 5 + cicecore/shared/ice_fileunits.F90 | 1 - configuration/scripts/ice_in | 15 + .../scripts/machines/Macros.banting_intel | 2 +- .../scripts/machines/Macros.cesium_intel | 4 +- .../scripts/machines/Macros.conda_linux | 2 +- .../scripts/machines/Macros.conda_macos | 2 +- .../scripts/machines/Macros.daley_intel | 2 +- .../scripts/machines/Macros.fram_intel | 2 +- .../scripts/machines/Macros.millikan_intel | 2 +- .../scripts/machines/environment.yml | 1 + configuration/scripts/options/set_env.lapack | 1 + configuration/scripts/options/set_nml.diagimp | 3 + .../scripts/options/set_nml.dynanderson | 3 + .../scripts/options/set_nml.dynpicard | 3 + .../scripts/options/set_nml.nonlin5000 | 1 + configuration/scripts/options/set_nml.run3dt | 6 + configuration/scripts/tests/base_suite.ts | 1 + doc/source/cice_index.rst | 2 +- doc/source/developer_guide/dg_driver.rst | 7 +- doc/source/developer_guide/dg_dynamics.rst | 10 +- doc/source/master_list.bib | 27 + doc/source/science_guide/sg_dynamics.rst | 228 +- doc/source/user_guide/ug_case_settings.rst | 19 + icepack | 2 +- 45 files changed, 4622 insertions(+), 269 deletions(-) create mode 100644 cicecore/cicedynB/dynamics/ice_dyn_vp.F90 create mode 100644 configuration/scripts/options/set_env.lapack create mode 100644 configuration/scripts/options/set_nml.diagimp create mode 100644 configuration/scripts/options/set_nml.dynanderson create mode 100644 configuration/scripts/options/set_nml.dynpicard create mode 100644 configuration/scripts/options/set_nml.nonlin5000 create mode 100644 configuration/scripts/options/set_nml.run3dt diff --git a/cicecore/cicedynB/dynamics/ice_dyn_eap.F90 b/cicecore/cicedynB/dynamics/ice_dyn_eap.F90 index 3b31fa8cd..e6bb86bff 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_eap.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_eap.F90 @@ -122,7 +122,8 @@ subroutine eap (dt) use ice_dyn_shared, only: fcor_blk, ndte, dtei, & denom1, uvel_init, vvel_init, arlx1i, & dyn_prep1, dyn_prep2, stepu, dyn_finish, & - basal_stress_coeff, basalstress + basal_stress_coeff, basalstress, & + stack_velocity_field, unstack_velocity_field use ice_flux, only: rdg_conv, strairxT, strairyT, & strairx, strairy, uocn, vocn, ss_tltx, ss_tlty, iceumask, fm, & strtltx, strtlty, strocnx, strocny, strintx, strinty, taubx, tauby, & @@ -354,11 +355,6 @@ subroutine eap (dt) vicen = vicen (i,j,:,iblk), & strength = strength(i,j, iblk) ) enddo ! ij - - ! load velocity into array for boundary updates - fld2(:,:,1,iblk) = uvel(:,:,iblk) - fld2(:,:,2,iblk) = vvel(:,:,iblk) - enddo ! iblk !$TCXOMP END PARALLEL DO @@ -370,18 +366,12 @@ subroutine eap (dt) call ice_HaloUpdate (strength, halo_info, & field_loc_center, field_type_scalar) ! velocities may have changed in dyn_prep2 + call stack_velocity_field(uvel, vvel, fld2) call ice_HaloUpdate (fld2, halo_info, & field_loc_NEcorner, field_type_vector) + call unstack_velocity_field(fld2, uvel, vvel) call ice_timer_stop(timer_bound) - ! 
unload - !$OMP PARALLEL DO PRIVATE(iblk) - do iblk = 1, nblocks - uvel(:,:,iblk) = fld2(:,:,1,iblk) - vvel(:,:,iblk) = fld2(:,:,2,iblk) - enddo - !$OMP END PARALLEL DO - if (maskhalo_dyn) then call ice_timer_start(timer_bound) halomask = 0 @@ -472,10 +462,6 @@ subroutine eap (dt) uvel (:,:,iblk), vvel (:,:,iblk), & Tbu (:,:,iblk)) - ! load velocity into array for boundary updates - fld2(:,:,1,iblk) = uvel(:,:,iblk) - fld2(:,:,2,iblk) = vvel(:,:,iblk) - !----------------------------------------------------------------- ! evolution of structure tensor A !----------------------------------------------------------------- @@ -501,6 +487,7 @@ subroutine eap (dt) enddo !$TCXOMP END PARALLEL DO + call stack_velocity_field(uvel, vvel, fld2) call ice_timer_start(timer_bound) if (maskhalo_dyn) then call ice_HaloUpdate (fld2, halo_info_mask, & @@ -510,14 +497,7 @@ subroutine eap (dt) field_loc_NEcorner, field_type_vector) endif call ice_timer_stop(timer_bound) - - ! unload - !$OMP PARALLEL DO PRIVATE(iblk) - do iblk = 1, nblocks - uvel(:,:,iblk) = fld2(:,:,1,iblk) - vvel(:,:,iblk) = fld2(:,:,2,iblk) - enddo - !$OMP END PARALLEL DO + call unstack_velocity_field(fld2, uvel, vvel) enddo ! subcycling @@ -556,16 +536,12 @@ end subroutine eap !======================================================================= ! Initialize parameters and variables needed for the eap dynamics -! (based on init_evp) +! (based on init_dyn) - subroutine init_eap (dt) + subroutine init_eap use ice_blocks, only: nx_block, ny_block use ice_domain, only: nblocks - use ice_dyn_shared, only: init_evp - - real (kind=dbl_kind), intent(in) :: & - dt ! time step ! local variables @@ -595,8 +571,6 @@ subroutine init_eap (dt) file=__FILE__, line=__LINE__) phi = pi/c12 ! diamond shaped floe smaller angle (default phi = 30 deg) - call init_evp (dt) - !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks do j = 1, ny_block @@ -1321,7 +1295,7 @@ subroutine stress_eap (nx_block, ny_block, & tensionse = -cym(i,j)*uvel(i ,j-1) - dyt(i,j)*uvel(i-1,j-1) & + cxp(i,j)*vvel(i ,j-1) - dxt(i,j)*vvel(i ,j ) - ! shearing strain rate = e_12 + ! shearing strain rate = 2*e_12 shearne = -cym(i,j)*vvel(i ,j ) - dyt(i,j)*vvel(i-1,j ) & - cxm(i,j)*uvel(i ,j ) - dxt(i,j)*uvel(i ,j-1) shearnw = -cyp(i,j)*vvel(i-1,j ) + dyt(i,j)*vvel(i ,j ) & diff --git a/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 b/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 index 0f8acd547..5846cf143 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 @@ -94,7 +94,7 @@ subroutine evp (dt) ice_timer_start, ice_timer_stop, timer_evp_1d, timer_evp_2d use ice_dyn_evp_1d, only: ice_dyn_evp_1d_copyin, ice_dyn_evp_1d_kernel, & ice_dyn_evp_1d_copyout - use ice_dyn_shared, only: kevp_kernel + use ice_dyn_shared, only: kevp_kernel, stack_velocity_field, unstack_velocity_field real (kind=dbl_kind), intent(in) :: & dt ! time step @@ -297,10 +297,6 @@ subroutine evp (dt) strength = strength(i,j, iblk) ) enddo ! ij - ! load velocity into array for boundary updates - fld2(:,:,1,iblk) = uvel(:,:,iblk) - fld2(:,:,2,iblk) = vvel(:,:,iblk) - enddo ! iblk !$TCXOMP END PARALLEL DO @@ -312,18 +308,12 @@ subroutine evp (dt) call ice_HaloUpdate (strength, halo_info, & field_loc_center, field_type_scalar) ! velocities may have changed in dyn_prep2 + call stack_velocity_field(uvel, vvel, fld2) call ice_HaloUpdate (fld2, halo_info, & field_loc_NEcorner, field_type_vector) + call unstack_velocity_field(fld2, uvel, vvel) call ice_timer_stop(timer_bound) - ! 
unload - !$OMP PARALLEL DO PRIVATE(iblk) - do iblk = 1, nblocks - uvel(:,:,iblk) = fld2(:,:,1,iblk) - vvel(:,:,iblk) = fld2(:,:,2,iblk) - enddo - !$OMP END PARALLEL DO - if (maskhalo_dyn) then call ice_timer_start(timer_bound) halomask = 0 @@ -442,13 +432,10 @@ subroutine evp (dt) uvel_init(:,:,iblk), vvel_init(:,:,iblk),& uvel (:,:,iblk), vvel (:,:,iblk), & Tbu (:,:,iblk)) - - ! load velocity into array for boundary updates - fld2(:,:,1,iblk) = uvel(:,:,iblk) - fld2(:,:,2,iblk) = vvel(:,:,iblk) enddo !$TCXOMP END PARALLEL DO + call stack_velocity_field(uvel, vvel, fld2) call ice_timer_start(timer_bound) if (maskhalo_dyn) then call ice_HaloUpdate (fld2, halo_info_mask, & @@ -458,14 +445,7 @@ subroutine evp (dt) field_loc_NEcorner, field_type_vector) endif call ice_timer_stop(timer_bound) - - ! unload - !$OMP PARALLEL DO PRIVATE(iblk) - do iblk = 1, nblocks - uvel(:,:,iblk) = fld2(:,:,1,iblk) - vvel(:,:,iblk) = fld2(:,:,2,iblk) - enddo - !$OMP END PARALLEL DO + call unstack_velocity_field(fld2, uvel, vvel) enddo ! subcycling endif ! kevp_kernel @@ -599,6 +579,8 @@ subroutine stress (nx_block, ny_block, & rdg_conv, rdg_shear, & str ) + use ice_dyn_shared, only: strain_rates, deformations + integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions ksub , & ! subcycling step @@ -676,58 +658,20 @@ subroutine stress (nx_block, ny_block, & ! strain rates ! NOTE these are actually strain rates * area (m^2/s) !----------------------------------------------------------------- - ! divergence = e_11 + e_22 - divune = cyp(i,j)*uvel(i ,j ) - dyt(i,j)*uvel(i-1,j ) & - + cxp(i,j)*vvel(i ,j ) - dxt(i,j)*vvel(i ,j-1) - divunw = cym(i,j)*uvel(i-1,j ) + dyt(i,j)*uvel(i ,j ) & - + cxp(i,j)*vvel(i-1,j ) - dxt(i,j)*vvel(i-1,j-1) - divusw = cym(i,j)*uvel(i-1,j-1) + dyt(i,j)*uvel(i ,j-1) & - + cxm(i,j)*vvel(i-1,j-1) + dxt(i,j)*vvel(i-1,j ) - divuse = cyp(i,j)*uvel(i ,j-1) - dyt(i,j)*uvel(i-1,j-1) & - + cxm(i,j)*vvel(i ,j-1) + dxt(i,j)*vvel(i ,j ) - - ! tension strain rate = e_11 - e_22 - tensionne = -cym(i,j)*uvel(i ,j ) - dyt(i,j)*uvel(i-1,j ) & - + cxm(i,j)*vvel(i ,j ) + dxt(i,j)*vvel(i ,j-1) - tensionnw = -cyp(i,j)*uvel(i-1,j ) + dyt(i,j)*uvel(i ,j ) & - + cxm(i,j)*vvel(i-1,j ) + dxt(i,j)*vvel(i-1,j-1) - tensionsw = -cyp(i,j)*uvel(i-1,j-1) + dyt(i,j)*uvel(i ,j-1) & - + cxp(i,j)*vvel(i-1,j-1) - dxt(i,j)*vvel(i-1,j ) - tensionse = -cym(i,j)*uvel(i ,j-1) - dyt(i,j)*uvel(i-1,j-1) & - + cxp(i,j)*vvel(i ,j-1) - dxt(i,j)*vvel(i ,j ) - - ! shearing strain rate = e_12 - shearne = -cym(i,j)*vvel(i ,j ) - dyt(i,j)*vvel(i-1,j ) & - - cxm(i,j)*uvel(i ,j ) - dxt(i,j)*uvel(i ,j-1) - shearnw = -cyp(i,j)*vvel(i-1,j ) + dyt(i,j)*vvel(i ,j ) & - - cxm(i,j)*uvel(i-1,j ) - dxt(i,j)*uvel(i-1,j-1) - shearsw = -cyp(i,j)*vvel(i-1,j-1) + dyt(i,j)*vvel(i ,j-1) & - - cxp(i,j)*uvel(i-1,j-1) + dxt(i,j)*uvel(i-1,j ) - shearse = -cym(i,j)*vvel(i ,j-1) - dyt(i,j)*vvel(i-1,j-1) & - - cxp(i,j)*uvel(i ,j-1) + dxt(i,j)*uvel(i ,j ) - - ! Delta (in the denominator of zeta, eta) - Deltane = sqrt(divune**2 + ecci*(tensionne**2 + shearne**2)) - Deltanw = sqrt(divunw**2 + ecci*(tensionnw**2 + shearnw**2)) - Deltasw = sqrt(divusw**2 + ecci*(tensionsw**2 + shearsw**2)) - Deltase = sqrt(divuse**2 + ecci*(tensionse**2 + shearse**2)) - - !----------------------------------------------------------------- - ! 
on last subcycle, save quantities for mechanical redistribution - !----------------------------------------------------------------- - if (ksub == ndte) then - divu(i,j) = p25*(divune + divunw + divuse + divusw) * tarear(i,j) - tmp = p25*(Deltane + Deltanw + Deltase + Deltasw) * tarear(i,j) - rdg_conv(i,j) = -min(divu(i,j),c0) - rdg_shear(i,j) = p5*(tmp-abs(divu(i,j))) - - ! diagnostic only - ! shear = sqrt(tension**2 + shearing**2) - shear(i,j) = p25*tarear(i,j)*sqrt( & - (tensionne + tensionnw + tensionse + tensionsw)**2 & - + (shearne + shearnw + shearse + shearsw)**2) - - endif + call strain_rates (nx_block, ny_block, & + i, j, & + uvel, vvel, & + dxt, dyt, & + cxp, cyp, & + cxm, cym, & + divune, divunw, & + divuse, divusw, & + tensionne, tensionnw, & + tensionse, tensionsw, & + shearne, shearnw, & + shearse, shearsw, & + Deltane, Deltanw, & + Deltase, Deltasw ) !----------------------------------------------------------------- ! strength/Delta ! kg/s @@ -902,6 +846,23 @@ subroutine stress (nx_block, ny_block, & enddo ! ij + !----------------------------------------------------------------- + ! on last subcycle, save quantities for mechanical redistribution + !----------------------------------------------------------------- + if (ksub == ndte) then + call deformations (nx_block , ny_block , & + icellt , & + indxti , indxtj , & + uvel , vvel , & + dxt , dyt , & + cxp , cyp , & + cxm , cym , & + tarear , & + shear , divu , & + rdg_conv , rdg_shear ) + + endif + end subroutine stress !======================================================================= diff --git a/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 b/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 index c88a7de3a..9fac97a89 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 @@ -326,7 +326,7 @@ subroutine stress_i(NA_len, & ! tension strain rate = e_11 - e_22 tensionne = -cym*uvel(iw) - dyt(iw)*tmp_uvel_ee & + cxm*vvel(iw) + dxt(iw)*tmp_vvel_se - ! shearing strain rate = e_12 + ! shearing strain rate = 2*e_12 shearne = -cym*vvel(iw) - dyt(iw)*tmp_vvel_ee & - cxm*uvel(iw) - dxt(iw)*tmp_uvel_se ! Delta (in the denominator of zeta, eta) @@ -614,7 +614,7 @@ subroutine stress_l(NA_len, tarear, & tensionse = -cym*tmp_uvel_se - dyt(iw)*tmp_uvel_ne & + cxp*tmp_vvel_se - dxt(iw)*vvel(iw) - ! shearing strain rate = e_12 + ! shearing strain rate = 2*e_12 shearne = -cym*vvel(iw) - dyt(iw)*tmp_vvel_ee & - cxm*uvel(iw) - dxt(iw)*tmp_uvel_se shearnw = -cyp*tmp_vvel_ee + dyt(iw)*vvel(iw) & diff --git a/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 b/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 index c3dc83a24..d9a0919e6 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 @@ -22,9 +22,10 @@ module ice_dyn_shared implicit none private - public :: init_evp, set_evp_parameters, stepu, principal_stress, & + public :: init_dyn, set_evp_parameters, stepu, principal_stress, & dyn_prep1, dyn_prep2, dyn_finish, basal_stress_coeff, & - alloc_dyn_shared + alloc_dyn_shared, deformations, strain_rates, & + stack_velocity_field, unstack_velocity_field ! namelist parameters @@ -78,7 +79,7 @@ module ice_dyn_shared real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & uvel_init, & ! x-component of velocity (m/s), beginning of timestep vvel_init ! y-component of velocity (m/s), beginning of timestep - + ! ice isotropic tensile strength parameter real (kind=dbl_kind), public :: & Ktens ! 
T=Ktens*P (tensile strength: see Konig and Holland, 2010) @@ -91,9 +92,9 @@ module ice_dyn_shared k1, & ! 1st free parameter for landfast parameterization k2, & ! second free parameter (N/m^3) for landfast parametrization alphab, & ! alphab=Cb factor in Lemieux et al 2015 - threshold_hw ! max water depth for grounding - ! see keel data from Amundrud et al. 2004 (JGR) - + threshold_hw, & ! max water depth for grounding + ! see keel data from Amundrud et al. 2004 (JGR) + u0 = 5e-5_dbl_kind ! residual velocity for basal stress (m/s) !======================================================================= @@ -117,10 +118,10 @@ end subroutine alloc_dyn_shared !======================================================================= -! Initialize parameters and variables needed for the evp dynamics +! Initialize parameters and variables needed for the dynamics ! author: Elizabeth C. Hunke, LANL - subroutine init_evp (dt) + subroutine init_dyn (dt) use ice_blocks, only: nx_block, ny_block use ice_domain, only: nblocks @@ -141,7 +142,7 @@ subroutine init_evp (dt) i, j, & iblk ! block index - character(len=*), parameter :: subname = '(init_evp)' + character(len=*), parameter :: subname = '(init_dyn)' call set_evp_parameters (dt) @@ -199,7 +200,7 @@ subroutine init_evp (dt) enddo ! iblk !$OMP END PARALLEL DO - end subroutine init_evp + end subroutine init_dyn !======================================================================= @@ -690,9 +691,6 @@ subroutine stepu (nx_block, ny_block, & Cb , & ! complete basal stress coeff rhow ! - real (kind=dbl_kind) :: & - u0 = 5e-5_dbl_kind ! residual velocity for basal stress (m/s) - character(len=*), parameter :: subname = '(stepu)' !----------------------------------------------------------------- @@ -993,6 +991,262 @@ end subroutine principal_stress !======================================================================= +! Compute deformations for mechanical redistribution +! +! author: Elizabeth C. Hunke, LANL +! +! 2019: subroutine created by Philippe Blain, ECCC + + subroutine deformations (nx_block, ny_block, & + icellt, & + indxti, indxtj, & + uvel, vvel, & + dxt, dyt, & + cxp, cyp, & + cxm, cym, & + tarear, & + shear, divu, & + rdg_conv, rdg_shear ) + + use ice_constants, only: p25, p5 + + integer (kind=int_kind), intent(in) :: & + nx_block, ny_block, & ! block dimensions + icellt ! no. of cells where icetmask = 1 + + integer (kind=int_kind), dimension (nx_block*ny_block), & + intent(in) :: & + indxti , & ! compressed index in i-direction + indxtj ! compressed index in j-direction + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & + uvel , & ! x-component of velocity (m/s) + vvel , & ! y-component of velocity (m/s) + dxt , & ! width of T-cell through the middle (m) + dyt , & ! height of T-cell through the middle (m) + cyp , & ! 1.5*HTE - 0.5*HTE + cxp , & ! 1.5*HTN - 0.5*HTN + cym , & ! 0.5*HTE - 1.5*HTE + cxm , & ! 0.5*HTN - 1.5*HTN + tarear ! 1/tarea + + real (kind=dbl_kind), dimension (nx_block,ny_block), & + intent(inout) :: & + shear , & ! strain rate II component (1/s) + divu , & ! strain rate I component, velocity divergence (1/s) + rdg_conv , & ! convergence term for ridging (1/s) + rdg_shear ! shear term for ridging (1/s) + + ! local variables + + integer (kind=int_kind) :: & + i, j, ij + + real (kind=dbl_kind) :: & ! at each corner : + divune, divunw, divuse, divusw , & ! divergence + tensionne, tensionnw, tensionse, tensionsw, & ! tension + shearne, shearnw, shearse, shearsw , & ! 
shearing + Deltane, Deltanw, Deltase, Deltasw , & ! Delta + tmp ! useful combination + + character(len=*), parameter :: subname = '(deformations)' + + do ij = 1, icellt + i = indxti(ij) + j = indxtj(ij) + + !----------------------------------------------------------------- + ! strain rates + ! NOTE these are actually strain rates * area (m^2/s) + !----------------------------------------------------------------- + call strain_rates (nx_block, ny_block, & + i, j, & + uvel, vvel, & + dxt, dyt, & + cxp, cyp, & + cxm, cym, & + divune, divunw, & + divuse, divusw, & + tensionne, tensionnw, & + tensionse, tensionsw, & + shearne, shearnw, & + shearse, shearsw, & + Deltane, Deltanw, & + Deltase, Deltasw ) + !----------------------------------------------------------------- + ! deformations for mechanical redistribution + !----------------------------------------------------------------- + divu(i,j) = p25*(divune + divunw + divuse + divusw) * tarear(i,j) + tmp = p25*(Deltane + Deltanw + Deltase + Deltasw) * tarear(i,j) + rdg_conv(i,j) = -min(divu(i,j),c0) + rdg_shear(i,j) = p5*(tmp-abs(divu(i,j))) + + ! diagnostic only + ! shear = sqrt(tension**2 + shearing**2) + shear(i,j) = p25*tarear(i,j)*sqrt( & + (tensionne + tensionnw + tensionse + tensionsw)**2 + & + (shearne + shearnw + shearse + shearsw )**2) + + enddo ! ij + + end subroutine deformations + +!======================================================================= + +! Compute strain rates +! +! author: Elizabeth C. Hunke, LANL +! +! 2019: subroutine created by Philippe Blain, ECCC + + subroutine strain_rates (nx_block, ny_block, & + i, j, & + uvel, vvel, & + dxt, dyt, & + cxp, cyp, & + cxm, cym, & + divune, divunw, & + divuse, divusw, & + tensionne, tensionnw, & + tensionse, tensionsw, & + shearne, shearnw, & + shearse, shearsw, & + Deltane, Deltanw, & + Deltase, Deltasw ) + + integer (kind=int_kind), intent(in) :: & + nx_block, ny_block ! block dimensions + + integer (kind=int_kind) :: & + i, j ! indices + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & + uvel , & ! x-component of velocity (m/s) + vvel , & ! y-component of velocity (m/s) + dxt , & ! width of T-cell through the middle (m) + dyt , & ! height of T-cell through the middle (m) + cyp , & ! 1.5*HTE - 0.5*HTE + cxp , & ! 1.5*HTN - 0.5*HTN + cym , & ! 0.5*HTE - 1.5*HTE + cxm ! 0.5*HTN - 1.5*HTN + + real (kind=dbl_kind), intent(out):: & ! at each corner : + divune, divunw, divuse, divusw , & ! divergence + tensionne, tensionnw, tensionse, tensionsw, & ! tension + shearne, shearnw, shearse, shearsw , & ! shearing + Deltane, Deltanw, Deltase, Deltasw ! Delta + + character(len=*), parameter :: subname = '(strain_rates)' + + !----------------------------------------------------------------- + ! strain rates + ! NOTE these are actually strain rates * area (m^2/s) + !----------------------------------------------------------------- + + ! divergence = e_11 + e_22 + divune = cyp(i,j)*uvel(i ,j ) - dyt(i,j)*uvel(i-1,j ) & + + cxp(i,j)*vvel(i ,j ) - dxt(i,j)*vvel(i ,j-1) + divunw = cym(i,j)*uvel(i-1,j ) + dyt(i,j)*uvel(i ,j ) & + + cxp(i,j)*vvel(i-1,j ) - dxt(i,j)*vvel(i-1,j-1) + divusw = cym(i,j)*uvel(i-1,j-1) + dyt(i,j)*uvel(i ,j-1) & + + cxm(i,j)*vvel(i-1,j-1) + dxt(i,j)*vvel(i-1,j ) + divuse = cyp(i,j)*uvel(i ,j-1) - dyt(i,j)*uvel(i-1,j-1) & + + cxm(i,j)*vvel(i ,j-1) + dxt(i,j)*vvel(i ,j ) + + ! 
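!=======================================================================
! A worked example of the cell-centred diagnostics assembled in
! subroutine deformations above: the four corner values (strain rates
! times area, m^2/s, per the NOTE in the source) are averaged with
! p25 = 0.25, scaled by tarear = 1/tarea, and split into convergence
! and shear contributions for ridging.  The sample numbers are
! arbitrary; only the formulas follow the source.
!=======================================================================
      program demo_deformations
      implicit none
      integer, parameter :: dbl = kind(1.d0)
      real (kind=dbl), parameter :: p25 = 0.25_dbl, p5 = 0.5_dbl, c0 = 0.0_dbl
      real (kind=dbl) :: divune, divunw, divuse, divusw    ! corner divergences * area (m^2/s)
      real (kind=dbl) :: Deltane, Deltanw, Deltase, Deltasw
      real (kind=dbl) :: tarear                            ! 1/tarea (1/m^2)
      real (kind=dbl) :: divu, tmp, rdg_conv, rdg_shear

      divune  = -1.0e2_dbl ; divunw  = -2.0e2_dbl
      divuse  = -1.5e2_dbl ; divusw  = -0.5e2_dbl
      Deltane =  3.0e2_dbl ; Deltanw =  4.0e2_dbl
      Deltase =  3.5e2_dbl ; Deltasw =  2.5e2_dbl
      tarear  =  1.0e-8_dbl

      divu = p25*(divune + divunw + divuse + divusw) * tarear   ! strain rate I (1/s)
      tmp  = p25*(Deltane + Deltanw + Deltase + Deltasw) * tarear
      rdg_conv  = -min(divu,c0)          ! only convergence feeds ridging
      rdg_shear = p5*(tmp - abs(divu))   ! shear part of Delta

      print *, 'divu      =', divu        ! -1.25e-6 1/s (converging)
      print *, 'rdg_conv  =', rdg_conv    !  1.25e-6 1/s
      print *, 'rdg_shear =', rdg_shear   !  0.5*(3.25e-6 - 1.25e-6) = 1.0e-6 1/s
      end program demo_deformations
!=======================================================================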
tension strain rate = e_11 - e_22 + tensionne = -cym(i,j)*uvel(i ,j ) - dyt(i,j)*uvel(i-1,j ) & + + cxm(i,j)*vvel(i ,j ) + dxt(i,j)*vvel(i ,j-1) + tensionnw = -cyp(i,j)*uvel(i-1,j ) + dyt(i,j)*uvel(i ,j ) & + + cxm(i,j)*vvel(i-1,j ) + dxt(i,j)*vvel(i-1,j-1) + tensionsw = -cyp(i,j)*uvel(i-1,j-1) + dyt(i,j)*uvel(i ,j-1) & + + cxp(i,j)*vvel(i-1,j-1) - dxt(i,j)*vvel(i-1,j ) + tensionse = -cym(i,j)*uvel(i ,j-1) - dyt(i,j)*uvel(i-1,j-1) & + + cxp(i,j)*vvel(i ,j-1) - dxt(i,j)*vvel(i ,j ) + + ! shearing strain rate = 2*e_12 + shearne = -cym(i,j)*vvel(i ,j ) - dyt(i,j)*vvel(i-1,j ) & + - cxm(i,j)*uvel(i ,j ) - dxt(i,j)*uvel(i ,j-1) + shearnw = -cyp(i,j)*vvel(i-1,j ) + dyt(i,j)*vvel(i ,j ) & + - cxm(i,j)*uvel(i-1,j ) - dxt(i,j)*uvel(i-1,j-1) + shearsw = -cyp(i,j)*vvel(i-1,j-1) + dyt(i,j)*vvel(i ,j-1) & + - cxp(i,j)*uvel(i-1,j-1) + dxt(i,j)*uvel(i-1,j ) + shearse = -cym(i,j)*vvel(i ,j-1) - dyt(i,j)*vvel(i-1,j-1) & + - cxp(i,j)*uvel(i ,j-1) + dxt(i,j)*uvel(i ,j ) + + ! Delta (in the denominator of zeta, eta) + Deltane = sqrt(divune**2 + ecci*(tensionne**2 + shearne**2)) + Deltanw = sqrt(divunw**2 + ecci*(tensionnw**2 + shearnw**2)) + Deltasw = sqrt(divusw**2 + ecci*(tensionsw**2 + shearsw**2)) + Deltase = sqrt(divuse**2 + ecci*(tensionse**2 + shearse**2)) + + end subroutine strain_rates + +!======================================================================= + +! Load velocity components into array for boundary updates + + subroutine stack_velocity_field(uvel, vvel, fld2) + + use ice_domain, only: nblocks + + real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks), intent(in) :: & + uvel , & ! u components of velocity vector + vvel ! v components of velocity vector + + real (kind=dbl_kind), dimension (nx_block,ny_block,2,max_blocks), intent(out) :: & + fld2 ! work array for boundary updates + + ! local variables + + integer (kind=int_kind) :: & + iblk ! block index + + character(len=*), parameter :: subname = '(stack_velocity_field)' + + ! load velocity into array for boundary updates + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + fld2(:,:,1,iblk) = uvel(:,:,iblk) + fld2(:,:,2,iblk) = vvel(:,:,iblk) + enddo + !$OMP END PARALLEL DO + + end subroutine stack_velocity_field + +!======================================================================= + +! Unload velocity components from array after boundary updates + + subroutine unstack_velocity_field(fld2, uvel, vvel) + + use ice_domain, only: nblocks + + real (kind=dbl_kind), dimension (nx_block,ny_block,2,max_blocks), intent(in) :: & + fld2 ! work array for boundary updates + + real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks), intent(out) :: & + uvel , & ! u components of velocity vector + vvel ! v components of velocity vector + + ! local variables + + integer (kind=int_kind) :: & + iblk ! block index + + character(len=*), parameter :: subname = '(unstack_velocity_field)' + + ! 
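!=======================================================================
! The quantity Delta computed in strain_rates above is the denominator
! of the viscosities in Hibler-type rheologies:
!    Delta = sqrt( divergence**2 + ecci*(tension**2 + shear**2) )
! A one-corner numeric check, assuming the usual elliptical yield
! curve with eccentricity e = 2, so ecci = 1/e**2 = 0.25:
!=======================================================================
      program demo_delta
      implicit none
      integer, parameter :: dbl = kind(1.d0)
      real (kind=dbl), parameter :: e = 2.0_dbl
      real (kind=dbl), parameter :: ecci = 1.0_dbl/e**2
      real (kind=dbl) :: divune, tensionne, shearne, Deltane

      divune    = 3.0_dbl   ! divergence * area (m^2/s)
      tensionne = 4.0_dbl   ! tension    * area (m^2/s)
      shearne   = 2.0_dbl   ! shearing   * area (m^2/s); note this is 2*e_12
      Deltane   = sqrt(divune**2 + ecci*(tensionne**2 + shearne**2))

      ! sqrt(9 + 0.25*(16 + 4)) = sqrt(14)
      print *, 'Deltane =', Deltane, ' expected', sqrt(14.0_dbl)
      end program demo_delta
!=======================================================================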
Unload velocity from array after boundary updates + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + uvel(:,:,iblk) = fld2(:,:,1,iblk) + vvel(:,:,iblk) = fld2(:,:,2,iblk) + enddo + !$OMP END PARALLEL DO + + end subroutine unstack_velocity_field + +!======================================================================= + end module ice_dyn_shared !======================================================================= diff --git a/cicecore/cicedynB/dynamics/ice_dyn_vp.F90 b/cicecore/cicedynB/dynamics/ice_dyn_vp.F90 new file mode 100644 index 000000000..570e202c2 --- /dev/null +++ b/cicecore/cicedynB/dynamics/ice_dyn_vp.F90 @@ -0,0 +1,3689 @@ +!======================================================================= +! +! Viscous-plastic sea ice dynamics model +! Computes ice velocity and deformation +! +! See: +! +! Lemieux, J.‐F., Tremblay, B., Thomas, S., Sedláček, J., and Mysak, L. A. (2008), +! Using the preconditioned Generalized Minimum RESidual (GMRES) method to solve +! the sea‐ice momentum equation, J. Geophys. Res., 113, C10004, doi:10.1029/2007JC004680. +! +! Hibler, W. D., and Ackley, S. F. (1983), Numerical simulation of the Weddell Sea pack ice, +! J. Geophys. Res., 88( C5), 2873– 2887, doi:10.1029/JC088iC05p02873. +! +! Y. Saad. A Flexible Inner-Outer Preconditioned GMRES Algorithm. SIAM J. Sci. Comput., +! 14(2):461–469, 1993. URL: https://doi.org/10.1137/0914028, doi:10.1137/0914028. +! +! C. T. Kelley, Iterative Methods for Linear and Nonlinear Equations, SIAM, 1995. +! (https://www.siam.org/books/textbooks/fr16_book.pdf) +! +! Y. Saad, Iterative Methods for Sparse Linear Systems. SIAM, 2003. +! (http://www-users.cs.umn.edu/~saad/IterMethBook_2ndEd.pdf) +! +! Walker, H. F., & Ni, P. (2011). Anderson Acceleration for Fixed-Point Iterations. +! SIAM Journal on Numerical Analysis, 49(4), 1715–1735. https://doi.org/10.1137/10078356X +! +! Fang, H., & Saad, Y. (2009). Two classes of multisecant methods for nonlinear acceleration. +! Numerical Linear Algebra with Applications, 16(3), 197–221. https://doi.org/10.1002/nla.617 +! +! Birken, P. (2015) Termination criteria for inexact fixed‐point schemes. +! Numer. Linear Algebra Appl., 22: 702– 716. doi: 10.1002/nla.1982. +! +! authors: JF Lemieux, ECCC, Philppe Blain, ECCC +! + + module ice_dyn_vp + + use ice_kinds_mod + use ice_blocks, only: nx_block, ny_block + use ice_boundary, only: ice_halo + use ice_communicate, only: my_task, master_task, get_num_procs + use ice_constants, only: field_loc_center, field_loc_NEcorner, & + field_type_scalar, field_type_vector + use ice_constants, only: c0, p027, p055, p111, p166, & + p222, p25, p333, p5, c1 + use ice_domain, only: nblocks, distrb_info + use ice_domain_size, only: max_blocks + use ice_dyn_shared, only: dyn_prep1, dyn_prep2, dyn_finish, & + ecci, cosw, sinw, fcor_blk, uvel_init, & + vvel_init, basal_stress_coeff, basalstress, Ktens, & + stack_velocity_field, unstack_velocity_field + use ice_fileunits, only: nu_diag + use ice_flux, only: fm + use ice_global_reductions, only: global_sum, global_allreduce_sum + use ice_grid, only: dxt, dyt, dxhy, dyhx, cxp, cyp, cxm, cym, uarear + use ice_exit, only: abort_ice + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc, only: icepack_ice_strength, icepack_query_parameters + + implicit none + private + public :: implicit_solver, init_vp + + ! namelist parameters + + integer (kind=int_kind), public :: & + maxits_nonlin , & ! max nb of iteration for nonlinear solver + dim_fgmres , & ! 
size of fgmres Krylov subspace + dim_pgmres , & ! size of pgmres Krylov subspace + maxits_fgmres , & ! max nb of iteration for fgmres + maxits_pgmres , & ! max nb of iteration for pgmres + fpfunc_andacc , & ! fixed point function for Anderson acceleration: 1: g(x) = FMGRES(A(x),b(x)), 2: g(x) = x - A(x)x + b(x) + dim_andacc , & ! size of Anderson minimization matrix (number of saved previous residuals) + start_andacc ! acceleration delay factor (acceleration starts at this iteration) + + logical (kind=log_kind), public :: & + monitor_nonlin , & ! print nonlinear residual norm + monitor_fgmres , & ! print fgmres residual norm + monitor_pgmres , & ! print pgmres residual norm + use_mean_vrel ! use mean of previous 2 iterates to compute vrel (see Hibler and Ackley 1983) + + real (kind=dbl_kind), public :: & + reltol_nonlin , & ! nonlinear stopping criterion: reltol_nonlin*res(k=0) + reltol_fgmres , & ! fgmres stopping criterion: reltol_fgmres*res(k) + reltol_pgmres , & ! pgmres stopping criterion: reltol_pgmres*res(k) + damping_andacc , & ! damping factor for Anderson acceleration + reltol_andacc ! relative tolerance for Anderson acceleration + + character (len=char_len), public :: & + precond , & ! preconditioner for fgmres: 'ident' (identity), 'diag' (diagonal), 'pgmres' (Jacobi-preconditioned GMRES) + algo_nonlin , & ! nonlinear algorithm: 'picard' (Picard iteration), 'anderson' (Anderson acceleration) + ortho_type ! type of orthogonalization for FGMRES ('cgs' or 'mgs') + + ! module variables + + integer (kind=int_kind), allocatable :: & + icellt(:) , & ! no. of cells where icetmask = 1 + icellu(:) ! no. of cells where iceumask = 1 + + integer (kind=int_kind), allocatable :: & + indxti(:,:) , & ! compressed index in i-direction + indxtj(:,:) , & ! compressed index in j-direction + indxui(:,:) , & ! compressed index in i-direction + indxuj(:,:) ! compressed index in j-direction + + real (kind=dbl_kind), allocatable :: & + fld2(:,:,:,:) ! work array for boundary updates + +!======================================================================= + + contains + +!======================================================================= + +! Initialize parameters and variables needed for the vp dynamics +! author: Philippe Blain, ECCC + + subroutine init_vp + + use ice_blocks, only: get_block, block + use ice_boundary, only: ice_HaloUpdate + use ice_constants, only: c1, & + field_loc_center, field_type_scalar + use ice_domain, only: blocks_ice, halo_info + use ice_grid, only: tarea, tinyarea + + ! local variables + + integer (kind=int_kind) :: & + i, j, iblk, & + ilo,ihi,jlo,jhi ! beginning and end of physical domain + + type (block) :: & + this_block ! block information for current block + + real (kind=dbl_kind) :: & + min_strain_rate = 2e-09_dbl_kind ! used for recomputing tinyarea + + ! Initialize module variables + allocate(icellt(max_blocks), icellu(max_blocks)) + allocate(indxti(nx_block*ny_block, max_blocks), & + indxtj(nx_block*ny_block, max_blocks), & + indxui(nx_block*ny_block, max_blocks), & + indxuj(nx_block*ny_block, max_blocks)) + allocate(fld2(nx_block,ny_block,2,max_blocks)) + + ! Redefine tinyarea using min_strain_rate + + !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = jlo, jhi + do i = ilo, ihi + tinyarea(i,j,iblk) = min_strain_rate*tarea(i,j,iblk) + enddo + enddo + enddo ! 
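!=======================================================================
! init_vp above redefines tinyarea as min_strain_rate*tarea; it is
! later used to regularize the viscous coefficient zetaD = P/Delta as
! Delta -> 0 (rigid ice).  A scalar sketch with a made-up ice strength
! and cell area:
!=======================================================================
      program demo_tinyarea
      implicit none
      integer, parameter :: dbl = kind(1.d0)
      real (kind=dbl), parameter :: min_strain_rate = 2e-09_dbl
      real (kind=dbl) :: tarea, tinyarea, strength, Delta, zetaD

      tarea    = 1.0e8_dbl              ! cell area (m^2), hypothetical
      tinyarea = min_strain_rate*tarea  ! floor for Delta*area: 0.2 m^2/s
      strength = 2.7e4_dbl              ! ice strength (N/m), hypothetical
      Delta    = 0.0_dbl                ! rigid limit: strain rates vanish

      zetaD = strength/(Delta + tinyarea)   ! finite thanks to tinyarea
      print *, 'tinyarea =', tinyarea, ' zetaD =', zetaD
      end program demo_tinyarea
!=======================================================================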
iblk + !$OMP END PARALLEL DO + + call ice_HaloUpdate (tinyarea, halo_info, & + field_loc_center, field_type_scalar, & + fillValue=c1) + + end subroutine init_vp + +!======================================================================= + +! Viscous-plastic dynamics driver +! +#ifdef CICE_IN_NEMO +! Wind stress is set during this routine from the values supplied +! via NEMO (unless calc_strair is true). These values are supplied +! rotated on u grid and multiplied by aice. strairxT = 0 in this +! case so operations in dyn_prep1 are pointless but carried out to +! minimise code changes. +#endif +! +! author: JF Lemieux, A. Qaddouri and F. Dupont ECCC + + subroutine implicit_solver (dt) + + use ice_arrays_column, only: Cdn_ocn + use ice_boundary, only: ice_HaloMask, ice_HaloUpdate, & + ice_HaloDestroy, ice_HaloUpdate_stress + use ice_blocks, only: block, get_block, nx_block, ny_block + use ice_domain, only: blocks_ice, halo_info, maskhalo_dyn + use ice_domain_size, only: max_blocks, ncat + use ice_dyn_shared, only: deformations + use ice_flux, only: rdg_conv, rdg_shear, strairxT, strairyT, & + strairx, strairy, uocn, vocn, ss_tltx, ss_tlty, iceumask, fm, & + strtltx, strtlty, strocnx, strocny, strintx, strinty, taubx, tauby, & + strocnxT, strocnyT, strax, stray, & + Tbu, hwater, & + stressp_1, stressp_2, stressp_3, stressp_4, & + stressm_1, stressm_2, stressm_3, stressm_4, & + stress12_1, stress12_2, stress12_3, stress12_4 + use ice_grid, only: tmask, umask, dxt, dyt, cxp, cyp, cxm, cym, & + tarear, to_ugrid, t2ugrid_vector, u2tgrid_vector, & + grid_type + use ice_state, only: aice, vice, vsno, uvel, vvel, divu, shear, & + aice_init, aice0, aicen, vicen, strength + use ice_timers, only: timer_dynamics, timer_bound, & + ice_timer_start, ice_timer_stop + + real (kind=dbl_kind), intent(in) :: & + dt ! time step + + ! local variables + + integer (kind=int_kind) :: & + ntot , & ! size of problem for Anderson + iblk , & ! block index + ilo,ihi,jlo,jhi, & ! beginning and end of physical domain + i, j, ij + + real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & + tmass , & ! total mass of ice and snow (kg/m^2) + waterx , & ! for ocean stress calculation, x (m/s) + watery , & ! for ocean stress calculation, y (m/s) + forcex , & ! work array: combined atm stress and ocn tilt, x + forcey , & ! work array: combined atm stress and ocn tilt, y + bxfix , & ! part of bx that is constant during Picard + byfix , & ! part of by that is constant during Picard + Cb , & ! seabed stress coefficient + fpresx , & ! fixed point residual vector, x components: fx = uvel - uprev_k + fpresy , & ! fixed point residual vector, y components: fy = vvel - vprev_k + aiu , & ! ice fraction on u-grid + umass , & ! total mass of ice and snow (u grid) + umassdti ! mass of U-cell/dte (kg/m^2 s) + + real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks,4):: & + zetaD ! zetaD = 2zeta (viscous coeff) + + logical (kind=log_kind) :: calc_strair + + integer (kind=int_kind), dimension (nx_block,ny_block,max_blocks) :: & + icetmask, & ! ice extent mask (T-cell) + halomask ! generic halo mask + + type (ice_halo) :: & + halo_info_mask ! ghost cell update info for masked halo + + type (block) :: & + this_block ! block information for current block + + real (kind=dbl_kind), allocatable :: & + sol(:) ! solution vector + + character(len=*), parameter :: subname = '(implicit_solver)' + + call ice_timer_start(timer_dynamics) ! dynamics + + !----------------------------------------------------------------- + ! 
Initialize + !----------------------------------------------------------------- + + ! This call is needed only if dt changes during runtime. +! call set_evp_parameters (dt) + + !----------------------------------------------------------------- + ! boundary updates + ! commented out because the ghost cells are freshly + ! updated after cleanup_itd + !----------------------------------------------------------------- + +! call ice_timer_start(timer_bound) +! call ice_HaloUpdate (aice, halo_info, & +! field_loc_center, field_type_scalar) +! call ice_HaloUpdate (vice, halo_info, & +! field_loc_center, field_type_scalar) +! call ice_HaloUpdate (vsno, halo_info, & +! field_loc_center, field_type_scalar) +! call ice_timer_stop(timer_bound) + + !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) + do iblk = 1, nblocks + + do j = 1, ny_block + do i = 1, nx_block + rdg_conv (i,j,iblk) = c0 + rdg_shear(i,j,iblk) = c0 + divu (i,j,iblk) = c0 + shear(i,j,iblk) = c0 + enddo + enddo + + !----------------------------------------------------------------- + ! preparation for dynamics + !----------------------------------------------------------------- + + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + call dyn_prep1 (nx_block, ny_block, & + ilo, ihi, jlo, jhi, & + aice (:,:,iblk), vice (:,:,iblk), & + vsno (:,:,iblk), tmask (:,:,iblk), & + strairxT(:,:,iblk), strairyT(:,:,iblk), & + strairx (:,:,iblk), strairy (:,:,iblk), & + tmass (:,:,iblk), icetmask(:,:,iblk)) + + enddo ! iblk + !$OMP END PARALLEL DO + + call ice_timer_start(timer_bound) + call ice_HaloUpdate (icetmask, halo_info, & + field_loc_center, field_type_scalar) + call ice_timer_stop(timer_bound) + + !----------------------------------------------------------------- + ! convert fields from T to U grid + !----------------------------------------------------------------- + + call to_ugrid(tmass,umass) + call to_ugrid(aice_init, aiu) + + !---------------------------------------------------------------- + ! Set wind stress to values supplied via NEMO or other forcing + ! This wind stress is rotated on u grid and multiplied by aice + !---------------------------------------------------------------- + call icepack_query_parameters(calc_strair_out=calc_strair) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + if (.not. calc_strair) then + strairx(:,:,:) = strax(:,:,:) + strairy(:,:,:) = stray(:,:,:) + else + call t2ugrid_vector(strairx) + call t2ugrid_vector(strairy) + endif + +! tcraig, tcx, threading here leads to some non-reproducbile results and failures in icepack_ice_strength +! need to do more debugging + !$TCXOMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block) + do iblk = 1, nblocks + + !----------------------------------------------------------------- + ! 
more preparation for dynamics + !----------------------------------------------------------------- + + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + call dyn_prep2 (nx_block, ny_block, & + ilo, ihi, jlo, jhi, & + icellt(iblk), icellu(iblk), & + indxti (:,iblk), indxtj (:,iblk), & + indxui (:,iblk), indxuj (:,iblk), & + aiu (:,:,iblk), umass (:,:,iblk), & + umassdti (:,:,iblk), fcor_blk (:,:,iblk), & + umask (:,:,iblk), & + uocn (:,:,iblk), vocn (:,:,iblk), & + strairx (:,:,iblk), strairy (:,:,iblk), & + ss_tltx (:,:,iblk), ss_tlty (:,:,iblk), & + icetmask (:,:,iblk), iceumask (:,:,iblk), & + fm (:,:,iblk), dt, & + strtltx (:,:,iblk), strtlty (:,:,iblk), & + strocnx (:,:,iblk), strocny (:,:,iblk), & + strintx (:,:,iblk), strinty (:,:,iblk), & + taubx (:,:,iblk), tauby (:,:,iblk), & + waterx (:,:,iblk), watery (:,:,iblk), & + forcex (:,:,iblk), forcey (:,:,iblk), & + stressp_1 (:,:,iblk), stressp_2 (:,:,iblk), & + stressp_3 (:,:,iblk), stressp_4 (:,:,iblk), & + stressm_1 (:,:,iblk), stressm_2 (:,:,iblk), & + stressm_3 (:,:,iblk), stressm_4 (:,:,iblk), & + stress12_1(:,:,iblk), stress12_2(:,:,iblk), & + stress12_3(:,:,iblk), stress12_4(:,:,iblk), & + uvel_init (:,:,iblk), vvel_init (:,:,iblk), & + uvel (:,:,iblk), vvel (:,:,iblk), & + Tbu (:,:,iblk)) + + call calc_bfix (nx_block , ny_block , & + icellu(iblk) , & + indxui (:,iblk), indxuj (:,iblk), & + umassdti (:,:,iblk), & + forcex (:,:,iblk), forcey (:,:,iblk), & + uvel_init (:,:,iblk), vvel_init (:,:,iblk), & + bxfix (:,:,iblk), byfix (:,:,iblk)) + + !----------------------------------------------------------------- + ! ice strength + !----------------------------------------------------------------- + + strength(:,:,iblk) = c0 ! initialize + do ij = 1, icellt(iblk) + i = indxti(ij, iblk) + j = indxtj(ij, iblk) + call icepack_ice_strength (ncat, & + aice (i,j, iblk), & + vice (i,j, iblk), & + aice0 (i,j, iblk), & + aicen (i,j,:,iblk), & + vicen (i,j,:,iblk), & + strength(i,j, iblk)) + enddo ! ij + + enddo ! iblk + !$TCXOMP END PARALLEL DO + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + call ice_timer_start(timer_bound) + call ice_HaloUpdate (strength, halo_info, & + field_loc_center, field_type_scalar) + ! velocities may have changed in dyn_prep2 + call stack_velocity_field(uvel, vvel, fld2) + call ice_HaloUpdate (fld2, halo_info, & + field_loc_NEcorner, field_type_vector) + call unstack_velocity_field(fld2, uvel, vvel) + call ice_timer_stop(timer_bound) + + if (maskhalo_dyn) then + call ice_timer_start(timer_bound) + halomask = 0 + where (iceumask) halomask = 1 + call ice_HaloUpdate (halomask, halo_info, & + field_loc_center, field_type_scalar) + call ice_timer_stop(timer_bound) + call ice_HaloMask(halo_info_mask, halo_info, halomask) + endif + + !----------------------------------------------------------------- + ! basal stress coefficients (landfast ice) + !----------------------------------------------------------------- + + if (basalstress) then + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + call basal_stress_coeff (nx_block , ny_block , & + icellu (iblk), & + indxui (:,iblk), indxuj(:,iblk), & + vice (:,:,iblk), aice(:,:,iblk), & + hwater(:,:,iblk), Tbu (:,:,iblk)) + enddo + !$OMP END PARALLEL DO + endif + + !----------------------------------------------------------------- + ! 
calc size of problem (ntot) and allocate solution vector + !----------------------------------------------------------------- + + ntot = 0 + do iblk = 1, nblocks + ntot = ntot + icellu(iblk) + enddo + ntot = 2 * ntot ! times 2 because of u and v + + allocate(sol(ntot)) + + !----------------------------------------------------------------- + ! Start of nonlinear iteration + !----------------------------------------------------------------- + call anderson_solver (icellt , icellu, & + indxti , indxtj, & + indxui , indxuj, & + aiu , ntot , & + waterx , watery, & + bxfix , byfix , & + umassdti, sol , & + fpresx , fpresy, & + zetaD , Cb , & + halo_info_mask) + !----------------------------------------------------------------- + ! End of nonlinear iteration + !----------------------------------------------------------------- + + deallocate(sol) + + if (maskhalo_dyn) call ice_HaloDestroy(halo_info_mask) + + !----------------------------------------------------------------- + ! Compute stresses + !----------------------------------------------------------------- + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + call stress_vp (nx_block , ny_block , & + icellt(iblk) , & + indxti (:,iblk), indxtj (:,iblk), & + uvel (:,:,iblk), vvel (:,:,iblk), & + dxt (:,:,iblk), dyt (:,:,iblk), & + cxp (:,:,iblk), cyp (:,:,iblk), & + cxm (:,:,iblk), cym (:,:,iblk), & + zetaD (:,:,iblk,:), & + stressp_1 (:,:,iblk), stressp_2 (:,:,iblk), & + stressp_3 (:,:,iblk), stressp_4 (:,:,iblk), & + stressm_1 (:,:,iblk), stressm_2 (:,:,iblk), & + stressm_3 (:,:,iblk), stressm_4 (:,:,iblk), & + stress12_1(:,:,iblk), stress12_2(:,:,iblk), & + stress12_3(:,:,iblk), stress12_4(:,:,iblk)) + enddo ! iblk + !$OMP END PARALLEL DO + + !----------------------------------------------------------------- + ! Compute deformations + !----------------------------------------------------------------- + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + call deformations (nx_block , ny_block , & + icellt(iblk) , & + indxti (:,iblk), indxtj (:,iblk), & + uvel (:,:,iblk), vvel (:,:,iblk), & + dxt (:,:,iblk), dyt (:,:,iblk), & + cxp (:,:,iblk), cyp (:,:,iblk), & + cxm (:,:,iblk), cym (:,:,iblk), & + tarear (:,:,iblk), & + shear (:,:,iblk), divu (:,:,iblk), & + rdg_conv (:,:,iblk), rdg_shear (:,:,iblk)) + enddo + !$OMP END PARALLEL DO + + !----------------------------------------------------------------- + ! Compute seabed stress (diagnostic) + !----------------------------------------------------------------- + if (basalstress) then + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + call calc_seabed_stress (nx_block , ny_block , & + icellu(iblk) , & + indxui (:,iblk), indxuj (:,iblk), & + uvel (:,:,iblk), vvel (:,:,iblk), & + Cb (:,:,iblk), & + taubx (:,:,iblk), tauby (:,:,iblk)) + enddo + !$OMP END PARALLEL DO + endif + + ! Force symmetry across the tripole seam + if (trim(grid_type) == 'tripole') then + if (maskhalo_dyn) then + !------------------------------------------------------- + ! set halomask to zero because ice_HaloMask always keeps + ! 
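!=======================================================================
! The nonlinear problem is posed on a flat vector of size
! ntot = 2*sum(icellu): the u and v unknowns at ice points, gathered
! with the compressed indices.  A toy analogue of the packing done by
! arrays_to_vec (defined elsewhere in this module; the all-u-then-all-v
! ordering below is assumed purely for illustration), with one block
! and hand-built index lists:
!=======================================================================
      program demo_pack_sol
      implicit none
      integer, parameter :: dbl = kind(1.d0)
      integer, parameter :: nx = 3, ny = 3
      integer :: icellu, ntot, ij, i, j
      integer :: indxui(nx*ny), indxuj(nx*ny)
      real (kind=dbl) :: uvel(nx,ny), vvel(nx,ny)
      real (kind=dbl), allocatable :: sol(:)

      uvel = 1.0_dbl ; vvel = 2.0_dbl

      ! pretend only the 2x2 lower-left corner carries ice
      icellu = 0
      do j = 1, 2
         do i = 1, 2
            icellu = icellu + 1
            indxui(icellu) = i
            indxuj(icellu) = j
         enddo
      enddo

      ntot = 2*icellu        ! times 2 because of u and v
      allocate(sol(ntot))
      do ij = 1, icellu
         sol(ij)          = uvel(indxui(ij),indxuj(ij))
         sol(icellu + ij) = vvel(indxui(ij),indxuj(ij))
      enddo
      print *, 'ntot =', ntot, ' sol =', sol
      deallocate(sol)
      end program demo_pack_sol
!=======================================================================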
local copies AND tripole zipper communication + !------------------------------------------------------- + halomask = 0 + call ice_HaloMask(halo_info_mask, halo_info, halomask) + + call ice_HaloUpdate_stress(stressp_1, stressp_3, halo_info_mask, & + field_loc_center, field_type_scalar) + call ice_HaloUpdate_stress(stressp_3, stressp_1, halo_info_mask, & + field_loc_center, field_type_scalar) + call ice_HaloUpdate_stress(stressp_2, stressp_4, halo_info_mask, & + field_loc_center, field_type_scalar) + call ice_HaloUpdate_stress(stressp_4, stressp_2, halo_info_mask, & + field_loc_center, field_type_scalar) + + call ice_HaloUpdate_stress(stressm_1, stressm_3, halo_info_mask, & + field_loc_center, field_type_scalar) + call ice_HaloUpdate_stress(stressm_3, stressm_1, halo_info_mask, & + field_loc_center, field_type_scalar) + call ice_HaloUpdate_stress(stressm_2, stressm_4, halo_info_mask, & + field_loc_center, field_type_scalar) + call ice_HaloUpdate_stress(stressm_4, stressm_2, halo_info_mask, & + field_loc_center, field_type_scalar) + + call ice_HaloUpdate_stress(stress12_1, stress12_3, halo_info_mask, & + field_loc_center, field_type_scalar) + call ice_HaloUpdate_stress(stress12_3, stress12_1, halo_info_mask, & + field_loc_center, field_type_scalar) + call ice_HaloUpdate_stress(stress12_2, stress12_4, halo_info_mask, & + field_loc_center, field_type_scalar) + call ice_HaloUpdate_stress(stress12_4, stress12_2, halo_info_mask, & + field_loc_center, field_type_scalar) + + call ice_HaloDestroy(halo_info_mask) + else + call ice_HaloUpdate_stress(stressp_1, stressp_3, halo_info, & + field_loc_center, field_type_scalar) + call ice_HaloUpdate_stress(stressp_3, stressp_1, halo_info, & + field_loc_center, field_type_scalar) + call ice_HaloUpdate_stress(stressp_2, stressp_4, halo_info, & + field_loc_center, field_type_scalar) + call ice_HaloUpdate_stress(stressp_4, stressp_2, halo_info, & + field_loc_center, field_type_scalar) + + call ice_HaloUpdate_stress(stressm_1, stressm_3, halo_info, & + field_loc_center, field_type_scalar) + call ice_HaloUpdate_stress(stressm_3, stressm_1, halo_info, & + field_loc_center, field_type_scalar) + call ice_HaloUpdate_stress(stressm_2, stressm_4, halo_info, & + field_loc_center, field_type_scalar) + call ice_HaloUpdate_stress(stressm_4, stressm_2, halo_info, & + field_loc_center, field_type_scalar) + + call ice_HaloUpdate_stress(stress12_1, stress12_3, halo_info, & + field_loc_center, field_type_scalar) + call ice_HaloUpdate_stress(stress12_3, stress12_1, halo_info, & + field_loc_center, field_type_scalar) + call ice_HaloUpdate_stress(stress12_2, stress12_4, halo_info, & + field_loc_center, field_type_scalar) + call ice_HaloUpdate_stress(stress12_4, stress12_2, halo_info, & + field_loc_center, field_type_scalar) + endif ! maskhalo + endif ! tripole + + !----------------------------------------------------------------- + ! ice-ocean stress + !----------------------------------------------------------------- + + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + + call dyn_finish & + (nx_block, ny_block, & + icellu (iblk), Cdn_ocn (:,:,iblk), & + indxui (:,iblk), indxuj (:,iblk), & + uvel (:,:,iblk), vvel (:,:,iblk), & + uocn (:,:,iblk), vocn (:,:,iblk), & + aiu (:,:,iblk), fm (:,:,iblk), & + strintx (:,:,iblk), strinty (:,:,iblk), & + strairx (:,:,iblk), strairy (:,:,iblk), & + strocnx (:,:,iblk), strocny (:,:,iblk), & + strocnxT(:,:,iblk), strocnyT(:,:,iblk)) + + enddo + !$OMP END PARALLEL DO + + call u2tgrid_vector(strocnxT) ! 
shift
+      call u2tgrid_vector(strocnyT)
+
+      call ice_timer_stop(timer_dynamics)    ! dynamics
+
+      end subroutine implicit_solver
+
+!=======================================================================
+
+! Solve the nonlinear equation F(u,v) = 0, where
+! F(u,v) := A(u,v) * (u,v) - b(u,v)
+! using Anderson acceleration (accelerated fixed point (Picard) iteration)
+!
+! author: JF Lemieux, A. Qaddouri, F. Dupont and P. Blain ECCC
+!
+! Anderson algorithm adapted from:
+! H. F. Walker, “Anderson Acceleration: Algorithms and Implementations”
+! [Online]. Available: https://users.wpi.edu/~walker/Papers/anderson_accn_algs_imps.pdf
+
+      subroutine anderson_solver (icellt  , icellu, &
+                                  indxti  , indxtj, &
+                                  indxui  , indxuj, &
+                                  aiu     , ntot  , &
+                                  waterx  , watery, &
+                                  bxfix   , byfix , &
+                                  umassdti, sol   , &
+                                  fpresx  , fpresy, &
+                                  zetaD   , Cb    , &
+                                  halo_info_mask)
+
+      use ice_arrays_column, only: Cdn_ocn
+      use ice_blocks, only: nx_block, ny_block
+      use ice_boundary, only: ice_HaloUpdate
+      use ice_constants, only: c1
+      use ice_domain, only: maskhalo_dyn, halo_info
+      use ice_domain_size, only: max_blocks
+      use ice_flux, only: uocn, vocn, fm, Tbu
+      use ice_grid, only: dxt, dyt, dxhy, dyhx, cxp, cyp, cxm, cym, &
+          uarear, tinyarea
+      use ice_state, only: uvel, vvel, strength
+      use ice_timers, only: ice_timer_start, ice_timer_stop, timer_bound
+
+      integer (kind=int_kind), intent(in) :: &
+         ntot      ! size of problem for Anderson
+
+      integer (kind=int_kind), dimension(max_blocks), intent(in) :: &
+         icellt  , & ! no. of cells where icetmask = 1
+         icellu      ! no. of cells where iceumask = 1
+
+      integer (kind=int_kind), dimension (nx_block*ny_block, max_blocks), intent(in) :: &
+         indxti  , & ! compressed index in i-direction
+         indxtj  , & ! compressed index in j-direction
+         indxui  , & ! compressed index in i-direction
+         indxuj      ! compressed index in j-direction
+
+      real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks), intent(in) :: &
+         aiu      , & ! ice fraction on u-grid
+         waterx   , & ! for ocean stress calculation, x (m/s)
+         watery   , & ! for ocean stress calculation, y (m/s)
+         bxfix    , & ! part of bx that is constant during Picard
+         byfix    , & ! part of by that is constant during Picard
+         umassdti     ! mass of U-cell/dte (kg/m^2 s)
+
+      real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks,4), intent(out) :: &
+         zetaD        ! zetaD = 2zeta (viscous coeff)
+
+      type (ice_halo), intent(in) :: &
+         halo_info_mask !  ghost cell update info for masked halo
+
+      real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks), intent(inout) :: &
+         fpresx   , & ! fixed point residual vector, x components: fx = uvel - uprev_k
+         fpresy   , & ! fixed point residual vector, y components: fy = vvel - vprev_k
+         Cb           ! seabed stress coefficient
+
+      real (kind=dbl_kind), dimension (ntot), intent(inout) :: &
+         sol          ! current approximate solution
+
+      ! local variables
+
+      integer (kind=int_kind) :: &
+         it_nl      , & ! nonlinear loop iteration index
+         res_num    , & ! current number of stored residuals
+         j          , & ! iteration index for QR update
+         iblk       , & ! block index
+         nbiter         ! number of FGMRES iterations performed
+
+      integer (kind=int_kind), parameter :: &
+         inc = 1        ! increment value for BLAS calls
+
+      real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: &
+         uprev_k  , & ! uvel at previous Picard iteration
+         vprev_k  , & ! vvel at previous Picard iteration
+         ulin     , & ! uvel to linearize vrel
+         vlin     , & ! vvel to linearize vrel
+         vrel     , & ! coeff for tauw
+         bx       , & ! b vector
+         by       , & ! b vector
+         diagx    , & ! 
Diagonal (x component) of the matrix A
+         diagy    , & ! Diagonal (y component) of the matrix A
+         Au       , & ! matvec, Fx = bx - Au
+         Av       , & ! matvec, Fy = by - Av
+         Fx       , & ! x residual vector, Fx = bx - Au
+         Fy       , & ! y residual vector, Fy = by - Av
+         solx     , & ! solution of FGMRES (x components)
+         soly         ! solution of FGMRES (y components)
+
+      real (kind=dbl_kind), dimension(nx_block,ny_block,8):: &
+         stress_Pr, & ! x,y-derivatives of the replacement pressure
+         diag_rheo    ! contributions of the rheology term to the diagonal
+
+      real (kind=dbl_kind), dimension (max_blocks) :: &
+         L2norm       ! array used to compute l^2 norm of grid function
+
+      real (kind=dbl_kind), dimension (ntot) :: &
+         res        , & ! current residual
+         res_old    , & ! previous residual
+         res_diff   , & ! difference between current and previous residuals
+         fpfunc     , & ! current value of fixed point function
+         fpfunc_old , & ! previous value of fixed point function
+         tmp            ! temporary vector for BLAS calls
+
+      real (kind=dbl_kind), dimension(ntot,dim_andacc) :: &
+         Q        , & ! Q factor for QR factorization of F (residuals) matrix
+         G_diff       ! Matrix containing the differences of g(x) (fixed point function) evaluations
+
+      real (kind=dbl_kind), dimension(dim_andacc,dim_andacc) :: &
+         R            ! R factor for QR factorization of F (residuals) matrix
+
+      real (kind=dbl_kind), dimension(dim_andacc) :: &
+         rhs_tri  , & ! right hand side vector for matrix-vector product
+         coeffs       ! coeffs used to combine previous solutions
+
+      real (kind=dbl_kind) :: &
+         ! tol        , & ! tolerance for fixed point convergence: reltol_andacc * (initial fixed point residual norm) [unused for now]
+         tol_nl     , & ! tolerance for nonlinear convergence: reltol_nonlin * (initial nonlinear residual norm)
+         fpres_norm , & ! norm of current fixed point residual : f(x) = g(x) - x
+         prog_norm  , & ! norm of difference between current and previous solution
+         nlres_norm     ! norm of current nonlinear residual : F(x) = A(x)x - b(x)
+
+#ifdef USE_LAPACK
+      real (kind=dbl_kind) :: &
+         ddot, dnrm2    ! external BLAS functions
+#endif
+
+      character(len=*), parameter :: subname = '(anderson_solver)'
+
+      ! Initialization
+      res_num = 0
+      L2norm  = c0
+
+      !$OMP PARALLEL DO PRIVATE(iblk)
+      do iblk = 1, nblocks
+         uprev_k(:,:,iblk) = uvel(:,:,iblk)
+         vprev_k(:,:,iblk) = vvel(:,:,iblk)
+      enddo
+      !$OMP END PARALLEL DO
+
+      ! Start iterations
+      do it_nl = 0, maxits_nonlin ! nonlinear iteration loop
+         ! Compute quantities needed for computing PDE residual and g(x) (fixed point map)
+         !-----------------------------------------------------------------
+         ! 
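!=======================================================================
! The residual norms used in the loop below are assembled blockwise:
! each block contributes a partial sum of squares, L2norm(iblk), and
! the global norm is sqrt(global_sum(sum(L2norm))).  A serial sketch
! of the same two-stage reduction, with the MPI global_sum replaced
! by a plain sum:
!=======================================================================
      program demo_l2norm
      implicit none
      integer, parameter :: dbl = kind(1.d0)
      integer, parameter :: nblocks = 2, npts = 3
      real (kind=dbl) :: fx(npts,nblocks), fy(npts,nblocks)
      real (kind=dbl) :: L2norm(nblocks), nlres_norm
      integer :: iblk

      fx = 1.0_dbl ; fy = 2.0_dbl
      do iblk = 1, nblocks            ! per-block partial sums of squares
         L2norm(iblk) = sum(fx(:,iblk)**2 + fy(:,iblk)**2)
      enddo
      nlres_norm = sqrt(sum(L2norm))  ! global_sum(...) in the parallel code

      ! 2 blocks * 3 points * (1 + 4) = 30 -> sqrt(30)
      print *, 'nlres_norm =', nlres_norm, ' expected', sqrt(30.0_dbl)
      end program demo_l2norm
!=======================================================================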
Calc zetaD, dPr/dx, dPr/dy, Cb and vrel = f(uprev_k, vprev_k) + !----------------------------------------------------------------- + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + + if (use_mean_vrel) then + ulin(:,:,iblk) = p5*uprev_k(:,:,iblk) + p5*uvel(:,:,iblk) + vlin(:,:,iblk) = p5*vprev_k(:,:,iblk) + p5*vvel(:,:,iblk) + else + ulin(:,:,iblk) = uvel(:,:,iblk) + vlin(:,:,iblk) = vvel(:,:,iblk) + endif + uprev_k(:,:,iblk) = uvel(:,:,iblk) + vprev_k(:,:,iblk) = vvel(:,:,iblk) + + call calc_zeta_dPr (nx_block , ny_block , & + icellt (iblk), & + indxti (:,iblk), indxtj (:,iblk), & + uprev_k (:,:,iblk), vprev_k (:,:,iblk), & + dxt (:,:,iblk), dyt (:,:,iblk), & + dxhy (:,:,iblk), dyhx (:,:,iblk), & + cxp (:,:,iblk), cyp (:,:,iblk), & + cxm (:,:,iblk), cym (:,:,iblk), & + tinyarea (:,:,iblk), & + strength (:,:,iblk), zetaD (:,:,iblk,:), & + stress_Pr (:,:,:)) + + call calc_vrel_Cb (nx_block , ny_block , & + icellu (iblk), Cdn_ocn (:,:,iblk), & + indxui (:,iblk), indxuj (:,iblk), & + aiu (:,:,iblk), Tbu (:,:,iblk), & + uocn (:,:,iblk), vocn (:,:,iblk), & + ulin (:,:,iblk), vlin (:,:,iblk), & + vrel (:,:,iblk), Cb (:,:,iblk)) + + ! prepare b vector (RHS) + call calc_bvec (nx_block , ny_block , & + icellu (iblk), & + indxui (:,iblk), indxuj (:,iblk), & + stress_Pr (:,:,:), uarear (:,:,iblk), & + waterx (:,:,iblk), watery (:,:,iblk), & + bxfix (:,:,iblk), byfix (:,:,iblk), & + bx (:,:,iblk), by (:,:,iblk), & + vrel (:,:,iblk)) + + ! Compute nonlinear residual norm (PDE residual) + call matvec (nx_block , ny_block , & + icellu (iblk) , icellt (iblk), & + indxui (:,iblk) , indxuj (:,iblk), & + indxti (:,iblk) , indxtj (:,iblk), & + dxt (:,:,iblk) , dyt (:,:,iblk), & + dxhy (:,:,iblk) , dyhx (:,:,iblk), & + cxp (:,:,iblk) , cyp (:,:,iblk), & + cxm (:,:,iblk) , cym (:,:,iblk), & + uprev_k (:,:,iblk) , vprev_k (:,:,iblk), & + vrel (:,:,iblk) , Cb (:,:,iblk), & + zetaD (:,:,iblk,:), & + umassdti (:,:,iblk) , fm (:,:,iblk), & + uarear (:,:,iblk) , & + Au (:,:,iblk) , Av (:,:,iblk)) + call residual_vec (nx_block , ny_block , & + icellu (iblk), & + indxui (:,iblk), indxuj (:,iblk), & + bx (:,:,iblk), by (:,:,iblk), & + Au (:,:,iblk), Av (:,:,iblk), & + Fx (:,:,iblk), Fy (:,:,iblk), & + L2norm (iblk)) + enddo + !$OMP END PARALLEL DO + nlres_norm = sqrt(global_sum(sum(L2norm), distrb_info)) + if (my_task == master_task .and. monitor_nonlin) then + write(nu_diag, '(a,i4,a,d26.16)') "monitor_nonlin: iter_nonlin= ", it_nl, & + " nonlin_res_L2norm= ", nlres_norm + endif + ! Compute relative tolerance at first iteration + if (it_nl == 0) then + tol_nl = reltol_nonlin*nlres_norm + endif + + ! Check for nonlinear convergence + if (nlres_norm < tol_nl) then + exit + endif + + ! Put initial guess for FGMRES in solx,soly and sol (needed for anderson) + solx = uprev_k + soly = vprev_k + call arrays_to_vec (nx_block , ny_block , & + nblocks , max_blocks , & + icellu (:), ntot , & + indxui (:,:), indxuj (:,:), & + uprev_k (:,:,:), vprev_k (:,:,:), & + sol (:)) + + ! Compute fixed point map g(x) + if (fpfunc_andacc == 1) then + ! g_1(x) = FGMRES(A(x), b(x)) + + ! Prepare diagonal for preconditioner + if (precond == 'diag' .or. precond == 'pgmres') then + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + ! 
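!=======================================================================
! The structure of the loop above is a Picard fixed point iteration:
! linearize vrel and the viscosities about the previous iterate, solve
! the resulting linear system, and stop once the nonlinear residual
! falls below reltol_nonlin times its initial value.  A scalar
! caricature, with g(x) = cos(x) standing in for the FGMRES solve of
! the linearized momentum equation:
!=======================================================================
      program demo_picard
      implicit none
      integer, parameter :: dbl = kind(1.d0)
      real (kind=dbl), parameter :: reltol_nonlin = 1e-8_dbl
      integer, parameter :: maxits_nonlin = 100
      real (kind=dbl) :: x, res, tol_nl
      integer :: it_nl

      x = 1.0_dbl
      do it_nl = 0, maxits_nonlin
         res = abs(cos(x) - x)                        ! nonlinear residual norm
         if (it_nl == 0) tol_nl = reltol_nonlin*res   ! relative tolerance
         write(*,'(a,i4,a,d26.16)') 'monitor_nonlin: iter_nonlin= ', &
               it_nl, ' res= ', res
         if (res < tol_nl) exit
         x = cos(x)                                   ! fixed point update
      enddo
      print *, 'converged to x =', x                  ! ~0.739085
      end program demo_picard
!=======================================================================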
first compute diagonal contributions due to rheology term + call formDiag_step1 (nx_block , ny_block , & + icellu (iblk) , & + indxui (:,iblk) , indxuj(:,iblk), & + dxt (:,:,iblk) , dyt (:,:,iblk), & + dxhy (:,:,iblk) , dyhx(:,:,iblk), & + cxp (:,:,iblk) , cyp (:,:,iblk), & + cxm (:,:,iblk) , cym (:,:,iblk), & + zetaD (:,:,iblk,:), diag_rheo(:,:,:)) + ! second compute the full diagonal + call formDiag_step2 (nx_block , ny_block , & + icellu (iblk), & + indxui (:,iblk), indxuj (:,iblk), & + diag_rheo (:,:,:), vrel (:,:,iblk), & + umassdti (:,:,iblk), & + uarear (:,:,iblk), Cb (:,:,iblk), & + diagx (:,:,iblk), diagy (:,:,iblk)) + enddo + !$OMP END PARALLEL DO + endif + + ! FGMRES linear solver + call fgmres (zetaD , & + Cb , vrel , & + umassdti , & + halo_info_mask, & + bx , by , & + diagx , diagy , & + reltol_fgmres , dim_fgmres, & + maxits_fgmres , & + solx , soly , & + nbiter) + ! Put FGMRES solution solx,soly in fpfunc vector (needed for Anderson) + call arrays_to_vec (nx_block , ny_block , & + nblocks , max_blocks , & + icellu (:), ntot , & + indxui (:,:), indxuj (:,:), & + solx (:,:,:), soly (:,:,:), & + fpfunc (:)) + elseif (fpfunc_andacc == 2) then + ! g_2(x) = x - A(x)x + b(x) = x - F(x) + call abort_ice(error_message=subname // " Fixed point function g_2(x) not yet implemented (fpfunc_andacc = 2)" , & + file=__FILE__, line=__LINE__) + endif + + ! Compute fixed point residual f(x) = g(x) - x + res = fpfunc - sol +#ifdef USE_LAPACK + fpres_norm = global_sum(dnrm2(size(res), res, inc)**2, distrb_info) +#else + call vec_to_arrays (nx_block , ny_block , & + nblocks , max_blocks , & + icellu (:), ntot , & + indxui (:,:), indxuj(:,:) , & + res (:), & + fpresx (:,:,:), fpresy (:,:,:)) + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + call calc_L2norm_squared (nx_block , ny_block , & + icellu (iblk), & + indxui (:,iblk), indxuj (:,iblk), & + fpresx(:,:,iblk), fpresy(:,:,iblk), & + L2norm (iblk)) + enddo + !$OMP END PARALLEL DO + fpres_norm = sqrt(global_sum(sum(L2norm), distrb_info)) +#endif + if (my_task == master_task .and. monitor_nonlin) then + write(nu_diag, '(a,i4,a,d26.16)') "monitor_nonlin: iter_nonlin= ", it_nl, & + " fixed_point_res_L2norm= ", fpres_norm + endif + + ! Not used for now (only nonlinear residual is checked) + ! ! Store initial residual norm + ! if (it_nl == 0) then + ! tol = reltol_andacc*fpres_norm + ! endif + ! + ! ! Check residual + ! if (fpres_norm < tol) then + ! exit + ! endif + + if (dim_andacc == 0 .or. it_nl < start_andacc) then + ! Simple fixed point (Picard) iteration in this case + sol = fpfunc + else +#ifdef USE_LAPACK + ! Begin Anderson acceleration + if (get_num_procs() > 1) then + ! Anderson solver is not yet parallelized; abort + if (my_task == master_task) then + call abort_ice(error_message=subname // " Anderson solver (algo_nonlin = 'anderson') is not yet parallelized, and nprocs > 1 " , & + file=__FILE__, line=__LINE__) + endif + endif + if (it_nl > start_andacc) then + ! Update residual difference vector + res_diff = res - res_old + ! Update fixed point function difference matrix + if (res_num < dim_andacc) then + ! Add column + G_diff(:,res_num+1) = fpfunc - fpfunc_old + else + ! Delete first column and add column + G_diff(:,1:res_num-1) = G_diff(:,2:res_num) + G_diff(:,res_num) = fpfunc - fpfunc_old + endif + res_num = res_num + 1 + endif + res_old = res + fpfunc_old = fpfunc + if (res_num == 0) then + sol = fpfunc + else + if (res_num == 1) then + ! 
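!=======================================================================
! For precond = 'diag' or 'pgmres', the code above assembles the
! matrix diagonal in two steps (rheology contribution, then the full
! diagonal with the vrel, umassdti and Cb terms).  Applying the
! resulting Jacobi preconditioner is just a pointwise division,
! z = D^{-1} r, as in this sketch (the numbers are placeholders):
!=======================================================================
      program demo_diag_precond
      implicit none
      integer, parameter :: dbl = kind(1.d0)
      integer, parameter :: n = 4
      real (kind=dbl) :: diagx(n), rx(n), zx(n)

      diagx = (/ 2.0_dbl, 4.0_dbl, 5.0_dbl, 8.0_dbl /)   ! assembled diagonal
      rx    = (/ 2.0_dbl, 2.0_dbl, 5.0_dbl, 4.0_dbl /)   ! residual to precondition
      zx    = rx/diagx           ! Jacobi preconditioning: z = D^{-1} r
      print *, 'z =', zx         ! 1.0  0.5  1.0  0.5
      end program demo_diag_precond
!=======================================================================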
Initialize QR factorization + R(1,1) = dnrm2(size(res_diff), res_diff, inc) + Q(:,1) = res_diff/R(1,1) + else + if (res_num > dim_andacc) then + ! Update factorization since 1st column was deleted + call qr_delete(Q,R) + res_num = res_num - 1 + endif + ! Update QR factorization for new column + do j = 1, res_num - 1 + R(j,res_num) = ddot(ntot, Q(:,j), inc, res_diff, inc) + res_diff = res_diff - R(j,res_num) * Q(:,j) + enddo + R(res_num, res_num) = dnrm2(size(res_diff) ,res_diff, inc) + Q(:,res_num) = res_diff / R(res_num, res_num) + endif + ! TODO: here, drop more columns to improve conditioning + ! if (droptol) then + + ! endif + ! Solve least square problem for coefficients + ! 1. Compute rhs_tri = Q^T * res + call dgemv ('t', size(Q,1), res_num, c1, Q(:,1:res_num), size(Q,1), res, inc, c0, rhs_tri, inc) + ! 2. Solve R*coeffs = rhs_tri, put result in rhs_tri + call dtrsv ('u', 'n', 'n', res_num, R(1:res_num,1:res_num), res_num, rhs_tri, inc) + coeffs = rhs_tri + ! Update approximate solution: x = fpfunc - G_diff*coeffs, put result in fpfunc + call dgemv ('n', size(G_diff,1), res_num, -c1, G_diff(:,1:res_num), size(G_diff,1), coeffs, inc, c1, fpfunc, inc) + sol = fpfunc + ! Apply damping + if (damping_andacc > 0 .and. damping_andacc /= 1) then + ! x = x - (1-beta) (res - Q*R*coeffs) + + ! tmp = R*coeffs + call dgemv ('n', res_num, res_num, c1, R(1:res_num,1:res_num), res_num, coeffs, inc, c0, tmp, inc) + ! res = res - Q*tmp + call dgemv ('n', size(Q,1), res_num, -c1, Q(:,1:res_num), size(Q,1), tmp, inc, c1, res, inc) + ! x = x - (1-beta)*res + sol = sol - (1-damping_andacc)*res + endif + endif +#else + ! Anderson solver is not usable without LAPACK; abort + call abort_ice(error_message=subname // " CICE was not compiled with LAPACK, and Anderson solver was chosen (algo_nonlin = 'anderson')" , & + file=__FILE__, line=__LINE__) +#endif + endif + + !----------------------------------------------------------------------- + ! Put vector sol in uvel and vvel arrays + !----------------------------------------------------------------------- + call vec_to_arrays (nx_block , ny_block , & + nblocks , max_blocks , & + icellu (:), ntot , & + indxui (:,:), indxuj (:,:), & + sol (:), & + uvel (:,:,:), vvel (:,:,:)) + + ! Do halo update so that halo cells contain up to date info for advection + call stack_velocity_field(uvel, vvel, fld2) + call ice_timer_start(timer_bound) + if (maskhalo_dyn) then + call ice_HaloUpdate (fld2, halo_info_mask, & + field_loc_NEcorner, field_type_vector) + else + call ice_HaloUpdate (fld2, halo_info, & + field_loc_NEcorner, field_type_vector) + endif + call ice_timer_stop(timer_bound) + call unstack_velocity_field(fld2, uvel, vvel) + + ! Compute "progress" residual norm + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + fpresx(:,:,iblk) = uvel(:,:,iblk) - uprev_k(:,:,iblk) + fpresy(:,:,iblk) = vvel(:,:,iblk) - vprev_k(:,:,iblk) + call calc_L2norm_squared (nx_block , ny_block , & + icellu (iblk), & + indxui (:,iblk), indxuj (:,iblk), & + fpresx(:,:,iblk), fpresy(:,:,iblk), & + L2norm (iblk)) + enddo + !$OMP END PARALLEL DO + prog_norm = sqrt(global_sum(sum(L2norm), distrb_info)) + if (my_task == master_task .and. monitor_nonlin) then + write(nu_diag, '(a,i4,a,d26.16)') "monitor_nonlin: iter_nonlin= ", it_nl, & + " progress_res_L2norm= ", prog_norm + endif + + enddo ! nonlinear iteration loop + + end subroutine anderson_solver + +!======================================================================= + +! 
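!=======================================================================
! The QR-based least squares above combines up to dim_andacc stored
! residuals to accelerate the fixed point iteration.  With a window of
! a single stored residual the least squares problem has a closed-form
! solution, which gives this scalar Anderson(m=1) sketch on
! g(x) = cos(x); damping, QR updating and the parallel reductions are
! all omitted.  It reaches the tolerance in a handful of iterations,
! versus several dozen for the plain Picard version above:
!=======================================================================
      program demo_anderson_m1
      implicit none
      integer, parameter :: dbl = kind(1.d0)
      real (kind=dbl), parameter :: tol = 1e-12_dbl
      real (kind=dbl) :: x, f, fold, g, gold, coeff
      integer :: k

      x = 1.0_dbl
      gold = cos(x) ; fold = gold - x     ! first step is plain Picard
      x = gold
      do k = 2, 50
         g = cos(x) ; f = g - x           ! fixed point residual f = g(x) - x
         if (abs(f) < tol) exit
         coeff = f/(f - fold)             ! 1x1 least squares solution
         x = g - coeff*(g - gold)         ! Anderson update
         fold = f ; gold = g
      enddo
      print *, 'iterations =', k, '  x =', x
      end program demo_anderson_m1
!=======================================================================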
Computes the viscous coefficients (in fact zetaD=2*zeta) and dPr/dx, dPr/dy + + subroutine calc_zeta_dPr (nx_block, ny_block, & + icellt , & + indxti , indxtj , & + uvel , vvel , & + dxt , dyt , & + dxhy , dyhx , & + cxp , cyp , & + cxm , cym , & + tinyarea, & + strength, zetaD , & + stPr) + + use ice_dyn_shared, only: strain_rates + + integer (kind=int_kind), intent(in) :: & + nx_block, ny_block, & ! block dimensions + icellt ! no. of cells where icetmask = 1 + + integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & + indxti , & ! compressed index in i-direction + indxtj ! compressed index in j-direction + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & + strength , & ! ice strength (N/m) + uvel , & ! x-component of velocity (m/s) + vvel , & ! y-component of velocity (m/s) + dxt , & ! width of T-cell through the middle (m) + dyt , & ! height of T-cell through the middle (m) + dxhy , & ! 0.5*(HTE - HTE) + dyhx , & ! 0.5*(HTN - HTN) + cyp , & ! 1.5*HTE - 0.5*HTE + cxp , & ! 1.5*HTN - 0.5*HTN + cym , & ! 0.5*HTE - 1.5*HTE + cxm , & ! 0.5*HTN - 1.5*HTN + tinyarea ! min_strain_rate*tarea + + real (kind=dbl_kind), dimension(nx_block,ny_block,4), intent(out) :: & + zetaD ! 2*zeta + + real (kind=dbl_kind), dimension(nx_block,ny_block,8), intent(out) :: & + stPr ! stress combinations from replacement pressure + + ! local variables + + integer (kind=int_kind) :: & + i, j, ij + + real (kind=dbl_kind) :: & + divune, divunw, divuse, divusw , & ! divergence + tensionne, tensionnw, tensionse, tensionsw , & ! tension + shearne, shearnw, shearse, shearsw , & ! shearing + Deltane, Deltanw, Deltase, Deltasw , & ! Delt + ssigpn, ssigps, ssigpe, ssigpw, ssigp1, ssigp2, & + csigpne, csigpnw, csigpsw, csigpse , & + stressp_1, stressp_2, stressp_3, stressp_4 , & + strp_tmp + + logical :: capping ! of the viscous coeff + + character(len=*), parameter :: subname = '(calc_zeta_dPr)' + + ! Initialize + + capping = .false. + + ! Initialize stPr and zetaD to zero (for cells where icetmask is false) + stPr = c0 + zetaD = c0 + + do ij = 1, icellt + i = indxti(ij) + j = indxtj(ij) + + !----------------------------------------------------------------- + ! strain rates + ! NOTE these are actually strain rates * area (m^2/s) + !----------------------------------------------------------------- + call strain_rates (nx_block , ny_block , & + i , j , & + uvel , vvel , & + dxt , dyt , & + cxp , cyp , & + cxm , cym , & + divune , divunw , & + divuse , divusw , & + tensionne, tensionnw, & + tensionse, tensionsw, & + shearne , shearnw , & + shearse , shearsw , & + Deltane , Deltanw , & + Deltase , Deltasw) + + if (capping) then + zetaD(i,j,1) = strength(i,j)/max(Deltane,tinyarea(i,j)) + zetaD(i,j,2) = strength(i,j)/max(Deltanw,tinyarea(i,j)) + zetaD(i,j,3) = strength(i,j)/max(Deltasw,tinyarea(i,j)) + zetaD(i,j,4) = strength(i,j)/max(Deltase,tinyarea(i,j)) + else + zetaD(i,j,1) = strength(i,j)/(Deltane + tinyarea(i,j)) + zetaD(i,j,2) = strength(i,j)/(Deltanw + tinyarea(i,j)) + zetaD(i,j,3) = strength(i,j)/(Deltasw + tinyarea(i,j)) + zetaD(i,j,4) = strength(i,j)/(Deltase + tinyarea(i,j)) + endif + + !----------------------------------------------------------------- + ! the stresses ! kg/s^2 + ! 
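!=======================================================================
! calc_zeta_dPr above supports two regularizations of zetaD = P/Delta:
! a hard cap, P/max(Delta,tinyarea), and the smooth form
! P/(Delta + tinyarea) used when capping = .false.  The two agree in
! both limits (Delta = 0 and Delta >> tinyarea) and differ most, by a
! factor of two, at Delta = tinyarea:
!=======================================================================
      program demo_capping
      implicit none
      integer, parameter :: dbl = kind(1.d0)
      real (kind=dbl) :: strength, tinyar, Delta

      strength = 2.7e4_dbl    ! ice strength (N/m), hypothetical
      tinyar   = 0.2_dbl      ! tinyarea floor, as in the earlier sketch
      Delta    = tinyar       ! borderline case
      print *, 'capped :', strength/max(Delta,tinyar)   ! P/tinyar
      print *, 'smooth :', strength/(Delta + tinyar)    ! P/(2*tinyar), half as big
      end program demo_capping
!=======================================================================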
(1) northeast, (2) northwest, (3) southwest, (4) southeast + !----------------------------------------------------------------- + + stressp_1 = -zetaD(i,j,1)*(Deltane*(c1-Ktens)) + stressp_2 = -zetaD(i,j,2)*(Deltanw*(c1-Ktens)) + stressp_3 = -zetaD(i,j,3)*(Deltasw*(c1-Ktens)) + stressp_4 = -zetaD(i,j,4)*(Deltase*(c1-Ktens)) + + !----------------------------------------------------------------- + ! combinations of the Pr related stresses for the momentum equation ! kg/s^2 + !----------------------------------------------------------------- + + ssigpn = stressp_1 + stressp_2 + ssigps = stressp_3 + stressp_4 + ssigpe = stressp_1 + stressp_4 + ssigpw = stressp_2 + stressp_3 + ssigp1 =(stressp_1 + stressp_3)*p055 + ssigp2 =(stressp_2 + stressp_4)*p055 + + csigpne = p111*stressp_1 + ssigp2 + p027*stressp_3 + csigpnw = p111*stressp_2 + ssigp1 + p027*stressp_4 + csigpsw = p111*stressp_3 + ssigp2 + p027*stressp_1 + csigpse = p111*stressp_4 + ssigp1 + p027*stressp_2 + + !----------------------------------------------------------------- + ! for dF/dx (u momentum) + !----------------------------------------------------------------- + strp_tmp = p25*dyt(i,j)*(p333*ssigpn + p166*ssigps) + + ! northeast (i,j) + stPr(i,j,1) = -strp_tmp & + + dxhy(i,j)*(-csigpne) + + ! northwest (i+1,j) + stPr(i,j,2) = strp_tmp & + + dxhy(i,j)*(-csigpnw) + + strp_tmp = p25*dyt(i,j)*(p333*ssigps + p166*ssigpn) + + ! southeast (i,j+1) + stPr(i,j,3) = -strp_tmp & + + dxhy(i,j)*(-csigpse) + + ! southwest (i+1,j+1) + stPr(i,j,4) = strp_tmp & + + dxhy(i,j)*(-csigpsw) + + !----------------------------------------------------------------- + ! for dF/dy (v momentum) + !----------------------------------------------------------------- + strp_tmp = p25*dxt(i,j)*(p333*ssigpe + p166*ssigpw) + + ! northeast (i,j) + stPr(i,j,5) = -strp_tmp & + - dyhx(i,j)*(csigpne) + + ! southeast (i,j+1) + stPr(i,j,6) = strp_tmp & + - dyhx(i,j)*(csigpse) + + strp_tmp = p25*dxt(i,j)*(p333*ssigpw + p166*ssigpe) + + ! northwest (i+1,j) + stPr(i,j,7) = -strp_tmp & + - dyhx(i,j)*(csigpnw) + + ! southwest (i+1,j+1) + stPr(i,j,8) = strp_tmp & + - dyhx(i,j)*(csigpsw) + + enddo ! ij + + end subroutine calc_zeta_dPr + +!======================================================================= + +! Computes the VP stresses (as diagnostic) + + subroutine stress_vp (nx_block , ny_block , & + icellt , & + indxti , indxtj , & + uvel , vvel , & + dxt , dyt , & + cxp , cyp , & + cxm , cym , & + zetaD , & + stressp_1 , stressp_2 , & + stressp_3 , stressp_4 , & + stressm_1 , stressm_2 , & + stressm_3 , stressm_4 , & + stress12_1, stress12_2, & + stress12_3, stress12_4) + + use ice_dyn_shared, only: strain_rates + + integer (kind=int_kind), intent(in) :: & + nx_block, ny_block, & ! block dimensions + icellt ! no. of cells where icetmask = 1 + + integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & + indxti , & ! compressed index in i-direction + indxtj ! compressed index in j-direction + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & + uvel , & ! x-component of velocity (m/s) + vvel , & ! y-component of velocity (m/s) + dxt , & ! width of T-cell through the middle (m) + dyt , & ! height of T-cell through the middle (m) + cyp , & ! 1.5*HTE - 0.5*HTE + cxp , & ! 1.5*HTN - 0.5*HTN + cym , & ! 0.5*HTE - 1.5*HTE + cxm ! 0.5*HTN - 1.5*HTN + + real (kind=dbl_kind), dimension(nx_block,ny_block,4), intent(in) :: & + zetaD ! 
2*zeta + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(inout) :: & + stressp_1, stressp_2, stressp_3, stressp_4 , & ! sigma11+sigma22 + stressm_1, stressm_2, stressm_3, stressm_4 , & ! sigma11-sigma22 + stress12_1,stress12_2,stress12_3,stress12_4 ! sigma12 + + ! local variables + + integer (kind=int_kind) :: & + i, j, ij + + real (kind=dbl_kind) :: & + divune, divunw, divuse, divusw , & ! divergence + tensionne, tensionnw, tensionse, tensionsw, & ! tension + shearne, shearnw, shearse, shearsw , & ! shearing + Deltane, Deltanw, Deltase, Deltasw ! Delt + + character(len=*), parameter :: subname = '(stress_vp)' + + do ij = 1, icellt + i = indxti(ij) + j = indxtj(ij) + + !----------------------------------------------------------------- + ! strain rates + ! NOTE these are actually strain rates * area (m^2/s) + !----------------------------------------------------------------- + call strain_rates (nx_block , ny_block , & + i , j , & + uvel , vvel , & + dxt , dyt , & + cxp , cyp , & + cxm , cym , & + divune , divunw , & + divuse , divusw , & + tensionne, tensionnw, & + tensionse, tensionsw, & + shearne , shearnw , & + shearse , shearsw , & + Deltane , Deltanw , & + Deltase , Deltasw) + + !----------------------------------------------------------------- + ! the stresses ! kg/s^2 + ! (1) northeast, (2) northwest, (3) southwest, (4) southeast + !----------------------------------------------------------------- + + stressp_1(i,j) = zetaD(i,j,1)*(divune*(c1+Ktens) - Deltane*(c1-Ktens)) + stressp_2(i,j) = zetaD(i,j,2)*(divunw*(c1+Ktens) - Deltanw*(c1-Ktens)) + stressp_3(i,j) = zetaD(i,j,3)*(divusw*(c1+Ktens) - Deltasw*(c1-Ktens)) + stressp_4(i,j) = zetaD(i,j,4)*(divuse*(c1+Ktens) - Deltase*(c1-Ktens)) + + stressm_1(i,j) = zetaD(i,j,1)*tensionne*(c1+Ktens)*ecci + stressm_2(i,j) = zetaD(i,j,2)*tensionnw*(c1+Ktens)*ecci + stressm_3(i,j) = zetaD(i,j,3)*tensionsw*(c1+Ktens)*ecci + stressm_4(i,j) = zetaD(i,j,4)*tensionse*(c1+Ktens)*ecci + + stress12_1(i,j) = zetaD(i,j,1)*shearne*p5*(c1+Ktens)*ecci + stress12_2(i,j) = zetaD(i,j,2)*shearnw*p5*(c1+Ktens)*ecci + stress12_3(i,j) = zetaD(i,j,3)*shearsw*p5*(c1+Ktens)*ecci + stress12_4(i,j) = zetaD(i,j,4)*shearse*p5*(c1+Ktens)*ecci + + enddo ! ij + + end subroutine stress_vp + +!======================================================================= + +! Compute vrel and seabed stress coefficients + + subroutine calc_vrel_Cb (nx_block, ny_block, & + icellu , Cw , & + indxui , indxuj , & + aiu , Tbu , & + uocn , vocn , & + uvel , vvel , & + vrel , Cb) + + use ice_dyn_shared, only: u0 ! residual velocity for basal stress (m/s) + + integer (kind=int_kind), intent(in) :: & + nx_block, ny_block, & ! block dimensions + icellu ! total count when iceumask is true + + integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & + indxui , & ! compressed index in i-direction + indxuj ! compressed index in j-direction + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & + Tbu, & ! coefficient for basal stress (N/m^2) + aiu , & ! ice fraction on u-grid + uocn , & ! ocean current, x-direction (m/s) + vocn , & ! ocean current, y-direction (m/s) + Cw ! ocean-ice neutral drag coefficient + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & + uvel , & ! x-component of velocity (m/s) + vvel ! y-component of velocity (m/s) + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(inout) :: & + vrel , & ! coeff for tauw + Cb ! seabed stress coeff + + ! 
local variables + + integer (kind=int_kind) :: & + i, j, ij + + real (kind=dbl_kind) :: & + rhow ! + + character(len=*), parameter :: subname = '(calc_vrel_Cb)' + + call icepack_query_parameters(rhow_out=rhow) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + do ij = 1, icellu + i = indxui(ij) + j = indxuj(ij) + + ! (magnitude of relative ocean current)*rhow*drag*aice + vrel(i,j) = aiu(i,j)*rhow*Cw(i,j)*sqrt((uocn(i,j) - uvel(i,j))**2 + & + (vocn(i,j) - vvel(i,j))**2) ! m/s + + Cb(i,j) = Tbu(i,j) / (sqrt(uvel(i,j)**2 + vvel(i,j)**2) + u0) ! for seabed stress + enddo ! ij + + end subroutine calc_vrel_Cb + +!======================================================================= + +! Compute seabed stress (diagnostic) + + subroutine calc_seabed_stress (nx_block, ny_block, & + icellu , & + indxui , indxuj , & + uvel , vvel , & + Cb , & + taubx , tauby) + + integer (kind=int_kind), intent(in) :: & + nx_block, ny_block, & ! block dimensions + icellu ! total count when iceumask is true + + integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & + indxui , & ! compressed index in i-direction + indxuj ! compressed index in j-direction + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & + uvel , & ! x-component of velocity (m/s) + vvel , & ! y-component of velocity (m/s) + Cb ! seabed stress coefficient + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(out) :: & + taubx , & ! seabed stress, x-direction (N/m^2) + tauby ! seabed stress, y-direction (N/m^2) + + ! local variables + + integer (kind=int_kind) :: & + i, j, ij + + character(len=*), parameter :: subname = '(calc_seabed_stress)' + + do ij = 1, icellu + i = indxui(ij) + j = indxuj(ij) + + taubx(i,j) = -uvel(i,j)*Cb(i,j) + tauby(i,j) = -vvel(i,j)*Cb(i,j) + enddo ! ij + + end subroutine calc_seabed_stress + +!======================================================================= + +! Computes the matrix vector product A(u,v) * (u,v) +! Au = A(u,v)_[x] * uvel (x components of A(u,v) * (u,v)) +! Av = A(u,v)_[y] * vvel (y components of A(u,v) * (u,v)) + + subroutine matvec (nx_block, ny_block, & + icellu , icellt , & + indxui , indxuj , & + indxti , indxtj , & + dxt , dyt , & + dxhy , dyhx , & + cxp , cyp , & + cxm , cym , & + uvel , vvel , & + vrel , Cb , & + zetaD , & + umassdti, fm , & + uarear , & + Au , Av) + + use ice_dyn_shared, only: strain_rates + + integer (kind=int_kind), intent(in) :: & + nx_block, ny_block, & ! block dimensions + icellu, & ! total count when iceumask is true + icellt ! no. of cells where icetmask = 1 + + integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & + indxui , & ! compressed index in i-direction + indxuj , & ! compressed index in j-direction + indxti , & ! compressed index in i-direction + indxtj ! compressed index in j-direction + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & + dxt , & ! width of T-cell through the middle (m) + dyt , & ! height of T-cell through the middle (m) + dxhy , & ! 0.5*(HTE - HTE) + dyhx , & ! 0.5*(HTN - HTN) + cyp , & ! 1.5*HTE - 0.5*HTE + cxp , & ! 1.5*HTN - 0.5*HTN + cym , & ! 0.5*HTE - 1.5*HTE + cxm ! 0.5*HTN - 1.5*HTN + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & + uvel , & ! x-component of velocity (m/s) + vvel , & ! y-component of velocity (m/s) + vrel , & ! coefficient for tauw + Cb , & ! coefficient for basal stress + umassdti, & ! 
mass of U-cell/dt (kg/m^2 s) + fm , & ! Coriolis param. * mass in U-cell (kg/s) + uarear ! 1/uarea + + real (kind=dbl_kind), dimension(nx_block,ny_block,4), intent(in) :: & + zetaD ! 2*zeta + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(inout) :: & + Au , & ! matvec, Fx = bx - Au (N/m^2) + Av ! matvec, Fy = by - Av (N/m^2) + + ! local variables + + integer (kind=int_kind) :: & + i, j, ij + + real (kind=dbl_kind), dimension(nx_block,ny_block,8):: & + str + + real (kind=dbl_kind) :: & + ccaimp,ccb , & ! intermediate variables + strintx, strinty ! divergence of the internal stress tensor + + real (kind=dbl_kind) :: & + divune, divunw, divuse, divusw , & ! divergence + tensionne, tensionnw, tensionse, tensionsw, & ! tension + shearne, shearnw, shearse, shearsw , & ! shearing + Deltane, Deltanw, Deltase, Deltasw , & ! Delt + ssigpn, ssigps, ssigpe, ssigpw , & + ssigmn, ssigms, ssigme, ssigmw , & + ssig12n, ssig12s, ssig12e, ssig12w , & + ssigp1, ssigp2, ssigm1, ssigm2, ssig121, ssig122, & + csigpne, csigpnw, csigpse, csigpsw , & + csigmne, csigmnw, csigmse, csigmsw , & + csig12ne, csig12nw, csig12se, csig12sw , & + str12ew, str12we, str12ns, str12sn , & + strp_tmp, strm_tmp + + real (kind=dbl_kind) :: & + stressp_1, stressp_2, stressp_3, stressp_4 , & ! sigma11+sigma22 (without Pr) + stressm_1, stressm_2, stressm_3, stressm_4 , & ! sigma11-sigma22 + stress12_1,stress12_2,stress12_3,stress12_4 ! sigma12 + + character(len=*), parameter :: subname = '(matvec)' + + !----------------------------------------------------------------- + ! Initialize + !----------------------------------------------------------------- + + str(:,:,:) = c0 + + do ij = 1, icellt + i = indxti(ij) + j = indxtj(ij) + + !----------------------------------------------------------------- + ! strain rates + ! NOTE these are actually strain rates * area (m^2/s) + !----------------------------------------------------------------- + call strain_rates (nx_block , ny_block , & + i , j , & + uvel , vvel , & + dxt , dyt , & + cxp , cyp , & + cxm , cym , & + divune , divunw , & + divuse , divusw , & + tensionne, tensionnw, & + tensionse, tensionsw, & + shearne , shearnw , & + shearse , shearsw , & + Deltane , Deltanw , & + Deltase , Deltasw) + + !----------------------------------------------------------------- + ! the stresses ! kg/s^2 + ! (1) northeast, (2) northwest, (3) southwest, (4) southeast + ! NOTE: commented part of stressp is from the replacement pressure Pr + !----------------------------------------------------------------- + + stressp_1 = zetaD(i,j,1)*(divune*(c1+Ktens))! - Deltane*(c1-Ktens)) + stressp_2 = zetaD(i,j,2)*(divunw*(c1+Ktens))! - Deltanw*(c1-Ktens)) + stressp_3 = zetaD(i,j,3)*(divusw*(c1+Ktens))! - Deltasw*(c1-Ktens)) + stressp_4 = zetaD(i,j,4)*(divuse*(c1+Ktens))! - Deltase*(c1-Ktens)) + + stressm_1 = zetaD(i,j,1)*tensionne*(c1+Ktens)*ecci + stressm_2 = zetaD(i,j,2)*tensionnw*(c1+Ktens)*ecci + stressm_3 = zetaD(i,j,3)*tensionsw*(c1+Ktens)*ecci + stressm_4 = zetaD(i,j,4)*tensionse*(c1+Ktens)*ecci + + stress12_1 = zetaD(i,j,1)*shearne*p5*(c1+Ktens)*ecci + stress12_2 = zetaD(i,j,2)*shearnw*p5*(c1+Ktens)*ecci + stress12_3 = zetaD(i,j,3)*shearsw*p5*(c1+Ktens)*ecci + stress12_4 = zetaD(i,j,4)*shearse*p5*(c1+Ktens)*ecci + + !----------------------------------------------------------------- + ! combinations of the stresses for the momentum equation ! 
kg/s^2 + !----------------------------------------------------------------- + + ssigpn = stressp_1 + stressp_2 + ssigps = stressp_3 + stressp_4 + ssigpe = stressp_1 + stressp_4 + ssigpw = stressp_2 + stressp_3 + ssigp1 =(stressp_1 + stressp_3)*p055 + ssigp2 =(stressp_2 + stressp_4)*p055 + + ssigmn = stressm_1 + stressm_2 + ssigms = stressm_3 + stressm_4 + ssigme = stressm_1 + stressm_4 + ssigmw = stressm_2 + stressm_3 + ssigm1 =(stressm_1 + stressm_3)*p055 + ssigm2 =(stressm_2 + stressm_4)*p055 + + ssig12n = stress12_1 + stress12_2 + ssig12s = stress12_3 + stress12_4 + ssig12e = stress12_1 + stress12_4 + ssig12w = stress12_2 + stress12_3 + ssig121 =(stress12_1 + stress12_3)*p111 + ssig122 =(stress12_2 + stress12_4)*p111 + + csigpne = p111*stressp_1 + ssigp2 + p027*stressp_3 + csigpnw = p111*stressp_2 + ssigp1 + p027*stressp_4 + csigpsw = p111*stressp_3 + ssigp2 + p027*stressp_1 + csigpse = p111*stressp_4 + ssigp1 + p027*stressp_2 + + csigmne = p111*stressm_1 + ssigm2 + p027*stressm_3 + csigmnw = p111*stressm_2 + ssigm1 + p027*stressm_4 + csigmsw = p111*stressm_3 + ssigm2 + p027*stressm_1 + csigmse = p111*stressm_4 + ssigm1 + p027*stressm_2 + + csig12ne = p222*stress12_1 + ssig122 & + + p055*stress12_3 + csig12nw = p222*stress12_2 + ssig121 & + + p055*stress12_4 + csig12sw = p222*stress12_3 + ssig122 & + + p055*stress12_1 + csig12se = p222*stress12_4 + ssig121 & + + p055*stress12_2 + + str12ew = p5*dxt(i,j)*(p333*ssig12e + p166*ssig12w) + str12we = p5*dxt(i,j)*(p333*ssig12w + p166*ssig12e) + str12ns = p5*dyt(i,j)*(p333*ssig12n + p166*ssig12s) + str12sn = p5*dyt(i,j)*(p333*ssig12s + p166*ssig12n) + + !----------------------------------------------------------------- + ! for dF/dx (u momentum) + !----------------------------------------------------------------- + strp_tmp = p25*dyt(i,j)*(p333*ssigpn + p166*ssigps) + strm_tmp = p25*dyt(i,j)*(p333*ssigmn + p166*ssigms) + + ! northeast (i,j) + str(i,j,1) = -strp_tmp - strm_tmp - str12ew & + + dxhy(i,j)*(-csigpne + csigmne) + dyhx(i,j)*csig12ne + + ! northwest (i+1,j) + str(i,j,2) = strp_tmp + strm_tmp - str12we & + + dxhy(i,j)*(-csigpnw + csigmnw) + dyhx(i,j)*csig12nw + + strp_tmp = p25*dyt(i,j)*(p333*ssigps + p166*ssigpn) + strm_tmp = p25*dyt(i,j)*(p333*ssigms + p166*ssigmn) + + ! southeast (i,j+1) + str(i,j,3) = -strp_tmp - strm_tmp + str12ew & + + dxhy(i,j)*(-csigpse + csigmse) + dyhx(i,j)*csig12se + + ! southwest (i+1,j+1) + str(i,j,4) = strp_tmp + strm_tmp + str12we & + + dxhy(i,j)*(-csigpsw + csigmsw) + dyhx(i,j)*csig12sw + + !----------------------------------------------------------------- + ! for dF/dy (v momentum) + !----------------------------------------------------------------- + strp_tmp = p25*dxt(i,j)*(p333*ssigpe + p166*ssigpw) + strm_tmp = p25*dxt(i,j)*(p333*ssigme + p166*ssigmw) + + ! northeast (i,j) + str(i,j,5) = -strp_tmp + strm_tmp - str12ns & + - dyhx(i,j)*(csigpne + csigmne) + dxhy(i,j)*csig12ne + + ! southeast (i,j+1) + str(i,j,6) = strp_tmp - strm_tmp - str12sn & + - dyhx(i,j)*(csigpse + csigmse) + dxhy(i,j)*csig12se + + strp_tmp = p25*dxt(i,j)*(p333*ssigpw + p166*ssigpe) + strm_tmp = p25*dxt(i,j)*(p333*ssigmw + p166*ssigme) + + ! northwest (i+1,j) + str(i,j,7) = -strp_tmp + strm_tmp + str12ns & + - dyhx(i,j)*(csigpnw + csigmnw) + dxhy(i,j)*csig12nw + + ! southwest (i+1,j+1) + str(i,j,8) = strp_tmp - strm_tmp + str12sn & + - dyhx(i,j)*(csigpsw + csigmsw) + dxhy(i,j)*csig12sw + + enddo ! ij - icellt + + !----------------------------------------------------------------- + ! 
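Overview (a sketch of the loop below, in the local notation defined
+ ! above): with the rheology contributions gathered in 'str', the
+ ! matrix-free product at each U-point reduces to
+ !
+ !    Au = ccaimp*u - ccb*v - strintx
+ !    Av = ccaimp*v + ccb*u - strinty
+ !
+ ! i.e. inertia, water drag and seabed stress on the diagonal, the
+ ! Coriolis and rotated ocean stress terms off-diagonal, minus the
+ ! divergence of the internal stress tensor.
+ !
+ ! 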
Form Au and Av
+ !-----------------------------------------------------------------
+
+ do ij = 1, icellu
+ i = indxui(ij)
+ j = indxuj(ij)
+
+ ccaimp = umassdti(i,j) + vrel(i,j) * cosw + Cb(i,j) ! kg/m^2 s
+
+ ccb = fm(i,j) + sign(c1,fm(i,j)) * vrel(i,j) * sinw ! kg/m^2 s
+
+ ! divergence of the internal stress tensor
+ strintx = uarear(i,j)* &
+ (str(i,j,1) + str(i+1,j,2) + str(i,j+1,3) + str(i+1,j+1,4))
+ strinty = uarear(i,j)* &
+ (str(i,j,5) + str(i,j+1,6) + str(i+1,j,7) + str(i+1,j+1,8))
+
+ Au(i,j) = ccaimp*uvel(i,j) - ccb*vvel(i,j) - strintx
+ Av(i,j) = ccaimp*vvel(i,j) + ccb*uvel(i,j) - strinty
+ enddo ! ij - icellu
+
+ end subroutine matvec
+
+!=======================================================================
+
+! Compute the constant component of b(u,v), i.e. the part of b(u,v) that
+! does not depend on (u,v) and thus does not change during the nonlinear iteration
+
+ subroutine calc_bfix (nx_block , ny_block , &
+ icellu , &
+ indxui , indxuj , &
+ umassdti , &
+ forcex , forcey , &
+ uvel_init, vvel_init, &
+ bxfix , byfix)
+
+ integer (kind=int_kind), intent(in) :: &
+ nx_block, ny_block, & ! block dimensions
+ icellu ! no. of cells where iceumask = 1
+
+ integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: &
+ indxui , & ! compressed index in i-direction
+ indxuj ! compressed index in j-direction
+
+ real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: &
+ uvel_init,& ! x-component of velocity (m/s), beginning of time step
+ vvel_init,& ! y-component of velocity (m/s), beginning of time step
+ umassdti, & ! mass of U-cell/dt (kg/m^2 s)
+ forcex , & ! work array: combined atm stress and ocn tilt, x
+ forcey ! work array: combined atm stress and ocn tilt, y
+
+ real (kind=dbl_kind), dimension (nx_block,ny_block), intent(out) :: &
+ bxfix , & ! bx = taux + bxfix
+ byfix ! by = tauy + byfix
+
+ ! local variables
+
+ integer (kind=int_kind) :: &
+ i, j, ij
+
+ character(len=*), parameter :: subname = '(calc_bfix)'
+
+ do ij = 1, icellu
+ i = indxui(ij)
+ j = indxuj(ij)
+
+ bxfix(i,j) = umassdti(i,j)*uvel_init(i,j) + forcex(i,j)
+ byfix(i,j) = umassdti(i,j)*vvel_init(i,j) + forcey(i,j)
+ enddo
+
+ end subroutine calc_bfix
+
+!=======================================================================
+
+! Compute the vector b(u,v), i.e. the part of the nonlinear function F(u,v)
+! that cannot be written as A(u,v)*(u,v), where A(u,v) is a matrix with entries
+! depending on (u,v)
+
+ subroutine calc_bvec (nx_block, ny_block, &
+ icellu , &
+ indxui , indxuj , &
+ stPr , uarear , &
+ waterx , watery , &
+ bxfix , byfix , &
+ bx , by , &
+ vrel)
+
+ integer (kind=int_kind), intent(in) :: &
+ nx_block, ny_block, & ! block dimensions
+ icellu ! total count when iceumask is true
+
+ integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: &
+ indxui , & ! compressed index in i-direction
+ indxuj ! compressed index in j-direction
+
+ real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: &
+ uarear , & ! 1/uarea
+ waterx , & ! for ocean stress calculation, x (m/s)
+ watery , & ! for ocean stress calculation, y (m/s)
+ bxfix , & ! bx = taux + bxfix
+ byfix , & ! by = tauy + byfix
+ vrel ! relative ice-ocean velocity
+
+ real (kind=dbl_kind), dimension(nx_block,ny_block,8), intent(in) :: &
+ stPr
+
+ real (kind=dbl_kind), dimension (nx_block,ny_block), intent(out) :: &
+ bx , & ! b vector, bx = taux + bxfix (N/m^2)
+ by ! b vector, by = tauy + byfix (N/m^2)
+
+ ! 
local variables + + integer (kind=int_kind) :: & + i, j, ij + + real (kind=dbl_kind) :: & + taux, tauy , & ! part of ocean stress term + strintx, strinty , & ! divergence of the internal stress tensor (only Pr contributions) + rhow ! + + character(len=*), parameter :: subname = '(calc_bvec)' + + !----------------------------------------------------------------- + ! calc b vector + !----------------------------------------------------------------- + + call icepack_query_parameters(rhow_out=rhow) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + do ij = 1, icellu + i = indxui(ij) + j = indxuj(ij) + + ! ice/ocean stress + taux = vrel(i,j)*waterx(i,j) ! NOTE this is not the entire + tauy = vrel(i,j)*watery(i,j) ! ocn stress term + + ! divergence of the internal stress tensor (only Pr part, i.e. dPr/dx, dPr/dy) + strintx = uarear(i,j)* & + (stPr(i,j,1) + stPr(i+1,j,2) + stPr(i,j+1,3) + stPr(i+1,j+1,4)) + strinty = uarear(i,j)* & + (stPr(i,j,5) + stPr(i,j+1,6) + stPr(i+1,j,7) + stPr(i+1,j+1,8)) + + bx(i,j) = bxfix(i,j) + taux + strintx + by(i,j) = byfix(i,j) + tauy + strinty + enddo ! ij + + end subroutine calc_bvec + +!======================================================================= + +! Compute the non linear residual F(u,v) = b(u,v) - A(u,v) * (u,v), +! with Au, Av precomputed as +! Au = A(u,v)_[x] * uvel (x components of A(u,v) * (u,v)) +! Av = A(u,v)_[y] * vvel (y components of A(u,v) * (u,v)) + + subroutine residual_vec (nx_block , ny_block, & + icellu , & + indxui , indxuj , & + bx , by , & + Au , Av , & + Fx , Fy , & + sum_squared) + + integer (kind=int_kind), intent(in) :: & + nx_block, ny_block, & ! block dimensions + icellu ! total count when iceumask is true + + integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & + indxui , & ! compressed index in i-direction + indxuj ! compressed index in j-direction + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & + bx , & ! b vector, bx = taux + bxfix (N/m^2) + by , & ! b vector, by = tauy + byfix (N/m^2) + Au , & ! matvec, Fx = bx - Au (N/m^2) + Av ! matvec, Fy = by - Av (N/m^2) + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(inout) :: & + Fx , & ! x residual vector, Fx = bx - Au (N/m^2) + Fy ! y residual vector, Fy = by - Av (N/m^2) + + real (kind=dbl_kind), intent(out), optional :: & + sum_squared ! sum of squared residual vector components + + ! local variables + + integer (kind=int_kind) :: & + i, j, ij + + character(len=*), parameter :: subname = '(residual_vec)' + + !----------------------------------------------------------------- + ! compute residual and sum its squared components + !----------------------------------------------------------------- + + if (present(sum_squared)) then + sum_squared = c0 + endif + + do ij = 1, icellu + i = indxui(ij) + j = indxuj(ij) + + Fx(i,j) = bx(i,j) - Au(i,j) + Fy(i,j) = by(i,j) - Av(i,j) + if (present(sum_squared)) then + sum_squared = sum_squared + Fx(i,j)**2 + Fy(i,j)**2 + endif + enddo ! ij + + end subroutine residual_vec + +!======================================================================= + +! Form the diagonal of the matrix A(u,v) (first part of the computation) +! 
Part 1: compute the contributions to the diagonal from the rheology term
+
+ subroutine formDiag_step1 (nx_block, ny_block, &
+ icellu , &
+ indxui , indxuj , &
+ dxt , dyt , &
+ dxhy , dyhx , &
+ cxp , cyp , &
+ cxm , cym , &
+ zetaD , Drheo)
+
+ integer (kind=int_kind), intent(in) :: &
+ nx_block, ny_block, & ! block dimensions
+ icellu ! no. of cells where iceumask = 1
+
+ integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: &
+ indxui , & ! compressed index in i-direction
+ indxuj ! compressed index in j-direction
+
+ real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: &
+ dxt , & ! width of T-cell through the middle (m)
+ dyt , & ! height of T-cell through the middle (m)
+ dxhy , & ! 0.5*(HTE - HTE)
+ dyhx , & ! 0.5*(HTN - HTN)
+ cyp , & ! 1.5*HTE - 0.5*HTE
+ cxp , & ! 1.5*HTN - 0.5*HTN
+ cym , & ! 0.5*HTE - 1.5*HTE
+ cxm ! 0.5*HTN - 1.5*HTN
+
+ real (kind=dbl_kind), dimension(nx_block,ny_block,4), intent(in) :: &
+ zetaD ! 2*zeta
+
+ real (kind=dbl_kind), dimension(nx_block,ny_block,8), intent(out) :: &
+ Drheo ! intermediate value for diagonal components of matrix A associated
+ ! with rheology term
+
+ ! local variables
+
+ integer (kind=int_kind) :: &
+ i, j, ij, iu, ju, di, dj, cc
+
+ real (kind=dbl_kind) :: &
+ divune, divunw, divuse, divusw , & ! divergence
+ tensionne, tensionnw, tensionse, tensionsw, & ! tension
+ shearne, shearnw, shearse, shearsw , & ! shearing
+ uij,ui1j,uij1,ui1j1,vij,vi1j,vij1,vi1j1 , & ! == c0 or c1
+ stressp_1, stressp_2, stressp_3, stressp_4, &
+ stressm_1, stressm_2, stressm_3, stressm_4, &
+ stress12_1,stress12_2,stress12_3,stress12_4,&
+ ssigpn, ssigps, ssigpe, ssigpw , &
+ ssigmn, ssigms, ssigme, ssigmw , &
+ ssig12n, ssig12s, ssig12e, ssig12w , &
+ ssigp1, ssigp2, ssigm1, ssigm2, ssig121, ssig122, &
+ csigpne, csigpnw, csigpse, csigpsw , &
+ csigmne, csigmnw, csigmse, csigmsw , &
+ csig12ne, csig12nw, csig12se, csig12sw , &
+ str12ew, str12we, str12ns, str12sn , &
+ strp_tmp, strm_tmp
+
+ character(len=*), parameter :: subname = '(formDiag_step1)'
+
+ !-----------------------------------------------------------------
+ ! Initialize
+ !-----------------------------------------------------------------
+
+ Drheo(:,:,:) = c0
+
+ ! Be careful: Drheo contains 4 terms for u and 4 terms for v.
+ ! These 8 terms come from the surrounding T cells but are all
+ ! referenced to the i,j (u point):
+
+ ! Drheo(i,j,1) corresponds to str(i,j,1)
+ ! Drheo(i,j,2) corresponds to str(i+1,j,2)
+ ! Drheo(i,j,3) corresponds to str(i,j+1,3)
+ ! Drheo(i,j,4) corresponds to str(i+1,j+1,4)
+ ! Drheo(i,j,5) corresponds to str(i,j,5)
+ ! Drheo(i,j,6) corresponds to str(i,j+1,6)
+ ! Drheo(i,j,7) corresponds to str(i+1,j,7)
+ ! Drheo(i,j,8) corresponds to str(i+1,j+1,8)
+
+ do cc = 1, 8 ! 4 for u and 4 for v
+
+ if (cc == 1) then ! u comp, T cell i,j
+ uij = c1
+ ui1j = c0
+ uij1 = c0
+ ui1j1 = c0
+ vij = c0
+ vi1j = c0
+ vij1 = c0
+ vi1j1 = c0
+ di = 0
+ dj = 0
+ elseif (cc == 2) then ! u comp, T cell i+1,j
+ uij = c0
+ ui1j = c1
+ uij1 = c0
+ ui1j1 = c0
+ vij = c0
+ vi1j = c0
+ vij1 = c0
+ vi1j1 = c0
+ di = 1
+ dj = 0
+ elseif (cc == 3) then ! u comp, T cell i,j+1
+ uij = c0
+ ui1j = c0
+ uij1 = c1
+ ui1j1 = c0
+ vij = c0
+ vi1j = c0
+ vij1 = c0
+ vi1j1 = c0
+ di = 0
+ dj = 1
+ elseif (cc == 4) then ! u comp, T cell i+1,j+1
+ uij = c0
+ ui1j = c0
+ uij1 = c0
+ ui1j1 = c1
+ vij = c0
+ vi1j = c0
+ vij1 = c0
+ vi1j1 = c0
+ di = 1
+ dj = 1
+ elseif (cc == 5) then ! 
v comp, T cell i,j + uij = c0 + ui1j = c0 + uij1 = c0 + ui1j1 = c0 + vij = c1 + vi1j = c0 + vij1 = c0 + vi1j1 = c0 + di = 0 + dj = 0 + elseif (cc == 6) then ! v comp, T cell i,j+1 + uij = c0 + ui1j = c0 + uij1 = c0 + ui1j1 = c0 + vij = c0 + vi1j = c0 + vij1 = c1 + vi1j1 = c0 + di = 0 + dj = 1 + elseif (cc == 7) then ! v comp, T cell i+1,j + uij = c0 + ui1j = c0 + uij1 = c0 + ui1j1 = c0 + vij = c0 + vi1j = c1 + vij1 = c0 + vi1j1 = c0 + di = 1 + dj = 0 + elseif (cc == 8) then ! v comp, T cell i+1,j+1 + uij = c0 + ui1j = c0 + uij1 = c0 + ui1j1 = c0 + vij = c0 + vi1j = c0 + vij1 = c0 + vi1j1 = c1 + di = 1 + dj = 1 + endif + + do ij = 1, icellu + + iu = indxui(ij) + ju = indxuj(ij) + i = iu + di + j = ju + dj + + !----------------------------------------------------------------- + ! strain rates + ! NOTE these are actually strain rates * area (m^2/s) + !----------------------------------------------------------------- + ! divergence = e_11 + e_22 + divune = cyp(i,j)*uij - dyt(i,j)*ui1j & + + cxp(i,j)*vij - dxt(i,j)*vij1 + divunw = cym(i,j)*ui1j + dyt(i,j)*uij & + + cxp(i,j)*vi1j - dxt(i,j)*vi1j1 + divusw = cym(i,j)*ui1j1 + dyt(i,j)*uij1 & + + cxm(i,j)*vi1j1 + dxt(i,j)*vi1j + divuse = cyp(i,j)*uij1 - dyt(i,j)*ui1j1 & + + cxm(i,j)*vij1 + dxt(i,j)*vij + + ! tension strain rate = e_11 - e_22 + tensionne = -cym(i,j)*uij - dyt(i,j)*ui1j & + + cxm(i,j)*vij + dxt(i,j)*vij1 + tensionnw = -cyp(i,j)*ui1j + dyt(i,j)*uij & + + cxm(i,j)*vi1j + dxt(i,j)*vi1j1 + tensionsw = -cyp(i,j)*ui1j1 + dyt(i,j)*uij1 & + + cxp(i,j)*vi1j1 - dxt(i,j)*vi1j + tensionse = -cym(i,j)*uij1 - dyt(i,j)*ui1j1 & + + cxp(i,j)*vij1 - dxt(i,j)*vij + + ! shearing strain rate = 2*e_12 + shearne = -cym(i,j)*vij - dyt(i,j)*vi1j & + - cxm(i,j)*uij - dxt(i,j)*uij1 + shearnw = -cyp(i,j)*vi1j + dyt(i,j)*vij & + - cxm(i,j)*ui1j - dxt(i,j)*ui1j1 + shearsw = -cyp(i,j)*vi1j1 + dyt(i,j)*vij1 & + - cxp(i,j)*ui1j1 + dxt(i,j)*ui1j + shearse = -cym(i,j)*vij1 - dyt(i,j)*vi1j1 & + - cxp(i,j)*uij1 + dxt(i,j)*uij + + !----------------------------------------------------------------- + ! the stresses ! kg/s^2 + ! (1) northeast, (2) northwest, (3) southwest, (4) southeast + !----------------------------------------------------------------- + + stressp_1 = zetaD(i,j,1)*divune*(c1+Ktens) + stressp_2 = zetaD(i,j,2)*divunw*(c1+Ktens) + stressp_3 = zetaD(i,j,3)*divusw*(c1+Ktens) + stressp_4 = zetaD(i,j,4)*divuse*(c1+Ktens) + + stressm_1 = zetaD(i,j,1)*tensionne*(c1+Ktens)*ecci + stressm_2 = zetaD(i,j,2)*tensionnw*(c1+Ktens)*ecci + stressm_3 = zetaD(i,j,3)*tensionsw*(c1+Ktens)*ecci + stressm_4 = zetaD(i,j,4)*tensionse*(c1+Ktens)*ecci + + stress12_1 = zetaD(i,j,1)*shearne*p5*(c1+Ktens)*ecci + stress12_2 = zetaD(i,j,2)*shearnw*p5*(c1+Ktens)*ecci + stress12_3 = zetaD(i,j,3)*shearsw*p5*(c1+Ktens)*ecci + stress12_4 = zetaD(i,j,4)*shearse*p5*(c1+Ktens)*ecci + + !----------------------------------------------------------------- + ! combinations of the stresses for the momentum equation ! 
kg/s^2 + !----------------------------------------------------------------- + + ssigpn = stressp_1 + stressp_2 + ssigps = stressp_3 + stressp_4 + ssigpe = stressp_1 + stressp_4 + ssigpw = stressp_2 + stressp_3 + ssigp1 =(stressp_1 + stressp_3)*p055 + ssigp2 =(stressp_2 + stressp_4)*p055 + + ssigmn = stressm_1 + stressm_2 + ssigms = stressm_3 + stressm_4 + ssigme = stressm_1 + stressm_4 + ssigmw = stressm_2 + stressm_3 + ssigm1 =(stressm_1 + stressm_3)*p055 + ssigm2 =(stressm_2 + stressm_4)*p055 + + ssig12n = stress12_1 + stress12_2 + ssig12s = stress12_3 + stress12_4 + ssig12e = stress12_1 + stress12_4 + ssig12w = stress12_2 + stress12_3 + ssig121 =(stress12_1 + stress12_3)*p111 + ssig122 =(stress12_2 + stress12_4)*p111 + + csigpne = p111*stressp_1 + ssigp2 + p027*stressp_3 + csigpnw = p111*stressp_2 + ssigp1 + p027*stressp_4 + csigpsw = p111*stressp_3 + ssigp2 + p027*stressp_1 + csigpse = p111*stressp_4 + ssigp1 + p027*stressp_2 + + csigmne = p111*stressm_1 + ssigm2 + p027*stressm_3 + csigmnw = p111*stressm_2 + ssigm1 + p027*stressm_4 + csigmsw = p111*stressm_3 + ssigm2 + p027*stressm_1 + csigmse = p111*stressm_4 + ssigm1 + p027*stressm_2 + + csig12ne = p222*stress12_1 + ssig122 & + + p055*stress12_3 + csig12nw = p222*stress12_2 + ssig121 & + + p055*stress12_4 + csig12sw = p222*stress12_3 + ssig122 & + + p055*stress12_1 + csig12se = p222*stress12_4 + ssig121 & + + p055*stress12_2 + + str12ew = p5*dxt(i,j)*(p333*ssig12e + p166*ssig12w) + str12we = p5*dxt(i,j)*(p333*ssig12w + p166*ssig12e) + str12ns = p5*dyt(i,j)*(p333*ssig12n + p166*ssig12s) + str12sn = p5*dyt(i,j)*(p333*ssig12s + p166*ssig12n) + + !----------------------------------------------------------------- + ! for dF/dx (u momentum) + !----------------------------------------------------------------- + + if (cc == 1) then ! T cell i,j + + strp_tmp = p25*dyt(i,j)*(p333*ssigpn + p166*ssigps) + strm_tmp = p25*dyt(i,j)*(p333*ssigmn + p166*ssigms) + + ! northeast (i,j) + Drheo(iu,ju,1) = -strp_tmp - strm_tmp - str12ew & + + dxhy(i,j)*(-csigpne + csigmne) + dyhx(i,j)*csig12ne + + elseif (cc == 2) then ! T cell i+1,j + + strp_tmp = p25*dyt(i,j)*(p333*ssigpn + p166*ssigps) + strm_tmp = p25*dyt(i,j)*(p333*ssigmn + p166*ssigms) + + ! northwest (i+1,j) + Drheo(iu,ju,2) = strp_tmp + strm_tmp - str12we & + + dxhy(i,j)*(-csigpnw + csigmnw) + dyhx(i,j)*csig12nw + + elseif (cc == 3) then ! T cell i,j+1 + + strp_tmp = p25*dyt(i,j)*(p333*ssigps + p166*ssigpn) + strm_tmp = p25*dyt(i,j)*(p333*ssigms + p166*ssigmn) + + ! southeast (i,j+1) + Drheo(iu,ju,3) = -strp_tmp - strm_tmp + str12ew & + + dxhy(i,j)*(-csigpse + csigmse) + dyhx(i,j)*csig12se + + elseif (cc == 4) then ! T cell i+1,j+1 + + strp_tmp = p25*dyt(i,j)*(p333*ssigps + p166*ssigpn) + strm_tmp = p25*dyt(i,j)*(p333*ssigms + p166*ssigmn) + + ! southwest (i+1,j+1) + Drheo(iu,ju,4) = strp_tmp + strm_tmp + str12we & + + dxhy(i,j)*(-csigpsw + csigmsw) + dyhx(i,j)*csig12sw + + !----------------------------------------------------------------- + ! for dF/dy (v momentum) + !----------------------------------------------------------------- + + elseif (cc == 5) then ! T cell i,j + + strp_tmp = p25*dxt(i,j)*(p333*ssigpe + p166*ssigpw) + strm_tmp = p25*dxt(i,j)*(p333*ssigme + p166*ssigmw) + + ! northeast (i,j) + Drheo(iu,ju,5) = -strp_tmp + strm_tmp - str12ns & + - dyhx(i,j)*(csigpne + csigmne) + dxhy(i,j)*csig12ne + + elseif (cc == 6) then ! T cell i,j+1 + + strp_tmp = p25*dxt(i,j)*(p333*ssigpe + p166*ssigpw) + strm_tmp = p25*dxt(i,j)*(p333*ssigme + p166*ssigmw) + + ! 
southeast (i,j+1)
+ Drheo(iu,ju,6) = strp_tmp - strm_tmp - str12sn &
+ - dyhx(i,j)*(csigpse + csigmse) + dxhy(i,j)*csig12se
+
+ elseif (cc == 7) then ! T cell i+1,j
+
+ strp_tmp = p25*dxt(i,j)*(p333*ssigpw + p166*ssigpe)
+ strm_tmp = p25*dxt(i,j)*(p333*ssigmw + p166*ssigme)
+
+ ! northwest (i+1,j)
+ Drheo(iu,ju,7) = -strp_tmp + strm_tmp + str12ns &
+ - dyhx(i,j)*(csigpnw + csigmnw) + dxhy(i,j)*csig12nw
+
+ elseif (cc == 8) then ! T cell i+1,j+1
+
+ strp_tmp = p25*dxt(i,j)*(p333*ssigpw + p166*ssigpe)
+ strm_tmp = p25*dxt(i,j)*(p333*ssigmw + p166*ssigme)
+
+ ! southwest (i+1,j+1)
+ Drheo(iu,ju,8) = strp_tmp - strm_tmp + str12sn &
+ - dyhx(i,j)*(csigpsw + csigmsw) + dxhy(i,j)*csig12sw
+
+ endif
+
+ enddo ! ij
+
+ enddo ! cc
+
+ end subroutine formDiag_step1
+
+!=======================================================================
+
+! Form the diagonal of the matrix A(u,v) (second part of the computation)
+! Part 2: compute diagonal
+
+ subroutine formDiag_step2 (nx_block, ny_block, &
+ icellu , &
+ indxui , indxuj , &
+ Drheo , vrel , &
+ umassdti, &
+ uarear , Cb , &
+ diagx , diagy)
+
+ integer (kind=int_kind), intent(in) :: &
+ nx_block, ny_block, & ! block dimensions
+ icellu ! total count when iceumask is true
+
+ integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: &
+ indxui , & ! compressed index in i-direction
+ indxuj ! compressed index in j-direction
+
+ real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: &
+ vrel, & ! coefficient for tauw
+ Cb, & ! coefficient for basal stress
+ umassdti, & ! mass of U-cell/dt (kg/m^2 s)
+ uarear ! 1/uarea
+
+ real (kind=dbl_kind), dimension(nx_block,ny_block,8), intent(in) :: &
+ Drheo
+
+ real (kind=dbl_kind), dimension (nx_block,ny_block), intent(out) :: &
+ diagx , & ! Diagonal (x component) of the matrix A
+ diagy ! Diagonal (y component) of the matrix A
+
+ ! local variables
+
+ integer (kind=int_kind) :: &
+ i, j, ij
+
+ real (kind=dbl_kind) :: &
+ ccaimp , & ! intermediate variables
+ strintx, strinty ! diagonal contributions to the divergence
+
+ character(len=*), parameter :: subname = '(formDiag_step2)'
+
+ !-----------------------------------------------------------------
+ ! integrate the momentum equation
+ !-----------------------------------------------------------------
+
+ strintx = c0
+ strinty = c0
+
+ ! Be careful: Drheo contains 4 terms for u and 4 terms for v.
+ ! These 8 terms come from the surrounding T cells but are all
+ ! referenced to the i,j (u point):
+
+ ! Drheo(i,j,1) corresponds to str(i,j,1)
+ ! Drheo(i,j,2) corresponds to str(i+1,j,2)
+ ! Drheo(i,j,3) corresponds to str(i,j+1,3)
+ ! Drheo(i,j,4) corresponds to str(i+1,j+1,4)
+ ! Drheo(i,j,5) corresponds to str(i,j,5)
+ ! Drheo(i,j,6) corresponds to str(i,j+1,6)
+ ! Drheo(i,j,7) corresponds to str(i+1,j,7)
+ ! Drheo(i,j,8) corresponds to str(i+1,j+1,8)
+
+ do ij = 1, icellu
+ i = indxui(ij)
+ j = indxuj(ij)
+
+ ccaimp = umassdti(i,j) + vrel(i,j) * cosw + Cb(i,j) ! kg/m^2 s
+
+ strintx = uarear(i,j)* &
+ (Drheo(i,j,1) + Drheo(i,j,2) + Drheo(i,j,3) + Drheo(i,j,4))
+ strinty = uarear(i,j)* &
+ (Drheo(i,j,5) + Drheo(i,j,6) + Drheo(i,j,7) + Drheo(i,j,8))
+
+ diagx(i,j) = ccaimp - strintx
+ diagy(i,j) = ccaimp - strinty
+ enddo ! ij
+
+ end subroutine formDiag_step2
+
+!=======================================================================
+
+! 
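Illustrative note: the diagonal assembled by formDiag_step[12] is what
+! the 'diag' (Jacobi) branch of the 'precondition' routine below applies,
+! i.e. for an input vector (vx,vy) at each active U-point, schematically:
+!
+!    wx(i,j) = vx(i,j) / diagx(i,j)
+!    wy(i,j) = vy(i,j) / diagy(i,j)
+!
+! 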
Compute squared l^2 norm of a grid function (tpu,tpv) + + subroutine calc_L2norm_squared (nx_block, ny_block, & + icellu , & + indxui , indxuj , & + tpu , tpv , & + L2norm) + + integer (kind=int_kind), intent(in) :: & + nx_block, ny_block, & ! block dimensions + icellu ! total count when iceumask is true + + integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & + indxui , & ! compressed index in i-direction + indxuj ! compressed index in j-direction + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & + tpu , & ! x-component of vector grid function + tpv ! y-component of vector grid function + + real (kind=dbl_kind), intent(out) :: & + L2norm ! squared l^2 norm of vector grid function (tpu,tpv) + + ! local variables + + integer (kind=int_kind) :: & + i, j, ij + + character(len=*), parameter :: subname = '(calc_L2norm_squared)' + + !----------------------------------------------------------------- + ! compute squared l^2 norm of vector grid function (tpu,tpv) + !----------------------------------------------------------------- + + L2norm = c0 + + do ij = 1, icellu + i = indxui(ij) + j = indxuj(ij) + + L2norm = L2norm + tpu(i,j)**2 + tpv(i,j)**2 + enddo ! ij + + end subroutine calc_L2norm_squared + +!======================================================================= + +! Convert a grid function (tpu,tpv) to a one dimensional vector + + subroutine arrays_to_vec (nx_block, ny_block , & + nblocks , max_blocks, & + icellu , ntot , & + indxui , indxuj , & + tpu , tpv , & + outvec) + + integer (kind=int_kind), intent(in) :: & + nx_block, ny_block, & ! block dimensions + nblocks, & ! nb of blocks + max_blocks, & ! max nb of blocks + ntot ! size of problem for Anderson + + integer (kind=int_kind), dimension (max_blocks), intent(in) :: & + icellu + + integer (kind=int_kind), dimension (nx_block*ny_block, max_blocks), intent(in) :: & + indxui , & ! compressed index in i-direction + indxuj ! compressed index in j-direction + + real (kind=dbl_kind), dimension (nx_block,ny_block, max_blocks), intent(in) :: & + tpu , & ! x-component of vector + tpv ! y-component of vector + + real (kind=dbl_kind), dimension (ntot), intent(out) :: & + outvec ! output 1D vector + + ! local variables + + integer (kind=int_kind) :: & + i, j, iblk, tot, ij + + character(len=*), parameter :: subname = '(arrays_to_vec)' + + !----------------------------------------------------------------- + ! form vector (converts from max_blocks arrays to single vector) + !----------------------------------------------------------------- + + outvec(:) = c0 + tot = 0 + + do iblk = 1, nblocks + do ij = 1, icellu(iblk) + i = indxui(ij, iblk) + j = indxuj(ij, iblk) + tot = tot + 1 + outvec(tot) = tpu(i, j, iblk) + tot = tot + 1 + outvec(tot) = tpv(i, j, iblk) + enddo + enddo ! ij + + end subroutine arrays_to_vec + +!======================================================================= + +! Convert one dimensional vector to a grid function (tpu,tpv) + + subroutine vec_to_arrays (nx_block, ny_block , & + nblocks , max_blocks, & + icellu , ntot , & + indxui , indxuj , & + invec , & + tpu , tpv) + + integer (kind=int_kind), intent(in) :: & + nx_block, ny_block, & ! block dimensions + nblocks, & ! nb of blocks + max_blocks, & ! max nb of blocks + ntot ! size of problem for Anderson + + integer (kind=int_kind), dimension (max_blocks), intent(in) :: & + icellu + + integer (kind=int_kind), dimension (nx_block*ny_block, max_blocks), intent(in) :: & + indxui , & ! compressed index in i-direction + indxuj ! 
compressed index in j-direction + + real (kind=dbl_kind), dimension (ntot), intent(in) :: & + invec ! input 1D vector + + real (kind=dbl_kind), dimension (nx_block,ny_block, max_blocks), intent(out) :: & + tpu , & ! x-component of vector + tpv ! y-component of vector + + ! local variables + + integer (kind=int_kind) :: & + i, j, iblk, tot, ij + + character(len=*), parameter :: subname = '(vec_to_arrays)' + + !----------------------------------------------------------------- + ! form arrays (converts from vector to the max_blocks arrays) + !----------------------------------------------------------------- + + tpu(:,:,:) = c0 + tpv(:,:,:) = c0 + tot = 0 + + do iblk = 1, nblocks + do ij = 1, icellu(iblk) + i = indxui(ij, iblk) + j = indxuj(ij, iblk) + tot = tot + 1 + tpu(i, j, iblk) = invec(tot) + tot = tot + 1 + tpv(i, j, iblk) = invec(tot) + enddo + enddo! ij + + end subroutine vec_to_arrays + +!======================================================================= + +! Update Q and R factors after deletion of the 1st column of G_diff +! +! author: P. Blain ECCC +! +! adapted from : +! H. F. Walker, “Anderson Acceleration: Algorithms and Implementations” +! [Online]. Available: https://users.wpi.edu/~walker/Papers/anderson_accn_algs_imps.pdf + + subroutine qr_delete(Q, R) + + real (kind=dbl_kind), intent(inout) :: & + Q(:,:), & ! Q factor + R(:,:) ! R factor + + ! local variables + + integer (kind=int_kind) :: & + i, j, k, & ! loop indices + m, n ! size of Q matrix + + real (kind=dbl_kind) :: & + temp, c, s + + character(len=*), parameter :: subname = '(qr_delete)' + + n = size(Q, 1) + m = size(Q, 2) + do i = 1, m-1 + temp = sqrt(R(i, i+1)**2 + R(i+1, i+1)**2) + c = R(i , i+1) / temp + s = R(i+1, i+1) / temp + R(i , i+1) = temp + R(i+1, i+1) = 0 + if (i < m-1) then + do j = i+2, m + temp = c*R(i, j) + s*R(i+1, j) + R(i+1, j) = -s*R(i, j) + c*R(i+1, j) + R(i , j) = temp + enddo + endif + do k = 1, n + temp = c*Q(k, i) + s*Q(k, i+1); + Q(k, i+1) = -s*Q(k, i) + c*Q(k, i+1); + Q(k, i) = temp + enddo + enddo + R(:, 1:m-1) = R(:, 2:m) + + end subroutine qr_delete + +!======================================================================= + +! FGMRES: Flexible generalized minimum residual method (with restarts). +! Solves the linear system A x = b using GMRES with a varying (right) preconditioner +! +! authors: Stéphane Gaudreault, Abdessamad Qaddouri, Philippe Blain, ECCC + + subroutine fgmres (zetaD , & + Cb , vrel , & + umassdti , & + halo_info_mask , & + bx , by , & + diagx , diagy , & + tolerance, maxinner, & + maxouter , & + solx , soly , & + nbiter) + + use ice_boundary, only: ice_HaloUpdate + use ice_domain, only: maskhalo_dyn, halo_info + use ice_timers, only: ice_timer_start, ice_timer_stop, timer_bound + + real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks,4), intent(in) :: & + zetaD ! zetaD = 2*zeta (viscous coefficient) + + real (kind=dbl_kind), dimension(nx_block, ny_block, max_blocks), intent(in) :: & + vrel , & ! coefficient for tauw + Cb , & ! seabed stress coefficient + umassdti ! mass of U-cell/dte (kg/m^2 s) + + type (ice_halo), intent(in) :: & + halo_info_mask ! ghost cell update info for masked halo + + real (kind=dbl_kind), dimension(nx_block, ny_block, max_blocks), intent(in) :: & + bx , & ! Right hand side of the linear system (x components) + by , & ! Right hand side of the linear system (y components) + diagx , & ! Diagonal of the system matrix (x components) + diagy ! 
Diagonal of the system matrix (y components) + + real (kind=dbl_kind), intent(in) :: & + tolerance ! Tolerance to achieve. The algorithm terminates when the relative + ! residual is below tolerance + + integer (kind=int_kind), intent(in) :: & + maxinner, & ! Restart the method every maxinner inner (Arnoldi) iterations + maxouter ! Maximum number of outer (restarts) iterations + ! Iteration will stop after maxinner*maxouter Arnoldi steps + ! even if the specified tolerance has not been achieved + + real (kind=dbl_kind), dimension(nx_block, ny_block, max_blocks), intent(inout) :: & + solx , & ! Initial guess on input, approximate solution on output (x components) + soly ! Initial guess on input, approximate solution on output (y components) + + integer (kind=int_kind), intent(out) :: & + nbiter ! Total number of Arnoldi iterations performed + + ! local variables + + integer (kind=int_kind) :: & + iblk , & ! block index + ij , & ! index for indx[t|u][i|j] + i, j ! grid indices + + real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & + workspace_x , & ! work vector (x components) + workspace_y ! work vector (y components) + + real (kind=dbl_kind), dimension (max_blocks) :: & + norm_squared ! array to accumulate squared norm of grid function over blocks + + real (kind=dbl_kind), dimension(nx_block, ny_block, max_blocks, maxinner+1) :: & + arnoldi_basis_x , & ! Arnoldi basis (x components) + arnoldi_basis_y ! Arnoldi basis (y components) + + real (kind=dbl_kind), dimension(nx_block, ny_block, max_blocks, maxinner) :: & + orig_basis_x , & ! original basis (x components) + orig_basis_y ! original basis (y components) + + real (kind=dbl_kind) :: & + norm_residual , & ! current L^2 norm of residual vector + inverse_norm , & ! inverse of the norm of a vector + nu, t ! local temporary values + + integer (kind=int_kind) :: & + initer , & ! inner (Arnoldi) loop counter + outiter , & ! outer (restarts) loop counter + nextit , & ! nextit == initer+1 + it, k, ii, jj ! reusable loop counters + + real (kind=dbl_kind), dimension(maxinner+1) :: & + rot_cos , & ! cosine elements of Givens rotations + rot_sin , & ! sine elements of Givens rotations + rhs_hess ! right hand side vector of the Hessenberg (least squares) system + + real (kind=dbl_kind), dimension(maxinner+1, maxinner) :: & + hessenberg ! system matrix of the Hessenberg (least squares) system + + character (len=char_len) :: & + precond_type ! type of preconditioner + + real (kind=dbl_kind) :: & + relative_tolerance ! relative_tolerance, i.e. tolerance*norm(initial residual) + + character(len=*), parameter :: subname = '(fgmres)' + + ! Here we go ! + + ! Initialize + outiter = 0 + nbiter = 0 + + norm_squared = c0 + precond_type = precond + + ! Cells with no ice should be zero-initialized + workspace_x = c0 + workspace_y = c0 + arnoldi_basis_x = c0 + arnoldi_basis_y = c0 + + ! 
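Sketch of the method (in the notation of the Saad reference cited
+ ! further down): FGMRES builds the Arnoldi relation
+ !
+ !    A * Z_m = V_{m+1} * \bar{H}_m
+ !
+ ! where the columns of V_{m+1} are the normalized Arnoldi vectors
+ ! (arnoldi_basis_[xy]) and the columns of Z_m are their preconditioned
+ ! counterparts (orig_basis_[xy]).  The iterate is then updated as
+ ! x_m = x_0 + Z_m * y_m, with y_m minimizing ||beta*e_1 - \bar{H}_m * y||_2
+ ! (beta = initial residual norm, stored in rhs_hess(1)), a least squares
+ ! problem solved via the Givens rotations applied to 'hessenberg' in the
+ ! inner loop below.
+
+ ! 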
Residual of the initial iterate + + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + call matvec (nx_block , ny_block , & + icellu (iblk) , icellt (iblk), & + indxui (:,iblk) , indxuj (:,iblk), & + indxti (:,iblk) , indxtj (:,iblk), & + dxt (:,:,iblk) , dyt (:,:,iblk), & + dxhy (:,:,iblk) , dyhx (:,:,iblk), & + cxp (:,:,iblk) , cyp (:,:,iblk), & + cxm (:,:,iblk) , cym (:,:,iblk), & + solx (:,:,iblk) , soly (:,:,iblk), & + vrel (:,:,iblk) , Cb (:,:,iblk), & + zetaD (:,:,iblk,:), & + umassdti (:,:,iblk) , fm (:,:,iblk), & + uarear (:,:,iblk) , & + workspace_x(:,:,iblk) , workspace_y(:,:,iblk)) + call residual_vec (nx_block , ny_block , & + icellu (iblk), & + indxui (:,iblk), indxuj (:,iblk), & + bx (:,:,iblk), by (:,:,iblk), & + workspace_x(:,:,iblk), workspace_y(:,:,iblk), & + arnoldi_basis_x (:,:,iblk, 1), & + arnoldi_basis_y (:,:,iblk, 1)) + enddo + !$OMP END PARALLEL DO + + ! Start outer (restarts) loop + do + ! Compute norm of initial residual + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + call calc_L2norm_squared(nx_block , ny_block , & + icellu (iblk), & + indxui (:,iblk), indxuj(:,iblk), & + arnoldi_basis_x(:,:,iblk, 1) , & + arnoldi_basis_y(:,:,iblk, 1) , & + norm_squared(iblk)) + + enddo + !$OMP END PARALLEL DO + norm_residual = sqrt(global_sum(sum(norm_squared), distrb_info)) + + if (my_task == master_task .and. monitor_fgmres) then + write(nu_diag, '(a,i4,a,d26.16)') "monitor_fgmres: iter_fgmres= ", nbiter, & + " fgmres_L2norm= ", norm_residual + endif + + ! Current guess is a good enough solution TODO: reactivate and test this + ! if (norm_residual < tolerance) then + ! return + ! end if + + ! Normalize the first Arnoldi vector + inverse_norm = c1 / norm_residual + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + do ij = 1, icellu(iblk) + i = indxui(ij, iblk) + j = indxuj(ij, iblk) + + arnoldi_basis_x(i, j, iblk, 1) = arnoldi_basis_x(i, j, iblk, 1) * inverse_norm + arnoldi_basis_y(i, j, iblk, 1) = arnoldi_basis_y(i, j, iblk, 1) * inverse_norm + enddo ! ij + enddo + !$OMP END PARALLEL DO + + if (outiter == 0) then + relative_tolerance = tolerance * norm_residual + end if + + ! Initialize 1-st term of RHS of Hessenberg system + rhs_hess(1) = norm_residual + rhs_hess(2:) = c0 + + initer = 0 + + ! Start of inner (Arnoldi) loop + do + + nbiter = nbiter + 1 + initer = initer + 1 + nextit = initer + 1 + ! precondition the current Arnoldi vector + call precondition(zetaD , & + Cb , vrel , & + umassdti , & + arnoldi_basis_x(:,:,:,initer), & + arnoldi_basis_y(:,:,:,initer), & + diagx , diagy , & + precond_type, & + workspace_x , workspace_y) + orig_basis_x(:,:,:,initer) = workspace_x + orig_basis_y(:,:,:,initer) = workspace_y + + ! 
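Note: keeping the preconditioned vectors in orig_basis_[xy] is what
+ ! makes the method "flexible": the preconditioner may change from one
+ ! Arnoldi iteration to the next (e.g. when precond is itself an inexact
+ ! iterative solve such as pgmres), so the final solution must be
+ ! recombined from these stored vectors.  Schematically:
+ !
+ !    GMRES :  x_m = x_0 + M^{-1} * (V_m * y_m)   (fixed M, cf. pgmres)
+ !    FGMRES:  x_m = x_0 + Z_m * y_m              (Z_j = M_j^{-1} * v_j)
+
+ ! 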
Update workspace with boundary values + call stack_velocity_field(workspace_x, workspace_y, fld2) + call ice_timer_start(timer_bound) + if (maskhalo_dyn) then + call ice_HaloUpdate (fld2, halo_info_mask, & + field_loc_NEcorner, field_type_vector) + else + call ice_HaloUpdate (fld2, halo_info, & + field_loc_NEcorner, field_type_vector) + endif + call ice_timer_stop(timer_bound) + call unstack_velocity_field(fld2, workspace_x, workspace_y) + + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + call matvec (nx_block , ny_block , & + icellu (iblk) , icellt (iblk), & + indxui (:,iblk) , indxuj (:,iblk), & + indxti (:,iblk) , indxtj (:,iblk), & + dxt (:,:,iblk) , dyt (:,:,iblk), & + dxhy (:,:,iblk) , dyhx (:,:,iblk), & + cxp (:,:,iblk) , cyp (:,:,iblk), & + cxm (:,:,iblk) , cym (:,:,iblk), & + workspace_x(:,:,iblk) , workspace_y(:,:,iblk), & + vrel (:,:,iblk) , Cb (:,:,iblk), & + zetaD (:,:,iblk,:), & + umassdti (:,:,iblk) , fm (:,:,iblk), & + uarear (:,:,iblk) , & + arnoldi_basis_x(:,:,iblk,nextit), & + arnoldi_basis_y(:,:,iblk,nextit)) + enddo + !$OMP END PARALLEL DO + + ! Orthogonalize the new vector + call orthogonalize(ortho_type , initer , & + nextit , maxinner , & + arnoldi_basis_x, arnoldi_basis_y, & + hessenberg) + + ! Compute norm of new Arnoldi vector and update Hessenberg matrix + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + call calc_L2norm_squared(nx_block , ny_block , & + icellu (iblk), & + indxui (:,iblk), indxuj(:, iblk) , & + arnoldi_basis_x(:,:,iblk, nextit), & + arnoldi_basis_y(:,:,iblk, nextit), & + norm_squared(iblk)) + enddo + !$OMP END PARALLEL DO + hessenberg(nextit,initer) = sqrt(global_sum(sum(norm_squared), distrb_info)) + + ! Watch out for happy breakdown + if (.not. almost_zero( hessenberg(nextit,initer) ) ) then + ! Normalize next Arnoldi vector + inverse_norm = c1 / hessenberg(nextit,initer) + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + do ij = 1, icellu(iblk) + i = indxui(ij, iblk) + j = indxuj(ij, iblk) + + arnoldi_basis_x(i, j, iblk, nextit) = arnoldi_basis_x(i, j, iblk, nextit)*inverse_norm + arnoldi_basis_y(i, j, iblk, nextit) = arnoldi_basis_y(i, j, iblk, nextit)*inverse_norm + enddo ! ij + enddo + !$OMP END PARALLEL DO + end if + + ! Apply previous Givens rotation to the last column of the Hessenberg matrix + if (initer > 1) then + do k = 2, initer + t = hessenberg(k-1, initer) + hessenberg(k-1, initer) = rot_cos(k-1)*t + rot_sin(k-1)*hessenberg(k, initer) + hessenberg(k, initer) = -rot_sin(k-1)*t + rot_cos(k-1)*hessenberg(k, initer) + end do + end if + + ! Compute and apply new Givens rotation + nu = sqrt(hessenberg(initer,initer)**2 + hessenberg(nextit,initer)**2) + if (.not. almost_zero(nu)) then + rot_cos(initer) = hessenberg(initer,initer) / nu + rot_sin(initer) = hessenberg(nextit,initer) / nu + + rhs_hess(nextit) = -rot_sin(initer) * rhs_hess(initer) + rhs_hess(initer) = rot_cos(initer) * rhs_hess(initer) + + hessenberg(initer,initer) = rot_cos(initer) * hessenberg(initer,initer) + rot_sin(initer) * hessenberg(nextit,initer) + end if + + ! Check for convergence + norm_residual = abs(rhs_hess(nextit)) + + if (my_task == master_task .and. monitor_fgmres) then + write(nu_diag, '(a,i4,a,d26.16)') "monitor_fgmres: iter_fgmres= ", nbiter, & + " fgmres_L2norm= ", norm_residual + endif + + if ((initer >= maxinner) .or. (norm_residual <= relative_tolerance)) then + exit + endif + + end do ! end of inner (Arnoldi) loop + + ! At this point either the maximum number of inner iterations + ! 
was reached or the absolute residual is below the scaled tolerance.
+
+ ! Solve the (now upper triangular) system "hessenberg * sol_hess = rhs_hess"
+ ! (sol_hess is stored in rhs_hess)
+ rhs_hess(initer) = rhs_hess(initer) / hessenberg(initer,initer)
+ do ii = 2, initer
+ k = initer - ii + 1
+ t = rhs_hess(k)
+ do j = k + 1, initer
+ t = t - hessenberg(k,j) * rhs_hess(j)
+ end do
+ rhs_hess(k) = t / hessenberg(k,k)
+ end do
+
+ ! Form linear combination to get new solution iterate
+ do it = 1, initer
+ t = rhs_hess(it)
+ !$OMP PARALLEL DO PRIVATE(iblk)
+ do iblk = 1, nblocks
+ do ij = 1, icellu(iblk)
+ i = indxui(ij, iblk)
+ j = indxuj(ij, iblk)
+
+ solx(i, j, iblk) = solx(i, j, iblk) + t * orig_basis_x(i, j, iblk, it)
+ soly(i, j, iblk) = soly(i, j, iblk) + t * orig_basis_y(i, j, iblk, it)
+ enddo ! ij
+ enddo
+ !$OMP END PARALLEL DO
+ end do
+
+ ! Increment outer loop counter and check for convergence
+ outiter = outiter + 1
+ if (norm_residual <= relative_tolerance .or. outiter >= maxouter) then
+ return
+ end if
+
+ ! Solution is not convergent: compute residual vector and continue.
+
+ ! The residual vector is computed here using (see Saad p. 177):
+ ! \begin{equation}
+ ! r = V_{m+1} * Q_m^T * (\gamma_{m+1} * e_{m+1})
+ ! \end{equation}
+ ! where:
+ ! $r$ is the residual
+ ! $V_{m+1}$ is a matrix whose columns are the Arnoldi vectors from 1 to nextit (m+1)
+ ! $Q_m$ is the product of the Givens rotations: Q_m = G_m G_{m-1} ... G_1
+ ! $\gamma_{m+1}$ is the last element of rhs_hess
+ ! $e_{m+1}$ is the unit vector (0, 0, ..., 1)^T \in \reals^{m+1}
+
+ ! Apply the Givens rotation in reverse order to g := \gamma_{m+1} * e_{m+1},
+ ! store the result in rhs_hess
+ do it = 1, initer
+ jj = nextit - it + 1
+ rhs_hess(jj-1) = -rot_sin(jj-1) * rhs_hess(jj) ! + rot_cos(jj-1) * g(jj-1) (== 0)
+ rhs_hess(jj) = rot_cos(jj-1) * rhs_hess(jj) ! + rot_sin(jj-1) * g(jj-1) (== 0)
+ end do
+
+ ! Compute the residual by multiplying V_{m+1} and rhs_hess
+ workspace_x = c0
+ workspace_y = c0
+ do it = 1, nextit
+ !$OMP PARALLEL DO PRIVATE(iblk)
+ do iblk = 1, nblocks
+ do ij = 1, icellu(iblk)
+ i = indxui(ij, iblk)
+ j = indxuj(ij, iblk)
+
+ workspace_x(i, j, iblk) = workspace_x(i, j, iblk) + rhs_hess(it) * arnoldi_basis_x(i, j, iblk, it)
+ workspace_y(i, j, iblk) = workspace_y(i, j, iblk) + rhs_hess(it) * arnoldi_basis_y(i, j, iblk, it)
+ enddo ! ij
+ enddo
+ !$OMP END PARALLEL DO
+ arnoldi_basis_x(:,:,:,1) = workspace_x
+ arnoldi_basis_y(:,:,:,1) = workspace_y
+ end do
+ end do ! end of outer (restarts) loop
+
+ end subroutine fgmres
+
+!=======================================================================
+
+! PGMRES: Right-preconditioned generalized minimum residual method (with restarts).
+! Solves the linear system A x = b using GMRES with a right preconditioner
+!
+! authors: Stéphane Gaudreault, Abdessamad Qaddouri, Philippe Blain, ECCC
+
+ subroutine pgmres (zetaD , &
+ Cb , vrel , &
+ umassdti , &
+ bx , by , &
+ diagx , diagy , &
+ tolerance, maxinner, &
+ maxouter , &
+ solx , soly , &
+ nbiter)
+
+ real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks,4), intent(in) :: &
+ zetaD ! zetaD = 2*zeta (viscous coefficient)
+
+ real (kind=dbl_kind), dimension(nx_block, ny_block, max_blocks), intent(in) :: &
+ vrel , & ! coefficient for tauw
+ Cb , & ! seabed stress coefficient
+ umassdti ! mass of U-cell/dte (kg/m^2 s)
+
+ real (kind=dbl_kind), dimension(nx_block, ny_block, max_blocks), intent(in) :: &
+ bx , & ! Right hand side of the linear system (x components)
+ by ! 
Right hand side of the linear system (y components) + + real (kind=dbl_kind), dimension(nx_block, ny_block, max_blocks), intent(in) :: & + diagx , & ! Diagonal of the system matrix (x components) + diagy ! Diagonal of the system matrix (y components) + + real (kind=dbl_kind), intent(in) :: & + tolerance ! Tolerance to achieve. The algorithm terminates when the relative + ! residual is below tolerance + + integer (kind=int_kind), intent(in) :: & + maxinner, & ! Restart the method every maxinner inner (Arnoldi) iterations + maxouter ! Maximum number of outer (restarts) iterations + ! Iteration will stop after maxinner*maxouter Arnoldi steps + ! even if the specified tolerance has not been achieved + + real (kind=dbl_kind), dimension(nx_block, ny_block, max_blocks), intent(inout) :: & + solx , & ! Initial guess on input, approximate solution on output (x components) + soly ! Initial guess on input, approximate solution on output (y components) + + integer (kind=int_kind), intent(out) :: & + nbiter ! Total number of Arnoldi iterations performed + + ! local variables + + integer (kind=int_kind) :: & + iblk , & ! block index + ij , & ! index for indx[t|u][i|j] + i, j ! grid indices + + real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & + workspace_x , & ! work vector (x components) + workspace_y ! work vector (y components) + + real (kind=dbl_kind), dimension (max_blocks) :: & + norm_squared ! array to accumulate squared norm of grid function over blocks + + real (kind=dbl_kind), dimension(nx_block, ny_block, max_blocks, maxinner+1) :: & + arnoldi_basis_x , & ! Arnoldi basis (x components) + arnoldi_basis_y ! Arnoldi basis (y components) + + real (kind=dbl_kind) :: & + norm_residual , & ! current L^2 norm of residual vector + inverse_norm , & ! inverse of the norm of a vector + nu, t ! local temporary values + + integer (kind=int_kind) :: & + initer , & ! inner (Arnoldi) loop counter + outiter , & ! outer (restarts) loop counter + nextit , & ! nextit == initer+1 + it, k, ii, jj ! reusable loop counters + + real (kind=dbl_kind), dimension(maxinner+1) :: & + rot_cos , & ! cosine elements of Givens rotations + rot_sin , & ! sine elements of Givens rotations + rhs_hess ! right hand side vector of the Hessenberg (least squares) system + + real (kind=dbl_kind), dimension(maxinner+1, maxinner) :: & + hessenberg ! system matrix of the Hessenberg (least squares) system + + character(len=char_len) :: & + precond_type , & ! type of preconditioner + ortho_type ! type of orthogonalization + + real (kind=dbl_kind) :: & + relative_tolerance ! relative_tolerance, i.e. tolerance*norm(initial residual) + + character(len=*), parameter :: subname = '(pgmres)' + + ! Here we go ! + + ! Initialize + outiter = 0 + nbiter = 0 + + norm_squared = c0 + precond_type = 'diag' ! Jacobi preconditioner + ortho_type = 'cgs' ! classical gram-schmidt TODO: try with MGS + + ! Cells with no ice should be zero-initialized + workspace_x = c0 + workspace_y = c0 + arnoldi_basis_x = c0 + arnoldi_basis_y = c0 + + ! 
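Sketch of right preconditioning (M = the fixed diagonal built by
+ ! formDiag_step[12]): pgmres effectively solves
+ !
+ !    A * M^{-1} * w = b ,   then   x = M^{-1} * w
+ !
+ ! so the Arnoldi basis lives in the preconditioned space, and one
+ ! extra call to 'precondition' converts the final linear combination
+ ! of basis vectors back to x (see after the inner loop below).
+
+ ! 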
Residual of the initial iterate + + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + call matvec (nx_block , ny_block , & + icellu (iblk) , icellt (iblk), & + indxui (:,iblk) , indxuj (:,iblk), & + indxti (:,iblk) , indxtj (:,iblk), & + dxt (:,:,iblk) , dyt (:,:,iblk), & + dxhy (:,:,iblk) , dyhx (:,:,iblk), & + cxp (:,:,iblk) , cyp (:,:,iblk), & + cxm (:,:,iblk) , cym (:,:,iblk), & + solx (:,:,iblk) , soly (:,:,iblk), & + vrel (:,:,iblk) , Cb (:,:,iblk), & + zetaD (:,:,iblk,:), & + umassdti (:,:,iblk) , fm (:,:,iblk), & + uarear (:,:,iblk) , & + workspace_x(:,:,iblk) , workspace_y(:,:,iblk)) + call residual_vec (nx_block , ny_block , & + icellu (iblk), & + indxui (:,iblk), indxuj (:,iblk), & + bx (:,:,iblk), by (:,:,iblk), & + workspace_x(:,:,iblk), workspace_y(:,:,iblk), & + arnoldi_basis_x (:,:,iblk, 1), & + arnoldi_basis_y (:,:,iblk, 1)) + enddo + !$OMP END PARALLEL DO + + ! Start outer (restarts) loop + do + ! Compute norm of initial residual + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + call calc_L2norm_squared(nx_block , ny_block , & + icellu (iblk), & + indxui (:,iblk), indxuj(:, iblk), & + arnoldi_basis_x(:,:,iblk, 1), & + arnoldi_basis_y(:,:,iblk, 1), & + norm_squared(iblk)) + + enddo + !$OMP END PARALLEL DO + norm_residual = sqrt(global_sum(sum(norm_squared), distrb_info)) + + if (my_task == master_task .and. monitor_pgmres) then + write(nu_diag, '(a,i4,a,d26.16)') "monitor_pgmres: iter_pgmres= ", nbiter, & + " pgmres_L2norm= ", norm_residual + endif + + ! Current guess is a good enough solution + ! if (norm_residual < tolerance) then + ! return + ! end if + + ! Normalize the first Arnoldi vector + inverse_norm = c1 / norm_residual + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + do ij = 1, icellu(iblk) + i = indxui(ij, iblk) + j = indxuj(ij, iblk) + + arnoldi_basis_x(i, j, iblk, 1) = arnoldi_basis_x(i, j, iblk, 1) * inverse_norm + arnoldi_basis_y(i, j, iblk, 1) = arnoldi_basis_y(i, j, iblk, 1) * inverse_norm + enddo ! ij + enddo + !$OMP END PARALLEL DO + + if (outiter == 0) then + relative_tolerance = tolerance * norm_residual + end if + + ! Initialize 1-st term of RHS of Hessenberg system + rhs_hess(1) = norm_residual + rhs_hess(2:) = c0 + + initer = 0 + + ! Start of inner (Arnoldi) loop + do + + nbiter = nbiter + 1 + initer = initer + 1 + nextit = initer + 1 + + ! precondition the current Arnoldi vector + call precondition(zetaD , & + Cb , vrel , & + umassdti , & + arnoldi_basis_x(:,:,:,initer), & + arnoldi_basis_y(:,:,:,initer), & + diagx , diagy , & + precond_type, & + workspace_x , workspace_y) + + ! NOTE: halo updates for (workspace_x, workspace_y) + ! are skipped here for efficiency since this is just a preconditioner + + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + call matvec (nx_block , ny_block , & + icellu (iblk) , icellt (iblk), & + indxui (:,iblk) , indxuj (:,iblk), & + indxti (:,iblk) , indxtj (:,iblk), & + dxt (:,:,iblk) , dyt (:,:,iblk), & + dxhy (:,:,iblk) , dyhx (:,:,iblk), & + cxp (:,:,iblk) , cyp (:,:,iblk), & + cxm (:,:,iblk) , cym (:,:,iblk), & + workspace_x(:,:,iblk) , workspace_y(:,:,iblk), & + vrel (:,:,iblk) , Cb (:,:,iblk), & + zetaD (:,:,iblk,:), & + umassdti (:,:,iblk) , fm (:,:,iblk), & + uarear (:,:,iblk) , & + arnoldi_basis_x(:,:,iblk,nextit), & + arnoldi_basis_y(:,:,iblk,nextit)) + enddo + !$OMP END PARALLEL DO + + ! Orthogonalize the new vector + call orthogonalize(ortho_type , initer , & + nextit , maxinner , & + arnoldi_basis_x, arnoldi_basis_y, & + hessenberg) + + ! 
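With ortho_type = 'cgs', 'orthogonalize' performs classical
+ ! Gram-Schmidt; schematically, for the new vector w in
+ ! arnoldi_basis_[xy](:,:,:,nextit) and previous basis vectors v_k,
+ !
+ !    hessenberg(k,initer) = <w, v_k> ,   k = 1, ..., initer
+ !    w = w - sum_k hessenberg(k,initer) * v_k
+ !
+ ! leaving the still-unnormalized w in place; its norm then becomes
+ ! hessenberg(nextit,initer) just below.
+
+ ! 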
Compute norm of new Arnoldi vector and update Hessenberg matrix + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + call calc_L2norm_squared(nx_block , ny_block , & + icellu (iblk), & + indxui (:,iblk), indxuj(:, iblk) , & + arnoldi_basis_x(:,:,iblk, nextit), & + arnoldi_basis_y(:,:,iblk, nextit), & + norm_squared(iblk)) + enddo + !$OMP END PARALLEL DO + hessenberg(nextit,initer) = sqrt(global_sum(sum(norm_squared), distrb_info)) + + ! Watch out for happy breakdown + if (.not. almost_zero( hessenberg(nextit,initer) ) ) then + ! Normalize next Arnoldi vector + inverse_norm = c1 / hessenberg(nextit,initer) + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + do ij = 1, icellu(iblk) + i = indxui(ij, iblk) + j = indxuj(ij, iblk) + + arnoldi_basis_x(i, j, iblk, nextit) = arnoldi_basis_x(i, j, iblk, nextit)*inverse_norm + arnoldi_basis_y(i, j, iblk, nextit) = arnoldi_basis_y(i, j, iblk, nextit)*inverse_norm + enddo ! ij + enddo + !$OMP END PARALLEL DO + end if + + ! Apply previous Givens rotation to the last column of the Hessenberg matrix + if (initer > 1) then + do k = 2, initer + t = hessenberg(k-1, initer) + hessenberg(k-1, initer) = rot_cos(k-1)*t + rot_sin(k-1)*hessenberg(k, initer) + hessenberg(k, initer) = -rot_sin(k-1)*t + rot_cos(k-1)*hessenberg(k, initer) + end do + end if + + ! Compute and apply new Givens rotation + nu = sqrt(hessenberg(initer,initer)**2 + hessenberg(nextit,initer)**2) + if (.not. almost_zero(nu)) then + rot_cos(initer) = hessenberg(initer,initer) / nu + rot_sin(initer) = hessenberg(nextit,initer) / nu + + rhs_hess(nextit) = -rot_sin(initer) * rhs_hess(initer) + rhs_hess(initer) = rot_cos(initer) * rhs_hess(initer) + + hessenberg(initer,initer) = rot_cos(initer) * hessenberg(initer,initer) + rot_sin(initer) * hessenberg(nextit,initer) + end if + + ! Check for convergence + norm_residual = abs(rhs_hess(nextit)) + + if (my_task == master_task .and. monitor_pgmres) then + write(nu_diag, '(a,i4,a,d26.16)') "monitor_pgmres: iter_pgmres= ", nbiter, & + " pgmres_L2norm= ", norm_residual + endif + + if ((initer >= maxinner) .or. (norm_residual <= relative_tolerance)) then + exit + endif + + end do ! end of inner (Arnoldi) loop + + ! At this point either the maximum number of inner iterations + ! was reached or the absolute residual is below the scaled tolerance. + + ! Solve the (now upper triangular) system "hessenberg * sol_hess = rhs_hess" + ! (sol_hess is stored in rhs_hess) + rhs_hess(initer) = rhs_hess(initer) / hessenberg(initer,initer) + do ii = 2, initer + k = initer - ii + 1 + t = rhs_hess(k) + do j = k + 1, initer + t = t - hessenberg(k,j) * rhs_hess(j) + end do + rhs_hess(k) = t / hessenberg(k,k) + end do + + ! Form linear combination to get new solution iterate + workspace_x = c0 + workspace_y = c0 + do it = 1, initer + t = rhs_hess(it) + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + do ij = 1, icellu(iblk) + i = indxui(ij, iblk) + j = indxuj(ij, iblk) + + workspace_x(i, j, iblk) = workspace_x(i, j, iblk) + t * arnoldi_basis_x(i, j, iblk, it) + workspace_y(i, j, iblk) = workspace_y(i, j, iblk) + t * arnoldi_basis_y(i, j, iblk, it) + enddo ! ij + enddo + !$OMP END PARALLEL DO + end do + + ! Call preconditioner + call precondition(zetaD , & + Cb , vrel , & + umassdti , & + workspace_x , workspace_y, & + diagx , diagy , & + precond_type, & + workspace_x , workspace_y) + + solx = solx + workspace_x + soly = soly + workspace_y + + ! 
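+      ! Editorial note (sketch of the update above, for reference): the
+      ! triangular solve and the linear combination implement
+      ! \begin{equation}
+      !    x = x_0 + M^{-1} V_m y
+      ! \end{equation}
+      ! where y solves the small least squares problem; with right
+      ! preconditioning only this final correction needs the preconditioner,
+      ! which is what the call to precondition just above provides.
+      ! 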
Increment outer loop counter and check for convergence
+      outiter = outiter + 1
+      if (norm_residual <= relative_tolerance .or. outiter >= maxouter) then
+         return
+      end if
+
+      ! Solution has not converged yet: compute the residual vector and restart.
+
+      ! The residual vector is computed here using (see Saad p. 177):
+      ! \begin{equation}
+      !    r = V_{m+1} * Q_m^T * (\gamma_{m+1} * e_{m+1})
+      ! \end{equation}
+      ! where:
+      ! $r$ is the residual
+      ! $V_{m+1}$ is a matrix whose columns are the Arnoldi vectors from 1 to nextit (m+1)
+      ! $Q_m$ is the product of the Givens rotations: Q_m = G_m G_{m-1} ... G_1
+      ! $\gamma_{m+1}$ is the last element of rhs_hess
+      ! $e_{m+1}$ is the unit vector (0, 0, ..., 1)^T \in \reals^{m+1}
+
+      ! Apply the Givens rotations in reverse order to g := \gamma_{m+1} * e_{m+1},
+      ! store the result in rhs_hess
+      do it = 1, initer
+         jj = nextit - it + 1
+         rhs_hess(jj-1) = -rot_sin(jj-1) * rhs_hess(jj) ! + rot_cos(jj-1) * g(jj-1) (== 0)
+         rhs_hess(jj)   =  rot_cos(jj-1) * rhs_hess(jj) ! + rot_sin(jj-1) * g(jj-1) (== 0)
+      end do
+
+      ! Compute the residual by multiplying V_{m+1} and rhs_hess
+      workspace_x = c0
+      workspace_y = c0
+      do it = 1, nextit
+         !$OMP PARALLEL DO PRIVATE(iblk)
+         do iblk = 1, nblocks
+            do ij = 1, icellu(iblk)
+               i = indxui(ij, iblk)
+               j = indxuj(ij, iblk)
+
+               workspace_x(i, j, iblk) = workspace_x(i, j, iblk) + rhs_hess(it) * arnoldi_basis_x(i, j, iblk, it)
+               workspace_y(i, j, iblk) = workspace_y(i, j, iblk) + rhs_hess(it) * arnoldi_basis_y(i, j, iblk, it)
+            enddo ! ij
+         enddo
+         !$OMP END PARALLEL DO
+      end do
+      ! Store the residual as the first Arnoldi vector for the next restart
+      arnoldi_basis_x(:,:,:,1) = workspace_x
+      arnoldi_basis_y(:,:,:,1) = workspace_y
+   end do ! end of outer (restarts) loop
+
+   end subroutine pgmres
+
+!=======================================================================
+
+! Generic routine to precondition a vector
+!
+! authors: Philippe Blain, ECCC
+
+   subroutine precondition(zetaD       , &
+                           Cb   , vrel , &
+                           umassdti    , &
+                           vx   , vy   , &
+                           diagx, diagy, &
+                           precond_type, &
+                           wx   , wy)
+
+      real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks,4), intent(in) :: &
+         zetaD ! zetaD = 2*zeta (viscous coefficient)
+
+      real (kind=dbl_kind), dimension(nx_block, ny_block, max_blocks), intent(in) :: &
+         vrel     , & ! coefficient for tauw
+         Cb       , & ! seabed stress coefficient
+         umassdti     ! mass of U-cell/dte (kg/m^2 s)
+
+      real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks), intent(in) :: &
+         vx , & ! input vector (x components)
+         vy     ! input vector (y components)
+
+      real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks), intent(in) :: &
+         diagx , & ! diagonal of the system matrix (x components)
+         diagy     ! diagonal of the system matrix (y components)
+
+      character (len=char_len), intent(in) :: &
+         precond_type ! type of preconditioner
+
+      real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks), intent(inout) :: &
+         wx , & ! preconditioned vector (x components)
+         wy     ! preconditioned vector (y components)
+
+      ! local variables
+
+      integer (kind=int_kind) :: &
+         iblk , & ! block index
+         ij   , & ! compressed index
+         i, j     ! grid indices
+
+      real (kind=dbl_kind) :: &
+         tolerance ! Tolerance for PGMRES
+
+      integer (kind=int_kind) :: &
+         maxinner ! Restart parameter for PGMRES
+
+      integer (kind=int_kind) :: &
+         maxouter ! Maximum number of outer iterations for PGMRES
+
+      integer (kind=int_kind) :: &
+         nbiter ! Total number of PGMRES iterations performed
+
+      character(len=*), parameter :: subname = '(precondition)'
+
+      if (precond_type == 'ident') then ! 
identity (no preconditioner) + wx = vx + wy = vy + elseif (precond_type == 'diag') then ! Jacobi preconditioner (diagonal) + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + do ij = 1, icellu(iblk) + i = indxui(ij, iblk) + j = indxuj(ij, iblk) + + wx(i,j,iblk) = vx(i,j,iblk) / diagx(i,j,iblk) + wy(i,j,iblk) = vy(i,j,iblk) / diagy(i,j,iblk) + enddo ! ij + enddo + !$OMP END PARALLEL DO + elseif (precond_type == 'pgmres') then ! PGMRES (Jacobi-preconditioned GMRES) + ! Initialize preconditioned vector to 0 ! TODO: try with wx = vx or vx/diagx + wx = c0 + wy = c0 + tolerance = reltol_pgmres + maxinner = dim_pgmres + maxouter = maxits_pgmres + call pgmres (zetaD, & + Cb , vrel , & + umassdti , & + vx , vy , & + diagx , diagy , & + tolerance, maxinner, & + maxouter , & + wx , wy , & + nbiter) + else + call abort_ice(error_message='wrong preconditioner in ' // subname, & + file=__FILE__, line=__LINE__) + endif + end subroutine precondition + +!======================================================================= + +! Generic routine to orthogonalize a vector (arnoldi_basis_[xy](:, :, :, nextit)) +! against a set of vectors (arnoldi_basis_[xy](:, :, :, 1:initer)) +! +! authors: Philippe Blain, ECCC + + subroutine orthogonalize(ortho_type , initer , & + nextit , maxinner , & + arnoldi_basis_x, arnoldi_basis_y, & + hessenberg) + + character(len=*), intent(in) :: & + ortho_type ! type of orthogonalization + + integer (kind=int_kind), intent(in) :: & + initer , & ! inner (Arnoldi) loop counter + nextit , & ! nextit == initer+1 + maxinner ! Restart the method every maxinner inner iterations + + real (kind=dbl_kind), dimension(nx_block, ny_block, max_blocks, maxinner+1), intent(inout) :: & + arnoldi_basis_x , & ! arnoldi basis (x components) + arnoldi_basis_y ! arnoldi basis (y components) + + real (kind=dbl_kind), dimension(maxinner+1, maxinner), intent(inout) :: & + hessenberg ! system matrix of the Hessenberg (least squares) system + + ! local variables + + integer (kind=int_kind) :: & + it , & ! reusable loop counter + iblk , & ! block index + ij , & ! compressed index + i, j ! grid indices + + real (kind=dbl_kind), dimension (max_blocks) :: & + local_dot ! local array value to accumulate dot product of grid function over blocks + + real (kind=dbl_kind), dimension(maxinner) :: & + dotprod_local ! local array to accumulate several dot product computations + + character(len=*), parameter :: subname = '(orthogonalize)' + + if (trim(ortho_type) == 'cgs') then ! Classical Gram-Schmidt + ! Classical Gram-Schmidt orthogonalisation process + ! First loop of Gram-Schmidt (compute coefficients) + dotprod_local = c0 + do it = 1, initer + local_dot = c0 + + !$OMP PARALLEL DO PRIVATE(iblk, ij, i, j) + do iblk = 1, nblocks + do ij = 1, icellu(iblk) + i = indxui(ij, iblk) + j = indxuj(ij, iblk) + + local_dot(iblk) = local_dot(iblk) + & + (arnoldi_basis_x(i, j, iblk, it) * arnoldi_basis_x(i, j, iblk, nextit)) + & + (arnoldi_basis_y(i, j, iblk, it) * arnoldi_basis_y(i, j, iblk, nextit)) + enddo ! ij + enddo + !$OMP END PARALLEL DO + + dotprod_local(it) = sum(local_dot) + end do + + hessenberg(1:initer, initer) = global_allreduce_sum(dotprod_local(1:initer), distrb_info) + + ! 
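+         ! Editorial note (for reference): in matrix form the first loop
+         ! above computes h = V_k^T w for all k coefficients with a single
+         ! reduction (the global_allreduce_sum call), whereas the modified
+         ! Gram-Schmidt branch below re-projects after each subtraction and
+         ! therefore needs one global sum per basis vector; CGS is cheaper
+         ! in communication but numerically less robust than MGS.
+         ! 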
Second loop of Gram-Schmidt (orthonormalize) + do it = 1, initer + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + do ij = 1, icellu(iblk) + i = indxui(ij, iblk) + j = indxuj(ij, iblk) + + arnoldi_basis_x(i, j, iblk, nextit) = arnoldi_basis_x(i, j, iblk, nextit) & + - hessenberg(it,initer) * arnoldi_basis_x(i, j, iblk, it) + arnoldi_basis_y(i, j, iblk, nextit) = arnoldi_basis_y(i, j, iblk, nextit) & + - hessenberg(it,initer) * arnoldi_basis_y(i, j, iblk, it) + enddo ! ij + enddo + !$OMP END PARALLEL DO + end do + elseif (trim(ortho_type) == 'mgs') then ! Modified Gram-Schmidt + ! Modified Gram-Schmidt orthogonalisation process + do it = 1, initer + local_dot = c0 + + !$OMP PARALLEL DO PRIVATE(iblk, ij, i, j) + do iblk = 1, nblocks + do ij = 1, icellu(iblk) + i = indxui(ij, iblk) + j = indxuj(ij, iblk) + + local_dot(iblk) = local_dot(iblk) + & + (arnoldi_basis_x(i, j, iblk, it) * arnoldi_basis_x(i, j, iblk, nextit)) + & + (arnoldi_basis_y(i, j, iblk, it) * arnoldi_basis_y(i, j, iblk, nextit)) + enddo ! ij + enddo + !$OMP END PARALLEL DO + + hessenberg(it,initer) = global_sum(sum(local_dot), distrb_info) + + !$OMP PARALLEL DO PRIVATE(iblk, ij, i, j) + do iblk = 1, nblocks + do ij = 1, icellu(iblk) + i = indxui(ij, iblk) + j = indxuj(ij, iblk) + + arnoldi_basis_x(i, j, iblk, nextit) = arnoldi_basis_x(i, j, iblk, nextit) & + - hessenberg(it,initer) * arnoldi_basis_x(i, j, iblk, it) + arnoldi_basis_y(i, j, iblk, nextit) = arnoldi_basis_y(i, j, iblk, nextit) & + - hessenberg(it,initer) * arnoldi_basis_y(i, j, iblk, it) + enddo ! ij + enddo + !$OMP END PARALLEL DO + end do + else + call abort_ice(error_message='wrong orthonalization in ' // subname, & + file=__FILE__, line=__LINE__) + endif + + end subroutine orthogonalize + +!======================================================================= + +! Check if value A is close to zero, up to machine precision +! +!author +! Stéphane Gaudreault, ECCC -- June 2014 +! +!revision +! v4-80 - Gaudreault S. - gfortran compatibility +! 2019 - Philippe Blain, ECCC - converted to CICE standards + + logical function almost_zero(A) result(retval) + + real (kind=dbl_kind), intent(in) :: A + + ! local variables + + character(len=*), parameter :: subname = '(almost_zero)' + + integer (kind=int8_kind) :: aBit + integer (kind=int8_kind), parameter :: two_complement = int(Z'80000000', kind=int8_kind) + aBit = 0 + aBit = transfer(A, aBit) + if (aBit < 0) then + aBit = two_complement - aBit + end if + ! lexicographic order test with a tolerance of 1 adjacent float + retval = (abs(aBit) <= 1) + + end function almost_zero + +!======================================================================= + + end module ice_dyn_vp + +!======================================================================= diff --git a/cicecore/cicedynB/general/ice_forcing.F90 b/cicecore/cicedynB/general/ice_forcing.F90 index 4c88037ed..43cf92a48 100755 --- a/cicecore/cicedynB/general/ice_forcing.F90 +++ b/cicecore/cicedynB/general/ice_forcing.F90 @@ -5059,23 +5059,31 @@ end subroutine ocn_data_ispol_init subroutine box2001_data ! wind and current fields as in Hunke, JCP 2001 +! these are defined at the u point ! authors: Elizabeth Hunke, LANL use ice_domain, only: nblocks + use ice_domain_size, only: max_blocks use ice_blocks, only: nx_block, ny_block, nghost use ice_flux, only: uocn, vocn, uatm, vatm, wind, rhoa, strax, stray - use ice_grid, only: uvm + use ice_grid, only: uvm, to_ugrid + use ice_state, only: aice ! local parameters integer (kind=int_kind) :: & iblk, i,j ! 
loop indices + real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & + aiu ! ice fraction on u-grid + real (kind=dbl_kind) :: & secday, pi , puny, period, pi2, tau call icepack_query_parameters(pi_out=pi, pi2_out=pi2, puny_out=puny) call icepack_query_parameters(secday_out=secday) + call to_ugrid(aice, aiu) + period = c4*secday do iblk = 1, nblocks @@ -5106,8 +5114,8 @@ subroutine box2001_data ! wind stress wind(i,j,iblk) = sqrt(uatm(i,j,iblk)**2 + vatm(i,j,iblk)**2) tau = rhoa(i,j,iblk) * 0.0012_dbl_kind * wind(i,j,iblk) - strax(i,j,iblk) = tau * uatm(i,j,iblk) - stray(i,j,iblk) = tau * vatm(i,j,iblk) + strax(i,j,iblk) = aiu(i,j,iblk) * tau * uatm(i,j,iblk) + stray(i,j,iblk) = aiu(i,j,iblk) * tau * vatm(i,j,iblk) ! initialization test ! Diagonal wind vectors 1 diff --git a/cicecore/cicedynB/general/ice_init.F90 b/cicecore/cicedynB/general/ice_init.F90 index f2eaae17d..fb9c45978 100644 --- a/cicecore/cicedynB/general/ice_init.F90 +++ b/cicecore/cicedynB/general/ice_init.F90 @@ -100,6 +100,11 @@ subroutine input_data basalstress, k1, k2, alphab, threshold_hw, & Ktens, e_ratio, coriolis, ssh_stress, & kridge, ktransport, brlx, arlx + use ice_dyn_vp, only: maxits_nonlin, precond, dim_fgmres, dim_pgmres, maxits_fgmres, & + maxits_pgmres, monitor_nonlin, monitor_fgmres, & + monitor_pgmres, reltol_nonlin, reltol_fgmres, reltol_pgmres, & + algo_nonlin, fpfunc_andacc, dim_andacc, reltol_andacc, & + damping_andacc, start_andacc, use_mean_vrel, ortho_type use ice_transport_driver, only: advection, conserv_check use ice_restoring, only: restore_ice #ifdef CESMCOUPLED @@ -194,7 +199,13 @@ subroutine input_data advection, coriolis, kridge, ktransport, & kstrength, krdg_partic, krdg_redist, mu_rdg, & e_ratio, Ktens, Cf, basalstress, & - k1, k2, alphab, threshold_hw, & + k1, maxits_nonlin, precond, dim_fgmres, & + dim_pgmres, maxits_fgmres, maxits_pgmres, monitor_nonlin, & + monitor_fgmres, monitor_pgmres, reltol_nonlin, reltol_fgmres, & + reltol_pgmres, algo_nonlin, dim_andacc, reltol_andacc, & + damping_andacc, start_andacc, fpfunc_andacc, use_mean_vrel, & + ortho_type, & + k2, alphab, threshold_hw, & Pstar, Cstar namelist /shortwave_nml/ & @@ -322,7 +333,27 @@ subroutine input_data alphab = 20.0_dbl_kind ! alphab=Cb factor in Lemieux et al 2015 threshold_hw = 30.0_dbl_kind ! max water depth for grounding Ktens = 0.0_dbl_kind ! T=Ktens*P (tensile strength: see Konig and Holland, 2010) - e_ratio = 2.0_dbl_kind ! EVP ellipse aspect ratio + e_ratio = 2.0_dbl_kind ! VP ellipse aspect ratio + maxits_nonlin = 4 ! max nb of iteration for nonlinear solver + precond = 'pgmres' ! preconditioner for fgmres: 'ident' (identity), 'diag' (diagonal), 'pgmres' (Jacobi-preconditioned GMRES) + dim_fgmres = 50 ! size of fgmres Krylov subspace + dim_pgmres = 5 ! size of pgmres Krylov subspace + maxits_fgmres = 50 ! max nb of iteration for fgmres + maxits_pgmres = 5 ! max nb of iteration for pgmres + monitor_nonlin = .false. ! print nonlinear residual norm + monitor_fgmres = .false. ! print fgmres residual norm + monitor_pgmres = .false. ! print pgmres residual norm + ortho_type = 'mgs' ! orthogonalization procedure 'cgs' or 'mgs' + reltol_nonlin = 1e-8_dbl_kind ! nonlinear stopping criterion: reltol_nonlin*res(k=0) + reltol_fgmres = 1e-2_dbl_kind ! fgmres stopping criterion: reltol_fgmres*res(k) + reltol_pgmres = 1e-6_dbl_kind ! pgmres stopping criterion: reltol_pgmres*res(k) + algo_nonlin = 'picard' ! nonlinear algorithm: 'picard' (Picard iteration), 'anderson' (Anderson acceleration) + fpfunc_andacc = 1 ! 
fixed point function for Anderson acceleration: 1: g(x) = FMGRES(A(x),b(x)), 2: g(x) = x - A(x)x + b(x) + dim_andacc = 5 ! size of Anderson minimization matrix (number of saved previous residuals) + reltol_andacc = 1e-6_dbl_kind ! relative tolerance for Anderson acceleration + damping_andacc = 0 ! damping factor for Anderson acceleration + start_andacc = 0 ! acceleration delay factor (acceleration starts at this iteration) + use_mean_vrel = .true. ! use mean of previous 2 iterates to compute vrel advection = 'remap' ! incremental remapping transport scheme conserv_check = .false.! tracer conservation check shortwave = 'ccsm3' ! 'ccsm3' or 'dEdd' (delta-Eddington) @@ -628,6 +659,26 @@ subroutine input_data call broadcast_scalar(ssh_stress, master_task) call broadcast_scalar(kridge, master_task) call broadcast_scalar(ktransport, master_task) + call broadcast_scalar(maxits_nonlin, master_task) + call broadcast_scalar(precond, master_task) + call broadcast_scalar(dim_fgmres, master_task) + call broadcast_scalar(dim_pgmres, master_task) + call broadcast_scalar(maxits_fgmres, master_task) + call broadcast_scalar(maxits_pgmres, master_task) + call broadcast_scalar(monitor_nonlin, master_task) + call broadcast_scalar(monitor_fgmres, master_task) + call broadcast_scalar(monitor_pgmres, master_task) + call broadcast_scalar(ortho_type, master_task) + call broadcast_scalar(reltol_nonlin, master_task) + call broadcast_scalar(reltol_fgmres, master_task) + call broadcast_scalar(reltol_pgmres, master_task) + call broadcast_scalar(algo_nonlin, master_task) + call broadcast_scalar(fpfunc_andacc, master_task) + call broadcast_scalar(dim_andacc, master_task) + call broadcast_scalar(reltol_andacc, master_task) + call broadcast_scalar(damping_andacc, master_task) + call broadcast_scalar(start_andacc, master_task) + call broadcast_scalar(use_mean_vrel, master_task) call broadcast_scalar(conduct, master_task) call broadcast_scalar(R_ice, master_task) call broadcast_scalar(R_pnd, master_task) @@ -831,7 +882,7 @@ subroutine input_data revised_evp = .false. endif - if (kdyn > 2) then + if (kdyn > 3) then if (my_task == master_task) then write(nu_diag,*) subname//' WARNING: kdyn out of range' endif @@ -1037,6 +1088,38 @@ subroutine input_data endif endif + ! Implicit solver input validation + if (kdyn == 3) then + if (.not. (trim(algo_nonlin) == 'picard' .or. trim(algo_nonlin) == 'anderson')) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: unknown algo_nonlin: '//algo_nonlin + write(nu_diag,*) subname//' ERROR: allowed values: ''picard'', ''anderson''' + endif + abort_list = trim(abort_list)//":60" + endif + + if (trim(algo_nonlin) == 'picard') then + ! Picard solver is implemented in the Anderson solver; reset number of saved residuals to zero + dim_andacc = 0 + endif + + if (.not. (trim(precond) == 'ident' .or. trim(precond) == 'diag' .or. trim(precond) == 'pgmres')) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: unknown precond: '//precond + write(nu_diag,*) subname//' ERROR: allowed values: ''ident'', ''diag'', ''pgmres''' + endif + abort_list = trim(abort_list)//":61" + endif + + if (.not. (trim(ortho_type) == 'cgs' .or. 
trim(ortho_type) == 'mgs')) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: unknown ortho_type: '//ortho_type + write(nu_diag,*) subname//' ERROR: allowed values: ''cgs'', ''mgs''' + endif + abort_list = trim(abort_list)//":62" + endif + endif + ice_IOUnitsMinUnit = numin ice_IOUnitsMaxUnit = numax @@ -1139,28 +1222,35 @@ subroutine input_data write(nu_diag,*) '--------------------------------' if (kdyn == 1) then tmpstr2 = ' elastic-viscous-plastic dynamics' - write(nu_diag,*) 'yield_curve = ', trim(yield_curve) - if (trim(yield_curve) == 'ellipse') & - write(nu_diag,1007) ' e_ratio = ', e_ratio, ' aspect ratio of ellipse' elseif (kdyn == 2) then tmpstr2 = ' elastic-anisotropic-plastic dynamics' + elseif (kdyn == 3) then + tmpstr2 = ' viscous-plastic dynamics' elseif (kdyn < 1) then tmpstr2 = ' dynamics disabled' endif write(nu_diag,1022) ' kdyn = ', kdyn,trim(tmpstr2) if (kdyn >= 1) then - if (revised_evp) then - tmpstr2 = ' revised EVP formulation used' - else - tmpstr2 = ' revised EVP formulation not used' - endif - write(nu_diag,1012) ' revised_evp = ', revised_evp,trim(tmpstr2) - write(nu_diag,1022) ' kevp_kernel = ', kevp_kernel,' EVP solver' + if (kdyn == 1 .or. kdyn == 2) then + if (revised_evp) then + tmpstr2 = ' revised EVP formulation used' + write(nu_diag,1007) ' arlx = ', arlx, ' stress equation factor alpha' + write(nu_diag,1007) ' brlx = ', brlx, ' stress equation factor beta' + else + tmpstr2 = ' revised EVP formulation not used' + endif + write(nu_diag,1012) ' revised_evp = ', revised_evp,trim(tmpstr2) + write(nu_diag,1022) ' kevp_kernel = ', kevp_kernel,' EVP solver' + + write(nu_diag,1022) ' ndtd = ', ndtd, ' number of dynamics/advection/ridging/steps per thermo timestep' + write(nu_diag,1022) ' ndte = ', ndte, ' number of EVP or EAP subcycles' + endif - write(nu_diag,1022) ' ndtd = ', ndtd, ' number of dynamics/advection/ridging/steps per thermo timestep' - write(nu_diag,1022) ' ndte = ', ndte, ' number of EVP or EAP subcycles' - write(nu_diag,1007) ' arlx = ', arlx, ' stress equation factor alpha' - write(nu_diag,1007) ' brlx = ', brlx, ' stress equation factor beta' + if (kdyn == 1 .or. 
kdyn == 3) then + write(nu_diag,*) 'yield_curve = ', trim(yield_curve) + if (trim(yield_curve) == 'ellipse') & + write(nu_diag,1007) ' e_ratio = ', e_ratio, ' aspect ratio of ellipse' + endif if (trim(coriolis) == 'latitude') then tmpstr2 = ': latitude-dependent Coriolis parameter' @@ -1524,6 +1614,31 @@ subroutine input_data write(nu_diag,1010) ' orca_halogrid = ', & orca_halogrid + if (kdyn == 3) then + write(nu_diag,1020) ' maxits_nonlin = ', maxits_nonlin + write(nu_diag,1030) ' precond = ', precond + write(nu_diag,1020) ' dim_fgmres = ', dim_fgmres + write(nu_diag,1020) ' dim_pgmres = ', dim_pgmres + write(nu_diag,1020) ' maxits_fgmres = ', maxits_fgmres + write(nu_diag,1020) ' maxits_pgmres = ', maxits_pgmres + write(nu_diag,1010) ' monitor_nonlin = ', monitor_nonlin + write(nu_diag,1010) ' monitor_fgmres = ', monitor_fgmres + write(nu_diag,1010) ' monitor_pgmres = ', monitor_pgmres + write(nu_diag,1030) ' ortho_type = ', ortho_type + write(nu_diag,1008) ' reltol_nonlin = ', reltol_nonlin + write(nu_diag,1008) ' reltol_fgmres = ', reltol_fgmres + write(nu_diag,1008) ' reltol_pgmres = ', reltol_pgmres + write(nu_diag,1030) ' algo_nonlin = ', algo_nonlin + write(nu_diag,1010) ' use_mean_vrel = ', use_mean_vrel + if (algo_nonlin == 'anderson') then + write(nu_diag,1020) ' fpfunc_andacc = ', fpfunc_andacc + write(nu_diag,1020) ' dim_andacc = ', dim_andacc + write(nu_diag,1008) ' reltol_andacc = ', reltol_andacc + write(nu_diag,1005) ' damping_andacc = ', damping_andacc + write(nu_diag,1020) ' start_andacc = ', start_andacc + endif + endif + write(nu_diag,1010) ' conserv_check = ', conserv_check write(nu_diag,1020) ' fyear_init = ', & @@ -1675,6 +1790,7 @@ subroutine input_data 1005 format (a30,2x,f12.6) ! float 1006 format (a20,2x,f10.6,a) 1007 format (a20,2x,f6.2,a) + 1008 format (a30,2x,d13.6) ! float, exponential notation 1009 format (a20,2x,d13.6,a) ! float, exponential notation 1010 format (a30,2x,l6) ! logical 1012 format (a20,2x,l3,1x,a) ! logical diff --git a/cicecore/cicedynB/general/ice_step_mod.F90 b/cicecore/cicedynB/general/ice_step_mod.F90 index 77d0ad492..4b92c2a42 100644 --- a/cicecore/cicedynB/general/ice_step_mod.F90 +++ b/cicecore/cicedynB/general/ice_step_mod.F90 @@ -850,6 +850,7 @@ subroutine step_dyn_horiz (dt) use ice_dyn_evp, only: evp use ice_dyn_eap, only: eap + use ice_dyn_vp, only: implicit_solver use ice_dyn_shared, only: kdyn, ktransport use ice_flux, only: init_history_dyn !deprecate upwind use ice_transport_driver, only: advection, transport_upwind, transport_remap @@ -863,11 +864,12 @@ subroutine step_dyn_horiz (dt) call init_history_dyn ! initialize dynamic history variables !----------------------------------------------------------------- - ! Elastic-viscous-plastic ice dynamics + ! Ice dynamics (momentum equation) !----------------------------------------------------------------- if (kdyn == 1) call evp (dt) if (kdyn == 2) call eap (dt) + if (kdyn == 3) call implicit_solver (dt) !----------------------------------------------------------------- ! 
Horizontal ice transport diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_global_reductions.F90 b/cicecore/cicedynB/infrastructure/comm/mpi/ice_global_reductions.F90 index 2b4172d81..1d724fb39 100644 --- a/cicecore/cicedynB/infrastructure/comm/mpi/ice_global_reductions.F90 +++ b/cicecore/cicedynB/infrastructure/comm/mpi/ice_global_reductions.F90 @@ -22,7 +22,7 @@ module ice_global_reductions #else use ice_communicate, only: my_task, mpiR16, mpiR8, mpiR4, master_task #endif - use ice_constants, only: field_loc_Nface, field_loc_NEcorner + use ice_constants, only: field_loc_Nface, field_loc_NEcorner, c0 use ice_fileunits, only: bfbflag use ice_exit, only: abort_ice use ice_distribution, only: distrb, ice_distributionGet, & @@ -36,6 +36,7 @@ module ice_global_reductions private public :: global_sum, & + global_allreduce_sum, & global_sum_prod, & global_maxval, & global_minval @@ -55,6 +56,12 @@ module ice_global_reductions global_sum_scalar_int end interface + interface global_allreduce_sum + module procedure global_allreduce_sum_vector_dbl!, & + ! module procedure global_allreduce_sum_vector_real, & ! not yet implemented + ! module procedure global_allreduce_sum_vector_int ! not yet implemented + end interface + interface global_sum_prod module procedure global_sum_prod_dbl, & global_sum_prod_real, & @@ -700,6 +707,69 @@ function global_sum_scalar_int(scalar, dist) & end function global_sum_scalar_int +!*********************************************************************** + + function global_allreduce_sum_vector_dbl(vector, dist) & + result(globalSums) + +! Computes the global sums of sets of scalars (elements of 'vector') +! distributed across a parallel machine. +! +! This is actually the specific interface for the generic global_allreduce_sum +! function corresponding to double precision vectors. The generic +! interface is identical but will handle real and integer vectors. + + real (dbl_kind), dimension(:), intent(in) :: & + vector ! vector whose components are to be summed + + type (distrb), intent(in) :: & + dist ! block distribution + + real (dbl_kind), dimension(size(vector)) :: & + globalSums ! resulting array of global sums + +!----------------------------------------------------------------------- +! +! local variables +! +!----------------------------------------------------------------------- + + integer (int_kind) :: & + ierr, &! mpi error flag + numProcs, &! number of processor participating + numBlocks, &! number of local blocks + communicator, &! communicator for this distribution + numElem ! number of elements in vector + + real (dbl_kind), dimension(:,:), allocatable :: & + work ! temporary local array + + character(len=*), parameter :: subname = '(global_allreduce_sum_vector_dbl)' + +!----------------------------------------------------------------------- +! +! get communicator for MPI calls +! 
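+!  Editorial usage sketch (hypothetical caller, mirroring the way
+!  orthogonalize() in ice_dyn_vp.F90 uses this routine):
+!
+!     dotprod_local(1:n) = ...   ! per-process partial dot products
+!     sums(1:n) = global_allreduce_sum(dotprod_local(1:n), distrb_info)
+!
+!  i.e. a whole vector is reduced in one call rather than with n
+!  separate global_sum calls.
+!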
+!----------------------------------------------------------------------- + + call ice_distributionGet(dist, & + numLocalBlocks = numBlocks, & + nprocs = numProcs, & + communicator = communicator) + + numElem = size(vector) + allocate(work(1,numElem)) + work(1,:) = vector + globalSums = c0 + + call compute_sums_dbl(work,globalSums,communicator,numProcs) + + deallocate(work) + +!----------------------------------------------------------------------- + + end function global_allreduce_sum_vector_dbl + !*********************************************************************** function global_sum_prod_dbl (array1, array2, dist, field_loc, & diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_global_reductions.F90 b/cicecore/cicedynB/infrastructure/comm/serial/ice_global_reductions.F90 index 1517bd73b..4d53e873e 100644 --- a/cicecore/cicedynB/infrastructure/comm/serial/ice_global_reductions.F90 +++ b/cicecore/cicedynB/infrastructure/comm/serial/ice_global_reductions.F90 @@ -23,7 +23,7 @@ module ice_global_reductions #else use ice_communicate, only: my_task, mpiR16, mpiR8, mpiR4, master_task #endif - use ice_constants, only: field_loc_Nface, field_loc_NEcorner + use ice_constants, only: field_loc_Nface, field_loc_NEcorner, c0 use ice_fileunits, only: bfbflag use ice_exit, only: abort_ice use ice_distribution, only: distrb, ice_distributionGet, & @@ -37,6 +37,7 @@ module ice_global_reductions private public :: global_sum, & + global_allreduce_sum, & global_sum_prod, & global_maxval, & global_minval @@ -56,6 +57,12 @@ module ice_global_reductions global_sum_scalar_int end interface + interface global_allreduce_sum + module procedure global_allreduce_sum_vector_dbl!, & + ! module procedure global_allreduce_sum_vector_real, & ! not yet implemented + ! module procedure global_allreduce_sum_vector_int ! not yet implemented + end interface + interface global_sum_prod module procedure global_sum_prod_dbl, & global_sum_prod_real, & @@ -701,6 +708,69 @@ function global_sum_scalar_int(scalar, dist) & end function global_sum_scalar_int +!*********************************************************************** + + function global_allreduce_sum_vector_dbl(vector, dist) & + result(globalSums) + +! Computes the global sums of sets of scalars (elements of 'vector') +! distributed across a parallel machine. +! +! This is actually the specific interface for the generic global_allreduce_sum +! function corresponding to double precision vectors. The generic +! interface is identical but will handle real and integer vectors. + + real (dbl_kind), dimension(:), intent(in) :: & + vector ! vector whose components are to be summed + + type (distrb), intent(in) :: & + dist ! block distribution + + real (dbl_kind), dimension(size(vector)) :: & + globalSums ! resulting array of global sums + +!----------------------------------------------------------------------- +! +! local variables +! +!----------------------------------------------------------------------- + + integer (int_kind) :: & + ierr, &! mpi error flag + numProcs, &! number of processor participating + numBlocks, &! number of local blocks + communicator, &! communicator for this distribution + numElem ! number of elements in vector + + real (dbl_kind), dimension(:,:), allocatable :: & + work ! temporary local array + + character(len=*), parameter :: subname = '(global_allreduce_sum_vector_dbl)' + +!----------------------------------------------------------------------- +! +! get communicator for MPI calls +! 
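+!  Editorial note: this serial implementation mirrors the MPI version
+!  above by routing the reduction through compute_sums_dbl, so the
+!  bfbflag reproducibility options behave the same whichever
+!  communication layer is compiled in.
+!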
+!----------------------------------------------------------------------- + + call ice_distributionGet(dist, & + numLocalBlocks = numBlocks, & + nprocs = numProcs, & + communicator = communicator) + + numElem = size(vector) + allocate(work(1,numElem)) + work(1,:) = vector + globalSums = c0 + + call compute_sums_dbl(work,globalSums,communicator,numProcs) + + deallocate(work) + +!----------------------------------------------------------------------- + + end function global_allreduce_sum_vector_dbl + !*********************************************************************** function global_sum_prod_dbl (array1, array2, dist, field_loc, & diff --git a/cicecore/cicedynB/infrastructure/ice_grid.F90 b/cicecore/cicedynB/infrastructure/ice_grid.F90 index 34b37cf29..67129c911 100644 --- a/cicecore/cicedynB/infrastructure/ice_grid.F90 +++ b/cicecore/cicedynB/infrastructure/ice_grid.F90 @@ -340,6 +340,7 @@ subroutine init_grid2 real (kind=dbl_kind) :: & angle_0, angle_w, angle_s, angle_sw, & pi, pi2, puny + logical (kind=log_kind), dimension(nx_block,ny_block,max_blocks):: & out_of_range diff --git a/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 b/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 index dc41ff9fd..49cf12ce1 100644 --- a/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 @@ -71,7 +71,8 @@ subroutine cice_init use ice_domain, only: init_domain_blocks use ice_domain_size, only: ncat, nfsd use ice_dyn_eap, only: init_eap, alloc_dyn_eap - use ice_dyn_shared, only: kdyn, init_evp, basalstress, alloc_dyn_shared + use ice_dyn_shared, only: kdyn, init_dyn, basalstress, alloc_dyn_shared + use ice_dyn_vp, only: init_vp use ice_flux, only: init_coupler_flux, init_history_therm, & init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & @@ -120,11 +121,12 @@ subroutine cice_init call init_calendar ! initialize some calendar stuff call init_hist (dt) ! initialize output history file + call init_dyn (dt_dyn) ! define dynamics parameters, variables if (kdyn == 2) then call alloc_dyn_eap ! allocate dyn_eap arrays - call init_eap (dt_dyn) ! define eap dynamics parameters, variables - else ! for both kdyn = 0 or 1 - call init_evp (dt_dyn) ! define evp dynamics parameters, variables + call init_eap ! define eap dynamics parameters, variables + else if (kdyn == 3) then + call init_vp ! define vp dynamics parameters, variables endif call init_coupler_flux ! 
initialize fluxes exchanged with coupler diff --git a/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 b/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 index e43b4a24d..e8c809d9e 100644 --- a/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 @@ -353,6 +353,7 @@ subroutine coupling_prep (iblk) alvdf_ai, alidf_ai, alvdr_ai, alidr_ai, fhocn_ai, & fresh_ai, fsalt_ai, fsalt, & fswthru_ai, fhocn, fswthru, scale_factor, snowfrac, & + fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyt, & fsens, flat, fswabs, flwout, evap, Tref, Qref, & fsurfn_f, flatn_f, scale_fluxes, frzmlt_init, frzmlt @@ -528,6 +529,10 @@ subroutine coupling_prep (iblk) Tref (:,:,iblk), Qref (:,:,iblk), & fresh (:,:,iblk), fsalt (:,:,iblk), & fhocn (:,:,iblk), fswthru (:,:,iblk), & + fswthru_vdr(:,:,iblk), & + fswthru_vdf(:,:,iblk), & + fswthru_idr(:,:,iblk), & + fswthru_idf(:,:,iblk), & faero_ocn(:,:,:,iblk), & alvdr (:,:,iblk), alidr (:,:,iblk), & alvdf (:,:,iblk), alidf (:,:,iblk), & diff --git a/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 b/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 index 80bb2570e..da745d965 100644 --- a/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 +++ b/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 @@ -71,7 +71,8 @@ subroutine cice_init(mpicom_ice) use ice_domain, only: init_domain_blocks use ice_domain_size, only: ncat, nfsd use ice_dyn_eap, only: init_eap, alloc_dyn_eap - use ice_dyn_shared, only: kdyn, init_evp, alloc_dyn_shared + use ice_dyn_shared, only: kdyn, init_dyn, alloc_dyn_shared + use ice_dyn_vp, only: init_vp use ice_flux, only: init_coupler_flux, init_history_therm, & init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & @@ -122,11 +123,12 @@ subroutine cice_init(mpicom_ice) call init_calendar ! initialize some calendar stuff call init_hist (dt) ! initialize output history file + call init_dyn (dt_dyn) ! define dynamics parameters, variables if (kdyn == 2) then call alloc_dyn_eap ! allocate dyn_eap arrays - call init_eap (dt_dyn) ! define eap dynamics parameters, variables - else ! for both kdyn = 0 or 1 - call init_evp (dt_dyn) ! define evp dynamics parameters, variables + call init_eap ! define eap dynamics parameters, variables + else if (kdyn == 3) then + call init_vp ! define vp dynamics parameters, variables endif call init_coupler_flux ! 
initialize fluxes exchanged with coupler diff --git a/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 b/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 index ee217712b..d53014b7b 100644 --- a/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 +++ b/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 @@ -367,6 +367,7 @@ subroutine coupling_prep (iblk) alvdf_ai, alidf_ai, alvdr_ai, alidr_ai, fhocn_ai, & fresh_ai, fsalt_ai, fsalt, & fswthru_ai, fhocn, fswthru, scale_factor, snowfrac, & + fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & fsens, flat, fswabs, flwout, evap, Tref, Qref, & scale_fluxes, frzmlt_init, frzmlt, Uref, wind @@ -565,6 +566,10 @@ subroutine coupling_prep (iblk) Tref (:,:,iblk), Qref (:,:,iblk), & fresh (:,:,iblk), fsalt (:,:,iblk), & fhocn (:,:,iblk), fswthru (:,:,iblk), & + fswthru_vdr(:,:,iblk), & + fswthru_vdf(:,:,iblk), & + fswthru_idr(:,:,iblk), & + fswthru_idf(:,:,iblk), & faero_ocn(:,:,:,iblk), & alvdr (:,:,iblk), alidr (:,:,iblk), & alvdf (:,:,iblk), alidf (:,:,iblk), & diff --git a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 index 917774908..b37d73f65 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 @@ -52,7 +52,8 @@ subroutine cice_init use ice_domain, only: init_domain_blocks use ice_domain_size, only: ncat, nfsd use ice_dyn_eap, only: init_eap, alloc_dyn_eap - use ice_dyn_shared, only: kdyn, init_evp, alloc_dyn_shared + use ice_dyn_shared, only: kdyn, init_dyn, alloc_dyn_shared + use ice_dyn_vp, only: init_vp use ice_flux, only: init_coupler_flux, init_history_therm, & init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux use ice_forcing, only: init_forcing_ocn @@ -98,11 +99,12 @@ subroutine cice_init call init_calendar ! initialize some calendar stuff call init_hist (dt) ! initialize output history file + call init_dyn (dt_dyn) ! define dynamics parameters, variables if (kdyn == 2) then call alloc_dyn_eap ! allocate dyn_eap arrays - call init_eap (dt_dyn) ! define eap dynamics parameters, variables - else ! for both kdyn = 0 or 1 - call init_evp (dt_dyn) ! define evp dynamics parameters, variables + call init_eap ! define eap dynamics parameters, variables + else if (kdyn == 3) then + call init_vp ! define vp dynamics parameters, variables endif call init_coupler_flux ! initialize fluxes exchanged with coupler @@ -161,8 +163,8 @@ subroutine cice_init call faero_optics !initialize aerosol optical property tables end if - ! Initialize shortwave components using swdn from previous timestep - ! if restarting. These components will be scaled to current forcing + ! Initialize shortwave components using swdn from previous timestep + ! if restarting. These components will be scaled to current forcing ! in prep_radiation. if (trim(runtype) == 'continue' .or. restart) then @@ -249,7 +251,7 @@ subroutine init_restart if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - if (trim(runtype) == 'continue') then + if (trim(runtype) == 'continue') then ! start from core restart file call restartfile() ! given by pointer in ice_in call calendar(time) ! update time parameters @@ -260,17 +262,17 @@ subroutine init_restart ! call restartfile_v4 (ice_ic) ! CICE v4.1 binary restart file !!! uncomment if EAP restart data exists ! if (kdyn == 2) call read_restart_eap - endif + endif ! tracers - ! ice age tracer - if (tr_iage) then + ! 
ice age tracer + if (tr_iage) then if (trim(runtype) == 'continue') & restart_age = .true. if (restart_age) then call read_restart_age else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_age(trcrn(:,:,nt_iage,:,iblk)) enddo ! iblk endif @@ -281,7 +283,7 @@ subroutine init_restart if (restart_FY) then call read_restart_FY else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_FY(trcrn(:,:,nt_FY,:,iblk)) enddo ! iblk endif @@ -292,7 +294,7 @@ subroutine init_restart if (restart_lvl) then call read_restart_lvl else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_lvl(iblk,trcrn(:,:,nt_alvl,:,iblk), & trcrn(:,:,nt_vlvl,:,iblk)) enddo ! iblk @@ -305,7 +307,7 @@ subroutine init_restart if (restart_pond_cesm) then call read_restart_pond_cesm else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_cesm(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk)) enddo ! iblk @@ -318,7 +320,7 @@ subroutine init_restart if (restart_pond_lvl) then call read_restart_pond_lvl else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_lvl(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk), & trcrn(:,:,nt_ipnd,:,iblk), & @@ -333,7 +335,7 @@ subroutine init_restart if (restart_pond_topo) then call read_restart_pond_topo else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_topo(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk), & trcrn(:,:,nt_ipnd,:,iblk)) @@ -356,7 +358,7 @@ subroutine init_restart if (restart_iso) then call read_restart_iso else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_isotope(trcrn(:,:,nt_isosno:nt_isosno+n_iso-1,:,iblk), & trcrn(:,:,nt_isoice:nt_isoice+n_iso-1,:,iblk)) enddo ! iblk @@ -368,7 +370,7 @@ subroutine init_restart if (restart_aero) then call read_restart_aero else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_aerosol(trcrn(:,:,nt_aero:nt_aero+4*n_aero-1,:,iblk)) enddo ! iblk endif ! .not. restart_aero diff --git a/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 b/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 index 4e236bb11..70ef5f895 100644 --- a/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 @@ -76,7 +76,7 @@ subroutine cice_init(mpi_comm) use ice_domain, only: init_domain_blocks use ice_domain_size, only: ncat, nfsd use ice_dyn_eap, only: init_eap, alloc_dyn_eap - use ice_dyn_shared, only: kdyn, init_evp, alloc_dyn_shared + use ice_dyn_shared, only: kdyn, init_dyn, alloc_dyn_shared use ice_flux, only: init_coupler_flux, init_history_therm, & init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & @@ -134,11 +134,12 @@ subroutine cice_init(mpi_comm) call init_calendar ! initialize some calendar stuff call init_hist (dt) ! initialize output history file + call init_dyn (dt_dyn) ! define dynamics parameters, variables if (kdyn == 2) then call alloc_dyn_eap ! allocate dyn_eap arrays - call init_eap (dt_dyn) ! define eap dynamics parameters, variables - else ! for both kdyn = 0 or 1 - call init_evp (dt_dyn) ! define evp dynamics parameters, variables + call init_eap ! define eap dynamics parameters, variables + else if (kdyn == 3) then + call init_vp ! define vp dynamics parameters, variables endif call init_coupler_flux ! 
initialize fluxes exchanged with coupler diff --git a/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 b/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 index ad575f714..df8fe4978 100644 --- a/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 @@ -362,6 +362,7 @@ subroutine coupling_prep (iblk) alvdf_ai, alidf_ai, alvdr_ai, alidr_ai, fhocn_ai, & fresh_ai, fsalt_ai, fsalt, & fswthru_ai, fhocn, fswthru, scale_factor, snowfrac, & + fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & fsens, flat, fswabs, flwout, evap, Tref, Qref, & scale_fluxes, frzmlt_init, frzmlt @@ -556,6 +557,10 @@ subroutine coupling_prep (iblk) Tref (:,:,iblk), Qref (:,:,iblk), & fresh (:,:,iblk), fsalt (:,:,iblk), & fhocn (:,:,iblk), fswthru (:,:,iblk), & + fswthru_vdr(:,:,iblk), & + fswthru_vdf(:,:,iblk), & + fswthru_idr(:,:,iblk), & + fswthru_idf(:,:,iblk), & faero_ocn(:,:,:,iblk), & alvdr (:,:,iblk), alidr (:,:,iblk), & alvdf (:,:,iblk), alidf (:,:,iblk), & diff --git a/cicecore/drivers/standalone/cice/CICE_InitMod.F90 b/cicecore/drivers/standalone/cice/CICE_InitMod.F90 index 0a8614eb2..8b507740d 100644 --- a/cicecore/drivers/standalone/cice/CICE_InitMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_InitMod.F90 @@ -71,7 +71,8 @@ subroutine cice_init use ice_domain, only: init_domain_blocks use ice_domain_size, only: ncat, nfsd use ice_dyn_eap, only: init_eap, alloc_dyn_eap - use ice_dyn_shared, only: kdyn, init_evp, alloc_dyn_shared + use ice_dyn_shared, only: kdyn, init_dyn, alloc_dyn_shared + use ice_dyn_vp, only: init_vp use ice_flux, only: init_coupler_flux, init_history_therm, & init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & @@ -122,11 +123,12 @@ subroutine cice_init call init_calendar ! initialize some calendar stuff call init_hist (dt) ! initialize output history file + call init_dyn (dt_dyn) ! define dynamics parameters, variables if (kdyn == 2) then call alloc_dyn_eap ! allocate dyn_eap arrays - call init_eap (dt_dyn) ! define eap dynamics parameters, variables - else ! for both kdyn = 0 or 1 - call init_evp (dt_dyn) ! define evp dynamics parameters, variables + call init_eap ! define eap dynamics parameters, variables + else if (kdyn == 3) then + call init_vp ! define vp dynamics parameters, variables endif call init_coupler_flux ! 
initialize fluxes exchanged with coupler diff --git a/cicecore/drivers/standalone/cice/CICE_RunMod.F90 b/cicecore/drivers/standalone/cice/CICE_RunMod.F90 index b45db2514..bd818211e 100644 --- a/cicecore/drivers/standalone/cice/CICE_RunMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_RunMod.F90 @@ -352,9 +352,8 @@ subroutine coupling_prep (iblk) albpnd, albcnt, apeff_ai, fpond, fresh, l_mpond_fresh, & alvdf_ai, alidf_ai, alvdr_ai, alidr_ai, fhocn_ai, & fresh_ai, fsalt_ai, fsalt, & - fswthru_ai, fhocn, & + fswthru_ai, fhocn, scale_factor, snowfrac, & fswthru, fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & - scale_factor, snowfrac, & swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & fsens, flat, fswabs, flwout, evap, Tref, Qref, & scale_fluxes, frzmlt_init, frzmlt diff --git a/cicecore/drivers/standalone/cice/CICE_RunMod.F90_debug b/cicecore/drivers/standalone/cice/CICE_RunMod.F90_debug index c7ae7601f..8f5de17ea 100644 --- a/cicecore/drivers/standalone/cice/CICE_RunMod.F90_debug +++ b/cicecore/drivers/standalone/cice/CICE_RunMod.F90_debug @@ -395,6 +395,7 @@ alvdf_ai, alidf_ai, alvdr_ai, alidr_ai, fhocn_ai, & fresh_ai, fsalt_ai, fsalt, & fswthru_ai, fhocn, fswthru, scale_factor, snowfrac, & + fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & fsens, flat, fswabs, flwout, evap, Tref, Qref, & scale_fluxes, frzmlt_init, frzmlt @@ -589,6 +590,10 @@ Tref (:,:,iblk), Qref (:,:,iblk), & fresh (:,:,iblk), fsalt (:,:,iblk), & fhocn (:,:,iblk), fswthru (:,:,iblk), & + fswthru_vdr(:,:,iblk), & + fswthru_vdf(:,:,iblk), & + fswthru_idr(:,:,iblk), & + fswthru_idf(:,:,iblk), & faero_ocn(:,:,:,iblk), & alvdr (:,:,iblk), alidr (:,:,iblk), & alvdf (:,:,iblk), alidf (:,:,iblk), & diff --git a/cicecore/shared/ice_fileunits.F90 b/cicecore/shared/ice_fileunits.F90 index 4c91fdb2a..b6b30d47a 100644 --- a/cicecore/shared/ice_fileunits.F90 +++ b/cicecore/shared/ice_fileunits.F90 @@ -1,4 +1,3 @@ -! SVN:$Id: ice_fileunits.F90 1228 2017-05-23 21:33:34Z tcraig $ !======================================================================= ! ! This module contains an I/O unit manager for tracking, assigning diff --git a/configuration/scripts/ice_in b/configuration/scripts/ice_in index a26579df1..3139726f5 100644 --- a/configuration/scripts/ice_in +++ b/configuration/scripts/ice_in @@ -139,6 +139,21 @@ kridge = 1 ktransport = 1 ssh_stress = 'geostrophic' + maxits_nonlin = 4 + precond = 'pgmres' + dim_fgmres = 50 + dim_pgmres = 5 + maxits_fgmres = 1 + maxits_pgmres = 1 + monitor_nonlin = .false. + monitor_fgmres = .false. + monitor_pgmres = .false. + ortho_type = 'mgs' + reltol_nonlin = 1e-8 + reltol_fgmres = 1e-2 + reltol_pgmres = 1e-6 + algo_nonlin = 'picard' + use_mean_vrel = .true. 
/ &shortwave_nml diff --git a/configuration/scripts/machines/Macros.banting_intel b/configuration/scripts/machines/Macros.banting_intel index 96b6933f0..7ed7f7b5a 100644 --- a/configuration/scripts/machines/Macros.banting_intel +++ b/configuration/scripts/machines/Macros.banting_intel @@ -9,7 +9,7 @@ CFLAGS := -c -O2 -fp-model precise #-xHost FREEFLAGS := -FR -FFLAGS := -fp-model source -convert big_endian -assume byterecl -ftz -traceback +FFLAGS := -fp-model source -convert big_endian -assume byterecl -ftz -traceback -no-wrap-margin #-xHost ifeq ($(ICE_BLDDEBUG), true) diff --git a/configuration/scripts/machines/Macros.cesium_intel b/configuration/scripts/machines/Macros.cesium_intel index 1bca1ddac..2ad3ff1f3 100644 --- a/configuration/scripts/machines/Macros.cesium_intel +++ b/configuration/scripts/machines/Macros.cesium_intel @@ -11,7 +11,7 @@ CFLAGS := -c -O2 -fp-model precise FIXEDFLAGS := -132 FREEFLAGS := -FR -FFLAGS := -fp-model source -convert big_endian -assume byterecl -ftz -traceback +FFLAGS := -fp-model source -convert big_endian -assume byterecl -ftz -traceback -no-wrap-margin #-xHost FFLAGS_NOOPT:= -O0 @@ -50,7 +50,7 @@ LIB_PNETCDF := $(PNETCDF_PATH)/lib LIB_MPI := $(IMPILIBDIR) #SLIBS := -L$(LIB_NETCDF) -lnetcdff -lnetcdf -L$(LIB_PNETCDF) -lpnetcdf -lgptl -SLIBS := -L$(LIB_NETCDF) -lnetcdff -lnetcdf +SLIBS := -L$(LIB_NETCDF) -lnetcdff -lnetcdf -llapack -lblas ifeq ($(ICE_THREADED), true) LDFLAGS += -openmp diff --git a/configuration/scripts/machines/Macros.conda_linux b/configuration/scripts/machines/Macros.conda_linux index 32c5ae012..c821a4592 100644 --- a/configuration/scripts/machines/Macros.conda_linux +++ b/configuration/scripts/machines/Macros.conda_linux @@ -40,7 +40,7 @@ LD:= $(FC) MODDIR += -I$(CONDA_PREFIX)/include # Libraries to be passed to the linker -SLIBS := -L$(CONDA_PREFIX)/lib -lnetcdf -lnetcdff +SLIBS := -L$(CONDA_PREFIX)/lib -lnetcdf -lnetcdff -llapack # Necessary flag to compile with OpenMP support ifeq ($(ICE_THREADED), true) diff --git a/configuration/scripts/machines/Macros.conda_macos b/configuration/scripts/machines/Macros.conda_macos index 0d866d9a2..4acc4d3ba 100644 --- a/configuration/scripts/machines/Macros.conda_macos +++ b/configuration/scripts/machines/Macros.conda_macos @@ -48,7 +48,7 @@ else endif # Libraries to be passed to the linker -SLIBS := -L$(CONDA_PREFIX)/lib -lnetcdf -lnetcdff +SLIBS := -L$(CONDA_PREFIX)/lib -lnetcdf -lnetcdff -llapack # Necessary flag to compile with OpenMP support ifeq ($(ICE_THREADED), true) diff --git a/configuration/scripts/machines/Macros.daley_intel b/configuration/scripts/machines/Macros.daley_intel index 373c9cc42..897e6e057 100644 --- a/configuration/scripts/machines/Macros.daley_intel +++ b/configuration/scripts/machines/Macros.daley_intel @@ -9,7 +9,7 @@ CFLAGS := -c -O2 -fp-model precise #-xHost FREEFLAGS := -FR -FFLAGS := -fp-model source -convert big_endian -assume byterecl -ftz -traceback +FFLAGS := -fp-model source -convert big_endian -assume byterecl -ftz -traceback -no-wrap-margin #-xHost ifeq ($(ICE_BLDDEBUG), true) diff --git a/configuration/scripts/machines/Macros.fram_intel b/configuration/scripts/machines/Macros.fram_intel index 5804b1475..11faa612d 100644 --- a/configuration/scripts/machines/Macros.fram_intel +++ b/configuration/scripts/machines/Macros.fram_intel @@ -11,7 +11,7 @@ CFLAGS := -c -O2 -fp-model precise FIXEDFLAGS := -132 FREEFLAGS := -FR -FFLAGS := -O2 -fp-model precise -convert big_endian -assume byterecl -ftz -traceback +FFLAGS := -O2 -fp-model precise -convert big_endian 
-assume byterecl -ftz -traceback -no-wrap-margin #-xHost FFLAGS_NOOPT:= -O0 diff --git a/configuration/scripts/machines/Macros.millikan_intel b/configuration/scripts/machines/Macros.millikan_intel index 9b86e442b..4a3b21093 100644 --- a/configuration/scripts/machines/Macros.millikan_intel +++ b/configuration/scripts/machines/Macros.millikan_intel @@ -11,7 +11,7 @@ CFLAGS := -c -O2 -fp-model precise FIXEDFLAGS := -132 FREEFLAGS := -FR -FFLAGS := -fp-model source -convert big_endian -assume byterecl -ftz -traceback +FFLAGS := -fp-model source -convert big_endian -assume byterecl -ftz -traceback -no-wrap-margin #-xHost FFLAGS_NOOPT:= -O0 diff --git a/configuration/scripts/machines/environment.yml b/configuration/scripts/machines/environment.yml index aab90d23c..57bdacfec 100644 --- a/configuration/scripts/machines/environment.yml +++ b/configuration/scripts/machines/environment.yml @@ -8,6 +8,7 @@ dependencies: - netcdf-fortran - openmpi - make + - liblapack # Python dependencies for plotting scripts - numpy - matplotlib-base diff --git a/configuration/scripts/options/set_env.lapack b/configuration/scripts/options/set_env.lapack new file mode 100644 index 000000000..cf52ad1b0 --- /dev/null +++ b/configuration/scripts/options/set_env.lapack @@ -0,0 +1 @@ +setenv ICE_CPPDEFS -DUSE_LAPACK diff --git a/configuration/scripts/options/set_nml.diagimp b/configuration/scripts/options/set_nml.diagimp new file mode 100644 index 000000000..940754157 --- /dev/null +++ b/configuration/scripts/options/set_nml.diagimp @@ -0,0 +1,3 @@ +monitor_nonlin = .true. +monitor_fgmres = .true. +monitor_pgmres = .true. diff --git a/configuration/scripts/options/set_nml.dynanderson b/configuration/scripts/options/set_nml.dynanderson new file mode 100644 index 000000000..566c53a09 --- /dev/null +++ b/configuration/scripts/options/set_nml.dynanderson @@ -0,0 +1,3 @@ +kdyn = 3 +algo_nonlin = 'anderson' +use_mean_vrel = .false. diff --git a/configuration/scripts/options/set_nml.dynpicard b/configuration/scripts/options/set_nml.dynpicard new file mode 100644 index 000000000..b81f4d4e6 --- /dev/null +++ b/configuration/scripts/options/set_nml.dynpicard @@ -0,0 +1,3 @@ +kdyn = 3 +algo_nonlin = 'picard' +use_mean_vrel = .true. diff --git a/configuration/scripts/options/set_nml.nonlin5000 b/configuration/scripts/options/set_nml.nonlin5000 new file mode 100644 index 000000000..f767a3d0d --- /dev/null +++ b/configuration/scripts/options/set_nml.nonlin5000 @@ -0,0 +1 @@ +maxits_nonlin = 5000 diff --git a/configuration/scripts/options/set_nml.run3dt b/configuration/scripts/options/set_nml.run3dt new file mode 100644 index 000000000..102a19d80 --- /dev/null +++ b/configuration/scripts/options/set_nml.run3dt @@ -0,0 +1,6 @@ +npt = 3 +dump_last = .true. +histfreq = '1','x','x','x','x' +hist_avg = .false. +f_uvel = '1' +f_vvel = '1' diff --git a/configuration/scripts/tests/base_suite.ts b/configuration/scripts/tests/base_suite.ts index e96b07622..386c29e41 100755 --- a/configuration/scripts/tests/base_suite.ts +++ b/configuration/scripts/tests/base_suite.ts @@ -50,3 +50,4 @@ restart gx3 4x4 iobinary restart gx3 4x4 histall,precision8,cdf64 smoke gx3 30x1 bgcz,histall smoke gx3 14x2 fsd12,histall +smoke gx3 4x1 dynpicard,medium diff --git a/doc/source/cice_index.rst b/doc/source/cice_index.rst index 1fb73c2d7..8ea16261d 100644 --- a/doc/source/cice_index.rst +++ b/doc/source/cice_index.rst @@ -336,7 +336,7 @@ either Celsius or Kelvin units). 
"kalg", ":math:`\bullet` absorption coefficient for algae", "" "kappav", "visible extinction coefficient in ice, wavelength\ :math:`<`\ 700nm", "1.4 m\ :math:`^{-1}`" "kcatbound", ":math:`\bullet` category boundary formula", "" - "kdyn", ":math:`\bullet` type of dynamics (1 = EVP, 0 = off)", "1" + "kdyn", ":math:`\bullet` type of dynamics (1 = EVP, 2 = EAP, 3 = VP, 0,-1 = off)", "1" "kg_to_g", "kg to g conversion factor", "1000." "kice", "thermal conductivity of fresh ice (:cite:`Bitz99`)", "2.03 W/m/deg" "kitd", ":math:`\bullet` type of itd conversions (0 = delta function, 1 = linear remap)", "1" diff --git a/doc/source/developer_guide/dg_driver.rst b/doc/source/developer_guide/dg_driver.rst index dd560a17c..a10cb319a 100644 --- a/doc/source/developer_guide/dg_driver.rst +++ b/doc/source/developer_guide/dg_driver.rst @@ -55,10 +55,11 @@ The initialize calling sequence looks something like:: call init_zbgc ! vertical biogeochemistry initialization call init_calendar ! initialize some calendar stuff call init_hist (dt) ! initialize output history file + call init_dyn (dt_dyn) ! define dynamics parameters, variables if (kdyn == 2) then - call init_eap (dt_dyn) ! define eap dynamics parameters, variables - else ! for both kdyn = 0 or 1 - call init_evp (dt_dyn) ! define evp dynamics parameters, variables + call init_eap ! define eap dynamics parameters, variables + else if (kdyn == 3) then + call init_vp ! define vp dynamics parameters, variables endif call init_coupler_flux ! initialize fluxes exchanged with coupler call init_thermo_vertical ! initialize vertical thermodynamics diff --git a/doc/source/developer_guide/dg_dynamics.rst b/doc/source/developer_guide/dg_dynamics.rst index 3551763b5..eac19b1f6 100644 --- a/doc/source/developer_guide/dg_dynamics.rst +++ b/doc/source/developer_guide/dg_dynamics.rst @@ -30,13 +30,13 @@ Dynamical Solvers -------------------- The dynamics solvers are found in **cicecore/cicedynB/dynamics/**. A couple of different solvers are -available including EVP, revised EVP, and EAP. The dynamics solver is specified in namelist with the -``kdyn`` variable. ``kdyn=1`` is evp, ``kdyn=2`` is eap, and revised evp requires the ``revised_evp`` -namelist flag be set to true. +available including EVP, revised EVP, EAP and VP. The dynamics solver is specified in namelist with the +``kdyn`` variable. ``kdyn=1`` is evp, ``kdyn=2`` is eap, ``kdyn=3`` is VP and revised EVP requires +the ``revised_evp`` namelist flag be set to true. -Multiple evp solvers are supported thru the namelist flag ``kevp_kernel``. The standard implementation +Multiple EVP solvers are supported thru the namelist flag ``kevp_kernel``. The standard implementation and current default is ``kevp_kernel=0``. In this case, the stress is solved on the regular decomposition -via subcycling and calls to subroutine stress and subroutine stepu with MPI global sums required in each +via subcycling and calls to subroutine ``stress`` and subroutine ``stepu`` with MPI global sums required in each subcycling call. With ``kevp_kernel=2``, the data required to compute the stress is gathered to the root MPI process and the stress calculation is performed on the root task without any MPI global sums. OpenMP parallelism is supported in ``kevp_kernel=2``. 
The solutions with ``kevp_kernel`` set to 0 or 2 will
diff --git a/doc/source/master_list.bib b/doc/source/master_list.bib
index caa93ec06..0b928d012 100644
--- a/doc/source/master_list.bib
+++ b/doc/source/master_list.bib
@@ -59,6 +59,8 @@ @string{GMD
 @string{CRST = {Cold Reg. Sci. Technol.}}
 @string{IJHPCA={Int. J High Perform. Comput. Appl}}
 @string{PTRSA={Philos. Trans. Royal Soc. A}}
+@string{SIAMJCP={SIAM J. Sci. Comput.}}
+
 % **********************************************
@@ -977,6 +979,31 @@ @Article{Tsujino18
   pages = {79-139},
   url = {http://dx.doi.org/10.1016/j.ocemod.2018.07.002}
 }
+@Article{Lemieux08,
+  author = "J.-F. Lemieux and B. Tremblay and S. Thomas and J. Sedláček and L. A. Mysak",
+  title = "{Using the preconditioned Generalized Minimum RESidual (GMRES) method to solve the sea-ice momentum equation}",
+  journal = JGRO,
+  volume = {113},
+  number = {C10},
+  pages = {},
+  keywords = {Sea ice, GMRES, Krylov subspace},
+  doi = {10.1029/2007JC004680},
+  url = {https://agupubs.onlinelibrary.wiley.com/doi/abs/10.1029/2007JC004680},
+  eprint = {https://agupubs.onlinelibrary.wiley.com/doi/pdf/10.1029/2007JC004680},
+  year = {2008}
+}
+@Article{Saad93,
+  author = "Y. Saad",
+  title = "{A Flexible Inner-Outer Preconditioned GMRES Algorithm}",
+  journal = SIAMJCP,
+  volume = {14},
+  number = {2},
+  year = {1993},
+  pages = {461-469},
+  doi = {10.1137/0914028},
+  URL = {https://doi.org/10.1137/0914028}
+}
+
 % **********************************************
 % For new entries, see example entry in BIB_TEMPLATE.txt
 % **********************************************
diff --git a/doc/source/science_guide/sg_dynamics.rst b/doc/source/science_guide/sg_dynamics.rst
index 4c9b6d502..e7f214ff7 100644
--- a/doc/source/science_guide/sg_dynamics.rst
+++ b/doc/source/science_guide/sg_dynamics.rst
@@ -5,15 +5,19 @@ Dynamics
 ========
 
-There are now different rheologies available in the CICE code. The
+There are different approaches in the CICE code for representing sea ice
+rheology and for solving the sea ice momentum equation. The
 elastic-viscous-plastic (EVP) model represents a modification of the
 standard viscous-plastic (VP) model for sea ice dynamics
 :cite:`Hibler79`. The elastic-anisotropic-plastic (EAP) model,
 on the other hand, explicitly accounts for the observed sub-continuum
 anisotropy of the sea ice cover :cite:`Wilchinsky06,Weiss09`. If
-`kdyn` = 1 in the namelist then the EVP rheology is used (module
-**ice\_dyn\_evp.F90**), while `kdyn` = 2 is associated with the EAP
-rheology (**ice\_dyn\_eap.F90**). At times scales associated with the
+``kdyn`` = 1 in the namelist then the EVP model is used (module
+**ice\_dyn\_evp.F90**), while ``kdyn`` = 2 is associated with the EAP
+model (**ice\_dyn\_eap.F90**), and ``kdyn`` = 3 is associated with the
+VP model (**ice\_dyn\_vp.F90**).
+
+At time scales associated with the
 wind forcing, the EVP model reduces to the VP model while the EAP
 model reduces to the anisotropic rheology described in detail in
 :cite:`Wilchinsky06,Tsamados13`. At shorter time scales the
@@ -29,14 +33,23 @@ dynamics in :cite:`Tsamados13`.
 Simulation results and performance of the EVP and EAP models have been
 compared with the VP model and with each other in realistic
 simulations of the Arctic respectively in :cite:`Hunke99` and
-:cite:`Tsamados13`. Here we summarize the equations and
-direct the reader to the above references for details. The numerical
+:cite:`Tsamados13`.
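In terms of code flow, these ``kdyn`` settings select among the three dynamics modules named above. A minimal Fortran sketch of such a dispatch follows; only the ``evp (dt)`` interface appears verbatim in this patch, while the ``eap`` and ``implicit_solver`` entry-point names are assumptions for illustration::

   ! sketch: choose the momentum solver from the kdyn namelist value
   if (kdyn == 1) then
      call evp (dt_dyn)             ! EVP, ice_dyn_evp.F90
   elseif (kdyn == 2) then
      call eap (dt_dyn)             ! EAP, ice_dyn_eap.F90 (name assumed)
   elseif (kdyn == 3) then
      call implicit_solver (dt_dyn) ! VP,  ice_dyn_vp.F90 (name assumed)
   endif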
+ +The EVP numerical implementation in this code release is that of :cite:`Hunke02` and :cite:`Hunke03`, with revisions to the numerical solver as in :cite:`Bouillon13`. The implementation of the EAP sea ice dynamics into CICE is described in detail in :cite:`Tsamados13`. +The VP solver implementation mostly follows :cite:`Lemieux08`, with +FGMRES :cite:`Saad93` as the linear solver and GMRES as the preconditioner. +Note that the VP solver has not yet been tested on the ``tx1`` grid or with +threading enabled. + +Here we summarize the equations and +direct the reader to the above references for details. + .. _momentum: ******** @@ -67,20 +80,36 @@ concentration regions. A careful explanation of the issue and its continuum solution is provided in :cite:`Hunke03` and :cite:`Connolley04`. -The momentum equation is discretized in time as follows, for the classic -EVP approach. First, for clarity, the two components of Equation :eq:`vpmom` are +For clarity, the two components of Equation :eq:`vpmom` are .. math:: \begin{aligned} - m{\partial u\over\partial t} &=& {\partial\sigma_{1j}\over\partial x_j} + \tau_{ax} + + m{\partial u\over\partial t} &= {\partial\sigma_{1j}\over\partial x_j} + \tau_{ax} + a_i c_w \rho_w \left|{\bf U}_w - {\bf u}\right| \left[\left(U_w-u\right)\cos\theta - \left(V_w-v\right)\sin\theta\right] -C_bu +mfv - mg{\partial H_\circ\over\partial x}, \\ - m{\partial v\over\partial t} &=& {\partial\sigma_{2j}\over\partial x_j} + \tau_{ay} + + m{\partial v\over\partial t} &= {\partial\sigma_{2j}\over\partial x_j} + \tau_{ay} + a_i c_w \rho_w \left|{\bf U}_w - {\bf u}\right| \left[\left(U_w-u\right)\sin\theta + \left(V_w-v\right)\cos\theta\right] -C_bv-mfu - mg{\partial H_\circ\over\partial y}. \end{aligned} + :label: momsys + + +A bilinear discretization is used for the stress terms +:math:`\partial\sigma_{ij}/\partial x_j`, +which enables the discrete equations to be derived from the +continuous equations written in curvilinear coordinates. In this +manner, metric terms associated with the curvature of the grid are +incorporated into the discretization explicitly. Details pertaining to +the spatial discretization are found in :cite:`Hunke02`. + +.. _evp-momentum: + +Elastic-Viscous-Plastic +~~~~~~~~~~~~~~~~~~~~~~~ +The momentum equation is discretized in time as follows, for the classic +EVP approach. In the code, :math:`{\tt vrel}=a_i c_w \rho_w\left|{\bf U}_w - {\bf u}^k\right|` and :math:`C_b=T_b \left( \sqrt{(u^k)^2+(v^k)^2}+u_0 \right)^{-1}`, @@ -91,20 +120,20 @@ variables used in the code. .. math:: \underbrace{\left({m\over\Delta t_e}+{\tt vrel} \cos\theta\ + C_b \right)}_{\tt cca} u^{k+1} - \underbrace{\left(mf+{\tt vrel}\sin\theta\right)}_{\tt ccb}v^{k+1} - = \underbrace{{\partial\sigma_{1j}^{k+1}\over\partial x_j}}_{\tt strintx} - + \underbrace{\tau_{ax} - mg{\partial H_\circ\over\partial x} }_{\tt forcex} - + {\tt vrel}\underbrace{\left(U_w\cos\theta-V_w\sin\theta\right)}_{\tt waterx} + {m\over\Delta t_e}u^k, + = &\underbrace{{\partial\sigma_{1j}^{k+1}\over\partial x_j}}_{\tt strintx} + + \underbrace{\tau_{ax} - mg{\partial H_\circ\over\partial x} }_{\tt forcex} \\ + &+ {\tt vrel}\underbrace{\left(U_w\cos\theta-V_w\sin\theta\right)}_{\tt waterx} + {m\over\Delta t_e}u^k, :label: umom .. 
math:: \underbrace{\left(mf+{\tt vrel}\sin\theta\right)}_{\tt ccb} u^{k+1} + \underbrace{\left({m\over\Delta t_e}+{\tt vrel} \cos\theta + C_b \right)}_{\tt cca}v^{k+1} - = \underbrace{{\partial\sigma_{2j}^{k+1}\over\partial x_j}}_{\tt strinty} - + \underbrace{\tau_{ay} - mg{\partial H_\circ\over\partial y} }_{\tt forcey} - + {\tt vrel}\underbrace{\left(U_w\sin\theta+V_w\cos\theta\right)}_{\tt watery} + {m\over\Delta t_e}v^k, + = &\underbrace{{\partial\sigma_{2j}^{k+1}\over\partial x_j}}_{\tt strinty} + + \underbrace{\tau_{ay} - mg{\partial H_\circ\over\partial y} }_{\tt forcey} \\ + &+ {\tt vrel}\underbrace{\left(U_w\sin\theta+V_w\cos\theta\right)}_{\tt watery} + {m\over\Delta t_e}v^k, :label: vmom -and vrel\ :math:`\cdot`\ waterx(y) = taux(y). +and :math:`{\tt vrel}\ \cdot\ {\tt waterx(y)}= {\tt taux(y)}`. We solve this system of equations analytically for :math:`u^{k+1}` and :math:`v^{k+1}`. Define @@ -121,8 +150,8 @@ where :math:`{\bf F} = \nabla\cdot\sigma^{k+1}`. Then .. math:: \begin{aligned} - \left({m\over\Delta t_e} +{\tt vrel}\cos\theta\ + C_b \right)u^{k+1} - \left(mf + {\tt vrel}\sin\theta\right) v^{k+1} &=& \hat{u} \\ - \left(mf + {\tt vrel}\sin\theta\right) u^{k+1} + \left({m\over\Delta t_e} +{\tt vrel}\cos\theta + C_b \right)v^{k+1} &=& \hat{v}.\end{aligned} + \left({m\over\Delta t_e} +{\tt vrel}\cos\theta\ + C_b \right)u^{k+1} - \left(mf + {\tt vrel}\sin\theta\right) v^{k+1} &= \hat{u} \\ + \left(mf + {\tt vrel}\sin\theta\right) u^{k+1} + \left({m\over\Delta t_e} +{\tt vrel}\cos\theta + C_b \right)v^{k+1} &= \hat{v}.\end{aligned} Solving simultaneously for :math:`u^{k+1}` and :math:`v^{k+1}`, @@ -140,10 +169,62 @@ where .. math:: b = mf + {\tt vrel}\sin\theta. :label: cevpb + +.. _vp-momentum: + +Viscous-Plastic +~~~~~~~~~~~~~~~ + +In the VP approach, equation :eq:`momsys` is discretized implicitly using a Backward Euler approach, +and stresses are not computed explicitly: -When the subcycling is finished for each (thermodynamic) time step, the -ice–ocean stress must be constructed from `taux(y)` and the terms -containing `vrel` on the left hand side of the equations. +.. math:: + \begin{align} + m\frac{(u^{n}-u^{n-1})}{\Delta t} &= \frac{\partial \sigma_{1j}^n}{\partial x_j} + - \tau_{w,x}^n + \tau_{b,x}^n + mfv^n + + r_{x}^n, + \\ + m\frac{(v^{n}-v^{n-1})}{\Delta t} &= \frac{\partial \sigma^{n} _{2j}}{\partial x_j} + - \tau_{w,y}^n + \tau_{b,y}^n -mfu^{n} + + r_{y}^n + \end{align} + :label: u_sit + +where :math:`r = (r_x,r_y)` contains all terms that do not depend on the velocities :math:`u^n, v^n` (namely the sea surface tilt and the wind stress). +As the water drag, seabed stress and rheology term depend on the velocity field, the only unknowns in equation :eq:`u_sit` are :math:`u^n` and :math:`v^n`. + +Once discretized in space, equation :eq:`u_sit` leads to a system of :math:`N` nonlinear equations with :math:`N` unknowns that can be concisely written as + +.. math:: + \mathbf{A}(\mathbf{u})\mathbf{u} = \mathbf{b}(\mathbf{u}), + :label: nonlin_sys + +where :math:`\mathbf{A}` is an :math:`N\times N` matrix and :math:`\mathbf{u}` and :math:`\mathbf{b}` are vectors of size :math:`N`. +Note that we have dropped the time level index :math:`n`. +The vector :math:`\mathbf{u}` is formed by stacking first the :math:`u` components, followed by the :math:`v` components of the discretized ice velocity. 
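A minimal Fortran sketch of this stacking convention (all names here are hypothetical; the actual packing code in **ice\_dyn\_vp.F90** may differ)::

   subroutine stack_velocity (ntot, indxi, indxj, uvel, vvel, sol)
      ! pack the 2-D velocity components into the stacked solution
      ! vector of size N = 2*ntot: entries 1..ntot hold u, and
      ! entries ntot+1..2*ntot hold v
      integer,          intent(in)  :: ntot                     ! number of unknowns per component
      integer,          intent(in)  :: indxi(ntot), indxj(ntot) ! compressed i,j indices
      double precision, intent(in)  :: uvel(:,:), vvel(:,:)     ! 2-D velocity components
      double precision, intent(out) :: sol(2*ntot)              ! stacked vector, u then v
      integer :: ij
      do ij = 1, ntot
         sol(ij)        = uvel(indxi(ij), indxj(ij))   ! u components first
         sol(ntot + ij) = vvel(indxi(ij), indxj(ij))   ! then v components
      enddo
   end subroutine stack_velocity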
+The vector :math:`\mathbf{b}` is a function of the velocity vector :math:`\mathbf{u}` because of the water and seabed stress terms as well as parts of the rheology term that depend non-linearly on :math:`\mathbf{u}`. + +The nonlinear system :eq:`nonlin_sys` is solved using a Picard iteration method. +Starting from a previous iterate :math:`\mathbf{u}_{k-1}`, the nonlinear system is linearized by substituting :math:`\mathbf{u}_{k-1}` in the expression of the matrix :math:`\mathbf{A}` and the vector :math:`\mathbf{b}`: + +.. math:: + \mathbf{A}(\mathbf{u}_{k-1})\mathbf{u}_{k} = \mathbf{b}(\mathbf{u}_{k-1}) + :label: picard + +The resulting linear system is solved using the Flexible Generalized Minimum RESidual (FGMRES, :cite:`Saad93`) method and this process is repeated iteratively. + +The maximum number of Picard iterations can be set using the namelist flag ``maxits_nonlin``. +The relative tolerance for the Picard solver can be set using the namelist flag ``reltol_nonlin``. +The Picard iterative process stops when :math:`\left\lVert \mathbf{u}_{k} \right\rVert_2 < {\tt reltol\_nonlin} \cdot \left\lVert\mathbf{u}_{0}\right\rVert_2` or when ``maxits_nonlin`` is reached. + +Parameters for the FGMRES linear solver and the preconditioner can be controlled using additional namelist flags (see :ref:`dynamics_nml`). + +Ice-Ocean stress +~~~~~~~~~~~~~~~~ + +At the end of each (thermodynamic) time step, the +ice–ocean stress must be constructed from :math:`{\tt taux(y)}` and the terms +containing :math:`{\tt vrel}` on the left hand side of the equations. The Hibler-Bryan form for the ice-ocean stress :cite:`Hibler87` is included in **ice\_dyn\_shared.F90** but is currently commented out, @@ -185,7 +266,7 @@ where the :math:`a_i` and :math:`v_i` are the total ice concentrations and ice v ridge(s) reaches the seafloor for a water depth :math:`h_{wu}=\min[h_w(i,j),h_w(i+1,j),h_w(i,j+1),h_w(i+1,j+1)]`. Given the formulation of :math:`C_b` in equation :eq:`Cb`, the seabed stress components are non-zero only when :math:`h_u > h_{cu}`. -The maximum seabed stress depends on the weigth of the ridge +The maximum seabed stress depends on the weight of the ridge above hydrostatic balance and the value of :math:`k_2`. It is, however, the parameter :math:`k_1` that has the most notable impact on the simulated extent of landfast ice. The value of :math:`k_1` can be changed at runtime using the namelist variable ``k1``. The grounding scheme can be turned on or off using the namelist logical basalstress. @@ -207,47 +288,44 @@ For convenience we formulate the stress tensor :math:`\bf \sigma` in terms of :math:`\sigma_1=\sigma_{11}+\sigma_{22}`, :math:`\sigma_2=\sigma_{11}-\sigma_{22}`, and introduce the divergence, :math:`D_D`, and the horizontal tension and shearing -strain rates, :math:`D_T` and :math:`D_S` respectively. - -CICE now outputs the internal ice pressure which is an important field to support navigation in ice-infested water. -The internal ice pressure :math:`(sigP)` is the average of the normal stresses multiplied by :math:`-1` and -is therefore simply equal to :math:`-\sigma_1/2`. - -*Elastic-Viscous-Plastic* - -In the EVP model the internal stress tensor is determined from a -regularized version of the VP constitutive law. Following the approach of :cite:`Konig10` (see also :cite:`Lemieux16`), the -elliptical yield curve can be modified such that the ice has isotropic tensile strength. 
-The tensile strength :math:`T_p` is expressed as a fraction of the ice strength :math:`P`, that is :math:`T_p=k_t P`
-where :math:`k_t` should be set to a value between 0 and 1 (this can be changed at runtime with the namelist parameter ``Ktens``). The constitutive law is therefore
+strain rates, :math:`D_T` and :math:`D_S` respectively:
 
 .. math::
-   {1\over E}{\partial\sigma_1\over\partial t} + {\sigma_1\over 2\zeta}
-   + {P_R(1-k_t)\over 2\zeta} = D_D, \\
-   :label: sig1
+   D_D = \dot{\epsilon}_{11} + \dot{\epsilon}_{22},
 
 .. math::
-   {1\over E}{\partial\sigma_2\over\partial t} + {\sigma_2\over 2\eta} = D_T,
-   :label: sig2
+   D_T = \dot{\epsilon}_{11} - \dot{\epsilon}_{22},
 
 .. math::
-   {1\over E}{\partial\sigma_{12}\over\partial t} + {\sigma_{12}\over
-   2\eta} = {1\over 2}D_S,
-   :label: sig12
+   D_S = 2\dot{\epsilon}_{12},
 
 where
 
 .. math::
-   D_D = \dot{\epsilon}_{11} + \dot{\epsilon}_{22},
+   \dot{\epsilon}_{ij} = {1\over 2}\left({{\partial u_i}\over{\partial x_j}} + {{\partial u_j}\over{\partial x_i}}\right)
 
-.. math::
-   D_T = \dot{\epsilon}_{11} - \dot{\epsilon}_{22},
+CICE can output the internal ice pressure which is an important field to support navigation in ice-infested water.
+The internal ice pressure (``sigP``) is the average of the normal stresses multiplied by :math:`-1` and
+is therefore simply equal to :math:`-\sigma_1/2`.
 
-.. math::
-   D_S = 2\dot{\epsilon}_{12},
+Following the approach of :cite:`Konig10` (see also :cite:`Lemieux16`), the
+elliptical yield curve can be modified such that the ice has isotropic tensile strength.
+The tensile strength :math:`T_p` is expressed as a fraction of the ice strength :math:`P`, that is :math:`T_p=k_t P`
+where :math:`k_t` should be set to a value between 0 and 1 (this can be changed at runtime with the namelist parameter ``Ktens``).
+
+.. _stress-vp:
+
+Viscous-Plastic
+~~~~~~~~~~~~~~~
+
+The VP constitutive law is given by
 
 .. math::
-   \dot{\epsilon}_{ij} = {1\over 2}\left({{\partial u_i}\over{\partial x_j}} + {{\partial u_j}\over{\partial x_i}}\right),
+   \sigma_{ij} = 2 \eta \dot{\epsilon}_{ij} + (\zeta - \eta) D_D \delta_{ij} - P_R(1 - k_t)\frac{\delta_{ij}}{2}
+   :label: vp-const
+
+where :math:`\eta` and :math:`\zeta` are the shear and bulk viscosities.
+An elliptical yield curve is used, with the viscosities given by
 
 .. math::
    \zeta = {P(1+k_t)\over 2\Delta},
@@ -255,14 +333,41 @@ where
 
 .. math::
    \eta = {P(1+k_t)\over {2\Delta e^2}},
 
+where
+
 .. math::
-   \Delta = \left[D_D^2 + {1\over e^2}\left(D_T^2 + D_S^2\right)\right]^{1/2},
+   \Delta = \left[D_D^2 + {1\over e^2}\left(D_T^2 + D_S^2\right)\right]^{1/2}
 
 and :math:`P_R` is a “replacement pressure” (see :cite:`Geiger98`, for
 example), which serves to prevent residual ice motion due to spatial
-variations of :math:`P` when the rates of strain are exactly zero. The ice strength :math:`P`
+variations of :math:`P` when the rates of strain are exactly zero.
+
+The ice strength :math:`P`
 is a function of the ice thickness and concentration
-as it is described in the `Icepack Documentation `_. The parameteter :math:`e` is the ratio of the major and minor axes of the elliptical yield curve, also called the ellipse aspect ratio. It can be changed using the namelist parameter ``e_ratio``.
+as described in the `Icepack Documentation `_. The parameter :math:`e` is the ratio of the major and minor axes of the elliptical yield curve, also called the ellipse aspect ratio. It can be changed using the namelist parameter ``e_ratio``.
_stress-evp: + +Elastic-Viscous-Plastic +~~~~~~~~~~~~~~~~~~~~~~~ + +In the EVP model the internal stress tensor is determined from a +regularized version of the VP constitutive law :eq:`vp-const`. The constitutive law is therefore + +.. math:: + {1\over E}{\partial\sigma_1\over\partial t} + {\sigma_1\over 2\zeta} + + {P_R(1-k_t)\over 2\zeta} = D_D, \\ + :label: sig1 + +.. math:: + {1\over E}{\partial\sigma_2\over\partial t} + {\sigma_2\over 2\eta} = D_T, + :label: sig2 + +.. math:: + {1\over E}{\partial\sigma_{12}\over\partial t} + {\sigma_{12}\over + 2\eta} = {1\over 2}D_S, + :label: sig12 + Viscosities are updated during the subcycling, so that the entire dynamics component is subcycled within the time step, and the elastic @@ -304,15 +409,10 @@ appear explicitly.) Choices of the parameters used to define :math:`E`, :math:`T` and :math:`\Delta t_e` are discussed in Sections :ref:`revp` and :ref:`parameters`. -The bilinear discretization used for the stress terms -:math:`\partial\sigma_{ij}/\partial x_j` in the momentum equation is -now used, which enabled the discrete equations to be derived from the -continuous equations written in curvilinear coordinates. In this -manner, metric terms associated with the curvature of the grid are -incorporated into the discretization explicitly. Details pertaining to -the spatial discretization are found in :cite:`Hunke02`. +.. _stress-eap: -*Elastic-Anisotropic-Plastic* +Elastic-Anisotropic-Plastic +~~~~~~~~~~~~~~~~~~~~~~~~~~~ In the EAP model the internal stress tensor is related to the geometrical properties and orientation of underlying virtual diamond @@ -558,6 +658,6 @@ Introducing another numerical parameter :math:`\alpha=2T \Delta t_e ^{-1}` :cite where as opposed to the classic EVP, the second term in each equation is at iteration :math:`k` :cite:`Bouillon13`. Also, as opposed to the classic EVP, :math:`\Delta t_e` times the number of subcycles (or iterations) does not need to be equal to the advective time step :math:`\Delta t`. Finally, as with the classic EVP approach, the stresses are initialized using the previous time level values. -The revised EVP is activated by setting the namelist parameter `revised\_evp` = true. -In the code :math:`\alpha = arlx` and :math:`\beta = brlx`. The values of :math:`arlx` and :math:`brlx` can be set in the namelist. -It is recommended to use large values of these parameters and to set :math:`arlx=brlx` :cite:`Kimmritz15`. +The revised EVP is activated by setting the namelist parameter ``revised_evp = true``. +In the code :math:`\alpha` is ``arlx`` and :math:`\beta` is ``brlx``. The values of ``arlx`` and ``brlx`` can be set in the namelist. +It is recommended to use large values of these parameters and to set :math:`\alpha=\beta` :cite:`Kimmritz15`. diff --git a/doc/source/user_guide/ug_case_settings.rst b/doc/source/user_guide/ug_case_settings.rst index 032c8b529..227a63663 100644 --- a/doc/source/user_guide/ug_case_settings.rst +++ b/doc/source/user_guide/ug_case_settings.rst @@ -349,6 +349,8 @@ thermo_nml "``sw_dtemp``", "real", "temperature difference from melt to start redistributing", "0.02" "", "", "", "" +.. 
_dynamics_nml:
+
 dynamics_nml
 ~~~~~~~~~~~~~~~~~~~~~~~~~
 
@@ -369,10 +371,13 @@ dynamics_nml
    "", "``zero``", "zero coriolis", ""
    "``Cstar``", "real", "constant in Hibler strength formula", "20"
    "``e_ratio``", "real", "EVP ellipse aspect ratio", "2.0"
+   "``dim_fgmres``", "integer", "maximum number of Arnoldi iterations for FGMRES solver", "50"
+   "``dim_pgmres``", "integer", "maximum number of Arnoldi iterations for PGMRES preconditioner", "5"
    "``kdyn``", "``-1``", "dynamics algorithm OFF", "1"
    "", "``0``", "dynamics OFF", ""
    "", "``1``", "EVP dynamics", ""
    "", "``2``", "EAP dynamics", ""
+   "", "``3``", "VP dynamics", ""
    "``kevp_kernel``", "``0``", "standard 2D EVP memory parallel solver", "0"
    "", "``2``", "1D shared memory solver (not fully validated)", ""
    "``kstrength``", "``0``", "ice strength formulation :cite:`Hibler79`", "1"
@@ -388,9 +393,23 @@ dynamics_nml
    "``Ktens``", "real", "Tensile strength factor (see :cite:`Konig10`)", "0.0"
    "``k1``", "real", "1st free parameter for landfast parameterization", "8.0"
    "``k2``", "real", "2nd free parameter (N/m\ :math:`^3`) for landfast parameterization", "15.0"
+   "``maxits_nonlin``", "integer", "maximum number of nonlinear iterations for VP solver", "1000"
+   "``maxits_fgmres``", "integer", "maximum number of restarts for FGMRES solver", "1"
+   "``maxits_pgmres``", "integer", "maximum number of restarts for PGMRES preconditioner", "1"
+   "``monitor_nonlin``", "logical", "write velocity norm at each nonlinear iteration", "``.false.``"
+   "``monitor_fgmres``", "logical", "write velocity norm at each FGMRES iteration", "``.false.``"
+   "``monitor_pgmres``", "logical", "write velocity norm at each PGMRES iteration", "``.false.``"
    "``mu_rdg``", "real", "e-folding scale of ridged ice for ``krdg_partic`` = 1 in m^0.5", "3.0"
    "``ndte``", "integer", "number of EVP subcycles", "120"
+   "``ortho_type``", "``mgs``", "Use modified Gram-Schmidt in FGMRES solver", "``mgs``"
+   "", "``cgs``", "Use classical Gram-Schmidt in FGMRES solver", ""
+   "``precond``", "``pgmres``", "Use GMRES as preconditioner for FGMRES solver", "``pgmres``"
+   "", "``diag``", "Use Jacobi preconditioner for the FGMRES solver", ""
+   "", "``ident``", "Don't use a preconditioner for the FGMRES solver", ""
    "``Pstar``", "real", "constant in Hibler strength formula (N/m\ :math:`^2`)", "2.75e4"
+   "``reltol_nonlin``", "real", "relative tolerance for nonlinear solver", "1e-8"
+   "``reltol_fgmres``", "real", "relative tolerance for FGMRES solver", "1e-2"
+   "``reltol_pgmres``", "real", "relative tolerance for PGMRES preconditioner", "1e-6"
    "``revised_evp``", "logical", "use revised EVP formulation", "``.false.``"
    "``ssh_stress``", "``coupled``", "computed from coupled sea surface height gradient", "``geostrophic``"
    "", "``geostropic``", "computed from ocean velocity", ""
diff --git a/icepack b/icepack
index 3b1ac0187..db2a47789 160000
--- a/icepack
+++ b/icepack
@@ -1 +1 @@
-Subproject commit 3b1ac0187ce30fbb950276b979376b041ca4467b
+Subproject commit db2a4778970ae340b6bdd62eb03f60cd37a13f75

From 2eca569055a0bcbd0f72859bc7d71ba802231ffe Mon Sep 17 00:00:00 2001
From: "denise.worthen"
Date: Mon, 5 Apr 2021 08:29:35 -0400
Subject: [PATCH 33/90] update icepack

---
 .gitmodules | 3 +--
 icepack     | 2 +-
 2 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/.gitmodules b/.gitmodules
index b84a13b43..8a773d230 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,4 +1,3 @@
 [submodule "icepack"]
 path = icepack
- #url = https://github.com/NOAA-EMC/Icepack
- url = https://github.com/DeniseWorthen/Icepack
+ url = 
https://github.com/NOAA-EMC/Icepack diff --git a/icepack b/icepack index 1fbea24e1..8bc17e1ee 160000 --- a/icepack +++ b/icepack @@ -1 +1 @@ -Subproject commit 1fbea24e1c364a02dea7068e977b4e1355aef917 +Subproject commit 8bc17e1eee235fb0e26857119175990aa0102613 From d8fb6d915b7d0c2e144d8b70e59639c2cfd061f2 Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Wed, 2 Jun 2021 16:57:19 -0400 Subject: [PATCH 34/90] switch icepack branches * update to icepack master but set abort flag in ITD routine to false --- .gitmodules | 2 +- icepack | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitmodules b/.gitmodules index 472a87b2e..a707591c3 100644 --- a/.gitmodules +++ b/.gitmodules @@ -2,4 +2,4 @@ path = icepack #url = https://github.com/NOAA-EMC/Icepack url = https://github.com/DeniseWorthen/Icepack - branch = feature/updcice + branch = feature/icepack_noabort diff --git a/icepack b/icepack index 5490d3369..2845c94d0 160000 --- a/icepack +++ b/icepack @@ -1 +1 @@ -Subproject commit 5490d3369238d32e463ff153bf34390ec54c4d4b +Subproject commit 2845c94d0b44bed5f5b7e7857fd90ca5c00df50e From 9a76541edadbb02910cbc88290484a32e4a7887b Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Fri, 4 Jun 2021 16:01:59 -0400 Subject: [PATCH 35/90] update icepack --- .gitmodules | 4 +--- icepack | 2 +- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/.gitmodules b/.gitmodules index a707591c3..8a773d230 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,5 +1,3 @@ [submodule "icepack"] path = icepack - #url = https://github.com/NOAA-EMC/Icepack - url = https://github.com/DeniseWorthen/Icepack - branch = feature/icepack_noabort + url = https://github.com/NOAA-EMC/Icepack diff --git a/icepack b/icepack index 2845c94d0..9a7e22089 160000 --- a/icepack +++ b/icepack @@ -1 +1 @@ -Subproject commit 2845c94d0b44bed5f5b7e7857fd90ca5c00df50e +Subproject commit 9a7e220890f70d2654314cbb023b9137f15d6327 From 519d3392d515ec3ff668a50974774c942222367a Mon Sep 17 00:00:00 2001 From: Denise Worthen Date: Fri, 4 Jun 2021 16:41:08 -0400 Subject: [PATCH 36/90] Update CICE to latest Consortium master (#26) update CICE and Icepack * changes the criteria for aborting ice for thermo-conservation errors * updates the time manager * fixes two bugs in ice_therm_mushy * updates Icepack to Consortium master w/ flip of abort flag for troublesome IC cases --- .github/workflows/test-cice.yml | 8 +- cice.setup | 14 +- .../cicedynB/analysis/ice_diagnostics.F90 | 76 +- cicecore/cicedynB/analysis/ice_history.F90 | 8 +- .../cicedynB/analysis/ice_history_shared.F90 | 20 +- cicecore/cicedynB/dynamics/ice_dyn_evp.F90 | 3 +- .../dynamics/ice_transport_driver.F90 | 36 +- cicecore/cicedynB/general/ice_flux.F90 | 11 +- cicecore/cicedynB/general/ice_forcing.F90 | 788 +++++++----- cicecore/cicedynB/general/ice_forcing_bgc.F90 | 44 +- cicecore/cicedynB/general/ice_init.F90 | 54 +- cicecore/cicedynB/general/ice_step_mod.F90 | 41 +- .../infrastructure/comm/mpi/ice_boundary.F90 | 240 ++-- .../infrastructure/comm/mpi/ice_timers.F90 | 2 + .../comm/serial/ice_boundary.F90 | 226 ++-- .../infrastructure/comm/serial/ice_timers.F90 | 6 +- .../cicedynB/infrastructure/ice_blocks.F90 | 17 +- .../cicedynB/infrastructure/ice_domain.F90 | 63 +- cicecore/cicedynB/infrastructure/ice_grid.F90 | 78 +- .../infrastructure/ice_read_write.F90 | 31 +- .../infrastructure/ice_restart_driver.F90 | 20 +- .../cicedynB/infrastructure/ice_restoring.F90 | 5 + .../io/io_binary/ice_restart.F90 | 81 +- .../io/io_netcdf/ice_history_write.F90 | 16 +- 
.../io/io_netcdf/ice_restart.F90 | 64 +- .../io/io_pio2/ice_history_write.F90 | 15 +- .../infrastructure/io/io_pio2/ice_restart.F90 | 74 +- cicecore/drivers/direct/hadgem3/CICE.F90 | 37 - .../drivers/direct/hadgem3/CICE_FinalMod.F90 | 31 - cicecore/drivers/mct/cesm1/CICE_FinalMod.F90 | 31 - cicecore/drivers/mct/cesm1/CICE_InitMod.F90 | 8 +- cicecore/drivers/mct/cesm1/CICE_RunMod.F90 | 16 +- cicecore/drivers/mct/cesm1/ice_comp_esmf.F90 | 27 +- cicecore/drivers/mct/cesm1/ice_comp_mct.F90 | 51 +- .../drivers/mct/cesm1/ice_prescribed_mod.F90 | 2 +- .../drivers/nuopc/cmeps/CICE_FinalMod.F90 | 31 - cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 | 12 +- cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 | 14 +- .../drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 40 +- .../nuopc/cmeps/ice_prescribed_mod.F90 | 2 +- cicecore/drivers/nuopc/dmi/CICE.F90 | 36 - cicecore/drivers/nuopc/dmi/CICE_FinalMod.F90 | 43 +- cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 | 27 +- cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 | 34 +- cicecore/drivers/standalone/cice/CICE.F90 | 36 - .../drivers/standalone/cice/CICE_FinalMod.F90 | 31 - .../drivers/standalone/cice/CICE_InitMod.F90 | 21 +- .../drivers/standalone/cice/CICE_RunMod.F90 | 80 +- .../standalone/cice/CICE_RunMod.F90_debug | 704 ----------- cicecore/drivers/unittest/calchk/calchk.F90 | 588 +++++++++ .../unittest/helloworld/helloworld.F90 | 8 + cicecore/shared/ice_calendar.F90 | 1088 +++++++++++------ cicecore/shared/ice_distribution.F90 | 71 +- cicecore/shared/ice_init_column.F90 | 12 +- cicecore/shared/ice_spacecurve.F90 | 446 ++++--- cicecore/version.txt | 2 +- configuration/scripts/Makefile | 20 +- configuration/scripts/cice.batch.csh | 27 +- configuration/scripts/cice.build | 8 +- configuration/scripts/cice.launch.csh | 12 + configuration/scripts/cice.run.setup.csh | 12 +- configuration/scripts/cice.settings | 1 + configuration/scripts/cice_decomp.csh | 17 + configuration/scripts/ice_in | 17 +- .../scripts/machines/Macros.banting_intel | 2 +- .../scripts/machines/Macros.cheyenne_gnu | 1 + .../scripts/machines/Macros.cheyenne_intel | 4 +- .../scripts/machines/Macros.compy_intel | 44 + .../scripts/machines/Macros.conda_macos | 1 + .../scripts/machines/Macros.daley_intel | 2 +- .../scripts/machines/Macros.gaffney_intel | 2 +- .../scripts/machines/Macros.koehr_intel | 2 +- .../scripts/machines/Macros.mustang_intel18 | 1 + .../scripts/machines/Macros.mustang_intel19 | 1 + .../scripts/machines/Macros.mustang_intel20 | 1 + .../scripts/machines/Macros.onyx_intel | 1 + .../scripts/machines/env.cheyenne_gnu | 3 + .../scripts/machines/env.cheyenne_intel | 3 + .../scripts/machines/env.cheyenne_pgi | 3 + .../scripts/machines/env.compy_intel | 42 + configuration/scripts/options/set_env.calchk | 2 + .../scripts/options/set_env.helloworld | 2 + configuration/scripts/options/set_nml.alt01 | 2 +- configuration/scripts/options/set_nml.alt03 | 1 + configuration/scripts/options/set_nml.alt05 | 3 - configuration/scripts/options/set_nml.alt06 | 5 + configuration/scripts/options/set_nml.bgcz | 4 +- configuration/scripts/options/set_nml.bigdiag | 8 + configuration/scripts/options/set_nml.box2001 | 1 + configuration/scripts/options/set_nml.boxadv | 2 + .../{set_nml.boxdyn => set_nml.boxnodyn} | 3 + .../scripts/options/set_nml.boxrestore | 2 + .../scripts/options/set_nml.boxslotcyl | 2 + .../scripts/options/set_nml.debugblocks | 1 + .../scripts/options/set_nml.dspiralcenter | 1 + .../scripts/options/set_nml.dwghtfile | 3 + configuration/scripts/options/set_nml.gbox180 | 4 + 
configuration/scripts/options/set_nml.gx1 | 4 +- configuration/scripts/options/set_nml.gx1apr | 5 + .../scripts/options/set_nml.gx1coreii | 1 + configuration/scripts/options/set_nml.gx1prod | 23 +- configuration/scripts/options/set_nml.gx3sep2 | 6 + configuration/scripts/options/set_nml.ml | 7 + .../scripts/options/set_nml.run10day | 3 +- configuration/scripts/options/set_nml.run1day | 3 +- .../scripts/options/set_nml.run1year | 3 +- configuration/scripts/options/set_nml.run2day | 3 +- configuration/scripts/options/set_nml.run3day | 3 +- configuration/scripts/options/set_nml.run3dt | 1 + configuration/scripts/options/set_nml.run5day | 3 +- .../scripts/options/set_nml.run60day | 3 +- .../scripts/options/set_nml.run90day | 3 +- .../scripts/options/set_nml.seabedLKD | 6 + .../scripts/options/set_nml.seabedprob | 6 + .../scripts/options/test_nml.restart1 | 3 +- .../scripts/options/test_nml.restart2 | 3 +- configuration/scripts/tests/base_suite.ts | 13 +- configuration/scripts/tests/baseline.script | 102 +- configuration/scripts/tests/comparelog.csh | 34 +- configuration/scripts/tests/decomp_suite.ts | 53 +- configuration/scripts/tests/io_suite.ts | 6 + .../scripts/tests/lcov_modify_source.sh | 44 + configuration/scripts/tests/nothread_suite.ts | 6 +- configuration/scripts/tests/quick_suite.ts | 2 +- .../scripts/tests/report_results.csh | 10 +- .../scripts/tests/test_unittest.script | 24 + configuration/scripts/tests/unittest_suite.ts | 4 + .../convert_restarts.f90 | 0 .../interp_jra55_ncdf_bilinear.py | 441 +++++++ .../tools/jra55_datasets/make_forcing.csh | 49 + doc/source/cice_index.rst | 31 +- doc/source/conf.py | 4 +- doc/source/developer_guide/dg_dynamics.rst | 7 +- doc/source/developer_guide/dg_forcing.rst | 4 +- doc/source/developer_guide/dg_scripts.rst | 24 +- doc/source/developer_guide/dg_tools.rst | 150 +++ doc/source/developer_guide/index.rst | 1 + doc/source/user_guide/ug_case_settings.rst | 33 +- doc/source/user_guide/ug_implementation.rst | 133 +- doc/source/user_guide/ug_running.rst | 35 +- doc/source/user_guide/ug_testing.rst | 66 +- doc/source/user_guide/ug_troubleshooting.rst | 25 +- icepack | 2 +- 143 files changed, 4504 insertions(+), 2731 deletions(-) mode change 100644 => 100755 cicecore/cicedynB/general/ice_forcing.F90 delete mode 100644 cicecore/drivers/standalone/cice/CICE_RunMod.F90_debug create mode 100644 cicecore/drivers/unittest/calchk/calchk.F90 create mode 100644 cicecore/drivers/unittest/helloworld/helloworld.F90 create mode 100644 configuration/scripts/machines/Macros.compy_intel create mode 100755 configuration/scripts/machines/env.compy_intel create mode 100644 configuration/scripts/options/set_env.calchk create mode 100644 configuration/scripts/options/set_env.helloworld create mode 100644 configuration/scripts/options/set_nml.alt06 create mode 100644 configuration/scripts/options/set_nml.bigdiag rename configuration/scripts/options/{set_nml.boxdyn => set_nml.boxnodyn} (88%) create mode 100644 configuration/scripts/options/set_nml.debugblocks create mode 100644 configuration/scripts/options/set_nml.dspiralcenter create mode 100644 configuration/scripts/options/set_nml.dwghtfile create mode 100644 configuration/scripts/options/set_nml.gbox180 create mode 100644 configuration/scripts/options/set_nml.gx1apr create mode 100644 configuration/scripts/options/set_nml.gx3sep2 create mode 100644 configuration/scripts/options/set_nml.ml create mode 100644 configuration/scripts/options/set_nml.seabedLKD create mode 100644 
configuration/scripts/options/set_nml.seabedprob mode change 100755 => 100644 configuration/scripts/tests/base_suite.ts mode change 100755 => 100644 configuration/scripts/tests/io_suite.ts create mode 100755 configuration/scripts/tests/lcov_modify_source.sh create mode 100644 configuration/scripts/tests/test_unittest.script create mode 100644 configuration/scripts/tests/unittest_suite.ts rename configuration/tools/{ => cice4_restart_conversion}/convert_restarts.f90 (100%) create mode 100755 configuration/tools/jra55_datasets/interp_jra55_ncdf_bilinear.py create mode 100755 configuration/tools/jra55_datasets/make_forcing.csh create mode 100644 doc/source/developer_guide/dg_tools.rst diff --git a/.github/workflows/test-cice.yml b/.github/workflows/test-cice.yml index 1fdd8188d..32e784564 100644 --- a/.github/workflows/test-cice.yml +++ b/.github/workflows/test-cice.yml @@ -17,7 +17,7 @@ on: defaults: run: - shell: /bin/csh {0} + shell: /bin/csh -e {0} jobs: build: @@ -104,9 +104,9 @@ jobs: - name: download input data run: | cd $HOME/cice-dirs/input - wget https://zenodo.org/record/3728358/files/CICE_data_gx3_grid_ic-20200320.tar.gz && tar xvfz CICE_data_gx3_grid_ic-20200320.tar.gz - wget https://zenodo.org/record/3728362/files/CICE_data_gx3_forcing_NCAR_bulk-20200320.tar.gz && tar xvfz CICE_data_gx3_forcing_NCAR_bulk-20200320.tar.gz - wget https://zenodo.org/record/3728364/files/CICE_data_gx3_forcing_JRA55-20200320.tar.gz && tar xvfz CICE_data_gx3_forcing_JRA55-20200320.tar.gz + wget --progress=dot:giga https://zenodo.org/record/3728358/files/CICE_data_gx3_grid_ic-20200320.tar.gz && tar xvfz CICE_data_gx3_grid_ic-20200320.tar.gz + wget --progress=dot:giga https://zenodo.org/record/3728362/files/CICE_data_gx3_forcing_NCAR_bulk-20200320.tar.gz && tar xvfz CICE_data_gx3_forcing_NCAR_bulk-20200320.tar.gz + wget --progress=dot:giga https://zenodo.org/record/3728364/files/CICE_data_gx3_forcing_JRA55-20200320.tar.gz && tar xvfz CICE_data_gx3_forcing_JRA55-20200320.tar.gz pwd ls -alR # - name: run case diff --git a/cice.setup b/cice.setup index 3efe94827..8dc46005a 100755 --- a/cice.setup +++ b/cice.setup @@ -390,6 +390,18 @@ if ((${dosuite} == 1 || ${dotest} == 1) && ${testid} == ${spval}) then exit -1 endif +# This creates a new sandbox and modifies the source code for "improved" lcov analysis +# Turn this if block off if you don't want coverage to do that +if ($coverage == 1) then + set sandbox_lcov = ${ICE_SANDBOX}/../cice_lcov_${sdate}-${stime} + cp -p -r ${ICE_SANDBOX} ${sandbox_lcov} + echo "shifting to sandbox = ${sandbox_lcov}" + set ICE_SANDBOX = ${sandbox_lcov} + set ICE_SCRIPTS = "${ICE_SANDBOX}/configuration/scripts" + cd ${ICE_SANDBOX} + ${ICE_SCRIPTS}/tests/lcov_modify_source.sh +endif + #--------------------------------------------------------------------- # Setup tsfile and test suite support stuff @@ -1094,7 +1106,7 @@ cd ${testname_base} source ./cice.settings if (\${dobuild} == true) then if (\${doreuse} == true) then - set ciceexe = "../ciceexe.\${ICE_ENVNAME}.\${ICE_COMMDIR}.\${ICE_BLDDEBUG}.\${ICE_THREADED}.\${ICE_IOTYPE}" + set ciceexe = "../ciceexe.\${ICE_TARGET}.\${ICE_ENVNAME}.\${ICE_COMMDIR}.\${ICE_BLDDEBUG}.\${ICE_THREADED}.\${ICE_IOTYPE}" ./cice.build --exe \${ciceexe} if !(-e \${ciceexe}) cp -p \${ICE_RUNDIR}/cice \${ciceexe} else diff --git a/cicecore/cicedynB/analysis/ice_diagnostics.F90 b/cicecore/cicedynB/analysis/ice_diagnostics.F90 index cff544cd4..3eaf9d057 100644 --- a/cicecore/cicedynB/analysis/ice_diagnostics.F90 +++ 
b/cicecore/cicedynB/analysis/ice_diagnostics.F90 @@ -25,9 +25,8 @@ module ice_diagnostics implicit none private - public :: runtime_diags, init_mass_diags, init_diags, & - print_state, print_points_state, diagnostic_abort - + public :: runtime_diags, init_mass_diags, init_diags, debug_ice, & + print_state, diagnostic_abort ! diagnostic output file character (len=char_len), public :: diag_file @@ -35,9 +34,13 @@ module ice_diagnostics ! point print data logical (kind=log_kind), public :: & + debug_model , & ! if true, debug model at high level print_points , & ! if true, print point data print_global ! if true, print global data + integer (kind=int_kind), public :: & + debug_model_step = 999999999 ! begin printing at istep1=debug_model_step + integer (kind=int_kind), parameter, public :: & npnt = 2 ! total number of points to be printed @@ -87,16 +90,6 @@ module ice_diagnostics totaeron , & ! total aerosol mass totaeros ! total aerosol mass - ! printing info for routine print_state - ! iblkp, ip, jp, mtask identify the grid cell to print -! character (char_len) :: plabel - integer (kind=int_kind), parameter, public :: & - check_step = 999999999, & ! begin printing at istep1=check_step - iblkp = 1, & ! block number - ip = 72, & ! i index - jp = 11, & ! j index - mtask = 0 ! my_task - !======================================================================= contains @@ -1525,20 +1518,39 @@ end subroutine init_diags !======================================================================= -! This routine is useful for debugging. -! Calls to it should be inserted in the form (after thermo, for example) -! do iblk = 1, nblocks -! do j=jlo,jhi -! do i=ilo,ihi -! plabel = 'post thermo' -! if (istep1 >= check_step .and. iblk==iblkp .and i==ip & -! .and. j==jp .and. my_task == mtask) & -! call print_state(plabel,i,j,iblk) -! enddo -! enddo +! This routine is useful for debugging +! author Elizabeth C. Hunke, LANL + + subroutine debug_ice(iblk, plabeld) + + use ice_kinds_mod + use ice_calendar, only: istep1 + use ice_communicate, only: my_task + use ice_blocks, only: nx_block, ny_block + + character (char_len), intent(in) :: plabeld + integer (kind=int_kind), intent(in) :: iblk + + ! local + integer (kind=int_kind) :: i, j, m + character(len=*), parameter :: subname='(debug_ice)' + +! tcraig, do this only on one point, the first point +! do m = 1, npnt + m = 1 + if (istep1 >= debug_model_step .and. & + iblk == pbloc(m) .and. my_task == pmloc(m)) then + i = piloc(m) + j = pjloc(m) + call print_state(plabeld,i,j,iblk) + endif ! enddo -! -! 'use ice_diagnostics' may need to be inserted also + + end subroutine debug_ice + +!======================================================================= + +! This routine is useful for debugging. ! author: Elizabeth C. Hunke, LANL subroutine print_state(plabel,i,j,iblk) @@ -1587,7 +1599,7 @@ subroutine print_state(plabel,i,j,iblk) this_block = get_block(blocks_ice(iblk),iblk) - write(nu_diag,*) plabel + write(nu_diag,*) subname,plabel write(nu_diag,*) 'istep1, my_task, i, j, iblk:', & istep1, my_task, i, j, iblk write(nu_diag,*) 'Global i and j:', & @@ -1699,16 +1711,14 @@ subroutine print_state(plabel,i,j,iblk) write(nu_diag,*) ' evap = ',evap (i,j,iblk) write(nu_diag,*) ' flwout = ',flwout(i,j,iblk) write(nu_diag,*) ' ' + call flush_fileunit(nu_diag) end subroutine print_state !======================================================================= +#ifdef UNDEPRECATE_print_points_state ! This routine is useful for debugging. -! 
Calls can be inserted anywhere and it will print info on print_points points -! call print_points_state(plabel) -! -! 'use ice_diagnostics' may need to be inserted also subroutine print_points_state(plabel,ilabel) @@ -1764,6 +1774,7 @@ subroutine print_points_state(plabel,ilabel) write(llabel,'(a)') 'pps:'//trim(llabel) endif + write(nu_diag,*) subname write(nu_diag,*) trim(llabel),'istep1, my_task, i, j, iblk=', & istep1, my_task, i, j, iblk write(nu_diag,*) trim(llabel),'Global i and j=', & @@ -1842,12 +1853,13 @@ subroutine print_points_state(plabel,ilabel) write(nu_diag,*) ' evap = ',evap (i,j,iblk) write(nu_diag,*) ' flwout = ',flwout(i,j,iblk) write(nu_diag,*) ' ' + call flush_fileunit(nu_diag) endif ! my_task enddo ! ncnt end subroutine print_points_state - +#endif !======================================================================= ! prints error information prior to aborting diff --git a/cicecore/cicedynB/analysis/ice_history.F90 b/cicecore/cicedynB/analysis/ice_history.F90 index 1aa2515a4..f91562449 100644 --- a/cicecore/cicedynB/analysis/ice_history.F90 +++ b/cicecore/cicedynB/analysis/ice_history.F90 @@ -1713,7 +1713,7 @@ subroutine accum_hist (dt) use ice_domain_size, only: nfsd use ice_grid, only: tmask, lmask_n, lmask_s, dxu, dyu use ice_calendar, only: new_year, write_history, & - write_ic, time, histfreq, nstreams, month, & + write_ic, timesecs, histfreq, nstreams, mmonth, & new_month use ice_dyn_eap, only: a11, a12, e11, e12, e22, s11, s12, s22, & yieldstress11, yieldstress12, yieldstress22 @@ -1864,7 +1864,7 @@ subroutine accum_hist (dt) avgct(ns) = avgct(ns) + c1 ! if (avgct(ns) == c1) time_beg(ns) = (time-dt)/int(secday) if (avgct(ns) == c1) then - time_beg(ns) = (time-dt)/int(secday) + time_beg(ns) = (timesecs-dt)/int(secday) time_beg(ns) = real(time_beg(ns),kind=real_kind) endif endif @@ -3966,7 +3966,7 @@ subroutine accum_hist (dt) enddo ! iblk !$OMP END PARALLEL DO - time_end(ns) = time/int(secday) + time_end(ns) = timesecs/int(secday) time_end(ns) = real(time_end(ns),kind=real_kind) !--------------------------------------------------------------- @@ -4057,7 +4057,7 @@ subroutine accum_hist (dt) enddo endif ! new_year - if ( (month .eq. 7) .and. new_month ) then + if ( (mmonth .eq. 7) .and. new_month ) then do j=jlo,jhi do i=ilo,ihi ! reset SH Jul 1 diff --git a/cicecore/cicedynB/analysis/ice_history_shared.F90 b/cicecore/cicedynB/analysis/ice_history_shared.F90 index ce177ad1e..52d268990 100644 --- a/cicecore/cicedynB/analysis/ice_history_shared.F90 +++ b/cicecore/cicedynB/analysis/ice_history_shared.F90 @@ -653,9 +653,9 @@ module ice_history_shared subroutine construct_filename(ncfile,suffix,ns) - use ice_calendar, only: sec, nyr, month, daymo, & + use ice_calendar, only: msec, myear, mmonth, daymo, & mday, write_ic, histfreq, histfreq_n, & - year_init, new_year, new_month, new_day, & + new_year, new_month, new_day, & dt use ice_restart_shared, only: lenstr @@ -667,12 +667,12 @@ subroutine construct_filename(ncfile,suffix,ns) character (len=1) :: cstream character(len=*), parameter :: subname = '(construct_filename)' - iyear = nyr + year_init - 1 ! set year_init=1 in ice_in to get iyear=nyr - imonth = month + iyear = myear + imonth = mmonth iday = mday - isec = sec - dt + isec = msec - dt - if (write_ic) isec = sec + if (write_ic) isec = msec ! 
construct filename if (write_ic) then write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & @@ -688,7 +688,7 @@ subroutine construct_filename(ncfile,suffix,ns) imonth = 12 iday = daymo(imonth) elseif (new_month) then - imonth = month - 1 + imonth = mmonth - 1 iday = daymo(imonth) elseif (new_day) then iday = iday - 1 @@ -703,7 +703,7 @@ subroutine construct_filename(ncfile,suffix,ns) if (histfreq(ns) == '1') then ! instantaneous, write every dt write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & history_file(1:lenstr(history_file))//trim(cstream),'_inst.', & - iyear,'-',imonth,'-',iday,'-',sec,'.',suffix + iyear,'-',imonth,'-',iday,'-',msec,'.',suffix elseif (hist_avg) then ! write averaged data @@ -714,7 +714,7 @@ subroutine construct_filename(ncfile,suffix,ns) elseif (histfreq(ns) == 'h'.or.histfreq(ns) == 'H') then ! hourly write(ncfile,'(a,a,i2.2,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & history_file(1:lenstr(history_file))//trim(cstream),'_', & - histfreq_n(ns),'h.',iyear,'-',imonth,'-',iday,'-',sec,'.',suffix + histfreq_n(ns),'h.',iyear,'-',imonth,'-',iday,'-',msec,'.',suffix elseif (histfreq(ns) == 'm'.or.histfreq(ns) == 'M') then ! monthly write(ncfile,'(a,a,i4.4,a,i2.2,a,a)') & history_file(1:lenstr(history_file))//trim(cstream),'.', & @@ -728,7 +728,7 @@ subroutine construct_filename(ncfile,suffix,ns) else ! instantaneous with histfreq > dt write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & history_file(1:lenstr(history_file)),'_inst.', & - iyear,'-',imonth,'-',iday,'-',sec,'.',suffix + iyear,'-',imonth,'-',iday,'-',msec,'.',suffix endif endif diff --git a/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 b/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 index d8ce42681..2206e0de7 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 @@ -361,8 +361,7 @@ subroutine evp (dt) first_time = .false. endif if (trim(grid_type) == 'tripole') then - call abort_ice(trim(subname)//' & - & Kernel not tested on tripole grid. Set kevp_kernel=0') + call abort_ice(trim(subname)//' Kernel not tested on tripole grid. Set kevp_kernel=0') endif call ice_dyn_evp_1d_copyin( & nx_block,ny_block,nblocks,nx_global+2*nghost,ny_global+2*nghost, & diff --git a/cicecore/cicedynB/dynamics/ice_transport_driver.F90 b/cicecore/cicedynB/dynamics/ice_transport_driver.F90 index c500e1631..e3da6390b 100644 --- a/cicecore/cicedynB/dynamics/ice_transport_driver.F90 +++ b/cicecore/cicedynB/dynamics/ice_transport_driver.F90 @@ -272,7 +272,7 @@ subroutine transport_remap (dt) trmask ! = 1. if tracer is present, = 0. otherwise logical (kind=log_kind) :: & - l_stop ! if true, abort the model + ckflag ! if true, abort the model integer (kind=int_kind) :: & istop, jstop ! indices of grid cell where model aborts @@ -327,7 +327,7 @@ subroutine transport_remap (dt) !---! Initialize, update ghost cells, fill tracer arrays. !---!------------------------------------------------------------------- - l_stop = .false. + ckflag = .false. 
istop = 0 jstop = 0 @@ -605,10 +605,10 @@ subroutine transport_remap (dt) if (my_task == master_task) then fieldid = subname//':000' - call global_conservation (l_stop, fieldid, & + call global_conservation (ckflag, fieldid, & asum_init(0), asum_final(0)) - if (l_stop) then + if (ckflag) then write (nu_diag,*) 'istep1, my_task =', & istep1, my_task write (nu_diag,*) 'transport: conservation error, cat 0' @@ -618,11 +618,11 @@ subroutine transport_remap (dt) do n = 1, ncat write(fieldid,'(a,i3.3)') subname,n call global_conservation & - (l_stop, fieldid, & + (ckflag, fieldid, & asum_init(n), asum_final(n), & atsum_init(:,n), atsum_final(:,n)) - if (l_stop) then + if (ckflag) then write (nu_diag,*) 'istep1, my_task, cat =', & istep1, my_task, n write (nu_diag,*) 'transport: conservation error, cat ',n @@ -639,7 +639,7 @@ subroutine transport_remap (dt) !------------------------------------------------------------------- if (l_monotonicity_check) then - !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block,n,l_stop,istop,jstop) + !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block,n,ckflag,istop,jstop) do iblk = 1, nblocks this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo @@ -647,7 +647,7 @@ subroutine transport_remap (dt) jlo = this_block%jlo jhi = this_block%jhi - l_stop = .false. + ckflag = .false. istop = 0 jstop = 0 @@ -657,10 +657,10 @@ subroutine transport_remap (dt) ilo, ihi, jlo, jhi, & tmin(:,:,:,n,iblk), tmax(:,:,:,n,iblk), & aim (:,:, n,iblk), trm (:,:,:,n,iblk), & - l_stop, & + ckflag, & istop, jstop) - if (l_stop) then + if (ckflag) then write (nu_diag,*) 'istep1, my_task, iblk, cat =', & istep1, my_task, iblk, n call abort_ice(subname//'ERROR: monotonicity error') @@ -1083,7 +1083,7 @@ end subroutine tracers_to_state ! ! author William H. Lipscomb, LANL - subroutine global_conservation (l_stop, fieldid, & + subroutine global_conservation (ckflag, fieldid, & asum_init, asum_final, & atsum_init, atsum_final) @@ -1099,7 +1099,7 @@ subroutine global_conservation (l_stop, fieldid, & atsum_final ! final global ice area*tracer logical (kind=log_kind), intent(inout) :: & - l_stop ! if true, abort on return + ckflag ! if true, abort on return ! local variables @@ -1120,7 +1120,7 @@ subroutine global_conservation (l_stop, fieldid, & if (asum_init > puny) then diff = asum_final - asum_init if (abs(diff/asum_init) > puny) then - l_stop = .true. + ckflag = .true. write (nu_diag,*) write (nu_diag,*) subname,'Ice area conserv error ', trim(fieldid) write (nu_diag,*) subname,' Initial global area =', asum_init @@ -1135,7 +1135,7 @@ subroutine global_conservation (l_stop, fieldid, & if (abs(atsum_init(nt)) > puny) then diff = atsum_final(nt) - atsum_init(nt) if (abs(diff/atsum_init(nt)) > puny) then - l_stop = .true. + ckflag = .true. write (nu_diag,*) write (nu_diag,*) subname,'Ice area*tracer conserv error ', trim(fieldid),nt write (nu_diag,*) subname,' Tracer index =', nt @@ -1323,7 +1323,7 @@ subroutine check_monotonicity (nx_block, ny_block, & ilo, ihi, jlo, jhi, & tmin, tmax, & aim, trm, & - l_stop, & + ckflag, & istop, jstop) integer (kind=int_kind), intent(in) :: & @@ -1341,7 +1341,7 @@ subroutine check_monotonicity (nx_block, ny_block, & tmax ! local max tracer logical (kind=log_kind), intent(inout) :: & - l_stop ! if true, abort on return + ckflag ! if true, abort on return integer (kind=int_kind), intent(inout) :: & istop, jstop ! 
indices of grid cell where model aborts @@ -1425,7 +1425,7 @@ subroutine check_monotonicity (nx_block, ny_block, & w1 = max(c1, abs(tmin(i,j,nt))) w2 = max(c1, abs(tmax(i,j,nt))) if (trm(i,j,nt) < tmin(i,j,nt)-w1*puny) then - l_stop = .true. + ckflag = .true. istop = i jstop = j write (nu_diag,*) ' ' @@ -1435,7 +1435,7 @@ subroutine check_monotonicity (nx_block, ny_block, & write (nu_diag,*) 'tmin =' , tmin(i,j,nt) write (nu_diag,*) 'ice area =' , aim(i,j) elseif (trm(i,j,nt) > tmax(i,j,nt)+w2*puny) then - l_stop = .true. + ckflag = .true. istop = i jstop = j write (nu_diag,*) ' ' diff --git a/cicecore/cicedynB/general/ice_flux.F90 b/cicecore/cicedynB/general/ice_flux.F90 index 71253a4b1..06b371c3c 100644 --- a/cicecore/cicedynB/general/ice_flux.F90 +++ b/cicecore/cicedynB/general/ice_flux.F90 @@ -547,7 +547,8 @@ subroutine init_coupler_flux integer (kind=int_kind) :: n - real (kind=dbl_kind) :: fcondtopn_d(6), fsurfn_d(6) + integer (kind=int_kind), parameter :: max_d = 6 + real (kind=dbl_kind) :: fcondtopn_d(max_d), fsurfn_d(max_d) real (kind=dbl_kind) :: stefan_boltzmann, Tffresh real (kind=dbl_kind) :: vonkar, zref, iceruf @@ -589,7 +590,7 @@ subroutine init_coupler_flux flw (:,:,:) = c180 ! incoming longwave rad (W/m^2) frain (:,:,:) = c0 ! rainfall rate (kg/m2/s) do n = 1, ncat ! conductive heat flux (W/m^2) - fcondtopn_f(:,:,n,:) = fcondtopn_d(n) + fcondtopn_f(:,:,n,:) = fcondtopn_d(min(n,max_d)) enddo fsurfn_f = fcondtopn_f ! surface heat flux (W/m^2) flatn_f (:,:,:,:) = c0 ! latent heat flux (kg/m2/s) @@ -606,7 +607,7 @@ subroutine init_coupler_flux flw (:,:,:) = 280.0_dbl_kind ! incoming longwave rad (W/m^2) frain (:,:,:) = c0 ! rainfall rate (kg/m2/s) do n = 1, ncat ! surface heat flux (W/m^2) - fsurfn_f(:,:,n,:) = fsurfn_d(n) + fsurfn_f(:,:,n,:) = fsurfn_d(min(n,max_d)) enddo fcondtopn_f(:,:,:,:) = 0.0_dbl_kind ! conductive heat flux (W/m^2) flatn_f (:,:,:,:) = -2.0_dbl_kind ! latent heat flux (W/m^2) @@ -623,7 +624,7 @@ subroutine init_coupler_flux flw (:,:,:) = 230.0_dbl_kind ! incoming longwave rad (W/m^2) frain (:,:,:) = c0 ! rainfall rate (kg/m2/s) do n = 1, ncat ! surface heat flux (W/m^2) - fsurfn_f(:,:,n,:) = fsurfn_d(n) + fsurfn_f(:,:,n,:) = fsurfn_d(min(n,max_d)) enddo fcondtopn_f(:,:,:,:) = c0 ! conductive heat flux (W/m^2) flatn_f (:,:,:,:) = -1.0_dbl_kind ! latent heat flux (W/m^2) @@ -654,9 +655,7 @@ subroutine init_coupler_flux enddo enddo -#ifndef CICE_IN_NEMO sst (:,:,:) = Tf(:,:,:) ! sea surface temp (C) -#endif qdp (:,:,:) = c0 ! deep ocean heat flux (W/m^2) hmix (:,:,:) = c20 ! ocean mixed layer depth (m) hwater(:,:,:) = bathymetry(:,:,:) ! 
ocean water depth (m) diff --git a/cicecore/cicedynB/general/ice_forcing.F90 b/cicecore/cicedynB/general/ice_forcing.F90 old mode 100644 new mode 100755 index edbba8101..200b3d00b --- a/cicecore/cicedynB/general/ice_forcing.F90 +++ b/cicecore/cicedynB/general/ice_forcing.F90 @@ -22,16 +22,16 @@ module ice_forcing use ice_blocks, only: nx_block, ny_block use ice_domain_size, only: ncat, max_blocks, nx_global, ny_global use ice_communicate, only: my_task, master_task - use ice_calendar, only: istep, istep1, time, time_forc, & - sec, mday, month, nyr, yday, daycal, dayyr, & - daymo, days_per_year, hc_jday + use ice_calendar, only: istep, istep1, & + msec, mday, mmonth, myear, yday, daycal, & + daymo, days_per_year, compute_days_between use ice_fileunits, only: nu_diag, nu_forcing use ice_exit, only: abort_ice use ice_read_write, only: ice_open, ice_read, & ice_get_ncvarsize, ice_read_vec_nc, & ice_open_nc, ice_read_nc, ice_close_nc use ice_timers, only: ice_timer_start, ice_timer_stop, timer_readwrite, & - timer_bound + timer_bound, timer_forcing use ice_arrays_column, only: oceanmixed_ice, restore_bgc use ice_constants, only: c0, c1, c2, c3, c4, c5, c8, c10, c12, c15, c20, & c180, c360, c365, c1000, c3600 @@ -53,10 +53,10 @@ module ice_forcing read_data_nc_point, interp_coeff integer (kind=int_kind), public :: & - ycycle , & ! number of years in forcing cycle - fyear_init , & ! first year of data in forcing cycle - fyear , & ! current year in forcing cycle - fyear_final ! last year in cycle + ycycle , & ! number of years in forcing cycle, set by namelist + fyear_init , & ! first year of data in forcing cycle, set by namelist + fyear , & ! current year in forcing cycle, varying during the run + fyear_final ! last year in cycle, computed at init character (char_len_long) :: & ! input data file names uwind_file, & @@ -80,8 +80,7 @@ module ice_forcing botmelt_file real (kind=dbl_kind), public :: & - c1intp, c2intp , & ! interpolation coefficients - ftime ! forcing time (for restart) + c1intp, c2intp ! interpolation coefficients integer (kind=int_kind) :: & oldrecnum = 0 , & ! old record number (save between steps) @@ -159,7 +158,7 @@ module ice_forcing trest ! restoring time scale (sec) logical (kind=log_kind), public :: & - dbug ! prints debugging output if true + forcing_diag ! prints forcing debugging output if true real (dbl_kind), dimension(:), allocatable, public :: & jday_atm ! jday time vector from atm forcing files @@ -167,6 +166,15 @@ module ice_forcing integer (kind=int_kind), public :: & Njday_atm ! Number of atm forcing timesteps + + ! PRIVATE: + + real (dbl_kind), parameter :: & + mixed_layer_depth_default = c20 ! default mixed layer depth in m + + logical (kind=log_kind), parameter :: & + forcing_debug = .false. ! local debug flag + !======================================================================= contains @@ -177,6 +185,9 @@ module ice_forcing ! subroutine alloc_forcing integer (int_kind) :: ierr + character(len=*), parameter :: subname = '(alloc_forcing)' + + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' allocate ( & cldf(nx_block,ny_block, max_blocks), & ! cloud fraction @@ -221,14 +232,20 @@ subroutine init_forcing_atmo use ice_calendar, only: use_leap_years + integer (kind=int_kind) :: modadj ! adjustment for mod function character(len=*), parameter :: subname = '(init_forcing_atmo)' - ! Allocate forcing arrays - call alloc_forcing() + if (forcing_debug .and. 
my_task == master_task) write(nu_diag,*) subname,'fdbg start' - fyear = fyear_init + mod(nyr-1,ycycle) ! current year + modadj = abs((min(0,myear-fyear_init)/ycycle+1)*ycycle) + fyear = fyear_init + mod(myear-fyear_init+modadj,ycycle) fyear_final = fyear_init + ycycle - 1 ! last year in forcing cycle + if (forcing_debug .and. my_task == master_task) then + write(nu_diag,*) subname,'fdbg fyear = ',fyear,fyear_init,fyear_final + write(nu_diag,*) subname,'fdbg atm_data_type = ',trim(atm_data_type) + endif + if (trim(atm_data_type) /= 'default' .and. & my_task == master_task) then write (nu_diag,*) ' Initial forcing data year = ',fyear_init @@ -327,15 +344,19 @@ subroutine init_forcing_ocn(dt) character(len=*), parameter :: subname = '(init_forcing_ocn)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + call icepack_query_parameters(secday_out=secday) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) -! sst_data(:,:,:,:) = c0 -! sss_data(:,:,:,:) = c0 -! uocn_data(:,:,:,:) = c0 -! vocn_data(:,:,:,:) = c0 + call alloc_forcing() + + sst_data(:,:,:,:) = c0 + sss_data(:,:,:,:) = c0 + uocn_data(:,:,:,:) = c0 + vocn_data(:,:,:,:) = c0 nbits = 64 ! double precision data @@ -368,7 +389,7 @@ subroutine init_forcing_ocn(dt) sss(:,:,:) = c0 do k = 1,12 ! loop over 12 months - call ice_read (nu_forcing, k, work1, 'rda8', dbug, & + call ice_read (nu_forcing, k, work1, 'rda8', forcing_diag, & field_loc_center, field_type_scalar) !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks @@ -415,7 +436,7 @@ subroutine init_forcing_ocn(dt) if (my_task == master_task) & call ice_open (nu_forcing, sst_file, nbits) - call ice_read (nu_forcing, month, sst, 'rda8', dbug, & + call ice_read (nu_forcing, mmonth, sst, 'rda8', forcing_diag, & field_loc_center, field_type_scalar) if (my_task == master_task) close(nu_forcing) @@ -451,7 +472,7 @@ subroutine init_forcing_ocn(dt) endif fieldname='sst' - call ice_read_nc(fid,month,fieldname,sst,diag) + call ice_read_nc(fid,mmonth,fieldname,sst,diag) if (my_task == master_task) call ice_close_nc(fid) @@ -469,8 +490,8 @@ subroutine init_forcing_ocn(dt) endif ! ocn_data_type if (trim(ocn_data_type) == 'ncar') then -! call ocn_data_ncar_init - call ocn_data_ncar_init_3D + call ocn_data_ncar_init +! call ocn_data_ncar_init_3D endif if (trim(ocn_data_type) == 'hycom') then @@ -499,6 +520,8 @@ subroutine ocn_freezing_temperature character(len=*), parameter :: subname = '(ocn_freezing_temperature)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks do j = 1, ny_block @@ -533,7 +556,8 @@ subroutine get_forcing_atmo integer (kind=int_kind) :: & iblk, & ! block index ilo,ihi,jlo,jhi, & ! beginning and end of physical domain - fyear_old, & ! prior fyear value + modadj, & ! adjustment to keep the mod argument positive + fyear_old, & ! fyear setting on last timestep nt_Tsfc type (block) :: & this_block character(len=*), parameter :: subname = '(get_forcing_atmo)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + + call ice_timer_start(timer_forcing) + fyear_old = fyear - fyear = fyear_init + mod(nyr-1,ycycle) ! current year + modadj = abs((min(0,myear-fyear_init)/ycycle+1)*ycycle) + fyear = fyear_init + mod(myear-fyear_init+modadj,ycycle) if (trim(atm_data_type) /= 'default' .and. 
& (istep <= 1 .or. fyear /= fyear_old)) then if (my_task == master_task) then - write (nu_diag,*) ' Current forcing data year = ',fyear + write (nu_diag,*) ' Set current forcing data year = ',fyear endif endif @@ -555,23 +584,25 @@ subroutine get_forcing_atmo if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - ftime = time ! forcing time - time_forc = ftime ! for restarting + !------------------------------------------------------------------- + ! Read and interpolate atmospheric data + !------------------------------------------------------------------- - !------------------------------------------------------------------- - ! Read and interpolate atmospheric data - !------------------------------------------------------------------- + if (forcing_debug .and. my_task == master_task) then + write(nu_diag,*) subname,'fdbg fyear = ',fyear + write(nu_diag,*) subname,'fdbg atm_data_type = ',trim(atm_data_type) + endif if (trim(atm_data_type) == 'ncar') then call ncar_data elseif (trim(atm_data_type) == 'LYq') then call LY_data elseif (trim(atm_data_type) == 'JRA55_gx1') then - call JRA55_data(fyear) + call JRA55_data elseif (trim(atm_data_type) == 'JRA55_gx3') then - call JRA55_data(fyear) + call JRA55_data elseif (trim(atm_data_type) == 'JRA55_tx1') then - call JRA55_data(fyear) + call JRA55_data elseif (trim(atm_data_type) == 'hadgem') then call hadgem_data elseif (trim(atm_data_type) == 'monthly') then @@ -586,9 +617,9 @@ subroutine get_forcing_atmo return endif - !------------------------------------------------------------------- - ! Convert forcing data to fields needed by ice model - !------------------------------------------------------------------- + !------------------------------------------------------------------- + ! Convert forcing data to fields needed by ice model + !------------------------------------------------------------------- !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block) do iblk = 1, nblocks @@ -640,6 +671,8 @@ subroutine get_forcing_atmo field_loc_center, field_type_scalar) call ice_timer_stop(timer_bound) + call ice_timer_stop(timer_forcing) + end subroutine get_forcing_atmo !======================================================================= @@ -655,6 +688,15 @@ subroutine get_forcing_ocn (dt) character(len=*), parameter :: subname = '(get_forcing_ocn)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + + call ice_timer_start(timer_forcing) + + if (forcing_debug .and. my_task == master_task) then + write(nu_diag,*) subname,'fdbg fyear = ',fyear + write(nu_diag,*) subname,'fdbg ocn_data_type = ',trim(ocn_data_type) + endif + if (trim(ocn_data_type) == 'clim') then call ocn_data_clim(dt) elseif (trim(ocn_data_type) == 'ncar' .or. & @@ -670,6 +712,8 @@ subroutine get_forcing_ocn (dt) !MHRI: NOT IMPLEMENTED YET endif + call ice_timer_stop(timer_forcing) + end subroutine get_forcing_ocn !======================================================================= @@ -694,7 +738,7 @@ subroutine read_data (flag, recd, yr, ixm, ixx, ixp, & ! data is missing, and we assume periodicity when monthly data ! is missing. - use ice_diagnostics, only: check_step + use ice_diagnostics, only: debug_model_step logical (kind=log_kind), intent(in) :: flag @@ -726,13 +770,15 @@ subroutine read_data (flag, recd, yr, ixm, ixx, ixp, & character(len=*), parameter :: subname = '(read_data)' + if (forcing_debug .and. 
my_task == master_task) write(nu_diag,*) subname,'fdbg start' + call ice_timer_start(timer_readwrite) ! reading/writing nbits = 64 ! double precision data - if (istep1 > check_step) dbug = .true. !! debugging + if (istep1 > debug_model_step) forcing_diag = .true. !! debugging - if (my_task==master_task .and. (dbug)) then + if (my_task==master_task .and. (forcing_diag)) then write(nu_diag,*) ' ', trim(data_file) endif @@ -770,7 +816,7 @@ subroutine read_data (flag, recd, yr, ixm, ixx, ixp, & arg = 1 nrec = recd + n2 call ice_read (nu_forcing, nrec, field_data(:,:,arg,:), & - 'rda8', dbug, field_loc, field_type) + 'rda8', forcing_diag, field_loc, field_type) if (ixx==1 .and. my_task == master_task) close(nu_forcing) endif ! ixm ne -99 @@ -782,7 +828,7 @@ subroutine read_data (flag, recd, yr, ixm, ixx, ixp, & arg = arg + 1 nrec = recd + ixx call ice_read (nu_forcing, nrec, field_data(:,:,arg,:), & - 'rda8', dbug, field_loc, field_type) + 'rda8', forcing_diag, field_loc, field_type) if (ixp /= -99) then ! currently in latter half of data interval @@ -807,7 +853,7 @@ subroutine read_data (flag, recd, yr, ixm, ixx, ixp, & arg = arg + 1 nrec = recd + n4 call ice_read (nu_forcing, nrec, field_data(:,:,arg,:), & - 'rda8', dbug, field_loc, field_type) + 'rda8', forcing_diag, field_loc, field_type) endif ! ixp /= -99 if (my_task == master_task) close(nu_forcing) @@ -842,7 +888,7 @@ subroutine read_data_nc (flag, recd, yr, ixm, ixx, ixp, & ! ! Adapted by Alison McLaren, Met Office from read_data - use ice_diagnostics, only: check_step + use ice_diagnostics, only: debug_model_step logical (kind=log_kind), intent(in) :: flag @@ -860,16 +906,14 @@ subroutine read_data_nc (flag, recd, yr, ixm, ixx, ixp, & fieldname ! field name in netCDF file integer (kind=int_kind), intent(in) :: & - field_loc, & ! location of field on staggered grid - field_type ! type of field (scalar, vector, angle) + field_loc, & ! location of field on staggered grid + field_type ! type of field (scalar, vector, angle) real (kind=dbl_kind), dimension(nx_block,ny_block,2,max_blocks), intent(out) :: & field_data ! 2 values needed for interpolation ! local variables - character(len=*), parameter :: subname = '(read_data_nc)' - integer (kind=int_kind) :: & nrec , & ! record number to read n2, n4 , & ! like ixm and ixp, but @@ -877,11 +921,15 @@ subroutine read_data_nc (flag, recd, yr, ixm, ixx, ixp, & arg , & ! value of time argument in field_data fid ! file id for netCDF routines + character(len=*), parameter :: subname = '(read_data_nc)' + + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + call ice_timer_start(timer_readwrite) ! reading/writing - if (istep1 > check_step) dbug = .true. !! debugging + if (istep1 > debug_model_step) forcing_diag = .true. !! debugging - if (my_task==master_task .and. (dbug)) then + if (my_task==master_task .and. 
(forcing_diag)) then write(nu_diag,*) ' ', trim(data_file) endif @@ -920,7 +968,7 @@ subroutine read_data_nc (flag, recd, yr, ixm, ixx, ixp, & nrec = recd + n2 call ice_read_nc & - (fid, nrec, fieldname, field_data(:,:,arg,:), dbug, & + (fid, nrec, fieldname, field_data(:,:,arg,:), forcing_diag, & field_loc, field_type) if (ixx==1) call ice_close_nc(fid) @@ -934,7 +982,7 @@ subroutine read_data_nc (flag, recd, yr, ixm, ixx, ixp, & nrec = recd + ixx call ice_read_nc & - (fid, nrec, fieldname, field_data(:,:,arg,:), dbug, & + (fid, nrec, fieldname, field_data(:,:,arg,:), forcing_diag, & field_loc, field_type) if (ixp /= -99) then @@ -960,7 +1008,7 @@ subroutine read_data_nc (flag, recd, yr, ixm, ixx, ixp, & nrec = recd + n4 call ice_read_nc & - (fid, nrec, fieldname, field_data(:,:,arg,:), dbug, & + (fid, nrec, fieldname, field_data(:,:,arg,:), forcing_diag, & field_loc, field_type) endif ! ixp /= -99 @@ -986,7 +1034,7 @@ subroutine read_data_nc_hycom (flag, recd, & ! ! Adapted by Mads Hvid Ribergaard, DMI from read_data_nc - use ice_diagnostics, only: check_step + use ice_diagnostics, only: debug_model_step use ice_timers, only: ice_timer_start, ice_timer_stop, timer_readwrite logical (kind=log_kind), intent(in) :: flag @@ -1011,11 +1059,15 @@ subroutine read_data_nc_hycom (flag, recd, & integer (kind=int_kind) :: & fid ! file id for netCDF routines + character(len=*), parameter :: subname = '(read_data_nc_hycom)' + + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + call ice_timer_start(timer_readwrite) ! reading/writing - if (istep1 > check_step) dbug = .true. !! debugging + if (istep1 > debug_model_step) forcing_diag = .true. !! debugging - if (my_task==master_task .and. (dbug)) then + if (my_task==master_task .and. (forcing_diag)) then write(nu_diag,*) ' ', trim(data_file) endif @@ -1026,11 +1078,11 @@ subroutine read_data_nc_hycom (flag, recd, & ! read data !----------------------------------------------------------------- call ice_read_nc & - (fid, recd , fieldname, field_data(:,:,1,:), dbug, & + (fid, recd , fieldname, field_data(:,:,1,:), forcing_diag, & field_loc, field_type) call ice_read_nc & - (fid, recd+1, fieldname, field_data(:,:,2,:), dbug, & + (fid, recd+1, fieldname, field_data(:,:,2,:), forcing_diag, & field_loc, field_type) call ice_close_nc(fid) @@ -1052,7 +1104,7 @@ subroutine read_clim_data (readflag, recd, ixm, ixx, ixp, & ! no need to get data from other years or to extrapolate data beyond ! the forcing time period. - use ice_diagnostics, only: check_step + use ice_diagnostics, only: debug_model_step logical (kind=log_kind),intent(in) :: readflag @@ -1079,13 +1131,15 @@ subroutine read_clim_data (readflag, recd, ixm, ixx, ixp, & character(len=*), parameter :: subname = '(read_clim_data)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + call ice_timer_start(timer_readwrite) ! reading/writing nbits = 64 ! double precision data - if (istep1 > check_step) dbug = .true. !! debugging + if (istep1 > debug_model_step) forcing_diag = .true. !! debugging - if (my_task==master_task .and. (dbug)) & + if (my_task==master_task .and. 
(forcing_diag)) & write(nu_diag,*) ' ', trim(data_file) if (readflag) then @@ -1101,19 +1155,19 @@ subroutine read_clim_data (readflag, recd, ixm, ixx, ixp, & arg = 1 nrec = recd + ixm call ice_read (nu_forcing, nrec, field_data(:,:,arg,:), & - 'rda8', dbug, field_loc, field_type) + 'rda8', forcing_diag, field_loc, field_type) endif arg = arg + 1 nrec = recd + ixx call ice_read (nu_forcing, nrec, field_data(:,:,arg,:), & - 'rda8', dbug, field_loc, field_type) + 'rda8', forcing_diag, field_loc, field_type) if (ixp /= -99) then arg = arg + 1 nrec = recd + ixp call ice_read (nu_forcing, nrec, field_data(:,:,arg,:), & - 'rda8', dbug, field_loc, field_type) + 'rda8', forcing_diag, field_loc, field_type) endif if (my_task == master_task) close (nu_forcing) @@ -1134,7 +1188,7 @@ subroutine read_clim_data_nc (readflag, recd, ixm, ixx, ixp, & ! no need to get data from other years or to extrapolate data beyond ! the forcing time period. - use ice_diagnostics, only: check_step + use ice_diagnostics, only: debug_model_step logical (kind=log_kind),intent(in) :: readflag @@ -1164,11 +1218,13 @@ subroutine read_clim_data_nc (readflag, recd, ixm, ixx, ixp, & character(len=*), parameter :: subname = '(read_clim_data_nc)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + call ice_timer_start(timer_readwrite) ! reading/writing - if (istep1 > check_step) dbug = .true. !! debugging + if (istep1 > debug_model_step) forcing_diag = .true. !! debugging - if (my_task==master_task .and. (dbug)) & + if (my_task==master_task .and. (forcing_diag)) & write(nu_diag,*) ' ', trim(data_file) if (readflag) then @@ -1185,21 +1241,21 @@ subroutine read_clim_data_nc (readflag, recd, ixm, ixx, ixp, & nrec = recd + ixm call ice_read_nc & (fid, nrec, fieldname, field_data(:,:,arg,:), & - dbug, field_loc, field_type) + forcing_diag, field_loc, field_type) endif arg = arg + 1 nrec = recd + ixx call ice_read_nc & (fid, nrec, fieldname, field_data(:,:,arg,:), & - dbug, field_loc, field_type) + forcing_diag, field_loc, field_type) if (ixp /= -99) then arg = arg + 1 nrec = recd + ixp call ice_read_nc & (fid, nrec, fieldname, field_data(:,:,arg,:), & - dbug, field_loc, field_type) + forcing_diag, field_loc, field_type) endif if (my_task == master_task) call ice_close_nc (fid) @@ -1222,14 +1278,16 @@ subroutine interp_coeff_monthly (recslot) real (kind=dbl_kind) :: & secday , & ! seconds in day - tt , & ! seconds elapsed in current year - t1, t2 ! seconds elapsed at month midpoint + tt , & ! days elapsed in current year + t1, t2 ! days elapsed at month midpoint real (kind=dbl_kind) :: & daymid(0:13) ! month mid-points character(len=*), parameter :: subname = '(interp_coeff_monthly)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + call icepack_query_parameters(secday_out=secday) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -1238,21 +1296,27 @@ subroutine interp_coeff_monthly (recslot) daymid(1:13) = 14._dbl_kind ! time frame ends 0 sec into day 15 daymid(0) = 14._dbl_kind - daymo(12) ! Dec 15, 0 sec - ! make time cyclic - tt = mod(ftime/secday,dayyr) + ! compute days since Jan 1, 00h, yday is the day counter for the year + tt = real(yday-1,kind=dbl_kind) + real(msec,kind=dbl_kind)/secday ! Find neighboring times if (recslot==2) then ! first half of month - t2 = daycal(month) + daymid(month) ! midpoint, current month - if (month == 1) then + t2 = daycal(mmonth) + daymid(mmonth) ! 
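!=======================================================================
! Aside: a standalone sketch of the time-interpolation bookkeeping
! above. tt is now days elapsed since Jan 1 00Z, built from yday and
! the new msec counter rather than the removed ftime clock; t1/t2 are
! the bracketing mid-month points in the same units; and the weights
! are the normalized distances to the far endpoint, as in
! interp_coeff. All values below are illustrative.
!=======================================================================
      program demo_monthly_weights
      implicit none
      integer, parameter :: dbl = selected_real_kind(13)
      real (kind=dbl), parameter :: secday = 86400.0_dbl
      integer :: yday, msec
      real (kind=dbl) :: tt, t1, t2, c1intp, c2intp
      yday = 20      ! Jan 20
      msec = 43200   ! 12:00:00
      tt = real(yday-1,kind=dbl) + real(msec,kind=dbl)/secday
      t1 = 14.0_dbl            ! mid-January point (Jan 15, 00Z)
      t2 = 31.0_dbl + 14.0_dbl ! mid-February point (Feb 15, 00Z)
      c1intp = abs((t2 - tt) / (t2 - t1)) ! weight on the earlier record
      c2intp = 1.0_dbl - c1intp           ! weight on the later record
      print *, 'tt, c1intp, c2intp = ', tt, c1intp, c2intp ! 19.5 0.82... 0.17...
      end program demo_monthly_weights
!=======================================================================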
midpoint, current month + if (mmonth == 1) then t1 = daymid(0) ! Dec 15 (0 sec) else - t1 = daycal(month-1) + daymid(month-1) ! midpoint, previous month + t1 = daycal(mmonth-1) + daymid(mmonth-1) ! midpoint, previous month endif else ! second half of month - t1 = daycal(month) + daymid(month) ! midpoint, current month - t2 = daycal(month+1) + daymid(month+1)! day 15 of next month (0 sec) + t1 = daycal(mmonth) + daymid(mmonth) ! midpoint, current month + t2 = daycal(mmonth+1) + daymid(mmonth+1)! day 15 of next month (0 sec) + endif + + if (tt < t1 .or. tt > t2) then + write(nu_diag,*) subname,' ERROR in tt',tt,t1,t2 + call abort_ice (error_message=subname//' ERROR in tt', & + file=__FILE__, line=__LINE__) endif ! Compute coefficients @@ -1282,8 +1346,7 @@ subroutine interp_coeff (recnum, recslot, secint, dataloc) ! local variables real (kind=dbl_kind) :: & - secday, & ! seconds in a day - secyr ! seconds in a year + secday ! seconds in a day real (kind=dbl_kind) :: & tt , & ! seconds elapsed in current year @@ -1292,13 +1355,15 @@ subroutine interp_coeff (recnum, recslot, secint, dataloc) character(len=*), parameter :: subname = '(interp_coeff)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + call icepack_query_parameters(secday_out=secday) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - secyr = dayyr * secday ! seconds in a year - tt = mod(ftime,secyr) + ! compute seconds since Jan 1, 00h, yday is the day counter for the year + tt = real(yday-1,kind=dbl_kind)*secday + real(msec,kind=dbl_kind) ! Find neighboring times rcnum = real(recnum,kind=dbl_kind) @@ -1322,6 +1387,12 @@ subroutine interp_coeff (recnum, recslot, secint, dataloc) c1intp = abs((t2 - tt) / (t2 - t1)) c2intp = c1 - c1intp + if (forcing_debug .and. my_task == master_task) then + write(nu_diag,*) subname,'fdbg yday,sec = ',yday,msec + write(nu_diag,*) subname,'fdbg tt = ',tt + write(nu_diag,*) subname,'fdbg c12intp = ',c1intp,c2intp + endif + end subroutine interp_coeff !======================================================================= @@ -1335,6 +1406,9 @@ subroutine interp_coeff2 (tt, t1, t2) real (kind=dbl_kind), intent(in) :: & tt , & ! current decimal daynumber t1, t2 ! first+last decimal daynumber + character(len=*), parameter :: subname = '(interp_coeff2)' + + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' ! Compute coefficients c1intp = abs((t2 - tt) / (t2 - t1)) @@ -1364,6 +1438,8 @@ subroutine interpolate_data (field_data, field) character(len=*), parameter :: subname = '(interpolate data)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks do j = 1, ny_block @@ -1395,6 +1471,8 @@ subroutine file_year (data_file, yr) character(len=*), parameter :: subname = '(file_year)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (trim(atm_data_type) == 'hadgem') then ! netcdf i = index(data_file,'.nc') - 5 tmpname = data_file @@ -1481,6 +1559,8 @@ subroutine prepare_forcing (nx_block, ny_block, & character(len=*), parameter :: subname = '(prepare_forcing)' + if (forcing_debug .and. 
my_task == master_task) write(nu_diag,*) subname,'fdbg start' + call icepack_query_parameters(Tffresh_out=Tffresh, puny_out=puny) call icepack_query_parameters(secday_out=secday) call icepack_query_parameters(calc_strair_out=calc_strair) @@ -1579,7 +1659,7 @@ subroutine prepare_forcing (nx_block, ny_block, & ! convert precipitation units to kg/m^2 s if (trim(precip_units) == 'mm_per_month') then - precip_factor = c12/(secday*days_per_year) + precip_factor = c12/(secday*real(days_per_year,kind=dbl_kind)) elseif (trim(precip_units) == 'mm_per_day') then precip_factor = c1/secday elseif (trim(precip_units) == 'mm_per_sec' .or. & @@ -1699,6 +1779,8 @@ subroutine longwave_parkinson_washington(Tair, cldf, flw) character(len=*), parameter :: subname = '(longwave_parkinson_washington)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + call icepack_query_parameters(Tffresh_out=Tffresh, & stefan_boltzmann_out=stefan_boltzmann) call icepack_warnings_flush(nu_diag) @@ -1749,6 +1831,8 @@ subroutine longwave_rosati_miyakoda(cldf, Tsfc, & character(len=*), parameter :: subname = '(longwave_rosati_miyakoda)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + call icepack_query_parameters(Tffresh_out=Tffresh, & stefan_boltzmann_out=stefan_boltzmann, & emissivity_out=emissivity) @@ -1786,6 +1870,8 @@ subroutine ncar_files (yr) character(len=*), parameter :: subname = '(ncar_files)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + fsw_file = & trim(atm_data_dir)//'/MONTHLY/swdn.1996.dat' call file_year(fsw_file,yr) @@ -1857,6 +1943,8 @@ subroutine ncar_data character(len=*), parameter :: subname = '(ncar_data)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + call icepack_query_parameters(secday_out=secday) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -1870,12 +1958,12 @@ subroutine ncar_data !------------------------------------------------------------------- midmonth = 15 ! data is given on 15th of every month -! midmonth = fix(p5 * real(daymo(month))) ! exact middle +! midmonth = fix(p5 * real(daymo(mmonth))) ! exact middle ! Compute record numbers for surrounding months maxrec = 12 - ixm = mod(month+maxrec-2,maxrec) + 1 - ixp = mod(month, maxrec) + 1 + ixm = mod(mmonth+maxrec-2,maxrec) + 1 + ixp = mod(mmonth, maxrec) + 1 if (mday >= midmonth) ixm = -99 ! other two points will be used if (mday < midmonth) ixp = -99 @@ -1892,29 +1980,29 @@ subroutine ncar_data ! Read 2 monthly values readm = .false. - if (istep==1 .or. (mday==midmonth .and. sec==0)) readm = .true. + if (istep==1 .or. (mday==midmonth .and. msec==0)) readm = .true. if (trim(atm_data_format) == 'bin') then - call read_data (readm, 0, fyear, ixm, month, ixp, & + call read_data (readm, 0, fyear, ixm, mmonth, ixp, & maxrec, fsw_file, fsw_data, & field_loc_center, field_type_scalar) - call read_data (readm, 0, fyear, ixm, month, ixp, & + call read_data (readm, 0, fyear, ixm, mmonth, ixp, & maxrec, flw_file, cldf_data, & field_loc_center, field_type_scalar) - call read_data (readm, 0, fyear, ixm, month, ixp, & + call read_data (readm, 0, fyear, ixm, mmonth, ixp, & maxrec, rain_file, fsnow_data, & field_loc_center, field_type_scalar) else call abort_ice (error_message=subname//'nonbinary atm_data_format unavailable', & file=__FILE__, line=__LINE__) ! The routine exists, for example: -! 
call read_data_nc (readm, 0, fyear, ixm, month, ixp, & +! call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & ! maxrec, fsw_file, 'fsw', fsw_data, & ! field_loc_center, field_type_scalar) -! call read_data_nc (readm, 0, fyear, ixm, month, ixp, & +! call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & ! maxrec, flw_file, 'cldf',cldf_data, & ! field_loc_center, field_type_scalar) -! call read_data_nc (readm, 0, fyear, ixm, month, ixp, & +! call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & ! maxrec, rain_file,'prec',fsnow_data, & ! field_loc_center, field_type_scalar) endif @@ -1937,7 +2025,7 @@ subroutine ncar_data maxrec = 1460 ! 365*4 ! current record number - recnum = 4*int(yday) - 3 + int(real(sec,kind=dbl_kind)/sec6hr) + recnum = 4*int(yday) - 3 + int(real(msec,kind=dbl_kind)/sec6hr) ! Compute record numbers for surrounding data @@ -2009,6 +2097,8 @@ subroutine LY_files (yr) character(len=*), parameter :: subname = '(LY_files)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + flw_file = & trim(atm_data_dir)//'/MONTHLY/cldf.omip.dat' @@ -2044,6 +2134,9 @@ subroutine LY_files (yr) endif ! master_task end subroutine LY_files + +!======================================================================= + subroutine JRA55_gx1_files(yr) ! integer (kind=int_kind), intent(in) :: & @@ -2051,6 +2144,8 @@ subroutine JRA55_gx1_files(yr) character(len=*), parameter :: subname = '(JRA55_gx1_files)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + uwind_file = & trim(atm_data_dir)//'/8XDAILY/JRA55_03hr_forcing_2005.nc' call file_year(uwind_file,yr) @@ -2060,6 +2155,9 @@ subroutine JRA55_gx1_files(yr) write (nu_diag,*) trim(uwind_file) endif end subroutine JRA55_gx1_files + +!======================================================================= + subroutine JRA55_tx1_files(yr) ! integer (kind=int_kind), intent(in) :: & @@ -2067,6 +2165,8 @@ subroutine JRA55_tx1_files(yr) character(len=*), parameter :: subname = '(JRA55_tx1_files)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + uwind_file = & trim(atm_data_dir)//'/8XDAILY/JRA55_03hr_forcing_tx1_2005.nc' call file_year(uwind_file,yr) @@ -2076,6 +2176,9 @@ subroutine JRA55_tx1_files(yr) write (nu_diag,*) trim(uwind_file) endif end subroutine JRA55_tx1_files + +!======================================================================= + subroutine JRA55_gx3_files(yr) ! integer (kind=int_kind), intent(in) :: & @@ -2083,6 +2186,8 @@ subroutine JRA55_gx3_files(yr) character(len=*), parameter :: subname = '(JRA55_gx3_files)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + uwind_file = & trim(atm_data_dir)//'/8XDAILY/JRA55_gx3_03hr_forcing_2005.nc' call file_year(uwind_file,yr) @@ -2092,6 +2197,7 @@ subroutine JRA55_gx3_files(yr) write (nu_diag,*) trim(uwind_file) endif end subroutine JRA55_gx3_files + !======================================================================= ! ! read Large and Yeager atmospheric data @@ -2131,6 +2237,8 @@ subroutine LY_data character(len=*), parameter :: subname = '(LY_data)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + call icepack_query_parameters(Tffresh_out=Tffresh) call icepack_query_parameters(secday_out=secday) call icepack_warnings_flush(nu_diag) @@ -2145,12 +2253,12 @@ subroutine LY_data !------------------------------------------------------------------- midmonth = 15 ! 
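!=======================================================================
! Aside: a standalone sketch of the 6-hourly record arithmetic used in
! ncar_data above (and again in LY_data). With four records per day,
! the record holding the current time follows from the day-of-year
! counter and the new msec seconds-of-day counter. Values below are
! illustrative.
!=======================================================================
      program demo_recnum_6hr
      implicit none
      integer, parameter :: dbl = selected_real_kind(13)
      real (kind=dbl), parameter :: secday = 86400.0_dbl
      real (kind=dbl) :: sec6hr, yday
      integer :: msec, recnum
      sec6hr = secday/4.0_dbl ! seconds in 6 hours
      yday = 2.0_dbl          ! Jan 2
      msec = 21600            ! 06:00:00
      recnum = 4*int(yday) - 3 + int(real(msec,kind=dbl)/sec6hr)
      print *, 'recnum = ', recnum ! 6: Jan 1 fills records 1-4, Jan 2 06Z is record 6
      end program demo_recnum_6hr
!=======================================================================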
data is given on 15th of every month -! midmonth = fix(p5 * real(daymo(month))) ! exact middle +! midmonth = fix(p5 * real(daymo(mmonth))) ! exact middle ! Compute record numbers for surrounding months maxrec = 12 - ixm = mod(month+maxrec-2,maxrec) + 1 - ixp = mod(month, maxrec) + 1 + ixm = mod(mmonth+maxrec-2,maxrec) + 1 + ixp = mod(mmonth, maxrec) + 1 if (mday >= midmonth) ixm = -99 ! other two points will be used if (mday < midmonth) ixp = -99 @@ -2167,11 +2275,11 @@ subroutine LY_data ! Read 2 monthly values readm = .false. - if (istep==1 .or. (mday==midmonth .and. sec==0)) readm = .true. + if (istep==1 .or. (mday==midmonth .and. msec==0)) readm = .true. - call read_clim_data (readm, 0, ixm, month, ixp, & + call read_clim_data (readm, 0, ixm, mmonth, ixp, & flw_file, cldf_data, field_loc_center, field_type_scalar) - call read_clim_data (readm, 0, ixm, month, ixp, & + call read_clim_data (readm, 0, ixm, mmonth, ixp, & rain_file, fsnow_data, field_loc_center, field_type_scalar) call interpolate_data (cldf_data, cldf) @@ -2190,7 +2298,7 @@ subroutine LY_data maxrec = 1460 ! 365*4 ! current record number - recnum = 4*int(yday) - 3 + int(real(sec,kind=dbl_kind)/sec6hr) + recnum = 4*int(yday) - 3 + int(real(msec,kind=dbl_kind)/sec6hr) ! Compute record numbers for surrounding data (2 on each side) @@ -2278,7 +2386,7 @@ subroutine LY_data ! Save record number oldrecnum = recnum - if (dbug) then + if (forcing_diag) then if (my_task == master_task) write (nu_diag,*) 'LY_bulk_data' vmin = global_minval(fsw,distrb_info,tmask) @@ -2310,13 +2418,13 @@ subroutine LY_data if (my_task.eq.master_task) & write (nu_diag,*) 'Qa',vmin,vmax - endif ! dbug + endif ! forcing_diag end subroutine LY_data !======================================================================= - subroutine JRA55_data (yr) + subroutine JRA55_data use ice_blocks, only: block, get_block use ice_global_reductions, only: global_minval, global_maxval @@ -2324,34 +2432,34 @@ subroutine JRA55_data (yr) use ice_flux, only: fsnow, Tair, uatm, vatm, Qa, fsw, flw use ice_grid, only: hm, tlon, tlat, tmask, umask use ice_state, only: aice - use ice_calendar, only: days_per_year, use_leap_years + use ice_calendar, only: days_per_year - integer (kind=int_kind), intent(in) :: & - yr ! current forcing year - - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & ncid , & ! netcdf file id - i, j, n1, iblk, & - yrp , & ! year after yr in forcing cycle + i, j, n1 , & + lfyear , & ! local year value recnum , & ! record number maxrec , & ! maximum record number - recslot , & ! spline slot for current record - dataloc ! = 1 for data located in middle of time interval - ! = 2 for date located at end of time interval + iblk ! block index + + integer (kind=int_kind), save :: & + frec_info(2,2) = -99 ! remember prior values to reduce reading + ! first dim is yr, recnum + ! second dim is data1 data2 real (kind=dbl_kind) :: & sec3hr , & ! number of seconds in 3 hours secday , & ! number of seconds in day - eps, tt , & ! interpolation coeff calc + eps, tt , & ! for interpolation coefficients Tffresh , & vmin, vmax - logical (kind=log_kind) :: debug_n_d = .false. - - character (char_len_long) :: uwind_file_old character(len=64) :: fieldname !netcdf field name + character (char_len_long) :: uwind_file_old character(len=*), parameter :: subname = '(JRA55_data)' + if (forcing_debug .and. 
my_task == master_task) write(nu_diag,*) subname,'fdbg start' + call icepack_query_parameters(Tffresh_out=Tffresh) call icepack_query_parameters(secday_out=secday) call icepack_warnings_flush(nu_diag) @@ -2359,54 +2467,52 @@ subroutine JRA55_data (yr) file=__FILE__, line=__LINE__) sec3hr = secday/c8 ! seconds in 3 hours - maxrec = days_per_year*8 + maxrec = days_per_year * 8 - if (debug_n_d .and. my_task == master_task) then - write (nu_diag,*) subname,'recnum',recnum - write (nu_diag,*) subname,'maxrec',maxrec - write (nu_diag,*) subname,'days_per_year', days_per_year + if (forcing_debug .and. my_task == master_task) then + write(nu_diag,*) subname,'fdbg dpy, maxrec = ',days_per_year,maxrec endif !------------------------------------------------------------------- ! 3-hourly data ! states are instantaneous, 1st record is 00z Jan 1 ! fluxes are 3 hour averages, 1st record is 00z-03z Jan 1 - ! Both states and fluxes have 1st record defined as 00z Jan 1 ! interpolate states, do not interpolate fluxes - ! fluxes are held constant from [init period, end period) !------------------------------------------------------------------- ! File is NETCDF with winds in NORTH and EAST direction ! file variable names are: - ! glbrad (shortwave W/m^2) - ! dlwsfc (longwave W/m^2) - ! wndewd (eastward wind m/s) - ! wndnwd (northward wind m/s) - ! airtmp (air temperature K) - ! spchmd (specific humidity kg/kg) - ! ttlpcp (precipitation kg/m s-1) + ! glbrad (shortwave W/m^2), 3 hr average + ! dlwsfc (longwave W/m^2), 3 hr average + ! wndewd (eastward wind m/s), instantaneous + ! wndnwd (northward wind m/s), instantaneous + ! airtmp (air temperature K), instantaneous + ! spchmd (specific humidity kg/kg), instantaneous + ! ttlpcp (precipitation kg/m s-1), 3 hr average !------------------------------------------------------------------- uwind_file_old = uwind_file - call file_year(uwind_file,yr) if (uwind_file /= uwind_file_old .and. my_task == master_task) then write(nu_diag,*) subname,' reading forcing file = ',trim(uwind_file) endif call ice_open_nc(uwind_file,ncid) - do n1 = 1,2 + do n1 = 1, 2 + lfyear = fyear + call file_year(uwind_file,lfyear) if (n1 == 1) then - recnum = 8*int(yday) - 7 + int(real(sec,kind=dbl_kind)/sec3hr) + recnum = 8*int(yday) - 7 + int(real(msec,kind=dbl_kind)/sec3hr) if (my_task == master_task .and. (recnum <= 2 .or. recnum >= maxrec-1)) then write(nu_diag,*) subname,' reading forcing file 1st ts = ',trim(uwind_file) endif elseif (n1 == 2) then - recnum = 8*int(yday) - 7 + int(real(sec,kind=dbl_kind)/sec3hr) + 1 + recnum = 8*int(yday) - 7 + int(real(msec,kind=dbl_kind)/sec3hr) + 1 if (recnum > maxrec) then - yrp = fyear_init + mod(nyr,ycycle) ! next year + lfyear = fyear + 1 ! next year + if (lfyear > fyear_final) lfyear = fyear_init recnum = 1 - call file_year(uwind_file,yrp) + call file_year(uwind_file,lfyear) if (my_task == master_task) then write(nu_diag,*) subname,' reading forcing file 2nd ts = ',trim(uwind_file) endif @@ -2415,58 +2521,79 @@ subroutine JRA55_data (yr) endif endif - if (debug_n_d .and. my_task == master_task) then - write(nu_diag,*) subname,' read recnum = ',recnum,n1 + if (forcing_debug .and. my_task == master_task) then + write(nu_diag,*) subname,'fdbg read recnum = ',recnum,n1 endif - fieldname = 'airtmp' - call ice_read_nc(ncid,recnum,fieldname,Tair_data(:,:,n1,:),debug_n_d, & - field_loc=field_loc_center, & - field_type=field_type_scalar) + ! 
to reduce reading, check whether it's the same data as last read - fieldname = 'wndewd' - call ice_read_nc(ncid,recnum,fieldname,uatm_data(:,:,n1,:),debug_n_d, & - field_loc=field_loc_center, & - field_type=field_type_scalar) + if (lfyear /= frec_info(1,n1) .or. recnum /= frec_info(2,n1)) then - fieldname = 'wndnwd' - call ice_read_nc(ncid,recnum,fieldname,vatm_data(:,:,n1,:),debug_n_d, & - field_loc=field_loc_center, & - field_type=field_type_scalar) - - fieldname = 'spchmd' - call ice_read_nc(ncid,recnum,fieldname,Qa_data(:,:,n1,:),debug_n_d, & - field_loc=field_loc_center, & - field_type=field_type_scalar) - - ! only read one timestep for fluxes, 3 hr average, no interpolation - if (n1 == 1) then - fieldname = 'glbrad' - call ice_read_nc(ncid,recnum,fieldname,fsw_data(:,:,n1,:),debug_n_d, & - field_loc=field_loc_center, & - field_type=field_type_scalar) - - fieldname = 'dlwsfc' - call ice_read_nc(ncid,recnum,fieldname,flw_data(:,:,n1,:),debug_n_d, & - field_loc=field_loc_center, & - field_type=field_type_scalar) - - fieldname = 'ttlpcp' - call ice_read_nc(ncid,recnum,fieldname,fsnow_data(:,:,n1,:),debug_n_d, & - field_loc=field_loc_center, & - field_type=field_type_scalar) - endif + ! check whether we can copy values from 2 to 1, should be faster than reading + ! can only do this from 2 to 1 or 1 to 2 without setting up a temporary + ! when time advances, the values previously in data2 are the ones most likely needed next in data1 + ! compare n1=1 year/record with data from last timestep at n1=2 - enddo + if (n1 == 1 .and. lfyear == frec_info(1,2) .and. recnum == frec_info(2,2)) then + Tair_data(:,:,1,:) = Tair_data(:,:,2,:) + uatm_data(:,:,1,:) = uatm_data(:,:,2,:) + vatm_data(:,:,1,:) = vatm_data(:,:,2,:) + Qa_data(:,:,1,:) = Qa_data(:,:,2,:) + fsw_data(:,:,1,:) = fsw_data(:,:,2,:) + flw_data(:,:,1,:) = flw_data(:,:,2,:) + fsnow_data(:,:,1,:) = fsnow_data(:,:,2,:) + else + + fieldname = 'airtmp' + call ice_read_nc(ncid,recnum,fieldname,Tair_data(:,:,n1,:),forcing_debug, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + + fieldname = 'wndewd' + call ice_read_nc(ncid,recnum,fieldname,uatm_data(:,:,n1,:),forcing_debug, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + + fieldname = 'wndnwd' + call ice_read_nc(ncid,recnum,fieldname,vatm_data(:,:,n1,:),forcing_debug, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + + fieldname = 'spchmd' + call ice_read_nc(ncid,recnum,fieldname,Qa_data(:,:,n1,:),forcing_debug, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + + fieldname = 'glbrad' + call ice_read_nc(ncid,recnum,fieldname,fsw_data(:,:,n1,:),forcing_debug, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + + fieldname = 'dlwsfc' + call ice_read_nc(ncid,recnum,fieldname,flw_data(:,:,n1,:),forcing_debug, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + + fieldname = 'ttlpcp' + call ice_read_nc(ncid,recnum,fieldname,fsnow_data(:,:,n1,:),forcing_debug, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + endif ! copy data from n1=2 from last timestep to n1=1 + endif ! input data is same as last timestep + + frec_info(1,n1) = lfyear + frec_info(2,n1) = recnum + + enddo ! n1 call ice_close_nc(ncid) ! reset uwind_file to original year - call file_year(uwind_file,yr) + call file_year(uwind_file,fyear) ! Compute interpolation coefficients eps = 1.0e-6 - tt = real(mod(sec,nint(sec3hr)),kind=dbl_kind) + tt = real(mod(msec,nint(sec3hr)),kind=dbl_kind) c2intp = tt / sec3hr if (c2intp < c0 .and. 
c2intp > c0-eps) c2intp = c0 if (c2intp > c1 .and. c2intp < c1+eps) c2intp = c1 @@ -2476,8 +2603,8 @@ subroutine JRA55_data (yr) call abort_ice (error_message=subname//' ERROR: c2intp out of range', & file=__FILE__, line=__LINE__) endif - if (debug_n_d .and. my_task == master_task) then - write(nu_diag,*) subname,' c12intp = ',c1intp,c2intp + if (forcing_debug .and. my_task == master_task) then + write(nu_diag,*) subname,'fdbg c12intp = ',c1intp,c2intp endif ! Interpolate @@ -2485,10 +2612,10 @@ subroutine JRA55_data (yr) call interpolate_data (uatm_data, uatm) call interpolate_data (vatm_data, vatm) call interpolate_data (Qa_data, Qa) - ! use 3 hr average for heat flux and precip fields - ! call interpolate_data (fsw_data, fsw) - ! call interpolate_data (flw_data, flw) - ! call interpolate_data (fsnow_data, fsnow) + ! use 3 hr average for heat flux and precip fields, no interpolation +! call interpolate_data (fsw_data, fsw) +! call interpolate_data (flw_data, flw) +! call interpolate_data (fsnow_data, fsnow) fsw(:,:,:) = fsw_data(:,:,1,:) flw(:,:,:) = flw_data(:,:,1,:) fsnow(:,:,:) = fsnow_data(:,:,1,:) @@ -2517,39 +2644,30 @@ subroutine JRA55_data (yr) enddo ! iblk !$OMP END PARALLEL DO - if (debug_n_d .or. dbug) then - if (my_task.eq.master_task) & - write (nu_diag,*) subname,'JRA55_bulk_data' - vmin = global_minval(fsw,distrb_info,tmask) - vmax = global_maxval(fsw,distrb_info,tmask) - if (my_task.eq.master_task) & - write (nu_diag,*) subname,'fsw',vmin,vmax - vmin = global_minval(flw,distrb_info,tmask) - vmax = global_maxval(flw,distrb_info,tmask) - if (my_task.eq.master_task) & - write (nu_diag,*) subname,'flw',vmin,vmax - vmin =global_minval(fsnow,distrb_info,tmask) - vmax =global_maxval(fsnow,distrb_info,tmask) - if (my_task.eq.master_task) & - write (nu_diag,*) subname,'fsnow',vmin,vmax - vmin = global_minval(Tair,distrb_info,tmask) - vmax = global_maxval(Tair,distrb_info,tmask) - if (my_task.eq.master_task) & - write (nu_diag,*) subname,'Tair',vmin,vmax - vmin = global_minval(uatm,distrb_info,umask) - vmax = global_maxval(uatm,distrb_info,umask) - if (my_task.eq.master_task) & - write (nu_diag,*) subname,'uatm',vmin,vmax - vmin = global_minval(vatm,distrb_info,umask) - vmax = global_maxval(vatm,distrb_info,umask) - if (my_task.eq.master_task) & - write (nu_diag,*) subname,'vatm',vmin,vmax - vmin = global_minval(Qa,distrb_info,tmask) - vmax = global_maxval(Qa,distrb_info,tmask) - if (my_task.eq.master_task) & - write (nu_diag,*) subname,'Qa',vmin,vmax - - endif ! dbug + if (forcing_diag .or. 
forcing_debug) then + if (my_task.eq.master_task) write (nu_diag,*) subname,'fdbg JRA55_bulk_data' + vmin = global_minval(fsw,distrb_info,tmask) + vmax = global_maxval(fsw,distrb_info,tmask) + if (my_task.eq.master_task) write (nu_diag,*) subname,'fdbg fsw',vmin,vmax + vmin = global_minval(flw,distrb_info,tmask) + vmax = global_maxval(flw,distrb_info,tmask) + if (my_task.eq.master_task) write (nu_diag,*) subname,'fdbg flw',vmin,vmax + vmin =global_minval(fsnow,distrb_info,tmask) + vmax =global_maxval(fsnow,distrb_info,tmask) + if (my_task.eq.master_task) write (nu_diag,*) subname,'fdbg fsnow',vmin,vmax + vmin = global_minval(Tair,distrb_info,tmask) + vmax = global_maxval(Tair,distrb_info,tmask) + if (my_task.eq.master_task) write (nu_diag,*) subname,'fdbg Tair',vmin,vmax + vmin = global_minval(uatm,distrb_info,umask) + vmax = global_maxval(uatm,distrb_info,umask) + if (my_task.eq.master_task) write (nu_diag,*) subname,'fdbg uatm',vmin,vmax + vmin = global_minval(vatm,distrb_info,umask) + vmax = global_maxval(vatm,distrb_info,umask) + if (my_task.eq.master_task) write (nu_diag,*) subname,'fdbg vatm',vmin,vmax + vmin = global_minval(Qa,distrb_info,tmask) + vmax = global_maxval(Qa,distrb_info,tmask) + if (my_task.eq.master_task) write (nu_diag,*) subname,'fdbg Qa',vmin,vmax + endif ! forcing_diag end subroutine JRA55_data @@ -2596,6 +2714,8 @@ subroutine compute_shortwave(nx_block, ny_block, & character(len=*), parameter :: subname = '(compute_shortwave)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + call icepack_query_parameters(secday_out=secday, pi_out=pi) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -2604,7 +2724,7 @@ subroutine compute_shortwave(nx_block, ny_block, & do j=jlo,jhi do i=ilo,ihi deg2rad = pi/c180 -! solar_time = mod(real(sec,kind=dbl_kind),secday)/c3600 & +! solar_time = mod(real(msec,kind=dbl_kind),secday)/c3600 & ! + c12*sin(p5*TLON(i,j)) ! Convert longitude to range of -180 to 180 for LST calculation @@ -2613,7 +2733,7 @@ subroutine compute_shortwave(nx_block, ny_block, & if (lontmp .gt. c180) lontmp = lontmp - c360 if (lontmp .lt. -c180) lontmp = lontmp + c360 - solar_time = mod(real(sec,kind=dbl_kind),secday)/c3600 & + solar_time = mod(real(msec,kind=dbl_kind),secday)/c3600 & + lontmp/c15 if (solar_time .ge. 24._dbl_kind) solar_time = solar_time - 24._dbl_kind hour_angle = (c12 - solar_time)*pi/c12 @@ -2658,6 +2778,8 @@ subroutine Qa_fixLY(nx_block, ny_block, Tair, Qa) character(len=*), parameter :: subname = '(Qa_fixLY)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + call icepack_query_parameters(Tffresh_out=Tffresh, puny_out=puny) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -2700,6 +2822,8 @@ subroutine hadgem_files (yr) character(len=*), parameter :: subname = '(hadgem_files)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + call icepack_query_parameters(calc_strair_out=calc_strair, & calc_Tsfc_out=calc_Tsfc) call icepack_warnings_flush(nu_diag) @@ -2898,6 +3022,8 @@ subroutine hadgem_data character(len=*), parameter :: subname = '(hadgem_data)' + if (forcing_debug .and. 
my_task == master_task) write(nu_diag,*) subname,'fdbg start' + call icepack_query_parameters(Lsub_out=Lsub) call icepack_query_parameters(calc_strair_out=calc_strair, & calc_Tsfc_out=calc_Tsfc) @@ -2913,12 +3039,12 @@ subroutine hadgem_data !------------------------------------------------------------------- midmonth = 15 ! data is given on 15th of every month -! midmonth = fix(p5 * real(daymo(month))) ! exact middle +! midmonth = fix(p5 * real(daymo(mmonth))) ! exact middle ! Compute record numbers for surrounding months maxrec = 12 - ixm = mod(month+maxrec-2,maxrec) + 1 - ixp = mod(month, maxrec) + 1 + ixm = mod(mmonth+maxrec-2,maxrec) + 1 + ixp = mod(mmonth, maxrec) + 1 if (mday >= midmonth) ixm = -99 ! other two points will be used if (mday < midmonth) ixp = -99 @@ -2935,18 +3061,18 @@ subroutine hadgem_data ! Read 2 monthly values readm = .false. - if (istep==1 .or. (mday==midmonth .and. sec==0)) readm = .true. + if (istep==1 .or. (mday==midmonth .and. msec==0)) readm = .true. ! ----------------------------------------------------------- ! Rainfall and snowfall ! ----------------------------------------------------------- fieldname='rainfall' - call read_data_nc (readm, 0, fyear, ixm, month, ixp, & + call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & maxrec, rain_file, fieldname, frain_data, & field_loc_center, field_type_scalar) fieldname='snowfall' - call read_data_nc (readm, 0, fyear, ixm, month, ixp, & + call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & maxrec, snow_file, fieldname, fsnow_data, & field_loc_center, field_type_scalar) @@ -2961,11 +3087,11 @@ subroutine hadgem_data ! -------------------------------------------------------- fieldname='u_10' - call read_data_nc (readm, 0, fyear, ixm, month, ixp, & + call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & maxrec, uwind_file, fieldname, uatm_data, & field_loc_center, field_type_vector) fieldname='v_10' - call read_data_nc (readm, 0, fyear, ixm, month, ixp, & + call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & maxrec, vwind_file, fieldname, vatm_data, & field_loc_center, field_type_vector) @@ -2980,11 +3106,11 @@ subroutine hadgem_data ! -------------------------------------------------------- fieldname='taux' - call read_data_nc (readm, 0, fyear, ixm, month, ixp, & + call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & maxrec, strax_file, fieldname, strax_data, & field_loc_center, field_type_vector) fieldname='tauy' - call read_data_nc (readm, 0, fyear, ixm, month, ixp, & + call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & maxrec, stray_file, fieldname, stray_data, & field_loc_center, field_type_vector) @@ -2999,7 +3125,7 @@ subroutine hadgem_data ! -------------------------------------------------- fieldname='wind_10' - call read_data_nc (readm, 0, fyear, ixm, month, ixp, & + call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & maxrec, wind_file, fieldname, wind_data, & field_loc_center, field_type_scalar) @@ -3022,23 +3148,23 @@ subroutine hadgem_data if (calc_Tsfc .or. oceanmixed_ice .or. 
calc_strair) then fieldname='SW_incoming' - call read_data_nc (readm, 0, fyear, ixm, month, ixp, & + call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & maxrec, fsw_file, fieldname, fsw_data, & field_loc_center, field_type_scalar) fieldname='LW_incoming' - call read_data_nc (readm, 0, fyear, ixm, month, ixp, & + call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & maxrec, flw_file, fieldname, flw_data, & field_loc_center, field_type_scalar) fieldname='t_10' - call read_data_nc (readm, 0, fyear, ixm, month, ixp, & + call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & maxrec, tair_file, fieldname, Tair_data, & field_loc_center, field_type_scalar) fieldname='rho_10' - call read_data_nc (readm, 0, fyear, ixm, month, ixp, & + call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & maxrec, rhoa_file, fieldname, rhoa_data, & field_loc_center, field_type_scalar) fieldname='q_10' - call read_data_nc (readm, 0, fyear, ixm, month, ixp, & + call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & maxrec, humid_file, fieldname, Qa_data, & field_loc_center, field_type_scalar) @@ -3059,7 +3185,7 @@ subroutine hadgem_data ! ------------------------------------------------------ fieldname='sublim' - call read_data_nc (readm, 0, fyear, ixm, month, ixp, & + call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & maxrec, sublim_file, fieldname, sublim_data, & field_loc_center, field_type_scalar) @@ -3068,12 +3194,12 @@ subroutine hadgem_data do n = 1, ncat write(fieldname, '(a,i1)') 'topmeltn',n - call read_data_nc (readm, 0, fyear, ixm, month, ixp, & + call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & maxrec, topmelt_file(n), fieldname, topmelt_data(:,:,:,:,n), & field_loc_center, field_type_scalar) write(fieldname, '(a,i1)') 'botmeltn',n - call read_data_nc (readm, 0, fyear, ixm, month, ixp, & + call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & maxrec, botmelt_file(n), fieldname, botmelt_data(:,:,:,:,n), & field_loc_center, field_type_scalar) @@ -3127,6 +3253,8 @@ subroutine monthly_files (yr) character(len=*), parameter :: subname = '(monthly_files)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + flw_file = & trim(atm_data_dir)//'/MONTHLY/cldf.omip.dat' @@ -3198,6 +3326,8 @@ subroutine monthly_data character(len=*), parameter :: subname = '(monthly_data)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + !------------------------------------------------------------------- ! monthly data ! @@ -3206,12 +3336,12 @@ subroutine monthly_data !------------------------------------------------------------------- midmonth = 15 ! data is given on 15th of every month -! midmonth = fix(p5 * real(daymo(month))) ! exact middle +! midmonth = fix(p5 * real(daymo(mmonth))) ! exact middle ! Compute record numbers for surrounding months maxrec = 12 - ixm = mod(month+maxrec-2,maxrec) + 1 - ixp = mod(month, maxrec) + 1 + ixm = mod(mmonth+maxrec-2,maxrec) + 1 + ixp = mod(mmonth, maxrec) + 1 if (mday >= midmonth) ixm = -99 ! other two points will be used if (mday < midmonth) ixp = -99 @@ -3228,27 +3358,27 @@ subroutine monthly_data ! Read 2 monthly values readm = .false. - if (istep==1 .or. (mday==midmonth .and. sec==0)) readm = .true. + if (istep==1 .or. (mday==midmonth .and. msec==0)) readm = .true. 
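!=======================================================================
! Aside: a standalone sketch of the mid-month bracketing used by the
! monthly readers above. ixm/ixp are the records on either side of the
! current month in a 12-record file; whichever one is not needed for
! the current half of the month is flagged with the -99 sentinel, and
! a fresh read is triggered on the first step or exactly at mid-month
! 00Z (msec==0 under the new calendar names). Values are illustrative.
!=======================================================================
      program demo_month_bracket
      implicit none
      integer, parameter :: maxrec = 12, midmonth = 15
      integer :: mmonth, mday, msec, istep, ixm, ixp
      logical :: readm
      mmonth = 1   ! January
      mday   = 20  ! second half of the month
      istep  = 10
      msec   = 0
      ixm = mod(mmonth+maxrec-2,maxrec) + 1 ! previous month (12 for Jan)
      ixp = mod(mmonth,maxrec) + 1          ! next month (2 for Jan)
      if (mday >= midmonth) ixm = -99 ! second half: prior month unused
      if (mday <  midmonth) ixp = -99 ! first half: next month unused
      readm = (istep == 1 .or. (mday == midmonth .and. msec == 0))
      print *, 'ixm, ixp, readm = ', ixm, ixp, readm ! -99 2 F
      end program demo_month_bracket
!=======================================================================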
- call read_clim_data (readm, 0, ixm, month, ixp, & + call read_clim_data (readm, 0, ixm, mmonth, ixp, & flw_file, cldf_data, & field_loc_center, field_type_scalar) - call read_clim_data (readm, 0, ixm, month, ixp, & + call read_clim_data (readm, 0, ixm, mmonth, ixp, & rain_file, fsnow_data, & field_loc_center, field_type_scalar) - call read_clim_data (readm, 0, ixm, month, ixp, & + call read_clim_data (readm, 0, ixm, mmonth, ixp, & tair_file, Tair_data, & field_loc_center, field_type_scalar) - call read_clim_data (readm, 0, ixm, month, ixp, & + call read_clim_data (readm, 0, ixm, mmonth, ixp, & humid_file, Qa_data, & field_loc_center, field_type_scalar) - call read_clim_data (readm, 0, ixm, month, ixp, & + call read_clim_data (readm, 0, ixm, mmonth, ixp, & wind_file, wind_data, & field_loc_center, field_type_scalar) - call read_clim_data (readm, 0, ixm, month, ixp, & + call read_clim_data (readm, 0, ixm, mmonth, ixp, & strax_file, strax_data, & field_loc_center, field_type_vector) - call read_clim_data (readm, 0, ixm, month, ixp, & + call read_clim_data (readm, 0, ixm, mmonth, ixp, & stray_file, stray_data, & field_loc_center, field_type_vector) @@ -3295,7 +3425,7 @@ subroutine monthly_data enddo ! iblk !$OMP END PARALLEL DO - if (dbug) then + if (forcing_diag) then if (my_task == master_task) write (nu_diag,*) 'LY_bulk_data' vmin = global_minval(fsw,distrb_info,tmask) vmax = global_maxval(fsw,distrb_info,tmask) @@ -3330,7 +3460,7 @@ subroutine monthly_data if (my_task.eq.master_task) & write (nu_diag,*) 'Qa',vmin,vmax - endif ! dbug + endif ! forcing_diag end subroutine monthly_data @@ -3377,6 +3507,8 @@ subroutine oned_data character(len=*), parameter :: subname = '(oned_data)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + diag = .false. ! write diagnostic information if (trim(atm_data_format) == 'nc') then ! read nc file @@ -3452,6 +3584,8 @@ subroutine oned_files character(len=*), parameter :: subname = '(oned_files)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + fsw_file = & trim(atm_data_dir)//'/hourlysolar_brw1989_5yr.nc' @@ -3517,6 +3651,8 @@ subroutine ocn_data_clim (dt) character(len=*), parameter :: subname = '(ocn_data_clim)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (my_task == master_task .and. istep == 1) then if (trim(ocn_data_type)=='clim') then write (nu_diag,*) ' ' @@ -3540,12 +3676,12 @@ subroutine ocn_data_clim (dt) if (trim(ocn_data_type)=='clim') then midmonth = 15 ! data is given on 15th of every month -!!! midmonth = fix(p5 * real(daymo(month))) ! exact middle +!!! midmonth = fix(p5 * real(daymo(mmonth))) ! exact middle ! Compute record numbers for surrounding months maxrec = 12 - ixm = mod(month+maxrec-2,maxrec) + 1 - ixp = mod(month, maxrec) + 1 + ixm = mod(mmonth+maxrec-2,maxrec) + 1 + ixp = mod(mmonth, maxrec) + 1 if (mday >= midmonth) ixm = -99 ! other two points will be used if (mday < midmonth) ixp = -99 @@ -3561,14 +3697,14 @@ subroutine ocn_data_clim (dt) call interp_coeff_monthly (recslot) readm = .false. - if (istep==1 .or. (mday==midmonth .and. sec==0)) readm = .true. + if (istep==1 .or. (mday==midmonth .and. msec==0)) readm = .true. !------------------------------------------------------------------- ! Read two monthly SSS values and interpolate. ! Note: SSS is restored instantaneously to data. 
!------------------------------------------------------------------- - call read_clim_data (readm, 0, ixm, month, ixp, & + call read_clim_data (readm, 0, ixm, mmonth, ixp, & sss_file, sss_data, & field_loc_center, field_type_scalar) call interpolate_data (sss_data, sss) @@ -3592,7 +3728,7 @@ subroutine ocn_data_clim (dt) !------------------------------------------------------------------- if (trim(ocn_data_type)=='clim') then - call read_clim_data (readm, 0, ixm, month, ixp, & + call read_clim_data (readm, 0, ixm, mmonth, ixp, & sst_file, sst_data, & field_loc_center, field_type_scalar) call interpolate_data (sst_data, sstdat) @@ -3673,6 +3809,8 @@ subroutine ocn_data_ncar_init character(len=*), parameter :: subname = '(ocn_data_ncar_init)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (my_task == master_task) then write (nu_diag,*) 'WARNING: evp_prep calculates surface tilt' @@ -3722,13 +3860,14 @@ subroutine ocn_data_ncar_init do m=1,12 ! Note: netCDF does single to double conversion if necessary - if (n >= 4 .and. n <= 7) then - call ice_read_nc(fid, m, vname(n), work1, dbug, & - field_loc_NEcorner, field_type_vector) - else - call ice_read_nc(fid, m, vname(n), work1, dbug, & +! if (n >= 4 .and. n <= 7) then +! call ice_read_nc(fid, m, vname(n), work1, forcing_diag, & +! field_loc_NEcorner, field_type_vector) +! else + call ice_read_nc(fid, m, vname(n), work1, forcing_diag, & field_loc_center, field_type_scalar) - endif +! endif + ocn_frc_m(:,:,:,n,m) = work1(:,:,:) enddo ! month loop @@ -3750,10 +3889,10 @@ subroutine ocn_data_ncar_init do m=1,12 nrec = nrec + 1 if (n >= 4 .and. n <= 7) then - call ice_read (nu_forcing, nrec, work1, 'rda8', dbug, & + call ice_read (nu_forcing, nrec, work1, 'rda8', forcing_diag, & field_loc_NEcorner, field_type_vector) else - call ice_read (nu_forcing, nrec, work1, 'rda8', dbug, & + call ice_read (nu_forcing, nrec, work1, 'rda8', forcing_diag, & field_loc_center, field_type_scalar) endif ocn_frc_m(:,:,:,n,m) = work1(:,:,:) @@ -3764,8 +3903,8 @@ subroutine ocn_data_ncar_init endif !echmod - currents cause Fram outflow to be too large - ocn_frc_m(:,:,:,4,:) = c0 - ocn_frc_m(:,:,:,5,:) = c0 +! ocn_frc_m(:,:,:,4,:) = c0 +! ocn_frc_m(:,:,:,5,:) = c0 !echmod end subroutine ocn_data_ncar_init @@ -3830,6 +3969,8 @@ subroutine ocn_data_ncar_init_3D character(len=*), parameter :: subname = '(ocn_data_ncar_init_3D)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (my_task == master_task) then write (nu_diag,*) 'WARNING: evp_prep calculates surface tilt' @@ -3882,10 +4023,10 @@ subroutine ocn_data_ncar_init_3D ! Note: netCDF does single to double conversion if necessary if (n == 4 .or. n == 5) then ! 3D currents nzlev = 1 ! surface currents - call ice_read_nc_uv(fid, m, nzlev, vname(n), work1, dbug, & + call ice_read_nc_uv(fid, m, nzlev, vname(n), work1, forcing_diag, & field_loc_center, field_type_scalar) else - call ice_read_nc(fid, m, vname(n), work1, dbug, & + call ice_read_nc(fid, m, vname(n), work1, forcing_diag, & field_loc_center, field_type_scalar) endif @@ -3967,6 +4108,8 @@ subroutine ocn_data_ncar(dt) character(len=*), parameter :: subname = '(ocn_data_ncar)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + !------------------------------------------------------------------- ! monthly data ! 
@@ -3975,12 +4118,12 @@ subroutine ocn_data_ncar(dt) !------------------------------------------------------------------- midmonth = 15 ! data is given on 15th of every month -! midmonth = fix(p5 * real(daymo(month),kind=dbl_kind)) ! exact middle +! midmonth = fix(p5 * real(daymo(mmonth),kind=dbl_kind)) ! exact middle ! Compute record numbers for surrounding months maxrec = 12 - ixm = mod(month+maxrec-2,maxrec) + 1 - ixp = mod(month, maxrec) + 1 + ixm = mod(mmonth+maxrec-2,maxrec) + 1 + ixp = mod(mmonth, maxrec) + 1 if (mday >= midmonth) ixm = -99 ! other two points will be used if (mday < midmonth) ixp = -99 @@ -3995,19 +4138,18 @@ subroutine ocn_data_ncar(dt) ! Find interpolation coefficients call interp_coeff_monthly (recslot) + sst_data(:,:,:,:) = c0 do n = nfld, 1, -1 - !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks ! use sst_data arrays as temporary work space until n=1 if (ixm /= -99) then ! first half of month sst_data(:,:,1,iblk) = ocn_frc_m(:,:,iblk,n,ixm) - sst_data(:,:,2,iblk) = ocn_frc_m(:,:,iblk,n,month) + sst_data(:,:,2,iblk) = ocn_frc_m(:,:,iblk,n,mmonth) else ! second half of month - sst_data(:,:,1,iblk) = ocn_frc_m(:,:,iblk,n,month) + sst_data(:,:,1,iblk) = ocn_frc_m(:,:,iblk,n,mmonth) sst_data(:,:,2,iblk) = ocn_frc_m(:,:,iblk,n,ixp) endif enddo - !$OMP END PARALLEL DO call interpolate_data (sst_data,work1) ! masking by hm is necessary due to NaNs in the data file @@ -4023,7 +4165,7 @@ subroutine ocn_data_ncar(dt) do iblk = 1, nblocks if (hm(i,j,iblk) == c1) then if (n == 2) sss (i,j,iblk) = work1(i,j,iblk) - if (n == 3) hmix (i,j,iblk) = work1(i,j,iblk) + if (n == 3) hmix (i,j,iblk) = max(mixed_layer_depth_default,work1(i,j,iblk)) if (n == 4) uocn (i,j,iblk) = work1(i,j,iblk) if (n == 5) vocn (i,j,iblk) = work1(i,j,iblk) if (n == 6) ss_tltx(i,j,iblk) = work1(i,j,iblk) @@ -4071,7 +4213,7 @@ subroutine ocn_data_ncar(dt) !$OMP END PARALLEL DO endif - if (dbug) then + if (forcing_diag) then if (my_task == master_task) & write (nu_diag,*) 'ocn_data_ncar' vmin = global_minval(Tf,distrb_info,tmask) @@ -4125,6 +4267,8 @@ subroutine ocn_data_oned character(len=*), parameter :: subname = '(ocn_data_oned)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + sss (:,:,:) = 34.0_dbl_kind ! sea surface salinity (ppt) call ocn_freezing_temperature @@ -4136,7 +4280,7 @@ subroutine ocn_data_oned ss_tlty(:,:,:) = c0 frzmlt (:,:,:) = c0 ! freezing/melting potential (W/m^2) qdp (:,:,:) = c0 ! deep ocean heat flux (W/m^2) - hmix (:,:,:) = c20 ! ocean mixed layer depth + hmix (:,:,:) = mixed_layer_depth_default ! ocean mixed layer depth end subroutine ocn_data_oned @@ -4180,6 +4324,8 @@ subroutine ocn_data_hadgem(dt) character(len=*), parameter :: subname = '(ocn_data_hadgem)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + !------------------------------------------------------------------- ! monthly data ! @@ -4188,12 +4334,12 @@ subroutine ocn_data_hadgem(dt) !------------------------------------------------------------------- midmonth = 15 ! data is given on 15th of every month -! midmonth = fix(p5 * real(daymo(month))) ! exact middle +! midmonth = fix(p5 * real(daymo(mmonth))) ! exact middle ! Compute record numbers for surrounding months maxrec = 12 - ixm = mod(month+maxrec-2,maxrec) + 1 - ixp = mod(month, maxrec) + 1 + ixm = mod(mmonth+maxrec-2,maxrec) + 1 + ixp = mod(mmonth, maxrec) + 1 if (mday >= midmonth) ixm = -99 ! 
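!=======================================================================
! Aside: a standalone sketch of the mixed-layer floor applied in
! ocn_data_ncar above. Interpolated climatology values can be
! unrealistically shallow (or polluted by NaNs near the coast, hence
! the hm masking), so hmix is clamped from below to the new
! mixed_layer_depth_default (c20, i.e. 20 m). Input values below are
! illustrative.
!=======================================================================
      program demo_hmix_floor
      implicit none
      integer, parameter :: dbl = selected_real_kind(13)
      real (kind=dbl), parameter :: mixed_layer_depth_default = 20.0_dbl ! m
      real (kind=dbl) :: raw(4), hmix(4)
      raw  = (/ 3.0_dbl, 55.0_dbl, 0.0_dbl, 150.0_dbl /) ! interpolated input
      hmix = max(mixed_layer_depth_default, raw) ! never shallower than 20 m
      print *, hmix ! 20.0 55.0 20.0 150.0
      end program demo_hmix_floor
!=======================================================================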
other two points will be used if (mday < midmonth) ixp = -99 @@ -4210,7 +4356,7 @@ subroutine ocn_data_hadgem(dt) ! Read 2 monthly values readm = .false. - if (istep==1 .or. (mday==midmonth .and. sec==0)) readm = .true. + if (istep==1 .or. (mday==midmonth .and. msec==0)) readm = .true. if (my_task == master_task .and. istep == 1) then write (nu_diag,*) ' ' @@ -4231,7 +4377,7 @@ subroutine ocn_data_hadgem(dt) ! ----------------------------------------------------------- sst_file = trim(ocn_data_dir)//'/MONTHLY/sst.1997.nc' fieldname='sst' - call read_data_nc (readm, 0, fyear, ixm, month, ixp, & + call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & maxrec, sst_file, fieldname, sst_data, & field_loc_center, field_type_scalar) @@ -4265,7 +4411,7 @@ subroutine ocn_data_hadgem(dt) filename = trim(ocn_data_dir)//'/MONTHLY/uocn.1997.nc' fieldname='uocn' - call read_data_nc (readm, 0, fyear, ixm, month, ixp, & + call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & maxrec, filename, fieldname, uocn_data, & field_loc_center, field_type_vector) @@ -4274,7 +4420,7 @@ subroutine ocn_data_hadgem(dt) filename = trim(ocn_data_dir)//'/MONTHLY/vocn.1997.nc' fieldname='vocn' - call read_data_nc (readm, 0, fyear, ixm, month, ixp, & + call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & maxrec, filename, fieldname, vocn_data, & field_loc_center, field_type_vector) @@ -4334,6 +4480,10 @@ subroutine ocn_data_hycom_init character (char_len) :: & fieldname ! field name in netcdf file + character(len=*), parameter :: subname = '(ocn_data_hycom_init)' + + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (trim(ocn_data_type) == 'hycom') then sss_file = trim(ocn_data_dir)//'ice.restart.surf.nc' @@ -4344,7 +4494,7 @@ subroutine ocn_data_hycom_init fieldname = 'sss' call ice_open_nc (sss_file, fid) - call ice_read_nc (fid, 1 , fieldname, sss, dbug, & + call ice_read_nc (fid, 1 , fieldname, sss, forcing_diag, & field_loc_center, field_type_scalar) call ice_close_nc(fid) @@ -4359,7 +4509,7 @@ subroutine ocn_data_hycom_init fieldname = 'sst' call ice_open_nc (sst_file, fid) - call ice_read_nc (fid, 1 , fieldname, sst, dbug, & + call ice_read_nc (fid, 1 , fieldname, sst, forcing_diag, & field_loc_center, field_type_scalar) call ice_close_nc(fid) @@ -4387,6 +4537,9 @@ subroutine hycom_atm_files fid ! File id character (char_len) :: & varname ! variable name in netcdf file + character(len=*), parameter :: subname = '(hycom_atm_files)' + + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' fsw_file = trim(atm_data_dir)//'/forcing.shwflx.nc' flw_file = trim(atm_data_dir)//'/forcing.radflx.nc' @@ -4430,7 +4583,6 @@ subroutine hycom_atm_data use ice_flux, only: fsw, fsnow, Tair, uatm, vatm, Qa, flw use ice_domain, only: nblocks - use ice_calendar, only: year_init integer (kind=int_kind) :: & recnum ! record number @@ -4450,11 +4602,13 @@ subroutine hycom_atm_data character(len=*), parameter :: subname = '(hycom_atm_data)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + call icepack_query_parameters(Tffresh_out=Tffresh) call icepack_query_parameters(secday_out=secday) - ! current time in HYCOM jday units - hcdate = hc_jday(nyr+year_init-1,0,0)+ yday+sec/secday + ! current time in HYCOM jday units (HYCOM ref year: 1900,12,31,000000) + hcdate = real(compute_days_between(1900,12,31,myear,mmonth,mday)) + msec/secday ! 
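
Editor's note: the hcdate change above replaces the old nyr/year_init arithmetic with a day count from the fixed HYCOM reference date (1900-12-31) to the current model date, plus the fraction of the current day. A sketch under the assumption that compute_days_between is a plain Gregorian day difference; the local day_number function below is a standard Julian-day-number formula standing in for it.

program hycom_jday_sketch
   implicit none
   integer :: msec
   real(kind=8) :: hcdate, secday

   secday = 86400.0d0
   msec   = 43200                       ! assumed example: 12:00 UTC

   ! whole days 1900-12-31 -> 2005-01-01, plus the day fraction
   hcdate = real(day_number(2005,1,1) - day_number(1900,12,31), 8) &
          + real(msec,8)/secday
   write(*,*) 'hcdate =', hcdate        ! 37987.5 days since the reference

contains

   integer function day_number(y, m, d)  ! Gregorian day count (Julian day no.)
      integer, intent(in) :: y, m, d
      integer :: a, yy, mm
      a  = (14 - m)/12
      yy = y + 4800 - a
      mm = m + 12*a - 3
      day_number = d + (153*mm+2)/5 + 365*yy + yy/4 - yy/100 + yy/400 - 32045
   end function day_number

end program hycom_jday_sketch
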
Init recnum try recnum=min(max(oldrecnum,1),Njday_atm-1) @@ -4477,7 +4631,7 @@ subroutine hycom_atm_data write (nu_diag,*) & 'ERROR: CICE: Atm forcing not available at hcdate =',hcdate write (nu_diag,*) & - 'ERROR: CICE: nyr, year_init, yday ,sec = ',nyr, year_init, yday, sec + 'ERROR: CICE: myear, yday ,msec = ',myear, yday, msec call abort_ice ('ERROR: CICE stopped') endif @@ -4528,7 +4682,7 @@ subroutine hycom_atm_data endif ! Interpolate - if (dbug) then + if (forcing_diag) then if (my_task == master_task) then write(nu_diag,*)'CICE: Atm. interpolate: = ',& hcdate,c1intp,c2intp @@ -4581,7 +4735,7 @@ subroutine read_data_nc_point (flag, recd, yr, ixm, ixx, ixp, & ! data is missing, and we assume periodicity when monthly data ! is missing. ! - use ice_diagnostics, only: check_step + use ice_diagnostics, only: debug_model_step logical (kind=log_kind), intent(in) :: flag @@ -4605,8 +4759,6 @@ subroutine read_data_nc_point (flag, recd, yr, ixm, ixx, ixp, & real (kind=dbl_kind), dimension(2), intent(inout) :: & field_data ! 2 values needed for interpolation - character(len=*), parameter :: subname = '(read_data_nc_point)' - integer (kind=int_kind) :: & nrec , & ! record number to read n2, n4 , & ! like ixm and ixp, but @@ -4614,13 +4766,17 @@ subroutine read_data_nc_point (flag, recd, yr, ixm, ixx, ixp, & arg , & ! value of time argument in field_data fid ! file id for netCDF routines + character(len=*), parameter :: subname = '(read_data_nc_point)' + + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + call ice_timer_start(timer_readwrite) ! reading/writing field_data = c0 ! to satisfy intent(out) attribute - if (istep1 > check_step) dbug = .true. !! debugging + if (istep1 > debug_model_step) forcing_diag = .true. !! debugging - if (my_task==master_task .and. (dbug)) then + if (my_task==master_task .and. (forcing_diag)) then write(nu_diag,*) ' ', trim(data_file) endif @@ -4667,7 +4823,7 @@ subroutine read_data_nc_point (flag, recd, yr, ixm, ixx, ixp, & nrec = recd + n2 call ice_read_nc & - (fid, nrec, fieldname, field_data(arg), dbug, & + (fid, nrec, fieldname, field_data(arg), forcing_diag, & field_loc, field_type) !if (ixx==1) call ice_close_nc(fid) @@ -4682,7 +4838,7 @@ subroutine read_data_nc_point (flag, recd, yr, ixm, ixx, ixp, & nrec = recd + ixx call ice_read_nc & - (fid, nrec, fieldname, field_data(arg), dbug, & + (fid, nrec, fieldname, field_data(arg), forcing_diag, & field_loc, field_type) if (ixp /= -99) then @@ -4708,7 +4864,7 @@ subroutine read_data_nc_point (flag, recd, yr, ixm, ixx, ixp, & nrec = recd + n4 call ice_read_nc & - (fid, nrec, fieldname, field_data(arg), dbug, & + (fid, nrec, fieldname, field_data(arg), forcing_diag, & field_loc, field_type) endif ! ixp /= -99 @@ -4726,6 +4882,8 @@ subroutine ISPOL_files character(len=*), parameter :: subname = '(ISPOL_files)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + fsw_file = & trim(atm_data_dir)//'/fsw_sfc_4Xdaily.nc' @@ -4817,6 +4975,8 @@ subroutine ISPOL_data character(len=*), parameter :: subname = '(ISPOL_data)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + call icepack_query_parameters(secday_out=secday) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -4914,7 +5074,7 @@ subroutine ISPOL_data maxrec = 1460 ! 366*4 ! 
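
Editor's note: the read_data_nc_point hunk above also shows the renamed step-gated switch: once the step counter passes debug_model_step, forcing diagnostics turn themselves on for the rest of the run. A minimal sketch of that gate; istep1 and the threshold are assumed example values.

program debug_gate_sketch
   implicit none
   integer :: istep1, debug_model_step
   logical :: forcing_diag

   forcing_diag     = .false.
   debug_model_step = 100        ! assumed namelist value
   istep1           = 101        ! first step past the threshold

   if (istep1 > debug_model_step) forcing_diag = .true.
   write(*,*) 'forcing_diag =', forcing_diag   ! T from step 101 onward
end program debug_gate_sketch
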
current record number - recnum4X = 4*int(yday) - 3 + int(real(sec,kind=dbl_kind)/sec1hr) + recnum4X = 4*int(yday) - 3 + int(real(msec,kind=dbl_kind)/sec1hr) ! Compute record numbers for surrounding data (2 on each side) ixm = mod(recnum4X+maxrec-2,maxrec) + 1 @@ -5015,6 +5175,8 @@ subroutine ocn_data_ispol_init character(len=*), parameter :: subname = '(ocn_data_ispol_init)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + if (my_task == master_task) then if (restore_ocn) write (nu_diag,*) & @@ -5040,10 +5202,10 @@ subroutine ocn_data_ispol_init do m=1,12 ! Note: netCDF does single to double conversion if necessary if (n >= 4 .and. n <= 7) then - call ice_read_nc(fid, m, vname(n), work, dbug, & + call ice_read_nc(fid, m, vname(n), work, forcing_diag, & field_loc_NEcorner, field_type_vector) else - call ice_read_nc(fid, m, vname(n), work, dbug, & + call ice_read_nc(fid, m, vname(n), work, forcing_diag, & field_loc_center, field_type_scalar) endif ocn_frc_m(:,:,:,n,m) = work @@ -5074,6 +5236,7 @@ subroutine box2001_data use ice_domain, only: nblocks use ice_domain_size, only: max_blocks + use ice_calendar, only: timesecs use ice_blocks, only: nx_block, ny_block, nghost use ice_flux, only: uocn, vocn, uatm, vatm, wind, rhoa, strax, stray use ice_grid, only: uvm, to_ugrid @@ -5089,6 +5252,11 @@ subroutine box2001_data real (kind=dbl_kind) :: & secday, pi , puny, period, pi2, tau + + character(len=*), parameter :: subname = '(box2001_data)' + + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + call icepack_query_parameters(pi_out=pi, pi2_out=pi2, puny_out=puny) call icepack_query_parameters(secday_out=secday) @@ -5111,12 +5279,12 @@ subroutine box2001_data vocn(i,j,iblk) = vocn(i,j,iblk) * uvm(i,j,iblk) ! wind components - uatm(i,j,iblk) = c5 + (sin(pi2*time/period)-c3) & + uatm(i,j,iblk) = c5 + (sin(pi2*timesecs/period)-c3) & * sin(pi2*real(i-nghost, kind=dbl_kind) & /real(nx_global,kind=dbl_kind)) & * sin(pi *real(j-nghost, kind=dbl_kind) & /real(ny_global,kind=dbl_kind)) - vatm(i,j,iblk) = c5 + (sin(pi2*time/period)-c3) & + vatm(i,j,iblk) = c5 + (sin(pi2*timesecs/period)-c3) & * sin(pi *real(i-nghost, kind=dbl_kind) & /real(nx_global,kind=dbl_kind)) & * sin(pi2*real(j-nghost, kind=dbl_kind) & @@ -5180,6 +5348,8 @@ subroutine get_wave_spec logical (kind=log_kind) :: wave_spec character(len=*), parameter :: subname = '(get_wave_spec)' + if (forcing_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + call ice_timer_start(timer_fsd) call icepack_query_parameters(wave_spec_out=wave_spec, & @@ -5191,7 +5361,7 @@ subroutine get_wave_spec ! if no wave data is provided, wave_spectrum is zero everywhere wave_spectrum(:,:,:,:) = c0 wave_spec_dir = ocn_data_dir - dbug = .false. + forcing_diag = .false. ! 
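
Editor's note: in the box2001_data hunk above, the analytic wind now uses timesecs from ice_calendar in place of the old time variable. A standalone sketch evaluating the same uatm expression at one grid point; the constants c3/c5 are written out, and the domain size, forcing period (assumed four days), and sample point are example values, not taken from the patch.

program box2001_wind_sketch
   implicit none
   real(kind=8), parameter :: pi  = 3.14159265358979d0
   real(kind=8), parameter :: pi2 = 2.0d0*pi
   real(kind=8), parameter :: c3  = 3.0d0, c5 = 5.0d0
   real(kind=8) :: period, timesecs, xi, yj, uatm
   integer :: i, j, nghost, nx_global, ny_global

   period   = 4.0d0*86400.0d0      ! assumed 4-day forcing period
   timesecs = 86400.0d0            ! one day into the run
   nghost = 1; nx_global = 80; ny_global = 80
   i = 41; j = 41                  ! a point on the mid-x line

   xi = real(i-nghost,8)/real(nx_global,8)
   yj = real(j-nghost,8)/real(ny_global,8)
   uatm = c5 + (sin(pi2*timesecs/period)-c3) * sin(pi2*xi) * sin(pi*yj)
   write(*,*) 'uatm =', uatm       ! ~5.0: the spatial sine vanishes at xi=0.5
end program box2001_wind_sketch
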
wave spectrum and frequencies if (wave_spec) then @@ -5209,7 +5379,7 @@ subroutine get_wave_spec else #ifdef USE_NETCDF call ice_open_nc(wave_spec_file,fid) - call ice_read_nc_xyf (fid, 1, 'efreq', wave_spectrum(:,:,:,:), dbug, & + call ice_read_nc_xyf (fid, 1, 'efreq', wave_spectrum(:,:,:,:), forcing_diag, & field_loc_center, field_type_scalar) call ice_close_nc(fid) #else diff --git a/cicecore/cicedynB/general/ice_forcing_bgc.F90 b/cicecore/cicedynB/general/ice_forcing_bgc.F90 index e5ef851fa..d9408c304 100644 --- a/cicecore/cicedynB/general/ice_forcing_bgc.F90 +++ b/cicecore/cicedynB/general/ice_forcing_bgc.F90 @@ -14,7 +14,7 @@ module ice_forcing_bgc use ice_blocks, only: nx_block, ny_block use ice_domain_size, only: max_blocks use ice_communicate, only: my_task, master_task - use ice_calendar, only: dt, istep, sec, mday, month + use ice_calendar, only: dt, istep, msec, mday, mmonth use ice_fileunits, only: nu_diag use ice_arrays_column, only: restore_bgc, & bgc_data_dir, fe_data_type @@ -163,12 +163,12 @@ subroutine get_forcing_bgc !------------------------------------------------------------------- midmonth = 15 ! data is given on 15th of every month -!!! midmonth = fix(p5 * real(daymo(month))) ! exact middle +!!! midmonth = fix(p5 * real(daymo(mmonth))) ! exact middle ! Compute record numbers for surrounding months maxrec = 12 - ixm = mod(month+maxrec-2,maxrec) + 1 - ixp = mod(month, maxrec) + 1 + ixm = mod(mmonth+maxrec-2,maxrec) + 1 + ixp = mod(mmonth, maxrec) + 1 if (mday >= midmonth) ixm = -99 ! other two points will be used if (mday < midmonth) ixp = -99 @@ -184,7 +184,7 @@ subroutine get_forcing_bgc call interp_coeff_monthly (recslot) readm = .false. - if (istep==1 .or. (mday==midmonth .and. sec==0)) readm = .true. + if (istep==1 .or. (mday==midmonth .and. msec==0)) readm = .true. endif ! 'clim prep' @@ -194,11 +194,11 @@ subroutine get_forcing_bgc !------------------------------------------------------------------- if (trim(bgc_data_type)=='clim' .AND. tr_bgc_Sil) then - ! call read_clim_data (readm, 0, ixm, month, ixp, & + ! call read_clim_data (readm, 0, ixm, mmonth, ixp, & ! sil_file, sil_data, & ! field_loc_center, field_type_scalar) fieldname = 'silicate' - call read_clim_data_nc (readm, 0, ixm, month, ixp, & + call read_clim_data_nc (readm, 0, ixm, mmonth, ixp, & sil_file, fieldname, sil_data, & field_loc_center, field_type_scalar) call interpolate_data (sil_data, sildat) @@ -276,11 +276,11 @@ subroutine get_forcing_bgc !------------------------------------------------------------------- if (trim(bgc_data_type)=='clim' .AND. tr_bgc_Nit) then - ! call read_clim_data (readm, 0, ixm, month, ixp, & + ! call read_clim_data (readm, 0, ixm, mmonth, ixp, & ! nit_file, nit_data, & ! field_loc_center, field_type_scalar) fieldname = 'nitrate' - call read_clim_data_nc (readm, 0, ixm, month, ixp, & + call read_clim_data_nc (readm, 0, ixm, mmonth, ixp, & nit_file, fieldname, nit_data, & field_loc_center, field_type_scalar) call interpolate_data (nit_data, nitdat) @@ -584,7 +584,7 @@ end subroutine faero_default subroutine faero_data - use ice_calendar, only: month, mday, istep, sec + use ice_calendar, only: mmonth, mday, istep, msec use ice_domain_size, only: max_blocks use ice_blocks, only: nx_block, ny_block use ice_flux_bgc, only: faero_atm @@ -625,12 +625,12 @@ subroutine faero_data !------------------------------------------------------------------- midmonth = 15 ! data is given on 15th of every month -! midmonth = fix(p5 * real(daymo(month))) ! exact middle +! 
midmonth = fix(p5 * real(daymo(mmonth))) ! exact middle ! Compute record numbers for surrounding months maxrec = 12 - ixm = mod(month+maxrec-2,maxrec) + 1 - ixp = mod(month, maxrec) + 1 + ixm = mod(mmonth+maxrec-2,maxrec) + 1 + ixp = mod(mmonth, maxrec) + 1 if (mday >= midmonth) ixm = 99 ! other two points will be used if (mday < midmonth) ixp = 99 @@ -647,23 +647,23 @@ subroutine faero_data ! Read 2 monthly values readm = .false. - if (istep==1 .or. (mday==midmonth .and. sec==0)) readm = .true. + if (istep==1 .or. (mday==midmonth .and. msec==0)) readm = .true. ! aero_file = trim(atm_data_dir)//'faero.nc' aero_file = '/usr/projects/climate/eclare/DATA/gx1v3/faero.nc' fieldname='faero_atm001' - call read_clim_data_nc (readm, 0, ixm, month, ixp, & + call read_clim_data_nc (readm, 0, ixm, mmonth, ixp, & aero_file, fieldname, aero1_data, & field_loc_center, field_type_scalar) fieldname='faero_atm002' - call read_clim_data_nc (readm, 0, ixm, month, ixp, & + call read_clim_data_nc (readm, 0, ixm, mmonth, ixp, & aero_file, fieldname, aero2_data, & field_loc_center, field_type_scalar) fieldname='faero_atm003' - call read_clim_data_nc (readm, 0, ixm, month, ixp, & + call read_clim_data_nc (readm, 0, ixm, mmonth, ixp, & aero_file, fieldname, aero3_data, & field_loc_center, field_type_scalar) @@ -727,12 +727,12 @@ subroutine fzaero_data !------------------------------------------------------------------- midmonth = 15 ! data is given on 15th of every month -! midmonth = fix(p5 * real(daymo(month))) ! exact middle +! midmonth = fix(p5 * real(daymo(mmonth))) ! exact middle ! Compute record numbers for surrounding months maxrec = 12 - ixm = mod(month+maxrec-2,maxrec) + 1 - ixp = mod(month, maxrec) + 1 + ixm = mod(mmonth+maxrec-2,maxrec) + 1 + ixp = mod(mmonth, maxrec) + 1 if (mday >= midmonth) ixm = -99 ! other two points will be used if (mday < midmonth) ixp = -99 @@ -749,14 +749,14 @@ subroutine fzaero_data ! Read 2 monthly values readm = .false. - if (istep==1 .or. (mday==midmonth .and. sec==0)) readm = .true. + if (istep==1 .or. (mday==midmonth .and. msec==0)) readm = .true. ! aero_file = trim(atm_data_dir)//'faero.nc' ! 
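
Editor's note: faero_data above now tests msec when deciding whether the two bracketing monthly records must be (re)read: on the very first step, or exactly at midnight on the 15th. A minimal sketch of that flag with assumed example values.

program readm_sketch
   implicit none
   integer :: istep, mday, msec, midmonth
   logical :: readm

   midmonth = 15
   istep = 240          ! assumed: some mid-run step ...
   mday  = 15           ! ... falling at midnight on the 15th
   msec  = 0

   readm = .false.
   if (istep==1 .or. (mday==midmonth .and. msec==0)) readm = .true.
   write(*,*) 'readm =', readm   ! T: refresh the two monthly records
end program readm_sketch
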
Cam5 monthly total black carbon deposition on the gx1 grid" aero_file = '/usr/projects/climate/njeffery/DATA/CAM/Hailong_Wang/Cam5_bc_monthly_popgrid.nc' fieldname='bcd' - call read_clim_data_nc (readm, 0, ixm, month, ixp, & + call read_clim_data_nc (readm, 0, ixm, mmonth, ixp, & aero_file, fieldname, aero_data, & field_loc_center, field_type_scalar) diff --git a/cicecore/cicedynB/general/ice_init.F90 b/cicecore/cicedynB/general/ice_init.F90 index b59a93862..5e5fd144f 100644 --- a/cicecore/cicedynB/general/ice_init.F90 +++ b/cicecore/cicedynB/general/ice_init.F90 @@ -58,16 +58,18 @@ module ice_init subroutine input_data use ice_broadcast, only: broadcast_scalar, broadcast_array - use ice_diagnostics, only: diag_file, print_global, print_points, latpnt, lonpnt + use ice_diagnostics, only: diag_file, print_global, print_points, latpnt, lonpnt, & + debug_model, debug_model_step use ice_domain, only: close_boundaries, orca_halogrid use ice_domain_size, only: ncat, nilyr, nslyr, nblyr, nfsd, nfreq, & n_iso, n_aero, n_zaero, n_algae, & n_doc, n_dic, n_don, n_fed, n_fep, & max_nstrm - use ice_calendar, only: year_init, istep0, histfreq, histfreq_n, & + use ice_calendar, only: year_init, month_init, day_init, sec_init, & + istep0, histfreq, histfreq_n, & dumpfreq, dumpfreq_n, diagfreq, & npt, dt, ndtd, days_per_year, use_leap_years, & - write_ic, dump_last + write_ic, dump_last, npt_unit use ice_arrays_column, only: oceanmixed_ice use ice_restart_column, only: restart_age, restart_FY, restart_lvl, & restart_pond_cesm, restart_pond_lvl, restart_pond_topo, restart_aero, & @@ -82,7 +84,7 @@ subroutine input_data use ice_flux, only: default_season use ice_flux_bgc, only: cpl_bgc use ice_forcing, only: & - ycycle, fyear_init, dbug, & + ycycle, fyear_init, forcing_diag, & atm_data_type, atm_data_dir, precip_units, rotate_wind, & atm_data_format, ocn_data_format, & bgc_data_type, & @@ -125,7 +127,7 @@ subroutine input_data mu_rdg, hs0, dpscale, rfracmin, rfracmax, pndaspect, hs1, hp1, & a_rapid_mode, Rac_rapid_mode, aspect_rapid_mode, dSdt_slow_mode, & phi_c_slow_mode, phi_i_mushy, kalg, atmiter_conv, Pstar, Cstar, & - sw_frac, sw_dtemp, floediam, hfrazilmin + sw_frac, sw_dtemp, floediam, hfrazilmin, iceruf integer (kind=int_kind) :: ktherm, kstrength, krdg_partic, krdg_redist, natmiter, & kitd, kcatbound, ktransport @@ -154,7 +156,7 @@ subroutine input_data !----------------------------------------------------------------- namelist /setup_nml/ & - days_per_year, use_leap_years, year_init, istep0, & + days_per_year, use_leap_years, istep0, npt_unit, & dt, npt, ndtd, numin, & runtype, runid, bfbflag, numax, & ice_ic, restart, restart_dir, restart_file, & @@ -162,9 +164,10 @@ subroutine input_data pointer_file, dumpfreq, dumpfreq_n, dump_last, & diagfreq, diag_type, diag_file, history_format,& print_global, print_points, latpnt, lonpnt, & - dbug, histfreq, histfreq_n, hist_avg, & + forcing_diag, histfreq, histfreq_n, hist_avg, & history_dir, history_file, history_precision, cpl_bgc, & - conserv_check, & + conserv_check, debug_model, debug_model_step, & + year_init, month_init, day_init, sec_init, & write_ic, incond_dir, incond_file, version_name namelist /grid_nml/ & @@ -224,7 +227,7 @@ subroutine input_data namelist /forcing_nml/ & formdrag, atmbndy, calc_strair, calc_Tsfc, & highfreq, natmiter, atmiter_conv, & - ustar_min, emissivity, & + ustar_min, emissivity, iceruf, & fbot_xfer_type, update_ocn_f, l_mpond_fresh, tfrz_option, & oceanmixed_ice, restore_ice, restore_ocn, trestore, & precip_units, 
default_season, wave_spec_type,nfreq, & @@ -250,6 +253,9 @@ subroutine input_data days_per_year = 365 ! number of days in a year use_leap_years= .false.! if true, use leap years (Feb 29) year_init = 0 ! initial year + month_init = 1 ! initial month + day_init = 1 ! initial day + sec_init = 0 ! initial second istep0 = 0 ! no. of steps taken in previous integrations, ! real (dumped) or imagined (to set calendar) #ifndef CESMCOUPLED @@ -258,7 +264,10 @@ subroutine input_data numin = 11 ! min allowed unit number numax = 99 ! max allowed unit number npt = 99999 ! total number of time steps (dt) + npt_unit = '1' ! units of npt 'y', 'm', 'd', 's', '1' diagfreq = 24 ! how often diag output is written + debug_model = .false. ! debug output + debug_model_step = 999999999 ! debug model after this step number print_points = .false. ! if true, print point data print_global = .true. ! if true, print global diagnostic data bfbflag = 'off' ! off = optimized @@ -370,6 +379,7 @@ subroutine input_data calc_Tsfc = .true. ! calculate surface temperature update_ocn_f = .false. ! include fresh water and salt fluxes for frazil ustar_min = 0.005 ! minimum friction velocity for ocean heat flux (m/s) + iceruf = 0.0005_dbl_kind ! ice surface roughness at atmosphere interface (m) emissivity = 0.985 ! emissivity of snow and ice l_mpond_fresh = .false. ! logical switch for including meltpond freshwater ! flux feedback to ocean model @@ -426,7 +436,7 @@ subroutine input_data restore_ocn = .false. ! restore sst if true trestore = 90 ! restoring timescale, days (0 instantaneous) restore_ice = .false. ! restore ice state on grid edges if true - dbug = .false. ! true writes diagnostics for input forcing + forcing_diag = .false. ! true writes diagnostics for input forcing latpnt(1) = 90._dbl_kind ! latitude of diagnostic point 1 (deg) lonpnt(1) = 0._dbl_kind ! longitude of point 1 (deg) @@ -584,10 +594,16 @@ subroutine input_data call broadcast_scalar(days_per_year, master_task) call broadcast_scalar(use_leap_years, master_task) call broadcast_scalar(year_init, master_task) + call broadcast_scalar(month_init, master_task) + call broadcast_scalar(day_init, master_task) + call broadcast_scalar(sec_init, master_task) call broadcast_scalar(istep0, master_task) call broadcast_scalar(dt, master_task) call broadcast_scalar(npt, master_task) + call broadcast_scalar(npt_unit, master_task) call broadcast_scalar(diagfreq, master_task) + call broadcast_scalar(debug_model, master_task) + call broadcast_scalar(debug_model_step, master_task) call broadcast_scalar(print_points, master_task) call broadcast_scalar(print_global, master_task) call broadcast_scalar(bfbflag, master_task) @@ -722,6 +738,7 @@ subroutine input_data call broadcast_scalar(update_ocn_f, master_task) call broadcast_scalar(l_mpond_fresh, master_task) call broadcast_scalar(ustar_min, master_task) + call broadcast_scalar(iceruf, master_task) call broadcast_scalar(emissivity, master_task) call broadcast_scalar(fbot_xfer_type, master_task) call broadcast_scalar(precip_units, master_task) @@ -741,14 +758,12 @@ subroutine input_data call broadcast_scalar(restore_ocn, master_task) call broadcast_scalar(trestore, master_task) call broadcast_scalar(restore_ice, master_task) - call broadcast_scalar(dbug, master_task) + call broadcast_scalar(forcing_diag, master_task) call broadcast_array (latpnt(1:2), master_task) call broadcast_array (lonpnt(1:2), master_task) call broadcast_scalar(runid, master_task) call broadcast_scalar(runtype, master_task) - - if (dbug) & ! 
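
Editor's note: the new npt_unit namelist option above lets npt be expressed in years, months, days, seconds, or raw steps ('1'). The sketch below only illustrates the unit codes; the real conversion lives in ice_calendar and honors the calendar in use, whereas this toy assumes fixed-length 30-day months and 365-day years.

program npt_unit_sketch
   implicit none
   character(len=1) :: npt_unit
   integer :: npt, nsteps
   real(kind=8) :: dt, length_s

   dt       = 3600.0d0       ! assumed 1-hour time step
   npt      = 5
   npt_unit = 'd'            ! run length: 5 days

   select case (npt_unit)
   case ('y'); length_s = real(npt,8)*365.0d0*86400.0d0  ! toy: no leap years
   case ('m'); length_s = real(npt,8)*30.0d0*86400.0d0   ! toy: 30-day months
   case ('d'); length_s = real(npt,8)*86400.0d0
   case ('s'); length_s = real(npt,8)
   case default                                          ! '1' = raw steps
      length_s = real(npt,8)*dt
   end select

   nsteps = nint(length_s/dt)
   write(*,*) 'nsteps =', nsteps   ! 120 for 5 days at dt = 3600 s
end program npt_unit_sketch
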
else only master_task writes to file - call broadcast_scalar(nu_diag, master_task) + !call broadcast_scalar(nu_diag, master_task) ! tracers call broadcast_scalar(tr_iage, master_task) @@ -1443,6 +1458,7 @@ subroutine input_data tmpstr2 = ' : four constant albedos' else tmpstr2 = ' : unknown value' + abort_list = trim(abort_list)//":23" endif write(nu_diag,1030) ' albedo_type = ', trim(albedo_type),trim(tmpstr2) if (trim(albedo_type) == 'ccsm3') then @@ -1469,6 +1485,7 @@ subroutine input_data write(nu_diag,1010) ' calc_strair = ', calc_strair,' : calculate wind stress and speed' write(nu_diag,1010) ' rotate_wind = ', rotate_wind,' : rotate wind/stress to computational grid' write(nu_diag,1010) ' formdrag = ', formdrag,' : use form drag parameterization' + write(nu_diag,1000) ' iceruf = ', iceruf, ' : ice surface roughness at atmosphere interface (m)' if (trim(atmbndy) == 'default') then tmpstr2 = ' : stability-based boundary layer' write(nu_diag,1010) ' highfreq = ', highfreq,' : high-frequency atmospheric coupling' @@ -1621,11 +1638,17 @@ subroutine input_data write(nu_diag,1031) ' runid = ', trim(runid) write(nu_diag,1031) ' runtype = ', trim(runtype) write(nu_diag,1021) ' year_init = ', year_init + write(nu_diag,1021) ' month_init = ', month_init + write(nu_diag,1021) ' day_init = ', day_init + write(nu_diag,1021) ' sec_init = ', sec_init write(nu_diag,1021) ' istep0 = ', istep0 + write(nu_diag,1031) ' npt_unit = ', trim(npt_unit) write(nu_diag,1021) ' npt = ', npt write(nu_diag,1021) ' diagfreq = ', diagfreq write(nu_diag,1011) ' print_global = ', print_global write(nu_diag,1011) ' print_points = ', print_points + write(nu_diag,1011) ' debug_model = ', debug_model + write(nu_diag,1022) ' debug_model_step = ', debug_model_step write(nu_diag,1031) ' bfbflag = ', trim(bfbflag) write(nu_diag,1021) ' numin = ', numin write(nu_diag,1021) ' numax = ', numax @@ -1785,7 +1808,7 @@ subroutine input_data wave_spec_type_in = wave_spec_type, & wave_spec_in=wave_spec, nfreq_in=nfreq, & tfrz_option_in=tfrz_option, kalg_in=kalg, fbot_xfer_type_in=fbot_xfer_type, & - Pstar_in=Pstar, Cstar_in=Cstar, & + Pstar_in=Pstar, Cstar_in=Cstar, iceruf_in=iceruf, & sw_redist_in=sw_redist, sw_frac_in=sw_frac, sw_dtemp_in=sw_dtemp) call icepack_init_tracer_flags(tr_iage_in=tr_iage, tr_FY_in=tr_FY, & tr_lvl_in=tr_lvl, tr_iso_in=tr_iso, tr_aero_in=tr_aero, & @@ -1806,6 +1829,7 @@ subroutine input_data 1011 format (a20,1x,l6) 1020 format (a20,8x,i6,1x,a) ! integer 1021 format (a20,1x,i6) + 1022 format (a20,1x,i12) 1023 format (a20,1x,6i6) 1030 format (a20,a14,1x,a) ! character 1031 format (a20,1x,a,a) diff --git a/cicecore/cicedynB/general/ice_step_mod.F90 b/cicecore/cicedynB/general/ice_step_mod.F90 index b21908e77..29bfdbf0e 100644 --- a/cicecore/cicedynB/general/ice_step_mod.F90 +++ b/cicecore/cicedynB/general/ice_step_mod.F90 @@ -12,7 +12,7 @@ module ice_step_mod use ice_kinds_mod - use ice_constants, only: c0, c1, c1000, c4 + use ice_constants, only: c0, c1, c1000, c4, p25 use ice_exit, only: abort_ice use ice_fileunits, only: nu_diag use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted @@ -189,18 +189,18 @@ subroutine step_therm1 (dt, iblk) use ice_prescribed_mod, only: prescribed_ice #else logical (kind=log_kind) :: & - prescribed_ice ! if .true., use prescribed ice instead of computed + prescribed_ice ! if .true., use prescribed ice instead of computed #endif real (kind=dbl_kind), intent(in) :: & - dt ! time step + dt ! time step (s) integer (kind=int_kind), intent(in) :: & - iblk ! 
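
Editor's note: the albedo_type check above appends an error code (":23") to abort_list instead of aborting immediately, so input_data can flag every bad namelist setting in one pass before stopping. A sketch of that accumulate-then-abort idiom; the final report and stop are modeled on the pattern, not copied from elsewhere in the file.

program abort_list_sketch
   implicit none
   character(len=64) :: abort_list
   character(len=16) :: albedo_type

   abort_list  = ''
   albedo_type = 'unknown'      ! assumed bad namelist value

   if (trim(albedo_type) /= 'ccsm3' .and. &
       trim(albedo_type) /= 'constant') then
      abort_list = trim(abort_list)//":23"   ! record the failure, keep going
   endif
   ! ... further checks append further codes ...

   if (len_trim(abort_list) > 0) then
      write(*,*) 'input_data abort codes:', trim(abort_list)
      stop 1                                 ! CICE calls abort_ice here
   endif
end program abort_list_sketch
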
block index + iblk ! block index ! local variables #ifdef CICE_IN_NEMO real (kind=dbl_kind) :: & - raice ! temporary reverse ice concentration + raice ! reciprocal of ice concentration #endif integer (kind=int_kind) :: & ilo,ihi,jlo,jhi, & ! beginning and end of physical domain @@ -215,24 +215,27 @@ subroutine step_therm1 (dt, iblk) logical (kind=log_kind) :: & tr_iage, tr_FY, tr_iso, tr_aero, tr_pond, tr_pond_cesm, & - tr_pond_lvl, tr_pond_topo, calc_Tsfc + tr_pond_lvl, tr_pond_topo, calc_Tsfc, highfreq real (kind=dbl_kind) :: & - puny + uvel_center, & ! cell-centered velocity, x component (m/s) + vvel_center, & ! cell-centered velocity, y component (m/s) + puny ! a very small number real (kind=dbl_kind), dimension(n_aero,2,ncat) :: & - aerosno, aeroice ! kg/m^2 + aerosno, aeroice ! kg/m^2 real (kind=dbl_kind), dimension(n_iso,ncat) :: & - isosno, isoice ! kg/m^2 + isosno, isoice ! kg/m^2 type (block) :: & - this_block ! block information for current block + this_block ! block information for current block character(len=*), parameter :: subname = '(step_therm1)' call icepack_query_parameters(puny_out=puny) call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc) + call icepack_query_parameters(highfreq_out=highfreq) call icepack_query_tracer_sizes(ntrcr_out=ntrcr) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_iso_out=tr_iso, & @@ -289,6 +292,16 @@ subroutine step_therm1 (dt, iblk) do j = jlo, jhi do i = ilo, ihi + if (highfreq) then ! include ice velocity in calculation of wind stress + uvel_center = p25*(uvel(i,j ,iblk) + uvel(i-1,j ,iblk) & ! cell-centered velocity + + uvel(i,j-1,iblk) + uvel(i-1,j-1,iblk)) ! assumes wind components + vvel_center = p25*(vvel(i,j ,iblk) + vvel(i-1,j ,iblk) & ! are also cell-centered + + vvel(i,j-1,iblk) + vvel(i-1,j-1,iblk)) + else + uvel_center = c0 ! not used + vvel_center = c0 + endif ! highfreq + if (tr_iso) then ! 
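
Editor's note: step_therm1 above now passes cell-centered velocities to Icepack when highfreq coupling is on, averaging the four surrounding U-grid (B-grid corner) values with p25. A single-cell sketch of that stencil with assumed corner velocities.

program center_velocity_sketch
   implicit none
   real(kind=8), parameter :: p25 = 0.25d0
   real(kind=8) :: uvel(0:1,0:1)    ! the four U-points around one cell
   real(kind=8) :: uvel_center

   uvel(0,0) = 0.10d0               ! assumed corner velocities (m/s)
   uvel(1,0) = 0.20d0
   uvel(0,1) = 0.30d0
   uvel(1,1) = 0.40d0

   ! same stencil as the patch: (i,j), (i-1,j), (i,j-1), (i-1,j-1)
   uvel_center = p25*(uvel(1,1) + uvel(0,1) + uvel(1,0) + uvel(0,0))
   write(*,*) 'uvel_center =', uvel_center   ! 0.25 m/s
end program center_velocity_sketch
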
trcrn(nt_iso*) has units kg/m^3 do n=1,ncat do k=1,n_iso @@ -324,8 +337,8 @@ subroutine step_therm1 (dt, iblk) vicen = vicen (i,j,:,iblk), & vsno = vsno (i,j, iblk), & vsnon = vsnon (i,j,:,iblk), & - uvel = uvel (i,j, iblk), & - vvel = vvel (i,j, iblk), & + uvel = uvel_center , & + vvel = vvel_center , & Tsfc = trcrn (i,j,nt_Tsfc,:,iblk), & zqsn = trcrn (i,j,nt_qsno:nt_qsno+nslyr-1,:,iblk), & zqin = trcrn (i,j,nt_qice:nt_qice+nilyr-1,:,iblk), & @@ -1026,7 +1039,7 @@ subroutine step_radiation (dt, iblk) kaer_tab, waer_tab, gaer_tab, kaer_bc_tab, waer_bc_tab, & gaer_bc_tab, bcenh, swgrid, igrid use ice_blocks, only: block, get_block - use ice_calendar, only: calendar_type, days_per_year, nextsw_cday, yday, sec + use ice_calendar, only: calendar_type, days_per_year, nextsw_cday, yday, msec use ice_domain, only: blocks_ice use ice_domain_size, only: ncat, n_aero, nilyr, nslyr, n_zaero, n_algae, nblyr use ice_flux, only: swvdr, swvdf, swidr, swidf, coszen, fsnow @@ -1145,7 +1158,7 @@ subroutine step_radiation (dt, iblk) calendar_type=calendar_type, & days_per_year=days_per_year, & nextsw_cday=nextsw_cday, yday=yday, & - sec=sec, & + sec=msec, & kaer_tab=kaer_tab, kaer_bc_tab=kaer_bc_tab(:,:), & waer_tab=waer_tab, waer_bc_tab=waer_bc_tab(:,:), & gaer_tab=gaer_tab, gaer_bc_tab=gaer_bc_tab(:,:), & diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90 b/cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90 index 884ee6331..635bbbeb4 100644 --- a/cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90 +++ b/cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90 @@ -41,6 +41,7 @@ module ice_boundary type, public :: ice_halo integer (int_kind) :: & communicator, &! communicator to use for update messages + numLocalBlocks, &! number of local blocks, needed for halo fill numMsgSend, &! number of messages to send halo update numMsgRecv, &! number of messages to recv halo update numLocalCopies, &! num local copies for halo update @@ -50,6 +51,7 @@ module ice_boundary tripoleTFlag ! NS boundary is a tripole T-fold integer (int_kind), dimension(:), pointer :: & + blockGlobalID, &! list of local block global IDs, needed for halo fill recvTask, &! task from which to recv each msg sendTask, &! task to which to send each msg sizeSend, &! size of each sent message @@ -220,6 +222,13 @@ function ice_HaloCreate(dist, nsBoundaryType, ewBoundaryType, & cornerMsgSize = nghost*nghost tripoleRows = nghost+1 + !*** store some block info to fill haloes properly + call ice_distributionGet(dist, numLocalBlocks=halo%numLocalBlocks) + if (halo%numLocalBlocks > 0) then + allocate(halo%blockGlobalID(halo%numLocalBlocks)) + call ice_distributionGet(dist, blockGlobalID=halo%blockGlobalID) + endif + if (nsBoundaryType == 'tripole' .or. nsBoundaryType == 'tripoleT') then tripoleTFlag = (nsBoundaryType == 'tripoleT') if (tripoleTflag) tripoleRows = tripoleRows+1 @@ -1023,6 +1032,7 @@ subroutine ice_HaloMask(halo, basehalo, mask) communicator, &! communicator for message passing numMsgSend, numMsgRecv, &! number of messages for this halo numLocalCopies, &! num local copies for halo update + numLocalBlocks, &! num local blocks for halo fill tripoleRows, &! number of rows in tripole buffer lbufSizeSend, &! buffer size for send messages lbufSizeRecv ! 
buffer size for recv messages @@ -1043,6 +1053,7 @@ subroutine ice_HaloMask(halo, basehalo, mask) numMsgSend = basehalo%numMsgSend numMsgRecv = basehalo%numMsgRecv numLocalCopies = basehalo%numLocalCopies + numLocalBlocks = basehalo%numLocalBlocks lbufSizeSend = size(basehalo%sendAddr,dim=2) lbufSizeRecv = size(basehalo%recvAddr,dim=2) @@ -1056,6 +1067,7 @@ subroutine ice_HaloMask(halo, basehalo, mask) halo%recvAddr(3,lbufSizeRecv,numMsgRecv), & halo%srcLocalAddr(3,numLocalCopies), & halo%dstLocalAddr(3,numLocalCopies), & + halo%blockGlobalID(numLocalBlocks), & stat = istat) if (istat > 0) then @@ -1067,10 +1079,13 @@ subroutine ice_HaloMask(halo, basehalo, mask) halo%tripoleRows = tripoleRows halo%tripoleTFlag = tripoleTFlag halo%numLocalCopies = numLocalCopies + halo%numLocalBlocks = numLocalBlocks halo%srcLocalAddr = basehalo%srcLocalAddr(:,1:numLocalCopies) halo%dstLocalAddr = basehalo%dstLocalAddr(:,1:numLocalCopies) + halo%blockGlobalID = basehalo%blockGlobalID + numMsgSend = 0 do nmsg=1,basehalo%numMsgSend scnt = 0 @@ -1176,7 +1191,8 @@ subroutine ice_HaloUpdate2DR8(array, halo, & !----------------------------------------------------------------------- integer (int_kind) :: & - i,j,n,nmsg, &! dummy loop indices + i,j,n,nmsg, &! dummy loop indices + iblk,ilo,ihi,jlo,jhi, &! block sizes for fill ierr, &! error or status flag for MPI,alloc nxGlobal, &! global domain size in x (tripole) iSrc,jSrc, &! source addresses for message @@ -1285,13 +1301,18 @@ subroutine ice_HaloUpdate2DR8(array, halo, & ! !----------------------------------------------------------------------- - do j = 1,nghost - array(1:nx_block, j,:) = fill - array(1:nx_block,ny_block-j+1,:) = fill - enddo - do i = 1,nghost - array(i, 1:ny_block,:) = fill - array(nx_block-i+1,1:ny_block,:) = fill + do iblk = 1, halo%numLocalBlocks + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi) + do j = 1,nghost + array(1:nx_block, jlo-j,iblk) = fill + array(1:nx_block, jhi+j,iblk) = fill + enddo + do i = 1,nghost + array(ilo-i, 1:ny_block,iblk) = fill + array(ihi+i, 1:ny_block,iblk) = fill + enddo enddo !----------------------------------------------------------------------- @@ -1569,6 +1590,7 @@ subroutine ice_HaloUpdate2DR4(array, halo, & integer (int_kind) :: & i,j,n,nmsg, &! dummy loop indices + iblk,ilo,ihi,jlo,jhi, &! block sizes for fill ierr, &! error or status flag for MPI,alloc nxGlobal, &! global domain size in x (tripole) iSrc,jSrc, &! source addresses for message @@ -1677,13 +1699,18 @@ subroutine ice_HaloUpdate2DR4(array, halo, & ! !----------------------------------------------------------------------- - do j = 1,nghost - array(1:nx_block, j,:) = fill - array(1:nx_block,ny_block-j+1,:) = fill - enddo - do i = 1,nghost - array(i, 1:ny_block,:) = fill - array(nx_block-i+1,1:ny_block,:) = fill + do iblk = 1, halo%numLocalBlocks + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi) + do j = 1,nghost + array(1:nx_block, jlo-j,iblk) = fill + array(1:nx_block, jhi+j,iblk) = fill + enddo + do i = 1,nghost + array(ilo-i, 1:ny_block,iblk) = fill + array(ihi+i, 1:ny_block,iblk) = fill + enddo enddo !----------------------------------------------------------------------- @@ -1961,6 +1988,7 @@ subroutine ice_HaloUpdate2DI4(array, halo, & integer (int_kind) :: & i,j,n,nmsg, &! dummy loop indices + iblk,ilo,ihi,jlo,jhi, &! block sizes for fill ierr, &! error or status flag for MPI,alloc nxGlobal, &! global domain size in x (tripole) iSrc,jSrc, &! 
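
Editor's note: the fill rewrite above (repeated for every 2D/3D/4D real and integer variant in both the MPI and serial ice_boundary modules) is the substantive halo change. Ghost cells are now zeroed relative to each block's physical bounds ilo..ihi / jlo..jhi, obtained per block via get_block_parameter through the stored blockGlobalID list, rather than relative to the fixed nx_block/ny_block frame, so padded partial blocks are filled correctly. A single-block sketch with assumed bounds.

program halo_fill_sketch
   implicit none
   integer, parameter :: nx_block = 10, ny_block = 10, nghost = 1
   integer :: i, j, ilo, ihi, jlo, jhi
   real(kind=8) :: array(nx_block,ny_block)

   array = 1.0d0
   ilo = 2; ihi = 7                 ! assumed physical bounds of a padded
   jlo = 2; jhi = 7                 ! block (from get_block_parameter)

   do j = 1, nghost
      array(1:nx_block, jlo-j) = 0.0d0   ! south ghost row(s)
      array(1:nx_block, jhi+j) = 0.0d0   ! north ghost row(s)
   enddo
   do i = 1, nghost
      array(ilo-i, 1:ny_block) = 0.0d0   ! west ghost column(s)
      array(ihi+i, 1:ny_block) = 0.0d0   ! east ghost column(s)
   enddo

   write(*,*) count(array == 0.0d0)      ! 36 ring cells zeroed
end program halo_fill_sketch
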
source addresses for message @@ -2069,13 +2097,18 @@ subroutine ice_HaloUpdate2DI4(array, halo, & ! !----------------------------------------------------------------------- - do j = 1,nghost - array(1:nx_block, j,:) = fill - array(1:nx_block,ny_block-j+1,:) = fill - enddo - do i = 1,nghost - array(i, 1:ny_block,:) = fill - array(nx_block-i+1,1:ny_block,:) = fill + do iblk = 1, halo%numLocalBlocks + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi) + do j = 1,nghost + array(1:nx_block, jlo-j,iblk) = fill + array(1:nx_block, jhi+j,iblk) = fill + enddo + do i = 1,nghost + array(ilo-i, 1:ny_block,iblk) = fill + array(ihi+i, 1:ny_block,iblk) = fill + enddo enddo !----------------------------------------------------------------------- @@ -2353,6 +2386,7 @@ subroutine ice_HaloUpdate3DR8(array, halo, & integer (int_kind) :: & i,j,k,n,nmsg, &! dummy loop indices + iblk,ilo,ihi,jlo,jhi, &! block sizes for fill ierr, &! error or status flag for MPI,alloc nxGlobal, &! global domain size in x (tripole) nz, &! size of array in 3rd dimension @@ -2489,13 +2523,18 @@ subroutine ice_HaloUpdate3DR8(array, halo, & ! !----------------------------------------------------------------------- - do j = 1,nghost - array(1:nx_block, j,:,:) = fill - array(1:nx_block,ny_block-j+1,:,:) = fill - enddo - do i = 1,nghost - array(i, 1:ny_block,:,:) = fill - array(nx_block-i+1,1:ny_block,:,:) = fill + do iblk = 1, halo%numLocalBlocks + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi) + do j = 1,nghost + array(1:nx_block, jlo-j,:,iblk) = fill + array(1:nx_block, jhi+j,:,iblk) = fill + enddo + do i = 1,nghost + array(ilo-i, 1:ny_block,:,iblk) = fill + array(ihi+i, 1:ny_block,:,iblk) = fill + enddo enddo !----------------------------------------------------------------------- @@ -2804,6 +2843,7 @@ subroutine ice_HaloUpdate3DR4(array, halo, & integer (int_kind) :: & i,j,k,n,nmsg, &! dummy loop indices + iblk,ilo,ihi,jlo,jhi, &! block sizes for fill ierr, &! error or status flag for MPI,alloc nxGlobal, &! global domain size in x (tripole) nz, &! size of array in 3rd dimension @@ -2940,13 +2980,18 @@ subroutine ice_HaloUpdate3DR4(array, halo, & ! !----------------------------------------------------------------------- - do j = 1,nghost - array(1:nx_block, j,:,:) = fill - array(1:nx_block,ny_block-j+1,:,:) = fill - enddo - do i = 1,nghost - array(i, 1:ny_block,:,:) = fill - array(nx_block-i+1,1:ny_block,:,:) = fill + do iblk = 1, halo%numLocalBlocks + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi) + do j = 1,nghost + array(1:nx_block, jlo-j,:,iblk) = fill + array(1:nx_block, jhi+j,:,iblk) = fill + enddo + do i = 1,nghost + array(ilo-i, 1:ny_block,:,iblk) = fill + array(ihi+i, 1:ny_block,:,iblk) = fill + enddo enddo !----------------------------------------------------------------------- @@ -3255,6 +3300,7 @@ subroutine ice_HaloUpdate3DI4(array, halo, & integer (int_kind) :: & i,j,k,n,nmsg, &! dummy loop indices + iblk,ilo,ihi,jlo,jhi, &! block sizes for fill ierr, &! error or status flag for MPI,alloc nxGlobal, &! global domain size in x (tripole) nz, &! size of array in 3rd dimension @@ -3391,13 +3437,18 @@ subroutine ice_HaloUpdate3DI4(array, halo, & ! 
!----------------------------------------------------------------------- - do j = 1,nghost - array(1:nx_block, j,:,:) = fill - array(1:nx_block,ny_block-j+1,:,:) = fill - enddo - do i = 1,nghost - array(i, 1:ny_block,:,:) = fill - array(nx_block-i+1,1:ny_block,:,:) = fill + do iblk = 1, halo%numLocalBlocks + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi) + do j = 1,nghost + array(1:nx_block, jlo-j,:,iblk) = fill + array(1:nx_block, jhi+j,:,iblk) = fill + enddo + do i = 1,nghost + array(ilo-i, 1:ny_block,:,iblk) = fill + array(ihi+i, 1:ny_block,:,iblk) = fill + enddo enddo !----------------------------------------------------------------------- @@ -3706,6 +3757,7 @@ subroutine ice_HaloUpdate4DR8(array, halo, & integer (int_kind) :: & i,j,k,l,n,nmsg, &! dummy loop indices + iblk,ilo,ihi,jlo,jhi, &! block sizes for fill ierr, &! error or status flag for MPI,alloc nxGlobal, &! global domain size in x (tripole) nz, nt, &! size of array in 3rd,4th dimensions @@ -3846,13 +3898,18 @@ subroutine ice_HaloUpdate4DR8(array, halo, & ! !----------------------------------------------------------------------- - do j = 1,nghost - array(1:nx_block, j,:,:,:) = fill - array(1:nx_block,ny_block-j+1,:,:,:) = fill - enddo - do i = 1,nghost - array(i, 1:ny_block,:,:,:) = fill - array(nx_block-i+1,1:ny_block,:,:,:) = fill + do iblk = 1, halo%numLocalBlocks + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi) + do j = 1,nghost + array(1:nx_block, jlo-j,:,:,iblk) = fill + array(1:nx_block, jhi+j,:,:,iblk) = fill + enddo + do i = 1,nghost + array(ilo-i, 1:ny_block,:,:,iblk) = fill + array(ihi+i, 1:ny_block,:,:,iblk) = fill + enddo enddo !----------------------------------------------------------------------- @@ -4181,6 +4238,7 @@ subroutine ice_HaloUpdate4DR4(array, halo, & integer (int_kind) :: & i,j,k,l,n,nmsg, &! dummy loop indices + iblk,ilo,ihi,jlo,jhi, &! block sizes for fill ierr, &! error or status flag for MPI,alloc nxGlobal, &! global domain size in x (tripole) nz, nt, &! size of array in 3rd,4th dimensions @@ -4321,13 +4379,18 @@ subroutine ice_HaloUpdate4DR4(array, halo, & ! !----------------------------------------------------------------------- - do j = 1,nghost - array(1:nx_block, j,:,:,:) = fill - array(1:nx_block,ny_block-j+1,:,:,:) = fill - enddo - do i = 1,nghost - array(i, 1:ny_block,:,:,:) = fill - array(nx_block-i+1,1:ny_block,:,:,:) = fill + do iblk = 1, halo%numLocalBlocks + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi) + do j = 1,nghost + array(1:nx_block, jlo-j,:,:,iblk) = fill + array(1:nx_block, jhi+j,:,:,iblk) = fill + enddo + do i = 1,nghost + array(ilo-i, 1:ny_block,:,:,iblk) = fill + array(ihi+i, 1:ny_block,:,:,iblk) = fill + enddo enddo !----------------------------------------------------------------------- @@ -4656,6 +4719,7 @@ subroutine ice_HaloUpdate4DI4(array, halo, & integer (int_kind) :: & i,j,k,l,n,nmsg, &! dummy loop indices + iblk,ilo,ihi,jlo,jhi, &! block sizes for fill ierr, &! error or status flag for MPI,alloc nxGlobal, &! global domain size in x (tripole) nz, nt, &! size of array in 3rd,4th dimensions @@ -4796,13 +4860,18 @@ subroutine ice_HaloUpdate4DI4(array, halo, & ! 
!----------------------------------------------------------------------- - do j = 1,nghost - array(1:nx_block, j,:,:,:) = fill - array(1:nx_block,ny_block-j+1,:,:,:) = fill - enddo - do i = 1,nghost - array(i, 1:ny_block,:,:,:) = fill - array(nx_block-i+1,1:ny_block,:,:,:) = fill + do iblk = 1, halo%numLocalBlocks + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi) + do j = 1,nghost + array(1:nx_block, jlo-j,:,:,iblk) = fill + array(1:nx_block, jhi+j,:,:,iblk) = fill + enddo + do i = 1,nghost + array(ilo-i, 1:ny_block,:,:,iblk) = fill + array(ihi+i, 1:ny_block,:,:,iblk) = fill + enddo enddo !----------------------------------------------------------------------- @@ -5232,13 +5301,18 @@ subroutine ice_HaloUpdate_stress(array1, array2, halo, & ! the tripole zipper as needed for stresses. if you zero ! it out, all halo values will be wiped out. !----------------------------------------------------------------------- -! do j = 1,nghost -! array1(1:nx_block, j,:) = fill -! array1(1:nx_block,ny_block-j+1,:) = fill -! enddo -! do i = 1,nghost -! array1(i, 1:ny_block,:) = fill -! array1(nx_block-i+1,1:ny_block,:) = fill +! do iblk = 1, halo%numLocalBlocks +! call get_block_parameter(halo%blockGlobalID(iblk), & +! ilo=ilo, ihi=ihi, & +! jlo=jlo, jhi=jhi) +! do j = 1,nghost +! array(1:nx_block, jlo-j,iblk) = fill +! array(1:nx_block, jhi+j,iblk) = fill +! enddo +! do i = 1,nghost +! array(ilo-i, 1:ny_block,iblk) = fill +! array(ihi+i, 1:ny_block,iblk) = fill +! enddo ! enddo !----------------------------------------------------------------------- @@ -6715,20 +6789,20 @@ subroutine ice_HaloDestroy(halo) character(len=*), parameter :: subname = '(ice_HaloDestroy)' !----------------------------------------------------------------------- - deallocate(halo%sendTask, stat=istat) - deallocate(halo%recvTask, stat=istat) - deallocate(halo%sizeSend, stat=istat) - deallocate(halo%sizeRecv, stat=istat) - deallocate(halo%tripSend, stat=istat) - deallocate(halo%tripRecv, stat=istat) - deallocate(halo%srcLocalAddr, stat=istat) - deallocate(halo%dstLocalAddr, stat=istat) - deallocate(halo%sendAddr, stat=istat) - deallocate(halo%recvAddr, stat=istat) + deallocate(halo%sendTask, & + halo%recvTask, & + halo%sizeSend, & + halo%sizeRecv, & + halo%tripSend, & + halo%tripRecv, & + halo%srcLocalAddr, & + halo%dstLocalAddr, & + halo%sendAddr, & + halo%recvAddr, & + halo%blockGlobalID, stat=istat) if (istat > 0) then - call abort_ice( & - 'ice_HaloDestroy: error deallocating') + call abort_ice(subname,' ERROR: deallocating') return endif end subroutine ice_HaloDestroy diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_timers.F90 b/cicecore/cicedynB/infrastructure/comm/mpi/ice_timers.F90 index 6f9c8b0c6..046cf9336 100644 --- a/cicecore/cicedynB/infrastructure/comm/mpi/ice_timers.F90 +++ b/cicecore/cicedynB/infrastructure/comm/mpi/ice_timers.F90 @@ -60,6 +60,7 @@ module ice_timers #endif timer_bound, &! boundary updates timer_bgc, &! biogeochemistry + timer_forcing, &! forcing timer_evp_1d, &! timer only loop timer_evp_2d ! timer including conversion 1d/2d ! timer_tmp ! 
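
Editor's note: ice_HaloDestroy above collapses ten per-array deallocate calls, each of which silently overwrote istat, into one statement with a single status check, and adds blockGlobalID to the list. A minimal sketch of the grouped-deallocate idiom.

program grouped_dealloc_sketch
   implicit none
   integer :: istat
   real(kind=8), allocatable :: a(:), b(:), c(:)

   allocate(a(10), b(10), c(10))

   deallocate(a, b, c, stat=istat)   ! one statement, one status to check
   if (istat > 0) then
      write(*,*) 'ERROR: deallocating'
      stop 1                          ! CICE calls abort_ice here
   endif
   write(*,*) 'deallocated cleanly'
end program grouped_dealloc_sketch
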
for temporary timings @@ -179,6 +180,7 @@ subroutine init_ice_timers call get_ice_timer(timer_hist, 'History ',nblocks,distrb_info%nprocs) call get_ice_timer(timer_bound, 'Bound', nblocks,distrb_info%nprocs) call get_ice_timer(timer_bgc, 'BGC', nblocks,distrb_info%nprocs) + call get_ice_timer(timer_forcing, 'Forcing', nblocks,distrb_info%nprocs) #if (defined CESMCOUPLED) call get_ice_timer(timer_cplrecv, 'Cpl-recv', nblocks,distrb_info%nprocs) call get_ice_timer(timer_rcvsnd, 'Rcv->Snd', nblocks,distrb_info%nprocs) diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 b/cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 index 9c2cfd9fc..c66cdd13c 100644 --- a/cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 +++ b/cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 @@ -40,12 +40,16 @@ module ice_boundary type, public :: ice_halo integer (int_kind) :: & communicator, &! communicator to use for update messages + numLocalBlocks, &! number of local blocks, needed for halo fill numLocalCopies, &! num local copies for halo update tripoleRows ! number of rows in tripole buffer logical (log_kind) :: & tripoleTFlag ! NS boundary is a tripole T-fold + integer (int_kind), dimension(:), pointer :: & + blockGlobalID ! list of local block global IDs, needed for halo fill + integer (int_kind), dimension(:,:), pointer :: & srcLocalAddr, &! src addresses for each local copy dstLocalAddr ! dst addresses for each local copy @@ -174,6 +178,13 @@ function ice_HaloCreate(dist, nsBoundaryType, ewBoundaryType, & cornerMsgSize = nghost*nghost tripoleRows = nghost+1 + !*** store some block info to fill haloes properly + call ice_distributionGet(dist, numLocalBlocks=halo%numLocalBlocks) + if (halo%numLocalBlocks > 0) then + allocate(halo%blockGlobalID(halo%numLocalBlocks)) + call ice_distributionGet(dist, blockGlobalID=halo%blockGlobalID) + endif + if (nsBoundaryType == 'tripole' .or. nsBoundaryType == 'tripoleT') then tripoleTFlag = (nsBoundaryType == 'tripoleT') if (tripoleTflag) tripoleRows = tripoleRows+1 @@ -581,6 +592,7 @@ subroutine ice_HaloMask(halo, basehalo, mask) istat, &! allocate status flag communicator, &! communicator for message passing numLocalCopies, &! num local copies for halo update + numLocalBlocks, &! num local blocks for halo fill tripoleRows ! number of rows in tripole buffer logical (log_kind) :: & @@ -599,9 +611,11 @@ subroutine ice_HaloMask(halo, basehalo, mask) tripoleRows = basehalo%tripoleRows tripoleTFlag = basehalo%tripoleTFlag numLocalCopies = basehalo%numLocalCopies + numLocalBlocks = basehalo%numLocalBlocks allocate(halo%srcLocalAddr(3,numLocalCopies), & halo%dstLocalAddr(3,numLocalCopies), & + halo%blockGlobalID(numLocalBlocks), & stat = istat) if (istat > 0) then @@ -613,10 +627,13 @@ subroutine ice_HaloMask(halo, basehalo, mask) halo%tripoleRows = tripoleRows halo%tripoleTFlag = tripoleTFlag halo%numLocalCopies = numLocalCopies + halo%numLocalBlocks = numLocalBlocks halo%srcLocalAddr = basehalo%srcLocalAddr halo%dstLocalAddr = basehalo%dstLocalAddr + halo%blockGlobalID = basehalo%blockGlobalID + !----------------------------------------------------------------------- end subroutine ice_HaloMask @@ -659,6 +676,7 @@ subroutine ice_HaloUpdate2DR8(array, halo, & integer (int_kind) :: & i,j,nmsg, &! dummy loop indices + iblk,ilo,ihi,jlo,jhi, &! block sizes for fill nxGlobal, &! global domain size in x (tripole) iSrc,jSrc, &! source addresses for message iDst,jDst, &! 
dest addresses for message @@ -699,13 +717,18 @@ subroutine ice_HaloUpdate2DR8(array, halo, & ! !----------------------------------------------------------------------- - do j = 1,nghost - array(1:nx_block, j,:) = fill - array(1:nx_block,ny_block-j+1,:) = fill - enddo - do i = 1,nghost - array(i, 1:ny_block,:) = fill - array(nx_block-i+1,1:ny_block,:) = fill + do iblk = 1, halo%numLocalBlocks + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi) + do j = 1,nghost + array(1:nx_block, jlo-j,iblk) = fill + array(1:nx_block, jhi+j,iblk) = fill + enddo + do i = 1,nghost + array(ilo-i, 1:ny_block,iblk) = fill + array(ihi+i, 1:ny_block,iblk) = fill + enddo enddo !----------------------------------------------------------------------- @@ -945,6 +968,7 @@ subroutine ice_HaloUpdate2DR4(array, halo, & integer (int_kind) :: & i,j,nmsg, &! dummy loop indices + iblk,ilo,ihi,jlo,jhi, &! block sizes for fill nxGlobal, &! global domain size in x (tripole) iSrc,jSrc, &! source addresses for message iDst,jDst, &! dest addresses for message @@ -985,13 +1009,18 @@ subroutine ice_HaloUpdate2DR4(array, halo, & ! !----------------------------------------------------------------------- - do j = 1,nghost - array(1:nx_block, j,:) = fill - array(1:nx_block,ny_block-j+1,:) = fill - enddo - do i = 1,nghost - array(i, 1:ny_block,:) = fill - array(nx_block-i+1,1:ny_block,:) = fill + do iblk = 1, halo%numLocalBlocks + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi) + do j = 1,nghost + array(1:nx_block, jlo-j,iblk) = fill + array(1:nx_block, jhi+j,iblk) = fill + enddo + do i = 1,nghost + array(ilo-i, 1:ny_block,iblk) = fill + array(ihi+i, 1:ny_block,iblk) = fill + enddo enddo !----------------------------------------------------------------------- @@ -1231,6 +1260,7 @@ subroutine ice_HaloUpdate2DI4(array, halo, & integer (int_kind) :: & i,j,nmsg, &! dummy loop indices + iblk,ilo,ihi,jlo,jhi, &! block sizes for fill nxGlobal, &! global domain size in x (tripole) iSrc,jSrc, &! source addresses for message iDst,jDst, &! dest addresses for message @@ -1271,13 +1301,18 @@ subroutine ice_HaloUpdate2DI4(array, halo, & ! !----------------------------------------------------------------------- - do j = 1,nghost - array(1:nx_block, j,:) = fill - array(1:nx_block,ny_block-j+1,:) = fill - enddo - do i = 1,nghost - array(i, 1:ny_block,:) = fill - array(nx_block-i+1,1:ny_block,:) = fill + do iblk = 1, halo%numLocalBlocks + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi) + do j = 1,nghost + array(1:nx_block, jlo-j,iblk) = fill + array(1:nx_block, jhi+j,iblk) = fill + enddo + do i = 1,nghost + array(ilo-i, 1:ny_block,iblk) = fill + array(ihi+i, 1:ny_block,iblk) = fill + enddo enddo !----------------------------------------------------------------------- @@ -1517,6 +1552,7 @@ subroutine ice_HaloUpdate3DR8(array, halo, & integer (int_kind) :: & i,j,k,nmsg, &! dummy loop indices + iblk,ilo,ihi,jlo,jhi, &! block sizes for fill nxGlobal, &! global domain size in x (tripole) nz, &! size of array in 3rd dimension iSrc,jSrc, &! source addresses for message @@ -1564,13 +1600,18 @@ subroutine ice_HaloUpdate3DR8(array, halo, & ! 
!----------------------------------------------------------------------- - do j = 1,nghost - array(1:nx_block, j,:,:) = fill - array(1:nx_block,ny_block-j+1,:,:) = fill - enddo - do i = 1,nghost - array(i, 1:ny_block,:,:) = fill - array(nx_block-i+1,1:ny_block,:,:) = fill + do iblk = 1, halo%numLocalBlocks + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi) + do j = 1,nghost + array(1:nx_block, jlo-j,:,iblk) = fill + array(1:nx_block, jhi+j,:,iblk) = fill + enddo + do i = 1,nghost + array(ilo-i, 1:ny_block,:,iblk) = fill + array(ihi+i, 1:ny_block,:,iblk) = fill + enddo enddo !----------------------------------------------------------------------- @@ -1829,6 +1870,7 @@ subroutine ice_HaloUpdate3DR4(array, halo, & integer (int_kind) :: & i,j,k,nmsg, &! dummy loop indices + iblk,ilo,ihi,jlo,jhi, &! block sizes for fill nxGlobal, &! global domain size in x (tripole) nz, &! size of array in 3rd dimension iSrc,jSrc, &! source addresses for message @@ -1876,13 +1918,18 @@ subroutine ice_HaloUpdate3DR4(array, halo, & ! !----------------------------------------------------------------------- - do j = 1,nghost - array(1:nx_block, j,:,:) = fill - array(1:nx_block,ny_block-j+1,:,:) = fill - enddo - do i = 1,nghost - array(i, 1:ny_block,:,:) = fill - array(nx_block-i+1,1:ny_block,:,:) = fill + do iblk = 1, halo%numLocalBlocks + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi) + do j = 1,nghost + array(1:nx_block, jlo-j,:,iblk) = fill + array(1:nx_block, jhi+j,:,iblk) = fill + enddo + do i = 1,nghost + array(ilo-i, 1:ny_block,:,iblk) = fill + array(ihi+i, 1:ny_block,:,iblk) = fill + enddo enddo !----------------------------------------------------------------------- @@ -2141,6 +2188,7 @@ subroutine ice_HaloUpdate3DI4(array, halo, & integer (int_kind) :: & i,j,k,nmsg, &! dummy loop indices + iblk,ilo,ihi,jlo,jhi, &! block sizes for fill nxGlobal, &! global domain size in x (tripole) nz, &! size of array in 3rd dimension iSrc,jSrc, &! source addresses for message @@ -2188,13 +2236,18 @@ subroutine ice_HaloUpdate3DI4(array, halo, & ! !----------------------------------------------------------------------- - do j = 1,nghost - array(1:nx_block, j,:,:) = fill - array(1:nx_block,ny_block-j+1,:,:) = fill - enddo - do i = 1,nghost - array(i, 1:ny_block,:,:) = fill - array(nx_block-i+1,1:ny_block,:,:) = fill + do iblk = 1, halo%numLocalBlocks + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi) + do j = 1,nghost + array(1:nx_block, jlo-j,:,iblk) = fill + array(1:nx_block, jhi+j,:,iblk) = fill + enddo + do i = 1,nghost + array(ilo-i, 1:ny_block,:,iblk) = fill + array(ihi+i, 1:ny_block,:,iblk) = fill + enddo enddo !----------------------------------------------------------------------- @@ -2453,6 +2506,7 @@ subroutine ice_HaloUpdate4DR8(array, halo, & integer (int_kind) :: & i,j,k,l,nmsg, &! dummy loop indices + iblk,ilo,ihi,jlo,jhi, &! block sizes for fill nxGlobal, &! global domain size in x (tripole) nz, nt, &! size of array in 3rd,4th dimensions iSrc,jSrc, &! source addresses for message @@ -2501,13 +2555,18 @@ subroutine ice_HaloUpdate4DR8(array, halo, & ! 
!----------------------------------------------------------------------- - do j = 1,nghost - array(1:nx_block, j,:,:,:) = fill - array(1:nx_block,ny_block-j+1,:,:,:) = fill - enddo - do i = 1,nghost - array(i, 1:ny_block,:,:,:) = fill - array(nx_block-i+1,1:ny_block,:,:,:) = fill + do iblk = 1, halo%numLocalBlocks + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi) + do j = 1,nghost + array(1:nx_block, jlo-j,:,:,iblk) = fill + array(1:nx_block, jhi+j,:,:,iblk) = fill + enddo + do i = 1,nghost + array(ilo-i, 1:ny_block,:,:,iblk) = fill + array(ihi+i, 1:ny_block,:,:,iblk) = fill + enddo enddo !----------------------------------------------------------------------- @@ -2782,6 +2841,7 @@ subroutine ice_HaloUpdate4DR4(array, halo, & integer (int_kind) :: & i,j,k,l,nmsg, &! dummy loop indices + iblk,ilo,ihi,jlo,jhi, &! block sizes for fill nxGlobal, &! global domain size in x (tripole) nz, nt, &! size of array in 3rd,4th dimensions iSrc,jSrc, &! source addresses for message @@ -2830,13 +2890,18 @@ subroutine ice_HaloUpdate4DR4(array, halo, & ! !----------------------------------------------------------------------- - do j = 1,nghost - array(1:nx_block, j,:,:,:) = fill - array(1:nx_block,ny_block-j+1,:,:,:) = fill - enddo - do i = 1,nghost - array(i, 1:ny_block,:,:,:) = fill - array(nx_block-i+1,1:ny_block,:,:,:) = fill + do iblk = 1, halo%numLocalBlocks + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi) + do j = 1,nghost + array(1:nx_block, jlo-j,:,:,iblk) = fill + array(1:nx_block, jhi+j,:,:,iblk) = fill + enddo + do i = 1,nghost + array(ilo-i, 1:ny_block,:,:,iblk) = fill + array(ihi+i, 1:ny_block,:,:,iblk) = fill + enddo enddo !----------------------------------------------------------------------- @@ -3111,6 +3176,7 @@ subroutine ice_HaloUpdate4DI4(array, halo, & integer (int_kind) :: & i,j,k,l,nmsg, &! dummy loop indices + iblk,ilo,ihi,jlo,jhi, &! block sizes for fill nxGlobal, &! global domain size in x (tripole) nz, nt, &! size of array in 3rd,4th dimensions iSrc,jSrc, &! source addresses for message @@ -3159,13 +3225,18 @@ subroutine ice_HaloUpdate4DI4(array, halo, & ! !----------------------------------------------------------------------- - do j = 1,nghost - array(1:nx_block, j,:,:,:) = fill - array(1:nx_block,ny_block-j+1,:,:,:) = fill - enddo - do i = 1,nghost - array(i, 1:ny_block,:,:,:) = fill - array(nx_block-i+1,1:ny_block,:,:,:) = fill + do iblk = 1, halo%numLocalBlocks + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi) + do j = 1,nghost + array(1:nx_block, jlo-j,:,:,iblk) = fill + array(1:nx_block, jhi+j,:,:,iblk) = fill + enddo + do i = 1,nghost + array(ilo-i, 1:ny_block,:,:,iblk) = fill + array(ihi+i, 1:ny_block,:,:,iblk) = fill + enddo enddo !----------------------------------------------------------------------- @@ -3472,13 +3543,18 @@ subroutine ice_HaloUpdate_stress(array1, array2, halo, & ! the tripole zipper as needed for stresses. if you zero ! it out, all halo values will be wiped out. !----------------------------------------------------------------------- -! do j = 1,nghost -! array1(1:nx_block, j,:) = fill -! array1(1:nx_block,ny_block-j+1,:) = fill -! enddo -! do i = 1,nghost -! array1(i, 1:ny_block,:) = fill -! array1(nx_block-i+1,1:ny_block,:) = fill +! do iblk = 1, halo%numLocalBlocks +! call get_block_parameter(halo%blockGlobalID(iblk), & +! ilo=ilo, ihi=ihi, & +! jlo=jlo, jhi=jhi) +! do j = 1,nghost +! 
array1(1:nx_block, jlo-j,iblk) = fill
+!         array1(1:nx_block, jhi+j,iblk) = fill
+!      enddo
+!      do i = 1,nghost
+!         array1(ilo-i, 1:ny_block,iblk) = fill
+!         array1(ihi+i, 1:ny_block,iblk) = fill
+!      enddo
!   enddo

 !-----------------------------------------------------------------------
@@ -4500,8 +4576,14 @@ subroutine ice_HaloDestroy(halo)

 !-----------------------------------------------------------------------

-   deallocate(halo%srcLocalAddr, stat=istat)
-   deallocate(halo%dstLocalAddr, stat=istat)
+   deallocate(halo%srcLocalAddr, &
+              halo%dstLocalAddr, &
+              halo%blockGlobalID, stat=istat)
+
+   if (istat > 0) then
+      call abort_ice(subname,' ERROR: deallocating')
+      return
+   endif

 end subroutine ice_HaloDestroy

diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_timers.F90 b/cicecore/cicedynB/infrastructure/comm/serial/ice_timers.F90
index 3074c1dc9..4599de42e 100644
--- a/cicecore/cicedynB/infrastructure/comm/serial/ice_timers.F90
+++ b/cicecore/cicedynB/infrastructure/comm/serial/ice_timers.F90
@@ -52,6 +52,7 @@ module ice_timers
       timer_hist,    &! diagnostics/history
       timer_bound,   &! boundary updates
       timer_bgc,     &! biogeochemistry
+      timer_forcing, &! forcing
       timer_evp_1d,  &! timer only loop
       timer_evp_2d    ! timer including conversion 1d/2d
!      timer_tmp      ! for temporary timings
@@ -193,8 +194,9 @@ subroutine init_ice_timers
       call get_ice_timer(timer_hist,  'History ',nblocks,distrb_info%nprocs)
       call get_ice_timer(timer_bound, 'Bound',   nblocks,distrb_info%nprocs)
       call get_ice_timer(timer_bgc,   'BGC',     nblocks,distrb_info%nprocs)
-      call get_ice_timer(timer_evp_1d, '1d-evp', nblocks,distrb_info%nprocs)
-      call get_ice_timer(timer_evp_2d, '2d-evp', nblocks,distrb_info%nprocs)
+      call get_ice_timer(timer_forcing, 'Forcing', nblocks,distrb_info%nprocs)
+      call get_ice_timer(timer_evp_1d,  '1d-evp',  nblocks,distrb_info%nprocs)
+      call get_ice_timer(timer_evp_2d,  '2d-evp',  nblocks,distrb_info%nprocs)
!     call get_ice_timer(timer_tmp, '  ',nblocks,distrb_info%nprocs)

 !-----------------------------------------------------------------------
diff --git a/cicecore/cicedynB/infrastructure/ice_blocks.F90 b/cicecore/cicedynB/infrastructure/ice_blocks.F90
index 5177dd047..2768a40c3 100644
--- a/cicecore/cicedynB/infrastructure/ice_blocks.F90
+++ b/cicecore/cicedynB/infrastructure/ice_blocks.F90
@@ -83,6 +83,9 @@ module ice_blocks
       nblocks_x ,&! tot num blocks in i direction
       nblocks_y   ! tot num blocks in j direction

+   logical (kind=log_kind), public :: &
+      debug_blocks        ! print verbose block information
+
 !-----------------------------------------------------------------------
 !
 !  module private data
 !
@@ -133,8 +136,6 @@ subroutine create_blocks(nx_global, ny_global, ew_boundary_type, &
       iblock, jblock ,&! block loop indices
       is, ie, js, je   ! temp start, end indices

-   logical (log_kind) :: dbug
-
    character(len=*), parameter :: subname = '(create_blocks)'

 !----------------------------------------------------------------------
@@ -252,8 +253,8 @@ subroutine create_blocks(nx_global, ny_global, ew_boundary_type, &
                !*** set last physical point if padded domain
                else if (j_global(j,n) == ny_global .and. &
-                        j >  all_blocks(n)%jlo .and. &
-                        j <  all_blocks(n)%jhi) then
+                        j >= all_blocks(n)%jlo .and. &
+                        j <  all_blocks(n)%jhi) then
                   all_blocks(n)%jhi = j ! last physical point in padded domain
                endif
            end do
@@ -300,8 +301,8 @@ subroutine create_blocks(nx_global, ny_global, ew_boundary_type, &
                !*** last physical point in padded domain
                else if (i_global(i,n) == nx_global .and. 
& - i < all_blocks(n)%ihi) then + i >= all_blocks(n)%ilo .and. & + i < all_blocks(n)%ihi) then all_blocks(n)%ihi = i endif end do @@ -311,9 +312,7 @@ subroutine create_blocks(nx_global, ny_global, ew_boundary_type, & end do end do -! dbug = .true. - dbug = .false. - if (dbug) then + if (debug_blocks) then if (my_task == master_task) then write(nu_diag,*) 'block i,j locations' do n = 1, nblocks_tot diff --git a/cicecore/cicedynB/infrastructure/ice_domain.F90 b/cicecore/cicedynB/infrastructure/ice_domain.F90 index cc57ea585..52f0da850 100644 --- a/cicecore/cicedynB/infrastructure/ice_domain.F90 +++ b/cicecore/cicedynB/infrastructure/ice_domain.F90 @@ -21,7 +21,7 @@ module ice_domain add_mpi_barriers use ice_broadcast, only: broadcast_scalar, broadcast_array use ice_blocks, only: block, get_block, create_blocks, nghost, & - nblocks_x, nblocks_y, nblocks_tot, nx_block, ny_block + nblocks_x, nblocks_y, nblocks_tot, nx_block, ny_block, debug_blocks use ice_distribution, only: distrb use ice_boundary, only: ice_halo use ice_exit, only: abort_ice @@ -134,7 +134,8 @@ subroutine init_domain_blocks maskhalo_dyn, & maskhalo_remap, & maskhalo_bound, & - add_mpi_barriers + add_mpi_barriers, & + debug_blocks !---------------------------------------------------------------------- ! @@ -153,6 +154,7 @@ subroutine init_domain_blocks maskhalo_remap = .false. ! if true, use masked halos for transport maskhalo_bound = .false. ! if true, use masked halos for bound_state add_mpi_barriers = .false. ! if true, throttle communication + debug_blocks = .false. ! if true, print verbose block information max_blocks = -1 ! max number of blocks per processor block_size_x = -1 ! size of block in first horiz dimension block_size_y = -1 ! size of block in second horiz dimension @@ -190,12 +192,11 @@ subroutine init_domain_blocks call broadcast_scalar(maskhalo_remap, master_task) call broadcast_scalar(maskhalo_bound, master_task) call broadcast_scalar(add_mpi_barriers, master_task) + call broadcast_scalar(debug_blocks, master_task) if (my_task == master_task) then if (max_blocks < 1) then - max_blocks=int( & - ( (dble(nx_global-1)/dble(block_size_x + 1)) * & - (dble(ny_global-1)/dble(block_size_y + 1)) ) & - / dble(nprocs)) + max_blocks=( ((nx_global-1)/block_size_x + 1) * & + ((ny_global-1)/block_size_y + 1) - 1) / nprocs + 1 max_blocks=max(1,max_blocks) write(nu_diag,'(/,a52,i6,/)') & '(ice_domain): max_block < 1: max_block estimated to ',max_blocks @@ -268,6 +269,7 @@ subroutine init_domain_blocks write(nu_diag,'(a,l6)') ' maskhalo_remap = ', maskhalo_remap write(nu_diag,'(a,l6)') ' maskhalo_bound = ', maskhalo_bound write(nu_diag,'(a,l6)') ' add_mpi_barriers = ', add_mpi_barriers + write(nu_diag,'(a,l6)') ' debug_blocks = ', debug_blocks write(nu_diag,'(a,2i6)') ' block_size_x,_y = ', block_size_x, block_size_y write(nu_diag,'(a,i6)') ' max_blocks = ', max_blocks write(nu_diag,'(a,i6,/)')' Number of ghost cells = ', nghost @@ -287,7 +289,7 @@ subroutine init_domain_distribution(KMTG,ULATG) ! initialized here through calls to the appropriate boundary routines. use ice_boundary, only: ice_HaloCreate - use ice_distribution, only: create_distribution, create_local_block_ids + use ice_distribution, only: create_distribution, create_local_block_ids, ice_distributionGet use ice_domain_size, only: max_blocks, nx_global, ny_global real (dbl_kind), dimension(nx_global,ny_global), intent(in) :: & @@ -311,6 +313,7 @@ subroutine init_domain_distribution(KMTG,ULATG) integer (int_kind) :: & i,j,n ,&! dummy loop indices ig,jg ,&! 
global indices
+         ninfo      ,&! ice_distributionGet check
          work_unit  ,&! size of quantized work unit
 #ifdef USE_NETCDF
          fid        ,&! file id
@@ -326,6 +329,7 @@ subroutine init_domain_distribution(KMTG,ULATG)
          rad_to_deg   ! radians to degrees

       integer (int_kind), dimension(:), allocatable :: &
+         blkinfo        ,&! ice_distributionGet check
          nocn           ,&! number of ocean points per block
          work_per_block   ! number of work units per block
@@ -449,7 +453,6 @@ subroutine init_domain_distribution(KMTG,ULATG)
          if (my_task == master_task) then
            ! cannot use ice_read_write due to circular dependency
 #ifdef USE_NETCDF
-           write(nu_diag,*) 'read ',trim(distribution_wght_file),minval(wght),maxval(wght)
            status = nf90_open(distribution_wght_file, NF90_NOWRITE, fid)
            if (status /= nf90_noerr) then
               call abort_ice (subname//'ERROR: Cannot open '//trim(distribution_wght_file))
@@ -457,6 +460,7 @@ subroutine init_domain_distribution(KMTG,ULATG)
            status = nf90_inq_varid(fid, 'wght', varid)
            status = nf90_get_var(fid, varid, wght)
            status = nf90_close(fid)
+           write(nu_diag,*) 'read ',trim(distribution_wght_file),minval(wght),maxval(wght)
 #else
            call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', &
                file=__FILE__, line=__LINE__)
@@ -563,6 +567,49 @@ subroutine init_domain_distribution(KMTG,ULATG)

       call create_local_block_ids(blocks_ice, distrb_info)

+      ! internal check of ice_distributionGet as part of verification process
+      if (debug_blocks) then
+         call ice_distributionGet(distrb_info, nprocs=ninfo)
+         if (ninfo /= distrb_info%nprocs) &
+            call abort_ice(subname//' ice_distributionGet nprocs ERROR', file=__FILE__, line=__LINE__)
+
+         call ice_distributionGet(distrb_info, communicator=ninfo)
+         if (ninfo /= distrb_info%communicator) &
+            call abort_ice(subname//' ice_distributionGet communicator ERROR', file=__FILE__, line=__LINE__)
+
+         call ice_distributionGet(distrb_info, numLocalBlocks=ninfo)
+         if (ninfo /= distrb_info%numLocalBlocks) &
+            call abort_ice(subname//' ice_distributionGet numLocalBlocks ERROR', file=__FILE__, line=__LINE__)
+
+         allocate(blkinfo(ninfo))
+
+         call ice_distributionGet(distrb_info, blockGlobalID = blkinfo)
+         do n = 1, ninfo
+            if (blkinfo(n) /= distrb_info%blockGlobalID(n)) &
+               call abort_ice(subname//' ice_distributionGet blockGlobalID ERROR', file=__FILE__, line=__LINE__)
+         enddo
+
+         deallocate(blkinfo)
+         allocate(blkinfo(nblocks_tot))
+
+         call ice_distributionGet(distrb_info, blockLocation = blkinfo)
+         do n = 1, nblocks_tot
+            if (blkinfo(n) /= distrb_info%blockLocation(n)) &
+               call abort_ice(subname//' ice_distributionGet blockLocation ERROR', file=__FILE__, line=__LINE__)
+         enddo
+
+         call ice_distributionGet(distrb_info, blockLocalID = blkinfo)
+         do n = 1, nblocks_tot
+            if (blkinfo(n) /= distrb_info%blockLocalID(n)) &
+               call abort_ice(subname//' ice_distributionGet blockLocalID ERROR', file=__FILE__, line=__LINE__)
+         enddo
+
+         deallocate(blkinfo)
+
+         if (my_task == master_task) &
+            write(nu_diag,*) subname,' ice_distributionGet checks pass'
+      endif
+
       if (associated(blocks_ice)) then
          nblocks = size(blocks_ice)
       else
diff --git a/cicecore/cicedynB/infrastructure/ice_grid.F90 b/cicecore/cicedynB/infrastructure/ice_grid.F90
index a354efb6b..2304877d2 100644
--- a/cicecore/cicedynB/infrastructure/ice_grid.F90
+++ b/cicecore/cicedynB/infrastructure/ice_grid.F90
@@ -26,7 +26,7 @@ module ice_grid
    use ice_domain, only: blocks_ice, nblocks, halo_info, distrb_info, &
        ew_boundary_type, ns_boundary_type, init_domain_distribution
    use ice_fileunits, only: nu_diag, nu_grid, nu_kmt, &
-       get_fileunit, release_fileunit
+       get_fileunit, 
release_fileunit, flush_fileunit use ice_gather_scatter, only: gather_global, scatter_global use ice_read_write, only: ice_read, ice_read_nc, ice_read_global, & ice_read_global_nc, ice_open, ice_open_nc, ice_close_nc @@ -384,11 +384,9 @@ subroutine init_grid2 ! T-grid cell and U-grid cell quantities !----------------------------------------------------------------- -! tarea(:,:,:) = c0 - !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -486,7 +484,7 @@ subroutine init_grid2 !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block, & !$OMP angle_0,angle_w,angle_s,angle_sw) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -642,7 +640,7 @@ subroutine popgrid kmt(:,:,:) = c0 !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -785,7 +783,7 @@ subroutine popgrid_nc kmt(:,:,:) = c0 !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -1104,7 +1102,7 @@ subroutine latlongrid !$OMP PARALLEL DO PRIVATE(iblk,this_block,ilo,ihi,jlo,jhi,i,j) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -1198,15 +1196,9 @@ subroutine rectgrid if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - !$OMP PARALLEL DO PRIVATE(iblk,i,j) - do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - ANGLE(i,j,iblk) = c0 ! "square with the world" - enddo - enddo - enddo - !$OMP END PARALLEL DO + hm (:,:,:) = c0 + kmt(:,:,:) = c0 + angle(:,:,:) = c0 ! "square with the world" allocate(work_g1(nx_global,ny_global)) @@ -1396,7 +1388,7 @@ subroutine cpomgrid kmt(:,:,:) = c0 !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -1636,11 +1628,10 @@ subroutine makemask !----------------------------------------------------------------- bm = c0 -! uvm = c0 !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -1663,12 +1654,19 @@ subroutine makemask field_loc_center, field_type_scalar) call ice_timer_stop(timer_bound) - !$OMP PARALLEL DO PRIVATE(iblk,i,j) + !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - tmask(i,j,iblk) = .false. - umask(i,j,iblk) = .false. + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + ! needs to cover halo (no halo update for logicals) + tmask(:,:,iblk) = .false. 
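! Note: tmask just above and umask just below are cleared over the
! entire block, ghost cells included, because logical arrays receive
! no halo update; the loop that follows then sets them over the
! physical cells plus nghost cells on each side. An equivalent,
! compact sketch of that loop (illustration only):
!
!   do j = jlo-nghost, jhi+nghost
!   do i = ilo-nghost, ihi+nghost
!      tmask(i,j,iblk) = hm (i,j,iblk) > p5
!      umask(i,j,iblk) = uvm(i,j,iblk) > p5
!   enddo
!   enddo
!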
+         umask(:,:,iblk) = .false.
+         do j = jlo-nghost, jhi+nghost
+         do i = ilo-nghost, ihi+nghost
            if ( hm(i,j,iblk) > p5) tmask(i,j,iblk) = .true.
            if (uvm(i,j,iblk) > p5) umask(i,j,iblk) = .true.
         enddo
@@ -1684,11 +1682,14 @@ subroutine makemask
         tarean(:,:,iblk) = c0
         tareas(:,:,iblk) = c0

-        do j = 1, ny_block
-        do i = 1, nx_block
+        do j = jlo,jhi
+        do i = ilo,ihi

-           if (ULAT(i,j,iblk) >= -puny) lmask_n(i,j,iblk) = .true. ! N. Hem.
-           if (ULAT(i,j,iblk) <  -puny) lmask_s(i,j,iblk) = .true. ! S. Hem.
+           if (ULAT(i,j,iblk) >= -puny) then
+              lmask_n(i,j,iblk) = .true. ! N. Hem.
+           else
+              lmask_s(i,j,iblk) = .true. ! S. Hem.
+           endif

            ! N hemisphere area mask (m^2)
            if (lmask_n(i,j,iblk)) tarean(i,j,iblk) = tarea(i,j,iblk) &
@@ -1743,7 +1744,7 @@ subroutine Tlatlon
 !$OMP                     x1,y1,z1,x2,y2,z2,x3,y3,z3,x4,y4,z4, &
 !$OMP                     tx,ty,tz,da)
       do iblk = 1, nblocks
-        this_block = get_block(blocks_ice(iblk),iblk)
+         this_block = get_block(blocks_ice(iblk),iblk)
         ilo = this_block%ilo
         ihi = this_block%ihi
         jlo = this_block%jlo
@@ -1915,7 +1916,7 @@ subroutine to_ugrid(work1,work2)
    !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block)
    do iblk = 1, nblocks
-     this_block = get_block(blocks_ice(iblk),iblk)
+      this_block = get_block(blocks_ice(iblk),iblk)
      ilo = this_block%ilo
      ihi = this_block%ihi
      jlo = this_block%jlo
@@ -2000,7 +2001,7 @@ subroutine to_tgrid(work1, work2)
    !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block)
    do iblk = 1, nblocks
-     this_block = get_block(blocks_ice(iblk),iblk)
+      this_block = get_block(blocks_ice(iblk),iblk)
      ilo = this_block%ilo
      ihi = this_block%ihi
      jlo = this_block%jlo
@@ -2073,7 +2074,7 @@ subroutine gridbox_corners
    !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block)
    do iblk = 1, nblocks
-     this_block = get_block(blocks_ice(iblk),iblk)
+      this_block = get_block(blocks_ice(iblk),iblk)
      ilo = this_block%ilo
      ihi = this_block%ihi
      jlo = this_block%jlo
@@ -2400,8 +2401,9 @@ subroutine get_bathymetry
       do iblk = 1, nblocks
          do j = 1, ny_block
             do i = 1, nx_block
-              k = kmt(i,j,iblk)
-              if (k > puny) bathymetry(i,j,iblk) = depth(k)
+              k = nint(kmt(i,j,iblk))
+              if (k > nlevel) call abort_ice(subname//' kmt gt nlevel error')
+              if (k > 0) bathymetry(i,j,iblk) = depth(k)
             enddo
          enddo
       enddo
@@ -2431,7 +2433,7 @@ subroutine get_bathymetry_popfile

       character(len=*), parameter :: subname = '(get_bathymetry_popfile)'

-     ntmp = maxval(KMT)
+     ntmp = maxval(nint(KMT))
      nlevel = global_maxval(ntmp,distrb_info)

      if (my_task==master_task) then
@@ -2491,8 +2493,8 @@ subroutine get_bathymetry_popfile
      do iblk = 1, nblocks
         do j = 1, ny_block
            do i = 1, nx_block
-              k = kmt(i,j,iblk)
-              if (k > nlevel) call abort_ice(subname//' kmt/nlevel error')
+              k = nint(kmt(i,j,iblk))
+              if (k > nlevel) call abort_ice(subname//' kmt gt nlevel error')
               if (k > 0) bathymetry(i,j,iblk) = depth(k)
            enddo
         enddo
diff --git a/cicecore/cicedynB/infrastructure/ice_read_write.F90 b/cicecore/cicedynB/infrastructure/ice_read_write.F90
index 87d0813cc..d902c62f8 100644
--- a/cicecore/cicedynB/infrastructure/ice_read_write.F90
+++ b/cicecore/cicedynB/infrastructure/ice_read_write.F90
@@ -1116,6 +1116,7 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, &
!       dimlen            ! dimension size

       real (kind=dbl_kind) :: &
+        missingvalue, &
         amin, amax, asum  ! min, max values and sum of input array

!     character (char_len) :: &
@@ -1141,6 +1142,8 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, &
       nx = nx_global
       ny = ny_global

+      work = c0 ! 
to satisfy intent(out) attribute + if (present(restart_ext)) then if (restart_ext) then nx = nx_global + 2*nghost @@ -1181,6 +1184,7 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & count=(/nx,ny,1/) ) endif + status = nf90_get_att(fid, varid, "_FillValue", missingvalue) endif ! my_task = master_task !------------------------------------------------------------------- @@ -1188,9 +1192,9 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & !------------------------------------------------------------------- if (my_task==master_task .and. diag) then -! write(nu_diag,*) & -! 'ice_read_nc_xy, fid= ',fid, ', nrec = ',nrec, & -! ', varname = ',trim(varname) + write(nu_diag,*) & + 'ice_read_nc_xy, fid= ',fid, ', nrec = ',nrec, & + ', varname = ',trim(varname) ! status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) ! write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar ! do id=1,ndim @@ -1198,8 +1202,8 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & ! write(nu_diag,*) 'Dim name = ',trim(dimname),', size = ',dimlen ! enddo amin = minval(work_g1) - amax = maxval(work_g1, mask = work_g1 /= spval_dbl) - asum = sum (work_g1, mask = work_g1 /= spval_dbl) + amax = maxval(work_g1, mask = work_g1 /= missingvalue) + asum = sum (work_g1, mask = work_g1 /= missingvalue) write(nu_diag,*) ' min, max, sum =', amin, amax, asum, trim(varname) endif @@ -1223,12 +1227,15 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & endif deallocate(work_g1) + +! echmod: this should not be necessary if fill/missing are only on land + where (work > 1.0e+30_dbl_kind) work = c0 + if (orca_halogrid .and. .not. present(restart_ext)) deallocate(work_g2) #else call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & file=__FILE__, line=__LINE__) - work = c0 ! to satisfy intent(out) attribute #endif end subroutine ice_read_nc_xy @@ -1282,6 +1289,7 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & ! dimlen ! size of dimension real (kind=dbl_kind) :: & + missingvalue, & ! missing value amin, amax, asum ! min, max values and sum of input array ! character (char_len) :: & @@ -1347,6 +1355,7 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & count=(/nx,ny,ncat,1/) ) endif + status = nf90_get_att(fid, varid, "_FillValue", missingvalue) endif ! my_task = master_task !------------------------------------------------------------------- @@ -1354,9 +1363,9 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & !------------------------------------------------------------------- if (my_task==master_task .and. diag) then -! write(nu_diag,*) & -! 'ice_read_nc_xyz, fid= ',fid, ', nrec = ',nrec, & -! ', varname = ',trim(varname) + write(nu_diag,*) & + 'ice_read_nc_xyz, fid= ',fid, ', nrec = ',nrec, & + ', varname = ',trim(varname) ! status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) ! write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar ! do id=1,ndim @@ -1365,8 +1374,8 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & ! 
enddo do n=1,ncat amin = minval(work_g1(:,:,n)) - amax = maxval(work_g1(:,:,n), mask = work_g1(:,:,n) /= spval_dbl) - asum = sum (work_g1(:,:,n), mask = work_g1(:,:,n) /= spval_dbl) + amax = maxval(work_g1(:,:,n), mask = work_g1(:,:,n) /= missingvalue) + asum = sum (work_g1(:,:,n), mask = work_g1(:,:,n) /= missingvalue) write(nu_diag,*) ' min, max, sum =', amin, amax, asum, trim(varname) enddo endif diff --git a/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 b/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 index 5a6c79503..1a5681b38 100644 --- a/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 +++ b/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 @@ -197,7 +197,7 @@ subroutine restartfile (ice_ic) use ice_boundary, only: ice_HaloUpdate_stress use ice_blocks, only: nghost, nx_block, ny_block - use ice_calendar, only: istep0, npt + use ice_calendar, only: istep0, npt, calendar use ice_communicate, only: my_task, master_task use ice_domain, only: nblocks, halo_info use ice_domain_size, only: nilyr, nslyr, ncat, & @@ -244,6 +244,7 @@ subroutine restartfile (ice_ic) file=__FILE__, line=__LINE__) call init_restart_read(ice_ic) + call calendar() diag = .true. @@ -529,7 +530,8 @@ subroutine restartfile_v4 (ice_ic) use ice_broadcast, only: broadcast_scalar use ice_blocks, only: nghost, nx_block, ny_block - use ice_calendar, only: istep0, istep1, time, time_forc, calendar, npt + use ice_calendar, only: istep0, istep1, timesecs, calendar, npt, & + set_date_from_timesecs use ice_communicate, only: my_task, master_task use ice_domain, only: nblocks, distrb_info use ice_domain_size, only: nilyr, nslyr, ncat, nx_global, ny_global, & @@ -571,6 +573,9 @@ subroutine restartfile_v4 (ice_ic) real (kind=dbl_kind), dimension(:,:), allocatable :: & work_g1, work_g2 + real (kind=dbl_kind) :: & + time_forc ! historic, now local + character(len=*), parameter :: subname = '(restartfile_v4)' call icepack_query_tracer_sizes(ntrcr_out=ntrcr) @@ -602,14 +607,15 @@ subroutine restartfile_v4 (ice_ic) if (use_restart_time) then if (my_task == master_task) then - read (nu_restart) istep0,time,time_forc - write(nu_diag,*) 'Restart read at istep=',istep0,time,time_forc + read (nu_restart) istep0,timesecs,time_forc + write(nu_diag,*) 'Restart read at istep=',istep0,timesecs endif call broadcast_scalar(istep0,master_task) istep1 = istep0 - call broadcast_scalar(time,master_task) - call broadcast_scalar(time_forc,master_task) - call calendar(time) + call broadcast_scalar(timesecs,master_task) +! call broadcast_scalar(time_forc,master_task) + call set_date_from_timesecs(timesecs) + call calendar() else diff --git a/cicecore/cicedynB/infrastructure/ice_restoring.F90 b/cicecore/cicedynB/infrastructure/ice_restoring.F90 index 38104315d..c7254cd80 100644 --- a/cicecore/cicedynB/infrastructure/ice_restoring.F90 +++ b/cicecore/cicedynB/infrastructure/ice_restoring.F90 @@ -98,6 +98,11 @@ subroutine ice_HaloRestore_init vsnon_rest(nx_block,ny_block,ncat,max_blocks), & trcrn_rest(nx_block,ny_block,ntrcr,ncat,max_blocks)) + aicen_rest(:,:,:,:) = c0 + vicen_rest(:,:,:,:) = c0 + vsnon_rest(:,:,:,:) = c0 + trcrn_rest(:,:,:,:,:) = c0 + !----------------------------------------------------------------------- ! initialize ! 
halo cells have to be filled manually at this stage diff --git a/cicecore/cicedynB/infrastructure/io/io_binary/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_binary/ice_restart.F90 index b1a2d026b..91d57ea48 100644 --- a/cicecore/cicedynB/infrastructure/io/io_binary/ice_restart.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_binary/ice_restart.F90 @@ -31,6 +31,8 @@ module ice_restart public :: init_restart_write, init_restart_read, & read_restart_field, write_restart_field, final_restart + real(kind=dbl_kind) :: time_forc = -99. ! historic now local + !======================================================================= contains @@ -42,7 +44,8 @@ module ice_restart subroutine init_restart_read(ice_ic) - use ice_calendar, only: istep0, istep1, time, time_forc, npt, nyr + use ice_calendar, only: istep0, istep1, timesecs, npt, myear, & + set_date_from_timesecs use ice_communicate, only: my_task, master_task use ice_dyn_shared, only: kdyn use ice_read_write, only: ice_open, ice_open_ext @@ -105,17 +108,18 @@ subroutine init_restart_read(ice_ic) call ice_open(nu_restart,trim(filename),0) endif if (use_restart_time) then - read (nu_restart) istep0,time,time_forc,nyr + read (nu_restart) istep0,timesecs,time_forc,myear else read (nu_restart) iignore,rignore,rignore ! use namelist values endif - write(nu_diag,*) 'Restart read at istep=',istep0,time,time_forc + write(nu_diag,*) 'Restart read at istep=',istep0,timesecs endif call broadcast_scalar(istep0,master_task) - call broadcast_scalar(time,master_task) + call broadcast_scalar(timesecs,master_task) call broadcast_scalar(time_forc,master_task) - call broadcast_scalar(nyr,master_task) + call broadcast_scalar(myear,master_task) + call set_date_from_timesecs(timesecs) istep1 = istep0 @@ -375,8 +379,8 @@ end subroutine init_restart_read subroutine init_restart_write(filename_spec) - use ice_calendar, only: sec, month, mday, nyr, istep1, & - time, time_forc, year_init + use ice_calendar, only: msec, mmonth, mday, myear, istep1, & + timesecs use ice_communicate, only: my_task, master_task use ice_dyn_shared, only: kdyn use ice_read_write, only: ice_open, ice_open_ext @@ -391,8 +395,7 @@ subroutine init_restart_write(filename_spec) tr_pond_topo, tr_pond_lvl, tr_brine integer (kind=int_kind) :: & - nbtrcr, & ! number of bgc tracers - iyear, imonth, iday ! year, month, day + nbtrcr ! number of bgc tracers character(len=char_len_long) :: filename @@ -414,14 +417,10 @@ subroutine init_restart_write(filename_spec) if (present(filename_spec)) then filename = trim(filename_spec) else - iyear = nyr + year_init - 1 - imonth = month - iday = mday - write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & restart_file(1:lenstr(restart_file)),'.', & - iyear,'-',month,'-',mday,'-',sec + myear,'-',mmonth,'-',mday,'-',msec end if ! 
write pointer (path/file) @@ -434,7 +433,7 @@ subroutine init_restart_write(filename_spec) else call ice_open(nu_dump,filename,0) endif - write(nu_dump) istep1,time,time_forc,nyr + write(nu_dump) istep1,timesecs,time_forc,myear write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif @@ -445,7 +444,7 @@ subroutine init_restart_write(filename_spec) write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & restart_file(1:lenstr(restart_file)),'.eap.', & - iyear,'-',month,'-',mday,'-',sec + myear,'-',mmonth,'-',mday,'-',msec if (restart_ext) then call ice_open_ext(nu_dump_eap,filename,0) @@ -454,7 +453,7 @@ subroutine init_restart_write(filename_spec) endif if (my_task == master_task) then - write(nu_dump_eap) istep1,time,time_forc + write(nu_dump_eap) istep1,timesecs,time_forc write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif @@ -465,7 +464,7 @@ subroutine init_restart_write(filename_spec) write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & restart_file(1:lenstr(restart_file)),'.fsd.', & - iyear,'-',month,'-',mday,'-',sec + myear,'-',mmonth,'-',mday,'-',msec if (restart_ext) then call ice_open_ext(nu_dump_fsd,filename,0) @@ -474,7 +473,7 @@ subroutine init_restart_write(filename_spec) endif if (my_task == master_task) then - write(nu_dump_fsd) istep1,time,time_forc + write(nu_dump_fsd) istep1,timesecs,time_forc write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif @@ -485,7 +484,7 @@ subroutine init_restart_write(filename_spec) write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & restart_file(1:lenstr(restart_file)),'.FY.', & - iyear,'-',month,'-',mday,'-',sec + myear,'-',mmonth,'-',mday,'-',msec if (restart_ext) then call ice_open_ext(nu_dump_FY,filename,0) @@ -494,7 +493,7 @@ subroutine init_restart_write(filename_spec) endif if (my_task == master_task) then - write(nu_dump_FY) istep1,time,time_forc + write(nu_dump_FY) istep1,timesecs,time_forc write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif @@ -505,7 +504,7 @@ subroutine init_restart_write(filename_spec) write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & restart_file(1:lenstr(restart_file)),'.iage.', & - iyear,'-',month,'-',mday,'-',sec + myear,'-',mmonth,'-',mday,'-',msec if (restart_ext) then call ice_open_ext(nu_dump_age,filename,0) @@ -514,7 +513,7 @@ subroutine init_restart_write(filename_spec) endif if (my_task == master_task) then - write(nu_dump_age) istep1,time,time_forc + write(nu_dump_age) istep1,timesecs,time_forc write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif @@ -525,7 +524,7 @@ subroutine init_restart_write(filename_spec) write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & restart_file(1:lenstr(restart_file)),'.lvl.', & - iyear,'-',month,'-',mday,'-',sec + myear,'-',mmonth,'-',mday,'-',msec if (restart_ext) then call ice_open_ext(nu_dump_lvl,filename,0) @@ -534,7 +533,7 @@ subroutine init_restart_write(filename_spec) endif if (my_task == master_task) then - write(nu_dump_lvl) istep1,time,time_forc + write(nu_dump_lvl) istep1,timesecs,time_forc write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif @@ -545,7 +544,7 @@ subroutine init_restart_write(filename_spec) write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & restart_file(1:lenstr(restart_file)),'.pond_cesm.', & - iyear,'-',month,'-',mday,'-',sec + 
myear,'-',mmonth,'-',mday,'-',msec if (restart_ext) then call ice_open_ext(nu_dump_pond,filename,0) @@ -554,7 +553,7 @@ subroutine init_restart_write(filename_spec) endif if (my_task == master_task) then - write(nu_dump_pond) istep1,time,time_forc + write(nu_dump_pond) istep1,timesecs,time_forc write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif @@ -565,7 +564,7 @@ subroutine init_restart_write(filename_spec) write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & restart_file(1:lenstr(restart_file)),'.pond_lvl.', & - iyear,'-',month,'-',mday,'-',sec + myear,'-',mmonth,'-',mday,'-',msec if (restart_ext) then call ice_open_ext(nu_dump_pond,filename,0) @@ -574,7 +573,7 @@ subroutine init_restart_write(filename_spec) endif if (my_task == master_task) then - write(nu_dump_pond) istep1,time,time_forc + write(nu_dump_pond) istep1,timesecs,time_forc write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif @@ -585,7 +584,7 @@ subroutine init_restart_write(filename_spec) write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & restart_file(1:lenstr(restart_file)),'.pond_topo.', & - iyear,'-',month,'-',mday,'-',sec + myear,'-',mmonth,'-',mday,'-',msec if (restart_ext) then call ice_open_ext(nu_dump_pond,filename,0) @@ -594,7 +593,7 @@ subroutine init_restart_write(filename_spec) endif if (my_task == master_task) then - write(nu_dump_pond) istep1,time,time_forc + write(nu_dump_pond) istep1,timesecs,time_forc write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif @@ -605,7 +604,7 @@ subroutine init_restart_write(filename_spec) write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & restart_file(1:lenstr(restart_file)),'.brine.', & - iyear,'-',month,'-',mday,'-',sec + myear,'-',mmonth,'-',mday,'-',msec if (restart_ext) then call ice_open_ext(nu_dump_hbrine,filename,0) @@ -614,7 +613,7 @@ subroutine init_restart_write(filename_spec) endif if (my_task == master_task) then - write(nu_dump_hbrine) istep1,time,time_forc + write(nu_dump_hbrine) istep1,timesecs,time_forc write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif @@ -625,7 +624,7 @@ subroutine init_restart_write(filename_spec) write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & restart_file(1:lenstr(restart_file)),'.bgc.', & - iyear,'-',month,'-',mday,'-',sec + myear,'-',mmonth,'-',mday,'-',msec if (restart_ext) then call ice_open_ext(nu_dump_bgc,filename,0) @@ -634,7 +633,7 @@ subroutine init_restart_write(filename_spec) endif if (my_task == master_task) then - write(nu_dump_bgc) istep1,time,time_forc + write(nu_dump_bgc) istep1,timesecs,time_forc write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif endif @@ -644,7 +643,7 @@ subroutine init_restart_write(filename_spec) write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & restart_file(1:lenstr(restart_file)),'.iso.', & - iyear,'-',month,'-',mday,'-',sec + myear,'-',mmonth,'-',mday,'-',msec if (restart_ext) then call ice_open_ext(nu_dump_iso,filename,0) @@ -653,7 +652,7 @@ subroutine init_restart_write(filename_spec) endif if (my_task == master_task) then - write(nu_dump_iso) istep1,time,time_forc + write(nu_dump_iso) istep1,timesecs,time_forc write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif @@ -664,7 +663,7 @@ subroutine init_restart_write(filename_spec) write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & 
restart_file(1:lenstr(restart_file)),'.aero.', & - iyear,'-',month,'-',mday,'-',sec + myear,'-',mmonth,'-',mday,'-',msec if (restart_ext) then call ice_open_ext(nu_dump_aero,filename,0) @@ -673,7 +672,7 @@ subroutine init_restart_write(filename_spec) endif if (my_task == master_task) then - write(nu_dump_aero) istep1,time,time_forc + write(nu_dump_aero) istep1,timesecs,time_forc write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif @@ -803,7 +802,7 @@ end subroutine write_restart_field subroutine final_restart() - use ice_calendar, only: istep1, time, time_forc + use ice_calendar, only: istep1, timesecs use ice_communicate, only: my_task, master_task logical (kind=log_kind) :: & @@ -843,7 +842,7 @@ subroutine final_restart() if (solve_zsal .or. nbtrcr > 0) & close(nu_dump_bgc) - write(nu_diag,*) 'Restart read/written ',istep1,time,time_forc + write(nu_diag,*) 'Restart read/written ',istep1,timesecs endif end subroutine final_restart diff --git a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 index b3024302e..9c6b30ee1 100644 --- a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 @@ -47,8 +47,9 @@ subroutine ice_write_hist (ns) use ice_arrays_column, only: hin_max, floe_rad_c use ice_blocks, only: nx_block, ny_block use ice_broadcast, only: broadcast_scalar - use ice_calendar, only: time, sec, idate, idate0, write_ic, & - histfreq, dayyr, days_per_year, use_leap_years + use ice_calendar, only: msec, timesecs, idate, idate0, write_ic, & + histfreq, days_per_year, use_leap_years, dayyr, & + year_init, month_init, day_init use ice_communicate, only: my_task, master_task use ice_domain, only: distrb_info use ice_domain_size, only: nx_global, ny_global, max_nstrm, max_blocks @@ -80,7 +81,6 @@ subroutine ice_write_hist (ns) integer (kind=int_kind), dimension(5) :: dimidcz integer (kind=int_kind), dimension(3) :: dimid_nverts integer (kind=int_kind), dimension(6) :: dimidex -! real (kind=real_kind) :: ltime real (kind=dbl_kind) :: ltime2 character (char_len) :: title character (char_len_long) :: ncfile(max_nstrm) @@ -133,8 +133,7 @@ subroutine ice_write_hist (ns) if (my_task == master_task) then -! 
ltime=time/int(secday) - ltime2=time/int(secday) + ltime2 = timesecs/secday call construct_filename(ncfile(ns),'nc',ns) @@ -1038,9 +1037,9 @@ subroutine ice_write_hist (ns) 'ERROR: global attribute source') if (use_leap_years) then - write(title,'(a,i3,a)') 'This year has ',int(dayyr),' days' + write(title,'(a,i3,a)') 'This year has ',dayyr,' days' else - write(title,'(a,i3,a)') 'All years have exactly ',int(dayyr),' days' + write(title,'(a,i3,a)') 'All years have exactly ',dayyr,' days' endif status = nf90_put_att(ncid,nf90_global,'comment',title) if (status /= nf90_noerr) call abort_ice(subname// & @@ -1051,7 +1050,7 @@ subroutine ice_write_hist (ns) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: global attribute date1') - write(title,'(a,i6)') 'seconds elapsed into model date: ',sec + write(title,'(a,i6)') 'seconds elapsed into model date: ',msec status = nf90_put_att(ncid,nf90_global,'comment3',title) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: global attribute date2') @@ -1091,7 +1090,6 @@ subroutine ice_write_hist (ns) status = nf90_inq_varid(ncid,'time',varid) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: getting time varid') -!sgl status = nf90_put_var(ncid,varid,ltime) status = nf90_put_var(ncid,varid,ltime2) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: writing time variable') diff --git a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 index 53c7dac60..e744caf09 100644 --- a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 @@ -42,8 +42,8 @@ module ice_restart subroutine init_restart_read(ice_ic) - use ice_calendar, only: sec, month, mday, nyr, istep0, istep1, & - time, time_forc, npt + use ice_calendar, only: msec, mmonth, mday, myear, & + istep0, istep1, npt use ice_communicate, only: my_task, master_task character(len=char_len_long), intent(in), optional :: ice_ic @@ -53,7 +53,7 @@ subroutine init_restart_read(ice_ic) character(len=char_len_long) :: & filename, filename0 - integer (kind=int_kind) :: status + integer (kind=int_kind) :: status, status1 character(len=*), parameter :: subname = '(init_restart_read)' @@ -79,24 +79,36 @@ subroutine init_restart_read(ice_ic) 'ERROR: reading restart ncfile '//trim(filename)) if (use_restart_time) then - status = nf90_get_att(ncid, nf90_global, 'istep1', istep0) - status = nf90_get_att(ncid, nf90_global, 'time', time) - status = nf90_get_att(ncid, nf90_global, 'time_forc', time_forc) - status = nf90_get_att(ncid, nf90_global, 'nyr', nyr) - if (status == nf90_noerr) then - status = nf90_get_att(ncid, nf90_global, 'month', month) + status1 = nf90_noerr + status = nf90_get_att(ncid, nf90_global, 'istep1', istep0) + if (status /= nf90_noerr) status1 = status +! status = nf90_get_att(ncid, nf90_global, 'time', time) +! 
status = nf90_get_att(ncid, nf90_global, 'time_forc', time_forc) + status = nf90_get_att(ncid, nf90_global, 'myear', myear) + if (status /= nf90_noerr) status = nf90_get_att(ncid, nf90_global, 'nyr', myear) + if (status /= nf90_noerr) status1 = status + status = nf90_get_att(ncid, nf90_global, 'mmonth', mmonth) + if (status /= nf90_noerr) status = nf90_get_att(ncid, nf90_global, 'month', mmonth) + if (status /= nf90_noerr) status1 = status status = nf90_get_att(ncid, nf90_global, 'mday', mday) - status = nf90_get_att(ncid, nf90_global, 'sec', sec) - endif + if (status /= nf90_noerr) status1 = status + status = nf90_get_att(ncid, nf90_global, 'msec', msec) + if (status /= nf90_noerr) status = nf90_get_att(ncid, nf90_global, 'sec', msec) + if (status /= nf90_noerr) status1 = status + if (status1 /= nf90_noerr) call abort_ice(subname// & + 'ERROR: reading restart time '//trim(filename)) endif ! use namelist values if use_restart_time = F endif call broadcast_scalar(istep0,master_task) - call broadcast_scalar(time,master_task) - call broadcast_scalar(time_forc,master_task) - call broadcast_scalar(nyr,master_task) - +! call broadcast_scalar(time,master_task) + call broadcast_scalar(myear,master_task) + call broadcast_scalar(mmonth,master_task) + call broadcast_scalar(mday,master_task) + call broadcast_scalar(msec,master_task) +! call broadcast_scalar(time_forc,master_task) + istep1 = istep0 ! if runid is bering then need to correct npt for istep0 @@ -118,8 +130,7 @@ end subroutine init_restart_read subroutine init_restart_write(filename_spec) use ice_blocks, only: nghost - use ice_calendar, only: sec, month, mday, nyr, istep1, & - time, time_forc, year_init + use ice_calendar, only: msec, mmonth, mday, myear, istep1 use ice_communicate, only: my_task, master_task use ice_domain_size, only: nx_global, ny_global, ncat, nilyr, nslyr, & n_iso, n_aero, nblyr, n_zaero, n_algae, n_doc, & @@ -145,7 +156,6 @@ subroutine init_restart_write(filename_spec) integer (kind=int_kind) :: & k, n, & ! index nx, ny, & ! global array size - iyear, & ! year nbtrcr ! number of bgc tracers character(len=char_len_long) :: filename @@ -186,12 +196,10 @@ subroutine init_restart_write(filename_spec) if (present(filename_spec)) then filename = trim(filename_spec) else - iyear = nyr + year_init - 1 - write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & restart_file(1:lenstr(restart_file)),'.', & - iyear,'-',month,'-',mday,'-',sec + myear,'-',mmonth,'-',mday,'-',msec end if ! write pointer (path/file) @@ -208,12 +216,12 @@ subroutine init_restart_write(filename_spec) 'ERROR: creating restart ncfile '//trim(filename)) status = nf90_put_att(ncid,nf90_global,'istep1',istep1) - status = nf90_put_att(ncid,nf90_global,'time',time) - status = nf90_put_att(ncid,nf90_global,'time_forc',time_forc) - status = nf90_put_att(ncid,nf90_global,'nyr',nyr) - status = nf90_put_att(ncid,nf90_global,'month',month) +! status = nf90_put_att(ncid,nf90_global,'time',time) +! 
status = nf90_put_att(ncid,nf90_global,'time_forc',time_forc) + status = nf90_put_att(ncid,nf90_global,'myear',myear) + status = nf90_put_att(ncid,nf90_global,'mmonth',mmonth) status = nf90_put_att(ncid,nf90_global,'mday',mday) - status = nf90_put_att(ncid,nf90_global,'sec',sec) + status = nf90_put_att(ncid,nf90_global,'msec',msec) nx = nx_global ny = ny_global @@ -795,7 +803,7 @@ end subroutine write_restart_field subroutine final_restart() - use ice_calendar, only: istep1, time, time_forc + use ice_calendar, only: istep1, idate use ice_communicate, only: my_task, master_task integer (kind=int_kind) :: status @@ -806,7 +814,7 @@ subroutine final_restart() status = nf90_close(ncid) if (my_task == master_task) & - write(nu_diag,*) 'Restart read/written ',istep1,time,time_forc + write(nu_diag,*) 'Restart read/written ',istep1,idate #else call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 index 7e16f2591..72a1ed97f 100644 --- a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 @@ -41,8 +41,8 @@ subroutine ice_write_hist (ns) use ice_blocks, only: nx_block, ny_block use ice_broadcast, only: broadcast_scalar - use ice_calendar, only: time, sec, idate, idate0, write_ic, & - histfreq, dayyr, days_per_year, use_leap_years + use ice_calendar, only: msec, timesecs, idate, idate0, write_ic, & + histfreq, days_per_year, use_leap_years, dayyr use ice_communicate, only: my_task, master_task use ice_constants, only: c0, c360, spval, spval_dbl use ice_domain, only: distrb_info, nblocks @@ -76,7 +76,6 @@ subroutine ice_write_hist (ns) character (char_len_long) :: ncfile(max_nstrm) integer (kind=int_kind) :: iotype - integer (kind=int_kind) :: iyear, imonth, iday integer (kind=int_kind) :: icategory,ind,i_aice,boundid character (char_len) :: start_time,current_date,current_time @@ -176,8 +175,8 @@ subroutine ice_write_hist (ns) call ice_pio_initdecomp(ndim3=nzslyr, ndim4=ncat_hist, iodesc=iodesc4ds) call ice_pio_initdecomp(ndim3=nfsd_hist, ndim4=ncat_hist, iodesc=iodesc4df) - ltime2 = time/int(secday) - ltime = real(time/int(secday),kind=real_kind) + ltime2 = timesecs/secday + ltime = real(timesecs/secday,kind=real_kind) ! 
option of turning on double precision history files lprecision = pio_real @@ -861,16 +860,16 @@ subroutine ice_write_hist (ns) status = pio_put_att(File,pio_global,'source',trim(title)) if (use_leap_years) then - write(title,'(a,i3,a)') 'This year has ',int(dayyr),' days' + write(title,'(a,i3,a)') 'This year has ',dayyr,' days' else - write(title,'(a,i3,a)') 'All years have exactly ',int(dayyr),' days' + write(title,'(a,i3,a)') 'All years have exactly ',dayyr,' days' endif status = pio_put_att(File,pio_global,'comment',trim(title)) write(title,'(a,i8.8)') 'File written on model date ',idate status = pio_put_att(File,pio_global,'comment2',trim(title)) - write(title,'(a,i6)') 'seconds elapsed into model date: ',sec + write(title,'(a,i6)') 'seconds elapsed into model date: ',msec status = pio_put_att(File,pio_global,'comment3',trim(title)) title = 'CF-1.0' diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 index eb703abcd..12d5d8e71 100644 --- a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 @@ -41,8 +41,8 @@ module ice_restart subroutine init_restart_read(ice_ic) - use ice_calendar, only: istep0, istep1, time, time_forc, nyr, month, & - mday, sec, npt + use ice_calendar, only: istep0, istep1, myear, mmonth, & + mday, msec, npt use ice_communicate, only: my_task, master_task use ice_domain_size, only: ncat use ice_read_write, only: ice_open @@ -54,7 +54,7 @@ subroutine init_restart_read(ice_ic) character(len=char_len_long) :: & filename, filename0 - integer (kind=int_kind) :: status + integer (kind=int_kind) :: status, status1 integer (kind=int_kind) :: iotype @@ -87,28 +87,40 @@ subroutine init_restart_read(ice_ic) call ice_pio_initdecomp(ndim3=ncat , iodesc=iodesc3d_ncat,remap=.true.) if (use_restart_time) then - status = pio_get_att(File, pio_global, 'istep1', istep0) - status = pio_get_att(File, pio_global, 'time', time) - status = pio_get_att(File, pio_global, 'time_forc', time_forc) - call pio_seterrorhandling(File, PIO_BCAST_ERROR) - status = pio_get_att(File, pio_global, 'nyr', nyr) - call pio_seterrorhandling(File, PIO_INTERNAL_ERROR) - if (status == PIO_noerr) then - status = pio_get_att(File, pio_global, 'month', month) + status1 = PIO_noerr + status = pio_get_att(File, pio_global, 'istep1', istep0) +! status = pio_get_att(File, pio_global, 'time', time) +! status = pio_get_att(File, pio_global, 'time_forc', time_forc) + call pio_seterrorhandling(File, PIO_BCAST_ERROR) + status = pio_get_att(File, pio_global, 'myear', myear) + if (status /= PIO_noerr) status = pio_get_att(File, pio_global, 'nyr', myear) + if (status /= PIO_noerr) status1 = status + status = pio_get_att(File, pio_global, 'mmonth', mmonth) + if (status /= PIO_noerr) status = pio_get_att(File, pio_global, 'month', mmonth) + if (status /= PIO_noerr) status1 = status status = pio_get_att(File, pio_global, 'mday', mday) - status = pio_get_att(File, pio_global, 'sec', sec) - endif + if (status /= PIO_noerr) status1 = status + status = pio_get_att(File, pio_global, 'msec', msec) + if (status /= PIO_noerr) status = pio_get_att(File, pio_global, 'sec', msec) + if (status /= PIO_noerr) status1 = status + if (status1 /= PIO_noerr) & + call abort_ice(subname//"ERROR: reading restart time ") + call pio_seterrorhandling(File, PIO_INTERNAL_ERROR) endif ! use namelist values if use_restart_time = F ! 
endif if (my_task == master_task) then - write(nu_diag,*) 'Restart read at istep=',istep0,time,time_forc + write(nu_diag,*) 'Restart read at istep=',istep0,myear,mmonth,mday,msec endif call broadcast_scalar(istep0,master_task) - call broadcast_scalar(time,master_task) - call broadcast_scalar(time_forc,master_task) - call broadcast_scalar(nyr,master_task) + call broadcast_scalar(myear,master_task) + call broadcast_scalar(mmonth,master_task) + call broadcast_scalar(mday,master_task) + call broadcast_scalar(msec,master_task) +! call broadcast_scalar(time,master_task) +! call broadcast_scalar(time_forc,master_task) + call broadcast_scalar(myear,master_task) istep1 = istep0 @@ -126,8 +138,7 @@ end subroutine init_restart_read subroutine init_restart_write(filename_spec) - use ice_calendar, only: sec, month, mday, nyr, istep1, & - time, time_forc, year_init + use ice_calendar, only: msec, mmonth, mday, myear, istep1 use ice_communicate, only: my_task, master_task use ice_domain_size, only: nx_global, ny_global, ncat, nilyr, nslyr, & n_iso, n_aero, nblyr, n_zaero, n_algae, n_doc, & @@ -155,9 +166,6 @@ subroutine init_restart_write(filename_spec) ! local variables - integer (kind=int_kind) :: & - iyear, imonth, iday ! year, month, day - character(len=char_len_long) :: filename integer (kind=int_kind) :: dimid_ni, dimid_nj, dimid_ncat, & @@ -196,14 +204,10 @@ subroutine init_restart_write(filename_spec) if (present(filename_spec)) then filename = trim(filename_spec) else - iyear = nyr + year_init - 1 - imonth = month - iday = mday - write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & restart_file(1:lenstr(restart_file)),'.', & - iyear,'-',month,'-',mday,'-',sec + myear,'-',mmonth,'-',mday,'-',msec end if if (restart_format(1:3) /= 'bin') filename = trim(filename) // '.nc' @@ -224,12 +228,12 @@ subroutine init_restart_write(filename_spec) clobber=.true., cdf64=lcdf64, iotype=iotype) status = pio_put_att(File,pio_global,'istep1',istep1) - status = pio_put_att(File,pio_global,'time',time) - status = pio_put_att(File,pio_global,'time_forc',time_forc) - status = pio_put_att(File,pio_global,'nyr',nyr) - status = pio_put_att(File,pio_global,'month',month) +! status = pio_put_att(File,pio_global,'time',time) +! status = pio_put_att(File,pio_global,'time_forc',time_forc) + status = pio_put_att(File,pio_global,'myear',myear) + status = pio_put_att(File,pio_global,'mmonth',mmonth) status = pio_put_att(File,pio_global,'mday',mday) - status = pio_put_att(File,pio_global,'sec',sec) + status = pio_put_att(File,pio_global,'msec',msec) status = pio_def_dim(File,'ni',nx_global,dimid_ni) status = pio_def_dim(File,'nj',ny_global,dimid_nj) @@ -702,7 +706,7 @@ subroutine read_restart_field(nu,nrec,work,atype,vname,ndim3,diag, & status = pio_inq_varid(File,trim(vname),vardesc) - if (status /= 0) then + if (status /= PIO_noerr) then call abort_ice(subname//"ERROR: CICE restart? 
Missing variable: "//trim(vname)) endif @@ -854,7 +858,7 @@ end subroutine write_restart_field subroutine final_restart() - use ice_calendar, only: istep1, time, time_forc + use ice_calendar, only: istep1, idate, msec use ice_communicate, only: my_task, master_task character(len=*), parameter :: subname = '(final_restart)' @@ -864,7 +868,7 @@ subroutine final_restart() call pio_closefile(File) if (my_task == master_task) & - write(nu_diag,*) 'Restart read/written ',istep1,time,time_forc + write(nu_diag,*) 'Restart read/written ',istep1,idate,msec end subroutine final_restart diff --git a/cicecore/drivers/direct/hadgem3/CICE.F90 b/cicecore/drivers/direct/hadgem3/CICE.F90 index e444dcd40..b2314240c 100644 --- a/cicecore/drivers/direct/hadgem3/CICE.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE.F90 @@ -56,40 +56,3 @@ program icemodel end program icemodel !======================================================================= -! -! Wrapper for the print_state debugging routine. -! Useful for debugging in the main driver (see ice.F_debug) -! ip, jp, mtask are set in ice_diagnostics.F -! -! author Elizabeth C. Hunke, LANL -! - subroutine debug_ice(plabeld) - - use ice_kinds_mod - use ice_calendar, only: istep1 - use ice_communicate, only: my_task - use ice_diagnostics, only: check_step, iblkp, ip, jp, mtask, print_state - use ice_domain, only: nblocks - use ice_blocks, only: nx_block, ny_block - - character (char_len), intent(in) :: plabeld - - ! local - integer (kind=int_kind) :: i, j, iblk - - if (istep1 >= check_step) then - - do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - if (iblk==iblkp .and. i==ip .and. j==jp .and. my_task==mtask) & - call print_state(plabeld,i,j,iblk) - enddo - enddo - enddo - - endif - - end subroutine debug_ice - -!======================================================================= diff --git a/cicecore/drivers/direct/hadgem3/CICE_FinalMod.F90 b/cicecore/drivers/direct/hadgem3/CICE_FinalMod.F90 index 397950023..2fdb170f1 100644 --- a/cicecore/drivers/direct/hadgem3/CICE_FinalMod.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE_FinalMod.F90 @@ -48,12 +48,6 @@ subroutine CICE_Finalize !echmod if (nu_diag /= 6) close (nu_diag) ! diagnostic output call release_all_fileunits - !------------------------------------------------------------------- - ! write 'finished' file if needed - !------------------------------------------------------------------- - - if (runid == 'bering') call writeout_finished_file() - !------------------------------------------------------------------- ! quit MPI !------------------------------------------------------------------- @@ -63,31 +57,6 @@ subroutine CICE_Finalize end subroutine CICE_Finalize -!======================================================================= -! -! Write a file indicating that this run finished cleanly. This is -! needed only for runs on machine 'bering' (set using runid = 'bering'). -! -! 
author: Adrian Turner, LANL - - subroutine writeout_finished_file() - - use ice_restart_shared, only: restart_dir - use ice_communicate, only: my_task, master_task - - character(len=char_len_long) :: filename - - if (my_task == master_task) then - - filename = trim(restart_dir)//"finished" - open(11,file=filename) - write(11,*) "finished" - close(11) - - endif - - end subroutine writeout_finished_file - !======================================================================= end module CICE_FinalMod diff --git a/cicecore/drivers/mct/cesm1/CICE_FinalMod.F90 b/cicecore/drivers/mct/cesm1/CICE_FinalMod.F90 index 943787498..0be3636f0 100644 --- a/cicecore/drivers/mct/cesm1/CICE_FinalMod.F90 +++ b/cicecore/drivers/mct/cesm1/CICE_FinalMod.F90 @@ -45,12 +45,6 @@ subroutine CICE_Finalize !echmod if (nu_diag /= 6) close (nu_diag) ! diagnostic output call release_all_fileunits - !------------------------------------------------------------------- - ! write 'finished' file if needed - !------------------------------------------------------------------- - - if (runid == 'bering') call writeout_finished_file() - !------------------------------------------------------------------- ! quit MPI !------------------------------------------------------------------- @@ -60,31 +54,6 @@ subroutine CICE_Finalize end subroutine CICE_Finalize -!======================================================================= -! -! Write a file indicating that this run finished cleanly. This is -! needed only for runs on machine 'bering' (set using runid = 'bering'). -! -! author: Adrian Turner, LANL - - subroutine writeout_finished_file() - - use ice_restart_shared, only: restart_dir - - character(len=char_len_long) :: filename - character(len=*), parameter :: subname = '(writeout_finished_file)' - - if (my_task == master_task) then - - filename = trim(restart_dir)//"finished" - open(11,file=filename) - write(11,*) "finished" - close(11) - - endif - - end subroutine writeout_finished_file - !======================================================================= end module CICE_FinalMod diff --git a/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 b/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 index da745d965..c3de87f68 100644 --- a/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 +++ b/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 @@ -64,7 +64,7 @@ subroutine cice_init(mpicom_ice) floe_binwidth, c_fsd_range use ice_state, only: alloc_state use ice_flux_bgc, only: alloc_flux_bgc - use ice_calendar, only: dt, dt_dyn, time, istep, istep1, write_ic, & + use ice_calendar, only: dt, dt_dyn, istep, istep1, write_ic, & init_calendar, calendar use ice_communicate, only: init_communicate, my_task, master_task use ice_diagnostics, only: init_diags @@ -156,7 +156,7 @@ subroutine cice_init(mpicom_ice) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - call calendar(time) ! determine the initial date + call calendar ! determine the initial date call init_forcing_ocn(dt) ! initialize sss and sst from data call init_state ! initialize the ice state @@ -233,7 +233,7 @@ subroutine init_restart use ice_arrays_column, only: dhsn use ice_blocks, only: nx_block, ny_block - use ice_calendar, only: time, calendar + use ice_calendar, only: calendar use ice_constants, only: c0 use ice_domain, only: nblocks use ice_domain_size, only: ncat, n_iso, n_aero, nfsd @@ -295,7 +295,7 @@ subroutine init_restart if (trim(runtype) == 'continue') then ! start from core restart file call restartfile() ! 
given by pointer in ice_in - call calendar(time) ! update time parameters + call calendar ! update time parameters if (kdyn == 2) call read_restart_eap ! EAP else if (restart) then ! ice_ic = core restart file call restartfile (ice_ic) ! or 'default' or 'none' diff --git a/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 b/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 index d53014b7b..e9ab0d7e4 100644 --- a/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 +++ b/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 @@ -44,7 +44,7 @@ module CICE_RunMod subroutine CICE_Run - use ice_calendar, only: istep, istep1, time, dt, stop_now, calendar + use ice_calendar, only: istep, istep1, dt, stop_now, calendar, advance_timestep use ice_forcing, only: get_forcing_atmo, get_forcing_ocn, & get_wave_spec use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & @@ -81,12 +81,14 @@ subroutine CICE_Run ! call ice_step - istep = istep + 1 ! update time step counters - istep1 = istep1 + 1 - time = time + dt ! determine the time and date +! istep = istep + 1 ! update time step counters +! istep1 = istep1 + 1 +! time = time + dt ! determine the time and date ! call calendar(time) ! at the end of the timestep + call advance_timestep() ! advance timestep and update calendar data + call ice_timer_start(timer_couple) ! atm/ocn coupling ! for standalone @@ -108,7 +110,7 @@ subroutine CICE_Run call init_flux_atm ! Initialize atmosphere fluxes sent to coupler call init_flux_ocn ! initialize ocean fluxes sent to coupler - call calendar(time) ! at the end of the timestep +! call calendar(time) ! at the end of the timestep call ice_timer_stop(timer_couple) ! atm/ocn coupling @@ -136,7 +138,7 @@ subroutine ice_step use ice_boundary, only: ice_HaloUpdate use ice_calendar, only: dt, dt_dyn, ndtd, diagfreq, write_restart, istep - use ice_calendar, only: idate, sec + use ice_calendar, only: idate, msec use ice_diagnostics, only: init_mass_diags, runtime_diags use ice_diagnostics_bgc, only: hbrine_diags, zsal_diags, bgc_diags use ice_domain, only: halo_info, nblocks @@ -207,7 +209,7 @@ subroutine ice_step if (prescribed_ice) then ! read prescribed ice call t_barrierf('cice_run_presc_BARRIER',MPI_COMM_ICE) call t_startf ('cice_run_presc') - call ice_prescribed_run(idate, sec) + call ice_prescribed_run(idate, msec) call t_stopf ('cice_run_presc') endif diff --git a/cicecore/drivers/mct/cesm1/ice_comp_esmf.F90 b/cicecore/drivers/mct/cesm1/ice_comp_esmf.F90 index 8ae80abdc..08681d84f 100644 --- a/cicecore/drivers/mct/cesm1/ice_comp_esmf.F90 +++ b/cicecore/drivers/mct/cesm1/ice_comp_esmf.F90 @@ -48,10 +48,10 @@ module ice_comp_esmf use ice_constants, only : c0, c1, spval_dbl, rad_to_deg, radius, secday use ice_communicate, only : my_task, master_task, MPI_COMM_ICE use ice_calendar, only : istep, istep1, force_restart_now, write_ic,& - idate, idate0, mday, time, month, daycal, & - sec, dt, dt_dyn, calendar, & + idate, idate0, mday, time, mmonth, & + msec, dt, dt_dyn, calendar, & calendar_type, nextsw_cday, days_per_year, & - nyr, new_year, time2sec, year_init + myear, new_year, time2sec, year_init use icepack_orbital, only : eccen, obliqr, lambm0, mvelpp use ice_timers @@ -178,12 +178,11 @@ subroutine ice_init_esmf(comp, import_state, export_state, EClock, rc) integer :: ref_ymd ! Reference date (YYYYMMDD) integer :: ref_tod ! reference time of day (s) integer :: iyear ! yyyy - integer :: nyrp ! yyyy + integer :: myearp ! yyyy integer :: dtime ! time step integer :: shrlogunit,shrloglev ! 
old values integer :: iam,ierr integer :: lbnum - integer :: daycal(13) !number of cumulative days per month integer :: nleaps ! number of leap days before current year integer :: mpicom_loc, mpicom_vm, gsize integer :: nfields @@ -367,17 +366,17 @@ subroutine ice_init_esmf(comp, import_state, export_state, EClock, rc) endif iyear = (idate/10000) ! integer year of basedate - month = (idate-iyear*10000)/100 ! integer month of basedate - mday = idate-iyear*10000-month*100 ! day of month of basedate + mmonth= (idate-iyear*10000)/100 ! integer month of basedate + mday = idate-iyear*10000-mmonth*100 ! day of month of basedate if (my_task == master_task) then write(nu_diag,*) trim(subname),' curr_ymd = ',curr_ymd write(nu_diag,*) trim(subname),' cice year_init = ',year_init write(nu_diag,*) trim(subname),' cice start date = ',idate - write(nu_diag,*) trim(subname),' cice start ymds = ',iyear,month,mday,start_tod + write(nu_diag,*) trim(subname),' cice start ymds = ',iyear,mmonth,mday,start_tod endif - call time2sec(iyear,month,mday,time) + call time2sec(iyear,mmonth,mday,time) time = time+start_tod call shr_sys_flush(nu_diag) @@ -641,7 +640,7 @@ subroutine ice_run_esmf(comp, import_state, export_state, EClock, rc) integer :: curr_tod ! Current time of day (s) integer :: shrlogunit,shrloglev ! old values integer :: lbnum - integer :: n, nyrp + integer :: n, myearp type(ESMF_Array) :: i2x, x2i real(R8), pointer :: fptr(:,:) character(len=*), parameter :: subname = '(ice_run_esmf)' @@ -695,9 +694,9 @@ subroutine ice_run_esmf(comp, import_state, export_state, EClock, rc) force_restart_now = seq_timemgr_RestartAlarmIsOn(EClock) if (calendar_type .eq. "GREGORIAN") then - nyrp = nyr - nyr = (curr_ymd/10000)+1 ! integer year of basedate - if (nyr /= nyrp) then + myearp = myear + myear = (curr_ymd/10000)+1 ! integer year of basedate + if (myear /= myearp) then new_year = .true. else new_year = .false. @@ -758,7 +757,7 @@ subroutine ice_run_esmf(comp, import_state, export_state, EClock, rc) ! check that internal clock is in sync with master clock !-------------------------------------------------------------------- - tod = sec + tod = msec ymd = idate if (.not. seq_timemgr_EClockDateInSync( EClock, ymd, tod )) then call seq_timemgr_EClockGetData( EClock, curr_ymd=ymd_sync, & diff --git a/cicecore/drivers/mct/cesm1/ice_comp_mct.F90 b/cicecore/drivers/mct/cesm1/ice_comp_mct.F90 index 7162d6397..64dff54e2 100644 --- a/cicecore/drivers/mct/cesm1/ice_comp_mct.F90 +++ b/cicecore/drivers/mct/cesm1/ice_comp_mct.F90 @@ -47,10 +47,9 @@ module ice_comp_mct use ice_constants, only : ice_init_constants use ice_communicate, only : my_task, master_task, MPI_COMM_ICE use ice_calendar, only : istep, istep1, force_restart_now, write_ic,& - idate, idate0, mday, time, month, daycal, & - sec, dt, dt_dyn, calendar, & - calendar_type, nextsw_cday, days_per_year, & - nyr, new_year, time2sec, year_init + idate, idate0, mday, mmonth, myear, & + msec, dt, dt_dyn, calendar, & + calendar_type, nextsw_cday, days_per_year use ice_timers use ice_kinds_mod, only : int_kind, dbl_kind, char_len_long, log_kind @@ -151,13 +150,11 @@ subroutine ice_init_mct( EClock, cdata_i, x2i_i, i2x_i, NLFilename ) integer :: curr_tod ! Current time of day (s) integer :: ref_ymd ! Reference date (YYYYMMDD) integer :: ref_tod ! reference time of day (s) - integer :: iyear ! yyyy - integer :: nyrp ! yyyy + integer :: myearp ! yyyy integer :: dtime ! time step integer :: shrlogunit,shrloglev ! 
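!-----------------------------------------------------------------------
! In ice_run_esmf above, the renamed prognostic year (nyr -> myear,
! nyrp -> myearp) still drives the Gregorian new-year flag: the old
! value is saved and compared with the year re-derived from the
! coupler date. A minimal sketch, assuming curr_ymd is an integer
! YYYYMMDD and keeping the "+1" basedate offset used in the hunk:

      if (calendar_type .eq. "GREGORIAN") then
         myearp = myear
         myear  = (curr_ymd/10000) + 1   ! integer division drops MMDD
         new_year = (myear /= myearp)    ! condensed form of the
      endif                              ! if/else in the hunk above
!-----------------------------------------------------------------------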
old values integer :: iam,ierr integer :: lbnum - integer :: daycal(13) !number of cumulative days per month integer :: nleaps ! number of leap days before current year integer :: mpicom_loc ! temporary mpicom logical (kind=log_kind) :: atm_aero, tr_aero, tr_zaero @@ -302,10 +299,9 @@ subroutine ice_init_mct( EClock, cdata_i, x2i_i, i2x_i, NLFilename ) ! - on restart run ! - istep0, time and time_forc are read from restart file ! - istep1 is set to istep0 - ! - idate is determined from time via the call to calendar (see below) + ! - date information is determined from restart ! - on initial run - ! - iyear, month and mday obtained from sync clock - ! - time determined from iyear, month and mday + ! - myear, mmonth, mday, msec obtained from sync clock ! - istep0 and istep1 are set to 0 call seq_timemgr_EClockGetData(EClock, & @@ -335,37 +331,26 @@ subroutine ice_init_mct( EClock, cdata_i, x2i_i, i2x_i, NLFilename ) idate0 = curr_ymd idate = curr_ymd -! idate0 = curr_ymd - (year_init*10000) -! idate = curr_ymd - (year_init*10000) - if (idate < 0) then - write(nu_diag,*) trim(subname),' ERROR curr_ymd,year_init =',curr_ymd,year_init + write(nu_diag,*) trim(subname),' ERROR curr_ymd =',curr_ymd write(nu_diag,*) trim(subname),' ERROR idate lt zero',idate call shr_sys_abort(subname//' :: ERROR idate lt zero') endif - iyear = (idate/10000) ! integer year of basedate - month = (idate-iyear*10000)/100 ! integer month of basedate - mday = idate-iyear*10000-month*100 ! day of month of basedate + myear = (idate/10000) ! integer year of basedate + mmonth= (idate-myear*10000)/100 ! integer month of basedate + mday = idate-myear*10000-mmonth*100 ! day of month of basedate + msec = start_tod ! seconds if (my_task == master_task) then write(nu_diag,*) trim(subname),' curr_ymd = ',curr_ymd - write(nu_diag,*) trim(subname),' cice year_init = ',year_init write(nu_diag,*) trim(subname),' cice start date = ',idate - write(nu_diag,*) trim(subname),' cice start ymds = ',iyear,month,mday,start_tod + write(nu_diag,*) trim(subname),' cice start ymds = ',myear,mmonth,mday,start_tod endif - if (calendar_type /= "GREGORIAN") then - call time2sec(iyear-year_init,month,mday,time) - else - call time2sec(iyear-(year_init-1),month,mday,time) - endif - - time = time+start_tod - call shr_sys_flush(nu_diag) end if - call calendar(time) ! update calendar info + call calendar ! update calendar info if (write_ic) call accum_hist(dt) ! write initial conditions !--------------------------------------------------------------------------- @@ -527,7 +512,7 @@ subroutine ice_run_mct( EClock, cdata_i, x2i_i, i2x_i ) integer :: curr_tod ! Current time of day (s) integer :: shrlogunit,shrloglev ! old values integer :: lbnum - integer :: n, nyrp + integer :: n, myearp type(mct_gGrid) , pointer :: dom_i type(seq_infodata_type), pointer :: infodata type(mct_gsMap) , pointer :: gsMap_i @@ -580,9 +565,9 @@ subroutine ice_run_mct( EClock, cdata_i, x2i_i, i2x_i ) force_restart_now = seq_timemgr_RestartAlarmIsOn(EClock) ! if (calendar_type .eq. "GREGORIAN") then -! nyrp = nyr -! nyr = (curr_ymd/10000)+1 ! integer year of basedate -! if (nyr /= nyrp) then +! myearp = myear +! myear = (curr_ymd/10000)+1 ! integer year of basedate +! if (myear /= myearp) then ! new_year = .true. ! else ! new_year = .false. @@ -632,7 +617,7 @@ subroutine ice_run_mct( EClock, cdata_i, x2i_i, i2x_i ) ! 
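!-----------------------------------------------------------------------
! ice_init_mct above no longer routes the start date through time2sec;
! the coupler's YYYYMMDD integer is decomposed straight into the new
! calendar variables and calendar() is called with no argument. A
! minimal sketch of that decomposition, assuming idate = curr_ymd and
! start_tod in seconds, as in the hunk:

      myear  = idate/10000                       ! YYYY
      mmonth = (idate - myear*10000)/100         ! MM
      mday   = idate - myear*10000 - mmonth*100  ! DD
      msec   = start_tod                         ! seconds into the day
      call calendar                              ! update derived info
!-----------------------------------------------------------------------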
check that internal clock is in sync with master clock !-------------------------------------------------------------------- - tod = sec + tod = msec ymd = idate if (.not. seq_timemgr_EClockDateInSync( EClock, ymd, tod )) then call seq_timemgr_EClockGetData( EClock, curr_ymd=ymd_sync, & diff --git a/cicecore/drivers/mct/cesm1/ice_prescribed_mod.F90 b/cicecore/drivers/mct/cesm1/ice_prescribed_mod.F90 index 4debdfa55..e068a2892 100644 --- a/cicecore/drivers/mct/cesm1/ice_prescribed_mod.F90 +++ b/cicecore/drivers/mct/cesm1/ice_prescribed_mod.F90 @@ -42,7 +42,7 @@ module ice_prescribed_mod use ice_blocks, only : nx_block, ny_block, block, get_block use ice_domain, only : nblocks, distrb_info, blocks_ice use ice_grid, only : TLAT,TLON,hm,tmask - use ice_calendar, only : idate, sec, calendar_type + use ice_calendar, only : idate, calendar_type use ice_arrays_column, only : hin_max use ice_read_write use ice_exit, only: abort_ice diff --git a/cicecore/drivers/nuopc/cmeps/CICE_FinalMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_FinalMod.F90 index 943787498..0be3636f0 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_FinalMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_FinalMod.F90 @@ -45,12 +45,6 @@ subroutine CICE_Finalize !echmod if (nu_diag /= 6) close (nu_diag) ! diagnostic output call release_all_fileunits - !------------------------------------------------------------------- - ! write 'finished' file if needed - !------------------------------------------------------------------- - - if (runid == 'bering') call writeout_finished_file() - !------------------------------------------------------------------- ! quit MPI !------------------------------------------------------------------- @@ -60,31 +54,6 @@ subroutine CICE_Finalize end subroutine CICE_Finalize -!======================================================================= -! -! Write a file indicating that this run finished cleanly. This is -! needed only for runs on machine 'bering' (set using runid = 'bering'). -! -! author: Adrian Turner, LANL - - subroutine writeout_finished_file() - - use ice_restart_shared, only: restart_dir - - character(len=char_len_long) :: filename - character(len=*), parameter :: subname = '(writeout_finished_file)' - - if (my_task == master_task) then - - filename = trim(restart_dir)//"finished" - open(11,file=filename) - write(11,*) "finished" - close(11) - - endif - - end subroutine writeout_finished_file - !======================================================================= end module CICE_FinalMod diff --git a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 index b37d73f65..a57f8aef8 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 @@ -45,8 +45,9 @@ subroutine cice_init floe_binwidth, c_fsd_range use ice_state, only: alloc_state use ice_flux_bgc, only: alloc_flux_bgc - use ice_calendar, only: dt, dt_dyn, time, istep, istep1, write_ic, & - init_calendar, calendar + use ice_calendar, only: dt, dt_dyn, istep, istep1, write_ic, & + init_calendar, advance_timestep, calc_timesteps + use ice_calendar, only: calendar use ice_communicate, only: my_task, master_task use ice_diagnostics, only: init_diags use ice_domain, only: init_domain_blocks @@ -130,8 +131,7 @@ subroutine cice_init call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - - call calendar(time) ! determine the initial date + call calendar() ! 
determine the initial date ! TODO: - why is this being called when you are using CMEPS? call init_forcing_ocn(dt) ! initialize sss and sst from data @@ -192,7 +192,7 @@ subroutine init_restart use ice_arrays_column, only: dhsn use ice_blocks, only: nx_block, ny_block - use ice_calendar, only: time, calendar + use ice_calendar, only: calendar use ice_constants, only: c0 use ice_domain, only: nblocks use ice_domain_size, only: ncat, n_iso, n_aero, nfsd @@ -254,7 +254,7 @@ subroutine init_restart if (trim(runtype) == 'continue') then ! start from core restart file call restartfile() ! given by pointer in ice_in - call calendar(time) ! update time parameters + call calendar() ! update time parameters if (kdyn == 2) call read_restart_eap ! EAP else if (restart) then ! ice_ic = core restart file call restartfile (ice_ic) ! or 'default' or 'none' diff --git a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 index 644ef72fa..3daa7e192 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 @@ -44,7 +44,7 @@ module CICE_RunMod subroutine CICE_Run - use ice_calendar, only: istep, istep1, time, dt, stop_now, calendar + use ice_calendar, only: istep, istep1, dt, calendar, advance_timestep use ice_forcing, only: get_forcing_atmo, get_forcing_ocn, & get_wave_spec use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & @@ -77,18 +77,16 @@ subroutine CICE_Run ! timestep loop !-------------------------------------------------------------------- - istep = istep + 1 ! update time step counters - istep1 = istep1 + 1 - time = time + dt ! determine the time and date - call ice_timer_start(timer_couple) ! atm/ocn coupling + call advance_timestep() ! advance timestep and update calendar data + if (z_tracers) call get_atm_bgc ! biogeochemistry call init_flux_atm ! Initialize atmosphere fluxes sent to coupler call init_flux_ocn ! initialize ocean fluxes sent to coupler - call calendar(time) ! at the end of the timestep + call calendar() ! at the end of the timestep call ice_timer_stop(timer_couple) ! atm/ocn coupling @@ -113,7 +111,7 @@ subroutine ice_step use ice_boundary, only: ice_HaloUpdate use ice_calendar, only: dt, dt_dyn, ndtd, diagfreq, write_restart, istep - use ice_calendar, only: idate, sec + use ice_calendar, only: idate, msec use ice_diagnostics, only: init_mass_diags, runtime_diags use ice_diagnostics_bgc, only: hbrine_diags, zsal_diags, bgc_diags use ice_domain, only: halo_info, nblocks @@ -185,7 +183,7 @@ subroutine ice_step if (prescribed_ice) then ! 
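!-----------------------------------------------------------------------
! CICE_Run in the cmeps driver above collapses the hand-rolled
! bookkeeping (istep/istep1 increments, time = time + dt, then
! calendar(time)) into advance_timestep(). A minimal sketch of one
! coupling interval, in the order the hunk establishes:

      call ice_timer_start(timer_couple)  ! atm/ocn coupling
      call advance_timestep()             ! counters + calendar data
      if (z_tracers) call get_atm_bgc     ! biogeochemistry
      call init_flux_atm                  ! atm fluxes sent to coupler
      call init_flux_ocn                  ! ocean fluxes sent to coupler
      call calendar()                     ! at the end of the timestep
      call ice_timer_stop(timer_couple)   ! atm/ocn coupling
!-----------------------------------------------------------------------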
read prescribed ice call t_barrierf('cice_run_presc_BARRIER',MPI_COMM_ICE) call t_startf ('cice_run_presc') - call ice_prescribed_run(idate, sec) + call ice_prescribed_run(idate, msec) call t_stopf ('cice_run_presc') endif #endif diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index da3d95369..ebfc3d674 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -29,13 +29,13 @@ module ice_comp_nuopc use ice_grid , only : tlon, tlat, hm, tarea, ULON, ULAT use ice_communicate , only : init_communicate, my_task, master_task, mpi_comm_ice use ice_calendar , only : force_restart_now, write_ic - use ice_calendar , only : idate, mday, time, month, daycal, time2sec, year_init - use ice_calendar , only : sec, dt, calendar, calendar_type, nextsw_cday, istep + use ice_calendar , only : idate, mday, mmonth, year_init, timesecs + use ice_calendar , only : msec, dt, calendar, calendar_type, nextsw_cday, istep use ice_kinds_mod , only : dbl_kind, int_kind, char_len, char_len_long use ice_scam , only : scmlat, scmlon, single_column use ice_fileunits , only : nu_diag, nu_diag_set, inst_index, inst_name use ice_fileunits , only : inst_suffix, release_all_fileunits, flush_fileunit - use ice_restart_shared , only : runid, runtype, restart_dir, restart_file + use ice_restart_shared , only : runid, runtype, restart, use_restart_time, restart_dir, restart_file use ice_history , only : accum_hist use CICE_InitMod , only : cice_init use CICE_RunMod , only : cice_run @@ -395,7 +395,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) Tocnfrz_in = -34.0_dbl_kind*0.054_dbl_kind, & pi_in = SHR_CONST_PI, & snowpatch_in = 0.005_dbl_kind, & - dragio_in = 0.00962_dbl_kind) + dragio_in = 0.00536_dbl_kind) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -422,8 +422,12 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) runtype = "initial" else if (trim(starttype) == trim('continue') ) then runtype = "continue" + restart = .true. + use_restart_time = .true. else if (trim(starttype) == trim('branch')) then runtype = "continue" + restart = .true. + use_restart_time = .true. else call abort_ice( subname//' ERROR: unknown starttype' ) end if @@ -514,12 +518,14 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) call shr_file_setLogUnit (shrlogunit) - call NUOPC_CompAttributeGet(gcomp, name="diro", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) + call NUOPC_CompAttributeGet(gcomp, name="diro", value=cvalue, & + isPresent=isPresent, isSet=isSet, rc=rc) if (chkerr(rc,__LINE__,u_FILE_u)) return if (isPresent .and. isSet) then diag_filename = trim(cvalue) end if - call NUOPC_CompAttributeGet(gcomp, name="logfile", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) + call NUOPC_CompAttributeGet(gcomp, name="logfile", value=cvalue, & + isPresent=isPresent, isSet=isSet, rc=rc) if (chkerr(rc,__LINE__,u_FILE_u)) return if (isPresent .and. isSet) then diag_filename = trim(diag_filename) // '/' // trim(cvalue) @@ -600,14 +606,14 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) call abort_ice(subname//' :: ERROR idate lt zero') endif iyear = (idate/10000) ! integer year of basedate - month = (idate-iyear*10000)/100 ! integer month of basedate - mday = idate-iyear*10000-month*100 ! day of month of basedate + mmonth= (idate-iyear*10000)/100 ! 
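!-----------------------------------------------------------------------
! ice_comp_nuopc above maps the coupler starttype onto CICE restart
! controls: continue and branch starts now also force a restart read
! that takes its time from the restart file. A sketch merging the two
! identical branches of the hunk:

      if (trim(starttype) == trim('continue') .or. &
          trim(starttype) == trim('branch')) then
         runtype          = "continue"
         restart          = .true.
         use_restart_time = .true.
      endif
!-----------------------------------------------------------------------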
integer month of basedate + mday = idate-iyear*10000-mmonth*100 ! day of month of basedate if (my_task == master_task) then write(nu_diag,*) trim(subname),' curr_ymd = ',curr_ymd write(nu_diag,*) trim(subname),' cice year_init = ',year_init write(nu_diag,*) trim(subname),' cice start date = ',idate - write(nu_diag,*) trim(subname),' cice start ymds = ',iyear,month,mday,start_tod + write(nu_diag,*) trim(subname),' cice start ymds = ',iyear,mmonth,mday,start_tod write(nu_diag,*) trim(subname),' cice calendar_type = ',trim(calendar_type) endif @@ -615,15 +621,15 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) if (calendar_type == "GREGORIAN" .or. & calendar_type == "Gregorian" .or. & calendar_type == "gregorian") then - call time2sec(iyear-(year_init-1),month,mday,time) + call time2sec(iyear-(year_init-1),mmonth,mday,time) else - call time2sec(iyear-year_init,month,mday,time) + call time2sec(iyear-year_init,mmonth,mday,time) endif #endif - time = time+start_tod + timesecs = timesecs+start_tod end if - call calendar(time) ! update calendar info + call calendar() ! update calendar info if (write_ic) then call accum_hist(dt) ! write initial conditions end if @@ -878,7 +884,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) ! TODO (mvertens, 2018-12-21): fill in iceberg_prognostic as .false. if (debug_export > 0 .and. my_task==master_task) then call State_fldDebug(exportState, flds_scalar_name, 'cice_export:', & - idate, sec, nu_diag, rc=rc) + idate, msec, nu_diag, rc=rc) end if !-------------------------------- @@ -1019,7 +1025,7 @@ subroutine ModelAdvance(gcomp, rc) !-------------------------------- ! cice clock - tod = sec + tod = msec ymd = idate ! model clock @@ -1080,7 +1086,7 @@ subroutine ModelAdvance(gcomp, rc) ! write Debug output if (debug_import > 0 .and. my_task==master_task) then call State_fldDebug(importState, flds_scalar_name, 'cice_import:', & - idate, sec, nu_diag, rc=rc) + idate, msec, nu_diag, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end if if (dbug > 0) then @@ -1107,7 +1113,7 @@ subroutine ModelAdvance(gcomp, rc) ! write Debug output if (debug_export > 0 .and. my_task==master_task) then call State_fldDebug(exportState, flds_scalar_name, 'cice_export:', & - idate, sec, nu_diag, rc=rc) + idate, msec, nu_diag, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end if if (dbug > 0) then diff --git a/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 b/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 index 78ea39b4e..6eca4f2b4 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 @@ -47,7 +47,7 @@ end subroutine ice_prescribed_init use ice_blocks , only : nx_block, ny_block, block, get_block use ice_domain , only : nblocks, distrb_info, blocks_ice use ice_grid , only : TLAT, TLON, hm, tmask, tarea, grid_type, ocn_gridcell_frac - use ice_calendar , only : idate, sec, calendar_type + use ice_calendar , only : idate, calendar_type use ice_arrays_column , only : hin_max use ice_read_write use ice_exit , only: abort_ice diff --git a/cicecore/drivers/nuopc/dmi/CICE.F90 b/cicecore/drivers/nuopc/dmi/CICE.F90 index 2fd0c9f88..7056e0e5b 100644 --- a/cicecore/drivers/nuopc/dmi/CICE.F90 +++ b/cicecore/drivers/nuopc/dmi/CICE.F90 @@ -57,39 +57,3 @@ program icemodel end program icemodel !======================================================================= -! -! Wrapper for the print_state debugging routine. -! 
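!-----------------------------------------------------------------------
! The driver clock checks above now read the internal model date from
! (idate, msec) rather than the retired sec variable. A minimal sketch
! of the sync test against the coupler clock, assuming the CESM
! share-code routine imported elsewhere in these modules:

      tod = msec    ! time of day (s) from the ice calendar
      ymd = idate   ! model date as YYYYMMDD
      if (.not. seq_timemgr_EClockDateInSync(EClock, ymd, tod)) then
         ! clocks differ: query the sync clock and abort, as the
         ! surrounding hunks do
      endif
!-----------------------------------------------------------------------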
Useful for debugging in the main driver (see ice.F_debug) -! ip, jp, mtask are set in ice_diagnostics.F -! -! author Elizabeth C. Hunke, LANL -! - subroutine debug_ice(iblk, plabeld) - - use ice_kinds_mod - use ice_calendar, only: istep1 - use ice_communicate, only: my_task - use ice_diagnostics, only: check_step, iblkp, ip, jp, mtask, print_state - use ice_blocks, only: nx_block, ny_block - - character (char_len), intent(in) :: plabeld - integer (kind=int_kind), intent(in) :: iblk - - ! local - integer (kind=int_kind) :: i, j - character(len=*), parameter :: subname='(debug_ice)' - - if (istep1 >= check_step .and. & - iblk==iblkp .and. my_task==mtask) then - - do j = 1, ny_block - do i = 1, nx_block - if (i==ip .and. j==jp) call print_state(plabeld,i,j,iblk) - enddo - enddo - - endif - - end subroutine debug_ice - -!======================================================================= diff --git a/cicecore/drivers/nuopc/dmi/CICE_FinalMod.F90 b/cicecore/drivers/nuopc/dmi/CICE_FinalMod.F90 index 9e2681dbb..9f32875e1 100644 --- a/cicecore/drivers/nuopc/dmi/CICE_FinalMod.F90 +++ b/cicecore/drivers/nuopc/dmi/CICE_FinalMod.F90 @@ -35,9 +35,9 @@ subroutine CICE_Finalize character(len=*), parameter :: subname = '(CICE_Finalize)' - !------------------------------------------------------------------- - ! stop timers and print timer info - !------------------------------------------------------------------- + !------------------------------------------------------------------- + ! stop timers and print timer info + !------------------------------------------------------------------- call ice_timer_stop(timer_total) ! stop timing entire run call ice_timer_print_all(stats=.false.) ! print timing information @@ -55,15 +55,9 @@ subroutine CICE_Finalize !echmod if (nu_diag /= 6) close (nu_diag) ! diagnostic output call release_all_fileunits - !------------------------------------------------------------------- - ! write 'finished' file if needed - !------------------------------------------------------------------- - - if (runid == 'bering') call writeout_finished_file() - - !------------------------------------------------------------------- - ! quit MPI - !------------------------------------------------------------------- + !------------------------------------------------------------------- + ! quit MPI + !------------------------------------------------------------------- #ifndef coupled #ifndef CICE_DMI @@ -72,31 +66,6 @@ subroutine CICE_Finalize #endif end subroutine CICE_Finalize -!======================================================================= -! -! Write a file indicating that this run finished cleanly. This is -! needed only for runs on machine 'bering' (set using runid = 'bering'). -! -! 
author: Adrian Turner, LANL - - subroutine writeout_finished_file() - - use ice_restart_shared, only: restart_dir - - character(len=char_len_long) :: filename - character(len=*), parameter :: subname = '(writeout_finished_file)' - - if (my_task == master_task) then - - filename = trim(restart_dir)//"finished" - open(11,file=filename) - write(11,*) "finished" - close(11) - - endif - - end subroutine writeout_finished_file - !======================================================================= end module CICE_FinalMod diff --git a/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 b/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 index 70ef5f895..625348863 100644 --- a/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 @@ -46,9 +46,9 @@ subroutine CICE_Initialize(mpi_comm) integer (kind=int_kind), optional, intent(in) :: mpi_comm ! communicator from nuopc character(len=*), parameter :: subname='(CICE_Initialize)' - !-------------------------------------------------------------------- - ! model initialization - !-------------------------------------------------------------------- + !-------------------------------------------------------------------- + ! model initialization + !-------------------------------------------------------------------- if (present(mpi_comm)) then call cice_init(mpi_comm) @@ -69,14 +69,15 @@ subroutine cice_init(mpi_comm) floe_binwidth, c_fsd_range use ice_state, only: alloc_state use ice_flux_bgc, only: alloc_flux_bgc - use ice_calendar, only: dt, dt_dyn, time, istep, istep1, write_ic, & - init_calendar, calendar + use ice_calendar, only: dt, dt_dyn, istep, istep1, write_ic, & + init_calendar, advance_timestep, calc_timesteps use ice_communicate, only: init_communicate, my_task, master_task use ice_diagnostics, only: init_diags use ice_domain, only: init_domain_blocks use ice_domain_size, only: ncat, nfsd use ice_dyn_eap, only: init_eap, alloc_dyn_eap use ice_dyn_shared, only: kdyn, init_dyn, alloc_dyn_shared + use ice_dyn_vp, only: init_vp use ice_flux, only: init_coupler_flux, init_history_therm, & init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & @@ -87,7 +88,8 @@ subroutine cice_init(mpi_comm) use ice_history, only: init_hist, accum_hist use ice_restart_shared, only: restart, runtype use ice_init, only: input_data, init_state - use ice_init_column, only: init_thermo_vertical, init_shortwave, init_zbgc, input_zbgc, count_tracers + use ice_init_column, only: init_thermo_vertical, init_shortwave, & + init_zbgc, input_zbgc, count_tracers use ice_kinds_mod use ice_restoring, only: ice_HaloRestore_init use ice_timers, only: timer_total, init_ice_timers, ice_timer_start @@ -166,9 +168,6 @@ subroutine cice_init(mpi_comm) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) -#ifndef CICE_DMI - call calendar(time) ! determine the initial date -#endif #ifndef CICE_IN_NEMO call init_forcing_ocn(dt) ! initialize sss and sst from data #endif @@ -188,6 +187,7 @@ subroutine cice_init(mpi_comm) call init_diags ! initialize diagnostic output points call init_history_therm ! initialize thermo history variables call init_history_dyn ! initialize dynamic history variables + call calc_timesteps ! 
update timestep counter if not using npt_unit="1" call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_zaero_out=tr_zaero) call icepack_query_tracer_flags(tr_iso_out=tr_iso) @@ -204,10 +204,7 @@ subroutine cice_init(mpi_comm) if (trim(runtype) == 'continue' .or. restart) & call init_shortwave ! initialize radiative transfer - istep = istep + 1 ! update time step counters - istep1 = istep1 + 1 - time = time + dt ! determine the time and date - call calendar(time) ! at the end of the first timestep + call advance_timestep() !-------------------------------------------------------------------- ! coupler communication or forcing data initialization @@ -252,7 +249,7 @@ subroutine init_restart use ice_arrays_column, only: dhsn use ice_blocks, only: nx_block, ny_block - use ice_calendar, only: time, calendar + use ice_calendar, only: calendar use ice_constants, only: c0 use ice_domain, only: nblocks use ice_domain_size, only: ncat, n_iso, n_aero, nfsd @@ -314,7 +311,7 @@ subroutine init_restart if (trim(runtype) == 'continue') then ! start from core restart file call restartfile() ! given by pointer in ice_in - call calendar(time) ! update time parameters + call calendar() ! update time parameters if (kdyn == 2) call read_restart_eap ! EAP else if (restart) then ! ice_ic = core restart file call restartfile (ice_ic) ! or 'default' or 'none' diff --git a/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 b/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 index df8fe4978..cfd519146 100644 --- a/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 @@ -43,7 +43,7 @@ module CICE_RunMod subroutine CICE_Run - use ice_calendar, only: istep, istep1, time, dt, stop_now, calendar + use ice_calendar, only: istep, istep1, dt, stop_now, advance_timestep use ice_forcing, only: get_forcing_atmo, get_forcing_ocn, & get_wave_spec use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & @@ -73,20 +73,16 @@ subroutine CICE_Run file=__FILE__, line=__LINE__) #ifndef CICE_IN_NEMO - !-------------------------------------------------------------------- - ! timestep loop - !-------------------------------------------------------------------- + !-------------------------------------------------------------------- + ! timestep loop + !-------------------------------------------------------------------- #ifndef CICE_DMI - timeLoop: do + timeLoop: do #endif #endif call ice_step - istep = istep + 1 ! update time step counters - istep1 = istep1 + 1 - time = time + dt ! determine the time and date - - call calendar(time) ! at the end of the timestep + call advance_timestep() ! 
advance time #ifndef CICE_IN_NEMO #ifndef CICE_DMI @@ -361,8 +357,8 @@ subroutine coupling_prep (iblk) albpnd, albcnt, apeff_ai, fpond, fresh, l_mpond_fresh, & alvdf_ai, alidf_ai, alvdr_ai, alidr_ai, fhocn_ai, & fresh_ai, fsalt_ai, fsalt, & - fswthru_ai, fhocn, fswthru, scale_factor, snowfrac, & - fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & + fswthru_ai, fhocn, scale_factor, snowfrac, & + fswthru, fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & fsens, flat, fswabs, flwout, evap, Tref, Qref, & scale_fluxes, frzmlt_init, frzmlt @@ -556,11 +552,12 @@ subroutine coupling_prep (iblk) evap (:,:,iblk), & Tref (:,:,iblk), Qref (:,:,iblk), & fresh (:,:,iblk), fsalt (:,:,iblk), & - fhocn (:,:,iblk), fswthru (:,:,iblk), & - fswthru_vdr(:,:,iblk), & - fswthru_vdf(:,:,iblk), & - fswthru_idr(:,:,iblk), & - fswthru_idf(:,:,iblk), & + fhocn (:,:,iblk), & + fswthru (:,:,iblk), & + fswthru_vdr (:,:,iblk), & + fswthru_vdf (:,:,iblk), & + fswthru_idr (:,:,iblk), & + fswthru_idf (:,:,iblk), & faero_ocn(:,:,:,iblk), & alvdr (:,:,iblk), alidr (:,:,iblk), & alvdf (:,:,iblk), alidf (:,:,iblk), & @@ -635,11 +632,12 @@ subroutine sfcflux_to_ocn(nx_block, ny_block, & real (kind=dbl_kind) :: & puny, & ! + Lsub, & ! rLsub ! 1/Lsub character(len=*), parameter :: subname = '(sfcflux_to_ocn)' - call icepack_query_parameters(puny_out=puny) + call icepack_query_parameters(puny_out=puny, Lsub_out=Lsub) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) diff --git a/cicecore/drivers/standalone/cice/CICE.F90 b/cicecore/drivers/standalone/cice/CICE.F90 index 2fd0c9f88..7056e0e5b 100644 --- a/cicecore/drivers/standalone/cice/CICE.F90 +++ b/cicecore/drivers/standalone/cice/CICE.F90 @@ -57,39 +57,3 @@ program icemodel end program icemodel !======================================================================= -! -! Wrapper for the print_state debugging routine. -! Useful for debugging in the main driver (see ice.F_debug) -! ip, jp, mtask are set in ice_diagnostics.F -! -! author Elizabeth C. Hunke, LANL -! - subroutine debug_ice(iblk, plabeld) - - use ice_kinds_mod - use ice_calendar, only: istep1 - use ice_communicate, only: my_task - use ice_diagnostics, only: check_step, iblkp, ip, jp, mtask, print_state - use ice_blocks, only: nx_block, ny_block - - character (char_len), intent(in) :: plabeld - integer (kind=int_kind), intent(in) :: iblk - - ! local - integer (kind=int_kind) :: i, j - character(len=*), parameter :: subname='(debug_ice)' - - if (istep1 >= check_step .and. & - iblk==iblkp .and. my_task==mtask) then - - do j = 1, ny_block - do i = 1, nx_block - if (i==ip .and. j==jp) call print_state(plabeld,i,j,iblk) - enddo - enddo - - endif - - end subroutine debug_ice - -!======================================================================= diff --git a/cicecore/drivers/standalone/cice/CICE_FinalMod.F90 b/cicecore/drivers/standalone/cice/CICE_FinalMod.F90 index dd0ca0b20..a59c210aa 100644 --- a/cicecore/drivers/standalone/cice/CICE_FinalMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_FinalMod.F90 @@ -55,12 +55,6 @@ subroutine CICE_Finalize !echmod if (nu_diag /= 6) close (nu_diag) ! diagnostic output call release_all_fileunits - !------------------------------------------------------------------- - ! 
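!-----------------------------------------------------------------------
! sfcflux_to_ocn above stops assuming a host-side latent heat constant
! and queries Lsub from Icepack alongside puny. A minimal sketch of the
! query plus the reciprocal that the "1/Lsub" comment refers to,
! assuming c1 from ice_constants as used throughout these drivers:

      real (kind=dbl_kind) :: puny, Lsub, rLsub

      call icepack_query_parameters(puny_out=puny, Lsub_out=Lsub)
      call icepack_warnings_flush(nu_diag)
      if (icepack_warnings_aborted()) call abort_ice(error_message=subname, &
         file=__FILE__, line=__LINE__)
      rLsub = c1 / Lsub   ! reciprocal latent heat of sublimation
!-----------------------------------------------------------------------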
write 'finished' file if needed - !------------------------------------------------------------------- - - if (runid == 'bering') call writeout_finished_file() - !------------------------------------------------------------------- ! quit MPI !------------------------------------------------------------------- @@ -69,31 +63,6 @@ subroutine CICE_Finalize end subroutine CICE_Finalize -!======================================================================= -! -! Write a file indicating that this run finished cleanly. This is -! needed only for runs on machine 'bering' (set using runid = 'bering'). -! -! author: Adrian Turner, LANL - - subroutine writeout_finished_file() - - use ice_restart_shared, only: restart_dir - - character(len=char_len_long) :: filename - character(len=*), parameter :: subname = '(writeout_finished_file)' - - if (my_task == master_task) then - - filename = trim(restart_dir)//"finished" - open(11,file=filename) - write(11,*) "finished" - close(11) - - endif - - end subroutine writeout_finished_file - !======================================================================= end module CICE_FinalMod diff --git a/cicecore/drivers/standalone/cice/CICE_InitMod.F90 b/cicecore/drivers/standalone/cice/CICE_InitMod.F90 index 8b507740d..60f71fa8a 100644 --- a/cicecore/drivers/standalone/cice/CICE_InitMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_InitMod.F90 @@ -64,8 +64,8 @@ subroutine cice_init floe_binwidth, c_fsd_range use ice_state, only: alloc_state use ice_flux_bgc, only: alloc_flux_bgc - use ice_calendar, only: dt, dt_dyn, time, istep, istep1, write_ic, & - init_calendar, calendar + use ice_calendar, only: dt, dt_dyn, istep, istep1, write_ic, & + init_calendar, advance_timestep, calc_timesteps use ice_communicate, only: init_communicate, my_task, master_task use ice_diagnostics, only: init_diags use ice_domain, only: init_domain_blocks @@ -156,8 +156,6 @@ subroutine cice_init if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) -! call calendar(time) ! determine the initial date - call init_forcing_ocn(dt) ! initialize sss and sst from data call init_state ! initialize the ice state call init_transport ! initialize horizontal transport @@ -175,6 +173,7 @@ subroutine cice_init call init_diags ! initialize diagnostic output points call init_history_therm ! initialize thermo history variables call init_history_dyn ! initialize dynamic history variables + call calc_timesteps ! update timestep counter if not using npt_unit="1" call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_zaero_out=tr_zaero) call icepack_query_tracer_flags(tr_iso_out=tr_iso) @@ -191,10 +190,12 @@ subroutine cice_init if (trim(runtype) == 'continue' .or. restart) & call init_shortwave ! initialize radiative transfer - istep = istep + 1 ! update time step counters - istep1 = istep1 + 1 - time = time + dt ! determine the time and date - call calendar(time) ! at the end of the first timestep +! tcraig, use advance_timestep here +! istep = istep + 1 ! update time step counters +! istep1 = istep1 + 1 +! time = time + dt ! determine the time and date +! call calendar(time) ! at the end of the first timestep + call advance_timestep() !-------------------------------------------------------------------- ! 
coupler communication or forcing data initialization @@ -231,7 +232,7 @@ subroutine init_restart use ice_arrays_column, only: dhsn use ice_blocks, only: nx_block, ny_block - use ice_calendar, only: time, calendar + use ice_calendar, only: calendar use ice_constants, only: c0 use ice_domain, only: nblocks use ice_domain_size, only: ncat, n_iso, n_aero, nfsd @@ -293,7 +294,7 @@ subroutine init_restart if (trim(runtype) == 'continue') then ! start from core restart file call restartfile() ! given by pointer in ice_in - call calendar(time) ! update time parameters + call calendar() ! update time parameters if (kdyn == 2) call read_restart_eap ! EAP else if (restart) then ! ice_ic = core restart file call restartfile (ice_ic) ! or 'default' or 'none' diff --git a/cicecore/drivers/standalone/cice/CICE_RunMod.F90 b/cicecore/drivers/standalone/cice/CICE_RunMod.F90 index 9f6f42f28..08059435f 100644 --- a/cicecore/drivers/standalone/cice/CICE_RunMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_RunMod.F90 @@ -43,7 +43,7 @@ module CICE_RunMod subroutine CICE_Run - use ice_calendar, only: istep, istep1, time, dt, stop_now, calendar + use ice_calendar, only: istep, istep1, dt, stop_now, advance_timestep use ice_forcing, only: get_forcing_atmo, get_forcing_ocn, & get_wave_spec use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & @@ -82,11 +82,12 @@ subroutine CICE_Run call ice_step - istep = istep + 1 ! update time step counters - istep1 = istep1 + 1 - time = time + dt ! determine the time and date - - call calendar(time) ! at the end of the timestep +! tcraig, use advance_timestep now +! istep = istep + 1 ! update time step counters +! istep1 = istep1 + 1 +! time = time + dt ! determine the time and date +! call calendar(time) ! at the end of the timestep + call advance_timestep() ! advance time #ifndef CICE_IN_NEMO if (stop_now >= 1) exit timeLoop @@ -137,7 +138,7 @@ subroutine ice_step use ice_boundary, only: ice_HaloUpdate use ice_calendar, only: dt, dt_dyn, ndtd, diagfreq, write_restart, istep - use ice_diagnostics, only: init_mass_diags, runtime_diags + use ice_diagnostics, only: init_mass_diags, runtime_diags, debug_model, debug_ice use ice_diagnostics_bgc, only: hbrine_diags, zsal_diags, bgc_diags use ice_domain, only: halo_info, nblocks use ice_dyn_eap, only: write_restart_eap @@ -175,6 +176,15 @@ subroutine ice_step character(len=*), parameter :: subname = '(ice_step)' + character (len=char_len) :: plabeld + + if (debug_model) then + plabeld = 'beginning time step' + do iblk = 1, nblocks + call debug_ice (iblk, plabeld) + enddo + endif + call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc, skl_bgc_out=skl_bgc, & solve_zsal_out=solve_zsal, z_tracers_out=z_tracers, ktherm_out=ktherm, & wave_spec_out=wave_spec) @@ -218,14 +228,36 @@ subroutine ice_step if (calc_Tsfc) call prep_radiation (iblk) + if (debug_model) then + plabeld = 'post prep_radiation' + call debug_ice (iblk, plabeld) + endif + !----------------------------------------------------------------- ! thermodynamics and biogeochemistry !----------------------------------------------------------------- call step_therm1 (dt, iblk) ! vertical thermodynamics + + if (debug_model) then + plabeld = 'post step_therm1' + call debug_ice (iblk, plabeld) + endif + call biogeochemistry (dt, iblk) ! biogeochemistry + + if (debug_model) then + plabeld = 'post biogeochemistry' + call debug_ice (iblk, plabeld) + endif + call step_therm2 (dt, iblk) ! 
ice thickness distribution thermo + if (debug_model) then + plabeld = 'post step_therm2' + call debug_ice (iblk, plabeld) + endif + endif ! ktherm > 0 enddo ! iblk @@ -251,6 +283,13 @@ subroutine ice_step ! momentum, stress, transport call step_dyn_horiz (dt_dyn) + if (debug_model) then + plabeld = 'post step_dyn_horiz' + do iblk = 1, nblocks + call debug_ice (iblk, plabeld) + enddo ! iblk + endif + ! ridging !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks @@ -258,12 +297,26 @@ subroutine ice_step enddo !$OMP END PARALLEL DO + if (debug_model) then + plabeld = 'post step_dyn_ridge' + do iblk = 1, nblocks + call debug_ice (iblk, plabeld) + enddo ! iblk + endif + ! clean up, update tendency diagnostics offset = c0 call update_state (dt_dyn, daidtd, dvidtd, dagedtd, offset) enddo + if (debug_model) then + plabeld = 'post dynamics' + do iblk = 1, nblocks + call debug_ice (iblk, plabeld) + enddo + endif + !----------------------------------------------------------------- ! albedo, shortwave radiation !----------------------------------------------------------------- @@ -277,12 +330,22 @@ subroutine ice_step if (ktherm >= 0) call step_radiation (dt, iblk) + if (debug_model) then + plabeld = 'post step_radiation' + call debug_ice (iblk, plabeld) + endif + !----------------------------------------------------------------- ! get ready for coupling and the next time step !----------------------------------------------------------------- call coupling_prep (iblk) + if (debug_model) then + plabeld = 'post coupling_prep' + call debug_ice (iblk, plabeld) + endif + enddo ! iblk !$OMP END PARALLEL DO @@ -627,11 +690,12 @@ subroutine sfcflux_to_ocn(nx_block, ny_block, & real (kind=dbl_kind) :: & puny, & ! + Lsub, & ! rLsub ! 1/Lsub character(len=*), parameter :: subname = '(sfcflux_to_ocn)' - call icepack_query_parameters(puny_out=puny) + call icepack_query_parameters(puny_out=puny, Lsub_out=Lsub) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) diff --git a/cicecore/drivers/standalone/cice/CICE_RunMod.F90_debug b/cicecore/drivers/standalone/cice/CICE_RunMod.F90_debug deleted file mode 100644 index 5f7eebe31..000000000 --- a/cicecore/drivers/standalone/cice/CICE_RunMod.F90_debug +++ /dev/null @@ -1,704 +0,0 @@ -!======================================================================= -! -! Main driver for time stepping of CICE. -! -! authors Elizabeth C. Hunke, LANL -! Philip W. Jones, LANL -! William H. Lipscomb, LANL -! -! 2006 ECH: moved exit timeLoop to prevent execution of unnecessary timestep -! 2006 ECH: Streamlined for efficiency -! 2006 ECH: Converted to free source form (F90) -! 2007 BPB: Modified Delta-Eddington shortwave interface -! 
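!-----------------------------------------------------------------------
! The ice_step hunks above retire the separate CICE_RunMod.F90_debug
! variant (its deletion continues below) by bracketing each physics
! stage with an optional point-debugging hook. A minimal sketch of the
! pattern, assuming debug_model and debug_ice now come from
! ice_diagnostics as the new use statement shows:

      use ice_diagnostics, only: debug_model, debug_ice
      character (len=char_len) :: plabeld

      if (debug_model) then
         plabeld = 'post step_therm1'    ! label echoed with the state
         call debug_ice (iblk, plabeld)  ! print state at the debug point
      endif
!-----------------------------------------------------------------------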
2008 ECH: moved ESMF code to its own driver - - module CICE_RunMod - - use ice_kinds_mod - use ice_fileunits, only: nu_diag - use ice_arrays_column, only: oceanmixed_ice - use ice_constants, only: c0, c1 - use ice_constants, only: field_loc_center, field_type_scalar - use ice_exit, only: abort_ice - use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted - use icepack_intfc, only: icepack_max_iso, icepack_max_aero - use icepack_intfc, only: icepack_query_parameters - use icepack_intfc, only: icepack_query_tracer_flags, icepack_query_tracer_sizes - - implicit none - private - public :: CICE_Run, ice_step - -!======================================================================= - - contains - -!======================================================================= -! -! This is the main driver routine for advancing CICE forward in time. -! -! author Elizabeth C. Hunke, LANL -! Philip W. Jones, LANL -! William H. Lipscomb, LANL - - subroutine CICE_Run - - use ice_calendar, only: istep, istep1, time, dt, stop_now, calendar - use ice_forcing, only: get_forcing_atmo, get_forcing_ocn, & - get_wave_spec - use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & - fiso_default, faero_default - use ice_flux, only: init_flux_atm, init_flux_ocn - use ice_timers, only: ice_timer_start, ice_timer_stop, & - timer_couple, timer_step - logical (kind=log_kind) :: & - tr_iso, tr_aero, tr_zaero, skl_bgc, z_tracers, wave_spec, tr_fsd - character(len=*), parameter :: subname = '(CICE_Run)' - - !-------------------------------------------------------------------- - ! initialize error code and step timer - !-------------------------------------------------------------------- - - call ice_timer_start(timer_step) ! start timing entire run - - call icepack_query_parameters(skl_bgc_out=skl_bgc, & - z_tracers_out=z_tracers, & - wave_spec_out=wave_spec) - call icepack_query_tracer_flags(tr_iso_out=tr_iso, & - tr_aero_out=tr_aero, & - tr_zaero_out=tr_zaero, & - tr_fsd_out=tr_fsd) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - -#ifndef CICE_IN_NEMO - !-------------------------------------------------------------------- - ! timestep loop - !-------------------------------------------------------------------- - - timeLoop: do -#endif - - call ice_step - - istep = istep + 1 ! update time step counters - istep1 = istep1 + 1 - time = time + dt ! determine the time and date - - call calendar(time) ! at the end of the timestep - -#ifndef CICE_IN_NEMO - if (stop_now >= 1) exit timeLoop -#endif - - call ice_timer_start(timer_couple) ! atm/ocn coupling - -! for now, wave_spectrum is constant in time -! if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice - call get_forcing_atmo ! atmospheric forcing from data - call get_forcing_ocn(dt) ! ocean forcing from data - - ! isotopes - if (tr_iso) call fiso_default ! default values - ! aerosols - ! if (tr_aero) call faero_data ! data file - ! if (tr_zaero) call fzaero_data ! data file (gx1) - if (tr_aero .or. tr_zaero) call faero_default ! default values - - if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry - if (z_tracers) call get_atm_bgc ! biogeochemistry - - call init_flux_atm ! Initialize atmosphere fluxes sent to coupler - call init_flux_ocn ! initialize ocean fluxes sent to coupler - - call ice_timer_stop(timer_couple) ! 
atm/ocn coupling - -#ifndef CICE_IN_NEMO - enddo timeLoop -#endif - - !-------------------------------------------------------------------- - ! end of timestep loop - !-------------------------------------------------------------------- - - call ice_timer_stop(timer_step) ! end timestepping loop timer - - end subroutine CICE_Run - -!======================================================================= -! -! Calls drivers for physics components, some initialization, and output -! -! author Elizabeth C. Hunke, LANL -! William H. Lipscomb, LANL - - subroutine ice_step - - use ice_boundary, only: ice_HaloUpdate - use ice_calendar, only: dt, dt_dyn, ndtd, diagfreq, write_restart, istep - use ice_diagnostics, only: init_mass_diags, runtime_diags - use ice_diagnostics_bgc, only: hbrine_diags, zsal_diags, bgc_diags - use ice_domain, only: halo_info, nblocks - use ice_dyn_eap, only: write_restart_eap - use ice_dyn_shared, only: kdyn, kridge - use ice_flux, only: scale_factor, init_history_therm, & - daidtt, daidtd, dvidtt, dvidtd, dagedtt, dagedtd - use ice_history, only: accum_hist - use ice_history_bgc, only: init_history_bgc - use ice_restart, only: final_restart - use ice_restart_column, only: write_restart_age, write_restart_FY, & - write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & - write_restart_pond_topo, write_restart_aero, write_restart_fsd, & - write_restart_iso, write_restart_bgc, write_restart_hbrine - use ice_restart_driver, only: dumpfile - use ice_restoring, only: restore_ice, ice_HaloRestore - use ice_step_mod, only: prep_radiation, step_therm1, step_therm2, & - update_state, step_dyn_horiz, step_dyn_ridge, step_radiation, & - biogeochemistry, save_init, step_dyn_wave - use ice_timers, only: ice_timer_start, ice_timer_stop, & - timer_diags, timer_column, timer_thermo, timer_bound, & - timer_hist, timer_readwrite - - integer (kind=int_kind) :: & - iblk , & ! block index - k , & ! dynamics supercycling index - ktherm ! thermodynamics is off when ktherm = -1 - - real (kind=dbl_kind) :: & - offset ! d(age)/dt time offset - - logical (kind=log_kind) :: & - tr_iage, tr_FY, tr_lvl, tr_fsd, & - tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & - calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec - - character(len=*), parameter :: subname = '(ice_step)' - - character (len=char_len) :: plabeld - - plabeld = 'beginning time step' - do iblk = 1, nblocks - call debug_ice (iblk, plabeld) - enddo - - call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc, skl_bgc_out=skl_bgc, & - solve_zsal_out=solve_zsal, z_tracers_out=z_tracers, ktherm_out=ktherm, & - wave_spec_out=wave_spec) - call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & - tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & - tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & - tr_iso_out=tr_iso, tr_fsd_out=tr_fsd) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - - !----------------------------------------------------------------- - ! restoring on grid boundaries - !----------------------------------------------------------------- - - if (restore_ice) call ice_HaloRestore - - !----------------------------------------------------------------- - ! initialize diagnostics and save initial state values - !----------------------------------------------------------------- - - call ice_timer_start(timer_diags) ! 
diagnostics/history - call init_mass_diags ! diagnostics per timestep - call init_history_therm - call init_history_bgc - call ice_timer_stop(timer_diags) ! diagnostics/history - - call ice_timer_start(timer_column) ! column physics - call ice_timer_start(timer_thermo) ! thermodynamics - - call save_init - - !$OMP PARALLEL DO PRIVATE(iblk) - do iblk = 1, nblocks - - if (ktherm >= 0) then - - !----------------------------------------------------------------- - ! scale radiation fields - !----------------------------------------------------------------- - - if (calc_Tsfc) call prep_radiation (iblk) - - plabeld = 'post prep_radiation' - call debug_ice (iblk, plabeld) - - !----------------------------------------------------------------- - ! thermodynamics and biogeochemistry - !----------------------------------------------------------------- - - call step_therm1 (dt, iblk) ! vertical thermodynamics - - plabeld = 'post step_therm1' - call debug_ice (iblk, plabeld) - - call biogeochemistry (dt, iblk) ! biogeochemistry - - plabeld = 'post biogeochemistry' - call debug_ice (iblk, plabeld) - - call step_therm2 (dt, iblk) ! ice thickness distribution thermo - - plabeld = 'post step_therm2' - call debug_ice (iblk, plabeld) - - endif ! ktherm > 0 - - enddo ! iblk - !$OMP END PARALLEL DO - - ! clean up, update tendency diagnostics - offset = dt - call update_state (dt, daidtt, dvidtt, dagedtt, offset) - - call ice_timer_stop(timer_thermo) ! thermodynamics - call ice_timer_stop(timer_column) ! column physics - - !----------------------------------------------------------------- - ! dynamics, transport, ridging - !----------------------------------------------------------------- - - ! wave fracture of the floe size distribution - ! note this is called outside of the dynamics subcycling loop - if (tr_fsd .and. wave_spec) call step_dyn_wave(dt) - - do k = 1, ndtd - - ! momentum, stress, transport - call step_dyn_horiz (dt_dyn) - - do iblk = 1, nblocks - plabeld = 'post step_dyn_horiz' - call debug_ice (iblk, plabeld) - enddo ! iblk - - ! ridging - !$OMP PARALLEL DO PRIVATE(iblk) - do iblk = 1, nblocks - if (kridge > 0) call step_dyn_ridge (dt_dyn, ndtd, iblk) - enddo - !$OMP END PARALLEL DO - - do iblk = 1, nblocks - plabeld = 'post step_dyn_ridge' - call debug_ice (iblk, plabeld) - enddo ! iblk - - ! clean up, update tendency diagnostics - offset = c0 - call update_state (dt_dyn, daidtd, dvidtd, dagedtd, offset) - - enddo - - plabeld = 'post dynamics' - do iblk = 1, nblocks - call debug_ice (iblk, plabeld) - enddo - - !----------------------------------------------------------------- - ! albedo, shortwave radiation - !----------------------------------------------------------------- - - call ice_timer_start(timer_column) ! column physics - call ice_timer_start(timer_thermo) ! thermodynamics - -!MHRI: CHECK THIS OMP - !$OMP PARALLEL DO PRIVATE(iblk) - do iblk = 1, nblocks - - if (ktherm >= 0) call step_radiation (dt, iblk) - - plabeld = 'post step_radiation' - call debug_ice (iblk, plabeld) - - !----------------------------------------------------------------- - ! get ready for coupling and the next time step - !----------------------------------------------------------------- - - call coupling_prep (iblk) - - plabeld = 'post coupling_prep' - call debug_ice (iblk, plabeld) - - enddo ! 
iblk - !$OMP END PARALLEL DO - - call ice_timer_start(timer_bound) - call ice_HaloUpdate (scale_factor, halo_info, & - field_loc_center, field_type_scalar) - call ice_timer_stop(timer_bound) - - call ice_timer_stop(timer_thermo) ! thermodynamics - call ice_timer_stop(timer_column) ! column physics - - !----------------------------------------------------------------- - ! write data - !----------------------------------------------------------------- - - call ice_timer_start(timer_diags) ! diagnostics - if (mod(istep,diagfreq) == 0) then - call runtime_diags(dt) ! log file - if (solve_zsal) call zsal_diags - if (skl_bgc .or. z_tracers) call bgc_diags - if (tr_brine) call hbrine_diags - endif - call ice_timer_stop(timer_diags) ! diagnostics - - call ice_timer_start(timer_hist) ! history - call accum_hist (dt) ! history file - call ice_timer_stop(timer_hist) ! history - - call ice_timer_start(timer_readwrite) ! reading/writing - if (write_restart == 1) then - call dumpfile ! core variables for restarting - if (tr_iage) call write_restart_age - if (tr_FY) call write_restart_FY - if (tr_lvl) call write_restart_lvl - if (tr_pond_cesm) call write_restart_pond_cesm - if (tr_pond_lvl) call write_restart_pond_lvl - if (tr_pond_topo) call write_restart_pond_topo - if (tr_fsd) call write_restart_fsd - if (tr_iso) call write_restart_iso - if (tr_aero) call write_restart_aero - if (solve_zsal .or. skl_bgc .or. z_tracers) & - call write_restart_bgc - if (tr_brine) call write_restart_hbrine - if (kdyn == 2) call write_restart_eap - call final_restart - endif - - call ice_timer_stop(timer_readwrite) ! reading/writing - - end subroutine ice_step - -!======================================================================= -! -! Prepare for coupling -! -! authors: Elizabeth C. Hunke, LANL - - subroutine coupling_prep (iblk) - - use ice_arrays_column, only: alvdfn, alidfn, alvdrn, alidrn, & - albicen, albsnon, albpndn, apeffn, fzsal_g, fzsal, snowfracn - use ice_blocks, only: nx_block, ny_block, get_block, block - use ice_domain, only: blocks_ice - use ice_calendar, only: dt, nstreams - use ice_domain_size, only: ncat - use ice_flux, only: alvdf, alidf, alvdr, alidr, albice, albsno, & - albpnd, albcnt, apeff_ai, fpond, fresh, l_mpond_fresh, & - alvdf_ai, alidf_ai, alvdr_ai, alidr_ai, fhocn_ai, & - fresh_ai, fsalt_ai, fsalt, & - fswthru_ai, fhocn, scale_factor, snowfrac, & - fswthru, fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & - swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & - fsens, flat, fswabs, flwout, evap, Tref, Qref, & - scale_fluxes, frzmlt_init, frzmlt - use ice_flux_bgc, only: faero_ocn, fiso_ocn, Qref_iso, fiso_evap, & - fzsal_ai, fzsal_g_ai, flux_bio, flux_bio_ai - use ice_grid, only: tmask - use ice_state, only: aicen, aice -#ifdef CICE_IN_NEMO - use ice_state, only: aice_init - use ice_flux, only: flatn_f, fsurfn_f -#endif - use ice_step_mod, only: ocean_mixed_layer - use ice_timers, only: timer_couple, ice_timer_start, ice_timer_stop - - integer (kind=int_kind), intent(in) :: & - iblk ! block index - - ! local variables - - integer (kind=int_kind) :: & - ilo,ihi,jlo,jhi, & ! beginning and end of physical domain - n , & ! thickness category index - i,j , & ! horizontal indices - k , & ! tracer index - nbtrcr ! - - type (block) :: & - this_block ! block information for current block - - logical (kind=log_kind) :: & - calc_Tsfc ! - - real (kind=dbl_kind) :: & - cszn , & ! counter for history averaging - puny , & ! - rhofresh , & ! - netsw ! 
flag for shortwave radiation presence - - character(len=*), parameter :: subname = '(coupling_prep)' - - call icepack_query_parameters(puny_out=puny, rhofresh_out=rhofresh) - call icepack_query_tracer_sizes(nbtrcr_out=nbtrcr) - call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - - !----------------------------------------------------------------- - ! Save current value of frzmlt for diagnostics. - ! Update mixed layer with heat and radiation from ice. - !----------------------------------------------------------------- - - do j = 1, ny_block - do i = 1, nx_block - frzmlt_init (i,j,iblk) = frzmlt(i,j,iblk) - enddo - enddo - - call ice_timer_start(timer_couple) ! atm/ocn coupling - - if (oceanmixed_ice) & - call ocean_mixed_layer (dt,iblk) ! ocean surface fluxes and sst - - !----------------------------------------------------------------- - ! Aggregate albedos - !----------------------------------------------------------------- - - do j = 1, ny_block - do i = 1, nx_block - alvdf(i,j,iblk) = c0 - alidf(i,j,iblk) = c0 - alvdr(i,j,iblk) = c0 - alidr(i,j,iblk) = c0 - - albice(i,j,iblk) = c0 - albsno(i,j,iblk) = c0 - albpnd(i,j,iblk) = c0 - apeff_ai(i,j,iblk) = c0 - snowfrac(i,j,iblk) = c0 - - ! for history averaging - cszn = c0 - netsw = swvdr(i,j,iblk)+swidr(i,j,iblk)+swvdf(i,j,iblk)+swidf(i,j,iblk) - if (netsw > puny) cszn = c1 - do n = 1, nstreams - albcnt(i,j,iblk,n) = albcnt(i,j,iblk,n) + cszn - enddo - enddo - enddo - - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi - - do n = 1, ncat - do j = jlo, jhi - do i = ilo, ihi - if (aicen(i,j,n,iblk) > puny) then - - alvdf(i,j,iblk) = alvdf(i,j,iblk) & - + alvdfn(i,j,n,iblk)*aicen(i,j,n,iblk) - alidf(i,j,iblk) = alidf(i,j,iblk) & - + alidfn(i,j,n,iblk)*aicen(i,j,n,iblk) - alvdr(i,j,iblk) = alvdr(i,j,iblk) & - + alvdrn(i,j,n,iblk)*aicen(i,j,n,iblk) - alidr(i,j,iblk) = alidr(i,j,iblk) & - + alidrn(i,j,n,iblk)*aicen(i,j,n,iblk) - - netsw = swvdr(i,j,iblk) + swidr(i,j,iblk) & - + swvdf(i,j,iblk) + swidf(i,j,iblk) - if (netsw > puny) then ! sun above horizon - albice(i,j,iblk) = albice(i,j,iblk) & - + albicen(i,j,n,iblk)*aicen(i,j,n,iblk) - albsno(i,j,iblk) = albsno(i,j,iblk) & - + albsnon(i,j,n,iblk)*aicen(i,j,n,iblk) - albpnd(i,j,iblk) = albpnd(i,j,iblk) & - + albpndn(i,j,n,iblk)*aicen(i,j,n,iblk) - endif - - apeff_ai(i,j,iblk) = apeff_ai(i,j,iblk) & ! for history - + apeffn(i,j,n,iblk)*aicen(i,j,n,iblk) - snowfrac(i,j,iblk) = snowfrac(i,j,iblk) & ! for history - + snowfracn(i,j,n,iblk)*aicen(i,j,n,iblk) - - endif ! aicen > puny - enddo - enddo - enddo - - do j = 1, ny_block - do i = 1, nx_block - - !----------------------------------------------------------------- - ! reduce fresh by fpond for coupling - !----------------------------------------------------------------- - - if (l_mpond_fresh) then - fpond(i,j,iblk) = fpond(i,j,iblk) * rhofresh/dt - fresh(i,j,iblk) = fresh(i,j,iblk) - fpond(i,j,iblk) - endif - - !---------------------------------------------------------------- - ! 
Store grid box mean albedos and fluxes before scaling by aice - !---------------------------------------------------------------- - - alvdf_ai (i,j,iblk) = alvdf (i,j,iblk) - alidf_ai (i,j,iblk) = alidf (i,j,iblk) - alvdr_ai (i,j,iblk) = alvdr (i,j,iblk) - alidr_ai (i,j,iblk) = alidr (i,j,iblk) - fresh_ai (i,j,iblk) = fresh (i,j,iblk) - fsalt_ai (i,j,iblk) = fsalt (i,j,iblk) - fhocn_ai (i,j,iblk) = fhocn (i,j,iblk) - fswthru_ai(i,j,iblk) = fswthru(i,j,iblk) - fzsal_ai (i,j,iblk) = fzsal (i,j,iblk) - fzsal_g_ai(i,j,iblk) = fzsal_g(i,j,iblk) - - if (nbtrcr > 0) then - do k = 1, nbtrcr - flux_bio_ai (i,j,k,iblk) = flux_bio (i,j,k,iblk) - enddo - endif - - !----------------------------------------------------------------- - ! Save net shortwave for scaling factor in scale_factor - !----------------------------------------------------------------- - scale_factor(i,j,iblk) = & - swvdr(i,j,iblk)*(c1 - alvdr_ai(i,j,iblk)) & - + swvdf(i,j,iblk)*(c1 - alvdf_ai(i,j,iblk)) & - + swidr(i,j,iblk)*(c1 - alidr_ai(i,j,iblk)) & - + swidf(i,j,iblk)*(c1 - alidf_ai(i,j,iblk)) - - enddo - enddo - - !----------------------------------------------------------------- - ! Divide fluxes by ice area - ! - the CESM coupler assumes fluxes are per unit ice area - ! - also needed for global budget in diagnostics - !----------------------------------------------------------------- - - call scale_fluxes (nx_block, ny_block, & - tmask (:,:,iblk), nbtrcr, & - icepack_max_aero, & - aice (:,:,iblk), Tf (:,:,iblk), & - Tair (:,:,iblk), Qa (:,:,iblk), & - strairxT (:,:,iblk), strairyT(:,:,iblk), & - fsens (:,:,iblk), flat (:,:,iblk), & - fswabs (:,:,iblk), flwout (:,:,iblk), & - evap (:,:,iblk), & - Tref (:,:,iblk), Qref (:,:,iblk), & - fresh (:,:,iblk), fsalt (:,:,iblk), & - fhocn (:,:,iblk), & - fswthru (:,:,iblk), & - fswthru_vdr (:,:,iblk), & - fswthru_vdf (:,:,iblk), & - fswthru_idr (:,:,iblk), & - fswthru_idf (:,:,iblk), & - faero_ocn(:,:,:,iblk), & - alvdr (:,:,iblk), alidr (:,:,iblk), & - alvdf (:,:,iblk), alidf (:,:,iblk), & - fzsal (:,:,iblk), fzsal_g (:,:,iblk), & - flux_bio (:,:,1:nbtrcr,iblk), & - Qref_iso =Qref_iso (:,:,:,iblk), & - fiso_evap=fiso_evap(:,:,:,iblk), & - fiso_ocn =fiso_ocn (:,:,:,iblk)) - -#ifdef CICE_IN_NEMO -!echmod - comment this out for efficiency, if .not. calc_Tsfc - if (.not. calc_Tsfc) then - - !--------------------------------------------------------------- - ! If surface fluxes were provided, conserve these fluxes at ice - ! free points by passing to ocean. - !--------------------------------------------------------------- - - call sfcflux_to_ocn & - (nx_block, ny_block, & - tmask (:,:,iblk), aice_init(:,:,iblk), & - fsurfn_f (:,:,:,iblk), flatn_f(:,:,:,iblk), & - fresh (:,:,iblk), fhocn (:,:,iblk)) - endif -!echmod -#endif - call ice_timer_stop(timer_couple) ! atm/ocn coupling - - end subroutine coupling_prep - -#ifdef CICE_IN_NEMO - -!======================================================================= -! -! If surface heat fluxes are provided to CICE instead of CICE calculating -! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can -! be provided at points which do not have ice. (This is could be due to -! the heat fluxes being calculated on a lower resolution grid or the -! heat fluxes not recalculated at every CICE timestep.) At ice free points, -! conserve energy and water by passing these fluxes to the ocean. -! -! author: A. 
McLaren, Met Office - - subroutine sfcflux_to_ocn(nx_block, ny_block, & - tmask, aice, & - fsurfn_f, flatn_f, & - fresh, fhocn) - - use ice_domain_size, only: ncat - - integer (kind=int_kind), intent(in) :: & - nx_block, ny_block ! block dimensions - - logical (kind=log_kind), dimension (nx_block,ny_block), intent(in) :: & - tmask ! land/boundary mask, thickness (T-cell) - - real (kind=dbl_kind), dimension(nx_block,ny_block), intent(in):: & - aice ! initial ice concentration - - real (kind=dbl_kind), dimension(nx_block,ny_block,ncat), intent(in) :: & - fsurfn_f, & ! net surface heat flux (provided as forcing) - flatn_f ! latent heat flux (provided as forcing) - - real (kind=dbl_kind), dimension(nx_block,ny_block), intent(inout):: & - fresh , & ! fresh water flux to ocean (kg/m2/s) - fhocn ! actual ocn/ice heat flx (W/m**2) - - - ! local variables - integer (kind=int_kind) :: & - i, j, n ! horizontal indices - - real (kind=dbl_kind) :: & - puny, & ! - rLsub ! 1/Lsub - - character(len=*), parameter :: subname = '(sfcflux_to_ocn)' - - call icepack_query_parameters(puny_out=puny) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - rLsub = c1 / Lsub - - do n = 1, ncat - do j = 1, ny_block - do i = 1, nx_block - if (tmask(i,j) .and. aice(i,j) <= puny) then - fhocn(i,j) = fhocn(i,j) & - + fsurfn_f(i,j,n) + flatn_f(i,j,n) - fresh(i,j) = fresh(i,j) & - + flatn_f(i,j,n) * rLsub - endif - enddo ! i - enddo ! j - enddo ! n - - - end subroutine sfcflux_to_ocn - -#endif - -!======================================================================= - - end module CICE_RunMod - -!======================================================================= diff --git a/cicecore/drivers/unittest/calchk/calchk.F90 b/cicecore/drivers/unittest/calchk/calchk.F90 new file mode 100644 index 000000000..bbd61b63e --- /dev/null +++ b/cicecore/drivers/unittest/calchk/calchk.F90 @@ -0,0 +1,588 @@ + + program calchk + + use ice_kinds_mod, only: int_kind, dbl_kind + use ice_calendar, only: myear, mmonth, mday, msec + use ice_calendar, only: year_init, month_init, day_init, sec_init + use ice_calendar, only: dt, ndtd, istep0, diagfreq, npt, npt_unit + use ice_calendar, only: months_per_year, daymo, timesecs, seconds_per_day + use ice_calendar, only: use_leap_years, days_per_year + use ice_calendar, only: compute_elapsed_days + use ice_calendar, only: update_date, calc_timesteps + use ice_calendar, only: init_calendar, calendar + use ice_calendar, only: set_date_from_timesecs + use ice_calendar, only: calendar_date2time, calendar_time2date + use ice_calendar, only: compute_calendar_data + implicit none + + integer(kind=int_kind) :: yearmax + integer(kind=int_kind) :: nday,nptc + integer(kind=int_kind) :: n,m,ny,nm,nd,nf1,nf2,xadd,nfa,nfb,nfc,ns1,ns2 + integer(kind=int_kind) :: yi,mi,di,si + integer(kind=int_kind) :: dyear,dmon,dday,dsec + integer(kind=int_kind) :: fyear,fmon,fday,fsec + character(len=32) :: calstr,unitstr,signstr + integer (kind=int_kind) :: tdaymo (months_per_year) ! days per month + integer (kind=int_kind) :: tdaycal(months_per_year+1) ! day count per month + integer (kind=int_kind) :: tdayyr ! days in year + + integer(kind=int_kind), parameter :: ntests = 8 + character(len=8) :: errorflag0,errorflag(1:ntests),errorflagtmp + character(len=32) :: testname(ntests) + integer(kind=int_kind) :: yearv(ntests),monv(ntests),dayv(ntests),secv(ntests),ndayv(ntests) ! 
computed values + integer(kind=int_kind) :: yearc(ntests),monc(ntests),dayc(ntests),secc(ntests),ndayc(ntests) ! correct results + real(kind=dbl_kind) :: timesecsv(ntests),timesecsc(ntests) + character(len=*), parameter :: & + passflag = 'PASS', & + failflag = 'FAIL' + + write(6,*) ' ' + write(6,*) 'Running CALCHK' + write(6,*) ' ' + + errorflag0 = passflag + errorflag(:) = passflag + testname(:) = '' + testname(1) = 'compute_elapsed_days' + testname(2) = 'set_date_from_timesecs' + testname(3) = 'calendar advance' + testname(4) = 'date2time time2date' + testname(5) = 'big add/sub update_date' + testname(6) = 'small add/sub update_date' + testname(7) = 'special checks' + testname(8) = 'calc_timesteps' + + ndtd = 1 + + ! test yearmax years from year 0 +! yearmax = 1000 + yearmax = 100000 + + ! test 3 calendars + do n = 1,3 + + errorflag(:) = passflag + + if (n == 1) then + use_leap_years = .false. + days_per_year = 365 + calstr = 'noleap' + elseif (n == 2) then + use_leap_years = .false. + days_per_year = 360 + calstr = '360day' + elseif (n == 3) then + use_leap_years = .true. + days_per_year = 365 + calstr = 'gregorian' + endif + + istep0 = 1000 + year_init = 0 + month_init = 1 + day_init = 1 + sec_init = 0 + myear = -1 + mmonth = -1 + mday = -1 + dt = 86400._dbl_kind + diagfreq = 99999999 + call init_calendar() + + !----------------- + ! This test makes sure compute_elapsed_days works for different calendars + ! and multiple years. This also checks that the timesecs value computed + ! in calendar and passed into set_date_from_timesecs returns the correct date. + ! In test1, nday should increment 1 day each loop and the final number + ! of days is known for 1000 and 100000 years (precomputed) + ! In test2, set_date_from_timesecs will reset myear, mmonth, mday, msec + !----------------- + + ndayc(1) = -1 ! prior day + do ny = 0,yearmax + do nm = 1,months_per_year + do nd = 1,daymo(nm) + + errorflagtmp = passflag + yearv(1) = ny + monv(1) = nm + dayv(1) = nd + secv(1) = 0 + + ! check days increment by 1 + ndayv(1) = compute_elapsed_days(yearv(1),monv(1),dayv(1)) + if (ndayv(1) - ndayc(1) /= 1) then + errorflagtmp = failflag + errorflag(1) = failflag + write(6,*) 'ERROR1: did not increment one day',yearv(1),monv(1),dayv(1),ndayv(1) + endif + + ! establish internal date and update internal calendar including timesecs + myear = yearv(1) + mmonth = monv(1) + mday = dayv(1) + msec = secv(1) + call calendar() + timesecsv(1) = timesecs + + ! check set_date_from_timesecs + yearc(2) = myear + monc(2) = mmonth + dayc(2) = mday + secc(2) = msec + timesecsc(2) = timesecs + ndayc(2) = ndayv(1) + myear = -1 + mmonth = -1 + mday = -1 + msec = -1 + timesecs = -1 + call set_date_from_timesecs(timesecsc(2)) + if (myear /= yearc(2) .or. mmonth /= monc(2) .or. mday /= dayc(2) .or. msec /= secc(2) .or. timesecs /= timesecsc(2)) then + errorflagtmp = failflag + errorflag(2) = failflag + write(6,*) 'ERROR2: timesecs error' + write(6,1001) 'e2',ndayc(2),yearc(2),'-',monc(2),'-',dayc(2),':',secc(2),' timesecs = ',timesecsc(2) + endif + if (errorflagtmp /= passflag .or. & + ndayv(1) <= 10 .or. mod(ndayv(1),yearmax*10) == 0 .or. & + (yearv(1) == yearmax .and. monv(1) == months_per_year)) then + write(6,1001) ' CHECK1: ',ndayv(1),yearv(1) ,'-',monv(1),'-',dayv(1),':',secv(1) ,' timesecs = ',timesecsv(1) + endif + ndayc(1) = ndayv(1) + enddo + enddo + enddo + + ! 
check total number of days run in yearmax years + if (yearmax == 1000) then + if (n == 1) then + ndayc(1) = 365364 + elseif (n == 2) then + ndayc(1) = 360359 + elseif (n == 3) then + ndayc(1) = 365607 + endif + if (ndayv(1) /= ndayc(1)) then + errorflag(1) = failflag + write(6,*) 'ERROR1a: final nday incorrect', ndayv(1), ndayc(1) + endif + endif + + ! check total number of days run in yearmax years + if (yearmax == 100000) then + if (n == 1) then + ndayc(1) = 36500364 + elseif (n == 2) then + ndayc(1) = 36000359 + elseif (n == 3) then + ndayc(1) = 36524615 + endif + if (ndayv(1) /= ndayc(1)) then + errorflag(1) = failflag + write(6,*) 'ERROR1a: final nday incorrect', ndayv(1), ndayc(1) + endif + endif + + !----------------- + ! check adding arbitrary amounts to each date unit and see if calendar reconciles properly + ! then subtract same arbitrary amounts in reverse order and make sure it ends at original value + !----------------- + + yearv(1) = 1000 + monv(1) = 1 + dayv(1) = 1 + secv(1) = 0 + myear = yearv(1) + mmonth = monv(1) + mday = dayv(1) + msec = secv(1) + call calendar() + nday = compute_elapsed_days(myear,mmonth,mday) + dyear = 0 + dmon = 0 + dday = 0 + dsec = 0 + do nfa = 1,-1,-2 + write(6,*) ' ' + write(6,1001) ' CHECK3: ',nday,myear ,'-',mmonth ,'-',mday ,':',msec ,' timesecs = ',timesecs + do nfb = 1,10 + do nfc = 1,4 + if (nfa == 1) then + nf1 = nfb + nf2 = nfc + signstr = 'Add' + elseif (nfa == -1) then + nf1 = 11-nfb + nf2 = 5-nfc + signstr = 'Sub' + endif + fyear = 0 + fmon = 0 + fday = 0 + fsec = 0 + if (nf2 == 1) then + xadd = nf1*nf1 + unitstr = 'years' + myear = myear + nfa*xadd + if (nfa == 1) dyear = dyear + nfa*xadd + fyear = nfa*xadd + elseif (nf2 == 2) then + xadd = nf1*nf1 + unitstr = 'months' + mmonth = mmonth + nfa*xadd + if (nfa == 1) dmon = dmon + nfa*xadd + fmon = nfa*xadd + elseif (nf2 == 3) then + xadd = nf1*nf1*nf1*nf1 + unitstr = 'days' + mday = mday + nfa*xadd + if (nfa == 1) dday = dday + nfa*xadd + fday = nfa*xadd + elseif (nf2 == 4) then + xadd = nf1*nf1*nf1*nf1*nf1*nf1*nf1 + unitstr = 'seconds' + msec = msec + nfa*xadd + if (nfa == 1) dsec = dsec + nfa*xadd + fsec = nfa*xadd + endif + call calendar() + nday = compute_elapsed_days(myear,mmonth,mday) + write(6,1002) ' CHECK3: '//trim(signstr)//' ',xadd,trim(unitstr) + write(6,1001) ' CHECK3: ',nday,myear ,'-',mmonth ,'-',mday ,':',msec ,' timesecs = ',timesecs + + !----------------- + ! This checks update_date add and subtract to make sure the original value is returned + !----------------- + + yearc(6) = myear + monc(6) = mmonth + dayc(6) = mday + secc(6) = msec + timesecsc(6) = timesecs + yearv(6) = yearc(6) + monv(6) = monc(6) + dayv(6) = dayc(6) + secv(6) = secc(6) + call update_date(yearv(6),monv(6),dayv(6),secv(6),fyear,fmon,fday,fsec) + write(6,1001) ' CHECK6: ',-1,yearv(6),'-',monv(6),'-',dayv(6),':',secv(6) + if (yearc(6) == yearv(6) .and. monc(6) == monv(6) .and. dayc(6) == dayv(6) .and. secc(6) == secv(6) .and. timesecsc(6) == timesecsv(6)) then + errorflag(6) = failflag + write(6,*) ' ' + write(6,*) 'ERROR6a: update date error' + write(6,1001) 'e6',nday,yearv(6),'-',monv(6),'-',dayv(6),':',secv(6),' timesecs = ',timesecsv(6) + write(6,1001) ' ',nday,yearc(6),'-',monc(6),'-',dayc(6),':',secc(6),' timesecs = ',timesecsc(6) + write(6,*) ' ',fyear,fmon,fday,fsec + write(6,*) ' ' + endif + call update_date(yearv(6),monv(6),dayv(6),secv(6),-fyear,-fmon,-fday,-fsec) + call calendar_date2time(yearc(6),monc(6),dayc(6),secc(6),timesecsv(6)) + if (yearc(6) /= yearv(6) .or. monc(6) /= monv(6) .or. 
dayc(6) /= dayv(6) .or. secc(6) /= secv(6) .or. timesecsc(6) /= timesecsv(6)) then + errorflag(6) = failflag + write(6,*) ' ' + write(6,*) 'ERROR6b: update date error' + write(6,1001) 'e6',nday,yearv(6),'-',monv(6),'-',dayv(6),':',secv(6),' timesecs = ',timesecsv(6) + write(6,1001) ' ',nday,yearc(6),'-',monc(6),'-',dayc(6),':',secc(6),' timesecs = ',timesecsc(6) + write(6,*) ' ',fyear,fmon,fday,fsec + write(6,*) ' ' + endif + + !----------------- + ! This checks date2time and time2date leveraging the pseudo random dates + ! plus various reference settings. Different reference dates means + ! timesecs won't match, so don't check them. + !----------------- + + yi = myear/2 + mi = max(mmonth/2,1) + di = max(mday*7/8,1) + si = max(msec*7/8,1) + yearc(4) = myear + monc(4) = mmonth + dayc(4) = mday + secc(4) = msec + timesecsc(4) = timesecs + yearv(4) = -1 + monv(4) = -1 + dayv(4) = -1 + secv(4) = -1 + timesecsv(4) = -1 + call calendar_date2time(yearc(4),monc(4),dayc(4),secc(4),timesecsv(4),yi,mi,di,si) + call calendar_time2date(timesecsv(4),yearv(4),monv(4),dayv(4),secv(4),yi,mi,di,si) + write(6,*) 'CHECK4: ',timesecsv(4) + if (yearc(4) /= yearv(4) .or. monc(4) /= monv(4) .or. dayc(4) /= dayv(4) .or. secc(4) /= secv(4)) then + errorflag(4) = failflag + write(6,*) ' ' + write(6,*) 'ERROR4: date2time time2date error' + write(6,1001) 'e4',nday,yearv(4),'-',monv(4),'-',dayv(4),':',secv(4),' timesecs = ',timesecsv(4) + write(6,1001) ' ',nday,yearc(4),'-',monc(4),'-',dayc(4),':',secc(4),' timesecs = ',timesecsc(4) + write(6,*) ' ' + endif + + enddo + enddo + + yearv(3) = myear + monv(3) = mmonth + dayv(3) = mday + secv(3) = msec + timesecsv(3) = timesecs + if (nfa == 1) then + if (n == 1) then + yearc(3) = 1487 + monc(3) = 1 + dayc(3) = 21 + secc(3) = 22825 + ndayc(3) = 542775 + elseif (n == 2) then + yearc(3) = 1488 + monc(3) = 1 + dayc(3) = 13 + secc(3) = 22825 + ndayc(3) = 535692 + elseif (n == 3) then + yearc(3) = 1487 + monc(3) = 1 + dayc(3) = 5 + secc(3) = 22825 + ndayc(3) = 543120 + endif + elseif (nfa == -1) then + yearc(3) = yearv(1) + monc(3) = monv(1) + dayc(3) = dayv(1) + secc(3) = secv(1) + if (n == 1) then + ndayc(3) = 365000 + elseif (n == 2) then + ndayc(3) = 360000 + elseif (n == 3) then + ndayc(3) = 365243 + endif + endif + + ! check answers + if (yearv(3) /= yearc(3) .or. monv(3) /= monc(3) .or. dayv(3) /= dayc(3) .or. secv(3) /= secc(3)) then + errorflag(3) = failflag + write(6,*) ' ' + write(6,*) 'ERROR3: calendar advance error' + write(6,1001) 'e3',nday,yearc(3),'-',monc(3),'-',dayc(3),':',secc(3),' timesecs = ',timesecsc(3) + write(6,1001) ' ',nday,yearv(3),'-',monv(3),'-',dayv(3),':',secv(3),' timesecs = ',timesecsv(3) + write(6,*) ' ' + endif + enddo + + write(6,*) ' ' + yearv(1) = 1000 + monv(1) = 1 + dayv(1) = 1 + secv(1) = 0 + yearv(5) = yearv(1) + monv(5) = monv(1) + dayv(5) = dayv(1) + secv(5) = secv(1) + write(6,1001) ' CHECK5a: ',-1,yearv(5) ,'-',monv(5) ,'-',dayv(5) ,':',secv(5) + write(6,1002) ' Add ',dyear,'years' + write(6,1002) ' Add ',dmon,'months' + write(6,1002) ' Add ',dday,'days' + write(6,1002) ' Add ',dsec,'seconds' + call update_date(yearv(5),monv(5),dayv(5),secv(5),dyear,dmon,dday,dsec) + write(6,1001) ' CHECK5a: ',-1,yearv(5) ,'-',monv(5) ,'-',dayv(5) ,':',secv(5) + write(6,*) ' ' + + ! 
correct answers + if (n == 1) then + yearc(5) = 1487 + monc(5) = 1 + dayc(5) = 24 + secc(5) = 22825 + ndayc(5) = 542775 + elseif (n == 2) then + yearc(5) = 1488 + monc(5) = 1 + dayc(5) = 13 + secc(5) = 22825 + ndayc(5) = 535692 + elseif (n == 3) then + yearc(5) = 1487 + monc(5) = 1 + dayc(5) = 7 + secc(5) = 22825 + ndayc(5) = 543120 + endif + + ! check answers + if (yearv(5) /= yearc(5) .or. monv(5) /= monc(5) .or. dayv(5) /= dayc(5) .or. secv(5) /= secc(5)) then + errorflag(5) = failflag + write(6,*) ' ' + write(6,*) 'ERROR5a: calendar advance error' + write(6,1001) 'e5',nday,yearc(5),'-',monc(5),'-',dayc(5),':',secc(5),' timesecs = ',timesecs + write(6,1001) ' ',nday,yearv(5),'-',monv(5),'-',dayv(5),':',secv(5),' timesecs = ',timesecs + write(6,*) ' ' + endif + + write(6,1001) ' CHECK5b: ',-1,yearv(5) ,'-',monv(5) ,'-',dayv(5) ,':',secv(5) + write(6,1002) ' Sub ',dyear,'years' + write(6,1002) ' Sub ',dmon,'months' + write(6,1002) ' Sub ',dday,'days' + write(6,1002) ' Sub ',dsec,'seconds' + call update_date(yearv(5),monv(5),dayv(5),secv(5),-dyear,-dmon,-dday,-dsec) + write(6,1001) ' CHECK5b: ',-1,yearv(5) ,'-',monv(5) ,'-',dayv(5) ,':',secv(5) + + ! correct answers + yearc(5) = yearv(1) + monc(5) = monv(1) + dayc(5) = dayv(1) + secc(5) = secv(1) + if (yearv(5) /= yearc(5) .or. monv(5) /= monc(5) .or. dayv(5) /= dayc(5) .or. secv(5) /= secc(5)) then + errorflag(5) = failflag + write(6,*) ' ' + write(6,*) 'ERROR5b: calendar advance error' + write(6,1001) 'e5',nday,yearc(5),'-',monc(5),'-',dayc(5),':',secc(5),' timesecs = ',timesecs + write(6,1001) ' ',nday,yearv(5),'-',monv(5),'-',dayv(5),':',secv(5),' timesecs = ',timesecs + write(6,*) ' ' + endif + + !------------------------- + ! Special checks: + ! Add a month to the last day of each month + ! Check date2time for seconds + !------------------------- + + write(6,*) ' ' + do ny = 1,5 + do nm = 1, months_per_year + if (ny == 1) yearv(7) = 1900 + if (ny == 2) yearv(7) = 1999 + if (ny == 3) yearv(7) = 2000 + if (ny == 4) yearv(7) = 2004 + if (ny == 5) yearv(7) = 2005 + call compute_calendar_data(yearv(7),tdaymo,tdaycal,tdayyr) + monv(7) = nm + dayv(7) = tdaymo(nm) + secv(7) = 0 + if (tdaymo(mod(nm,months_per_year)+1) >= tdaymo(nm)) then + monc(7) = mod(nm,months_per_year)+1 + dayc(7) = dayv(7) + else + monc(7) = mod(nm+1,months_per_year)+1 + dayc(7) = tdaymo(nm) - tdaymo(mod(nm,months_per_year)+1) + endif + yearc(7) = yearv(7) + if (monc(7) < monv(7)) yearc(7) = yearv(7) + 1 + secc(7) = secv(7) + call update_date(yearv(7),monv(7),dayv(7),secv(7),dmon=1) + write(6,1001) ' CHECK7a:',1,yearv(7),'-',monv(7),'-',dayv(7),':',secv(7) + if (yearv(7) /= yearc(7) .or. monv(7) /= monc(7) .or. dayv(7) /= dayc(7) .or. 
secv(7) /= secc(7)) then + errorflag(7) = failflag + write(6,*) ' ' + write(6,*) 'ERROR7a: add 1 month to end of month error' + write(6,1001) 'e7',-1,yearc(7),'-',monc(7),'-',dayc(7),':',secc(7) + write(6,1001) ' ',-1,yearv(7),'-',monv(7),'-',dayv(7),':',secv(7) + write(6,*) ' ' + endif + enddo + enddo + + do ns1 = 0,seconds_per_day,seconds_per_day/4 + do ns2 = 0,seconds_per_day,seconds_per_day/4 + yearv(7) = 2002 + monv(7) = 3 + call compute_calendar_data(yearv(7),tdaymo,tdaycal,tdayyr) + dayv(7) = tdaymo(monv(7)) + call calendar_date2time(yearv(7),monv(7),dayv(7),ns2,timesecsv(7),yearv(7),monv(7),dayv(7),ns1) + write(6,*) 'CHECK7b:',ns1,ns2,timesecsv(7) + if (timesecsv(7) /= ns2-ns1) then + errorflag(7) = failflag + write(6,*) ' ' + write(6,*) 'ERROR7b: sec diff same date error' + write(6,*) ' ',ns1,ns2,timesecsv(7),ns2-ns1 + write(6,*) ' ' + endif + call calendar_date2time(yearv(7),monv(7)+1,1,ns2,timesecsv(7),yearv(7),monv(7),dayv(7),ns1) + write(6,*) 'CHECK7c:',ns1,ns2,timesecsv(7) + if (timesecsv(7) /= ns2-ns1+seconds_per_day) then + errorflag(7) = failflag + write(6,*) ' ' + write(6,*) 'ERROR7c: sec diff next day error' + write(6,*) ' ',ns1,ns2,timesecsv(7),ns2-ns1+seconds_per_day + write(6,*) ' ' + endif + enddo + enddo + + !------------------------- + ! calc_timesteps + !------------------------- + + myear = 2000 + mmonth = 2 + mday = 1 + msec = 0 + do nf1 = 1,6 + npt = 10 + dt = 3600._dbl_kind + + if (nf1 == 1) then + npt_unit = '1' + nptc = 10 + endif + if (nf1 == 2) then + npt_unit = 's' + npt = 36000. + nptc = 10 + endif + if (nf1 == 3) then + npt_unit = 'h' + nptc = 10 + endif + if (nf1 == 4) then + npt_unit = 'd' + nptc = 240 + endif + if (nf1 == 5) then + npt_unit = 'm' + if (n == 1) nptc = 7272 + if (n == 2) nptc = 7200 + if (n == 3) nptc = 7296 + endif + if (nf1 == 6) then + npt_unit = 'y' + if (n == 1) nptc = 87600 + if (n == 2) nptc = 86400 + if (n == 3) nptc = 87672 + endif + call calc_timesteps() + write(6,*) 'CHECK8:',npt + if (npt /= nptc) then + errorflag(8) = failflag + write(6,*) 'ERROR8: npt error',npt,nptc + endif + enddo + + !------------------------- + ! write test results + !------------------------- + + write(6,*) ' ' + write(6,*) 'Test Results: ',yearmax,' years' + do m = 1,ntests + write(6,*) trim(errorflag(m))," ... ",trim(calstr)," ",trim(testname(m)) + if (errorflag(m) == failflag) errorflag0=failflag + enddo + write(6,*) ' ' + + enddo ! do n + + 1001 format(a,i10,1x,i7.4,a,i2.2,a,i2.2,a,i5.5,a,e23.16) + 1002 format(a,i10,1x,a) + + write(6,*) ' ' + if (errorflag0 == passflag) then + write(6,*) 'CALCHK COMPLETED SUCCESSFULLY' + else + write(6,*) 'CALCHK FAILED' + endif + + end program + diff --git a/cicecore/drivers/unittest/helloworld/helloworld.F90 b/cicecore/drivers/unittest/helloworld/helloworld.F90 new file mode 100644 index 000000000..651436bea --- /dev/null +++ b/cicecore/drivers/unittest/helloworld/helloworld.F90 @@ -0,0 +1,8 @@ + + program hello_world + + write(6,*) 'hello_world' + write(6,*) 'COMPLETED SUCCESSFULLY' + + end program + diff --git a/cicecore/shared/ice_calendar.F90 b/cicecore/shared/ice_calendar.F90 index e7107f42a..4d7ae378f 100644 --- a/cicecore/shared/ice_calendar.F90 +++ b/cicecore/shared/ice_calendar.F90 @@ -2,7 +2,7 @@ ! Calendar routines for managing time ! -! authors: Elizabeth C. Hunke, LANL +! Authors: Elizabeth C. Hunke, LANL ! Tony Craig, NCAR ! Craig MacLachlan, UK Met Office ! @@ -10,10 +10,12 @@ ! Converted to free form source (F90). ! 2010 CM : Fixed support for Gregorian calendar: subroutines ! 
sec2time, time2sec and set_calendar added. +! 2020 TC : Significant refactor to move away from time as prognostic module ice_calendar use ice_kinds_mod + use ice_communicate, only: my_task, master_task use ice_constants, only: c0, c1, c100, c30, c360, c365, c3600, & c4, c400 use ice_domain_size, only: max_nstrm @@ -25,78 +27,88 @@ module ice_calendar implicit none private - public :: init_calendar, calendar, time2sec, sec2time, hc_jday + ! INTERFACES - integer (kind=int_kind), public :: & - days_per_year , & ! number of days in one year - daymo(12) , & ! number of days in each month - daycal(13) ! day number at end of month + public :: init_calendar ! initialize calendar + public :: calc_timesteps ! initialize number of timesteps (after namelist and restart are read) + public :: advance_timestep ! advance model 1 timestep and update calendar + public :: calendar ! update model internal calendar/time information + public :: set_date_from_timesecs ! set model date from time in seconds + ! (relative to init date) + ! needed for binary restarts - ! 360-day year data - integer (kind=int_kind) :: & - daymo360(12) , & ! number of days in each month - daycal360(13) ! day number at end of month - data daymo360 / 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30/ - data daycal360/ 0,30, 60, 90,120,150,180,210,240,270,300,330,360/ + ! semi-private, only used directly by unit tester + public :: compute_elapsed_days ! compute elapsed days since 0000-01-01 + public :: compute_days_between ! compute elapsed days between two dates + public :: update_date ! input date and delta date, compute new date + public :: calendar_date2time ! convert date to time relative to init date + public :: calendar_time2date ! convert time to date relative to init date + public :: compute_calendar_data ! compute info about calendar for a given year - ! 365-day year data - integer (kind=int_kind) :: & - daymo365(12) , & ! number of days in each month - daycal365(13) ! day number at end of month - data daymo365 / 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31/ - data daycal365/ 0,31, 59, 90,120,151,181,212,243,273,304,334,365/ + ! private functions + private :: set_calendar ! sets model calendar type (noleap, etc) - ! 366-day year data (leap year) - integer (kind=int_kind) :: & - daymo366(12) , & ! number of days in each month - daycal366(13) ! day number at end of month - data daymo366 / 31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31/ - data daycal366/ 0,31, 60, 91,121,152,182,213,244,274,305,335,366/ + ! PUBLIC - real (kind=dbl_kind), parameter :: & - days_per_4c = 146097.0_dbl_kind, & - days_per_c = 36524.0_dbl_kind, & - days_per_4y = 1461.0_dbl_kind, & - days_per_y = 365.0_dbl_kind + character(len=*), public, parameter :: & + ice_calendar_gregorian = 'Gregorian', & ! calendar name, actually proleptic gregorian here + ice_calendar_noleap = 'NO_LEAP', & ! 365 day per year calendar + ice_calendar_360day = '360day' ! 360 day calendar with 30 days per month + + integer (kind=int_kind), public, parameter :: & + months_per_year = 12, & ! months per year + hours_per_day = 24 ! hours per day + + integer (kind=int_kind), public :: & + seconds_per_day , & ! seconds per day + seconds_per_hour , & ! seconds per hour + days_per_year , & ! number of days in one year + daymo(months_per_year), & ! number of days in each month + daycal(months_per_year+1) ! accumulated days in year to end of prior month integer (kind=int_kind), public :: & - istep , & ! local step counter for time loop - istep0 , & ! 
counter, number of steps taken in previous run + ! step counters + istep , & ! local step counter for current run in time loop + istep0 , & ! counter, number of steps at start of run istep1 , & ! counter, number of steps at current timestep + ! basic model time variables + myear , & ! year number + mmonth , & ! month number, 1 to months_per_year mday , & ! day of the month - hour , & ! hour of the day - month , & ! month number, 1 to 12 - monthp , & ! last month + msec , & ! elapsed seconds into date + ! initial time year_init, & ! initial year - nyr , & ! year number + month_init,& ! initial month + day_init, & ! initial day of month + sec_init , & ! initial seconds + ! other stuff idate , & ! date (yyyymmdd) - idate0 , & ! initial date (yyyymmdd) - sec , & ! elapsed seconds into date + idate0 , & ! initial date (yyyymmdd), associated with year_init, month_init, day_init + dayyr , & ! number of days in the current year npt , & ! total number of time steps (dt) + npt0 , & ! original npt value in npt0_unit ndtd , & ! number of dynamics subcycles: dt_dyn=dt/ndtd stop_now , & ! if 1, end program execution write_restart, & ! if 1, write restart now diagfreq , & ! diagnostic output frequency (10 = once per 10 dt) dumpfreq_n , & ! restart output frequency (10 = once per 10 d,m,y) nstreams , & ! number of history output streams - histfreq_n(max_nstrm) ! history output frequency + histfreq_n(max_nstrm) ! history output frequency + + logical (kind=log_kind), public :: & + new_year , & ! new year = .true. + new_month , & ! new month = .true. + new_day , & ! new day = .true. + new_hour ! new hour = .true. real (kind=dbl_kind), public :: & dt , & ! thermodynamics timestep (s) dt_dyn , & ! dynamics/transport/ridging timestep (s) - time , & ! total elapsed time (s) - time_forc , & ! time of last forcing update (s) + timesecs , & ! total elapsed time (s) yday , & ! day of the year - tday , & ! absolute day number - dayyr , & ! number of days per year - nextsw_cday , & ! julian day of next shortwave calculation - basis_seconds ! Seconds since calendar zero + nextsw_cday ! julian day of next shortwave calculation logical (kind=log_kind), public :: & - new_year , & ! new year = .true. - new_month , & ! new month = .true. - new_day , & ! new day = .true. - new_hour , & ! new hour = .true. use_leap_years , & ! use leap year functionality if true write_ic , & ! write initial condition now dump_last , & ! write restart file on last time step @@ -104,6 +116,8 @@ module ice_calendar write_history(max_nstrm) ! write history now character (len=1), public :: & + npt_unit, & ! run length unit, 'y', 'm', 'd', 'h', 's', '1' + npt0_unit, & ! original run length unit, 'y', 'm', 'd', 'h', 's', '1' histfreq(max_nstrm), & ! history output frequency, 'y','m','d','h','1' dumpfreq ! restart frequency, 'y','m','d' @@ -111,17 +125,33 @@ module ice_calendar calendar_type ! differentiates Gregorian from other calendars ! default = ' ' + ! PRIVATE + + integer (kind=int_kind) :: & + hour ! hour of the day + + ! 360-day year data + integer (kind=int_kind) :: & + daymo360(months_per_year) ! number of days in each month + data daymo360 / 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30/ + + ! 365-day year data + integer (kind=int_kind) :: & + daymo365(months_per_year) ! number of days in each month + data daymo365 / 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31/ + + ! 366-day year data (leap year) + integer (kind=int_kind) :: & + daymo366(months_per_year) ! 
number of days in each month + data daymo366 / 31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31/ + + !======================================================================= contains !======================================================================= - ! Initialize calendar variables -! -! authors: Elizabeth C. Hunke, LANL -! Tony Craig, NCAR -! Craig MacLachlan, UK Met Office subroutine init_calendar @@ -134,99 +164,178 @@ subroutine init_calendar if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) + seconds_per_day = nint(secday) + if ((abs(real(seconds_per_day,kind=dbl_kind)/secday)-1.0_dbl_kind) > 1.0e-7) then + write(nu_diag,*) trim(subname),' ERROR secday should basically be an integer',secday + call abort_ice(subname//'ERROR: improper secday') + endif + seconds_per_hour = nint(secday/real(hours_per_day,kind=dbl_kind)) + if (abs(seconds_per_hour*hours_per_day - seconds_per_day) > 0) then + write(nu_diag,*) trim(subname),' ERROR seconds per day and hours per day inconsistent' + call abort_ice(subname//'ERROR: improper seconds_per_hour') + endif + istep = 0 ! local timestep number - time=istep0*dt ! s - yday=c0 ! absolute day number - mday=0 ! day of the month - month=0 ! month - nyr=0 ! year - idate=00000101 ! date - sec=0 ! seconds into date + myear=year_init ! year + mmonth=month_init ! month + mday=day_init ! day of the month + msec=sec_init ! seconds into date + hour=0 ! computed in calendar, but needs some reasonable initial value istep1 = istep0 ! number of steps at current timestep ! real (dumped) or imagined (use to set calendar) + idate0 = (myear)*10000 + mmonth*100 + mday ! date (yyyymmdd) stop_now = 0 ! end program execution if stop_now=1 dt_dyn = dt/real(ndtd,kind=dbl_kind) ! dynamics et al timestep force_restart_now = .false. - ! Check that the number of days per year is set correctly when using - ! leap years. If not, set days_per_year correctly and warn the user. - if (use_leap_years .and. days_per_year /= 365) then - days_per_year = 365 - write(nu_diag,*) 'Warning: days_per_year has been set to 365', & - ' because use_leap_years = .true.' - end if - #ifdef CESMCOUPLED ! calendar_type set by coupling #else - calendar_type = ' ' - if (use_leap_years .and. days_per_year == 365) calendar_type = 'Gregorian' -#endif - - dayyr = real(days_per_year, kind=dbl_kind) - if (days_per_year == 360) then - daymo = daymo360 - daycal = daycal360 - elseif (days_per_year == 365) then - daymo = daymo365 - daycal = daycal365 - else - call abort_ice(subname//'ERROR: days_per_year must be 360 or 365') + calendar_type = '' + if (use_leap_years) then + if (days_per_year == 365) then + calendar_type = trim(ice_calendar_gregorian) + else + call abort_ice(subname//'ERROR: use_leap_years is true, must set days_per_year to 365') + endif + else + if (days_per_year == 365) then + calendar_type = trim(ice_calendar_noleap) + elseif (days_per_year == 360) then + calendar_type = trim(ice_calendar_360day) + else + call abort_ice(subname//'ERROR: days_per_year only 365 or 360 supported') + endif endif +#endif - ! Get the time in seconds from calendar zero to start of initial year - call time2sec(year_init,1,1,basis_seconds) + call set_calendar(myear) + call calendar() - ! determine initial date (assumes namelist year_init, istep0 unchanged) - sec = mod(time,secday) ! elapsed seconds into date at - ! end of dt - tday = (time-sec)/secday + c1 ! absolute day number + end subroutine init_calendar - ! 
Convert the current timestep into a calendar date - call sec2time(nyr,month,mday,basis_seconds+sec) +!======================================================================= +! Initialize timestep counter +! This converts npt_unit and npt to a number of timesteps stored in npt +! npt0 and npt0_unit remember the original values +! It is safe to call this more than once, but it should be called only after +! the initial model run date is known (from namelist or restart) and before +! the first timestep - yday = mday + daycal(month) ! day of the year - nyr = nyr - year_init + 1 ! year number + subroutine calc_timesteps - idate0 = (nyr+year_init-1)*10000 + month*100 + mday ! date (yyyymmdd) + real (kind=dbl_kind) :: secday ! seconds per day + real (kind=dbl_kind) :: dtimesecs ! time in seconds of run + integer (kind=int_kind) :: yeare,monthe,daye,sece ! time at end of run + character(len=*),parameter :: subname='(calc_timesteps)' - end subroutine init_calendar + call icepack_query_parameters(secday_out=secday) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) -!======================================================================= + yeare = myear + monthe = mmonth + daye = mday + sece = msec + npt0 = npt + npt0_unit = npt_unit + + if (npt_unit == 'y') then + call update_date(yeare,monthe,daye,sece,dyear=npt) + call calendar_date2time(yeare,monthe,daye,sece,dtimesecs,myear,mmonth,mday,msec) + elseif (npt_unit == 'm') then + call update_date(yeare,monthe,daye,sece,dmon=npt) + call calendar_date2time(yeare,monthe,daye,sece,dtimesecs,myear,mmonth,mday,msec) + elseif (npt_unit == 'd') then + dtimesecs = real(npt,kind=dbl_kind)*secday + call update_date(yeare,monthe,daye,sece,dday=npt) + elseif (npt_unit == 'h') then + dtimesecs = real(npt,kind=dbl_kind)*secday/real(hours_per_day,kind=dbl_kind) + call update_date(yeare,monthe,daye,sece,dsec=nint(dtimesecs)) + elseif (npt_unit == 's') then + call update_date(yeare,monthe,daye,sece,dsec=npt) + dtimesecs = real(npt,kind=dbl_kind) + elseif (npt_unit == '1') then + dtimesecs = dt*real(npt,kind=dbl_kind) + call update_date(yeare,monthe,daye,sece,dsec=nint(dtimesecs)) + else + write(nu_diag,*) trim(subname),' ERROR invalid npt_unit = ',trim(npt_unit) + call abort_ice(subname//'ERROR: invalid npt_unit') + endif + + npt = nint(dtimesecs/dt) + npt_unit = '1' + + if (my_task == master_task) then + write(nu_diag,*) ' ' + write(nu_diag,'(1x,2a,i9,a,f13.2)') subname,' modified npt from ',npt0,' '//trim(npt0_unit)//' with dt= ',dt + write(nu_diag,'(1x,2a,i9,a,f13.2)') subname,' to ',npt ,' '//trim(npt_unit )//' with dt= ',dt + write(nu_diag,'(1x,2a,i6.4,a,i2.2,a,i2.2,a,i5.5)') subname,' start time is',myear,'-',mmonth,'-',mday,':',msec + write(nu_diag,'(1x,2a,i6.4,a,i2.2,a,i2.2,a,i5.5)') subname,' end time is',yeare,'-',monthe,'-',daye,':',sece + write(nu_diag,*) ' ' + endif + + ! check that npt is very close to an integer + if ((abs(real(npt,kind=dbl_kind)*dt/dtimesecs)-1.0_dbl_kind) > 1.0e-7) then + write(nu_diag,*) trim(subname),' ERROR dt and npt not consistent',npt,dt + call abort_ice(subname//'ERROR: improper npt') + endif + + end subroutine calc_timesteps +!======================================================================= ! Determine the date at the end of the time step -! -! authors: Elizabeth C. Hunke, LANL -! Tony Craig, NCAR -! 
Craig MacLachlan, UK Met Office

-      subroutine calendar(ttime)
+      subroutine advance_timestep()

-      use ice_communicate, only: my_task, master_task
+      ! local variables
+
+      integer(kind=int_kind) :: &
+         idt                 ! integer dt
+      character(len=*),parameter :: subname='(advance_timestep)'
+
+      if (trim(npt_unit) /= '1') then
+         write(nu_diag,*) trim(subname),' ERROR npt_unit should be converted to timesteps by now ',trim(npt_unit)
+         write(nu_diag,*) trim(subname),' ERROR you may need to call calc_timesteps to convert from other units'
+         call abort_ice(subname//'ERROR: npt_unit incorrect')
+      endif
+
+      istep = istep + 1
+      istep1 = istep1 + 1
+      idt = nint(dt)
+      ! dt is historically a real but it should be an integer
+      ! make sure dt is very close to an integer
+      if (abs(real(idt,kind=dbl_kind)/dt - 1.0_dbl_kind) > 1.0e-7) then
+         write(nu_diag,*) trim(subname),' ERROR dt error, needs to be integer number of seconds, dt=',dt
+         call abort_ice(subname//'ERROR: improper dt')
+      endif
+      msec = msec + idt
+      call calendar()

-      real (kind=dbl_kind), intent(in) :: &
-         ttime          ! time variable
+   end subroutine advance_timestep
+
+!=======================================================================
+! Update the calendar and time manager info
+
+   subroutine calendar()
+
+!      real (kind=dbl_kind), intent(in), optional :: &
+!         ttime          ! time variable

       ! local variables

       integer (kind=int_kind) :: &
         ns                      , & ! loop index
-        nyrp,mdayp,hourp        , & ! previous year, day, hour
+        yearp,monthp,dayp,hourp , & ! previous year, month, day, hour
         elapsed_days            , & ! since beginning this run
         elapsed_months          , & ! since beginning this run
-        elapsed_hours           , & ! since beginning this run
-        month0
-      real (kind=dbl_kind) :: secday ! seconds per day
+        elapsed_hours               ! since beginning this run
      character(len=*),parameter :: subname='(calendar)'

-      call icepack_query_parameters(secday_out=secday)
-      call icepack_warnings_flush(nu_diag)
-      if (icepack_warnings_aborted()) call abort_ice(error_message=subname, &
-         file=__FILE__, line=__LINE__)
-
-      nyrp=nyr
-      monthp=month
-      mdayp=mday
+      yearp=myear
+      monthp=mmonth
+      dayp=mday
       hourp=hour
       new_year=.false.
       new_month=.false.
@@ -235,349 +344,576 @@ subroutine calendar(ttime)
       write_history(:)=.false.
       write_restart=0

-      sec = mod(ttime,secday)           ! elapsed seconds into date at
-                                        ! end of dt
-      tday = (ttime-sec)/secday + c1    ! absolute day number
-
-      ! Deterime the current date from the timestep
-      call sec2time(nyr,month,mday,basis_seconds+ttime)
+      call update_date(myear,mmonth,mday,msec)
+      call set_calendar(myear)

-      yday = mday + daycal(month)   ! day of the year
-      nyr = nyr - year_init + 1     ! year number
-
-      hour = int((ttime)/c3600) + c1 ! hour
+      idate = (myear)*10000 + mmonth*100 + mday ! date (yyyymmdd)
+      yday = daycal(mmonth) + mday   ! day of the year
+      hour = (msec+1)/(seconds_per_hour)
+      elapsed_months = (myear - year_init)*months_per_year + mmonth - month_init
+      elapsed_days = compute_days_between(year_init,month_init,day_init,myear,mmonth,mday)
+      elapsed_hours = elapsed_days * hours_per_day
+      call calendar_date2time(myear,mmonth,mday,msec,timesecs)

-      month0 = int((idate0 - int(idate0 / 10000) * 10000) / 100)
-
-      elapsed_months = (nyr - 1)*12 + (month - month0)
-      elapsed_days = int((istep * dt) / secday)
-      elapsed_hours = int(ttime/3600)
-
-      idate = (nyr+year_init-1)*10000 + month*100 + mday ! date (yyyymmdd)
+      !--- compute derived calendar information

 #ifndef CESMCOUPLED
       if (istep >= npt+1) stop_now = 1
       if (istep == npt .and. dump_last) write_restart = 1 ! last timestep
 #endif

-      if (nyr /= nyrp) new_year = .true.
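+      ! Flag year/month/day/hour boundary crossings; the history and
+      ! restart frequency logic below keys off these flags.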
- if (month /= monthp) new_month = .true. - if (mday /= mdayp) new_day = .true. - if (hour /= hourp) new_hour = .true. + if (myear /= yearp) new_year = .true. + if (mmonth /= monthp) new_month = .true. + if (mday /= dayp) new_day = .true. + if (hour /= hourp) new_hour = .true. + ! History writing flags do ns = 1, nstreams - if (histfreq(ns)=='1' .and. histfreq_n(ns)/=0) then - if (mod(istep1, histfreq_n(ns))==0) & - write_history(ns)=.true. - endif + + select case (histfreq(ns)) + case ("y", "Y") + if (new_year .and. histfreq_n(ns)/=0) then + if (mod(myear, histfreq_n(ns))==0) & + write_history(ns) = .true. + endif + case ("m", "M") + if (new_month .and. histfreq_n(ns)/=0) then + if (mod(elapsed_months,histfreq_n(ns))==0) & + write_history(ns) = .true. + endif + case ("d", "D") + if (new_day .and. histfreq_n(ns)/=0) then + if (mod(elapsed_days,histfreq_n(ns))==0) & + write_history(ns) = .true. + endif + case ("h", "H") + if (new_hour .and. histfreq_n(ns)/=0) then + if (mod(elapsed_hours,histfreq_n(ns))==0) & + write_history(ns) = .true. + endif + case ("1") + if (histfreq_n(ns)/=0) then + if (mod(istep1, histfreq_n(ns))==0) & + write_history(ns)=.true. + endif + end select + enddo - if (dumpfreq == '1') then + ! Restart writing flag + + select case (dumpfreq) + case ("y", "Y") + if (new_year .and. mod(myear, dumpfreq_n)==0) & + write_restart = 1 + case ("m", "M") + if (new_month .and. mod(elapsed_months,dumpfreq_n)==0) & + write_restart = 1 + case ("d", "D") + if (new_day .and. mod(elapsed_days, dumpfreq_n)==0) & + write_restart = 1 + case ("h", "H") + if (new_hour .and. mod(elapsed_hours, dumpfreq_n)==0) & + write_restart = 1 + case ("1") if (mod(istep1, dumpfreq_n)==0) & write_restart = 1 - endif - - if (istep > 1) then + end select - do ns = 1, nstreams + if (force_restart_now) write_restart = 1 - select case (histfreq(ns)) - case ("y", "Y") - if (new_year .and. histfreq_n(ns)/=0) then - if (mod(nyr, histfreq_n(ns))==0) & - write_history(ns) = .true. - endif - case ("m", "M") - if (new_month .and. histfreq_n(ns)/=0) then - if (mod(elapsed_months,histfreq_n(ns))==0) & - write_history(ns) = .true. - endif - case ("d", "D") - if (new_day .and. histfreq_n(ns)/=0) then - if (mod(elapsed_days,histfreq_n(ns))==0) & - write_history(ns) = .true. - endif - case ("h", "H") - if (new_hour .and. histfreq_n(ns)/=0) then - if (mod(elapsed_hours,histfreq_n(ns))==0) & - write_history(ns) = .true. - endif - end select - - enddo ! nstreams - - select case (dumpfreq) - case ("y", "Y") - if (new_year .and. mod(nyr, dumpfreq_n)==0) & - write_restart = 1 - case ("m", "M") - if (new_month .and. mod(elapsed_months,dumpfreq_n)==0) & - write_restart = 1 - case ("d", "D") - if (new_day .and. mod(elapsed_days, dumpfreq_n)==0) & - write_restart = 1 - case ("h", "H") - if (new_hour .and. mod(elapsed_hours, dumpfreq_n)==0) & - write_restart = 1 - end select - - if (force_restart_now) write_restart = 1 - - endif ! istep > 1 - - if (my_task == master_task .and. mod(istep,diagfreq) == 0 & + if (my_task == master_task .and. mod(istep1,diagfreq) == 0 & .and. stop_now /= 1) then write(nu_diag,*) ' ' write(nu_diag,'(a7,i10,4x,a6,i10,4x,a4,i10)') & - 'istep1:', istep1, 'idate:', idate, 'sec:', sec + 'istep1:', istep1, 'idate:', idate, 'sec:', msec endif end subroutine calendar !======================================================================= +! Set the model calendar data for year -! Convert the date to seconds since calendar zero. -! ** This is based on the UM routine TIME2SEC ** -! -! 
authors: Craig MacLachlan, UK Met Office + subroutine set_calendar(year) - subroutine time2sec(year,month,day,tsec) + integer (kind=int_kind), intent(in) :: year ! current year - integer (kind=int_kind), intent(in) :: year ! year - integer (kind=int_kind), intent(in) :: month ! month - integer (kind=int_kind), intent(in) :: day ! year - real (kind=dbl_kind), intent(out) :: tsec ! seconds since calendar zero + ! Internal variable + character(len=*),parameter :: subname='(set_calendar)' - ! local variables + call compute_calendar_data(year,daymo,daycal,dayyr) - real (kind=dbl_kind) :: days_since_calz ! days since calendar zero - real (kind=dbl_kind) :: secday ! seconds per day - integer (kind=int_kind) :: years_since_calz ! days since calendar zero - character(len=*),parameter :: subname='(time2sec)' + end subroutine set_calendar + +!======================================================================= +! Add and reconcile date +! delta time arguments are optional + + subroutine update_date(ayear,amon,aday,asec,dyear,dmon,dday,dsec) + + integer (kind=int_kind), intent(inout) :: ayear, amon, aday, asec ! year, month, day, sec + integer (kind=int_kind), intent(in), optional :: dyear, dmon, dday, dsec ! delta year, month, day, sec + + ! local variables + integer (kind=int_kind) :: tdaymo (months_per_year) ! days per month + integer (kind=int_kind) :: tdaycal(months_per_year+1) ! day count per month + integer (kind=int_kind) :: tdayyr ! days in year + real (kind=dbl_kind) :: secday ! seconds per day + integer (kind=int_kind) :: isecday ! seconds per day + integer (kind=int_kind) :: delta + character(len=*),parameter :: subname='(update_date)' call icepack_query_parameters(secday_out=secday) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) + isecday = nint(secday) + + ! order matters. think about adding 1 month and 10 days to the 25th of a month + ! what is the right order? + ! will add all deltas then reconcile years then months then days then seconds + + if (present(dyear)) ayear = ayear + dyear + if (present(dmon)) amon = amon + dmon + if (present(dday)) aday = aday + dday + if (present(dsec)) asec = asec + dsec + + ! adjust negative data first + ! reconcile months - years + do while (amon <= 0) + delta = int((abs(amon))/months_per_year) + 1 + ayear = ayear - delta + amon = amon + delta*months_per_year + enddo + call compute_calendar_data(ayear,tdaymo,tdaycal,tdayyr) + + ! reconcile days - months - years + do while (aday <= 0) + amon = amon - 1 + do while (amon <= 0) + delta = int((abs(amon))/months_per_year) + 1 + ayear = ayear - delta + amon = amon + delta*months_per_year + call compute_calendar_data(ayear,tdaymo,tdaycal,tdayyr) + enddo + aday = aday + tdaymo(amon) + enddo - if (dayyr == 360) then - days_since_calz = c360*year + c30*(month-1) + day - c1 - tsec = secday * days_since_calz + ! reconcile seconds - days - months - years + if (asec < 0) then + delta = int(abs(asec)/isecday) + 1 + aday = aday - delta + asec = asec + delta*isecday + endif + do while (aday <= 0) + amon = amon - 1 + do while (amon <= 0) + delta = int((abs(amon))/months_per_year) + 1 + ayear = ayear - delta + amon = amon + delta*months_per_year + call compute_calendar_data(ayear,tdaymo,tdaycal,tdayyr) + enddo + aday = aday + tdaymo(amon) + enddo - else - - if (use_leap_years) then + ! check for negative data + if (ayear < 0 .or. amon <= 0 .or. aday <= 0 .or. 
asec < 0) then + write(nu_diag,*) trim(subname),' ERROR in dateA, ',ayear,amon,aday,asec + call abort_ice(subname//'ERROR: in date') + endif + + ! reconcile months - years + do while (amon > months_per_year) + delta = int((amon-1)/months_per_year) + ayear = ayear + delta + amon = amon - delta*months_per_year + enddo + call compute_calendar_data(ayear,tdaymo,tdaycal,tdayyr) + + ! reconcile days - months - years + do while (aday > tdaymo(amon)) + aday = aday - tdaymo(amon) + amon = amon + 1 + do while (amon > months_per_year) + delta = int((amon-1)/months_per_year) + ayear = ayear + delta + amon = amon - delta*months_per_year + call compute_calendar_data(ayear,tdaymo,tdaycal,tdayyr) + enddo + enddo - call set_calendar(year) + ! reconcile seconds - days - months - years + if (asec >= isecday) then + delta = int(asec/isecday) + aday = aday + delta + asec = asec - delta*isecday + endif + do while (aday > tdaymo(amon)) + aday = aday - tdaymo(amon) + amon = amon + 1 + do while (amon > months_per_year) + delta = int((amon-1)/months_per_year) + ayear = ayear + delta + amon = amon - delta*months_per_year + call compute_calendar_data(ayear,tdaymo,tdaycal,tdayyr) + enddo + enddo - ! Add on the days from this year - days_since_calz = day + daycal(month) - c1 + ! check for negative data, just in case + if (ayear < 0 .or. amon <= 0 .or. aday <= 0 .or. asec < 0) then + write(nu_diag,*) trim(subname),' ERROR in dateB, ',ayear,amon,aday,asec + call abort_ice(subname//'ERROR: in date') + endif - ! Subtract a year because we only want to count whole years - years_since_calz = year - 1 - - ! Add days from preceeding years - days_since_calz = days_since_calz & - + int(years_since_calz/c400)*days_per_4c - years_since_calz = years_since_calz & - - int(years_since_calz/c400)*400 + end subroutine update_date - days_since_calz = days_since_calz & - + int(years_since_calz/c100)*days_per_c - years_since_calz = years_since_calz & - - int(years_since_calz/c100)*100 +!======================================================================= - days_since_calz = days_since_calz & - + int(years_since_calz/c4)*days_per_4y - years_since_calz = years_since_calz & - - int(years_since_calz/c4)*4 +! Set internal calendar date from timesecs input +! Needed for binary restarts where only timesecs is on the restart file - days_since_calz = days_since_calz & - + years_since_calz*days_per_y + subroutine set_date_from_timesecs(ttimesecs) - tsec = secday * days_since_calz - - else ! Using fixed 365-day calendar - - days_since_calz = c365*year + daycal365(month) + day - c1 - tsec = secday * days_since_calz + real (kind=dbl_kind), intent(in) :: ttimesecs ! seconds since init date - end if + ! Internal variable + character(len=*),parameter :: subname='(set_date_from_timesecs)' - end if + timesecs = ttimesecs + call calendar_time2date(ttimesecs,myear,mmonth,mday,msec,year_init,month_init,day_init,sec_init) - end subroutine time2sec + end subroutine set_date_from_timesecs !======================================================================= +! Compute elapsed days from year0,month0,day0 to year1,month1,day1 +! Same day results in 0 elapsed days -! Convert the time in seconds since calendar zero to a date. -! -! authors: Craig MacLachlan, UK Met Office + integer function compute_days_between(year0,month0,day0,year1,month1,day1) - subroutine sec2time(year,month,day,tsec) + integer (kind=int_kind), intent(in) :: year0 ! start year + integer (kind=int_kind), intent(in) :: month0 ! start month + integer (kind=int_kind), intent(in) :: day0 ! 
start day + integer (kind=int_kind), intent(in) :: year1 ! end year + integer (kind=int_kind), intent(in) :: month1 ! end month + integer (kind=int_kind), intent(in) :: day1 ! end day - integer (kind=int_kind), intent(out) :: year ! year - integer (kind=int_kind), intent(out) :: month ! month - integer (kind=int_kind), intent(out) :: day ! year - real (kind=dbl_kind), intent(in) :: tsec ! seconds since calendar zero + ! Internal variable + logical (kind=log_kind) :: isleap ! Leap year logical + integer (kind=int_kind) :: nday0, nday1 + character(len=*),parameter :: subname='(compute_days_between)' - ! local variables + nday0 = compute_elapsed_days(year0,month0,day0) + nday1 = compute_elapsed_days(year1,month1,day1) - real (kind=dbl_kind) :: days_since_calz ! days since calendar zero - real (kind=dbl_kind) :: secday ! seconds per day - integer (kind=int_kind) :: k ! counter - character(len=*),parameter :: subname='(sec2time)' + compute_days_between = nday1 - nday0 - call icepack_query_parameters(secday_out=secday) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) + end function compute_days_between + +!======================================================================= +! compute calendar data based on year + + subroutine compute_calendar_data(ayear,adaymo,adaycal,adayyr) + + integer (kind=int_kind), intent(in) :: ayear ! year + integer (kind=int_kind), intent(out) :: adaymo(:) ! days per month + integer (kind=int_kind), intent(out) :: adaycal(:) ! day count per month + integer (kind=int_kind), intent(out) :: adayyr ! days per year + + ! Internal variable + logical (kind=log_kind) :: isleap ! Leap year logical + integer (kind=int_kind) :: n + character(len=*),parameter :: subname='(compute_calendar_data)' - days_since_calz = int(tsec/secday) + if (ayear < 0) then + write(nu_diag,*) trim(subname),' ERROR in ayear = ',ayear + call abort_ice(subname//'ERROR: in ayear') + endif - if (dayyr == 360) then + if (size(adaymo) /= months_per_year .or. & + size(adaycal) /= months_per_year+1 ) then + call abort_ice(subname//'ERROR: in argument sizes') + endif - year = int(days_since_calz/c360) - month = mod(int(days_since_calz/c30),12) + 1 - day = mod(int(days_since_calz),30) + 1 + if (trim(calendar_type) == trim(ice_calendar_gregorian)) then + + isleap = .false. ! not a leap year + if (mod(ayear, 4) == 0) isleap = .true. + if (mod(ayear,100) == 0) isleap = .false. + if (mod(ayear,400) == 0) isleap = .true. 
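+         ! The three tests above implement the full Gregorian rule: a year
+         ! is a leap year if divisible by 4, except century years, which
+         ! are leap years only if divisible by 400.  For example, 1900 is
+         ! not a leap year (divisible by 100 but not by 400), 2000 and
+         ! 2004 are leap years, and 2100 is not.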
+ + if (isleap) then + adaymo = daymo366 + else + adaymo = daymo365 + endif + elseif (trim(calendar_type) == trim(ice_calendar_360day)) then + adaymo = daymo360 else + adaymo = daymo365 + endif - if (use_leap_years) then - - year = int(days_since_calz/days_per_4c)*400 - days_since_calz = days_since_calz & - - int(days_since_calz/days_per_4c)*days_per_4c - - if (days_since_calz == 4*days_per_c) then - year = year + 400 - days_since_calz = days_per_y + 1 - else - year = year + int(days_since_calz/days_per_c)*100 - days_since_calz = days_since_calz & - - int(days_since_calz/days_per_c)*days_per_c - - year = year + int(days_since_calz/days_per_4y)*4 - days_since_calz = days_since_calz & - - int(days_since_calz/days_per_4y)*days_per_4y - - if (days_since_calz == 4*days_per_y) then - year = year + 4 - days_since_calz = days_per_y + 1 - else - year = year + int(days_since_calz/days_per_y) + 1 - days_since_calz = days_since_calz & - - int(days_since_calz/days_per_y)*days_per_y + c1 - endif - endif + adaycal(1) = 0 + do n = 1, months_per_year + adaycal(n+1) = adaycal(n) + adaymo(n) + enddo + adayyr=adaycal(months_per_year+1) - ! Ensure the calendar variables are correct for this year. - call set_calendar(year) + end subroutine compute_calendar_data - ! Calculate the month - month = 1 - do k = 1, 12 - if (days_since_calz > daycal(k)) month = k - enddo +!======================================================================= +! Compute elapsed days from 0000-01-01 to year1,month1,day1 +! 0000-01-01 is 0 elapsed days - ! Calculate the day of the month - day = days_since_calz - daycal(month) + integer function compute_elapsed_days(ayear,amonth,aday) - else ! Using fixed 365-day calendar - - year = int(days_since_calz/c365) - days_since_calz = days_since_calz - year*365 + 1 - - ! Calculate the month - month = 1 - do k = 1, 12 - if (days_since_calz > daycal365(k)) month = k - enddo + integer (kind=int_kind), intent(in) :: ayear ! year + integer (kind=int_kind), intent(in) :: amonth ! month + integer (kind=int_kind), intent(in) :: aday ! day - ! Calculate the day of the month - day = days_since_calz - daycal365(month) + ! Internal variable + integer (kind=int_kind) :: ced_nday, n + integer (kind=int_kind) :: lyear,lmonth,lday,lsec + integer (kind=int_kind) :: tdaymo (months_per_year) ! days per month + integer (kind=int_kind) :: tdaycal(months_per_year+1) ! day count per month + integer (kind=int_kind) :: tdayyr ! days in year + character(len=*),parameter :: subname='(compute_elapsed_days)' + + ! use 0000-01-01 as base, year 0 is a leap year + ! this must be implemented consistent with set_calendar + + lyear = ayear + lmonth = amonth + lday = aday + lsec = 0 + + if (lyear < 0 .or. lmonth <= 0 .or. lday <= 0) then + write(nu_diag,*) trim(subname),' ERROR for year,month,day = ',lyear,lmonth,lday + call abort_ice(subname//'ERROR: illegal date') + elseif (lmonth > months_per_year) then + call update_date(lyear,lmonth,lday,lsec) + endif - end if + ! compute days from year 0000-01-01 to year-01-01 + ! don't loop thru years for performance reasons + if (trim(calendar_type) == trim(ice_calendar_gregorian)) then + if (lyear == 0) then + ced_nday = 0 + else + ced_nday = lyear * 365 + 1 + (lyear-1)/4 - (lyear-1)/100 + (lyear-1)/400 + endif + else + ced_nday = lyear * daycal(months_per_year+1) + endif - end if + ! 
now compute days in this year
+      call compute_calendar_data(lyear,tdaymo,tdaycal,tdayyr)
 
-      end subroutine sec2time
+      do n = 1, lmonth-1
+         ced_nday = ced_nday + tdaymo(n)
+      enddo
 
-!=======================================================================
+      if (lday <= tdaymo(lmonth)) then
+         ced_nday = ced_nday + lday - 1
+      else
+         write(nu_diag,*) trim(subname),' ERROR for year,month,day = ',ayear,amonth,aday
+         call abort_ice(subname//'ERROR: illegal day in month')
+      endif
 
-! Set the "days per month", "days per year", etc variables for the
-! current year.
-!
-! authors: Craig MacLachlan, UK Met Office
+      compute_elapsed_days = ced_nday
 
-  subroutine set_calendar(year)
+      end function compute_elapsed_days
 
-    integer (kind=int_kind), intent(in) :: year   ! current year
+!=======================================================================
+! Compute time in seconds from input calendar date
+! relative to year_init, month_init, day_init, sec_init unless _ref values passed in
+! For sanity, must pass all four ref values or none
 
-    ! Internal variable
-    logical (kind=log_kind) :: isleap ! Leap year logical
-    character(len=*),parameter :: subname='(set_calendar)'
+  subroutine calendar_date2time(ayear,amon,aday,asec,atimesecs,year_ref,mon_ref,day_ref,sec_ref)
+
+    integer(kind=int_kind), intent(in) :: &
+       ayear,amon,aday,asec               ! year, month, day, sec of atimesecs
+    real (kind=dbl_kind), intent(out) :: atimesecs ! seconds since init date
+    integer(kind=int_kind), intent(in), optional :: &
+       year_ref,mon_ref,day_ref,sec_ref   ! year, month, day, sec reference time
 
-    isleap = .false. ! not a leap year
-    if (mod(year,  4) == 0) isleap = .true.
-    if (mod(year,100) == 0) isleap = .false.
-    if (mod(year,400) == 0) isleap = .true.
-
-    ! Ensure the calendar is set correctly
-    if (isleap) then
-      daycal = daycal366
-      daymo = daymo366
-      dayyr=real(daycal(13), kind=dbl_kind)
-      days_per_year=int(dayyr)
+    ! Internal variable
+    real (kind=dbl_kind) :: secday
+    integer (kind=int_kind) :: elapsed_days       ! since beginning this run
+    integer (kind=int_kind) :: lyear_ref,lmon_ref,lday_ref,lsec_ref   ! local reference year, month, day, sec
+    integer (kind=int_kind) :: cnt
+    character(len=*),parameter :: subname='(calendar_date2time)'
+
+    ! set reference date and check that 0 or 4 optional arguments are passed
+    cnt = 0
+    if (present(year_ref)) then
+       lyear_ref = year_ref
+       cnt = cnt + 1
+    else
+       lyear_ref = year_init
+    endif
+    if (present(mon_ref)) then
+       lmon_ref = mon_ref
+       cnt = cnt + 1
+    else
+       lmon_ref = month_init
+    endif
+    if (present(day_ref)) then
+       lday_ref = day_ref
+       cnt = cnt + 1
+    else
+       lday_ref = day_init
+    endif
+    if (present(sec_ref)) then
+       lsec_ref = sec_ref
+       cnt = cnt + 1
     else
-      daycal = daycal365
-      daymo = daymo365
-      dayyr=real(daycal(13), kind=dbl_kind)
-      days_per_year=int(dayyr)
+       lsec_ref = sec_init
     endif
+    if (cnt /= 0 .and. 
cnt /= 4) then + write(nu_diag,*) trim(subname),' ERROR in ref args, must pass 0 or 4 ' + call abort_ice(subname//'ERROR: in ref args, must pass 0 or 4') + endif + + call icepack_query_parameters(secday_out=secday) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + elapsed_days = compute_days_between(lyear_ref,lmon_ref,lday_ref,ayear,amon,aday) + atimesecs = real(elapsed_days,kind=dbl_kind)*secday + & + real(asec,kind=dbl_kind) - real(lsec_ref,kind=dbl_kind) - end subroutine set_calendar + end subroutine calendar_date2time !======================================================================= +! Compute calendar date from input time in seconds +! relative to year_init, month_init, day_init, sec_init or ref data if passed. +! For sanity, require all four or no ref values. +! Implemented to minimize accumulating errors and avoid overflows +! and perform well. - real(kind=dbl_kind) function hc_jday(iyear,imm,idd,ihour) -!-------------------------------------------------------------------- -! converts "calendar" date to HYCOM julian day: -! 1) year,month,day,hour (4 arguments) -! 2) year,doy,hour (3 arguments) + subroutine calendar_time2date(atimesecs,ayear,amon,aday,asec,year_ref,mon_ref,day_ref,sec_ref) + + real (kind=dbl_kind), intent(in) :: atimesecs ! seconds since init date + integer(kind=int_kind), intent(out) :: & + ayear,amon,aday,asec ! year, month, day, sec of timesecs + integer(kind=int_kind), intent(in), optional :: & + year_ref,mon_ref,day_ref,sec_ref ! year, month, day, sec reference time + + ! Internal variable + integer (kind=int_kind) :: ndays + integer (kind=int_kind) :: tyear, tmon, tday, tsec ! temporaries + integer (kind=int_kind) :: tdaymo (months_per_year) ! days per month + integer (kind=int_kind) :: tdaycal(months_per_year+1) ! day count per month + integer (kind=int_kind) :: tdayyr ! days in year + real (kind=dbl_kind) :: secday, rdays, ltimesecs + integer (kind=int_kind) :: lyear_ref,lmon_ref,lday_ref,lsec_ref ! local reference year, month, day, sec + integer (kind=int_kind) :: cnt + character(len=*),parameter :: subname='(calendar_time2date)' + + call icepack_query_parameters(secday_out=secday) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + ! we could allow negative atimesecs, but this shouldn't be needed + if (atimesecs < 0._dbl_kind) then + write(nu_diag,*) trim(subname),' ERROR in atimesecs ',atimesecs + call abort_ice(subname//'ERROR: in atimesecs') + endif + + ! set reference date and check that 0 or 4 optional arguments are passed + cnt = 0 + if (present(year_ref)) then + lyear_ref = year_ref + cnt = cnt + 1 + else + lyear_ref = year_init + endif + if (present(mon_ref)) then + lmon_ref = mon_ref + cnt = cnt + 1 + else + lmon_ref = month_init + endif + if (present(day_ref)) then + lday_ref = day_ref + cnt = cnt + 1 + else + lday_ref = day_init + endif + if (present(sec_ref)) then + lsec_ref = sec_ref + cnt = cnt + 1 + else + lsec_ref = sec_init + endif + if (cnt /= 0 .and. cnt /= 4) then + write(nu_diag,*) trim(subname),' ERROR in ref args, must pass 0 or 4 ' + call abort_ice(subname//'ERROR: in ref args, must pass 0 or 4') + endif + +! ------------------------------------------------------------------- +! tcraig, this is risky because atimesecs is real and could be very large +! ayear = lyear_ref +! amon = lmon_ref +! aday = lday_ref +! asec = lsec_ref ! -! 
HYCOM model day is calendar days since 31/12/1900 -!-------------------------------------------------------------------- - real(kind=dbl_kind) :: dtime - integer(kind=int_kind) :: iyear,iyr,imm,idd,idoy,ihr - integer(kind=int_kind), optional :: ihour - - if (present(ihour)) then - !----------------- - ! yyyy mm dd HH - !----------------- - iyr=iyear-1901 - if (mod(iyr,4)==3) then - dtime = floor(365.25_dbl_kind*iyr)*c1 + daycal366(imm)*c1 + idd*c1 + ihour/24._dbl_kind - else - dtime = floor(365.25_dbl_kind*iyr)*c1 + daycal365(imm)*c1 + idd*c1 + ihour/24._dbl_kind - endif - - else - !----------------- - ! yyyy DOY HH - !----------------- - ihr = idd ! redefine input - idoy = imm ! redefine input - iyr = iyear - 1901 - dtime = floor(365.25_dbl_kind*iyr)*c1 + idoy*c1 + ihr/24._dbl_kind - - endif - - hc_jday=dtime - - return - end function hc_jday +! call update_date(ayear,amon,aday,asec,dsec=nint(atimesecs)) +! return +! ------------------------------------------------------------------- + + ! initial guess + tyear = lyear_ref + tmon = 1 + tday = 1 + tsec = 0 + + ! add initial seconds to timesecs and treat lsec_ref as zero + ltimesecs = atimesecs + real(lsec_ref,kind=dbl_kind) + + ! first estimate of tyear + call compute_calendar_data(tyear,tdaymo,tdaycal,tdayyr) + rdays = ltimesecs/secday + tyear = tyear + int(rdays)/tdayyr + + ! reduce estimate of tyear if ndays > rdays + ndays = compute_days_between(lyear_ref,lmon_ref,lday_ref,tyear,tmon,tday) + if (ndays > int(rdays)) then + tyear = tyear - (ndays - int(rdays))/tdayyr - 1 + ndays = compute_days_between(lyear_ref,lmon_ref,lday_ref,tyear,tmon,tday) + endif + call compute_calendar_data(tyear,tdaymo,tdaycal,tdayyr) + + ! compute residual days, switch to integers, compute date + rdays = ltimesecs/secday + tday = int(rdays) - ndays + 1 + + do while (tday > tdaymo(tmon)) + tday = tday - tdaymo(tmon) + tmon = tmon + 1 + do while (tmon > months_per_year) + tmon = tmon - months_per_year + tyear = tyear + 1 + call compute_calendar_data(tyear,tdaymo,tdaycal,tdayyr) + enddo + enddo + + ndays = compute_days_between(lyear_ref,lmon_ref,lday_ref,tyear,tmon,tday) + tsec = int(ltimesecs - real(ndays,kind=dbl_kind)*secday) + if (tsec > secday) then + write(nu_diag,*) trim(subname),' ERROR in seconds, ',tyear,tmon,tday,tsec + call abort_ice(subname//'ERROR: in seconds') + endif + + ayear = tyear + amon = tmon + aday = tday + asec = tsec + + end subroutine calendar_time2date !======================================================================= diff --git a/cicecore/shared/ice_distribution.F90 b/cicecore/shared/ice_distribution.F90 index 8c5808820..1a23b63be 100644 --- a/cicecore/shared/ice_distribution.F90 +++ b/cicecore/shared/ice_distribution.F90 @@ -12,7 +12,7 @@ module ice_distribution use ice_kinds_mod use ice_domain_size, only: max_blocks use ice_communicate, only: my_task, master_task, create_communicator - use ice_blocks, only: nblocks_x, nblocks_y, nblocks_tot + use ice_blocks, only: nblocks_x, nblocks_y, nblocks_tot, debug_blocks use ice_exit, only: abort_ice use ice_fileunits, only: nu_diag @@ -154,8 +154,6 @@ subroutine create_local_block_ids(block_ids, distribution) integer (int_kind) :: & n, bcount ! dummy counters - logical (log_kind) :: dbug - character(len=*),parameter :: subname='(create_local_block_ids)' !----------------------------------------------------------------------- @@ -178,15 +176,14 @@ subroutine create_local_block_ids(block_ids, distribution) ! !----------------------------------------------------------------------- -! 
dbug = .true. - dbug = .false. if (bcount > 0) then do n=1,size(distribution%blockLocation) if (distribution%blockLocation(n) == my_task+1) then block_ids(distribution%blockLocalID(n)) = n - if (dbug) then - write(nu_diag,*) subname,'block id, proc, local_block: ', & + if (debug_blocks .and. my_task == master_task) then + write(nu_diag,'(2a,3i8)') & + subname,' block id, proc, local_block: ', & block_ids(distribution%blockLocalID(n)), & distribution%blockLocation(n), & distribution%blockLocalID(n) @@ -402,7 +399,7 @@ subroutine ice_distributionGet(distribution,& numLocalBlocks ! number of blocks distributed to this ! local processor - integer (int_kind), dimension(:), pointer, optional :: & + integer (int_kind), dimension(:), optional :: & blockLocation ,&! processor location for all blocks blockLocalID ,&! local block id for all blocks blockGlobalID ! global block id for each local block @@ -422,7 +419,7 @@ subroutine ice_distributionGet(distribution,& if (present(blockLocation)) then if (associated(distribution%blockLocation)) then - blockLocation => distribution%blockLocation + blockLocation = distribution%blockLocation else call abort_ice(subname//'ERROR: blockLocation not allocated') return @@ -575,7 +572,11 @@ function create_distrb_cart(nprocs, workPerBlock) result(newDistrb) nprocsX, &! num of procs in x for global domain nprocsY, &! num of procs in y for global domain numBlocksXPerProc, &! num of blocks per processor in x - numBlocksYPerProc ! num of blocks per processor in y + numBlocksYPerProc, &! num of blocks per processor in y + numBlocksPerProc ! required number of blocks per processor + + character(len=char_len) :: & + numBlocksPerProc_str ! required number of blocks per processor (as string) character(len=*),parameter :: subname='(create_distrb_cart)' @@ -628,6 +629,14 @@ function create_distrb_cart(nprocs, workPerBlock) result(newDistrb) numBlocksXPerProc = (nblocks_x-1)/nprocsX + 1 numBlocksYPerProc = (nblocks_y-1)/nprocsY + 1 + ! Check if max_blocks is too small + numBlocksPerProc = numBlocksXPerProc * numBlocksYPerProc + if (numBlocksPerProc > max_blocks) then + write(numBlocksPerProc_str, '(i2)') numBlocksPerProc + call abort_ice(subname//'ERROR: max_blocks too small (need at least '//trim(numBlocksPerProc_str)//')') + return + endif + do j=1,nprocsY do i=1,nprocsX processor = (j-1)*nprocsX + i ! 
number the processors @@ -786,6 +795,8 @@ function create_distrb_rake(nprocs, workPerBlock) result(newDistrb) maxWork = maxval(workPerBlock) if (numOcnBlocks <= 2*nprocs) then + if (my_task == master_task) & + write(nu_diag,*) subname,' 1d rake on entire distribution' allocate(priority(nblocks_tot), stat=istat) if (istat > 0) then @@ -807,7 +818,7 @@ function create_distrb_rake(nprocs, workPerBlock) result(newDistrb) end do end do - allocate(workTmp(nblocks_tot), procTmp(nblocks_tot), stat=istat) + allocate(workTmp(nprocs), procTmp(nprocs), stat=istat) if (istat > 0) then call abort_ice( & 'create_distrb_rake: error allocating procTmp') @@ -841,6 +852,8 @@ function create_distrb_rake(nprocs, workPerBlock) result(newDistrb) !---------------------------------------------------------------------- else + if (my_task == master_task) & + write(nu_diag,*) subname,' rake in each direction' call proc_decomposition(dist%nprocs, nprocsX, nprocsY) @@ -996,6 +1009,10 @@ function create_distrb_rake(nprocs, workPerBlock) result(newDistrb) if (pid > 0) then procTmp(pid) = procTmp(pid) + 1 + if (procTmp(pid) > max_blocks) then + call abort_ice(subname//'ERROR: max_blocks too small') + return + endif newDistrb%blockLocalID (n) = procTmp(pid) newDistrb%blockIndex(pid,procTmp(pid)) = n else @@ -1413,7 +1430,7 @@ end function create_distrb_spiralcenter function create_distrb_wghtfile(nprocs, workPerBlock) result(newDistrb) ! This function creates a distribution of blocks across processors -! using a simple wghtfile algorithm. Mean for prescribed ice or +! using a simple wghtfile algorithm. Meant for prescribed ice or ! standalone CAM mode. integer (int_kind), intent(in) :: & @@ -2094,8 +2111,6 @@ function create_distrb_spacecurve(nprocs,work_per_block) ii,extra,tmp1, &! loop tempories used for s1,ig ! partitioning curve - logical, parameter :: Debug = .FALSE. - type (factor_t) :: xdim,ydim integer (int_kind) :: it,jj,i2,j2 @@ -2189,9 +2204,9 @@ function create_distrb_spacecurve(nprocs,work_per_block) call GenSpaceCurve(Mesh) Mesh = Mesh + 1 ! make it 1-based indexing - if(Debug) then - if(my_task ==0) call PrintCurve(Mesh) - endif +! if (debug_blocks) then +! if (my_task == master_task) call PrintCurve(Mesh) +! endif !----------------------------------------------- ! Reindex the SFC to address internal sub-blocks @@ -2238,8 +2253,8 @@ function create_distrb_spacecurve(nprocs,work_per_block) endif enddo nblocks=ii - if(Debug) then - if(my_task==0) call PrintCurve(Mesh3) + if (debug_blocks) then + if (my_task == master_task) call PrintCurve(Mesh3) endif !---------------------------------------------------- @@ -2258,8 +2273,8 @@ function create_distrb_spacecurve(nprocs,work_per_block) ! ! First region gets nblocksL+1 blocks per partition ! Second region gets nblocksL blocks per partition - if(Debug) print *,'nprocs,extra,nblocks,nblocksL,s1: ', & - nprocs,extra,nblocks,nblocksL,s1 +! if(debug_blocks) write(nu_diag,*) 'nprocs,extra,nblocks,nblocksL,s1: ', & +! nprocs,extra,nblocks,nblocksL,s1 !----------------------------------------------------------- ! 
Use the SFC to partition the blocks across processors @@ -2304,6 +2319,10 @@ function create_distrb_spacecurve(nprocs,work_per_block) if(pid>0) then proc_tmp(pid) = proc_tmp(pid) + 1 + if (proc_tmp(pid) > max_blocks) then + call abort_ice(subname//'ERROR: max_blocks too small') + return + endif dist%blockLocalID(n) = proc_tmp(pid) dist%blockIndex(pid,proc_tmp(pid)) = n else @@ -2326,11 +2345,11 @@ function create_distrb_spacecurve(nprocs,work_per_block) endif enddo - if(Debug) then - if(my_task==0) print *,'dist%blockLocation:= ',dist%blockLocation - print *,'IAM: ',my_task,' SpaceCurve: Number of blocks {total,local} :=', & - nblocks_tot,nblocks,proc_tmp(my_task+1) - endif +! if (debug_blocks) then +! if (my_task == master_task) write(nu_diag,*) 'dist%blockLocation:= ',dist%blockLocation +! write(nu_diag,*) 'IAM: ',my_task,' SpaceCurve: Number of blocks {total,local} :=', & +! nblocks_tot,nblocks,proc_tmp(my_task+1) +! endif !--------------------------------- ! Deallocate temporary arrays !--------------------------------- diff --git a/cicecore/shared/ice_init_column.F90 b/cicecore/shared/ice_init_column.F90 index b3937c0cd..1362e055e 100644 --- a/cicecore/shared/ice_init_column.F90 +++ b/cicecore/shared/ice_init_column.F90 @@ -188,7 +188,7 @@ subroutine init_shortwave swgrid, igrid use ice_blocks, only: block, get_block, nx_block, ny_block use ice_calendar, only: dt, calendar_type, & - days_per_year, nextsw_cday, yday, sec + days_per_year, nextsw_cday, yday, msec use ice_diagnostics, only: npnt, print_points, pmloc, piloc, pjloc use ice_domain, only: nblocks, blocks_ice use ice_flux, only: alvdf, alidf, alvdr, alidr, & @@ -356,7 +356,7 @@ subroutine init_shortwave calendar_type=calendar_type, & days_per_year=days_per_year, & nextsw_cday=nextsw_cday, yday=yday, & - sec=sec, & + sec=msec, & kaer_tab=kaer_tab, kaer_bc_tab=kaer_bc_tab(:,:), & waer_tab=waer_tab, waer_bc_tab=waer_bc_tab(:,:), & gaer_tab=gaer_tab, gaer_bc_tab=gaer_bc_tab(:,:), & @@ -408,7 +408,7 @@ subroutine init_shortwave do i = ilo, ihi if (aicen(i,j,n,iblk) > puny) then - + alvdf(i,j,iblk) = alvdf(i,j,iblk) & + alvdfn(i,j,n,iblk)*aicen(i,j,n,iblk) alidf(i,j,iblk) = alidf(i,j,iblk) & @@ -417,7 +417,7 @@ subroutine init_shortwave + alvdrn(i,j,n,iblk)*aicen(i,j,n,iblk) alidr(i,j,iblk) = alidr(i,j,iblk) & + alidrn(i,j,n,iblk)*aicen(i,j,n,iblk) - + netsw = swvdr(i,j,iblk) + swidr(i,j,iblk) & + swvdf(i,j,iblk) + swidf(i,j,iblk) if (netsw > puny) then ! sun above horizon @@ -428,12 +428,12 @@ subroutine init_shortwave albpnd(i,j,iblk) = albpnd(i,j,iblk) & + albpndn(i,j,n,iblk)*aicen(i,j,n,iblk) endif - + apeff_ai(i,j,iblk) = apeff_ai(i,j,iblk) & + apeffn(i,j,n,iblk)*aicen(i,j,n,iblk) snowfrac(i,j,iblk) = snowfrac(i,j,iblk) & + snowfracn(i,j,n,iblk)*aicen(i,j,n,iblk) - + endif ! aicen > puny enddo ! i diff --git a/cicecore/shared/ice_spacecurve.F90 b/cicecore/shared/ice_spacecurve.F90 index 78b256b8f..931b2312b 100644 --- a/cicecore/shared/ice_spacecurve.F90 +++ b/cicecore/shared/ice_spacecurve.F90 @@ -13,12 +13,14 @@ module ice_spacecurve ! !USES: use ice_kinds_mod + use ice_blocks, only: debug_blocks use ice_communicate, only: my_task, master_task use ice_exit, only: abort_ice use ice_fileunits use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted implicit none + private ! !PUBLIC TYPES: @@ -30,13 +32,13 @@ module ice_spacecurve ! 
!PUBLIC MEMBER FUNCTIONS: - public :: GenSpaceCurve, & - IsLoadBalanced + public :: GenSpaceCurve public :: Factor, & IsFactorable, & PrintFactor, & ProdFactor, & + PrintCurve, & MatchFactor ! !PRIVATE MEMBER FUNCTIONS: @@ -60,8 +62,6 @@ module ice_spacecurve maxdim, &! dimensionality of entire space vcnt ! visitation count - logical :: verbose=.FALSE. - type (factor_t), public :: fact ! stores the factorization !EOP @@ -118,8 +118,6 @@ recursive function Cinco(l,type,ma,md,ja,jd) result(ierr) ltype, &! type of SFC on next level ll ! next level down - logical :: debug = .FALSE. - character(len=*),parameter :: subname='(Cinco)' !----------------------------------------------------------------------- @@ -136,12 +134,12 @@ recursive function Cinco(l,type,ma,md,ja,jd) result(ierr) ljd = lmd if(ll .gt. 1) then - if(debug) write(*,21) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,21) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'Cinco: After Position [0,0] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'Cinco: After Position [0,0] ',pos endif !-------------------------------------------------------------- @@ -153,12 +151,12 @@ recursive function Cinco(l,type,ma,md,ja,jd) result(ierr) ljd = lmd if(ll .gt. 1) then - if(debug) write(*,22) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,22) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'After Position [1,0] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'After Position [1,0] ',pos endif !-------------------------------------------------------------- @@ -170,12 +168,12 @@ recursive function Cinco(l,type,ma,md,ja,jd) result(ierr) ljd = lmd if(ll .gt. 1) then - if(debug) write(*,23) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,23) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'After Position [2,0] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'After Position [2,0] ',pos endif !-------------------------------------------------------------- @@ -187,12 +185,12 @@ recursive function Cinco(l,type,ma,md,ja,jd) result(ierr) ljd = lmd if(ll .gt. 1) then - if(debug) write(*,24) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,24) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'After Position [2,1] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'After Position [2,1] ',pos endif !-------------------------------------------------------------- @@ -204,12 +202,12 @@ recursive function Cinco(l,type,ma,md,ja,jd) result(ierr) ljd = -md if(ll .gt. 
1) then - if(debug) write(*,25) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,25) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'After Position [2,2] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'After Position [2,2] ',pos endif !-------------------------------------------------------------- @@ -221,12 +219,12 @@ recursive function Cinco(l,type,ma,md,ja,jd) result(ierr) ljd = lmd if(ll .gt. 1) then - if(debug) write(*,26) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,26) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'After Position [1,2] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'After Position [1,2] ',pos endif !-------------------------------------------------------------- @@ -238,12 +236,12 @@ recursive function Cinco(l,type,ma,md,ja,jd) result(ierr) ljd = lmd if(ll .gt. 1) then - if(debug) write(*,27) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,27) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'After Position [1,1] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'After Position [1,1] ',pos endif !-------------------------------------------------------------- @@ -255,12 +253,12 @@ recursive function Cinco(l,type,ma,md,ja,jd) result(ierr) ljd = md if(ll .gt. 1) then - if(debug) write(*,28) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,28) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'After Position [0,1] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'After Position [0,1] ',pos endif !-------------------------------------------------------------- @@ -272,12 +270,12 @@ recursive function Cinco(l,type,ma,md,ja,jd) result(ierr) ljd = lmd if(ll .gt. 1) then - if(debug) write(*,29) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,29) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'After Position [0,2] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'After Position [0,2] ',pos endif !-------------------------------------------------------------- @@ -289,12 +287,12 @@ recursive function Cinco(l,type,ma,md,ja,jd) result(ierr) ljd = lmd if(ll .gt. 1) then - if(debug) write(*,30) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. 
my_task==master_task) write(nu_diag,30) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'After Position [0,3] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'After Position [0,3] ',pos endif !-------------------------------------------------------------- @@ -306,12 +304,12 @@ recursive function Cinco(l,type,ma,md,ja,jd) result(ierr) ljd = lmd if(ll .gt. 1) then - if(debug) write(*,31) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,31) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'After Position [0,4] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'After Position [0,4] ',pos endif !-------------------------------------------------------------- @@ -323,12 +321,12 @@ recursive function Cinco(l,type,ma,md,ja,jd) result(ierr) ljd = -md if(ll .gt. 1) then - if(debug) write(*,32) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,32) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'After Position [1,4] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'After Position [1,4] ',pos endif !-------------------------------------------------------------- @@ -340,12 +338,12 @@ recursive function Cinco(l,type,ma,md,ja,jd) result(ierr) ljd = md if(ll .gt. 1) then - if(debug) write(*,33) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,33) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'After Position [1,3] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'After Position [1,3] ',pos endif !-------------------------------------------------------------- @@ -357,12 +355,12 @@ recursive function Cinco(l,type,ma,md,ja,jd) result(ierr) ljd = lmd if(ll .gt. 1) then - if(debug) write(*,34) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,34) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'After Position [2,3] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'After Position [2,3] ',pos endif !-------------------------------------------------------------- @@ -374,12 +372,12 @@ recursive function Cinco(l,type,ma,md,ja,jd) result(ierr) ljd = lmd if(ll .gt. 1) then - if(debug) write(*,35) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,35) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. 
my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'After Position [2,4] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'After Position [2,4] ',pos endif !-------------------------------------------------------------- @@ -391,12 +389,12 @@ recursive function Cinco(l,type,ma,md,ja,jd) result(ierr) ljd = lmd if(ll .gt. 1) then - if(debug) write(*,36) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,36) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'After Position [3,4] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'After Position [3,4] ',pos endif !-------------------------------------------------------------- @@ -408,12 +406,12 @@ recursive function Cinco(l,type,ma,md,ja,jd) result(ierr) ljd = -md if(ll .gt. 1) then - if(debug) write(*,37) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,37) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'After Position [4,4] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'After Position [4,4] ',pos endif !-------------------------------------------------------------- @@ -425,12 +423,12 @@ recursive function Cinco(l,type,ma,md,ja,jd) result(ierr) ljd = lmd if(ll .gt. 1) then - if(debug) write(*,38) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,38) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'After Position [4,3] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'After Position [4,3] ',pos endif !-------------------------------------------------------------- @@ -442,12 +440,12 @@ recursive function Cinco(l,type,ma,md,ja,jd) result(ierr) ljd = lmd if(ll .gt. 1) then - if(debug) write(*,39) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,39) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'After Position [3,3] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'After Position [3,3] ',pos endif !-------------------------------------------------------------- @@ -459,12 +457,12 @@ recursive function Cinco(l,type,ma,md,ja,jd) result(ierr) ljd = md if(ll .gt. 1) then - if(debug) write(*,40) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,40) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'After Position [3,2] ',pos + if(debug_blocks .and. 
my_task==master_task) write(nu_diag,*) 'After Position [3,2] ',pos endif !-------------------------------------------------------------- @@ -476,12 +474,12 @@ recursive function Cinco(l,type,ma,md,ja,jd) result(ierr) ljd = -md if(ll .gt. 1) then - if(debug) write(*,41) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,41) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'After Position [4,2] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'After Position [4,2] ',pos endif !-------------------------------------------------------------- @@ -493,12 +491,12 @@ recursive function Cinco(l,type,ma,md,ja,jd) result(ierr) ljd = lmd if(ll .gt. 1) then - if(debug) write(*,42) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,42) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'After Position [4,1] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'After Position [4,1] ',pos endif !-------------------------------------------------------------- @@ -510,12 +508,12 @@ recursive function Cinco(l,type,ma,md,ja,jd) result(ierr) ljd = lmd if(ll .gt. 1) then - if(debug) write(*,43) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,43) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'After Position [3,1] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'After Position [3,1] ',pos endif !-------------------------------------------------------------- @@ -527,12 +525,12 @@ recursive function Cinco(l,type,ma,md,ja,jd) result(ierr) ljd = md if(ll .gt. 1) then - if(debug) write(*,44) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,44) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'After Position [3,0] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'After Position [3,0] ',pos endif !-------------------------------------------------------------- @@ -544,12 +542,12 @@ recursive function Cinco(l,type,ma,md,ja,jd) result(ierr) ljd = jd if(ll .gt. 1) then - if(debug) write(*,45) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,45) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'After Position [4,0] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'After Position [4,0] ',pos endif 21 format('Call Cinco Pos [0,0] Level ',i1,' at (',i2,',',i2,')',4(i3)) @@ -632,8 +630,6 @@ recursive function PeanoM(l,type,ma,md,ja,jd) result(ierr) ltype, &! 
type of SFC on next level ll ! next level down - logical :: debug = .FALSE. - character(len=*),parameter :: subname='(PeanoM)' !----------------------------------------------------------------------- @@ -650,12 +646,12 @@ recursive function PeanoM(l,type,ma,md,ja,jd) result(ierr) ljd = lmd if(ll .gt. 1) then - if(debug) write(*,21) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,21) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'PeanoM: After Position [0,0] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'PeanoM: After Position [0,0] ',pos endif @@ -667,12 +663,12 @@ recursive function PeanoM(l,type,ma,md,ja,jd) result(ierr) lja = lma ljd = lmd if(ll .gt. 1) then - if(debug) write(*,22) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,22) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'PeanoM: After Position [0,1] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'PeanoM: After Position [0,1] ',pos endif !-------------------------------------------------------------- @@ -683,12 +679,12 @@ recursive function PeanoM(l,type,ma,md,ja,jd) result(ierr) lja = lma ljd = lmd if(ll .gt. 1) then - if(debug) write(*,23) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,23) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'PeanoM: After Position [0,2] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'PeanoM: After Position [0,2] ',pos endif !-------------------------------------------------------------- @@ -699,12 +695,12 @@ recursive function PeanoM(l,type,ma,md,ja,jd) result(ierr) lja = lma ljd = lmd if(ll .gt. 1) then - if(debug) write(*,24) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,24) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'PeanoM: After Position [1,2] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'PeanoM: After Position [1,2] ',pos endif @@ -717,12 +713,12 @@ recursive function PeanoM(l,type,ma,md,ja,jd) result(ierr) ljd = -lmd if(ll .gt. 1) then - if(debug) write(*,25) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,25) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'PeanoM: After Position [2,2] ',pos + if(debug_blocks .and. 
my_task==master_task) write(nu_diag,*) 'PeanoM: After Position [2,2] ',pos endif !-------------------------------------------------------------- @@ -734,12 +730,12 @@ recursive function PeanoM(l,type,ma,md,ja,jd) result(ierr) ljd = lmd if(ll .gt. 1) then - if(debug) write(*,26) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,26) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'PeanoM: After Position [2,1] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'PeanoM: After Position [2,1] ',pos endif !-------------------------------------------------------------- @@ -751,12 +747,12 @@ recursive function PeanoM(l,type,ma,md,ja,jd) result(ierr) ljd = lmd if(ll .gt. 1) then - if(debug) write(*,27) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,27) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'PeanoM: After Position [1,1] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'PeanoM: After Position [1,1] ',pos endif @@ -769,12 +765,12 @@ recursive function PeanoM(l,type,ma,md,ja,jd) result(ierr) ljd = -lmd if(ll .gt. 1) then - if(debug) write(*,28) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,28) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'PeanoM: After Position [1,0] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'PeanoM: After Position [1,0] ',pos endif !-------------------------------------------------------------- @@ -786,12 +782,12 @@ recursive function PeanoM(l,type,ma,md,ja,jd) result(ierr) ljd = jd if(ll .gt. 1) then - if(debug) write(*,29) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,29) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'PeanoM: After Position [2,0] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'PeanoM: After Position [2,0] ',pos endif 21 format('Call PeanoM Pos [0,0] Level ',i1,' at (',i2,',',i2,')',4(i3)) @@ -858,8 +854,6 @@ recursive function Hilbert(l,type,ma,md,ja,jd) result(ierr) ltype, &! type of SFC on next level ll ! next level down - logical :: debug = .FALSE. - character(len=*),parameter :: subname='(Hilbert)' !----------------------------------------------------------------------- @@ -875,12 +869,12 @@ recursive function Hilbert(l,type,ma,md,ja,jd) result(ierr) ljd = lmd if(ll .gt. 1) then - if(debug) write(*,21) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,21) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. 
my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'Hilbert: After Position [0,0] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'Hilbert: After Position [0,0] ',pos endif @@ -892,12 +886,12 @@ recursive function Hilbert(l,type,ma,md,ja,jd) result(ierr) lja = lma ljd = lmd if(ll .gt. 1) then - if(debug) write(*,22) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,22) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'Hilbert: After Position [0,1] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'Hilbert: After Position [0,1] ',pos endif @@ -910,12 +904,12 @@ recursive function Hilbert(l,type,ma,md,ja,jd) result(ierr) ljd = -md if(ll .gt. 1) then - if(debug) write(*,23) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,23) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'Hilbert: After Position [1,1] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'Hilbert: After Position [1,1] ',pos endif !-------------------------------------------------------------- @@ -927,12 +921,12 @@ recursive function Hilbert(l,type,ma,md,ja,jd) result(ierr) ljd = jd if(ll .gt. 1) then - if(debug) write(*,24) ll-1,pos(0),pos(1),lma,lmd,lja,ljd + if(debug_blocks .and. my_task==master_task) write(nu_diag,24) ll-1,pos(0),pos(1),lma,lmd,lja,ljd ierr = GenCurve(ll-1,ltype,lma,lmd,lja,ljd) - if(debug) call PrintCurve(ordered) + if(debug_blocks .and. my_task==master_task) call PrintCurve(ordered) else ierr = IncrementCurve(lja,ljd) - if(debug) print *,'Hilbert: After Position [1,0] ',pos + if(debug_blocks .and. my_task==master_task) write(nu_diag,*) 'Hilbert: After Position [1,0] ',pos endif 21 format('Call Hilbert Pos [0,0] Level ',i1,' at (',i2,',',i2,')',4(i3)) @@ -1048,6 +1042,7 @@ function log2( n) end function log2 !*********************************************************************** +#ifdef UNDEPRECATE_IsLoadBalanced !BOP ! !IROUTINE: IsLoadBalanced ! !INTERFACE: @@ -1095,7 +1090,7 @@ function IsLoadBalanced(nelem,npart) !----------------------------------------------------------------------- end function IsLoadBalanced - +#endif !*********************************************************************** !BOP ! !IROUTINE: GenCurve @@ -1128,6 +1123,7 @@ function GenCurve(l,type,ma,md,ja,jd) result(ierr) !EOP !BOC + logical, save :: f2=.true., f3=.true., f5=.true. ! first calls character(len=*),parameter :: subname='(GenCurve)' !----------------------------------------------------------------------- @@ -1137,11 +1133,17 @@ function GenCurve(l,type,ma,md,ja,jd) result(ierr) !------------------------------------------------- if(type == 2) then + if (f2 .and. my_task == master_task) write(nu_diag,*) subname,' calling Hilbert (2)' ierr = Hilbert(l,type,ma,md,ja,jd) + f2 = .false. elseif ( type == 3) then + if (f3 .and. my_task == master_task) write(nu_diag,*) subname,' calling PeanoM (3)' ierr = PeanoM(l,type,ma,md,ja,jd) + f3 = .false. elseif ( type == 5) then + if (f5 .and. 
my_task == master_task) write(nu_diag,*) subname,' calling Cinco (5)' ierr = Cinco(l,type,ma,md,ja,jd) + f5 = .false. endif !EOP @@ -1210,7 +1212,7 @@ subroutine MatchFactor(fac1,fac2,val,found) found = .false. val1 = FirstFactor(fac1) -!JMD print *,'Matchfactor: found value: ',val1 +!JMD write(nu_diag,*)'Matchfactor: found value: ',val1 found = FindandMark(fac2,val1,.true.) tmp = FindandMark(fac1,val1,found) if (found) then @@ -1245,10 +1247,10 @@ subroutine PrintFactor(msg,fac) integer (int_kind) :: i character(len=*),parameter :: subname='(PrintFactor)' - write(*,*) subname,' ' - write(*,*) subname,'msg = ',trim(msg) - write(*,*) subname,(fac%factors(i),i=1,fac%numfact) - write(*,*) subname,(fac%used(i),i=1,fac%numfact) + write(nu_diag,*) subname,' ' + write(nu_diag,*) subname,'msg = ',trim(msg) + write(nu_diag,*) subname,(fac%factors(i),i=1,fac%numfact) + write(nu_diag,*) subname,(fac%used(i),i=1,fac%numfact) end subroutine PrintFactor @@ -1448,6 +1450,9 @@ subroutine map(l) maxdim=d vcnt=0 + ! tcx, if l is 0, then fact has no factors, just return + if (l == 0) return + type = fact%factors(l) ierr = GenCurve(l,type,0,1,0,1) @@ -1492,113 +1497,113 @@ subroutine PrintCurve(Mesh) gridsize = SIZE(Mesh,dim=1) - write(*,*) subname,":" + write(nu_diag,*) subname,":",gridsize if(gridsize == 2) then - write (*,*) "A Level 1 Hilbert Curve:" - write (*,*) "------------------------" + write (nu_diag,*) "A Level 1 Hilbert Curve:" + write (nu_diag,*) "------------------------" do i=1,gridsize - write(*,2) Mesh(1,i),Mesh(2,i) + write(nu_diag,2) Mesh(1,i),Mesh(2,i) enddo else if(gridsize == 3) then - write (*,*) "A Level 1 Peano Meandering Curve:" - write (*,*) "---------------------------------" + write (nu_diag,*) "A Level 1 Peano Meandering Curve:" + write (nu_diag,*) "---------------------------------" do i=1,gridsize - write(*,3) Mesh(1,i),Mesh(2,i),Mesh(3,i) + write(nu_diag,3) Mesh(1,i),Mesh(2,i),Mesh(3,i) enddo else if(gridsize == 4) then - write (*,*) "A Level 2 Hilbert Curve:" - write (*,*) "------------------------" + write (nu_diag,*) "A Level 2 Hilbert Curve:" + write (nu_diag,*) "------------------------" do i=1,gridsize - write(*,4) Mesh(1,i),Mesh(2,i),Mesh(3,i),Mesh(4,i) + write(nu_diag,4) Mesh(1,i),Mesh(2,i),Mesh(3,i),Mesh(4,i) enddo else if(gridsize == 5) then - write (*,*) "A Level 1 Cinco Curve:" - write (*,*) "------------------------" + write (nu_diag,*) "A Level 1 Cinco Curve:" + write (nu_diag,*) "------------------------" do i=1,gridsize - write(*,5) Mesh(1,i),Mesh(2,i),Mesh(3,i),Mesh(4,i),Mesh(5,i) + write(nu_diag,5) Mesh(1,i),Mesh(2,i),Mesh(3,i),Mesh(4,i),Mesh(5,i) enddo else if(gridsize == 6) then - write (*,*) "A Level 1 Hilbert and Level 1 Peano Curve:" - write (*,*) "------------------------------------------" + write (nu_diag,*) "A Level 1 Hilbert and Level 1 Peano Curve:" + write (nu_diag,*) "------------------------------------------" do i=1,gridsize - write(*,6) Mesh(1,i),Mesh(2,i),Mesh(3,i), & + write(nu_diag,6) Mesh(1,i),Mesh(2,i),Mesh(3,i), & Mesh(4,i),Mesh(5,i),Mesh(6,i) enddo else if(gridsize == 8) then - write (*,*) "A Level 3 Hilbert Curve:" - write (*,*) "------------------------" + write (nu_diag,*) "A Level 3 Hilbert Curve:" + write (nu_diag,*) "------------------------" do i=1,gridsize - write(*,8) Mesh(1,i),Mesh(2,i),Mesh(3,i),Mesh(4,i), & + write(nu_diag,8) Mesh(1,i),Mesh(2,i),Mesh(3,i),Mesh(4,i), & Mesh(5,i),Mesh(6,i),Mesh(7,i),Mesh(8,i) enddo else if(gridsize == 9) then - write (*,*) "A Level 2 Peano Meandering Curve:" - write (*,*) 
"---------------------------------" + write (nu_diag,*) "A Level 2 Peano Meandering Curve:" + write (nu_diag,*) "---------------------------------" do i=1,gridsize - write(*,9) Mesh(1,i),Mesh(2,i),Mesh(3,i),Mesh(4,i), & + write(nu_diag,9) Mesh(1,i),Mesh(2,i),Mesh(3,i),Mesh(4,i), & Mesh(5,i),Mesh(6,i),Mesh(7,i),Mesh(8,i), & Mesh(9,i) enddo else if(gridsize == 10) then - write (*,*) "A Level 1 Hilbert and Level 1 Cinco Curve:" - write (*,*) "---------------------------------" + write (nu_diag,*) "A Level 1 Hilbert and Level 1 Cinco Curve:" + write (nu_diag,*) "---------------------------------" do i=1,gridsize - write(*,10) Mesh(1,i),Mesh(2,i),Mesh(3,i),Mesh(4,i), & + write(nu_diag,10) Mesh(1,i),Mesh(2,i),Mesh(3,i),Mesh(4,i), & Mesh(5,i),Mesh(6,i),Mesh(7,i),Mesh(8,i), & Mesh(9,i),Mesh(10,i) enddo else if(gridsize == 12) then - write (*,*) "A Level 2 Hilbert and Level 1 Peano Curve:" - write (*,*) "------------------------------------------" + write (nu_diag,*) "A Level 2 Hilbert and Level 1 Peano Curve:" + write (nu_diag,*) "------------------------------------------" do i=1,gridsize - write(*,12) Mesh(1,i),Mesh(2,i), Mesh(3,i), Mesh(4,i), & + write(nu_diag,12) Mesh(1,i),Mesh(2,i), Mesh(3,i), Mesh(4,i), & Mesh(5,i),Mesh(6,i), Mesh(7,i), Mesh(8,i), & Mesh(9,i),Mesh(10,i),Mesh(11,i),Mesh(12,i) enddo else if(gridsize == 15) then - write (*,*) "A Level 1 Peano and Level 1 Cinco Curve:" - write (*,*) "------------------------" + write (nu_diag,*) "A Level 1 Peano and Level 1 Cinco Curve:" + write (nu_diag,*) "------------------------" do i=1,gridsize - write(*,15) Mesh(1,i),Mesh(2,i),Mesh(3,i),Mesh(4,i), & + write(nu_diag,15) Mesh(1,i),Mesh(2,i),Mesh(3,i),Mesh(4,i), & Mesh(5,i),Mesh(6,i),Mesh(7,i),Mesh(8,i), & Mesh(9,i),Mesh(10,i),Mesh(11,i),Mesh(12,i), & Mesh(13,i),Mesh(14,i),Mesh(15,i) enddo else if(gridsize == 16) then - write (*,*) "A Level 4 Hilbert Curve:" - write (*,*) "------------------------" + write (nu_diag,*) "A Level 4 Hilbert Curve:" + write (nu_diag,*) "------------------------" do i=1,gridsize - write(*,16) Mesh(1,i),Mesh(2,i),Mesh(3,i),Mesh(4,i), & + write(nu_diag,16) Mesh(1,i),Mesh(2,i),Mesh(3,i),Mesh(4,i), & Mesh(5,i),Mesh(6,i),Mesh(7,i),Mesh(8,i), & Mesh(9,i),Mesh(10,i),Mesh(11,i),Mesh(12,i), & Mesh(13,i),Mesh(14,i),Mesh(15,i),Mesh(16,i) enddo else if(gridsize == 18) then - write (*,*) "A Level 1 Hilbert and Level 2 Peano Curve:" - write (*,*) "------------------------------------------" + write (nu_diag,*) "A Level 1 Hilbert and Level 2 Peano Curve:" + write (nu_diag,*) "------------------------------------------" do i=1,gridsize - write(*,18) Mesh(1,i), Mesh(2,i), Mesh(3,i), Mesh(4,i), & + write(nu_diag,18) Mesh(1,i), Mesh(2,i), Mesh(3,i), Mesh(4,i), & Mesh(5,i), Mesh(6,i), Mesh(7,i), Mesh(8,i), & Mesh(9,i), Mesh(10,i),Mesh(11,i),Mesh(12,i), & Mesh(13,i),Mesh(14,i),Mesh(15,i),Mesh(16,i), & Mesh(17,i),Mesh(18,i) enddo else if(gridsize == 20) then - write (*,*) "A Level 2 Hilbert and Level 1 Cinco Curve:" - write (*,*) "------------------------------------------" + write (nu_diag,*) "A Level 2 Hilbert and Level 1 Cinco Curve:" + write (nu_diag,*) "------------------------------------------" do i=1,gridsize - write(*,20) Mesh(1,i), Mesh(2,i), Mesh(3,i), Mesh(4,i), & + write(nu_diag,20) Mesh(1,i), Mesh(2,i), Mesh(3,i), Mesh(4,i), & Mesh(5,i), Mesh(6,i), Mesh(7,i), Mesh(8,i), & Mesh(9,i), Mesh(10,i),Mesh(11,i),Mesh(12,i), & Mesh(13,i),Mesh(14,i),Mesh(15,i),Mesh(16,i), & Mesh(17,i),Mesh(18,i),Mesh(19,i),Mesh(20,i) enddo else if(gridsize == 24) then - write (*,*) "A Level 3 
Hilbert and Level 1 Peano Curve:"
-      write (*,*) "------------------------------------------"
+      write (nu_diag,*) "A Level 3 Hilbert and Level 1 Peano Curve:"
+      write (nu_diag,*) "------------------------------------------"
       do i=1,gridsize
-         write(*,24) Mesh(1,i), Mesh(2,i), Mesh(3,i), Mesh(4,i), &
+         write(nu_diag,24) Mesh(1,i), Mesh(2,i), Mesh(3,i), Mesh(4,i), &
                     Mesh(5,i), Mesh(6,i), Mesh(7,i), Mesh(8,i), &
                     Mesh(9,i), Mesh(10,i),Mesh(11,i),Mesh(12,i), &
                     Mesh(13,i),Mesh(14,i),Mesh(15,i),Mesh(16,i), &
@@ -1606,10 +1611,10 @@ subroutine PrintCurve(Mesh)
                     Mesh(21,i),Mesh(22,i),Mesh(23,i),Mesh(24,i)
       enddo
    else if(gridsize == 25) then
-      write (*,*) "A Level 2 Cinco Curve:"
-      write (*,*) "------------------------------------------"
+      write (nu_diag,*) "A Level 2 Cinco Curve:"
+      write (nu_diag,*) "------------------------------------------"
       do i=1,gridsize
-         write(*,25) Mesh(1,i), Mesh(2,i), Mesh(3,i), Mesh(4,i), &
+         write(nu_diag,25) Mesh(1,i), Mesh(2,i), Mesh(3,i), Mesh(4,i), &
                     Mesh(5,i), Mesh(6,i), Mesh(7,i), Mesh(8,i), &
                     Mesh(9,i), Mesh(10,i),Mesh(11,i),Mesh(12,i), &
                     Mesh(13,i),Mesh(14,i),Mesh(15,i),Mesh(16,i), &
@@ -1618,10 +1623,10 @@ subroutine PrintCurve(Mesh)
                     Mesh(25,i)
       enddo
    else if(gridsize == 27) then
-      write (*,*) "A Level 3 Peano Meandering Curve:"
-      write (*,*) "---------------------------------"
+      write (nu_diag,*) "A Level 3 Peano Meandering Curve:"
+      write (nu_diag,*) "---------------------------------"
       do i=1,gridsize
-         write(*,27) Mesh(1,i), Mesh(2,i), Mesh(3,i), Mesh(4,i), &
+         write(nu_diag,27) Mesh(1,i), Mesh(2,i), Mesh(3,i), Mesh(4,i), &
                     Mesh(5,i), Mesh(6,i), Mesh(7,i), Mesh(8,i), &
                     Mesh(9,i), Mesh(10,i),Mesh(11,i),Mesh(12,i), &
                     Mesh(13,i),Mesh(14,i),Mesh(15,i),Mesh(16,i), &
@@ -1629,11 +1634,24 @@ subroutine PrintCurve(Mesh)
                     Mesh(21,i),Mesh(22,i),Mesh(23,i),Mesh(24,i), &
                     Mesh(25,i),Mesh(26,i),Mesh(27,i)
       enddo
+   else if(gridsize == 30) then
+      write (nu_diag,*) "A Level 1 Cinco and Level 1 Peano and Level 1 Hilbert Curve:"
+      write (nu_diag,*) "---------------------------------"
+      do i=1,gridsize
+         write(nu_diag,30) Mesh(1,i), Mesh(2,i), Mesh(3,i), Mesh(4,i), &
+                    Mesh(5,i), Mesh(6,i), Mesh(7,i), Mesh(8,i), &
+                    Mesh(9,i), Mesh(10,i),Mesh(11,i),Mesh(12,i), &
+                    Mesh(13,i),Mesh(14,i),Mesh(15,i),Mesh(16,i), &
+                    Mesh(17,i),Mesh(18,i),Mesh(19,i),Mesh(20,i), &
+                    Mesh(21,i),Mesh(22,i),Mesh(23,i),Mesh(24,i), &
+                    Mesh(25,i),Mesh(26,i),Mesh(27,i),Mesh(28,i), &
+                    Mesh(29,i),Mesh(30,i)
+      enddo
    else if(gridsize == 32) then
-      write (*,*) "A Level 5 Hilbert Curve:"
-      write (*,*) "------------------------"
+      write (nu_diag,*) "A Level 5 Hilbert Curve:"
+      write (nu_diag,*) "------------------------"
       do i=1,gridsize
-         write(*,32) Mesh(1,i), Mesh(2,i), Mesh(3,i), Mesh(4,i), &
+         write(nu_diag,32) Mesh(1,i), Mesh(2,i), Mesh(3,i), Mesh(4,i), &
                     Mesh(5,i), Mesh(6,i), Mesh(7,i), Mesh(8,i), &
                     Mesh(9,i), Mesh(10,i),Mesh(11,i),Mesh(12,i), &
                     Mesh(13,i),Mesh(14,i),Mesh(15,i),Mesh(16,i), &
@@ -1659,6 +1677,7 @@ subroutine PrintCurve(Mesh)
 24 format('|',24(i3,'|'))
 25 format('|',25(i3,'|'))
 27 format('|',27(i3,'|'))
+30 format('|',30(i4,'|'))
 32 format('|',32(i4,'|'))
 
 !EOC
@@ -1711,7 +1730,13 @@ subroutine GenSpaceCurve(Mesh)
    fact  = factor(gridsize)
    level = fact%numfact
 
-   if(verbose) print *,'GenSpacecurve: level is ',level
+   if (debug_blocks .and.
my_task==master_task) then + write(nu_diag,*) subname,' dim,size = ',dim,gridsize + write(nu_diag,*) subname,' numfact = ',level + call printfactor(subname,fact) + call flush_fileunit(nu_diag) + endif + allocate(ordered(gridsize,gridsize)) !-------------------------------------------- @@ -1730,61 +1755,10 @@ subroutine GenSpaceCurve(Mesh) deallocate(pos,ordered) -!EOP -!----------------------------------------------------------------------- - end subroutine GenSpaceCurve - recursive subroutine qsort(a) - - integer, intent(inout) :: a(:) - integer :: split - character(len=*),parameter :: subname='(qsort)' - - if(SIZE(a) > 1) then - call partition(a,split) - call qsort(a(:split-1)) - call qsort(a(split:)) - endif - - end subroutine qsort - - subroutine partition(a,marker) - - INTEGER, INTENT(IN OUT) :: a(:) - INTEGER, INTENT(OUT) :: marker - INTEGER :: left, right, pivot, temp - character(len=*),parameter :: subname='(partition)' - - pivot = (a(1) + a(size(a))) / 2 ! Average of first and last elements to prevent quadratic - left = 0 ! behavior with sorted or reverse sorted data - right = size(a) + 1 - - DO WHILE (left < right) - right = right - 1 - DO WHILE (a(right) > pivot) - right = right-1 - END DO - left = left + 1 - DO WHILE (a(left) < pivot) - left = left + 1 - END DO - IF (left < right) THEN - temp = a(left) - a(left) = a(right) - a(right) = temp - END IF - END DO - - IF (left == right) THEN - marker = left + 1 - ELSE - marker = left - END IF - - end subroutine partition - - +!EOC +!----------------------------------------------------------------------- end module ice_spacecurve diff --git a/cicecore/version.txt b/cicecore/version.txt index e16cf8bfe..cfd991555 100644 --- a/cicecore/version.txt +++ b/cicecore/version.txt @@ -1 +1 @@ -CICE 6.1.4 +CICE 6.2.0 diff --git a/configuration/scripts/Makefile b/configuration/scripts/Makefile index 7b39d5c8d..e0b7799d6 100644 --- a/configuration/scripts/Makefile +++ b/configuration/scripts/Makefile @@ -75,7 +75,7 @@ AR := ar .SUFFIXES: .SUFFIXES: .F90 .F .c .o -.PHONY: all cice libcice targets target db_files db_flags clean realclean +.PHONY: all cice libcice targets target db_files db_flags clean realclean helloworld calchk all: $(EXEC) cice: $(EXEC) @@ -92,7 +92,9 @@ cice: $(EXEC) targets: @echo " " - @echo "Supported Makefile Targets are: cice, libcice, makdep, depends, clean, realclean, targets, db_files, db_flags" + @echo "Supported Makefile Targets are: cice, libcice, makdep, depends, clean, realclean" + @echo " Diagnostics: targets, db_files, db_flags" + @echo " Unit Tests : helloworld, calchk" target: targets db_files: @@ -134,6 +136,20 @@ $(DEPGEN): $(OBJS_DEPGEN) @ echo "Building makdep" $(SCC) -o $@ $(CFLAGS_HOST) $< +#------------------------------------------------------------------------------- +# unit tests +#------------------------------------------------------------------------------- + +# this builds all dependent source code automatically even though only a subset might actually be used +# this is no different than the cice target and in fact the binary is called cice +# it exists just to create separation as needed for unit tests +calchk: $(EXEC) + +# this builds just a subset of source code specified explicitly and requires a separate target +HWOBJS := helloworld.o +helloworld: $(HWOBJS) + $(LD) -o $(EXEC) $(LDFLAGS) $(HWOBJS) $(ULIBS) $(SLIBS) + #------------------------------------------------------------------------------- # build rules: MACFILE, cmd-line, or env vars must provide the needed macros 
#------------------------------------------------------------------------------- diff --git a/configuration/scripts/cice.batch.csh b/configuration/scripts/cice.batch.csh index 6d1f735a4..902abb56b 100755 --- a/configuration/scripts/cice.batch.csh +++ b/configuration/scripts/cice.batch.csh @@ -105,11 +105,20 @@ cat >> ${jobfile} << EOFB EOFB else if (${ICE_MACHINE} =~ onyx*) then +# special for onyx with 44 cores per node and constraint on mpiprocs +set tpn1 = ${taskpernode} +if (${taskpernode} < 44) set tpn1 = 22 +if (${taskpernode} < 22) set tpn1 = 11 +if (${taskpernode} < 11) set tpn1 = 4 +if (${taskpernode} < 4) set tpn1 = 2 +if (${taskpernode} < 2) set tpn1 = 1 +@ nn1 = ${ntasks} / ${tpn1} +if (${nn1} * ${tpn1} < ${ntasks}) @ nn1 = $nn1 + 1 cat >> ${jobfile} << EOFB #PBS -N ${ICE_CASENAME} #PBS -q ${queue} #PBS -A ${acct} -#PBS -l select=${nnodes}:ncpus=${maxtpn}:mpiprocs=${taskpernode} +#PBS -l select=${nn1}:ncpus=${maxtpn}:mpiprocs=${tpn1} #PBS -l walltime=${batchtime} #PBS -j oe ###PBS -M username@domain.com @@ -133,6 +142,22 @@ cat >> ${jobfile} << EOFB ###SBATCH --mail-user username@domain.com EOFB +else if (${ICE_MACHINE} =~ compy*) then +if (${runlength} <= 2) set queue = "short" +cat >> ${jobfile} <> ${jobfile} << EOFB #SBATCH -J ${ICE_CASENAME} diff --git a/configuration/scripts/cice.build b/configuration/scripts/cice.build index b9aed44fe..d75d74253 100755 --- a/configuration/scripts/cice.build +++ b/configuration/scripts/cice.build @@ -142,6 +142,10 @@ if !($?ICE_MACHINE_BLDTHRDS) then set ICE_MACHINE_BLDTHRDS = 1 endif +if (${directmake} == 0) then + set target = ${ICE_TARGET} +endif + if (${directmake} == 1) then echo "make ${target}" ${ICE_MACHINE_MAKE} -j ${ICE_MACHINE_BLDTHRDS} VPFILE=Filepath EXEC=${ICE_RUNDIR}/cice \ @@ -185,12 +189,12 @@ if (${quiet} == "true") then echo " quiet mode on... patience" ${ICE_MACHINE_MAKE} -j ${ICE_MACHINE_BLDTHRDS} VPFILE=Filepath EXEC=${ICE_RUNDIR}/cice \ -f ${ICE_CASEDIR}/Makefile MACFILE=${ICE_CASEDIR}/Macros.${ICE_MACHCOMP} \ - DEPFILE=${ICE_CASEDIR}/makdep.c cice >& ${ICE_BLDLOG_FILE} + DEPFILE=${ICE_CASEDIR}/makdep.c ${target} >& ${ICE_BLDLOG_FILE} set bldstat = ${status} else ${ICE_MACHINE_MAKE} -j ${ICE_MACHINE_BLDTHRDS} VPFILE=Filepath EXEC=${ICE_RUNDIR}/cice \ -f ${ICE_CASEDIR}/Makefile MACFILE=${ICE_CASEDIR}/Macros.${ICE_MACHCOMP} \ - DEPFILE=${ICE_CASEDIR}/makdep.c cice |& tee ${ICE_BLDLOG_FILE} + DEPFILE=${ICE_CASEDIR}/makdep.c ${target} |& tee ${ICE_BLDLOG_FILE} set bldstat = ${status} endif diff --git a/configuration/scripts/cice.launch.csh b/configuration/scripts/cice.launch.csh index a05b3a9d3..7d45a387f 100755 --- a/configuration/scripts/cice.launch.csh +++ b/configuration/scripts/cice.launch.csh @@ -81,6 +81,18 @@ srun --cpu-bind=cores ./cice >&! \$ICE_RUNLOG_FILE EOFR endif +#======= +else if (${ICE_MACHINE} =~ compy*) then +if (${ICE_COMMDIR} =~ serial*) then +cat >> ${jobfile} << EOFR +./cice >&! \$ICE_RUNLOG_FILE +EOFR +else +cat >> ${jobfile} << EOFR +srun --mpi=pmi2 --kill-on-bad-exit --cpu-bind=cores ./cice >&! 
\$ICE_RUNLOG_FILE +EOFR +endif + #======= else if (${ICE_MACHINE} =~ badger*) then if (${ICE_COMMDIR} =~ serial*) then diff --git a/configuration/scripts/cice.run.setup.csh b/configuration/scripts/cice.run.setup.csh index 901671a36..ea8efeb03 100755 --- a/configuration/scripts/cice.run.setup.csh +++ b/configuration/scripts/cice.run.setup.csh @@ -95,9 +95,15 @@ if ( \$status == 0 ) then echo "CICE run completed successfully" echo "\`date\` \${0}: CICE run completed successfully" >> \${ICE_CASEDIR}/README.case else - echo "CICE run did NOT complete" - echo "\`date\` \${0}: CICE run did NOT complete" >> \${ICE_CASEDIR}/README.case - exit -1 + grep 'COMPLETED SUCCESSFULLY' \${checkfile} + if ( \$status == 0 ) then + echo "Run completed successfully" + echo "\`date\` \${0}: Run completed successfully" >> \${ICE_CASEDIR}/README.case + else + echo "CICE run did NOT complete" + echo "\`date\` \${0}: CICE run did NOT complete" >> \${ICE_CASEDIR}/README.case + exit -1 + endif endif if ( \${diagtype} == 0) then diff --git a/configuration/scripts/cice.settings b/configuration/scripts/cice.settings index d2653a29d..3bd85f5f9 100755 --- a/configuration/scripts/cice.settings +++ b/configuration/scripts/cice.settings @@ -13,6 +13,7 @@ setenv ICE_RSTDIR ${ICE_RUNDIR}/restart setenv ICE_HSTDIR ${ICE_RUNDIR}/history setenv ICE_LOGDIR ${ICE_CASEDIR}/logs setenv ICE_DRVOPT standalone/cice +setenv ICE_TARGET cice setenv ICE_IOTYPE netcdf # binary, netcdf, pio1, pio2 setenv ICE_CLEANBUILD true setenv ICE_CPPDEFS "" diff --git a/configuration/scripts/cice_decomp.csh b/configuration/scripts/cice_decomp.csh index b20f8d129..aa1bb9a54 100755 --- a/configuration/scripts/cice_decomp.csh +++ b/configuration/scripts/cice_decomp.csh @@ -44,6 +44,17 @@ else if (${grid} == 'gbox128') then set blckx = 8; set blcky = 8 endif +else if (${grid} == 'gbox180') then + set nxglob = 180 + set nyglob = 180 + if (${cicepes} <= 1) then + set blckx = 180; set blcky = 180 + else if (${cicepes} <= 36) then + set blckx = 30; set blcky = 30 + else + set blckx = 9; set blcky = 9 + endif + else if (${grid} == 'gbox80') then set nxglob = 80 set nyglob = 80 @@ -98,6 +109,12 @@ else if (${grid} == 'tx1') then set blckx = 10; set blcky = 10 endif +# this is for unit testing +else if (${grid} == 'none') then + set nxglob = 1 + set nyglob = 1 + set blckx = 1; set blcky = 1 + else echo "${0:t}: ERROR unknown grid ${grid}" exit -9 diff --git a/configuration/scripts/ice_in b/configuration/scripts/ice_in index f34db14f0..e5fcb9177 100644 --- a/configuration/scripts/ice_in +++ b/configuration/scripts/ice_in @@ -1,9 +1,13 @@ &setup_nml days_per_year = 365 - use_leap_years = .false. - year_init = 1997 + use_leap_years = .true. + year_init = 2005 + month_init = 1 + day_init = 1 + sec_init = 0 istep0 = 0 dt = 3600.0 + npt_unit = '1' npt = 24 ndtd = 1 runtype = 'initial' @@ -25,6 +29,9 @@ diagfreq = 24 diag_type = 'stdout' diag_file = 'ice_diag.d' + debug_model = .false. + debug_model_step = 999999999 + forcing_diag = .false. print_global = .true. print_points = .true. conserv_check = .false. @@ -32,7 +39,6 @@ lonpnt(1) = 0. latpnt(2) = -65. lonpnt(2) = -45. - dbug = .false. histfreq = 'm','x','x','x','x' histfreq_n = 1 , 1 , 1 , 1 , 1 hist_avg = .true. @@ -196,6 +202,7 @@ natmiter = 5 atmiter_conv = 0.0d0 ustar_min = 0.0005 + iceruf = 0.0005 emissivity = 0.985 fbot_xfer_type = 'constant' update_ocn_f = .false. 
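The npt_unit option added to setup_nml above decouples the run length npt from the timestep: npt_unit = '1' keeps the old meaning (npt counts timesteps), while units such as 'd' or 'y' count calendar time, with the authoritative conversion handled in ice_calendar. A minimal Python sketch of that conversion, assuming a no-leap year and omitting the month unit, which needs real calendar logic:

# illustrative only -- the real logic lives in cicecore/shared/ice_calendar.F90
def npt_to_steps(npt, npt_unit, dt):
    """Number of timesteps for a run of npt units with timestep dt (seconds)."""
    seconds_per = {
        's': 1.0,
        'd': 86400.0,
        'y': 365.0 * 86400.0,   # no-leap approximation for this sketch
    }
    if npt_unit == '1':         # npt is already a step count (old behavior)
        return npt
    if npt_unit not in seconds_per:
        raise ValueError('unsupported npt_unit: %s' % npt_unit)
    return int(npt * seconds_per[npt_unit] / dt)

# the set_nml.run10day change below (npt_unit='d', npt=10, with dt=3600)
# gives 240 steps, matching the hard-coded npt = 240 it replaces
assert npt_to_steps(10, 'd', 3600.0) == 240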
@@ -216,7 +223,7 @@ bgc_data_type = 'default' fe_data_type = 'default' ice_data_type = 'default' - fyear_init = 1997 + fyear_init = 2005 ycycle = 1 atm_data_format = 'bin' atm_data_dir = '/glade/u/home/tcraig/cice_data/' @@ -236,12 +243,14 @@ processor_shape = 'slenderX2' distribution_type = 'cartesian' distribution_wght = 'latitude' + distribution_wght_file = 'unknown' ew_boundary_type = 'cyclic' ns_boundary_type = 'open' maskhalo_dyn = .false. maskhalo_remap = .false. maskhalo_bound = .false. add_mpi_barriers = .false. + debug_blocks = .false. / &zbgc_nml diff --git a/configuration/scripts/machines/Macros.banting_intel b/configuration/scripts/machines/Macros.banting_intel index 7ed7f7b5a..2bab45725 100644 --- a/configuration/scripts/machines/Macros.banting_intel +++ b/configuration/scripts/machines/Macros.banting_intel @@ -13,7 +13,7 @@ FFLAGS := -fp-model source -convert big_endian -assume byterecl -ftz -traceb #-xHost ifeq ($(ICE_BLDDEBUG), true) - FFLAGS += -O0 -g -check -fpe0 -ftrapuv -fp-model except -check noarg_temp_created + FFLAGS += -O0 -g -check -fpe0 -ftrapuv -fp-model except -check noarg_temp_created -init=snan,arrays # -heap-arrays 1024 else FFLAGS += -O2 diff --git a/configuration/scripts/machines/Macros.cheyenne_gnu b/configuration/scripts/machines/Macros.cheyenne_gnu index f46d80414..082130f77 100644 --- a/configuration/scripts/machines/Macros.cheyenne_gnu +++ b/configuration/scripts/machines/Macros.cheyenne_gnu @@ -13,6 +13,7 @@ FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) FFLAGS += -O0 -g -fcheck=bounds -finit-real=nan -fimplicit-none -ffpe-trap=invalid,zero,overflow +# FFLAGS += -O0 -g -fcheck=all -finit-real=snan -fimplicit-none -ffpe-trap=invalid,zero,overflow CFLAGS += -O0 endif diff --git a/configuration/scripts/machines/Macros.cheyenne_intel b/configuration/scripts/machines/Macros.cheyenne_intel index 243295487..52fc07ebb 100644 --- a/configuration/scripts/machines/Macros.cheyenne_intel +++ b/configuration/scripts/machines/Macros.cheyenne_intel @@ -12,7 +12,9 @@ FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -trace FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) - FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created -link_mpi=dbg +# FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created -link_mpi=dbg + FFLAGS += -O0 -g -check all -fpe0 -ftrapuv -fp-model except -check noarg_temp_created -link_mpi=dbg +# FFLAGS += -O0 -g -check all -fpe0 -ftrapuv -fp-model except -check noarg_temp_created -init=snan,arrays -link_mpi=dbg else FFLAGS += -O2 endif diff --git a/configuration/scripts/machines/Macros.compy_intel b/configuration/scripts/machines/Macros.compy_intel new file mode 100644 index 000000000..604337f59 --- /dev/null +++ b/configuration/scripts/machines/Macros.compy_intel @@ -0,0 +1,44 @@ +#============================================================================== +# Makefile macro for PNNL compy, intel compiler +#============================================================================== + +CPP := fpp +CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} +CFLAGS := -c -O2 -fp-model precise -xHost + +FIXEDFLAGS := -132 +FREEFLAGS := -FR +FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -traceback -xHost +FFLAGS_NOOPT:= -O0 + +ifeq ($(ICE_BLDDEBUG), true) + FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created +else + FFLAGS += -O2 +endif + +SCC := icc +SFC := ifort +MPICC := mpiicc +MPIFC := mpiifort 
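Returning to the onyx change in cice.batch.csh above: the PBS request must use an mpiprocs value the 44-core nodes accept, so the script snaps taskpernode down through the allowed values and then rounds the node count up by hand, since csh integer division truncates. The same arithmetic as an illustrative Python sketch only (onyx_nodes is not part of the scripts):

def onyx_nodes(ntasks, taskpernode):
    """Mirror the cascaded csh tests: snap tasks-per-node to an allowed
    value, then take the ceiling of ntasks / tpn1 for the node count."""
    tpn1 = taskpernode
    if taskpernode < 44: tpn1 = 22
    if taskpernode < 22: tpn1 = 11
    if taskpernode < 11: tpn1 = 4
    if taskpernode < 4:  tpn1 = 2
    if taskpernode < 2:  tpn1 = 1
    nn1 = ntasks // tpn1
    if nn1 * tpn1 < ntasks:   # csh lacks a ceiling divide, hence the fix-up
        nn1 += 1
    return nn1, tpn1

# e.g. 10 tasks at 10 per node snap to tpn1 = 4, giving nn1 = 3 nodes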
+ifeq ($(ICE_COMMDIR), mpi) + FC := $(MPIFC) + CC := $(MPICC) +else + FC := $(SFC) + CC := $(SCC) +endif +LD := $(FC) + +INC_NETCDF := $(NETCDF_PATH)/include +LIB_NETCDF := $(NETCDF_PATH)/lib + +INCLDIR := $(INCLDIR) -I$(INC_NETCDF) +SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff + +ifeq ($(ICE_THREADED), true) + LDFLAGS += -qopenmp + CFLAGS += -qopenmp + FFLAGS += -qopenmp +endif + diff --git a/configuration/scripts/machines/Macros.conda_macos b/configuration/scripts/machines/Macros.conda_macos index 4acc4d3ba..9be1b9ab4 100644 --- a/configuration/scripts/machines/Macros.conda_macos +++ b/configuration/scripts/machines/Macros.conda_macos @@ -45,6 +45,7 @@ ifeq ($(strip $(SDKPATH)),) CFLAGS_HOST := else CFLAGS_HOST = -isysroot $(SDKPATH) + LD += -L$(SDKPATH)/usr/lib endif # Libraries to be passed to the linker diff --git a/configuration/scripts/machines/Macros.daley_intel b/configuration/scripts/machines/Macros.daley_intel index 897e6e057..a434ffdb3 100644 --- a/configuration/scripts/machines/Macros.daley_intel +++ b/configuration/scripts/machines/Macros.daley_intel @@ -13,7 +13,7 @@ FFLAGS := -fp-model source -convert big_endian -assume byterecl -ftz -traceb #-xHost ifeq ($(ICE_BLDDEBUG), true) - FFLAGS += -O0 -g -check -fpe0 -ftrapuv -fp-model except -check noarg_temp_created + FFLAGS += -O0 -g -check -fpe0 -ftrapuv -fp-model except -check noarg_temp_created -init=snan,arrays # -heap-arrays 1024 else FFLAGS += -O2 diff --git a/configuration/scripts/machines/Macros.gaffney_intel b/configuration/scripts/machines/Macros.gaffney_intel index 61dfe2518..7eccd36da 100644 --- a/configuration/scripts/machines/Macros.gaffney_intel +++ b/configuration/scripts/machines/Macros.gaffney_intel @@ -13,7 +13,7 @@ FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created -# FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created -init=snan,arrays +# FFLAGS += -O0 -g -check all -fpe0 -ftrapuv -fp-model except -check noarg_temp_created -init=snan,arrays else FFLAGS += -O2 endif diff --git a/configuration/scripts/machines/Macros.koehr_intel b/configuration/scripts/machines/Macros.koehr_intel index 284d30c55..aee4b31a8 100644 --- a/configuration/scripts/machines/Macros.koehr_intel +++ b/configuration/scripts/machines/Macros.koehr_intel @@ -13,7 +13,7 @@ FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created -# FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created -init=snan,arrays +# FFLAGS += -O0 -g -check all -fpe0 -ftrapuv -fp-model except -check noarg_temp_created -init=snan,arrays else FFLAGS += -O2 endif diff --git a/configuration/scripts/machines/Macros.mustang_intel18 b/configuration/scripts/machines/Macros.mustang_intel18 index 5d1849488..28c1c1964 100644 --- a/configuration/scripts/machines/Macros.mustang_intel18 +++ b/configuration/scripts/machines/Macros.mustang_intel18 @@ -13,6 +13,7 @@ FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created +# FFLAGS += -O0 -g -check all -fpe0 -ftrapuv -fp-model except -check noarg_temp_created -init=snan,arrays else FFLAGS += -O2 endif diff --git a/configuration/scripts/machines/Macros.mustang_intel19 b/configuration/scripts/machines/Macros.mustang_intel19 index 5d1849488..28c1c1964 100644 --- 
a/configuration/scripts/machines/Macros.mustang_intel19 +++ b/configuration/scripts/machines/Macros.mustang_intel19 @@ -13,6 +13,7 @@ FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created +# FFLAGS += -O0 -g -check all -fpe0 -ftrapuv -fp-model except -check noarg_temp_created -init=snan,arrays else FFLAGS += -O2 endif diff --git a/configuration/scripts/machines/Macros.mustang_intel20 b/configuration/scripts/machines/Macros.mustang_intel20 index 5d1849488..28c1c1964 100644 --- a/configuration/scripts/machines/Macros.mustang_intel20 +++ b/configuration/scripts/machines/Macros.mustang_intel20 @@ -13,6 +13,7 @@ FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created +# FFLAGS += -O0 -g -check all -fpe0 -ftrapuv -fp-model except -check noarg_temp_created -init=snan,arrays else FFLAGS += -O2 endif diff --git a/configuration/scripts/machines/Macros.onyx_intel b/configuration/scripts/machines/Macros.onyx_intel index 55f6fbbf5..92879ee82 100644 --- a/configuration/scripts/machines/Macros.onyx_intel +++ b/configuration/scripts/machines/Macros.onyx_intel @@ -13,6 +13,7 @@ FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created +# FFLAGS += -O0 -g -check all -fpe0 -ftrapuv -fp-model except -check noarg_temp_created -init=snan,arrays else FFLAGS += -O2 endif diff --git a/configuration/scripts/machines/env.cheyenne_gnu b/configuration/scripts/machines/env.cheyenne_gnu index b17a15917..3bfe59c31 100755 --- a/configuration/scripts/machines/env.cheyenne_gnu +++ b/configuration/scripts/machines/env.cheyenne_gnu @@ -31,6 +31,9 @@ endif endif +limit coredumpsize unlimited +limit stacksize unlimited + setenv ICE_MACHINE_MACHNAME cheyenne setenv ICE_MACHINE_MACHINFO "SGI ICE XA Xeon E5-2697V4 Broadwell" setenv ICE_MACHINE_ENVNAME gnu diff --git a/configuration/scripts/machines/env.cheyenne_intel b/configuration/scripts/machines/env.cheyenne_intel index ce4eba29b..4a430622e 100755 --- a/configuration/scripts/machines/env.cheyenne_intel +++ b/configuration/scripts/machines/env.cheyenne_intel @@ -31,6 +31,9 @@ endif endif +limit coredumpsize unlimited +limit stacksize unlimited + setenv ICE_MACHINE_MACHNAME cheyenne setenv ICE_MACHINE_MACHINFO "SGI ICE XA Xeon E5-2697V4 Broadwell" setenv ICE_MACHINE_ENVNAME intel diff --git a/configuration/scripts/machines/env.cheyenne_pgi b/configuration/scripts/machines/env.cheyenne_pgi index ba9ea498d..693692842 100755 --- a/configuration/scripts/machines/env.cheyenne_pgi +++ b/configuration/scripts/machines/env.cheyenne_pgi @@ -31,6 +31,9 @@ endif endif +limit coredumpsize unlimited +limit stacksize unlimited + setenv ICE_MACHINE_MACHNAME cheyenne setenv ICE_MACHINE_MACHINFO "SGI ICE XA Xeon E5-2697V4 Broadwell" setenv ICE_MACHINE_ENVNAME pgi diff --git a/configuration/scripts/machines/env.compy_intel b/configuration/scripts/machines/env.compy_intel new file mode 100755 index 000000000..fe3511aa6 --- /dev/null +++ b/configuration/scripts/machines/env.compy_intel @@ -0,0 +1,42 @@ +#!/bin/csh -f + +set inp = "undefined" +if ($#argv == 1) then + set inp = $1 +endif + +if ("$inp" != "-nomodules") then + +source /share/apps/modules/init/csh + +module purge +module load intel/19.0.5 +module load intelmpi/2019u4 +module load netcdf/4.6.3 +module load hdf5/1.10.5 + +#setenv NETCDF_PATH ${NETCDF_DIR} +setenv NETCDF_PATH 
/share/apps/netcdf/4.6.3/intel/19.0.5 +setenv OMP_PROC_BIND true +setenv OMP_PLACES threads +setenv I_MPI_ADJUST_ALLREDUCE 1 +limit coredumpsize unlimited +limit stacksize unlimited + +endif + +setenv ICE_MACHINE_MACHNAME compy +setenv ICE_MACHINE_MACHINFO "PNNL Intel Xeon Skylake with 192 GB of DDR4 DRAM" +setenv ICE_MACHINE_ENVNAME intel +setenv ICE_MACHINE_ENVINFO "intel/19.0.5 intelmpi/2019u4 netcdf/4.6.3" +setenv ICE_MACHINE_MAKE gmake +setenv ICE_MACHINE_WKDIR /compyfs/$USER/CICE_RUNS +setenv ICE_MACHINE_INPUTDATA /compyfs/inputdata/cice-consortium/ +setenv ICE_MACHINE_BASELINE /compyfs/$USER/CICE_BASELINE +setenv ICE_MACHINE_SUBMIT "sbatch " +setenv ICE_MACHINE_ACCT e3sm +setenv ICE_MACHINE_QUEUE "slurm" +setenv ICE_MACHINE_TPNODE 40 # tasks per node +setenv ICE_MACHINE_BLDTHRDS 4 +setenv ICE_MACHINE_QSTAT "squeue --jobs=" + diff --git a/configuration/scripts/options/set_env.calchk b/configuration/scripts/options/set_env.calchk new file mode 100644 index 000000000..7dfe9612e --- /dev/null +++ b/configuration/scripts/options/set_env.calchk @@ -0,0 +1,2 @@ +setenv ICE_DRVOPT unittest/calchk +setenv ICE_TARGET calchk diff --git a/configuration/scripts/options/set_env.helloworld b/configuration/scripts/options/set_env.helloworld new file mode 100644 index 000000000..60587fb91 --- /dev/null +++ b/configuration/scripts/options/set_env.helloworld @@ -0,0 +1,2 @@ +setenv ICE_DRVOPT unittest/helloworld +setenv ICE_TARGET helloworld diff --git a/configuration/scripts/options/set_nml.alt01 b/configuration/scripts/options/set_nml.alt01 index 98124b3f2..705fc8f63 100644 --- a/configuration/scripts/options/set_nml.alt01 +++ b/configuration/scripts/options/set_nml.alt01 @@ -15,7 +15,7 @@ kcatbound = 1 kitd = 0 ktherm = 0 conduct = 'bubbly' -kdyn = 0 +kdyn = 1 seabed_stress = .true. seabed_stress_method = 'probabilistic' use_bathymetry = .true. diff --git a/configuration/scripts/options/set_nml.alt03 b/configuration/scripts/options/set_nml.alt03 index 507f56a1b..a72696777 100644 --- a/configuration/scripts/options/set_nml.alt03 +++ b/configuration/scripts/options/set_nml.alt03 @@ -23,3 +23,4 @@ Ktens = 0. e_ratio = 2. seabed_stress = .true. use_bathymetry = .true. +l_mpond_fresh = .true. diff --git a/configuration/scripts/options/set_nml.alt05 b/configuration/scripts/options/set_nml.alt05 index 5a1f83110..5e439d9e0 100644 --- a/configuration/scripts/options/set_nml.alt05 +++ b/configuration/scripts/options/set_nml.alt05 @@ -8,6 +8,3 @@ tr_pond_topo = .false. tr_pond_lvl = .false. tr_aero = .false. shortwave = 'dEdd' -albedo_type = 'default' - - diff --git a/configuration/scripts/options/set_nml.alt06 b/configuration/scripts/options/set_nml.alt06 new file mode 100644 index 000000000..197f1f4a7 --- /dev/null +++ b/configuration/scripts/options/set_nml.alt06 @@ -0,0 +1,5 @@ +ncat = 7 +kcatbound = 3 +nslyr = 3 +ice_ic = 'default' +restart = .false. diff --git a/configuration/scripts/options/set_nml.bgcz b/configuration/scripts/options/set_nml.bgcz index 62c93f783..379a2fd63 100644 --- a/configuration/scripts/options/set_nml.bgcz +++ b/configuration/scripts/options/set_nml.bgcz @@ -26,5 +26,5 @@ tr_bgc_PON = .true. tr_bgc_hum = .true. tr_bgc_DON = .true. tr_bgc_Fe = .true. - - +# modal_aero = .true. +# dEdd_algae = .true. diff --git a/configuration/scripts/options/set_nml.bigdiag b/configuration/scripts/options/set_nml.bigdiag new file mode 100644 index 000000000..a98bc0c2b --- /dev/null +++ b/configuration/scripts/options/set_nml.bigdiag @@ -0,0 +1,8 @@ +forcing_diag = .true. +debug_model = .true. 
+debug_model_step = 4 +print_global = .true. +print_points = .true. +debug_blocks = .true. +latpnt(1) = 85. +lonpnt(1) = -150. diff --git a/configuration/scripts/options/set_nml.box2001 b/configuration/scripts/options/set_nml.box2001 index 79382d84e..84cac67b2 100644 --- a/configuration/scripts/options/set_nml.box2001 +++ b/configuration/scripts/options/set_nml.box2001 @@ -1,4 +1,5 @@ days_per_year = 360 +use_leap_years = .false. npt = 240 ice_ic = 'default' restart = .false. diff --git a/configuration/scripts/options/set_nml.boxadv b/configuration/scripts/options/set_nml.boxadv index 6fcdcc5df..49ab3f13c 100644 --- a/configuration/scripts/options/set_nml.boxadv +++ b/configuration/scripts/options/set_nml.boxadv @@ -18,4 +18,6 @@ kdyn = 2 kstrength = 0 krdg_partic = 0 krdg_redist = 0 +shortwave = 'ccsm3' +albedo_type = 'constant' diff --git a/configuration/scripts/options/set_nml.boxdyn b/configuration/scripts/options/set_nml.boxnodyn similarity index 88% rename from configuration/scripts/options/set_nml.boxdyn rename to configuration/scripts/options/set_nml.boxnodyn index 72e89db5c..e6de6be0d 100644 --- a/configuration/scripts/options/set_nml.boxdyn +++ b/configuration/scripts/options/set_nml.boxnodyn @@ -2,6 +2,7 @@ nilyr = 1 ice_ic = 'default' restart = .false. days_per_year = 360 +use_leap_years = .false. npt = 72 dumpfreq = 'd' dumpfreq_n = 2 @@ -25,3 +26,5 @@ revised_evp = .false. kstrength = 0 krdg_partic = 1 krdg_redist = 1 +shortwave = 'ccsm3' +albedo_type = 'constant' diff --git a/configuration/scripts/options/set_nml.boxrestore b/configuration/scripts/options/set_nml.boxrestore index d00ec41c8..6092a4d23 100644 --- a/configuration/scripts/options/set_nml.boxrestore +++ b/configuration/scripts/options/set_nml.boxrestore @@ -26,3 +26,5 @@ krdg_partic = 0 krdg_redist = 0 seabed_stress = .true. restore_ice = .true. +shortwave = 'ccsm3' +albedo_type = 'constant' diff --git a/configuration/scripts/options/set_nml.boxslotcyl b/configuration/scripts/options/set_nml.boxslotcyl index b13c8ca43..7d9f5e90e 100644 --- a/configuration/scripts/options/set_nml.boxslotcyl +++ b/configuration/scripts/options/set_nml.boxslotcyl @@ -11,6 +11,8 @@ kcatbound = 2 ew_boundary_type = 'open' ns_boundary_type = 'open' close_boundaries = .true. +tr_lvl = .false. +tr_pond_lvl = .false. ktherm = -1 kdyn = -1 kridge = -1 diff --git a/configuration/scripts/options/set_nml.debugblocks b/configuration/scripts/options/set_nml.debugblocks new file mode 100644 index 000000000..299dfff66 --- /dev/null +++ b/configuration/scripts/options/set_nml.debugblocks @@ -0,0 +1 @@ +debug_blocks = .true. 
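The one-line set_nml.* files above and below (debugblocks, bigdiag, boxnodyn, and so on) are namelist fragments that cice.setup layers on top of ice_in when the matching -s option is requested, with later options overriding earlier ones; '#'-prefixed lines are comments, as in set_nml.bgcz above. A hedged Python sketch of that composition (apply_nml_options is a hypothetical helper, not part of the CICE scripts):

def apply_nml_options(namelist, option_files):
    """Apply each set_nml.<opt> file in order; later options win."""
    for path in option_files:
        with open(path) as f:
            for line in f:
                line = line.split('#', 1)[0].strip()   # drop comments and blanks
                if not line or '=' not in line:
                    continue
                key, value = [s.strip() for s in line.split('=', 1)]
                namelist[key] = value                  # override the ice_in default
    return namelist

# e.g. cice.setup ... -s diag1,bigdiag applies set_nml.diag1, then set_nml.bigdiag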
diff --git a/configuration/scripts/options/set_nml.dspiralcenter b/configuration/scripts/options/set_nml.dspiralcenter new file mode 100644 index 000000000..fcf32dde7 --- /dev/null +++ b/configuration/scripts/options/set_nml.dspiralcenter @@ -0,0 +1 @@ +distribution_type = 'spiralcenter' diff --git a/configuration/scripts/options/set_nml.dwghtfile b/configuration/scripts/options/set_nml.dwghtfile new file mode 100644 index 000000000..d72b0fb8a --- /dev/null +++ b/configuration/scripts/options/set_nml.dwghtfile @@ -0,0 +1,3 @@ + distribution_type = 'wghtfile' + distribution_wght = 'file' + distribution_wght_file = 'ICE_MACHINE_INPUTDATA/CICE_data/grid/gx1/cice62_gx1_wghtmask.nc' diff --git a/configuration/scripts/options/set_nml.gbox180 b/configuration/scripts/options/set_nml.gbox180 new file mode 100644 index 000000000..7b139f94a --- /dev/null +++ b/configuration/scripts/options/set_nml.gbox180 @@ -0,0 +1,4 @@ +ice_ic = 'default' +grid_type = 'rectangular' +atm_data_type = 'box2001' +ice_data_type = 'box2001' diff --git a/configuration/scripts/options/set_nml.gx1 b/configuration/scripts/options/set_nml.gx1 index e1d18dc8b..2e8d4f5b7 100644 --- a/configuration/scripts/options/set_nml.gx1 +++ b/configuration/scripts/options/set_nml.gx1 @@ -3,7 +3,7 @@ runtype = 'initial' year_init = 2005 use_leap_years = .true. use_restart_time = .false. -ice_ic = 'ICE_MACHINE_INPUTDATA/CICE_data/ic/gx1/iced_gx1_v5.nc' +ice_ic = 'ICE_MACHINE_INPUTDATA/CICE_data/ic/gx1/iced_gx1_v6.2005-01-01.nc' grid_format = 'bin' grid_type = 'displaced_pole' grid_file = 'ICE_MACHINE_INPUTDATA/CICE_data/grid/gx1/grid_gx1.bin' @@ -17,5 +17,5 @@ atm_data_format = 'nc' atm_data_type = 'JRA55_gx1' atm_data_dir = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/gx1/JRA55' precip_units = 'mks' -ocn_data_dir = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/gx1/COREII' +ocn_data_dir = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/gx1/CESM/MONTHLY' bgc_data_dir = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/gx1/WOA/MONTHLY' diff --git a/configuration/scripts/options/set_nml.gx1apr b/configuration/scripts/options/set_nml.gx1apr new file mode 100644 index 000000000..c150d5815 --- /dev/null +++ b/configuration/scripts/options/set_nml.gx1apr @@ -0,0 +1,5 @@ +year_init = 2005 +month_init = 4 +day_init = 1 +sec_init = 0 +ice_ic = 'ICE_MACHINE_INPUTDATA/CICE_data/ic/gx1/iced_gx1_v6.2005-04-01.nc' diff --git a/configuration/scripts/options/set_nml.gx1coreii b/configuration/scripts/options/set_nml.gx1coreii index 44b334194..13b8db59e 100644 --- a/configuration/scripts/options/set_nml.gx1coreii +++ b/configuration/scripts/options/set_nml.gx1coreii @@ -1,6 +1,7 @@ year_init = 1997 use_leap_years = .false. use_restart_time = .true. +ice_ic = 'ICE_MACHINE_INPUTDATA/CICE_data/ic/gx1/iced_gx1_v5.nc' fyear_init = 2005 ycycle = 1 atm_data_format = 'bin' diff --git a/configuration/scripts/options/set_nml.gx1prod b/configuration/scripts/options/set_nml.gx1prod index a26af8102..f725c4367 100644 --- a/configuration/scripts/options/set_nml.gx1prod +++ b/configuration/scripts/options/set_nml.gx1prod @@ -1,7 +1,18 @@ -year_init = 1958 -dt = 3600 -npt = 87600 +year_init = 2005 +use_leap_years = .true. +npt_unit = 'y' +npt = 1 dumpfreq = 'm' -fyear_init = 1958 -ycycle = 52 -ocn_data_dir = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/gx1' +fyear_init = 2005 +ycycle = 5 +ocn_data_dir = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/gx1/CESM/MONTHLY/' +use_bathymetry = .true. +seabed_stress = .true. 
+seabed_stress_method = 'LKD' +ocn_data_type = 'ncar' +ocn_data_format = 'nc' +oceanmixed_ice = .true. +oceanmixed_file = 'ocean_forcing_clim_2D_gx1.20210330.nc' +tr_brine = .true. +f_taubx = 'm' +f_tauby = 'm' diff --git a/configuration/scripts/options/set_nml.gx3sep2 b/configuration/scripts/options/set_nml.gx3sep2 new file mode 100644 index 000000000..4eeefc64d --- /dev/null +++ b/configuration/scripts/options/set_nml.gx3sep2 @@ -0,0 +1,6 @@ +year_init = 2005 +month_init = 9 +day_init = 2 +sec_init = 7200 +use_leap_years = .true. +ice_ic = 'ICE_MACHINE_INPUTDATA/CICE_data/ic/gx3/iced_gx3_v6.2005-09-01.nc' diff --git a/configuration/scripts/options/set_nml.ml b/configuration/scripts/options/set_nml.ml new file mode 100644 index 000000000..0d00cbd5b --- /dev/null +++ b/configuration/scripts/options/set_nml.ml @@ -0,0 +1,7 @@ + +oceanmixed_ice = .true. +ocn_data_type = 'ncar' +ocn_data_format = 'nc' +ocn_data_dir = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/gx1/CESM/MONTHLY' +oceanmixed_file = 'ocean_forcing_clim_2D_gx1.20210330.nc' + diff --git a/configuration/scripts/options/set_nml.run10day b/configuration/scripts/options/set_nml.run10day index deae3e993..05160c42d 100644 --- a/configuration/scripts/options/set_nml.run10day +++ b/configuration/scripts/options/set_nml.run10day @@ -1,4 +1,5 @@ -npt = 240 +npt_unit = 'd' +npt = 10 dumpfreq = 'd' dumpfreq_n = 10 histfreq = 'd','x','x','x','x' diff --git a/configuration/scripts/options/set_nml.run1day b/configuration/scripts/options/set_nml.run1day index d7b70f973..a4ed751d5 100644 --- a/configuration/scripts/options/set_nml.run1day +++ b/configuration/scripts/options/set_nml.run1day @@ -1,4 +1,5 @@ -npt = 24 +npt_unit = 'd' +npt = 1 dumpfreq = 'd' dumpfreq_n = 1 diag_type = 'stdout' diff --git a/configuration/scripts/options/set_nml.run1year b/configuration/scripts/options/set_nml.run1year index 9a5baadfd..4e481870c 100644 --- a/configuration/scripts/options/set_nml.run1year +++ b/configuration/scripts/options/set_nml.run1year @@ -1,4 +1,5 @@ -npt = 8760 +npt_unit = 'y' +npt = 1 dumpfreq = 'm' dumpfreq_n = 12 diagfreq = 24 diff --git a/configuration/scripts/options/set_nml.run2day b/configuration/scripts/options/set_nml.run2day index 8129d59f6..60ece02f0 100644 --- a/configuration/scripts/options/set_nml.run2day +++ b/configuration/scripts/options/set_nml.run2day @@ -1,4 +1,5 @@ -npt = 48 +npt_unit = 'd' +npt = 2 dumpfreq = 'd' dumpfreq_n = 2 histfreq = 'd','x','x','x','x' diff --git a/configuration/scripts/options/set_nml.run3day b/configuration/scripts/options/set_nml.run3day index 1fbf7a115..1a839468e 100644 --- a/configuration/scripts/options/set_nml.run3day +++ b/configuration/scripts/options/set_nml.run3day @@ -1,4 +1,5 @@ -npt = 72 +npt_unit = 'd' +npt = 3 dumpfreq = 'd' dumpfreq_n = 2 diag_type = 'stdout' diff --git a/configuration/scripts/options/set_nml.run3dt b/configuration/scripts/options/set_nml.run3dt index 102a19d80..4ff27ce22 100644 --- a/configuration/scripts/options/set_nml.run3dt +++ b/configuration/scripts/options/set_nml.run3dt @@ -1,3 +1,4 @@ +npt_unit = '1' npt = 3 dump_last = .true. 
histfreq = '1','x','x','x','x' diff --git a/configuration/scripts/options/set_nml.run5day b/configuration/scripts/options/set_nml.run5day index 4113c48e6..88d498a89 100644 --- a/configuration/scripts/options/set_nml.run5day +++ b/configuration/scripts/options/set_nml.run5day @@ -1,4 +1,5 @@ -npt = 120 +npt_unit = 'd' +npt = 5 dumpfreq = 'd' dumpfreq_n = 5 histfreq = 'd','x','x','x','x' diff --git a/configuration/scripts/options/set_nml.run60day b/configuration/scripts/options/set_nml.run60day index 01fd59504..96f6dea1c 100644 --- a/configuration/scripts/options/set_nml.run60day +++ b/configuration/scripts/options/set_nml.run60day @@ -1,4 +1,5 @@ -npt = 1440 +npt_unit = 'd' +npt = 60 dumpfreq = 'd' dumpfreq_n = 30 histfreq = 'd','x','x','x','x' diff --git a/configuration/scripts/options/set_nml.run90day b/configuration/scripts/options/set_nml.run90day index 06db1a3d8..34d31166f 100644 --- a/configuration/scripts/options/set_nml.run90day +++ b/configuration/scripts/options/set_nml.run90day @@ -1,4 +1,5 @@ -npt = 2160 +npt_unit = 'd' +npt = 90 dumpfreq = 'd' dumpfreq_n = 30 histfreq = 'd','x','x','x','x' diff --git a/configuration/scripts/options/set_nml.seabedLKD b/configuration/scripts/options/set_nml.seabedLKD new file mode 100644 index 000000000..b53977d36 --- /dev/null +++ b/configuration/scripts/options/set_nml.seabedLKD @@ -0,0 +1,6 @@ +use_bathymetry = .true. +seabed_stress = .true. +seabed_stress_method = 'LKD' +histfreq = 'm','d','x','x','x' +f_taubx = 'md' +f_tauby = 'md' diff --git a/configuration/scripts/options/set_nml.seabedprob b/configuration/scripts/options/set_nml.seabedprob new file mode 100644 index 000000000..d6ad877ee --- /dev/null +++ b/configuration/scripts/options/set_nml.seabedprob @@ -0,0 +1,6 @@ +use_bathymetry = .true. +seabed_stress = .true. 
+seabed_stress_method = 'probabilistic' +histfreq = 'm','d','x','x','x' +f_taubx = 'md' +f_tauby = 'md' diff --git a/configuration/scripts/options/test_nml.restart1 b/configuration/scripts/options/test_nml.restart1 index 82f934720..6ab0bd88b 100644 --- a/configuration/scripts/options/test_nml.restart1 +++ b/configuration/scripts/options/test_nml.restart1 @@ -1,4 +1,5 @@ -npt = 240 +npt = 10 +npt_unit = 'd' dumpfreq = 'd' dumpfreq_n = 5 runtype = 'initial' diff --git a/configuration/scripts/options/test_nml.restart2 b/configuration/scripts/options/test_nml.restart2 index 4ae10c5a6..c12887eb0 100644 --- a/configuration/scripts/options/test_nml.restart2 +++ b/configuration/scripts/options/test_nml.restart2 @@ -1,4 +1,5 @@ -npt = 120 +npt = 5 +npt_unit = 'd' dumpfreq = 'd' dumpfreq_n = 5 runtype = 'continue' diff --git a/configuration/scripts/tests/base_suite.ts b/configuration/scripts/tests/base_suite.ts old mode 100755 new mode 100644 index 1ed489730..c37750a31 --- a/configuration/scripts/tests/base_suite.ts +++ b/configuration/scripts/tests/base_suite.ts @@ -5,6 +5,7 @@ smoke gx3 1x4 debug,diag1,run2day smoke gx3 4x1 debug,diag1,run5day restart gx3 8x2 debug smoke gx3 8x2 diag24,run1year,medium +smoke gx3 7x2 diag1,bigdiag,run1day decomp gx3 4x2x25x29x5 none smoke gx3 4x2 diag1,run5day smoke_gx3_8x2_diag1_run5day smoke gx3 4x1 diag1,run5day,thread smoke_gx3_8x2_diag1_run5day @@ -16,14 +17,18 @@ restart gx3 8x2 alt02 restart gx3 4x2 alt03 restart gx3 4x4 alt04 restart gx3 4x4 alt05 +restart gx3 8x2 alt06 restart gx3 6x2 alt01,debug,short restart gx3 8x2 alt02,debug,short restart gx3 4x2 alt03,debug,short smoke gx3 4x4 alt04,debug,short smoke gx3 4x4 alt05,debug,short +smoke gx3 8x2 alt06,debug,short +smoke gx3 10x2 debug,diag1,run5day,gx3sep2 +smoke gx3 7x2 diag1,bigdiag,run1day restart gbox128 4x2 short -restart gbox128 4x2 boxdyn,short -restart gbox128 4x2 boxdyn,short,debug +restart gbox128 4x2 boxnodyn,short +restart gbox128 4x2 boxnodyn,short,debug restart gbox128 2x2 boxadv,short smoke gbox128 2x2 boxadv,short,debug restart gbox128 4x4 boxrestore,short @@ -39,7 +44,9 @@ restart gx1 8x1 bgczclim,medium smoke gx1 24x1 medium,run90day,yi2008 smoke gx3 8x1 medium,run90day,yi2008 restart gx1 24x1 short -restart gx3 8x1 short +restart gx1 16x2 seabedLKD,gx1apr,medium,debug +restart gx1 15x2 seabedprob,medium +restart gx1 32x1 gx1prod,medium smoke gx3 4x2 fsd1,diag24,run5day,debug smoke gx3 8x2 fsd12,diag24,run5day,short restart gx3 4x2 fsd12,debug,short diff --git a/configuration/scripts/tests/baseline.script b/configuration/scripts/tests/baseline.script index a1ab4e055..6f13807e3 100644 --- a/configuration/scripts/tests/baseline.script +++ b/configuration/scripts/tests/baseline.script @@ -20,38 +20,93 @@ endif # Baseline comparing run if (${ICE_BASECOM} != ${ICE_SPVAL}) then - set test_dir = ${ICE_RUNDIR}/restart - set base_dir = ${ICE_BASELINE}/${ICE_BASECOM}/${ICE_TESTNAME}/restart - - set baseline_log = `ls -1t ${ICE_BASELINE}/${ICE_BASECOM}/${ICE_TESTNAME}/cice.runlog* | head -1` set btimeloop = -1 set bdynamics = -1 set bcolumn = -1 - if (${baseline_log} != "" ) then - set btimeloop = `grep TimeLoop ${baseline_log} | grep Timer | cut -c 22-32` - set bdynamics = `grep Dynamics ${baseline_log} | grep Timer | cut -c 22-32` - set bcolumn = `grep Column ${baseline_log} | grep Timer | cut -c 22-32` - if (${btimeloop} == "") set btimeloop = -1 - if (${bdynamics} == "") set bdynamics = -1 - if (${bcolumn} == "") set bcolumn = -1 - endif - echo "" - echo "Regression Compare Mode:" - echo 
"base_dir: ${base_dir}" - echo "test_dir: ${test_dir}" + if (${ICE_TEST} == "unittest") then + set test_file = `ls -1t ${ICE_RUNDIR}/cice.runlog* | head -1` + set base_file = `ls -1t ${ICE_BASELINE}/${ICE_BASECOM}/${ICE_TESTNAME}/cice.runlog* | head -1` + + echo "" + echo "bfb Log Compare Mode:" + echo "base_file: ${base_file}" + echo "test_file: ${test_file}" + + ${ICE_CASEDIR}/casescripts/comparelog.csh ${base_file} ${test_file} notcicefile + set bfbstatus = $status + + else + + set test_dir = ${ICE_RUNDIR}/restart + set base_dir = ${ICE_BASELINE}/${ICE_BASECOM}/${ICE_TESTNAME}/restart + + set baseline_log = `ls -1t ${ICE_BASELINE}/${ICE_BASECOM}/${ICE_TESTNAME}/cice.runlog* | head -1` + if (${baseline_log} != "" ) then + set btimeloop = `grep TimeLoop ${baseline_log} | grep Timer | cut -c 22-32` + set bdynamics = `grep Dynamics ${baseline_log} | grep Timer | cut -c 22-32` + set bcolumn = `grep Column ${baseline_log} | grep Timer | cut -c 22-32` + if (${btimeloop} == "") set btimeloop = -1 + if (${bdynamics} == "") set bdynamics = -1 + if (${bcolumn} == "") set bcolumn = -1 + endif + + echo "" + echo "Regression Compare Mode:" + echo "base_dir: ${base_dir}" + echo "test_dir: ${test_dir}" + + ${ICE_CASEDIR}/casescripts/comparebfb.csh ${base_dir} ${test_dir} + set bfbstatus = $status + + if ( ${bfbstatus} != 0 ) then + + set test_file = `ls -1t ${ICE_RUNDIR}/cice.runlog* | head -1` + set base_file = `ls -1t ${ICE_BASELINE}/${ICE_BASECOM}/${ICE_TESTNAME}/cice.runlog* | head -1` + + echo "" + echo "bfb Log Compare Mode:" + echo "base_file: ${base_file}" + echo "test_file: ${test_file}" + + if ("${base_file}" == "" || "${test_file}" == "" ) then + echo "MISS ${ICE_TESTNAME} complog ${ICE_BASECOM} missing-data" >> ${ICE_CASEDIR}/test_output + echo "Missing data" + else + ${ICE_CASEDIR}/casescripts/comparelog.csh ${base_file} ${test_file} + set logstatus = $status + + if ( ${logstatus} == 0 ) then + echo "PASS ${ICE_TESTNAME} complog ${ICE_BASECOM}" >> ${ICE_CASEDIR}/test_output + echo "Regression baseline and test dataset may be the same" + else if ( ${logstatus} == 1 ) then + echo "FAIL ${ICE_TESTNAME} complog ${ICE_BASECOM} different-data" >> ${ICE_CASEDIR}/test_output + echo "Regression baseline and test dataset are not the same" + else if ( ${logstatus} == 2 ) then + echo "MISS ${ICE_TESTNAME} complog ${ICE_BASECOM} missing-data" >> ${ICE_CASEDIR}/test_output + echo "Missing data" + else + echo "FAIL ${ICE_TESTNAME} complog ${ICE_BASECOM} usage-error" >> ${ICE_CASEDIR}/test_output + echo "Regression baseline and test dataset error in usage" + endif + endif + + endif + + endif - ${ICE_CASEDIR}/casescripts/comparebfb.csh ${base_dir} ${test_dir} - set bfbstatus = $status if ( ${bfbstatus} == 0 ) then echo "PASS ${ICE_TESTNAME} compare ${ICE_BASECOM} ${btimeloop} ${bdynamics} ${bcolumn}" >> ${ICE_CASEDIR}/test_output echo "Regression baseline and test dataset are identical" + else if ( ${bfbstatus} == 1 ) then + echo "FAIL ${ICE_TESTNAME} compare ${ICE_BASECOM} ${btimeloop} ${bdynamics} ${bcolumn} different-data" >> ${ICE_CASEDIR}/test_output + echo "Regression baseline and test dataset are different" else if ( ${bfbstatus} == 2 ) then echo "MISS ${ICE_TESTNAME} compare ${ICE_BASECOM} ${btimeloop} ${bdynamics} ${bcolumn} missing-data" >> ${ICE_CASEDIR}/test_output echo "Missing data" else - echo "FAIL ${ICE_TESTNAME} compare ${ICE_BASECOM} ${btimeloop} ${bdynamics} ${bcolumn} different-data" >> ${ICE_CASEDIR}/test_output - echo "Regression baseline and test dataset are different" + echo "FAIL 
${ICE_TESTNAME} compare ${ICE_BASECOM} ${btimeloop} ${bdynamics} ${bcolumn} usage-error" >> ${ICE_CASEDIR}/test_output
+      echo "Regression baseline and test dataset error in usage"
     endif
 
 endif
@@ -88,12 +143,15 @@ if (${ICE_BFBCOMP} != ${ICE_SPVAL}) then
   if (${bfbstatus} == 0) then
     echo "PASS ${ICE_TESTNAME} bfbcomp ${ICE_BFBCOMP}" >> ${ICE_CASEDIR}/test_output
     echo "bfb baseline and test dataset are identical"
+  else if (${bfbstatus} == 1) then
+    echo "FAIL ${ICE_TESTNAME} bfbcomp ${ICE_BFBCOMP} different-data" >> ${ICE_CASEDIR}/test_output
+    echo "bfbcomp and test dataset are different"
   else if (${bfbstatus} == 2) then
     echo "MISS ${ICE_TESTNAME} bfbcomp ${ICE_BFBCOMP} missing-data" >> ${ICE_CASEDIR}/test_output
     echo "Missing data"
   else
-    echo "FAIL ${ICE_TESTNAME} bfbcomp ${ICE_BFBCOMP} different-data" >> ${ICE_CASEDIR}/test_output
-    echo "bfbcomp and test dataset are different"
+    echo "FAIL ${ICE_TESTNAME} bfbcomp ${ICE_BFBCOMP} usage-error" >> ${ICE_CASEDIR}/test_output
+    echo "bfbcomp and test dataset usage error"
   endif
 endif
diff --git a/configuration/scripts/tests/comparelog.csh b/configuration/scripts/tests/comparelog.csh
index 8c1ff3a3c..d9e4a7a89 100755
--- a/configuration/scripts/tests/comparelog.csh
+++ b/configuration/scripts/tests/comparelog.csh
@@ -3,8 +3,9 @@
 # Compare prognostic output in two log files
 #-----------------------------------------------------------
 
-# usage: comparelog.csh base_file test_file
+# usage: comparelog.csh base_file test_file [notcicefile]
 # does diff of two files
+# optional 3rd argument indicates the file is not a cice file so diff entire thing
 #
 # Return Codes (depends on quality of error checking)
 #  0 = pass
@@ -13,13 +14,26 @@
 #  9 = error
 
 set filearg = 0
+set cicefile = 0
+set notcicefile = "notcicefile"
 if ( $#argv == 2 ) then
+  set cicefile = 1
   set filearg = 1
   set base_data = $argv[1]
  set test_data = $argv[2]
-else
+  if ("$argv[1]" == "${notcicefile}") set filearg = 0
+  if ("$argv[2]" == "${notcicefile}") set filearg = 0
+else if ( $#argv == 3 ) then
+  set cicefile = 0
+  set filearg = 1
+  set base_data = $argv[1]
+  set test_data = $argv[2]
+  if ("$argv[3]" != "${notcicefile}") set filearg = 0
+endif
+
+if (${filearg} == 0) then
   echo "Error in ${0}"
-  echo "Usage: ${0} <base_file> <test_file>"
+  echo "Usage: ${0} <base_file> <test_file> [notcicefile]"
   echo " does diff of two files"
   exit 9
 endif
@@ -28,7 +42,7 @@ set failure = 0
 set base_out = "comparelog_base_out_file.log"
 set test_out = "comparelog_test_out_file.log"
 
-if ($filearg == 1) then
+if (${filearg} == 1) then
   echo "base_data: $base_data"
   echo "test_data: $test_data"
   if ( -f ${base_data} && -f ${test_data}) then
@@ -38,12 +52,18 @@ if ($filearg == 1) then
 
   else
     touch ${base_out}
-    cat ${base_data} | grep -A 99999999 istep1: | grep -e istep1: -e = | grep -iv "min, max, sum" >&! ${base_out}
     touch ${test_out}
-    cat ${test_data} | grep -A 99999999 istep1: | grep -e istep1: -e = | grep -iv "min, max, sum" >&! ${test_out}
+
+    if (${cicefile} == 1) then
+      cat ${base_data} | grep -A 99999999 "total ice area (km^2)" | grep -e istep1: -e = | grep -iv "min, max, sum" >&! ${base_out}
+      cat ${test_data} | grep -A 99999999 "total ice area (km^2)" | grep -e istep1: -e = | grep -iv "min, max, sum" >&!
${test_out} + else + cp -f ${base_data} ${base_out} + cp -f ${test_data} ${test_out} + endif set basenum = `cat ${base_out} | wc -l` - set testnum = `cat ${base_out} | wc -l` + set testnum = `cat ${test_out} | wc -l` set filediff = `diff -w ${base_out} ${test_out} | wc -l` if (${basenum} > 0 && ${testnum} > 0) then diff --git a/configuration/scripts/tests/decomp_suite.ts b/configuration/scripts/tests/decomp_suite.ts index 4eb5394d9..9c82c5d27 100644 --- a/configuration/scripts/tests/decomp_suite.ts +++ b/configuration/scripts/tests/decomp_suite.ts @@ -1,15 +1,50 @@ # Test Grid PEs Sets BFB-compare restart gx3 4x2x25x29x4 dslenderX2 +restart gx1 64x1x16x16x10 dwghtfile +restart gbox180 16x1x6x6x60 dspacecurve,debugblocks decomp gx3 4x2x25x29x5 none sleep 30 -restart gx3 1x1x50x58x4 droundrobin,thread restart_gx3_4x2x25x29x4_dslenderX2 -restart gx3 4x1x25x116x1 dslenderX1,thread restart_gx3_4x2x25x29x4_dslenderX2 -restart gx3 6x2x4x29x18 dspacecurve restart_gx3_4x2x25x29x4_dslenderX2 -restart gx3 8x2x8x10x20 droundrobin restart_gx3_4x2x25x29x4_dslenderX2 -restart gx3 6x2x50x58x1 droundrobin restart_gx3_4x2x25x29x4_dslenderX2 -restart gx3 4x2x19x19x10 droundrobin restart_gx3_4x2x25x29x4_dslenderX2 -restart gx3 1x20x5x29x80 dsectrobin,short restart_gx3_4x2x25x29x4_dslenderX2 -restart gx3 16x2x5x10x20 drakeX2 restart_gx3_4x2x25x29x4_dslenderX2 +restart gx3 1x1x50x58x4 droundrobin,thread restart_gx3_4x2x25x29x4_dslenderX2 +restart gx3 4x1x25x116x1 dslenderX1,thread restart_gx3_4x2x25x29x4_dslenderX2 +restart gx3 6x2x4x29x18 dspacecurve restart_gx3_4x2x25x29x4_dslenderX2 +restart gx3 8x2x8x10x20 droundrobin restart_gx3_4x2x25x29x4_dslenderX2 +restart gx3 6x2x50x58x1 droundrobin restart_gx3_4x2x25x29x4_dslenderX2 +restart gx3 5x2x33x23x4 droundrobin restart_gx3_4x2x25x29x4_dslenderX2 +restart gx3 4x2x19x19x10 droundrobin restart_gx3_4x2x25x29x4_dslenderX2 +restart gx3 20x2x5x4x30 dsectrobin,short restart_gx3_4x2x25x29x4_dslenderX2 +restart gx3 16x2x5x10x20 drakeX2 restart_gx3_4x2x25x29x4_dslenderX2 restart gx3 8x2x8x10x20 droundrobin,maskhalo restart_gx3_4x2x25x29x4_dslenderX2 -restart gx3 1x4x25x29x16 droundrobin restart_gx3_4x2x25x29x4_dslenderX2 +restart gx3 1x4x25x29x16 droundrobin restart_gx3_4x2x25x29x4_dslenderX2 +restart gx3 1x8x30x20x32 droundrobin restart_gx3_4x2x25x29x4_dslenderX2 +restart gx3 1x1x120x125x1 droundrobin,thread restart_gx3_4x2x25x29x4_dslenderX2 +restart gx3 16x2x1x1x800 droundrobin restart_gx3_4x2x25x29x4_dslenderX2 +restart gx3 16x2x2x2x200 droundrobin restart_gx3_4x2x25x29x4_dslenderX2 +restart gx3 16x2x3x3x100 droundrobin restart_gx3_4x2x25x29x4_dslenderX2 +restart gx3 16x2x8x8x80 dspiralcenter restart_gx3_4x2x25x29x4_dslenderX2 +restart gx3 10x1x10x29x4 dsquarepop,thread restart_gx3_4x2x25x29x4_dslenderX2 +restart gx3 8x1x25x29x4 drakeX2,thread restart_gx3_4x2x25x29x4_dslenderX2 + +smoke gx3 4x2x25x29x4 debug,run2day,dslenderX2 +smoke gx1 64x1x16x16x10 debug,run2day,dwghtfile +smoke gbox180 16x1x6x6x60 debug,run2day,dspacecurve,debugblocks +sleep 30 +smoke gx3 1x1x25x58x8 debug,run2day,droundrobin,thread smoke_gx3_4x2x25x29x4_debug_dslenderX2_run2day +smoke gx3 20x1x5x116x1 debug,run2day,dslenderX1,thread smoke_gx3_4x2x25x29x4_debug_dslenderX2_run2day +smoke gx3 6x2x4x29x18 debug,run2day,dspacecurve smoke_gx3_4x2x25x29x4_debug_dslenderX2_run2day +smoke gx3 8x2x10x12x16 debug,run2day,droundrobin smoke_gx3_4x2x25x29x4_debug_dslenderX2_run2day +smoke gx3 6x2x50x58x1 debug,run2day,droundrobin smoke_gx3_4x2x25x29x4_debug_dslenderX2_run2day +smoke gx3 5x2x33x23x4 
debug,run2day,droundrobin smoke_gx3_4x2x25x29x4_debug_dslenderX2_run2day +smoke gx3 4x2x19x19x10 debug,run2day,droundrobin smoke_gx3_4x2x25x29x4_debug_dslenderX2_run2day +smoke gx3 20x2x5x4x30 debug,run2day,dsectrobin,short smoke_gx3_4x2x25x29x4_debug_dslenderX2_run2day +smoke gx3 16x2x5x10x20 debug,run2day,drakeX2 smoke_gx3_4x2x25x29x4_debug_dslenderX2_run2day +smoke gx3 8x2x8x10x20 debug,run2day,droundrobin,maskhalo smoke_gx3_4x2x25x29x4_debug_dslenderX2_run2day +smoke gx3 1x6x25x29x16 debug,run2day,droundrobin smoke_gx3_4x2x25x29x4_debug_dslenderX2_run2day +smoke gx3 1x8x30x20x32 debug,run2day,droundrobin smoke_gx3_4x2x25x29x4_debug_dslenderX2_run2day +smoke gx3 1x1x120x125x1 debug,run2day,droundrobin,thread smoke_gx3_4x2x25x29x4_debug_dslenderX2_run2day +smoke gx3 16x2x1x1x800 debug,run2day,droundrobin smoke_gx3_4x2x25x29x4_debug_dslenderX2_run2day +smoke gx3 16x2x2x2x200 debug,run2day,droundrobin smoke_gx3_4x2x25x29x4_debug_dslenderX2_run2day +smoke gx3 16x2x3x3x100 debug,run2day,droundrobin smoke_gx3_4x2x25x29x4_debug_dslenderX2_run2day +smoke gx3 16x2x8x8x80 debug,run2day,dspiralcenter smoke_gx3_4x2x25x29x4_debug_dslenderX2_run2day +smoke gx3 10x1x10x29x4 debug,run2day,dsquarepop,thread smoke_gx3_4x2x25x29x4_debug_dslenderX2_run2day +smoke gx3 8x1x25x29x4 debug,run2day,drakeX2,thread smoke_gx3_4x2x25x29x4_debug_dslenderX2_run2day diff --git a/configuration/scripts/tests/io_suite.ts b/configuration/scripts/tests/io_suite.ts old mode 100755 new mode 100644 index a17e3f625..6fe1f589a --- a/configuration/scripts/tests/io_suite.ts +++ b/configuration/scripts/tests/io_suite.ts @@ -7,6 +7,7 @@ restart gx3 16x2 gx3ncarbulk,alt02,histall,iobinary,precision8 #restart gx3 4x2 gx3ncarbulk,alt03,histall,iobinary restart gx3 8x4 gx3ncarbulk,alt04,histall,iobinary,precision8 restart gx3 4x4 gx3ncarbulk,alt05,histall,iobinary +restart gx3 14x2 gx3ncarbulk,alt06,histall,iobinary,precision8 restart gx3 32x1 gx3ncarbulk,bgcz,histall,iobinary,precision8 restart gx3 16x2 gx3ncarbulk,bgcskl,histall,iobinary restart gx3 14x2 gx3ncarbulk,isotope,histall,iobinary,precision8 @@ -18,6 +19,7 @@ restart gx3 15x2 alt02,histall,ionetcdf restart gx3 24x1 alt03,histall,ionetcdf,precision8 restart gx3 8x4 alt04,histall,ionetcdf,cdf64 restart gx3 8x4 alt05,histall,ionetcdf,precision8,cdf64 +restart gx3 16x2 alt06,histall,ionetcdf restart gx3 30x1 bgcz,histall,ionetcdf restart gx3 15x2 bgcskl,histall,ionetcdf,precision8 restart gx3 31x1 isotope,histall,ionetcdf,cdf64 @@ -29,6 +31,7 @@ restart gx3 32x1 alt02,histall,iopio1,precision8 restart gx3 24x1 alt03,histall,iopio1 restart gx3 8x4 alt04,histall,iopio1,precision8,cdf64 restart gx3 8x4 alt05,histall,iopio1,cdf64 +restart gx3 32x1 alt06,histall,iopio1,precision8 restart gx3 16x2 bgcz,histall,iopio1,precision8 restart gx3 30x1 bgcskl,histall,iopio1 restart gx3 8x4 isotope,histall,iopio1,precision8,cdf64 @@ -40,6 +43,7 @@ restart gx3 32x1 alt02,histall,iopio2,cdf64 restart gx3 24x1 alt03,histall,iopio2,precision8 restart gx3 8x4 alt04,histall,iopio2 restart gx3 8x4 alt05,histall,iopio2,precision8,cdf64 +restart gx3 16x2 alt06,histall,iopio2,cdf64 restart gx3 16x2 bgcz,histall,iopio2,cdf64 restart gx3 30x1 bgcskl,histall,iopio2,precision8 restart gx3 8x4 isotope,histall,iopio2 @@ -51,6 +55,7 @@ restart gx3 32x1 alt02,histall,iopio1p,precision8,cdf64 restart gx3 24x1 alt03,histall,iopio1p,cdf64 restart gx3 8x4 alt04,histall,iopio1p,precision8 restart gx3 8x4 alt05,histall,iopio1p +restart gx3 6x4 alt06,histall,iopio1p,precision8,cdf64 restart gx3 16x2 
bgcz,histall,iopio1p,precision8,cdf64 restart gx3 30x1 bgcskl,histall,iopio1p,cdf64 restart gx3 8x4 isotope,histall,iopio1p,precision8 @@ -62,6 +67,7 @@ restart gx3 32x1 alt02,histall,iopio2p restart gx3 24x1 alt03,histall,iopio2p,precision8,cdf64 restart gx3 8x4 alt04,histall,iopio2p,cdf64 restart gx3 8x4 alt05,histall,iopio2p,precision8 +restart gx3 24x1 alt06,histall,iopio2p restart gx3 16x2 bgcz,histall,iopio2p restart gx3 30x1 bgcskl,histall,iopio2p,precision8,cdf64 restart gx3 8x4 isotope,histall,iopio2p,cdf64 diff --git a/configuration/scripts/tests/lcov_modify_source.sh b/configuration/scripts/tests/lcov_modify_source.sh new file mode 100755 index 000000000..ceadca4f4 --- /dev/null +++ b/configuration/scripts/tests/lcov_modify_source.sh @@ -0,0 +1,44 @@ +#!/bin/bash + +filelist=`find cicecore icepack -type f -name "*.F90"` +LCOV_EXCL=" ! LCOV_EXCL_LINE" + +#echo $filelist + +for file in $filelist; do + + echo $file + ofile=${file}.orig + nfile=${file} + + mv ${file} ${file}.orig + + # line by line making sure each line has a trailing newline (-n) + # preserve whitespace (IFS) + # and include backslashes (-r) + IFS='' + contblock=0 + cat $ofile | while read -r line || [[ -n $line ]]; do + + if [[ $contblock == 1 ]]; then + # in a continuation block + if [[ $line =~ ^.*"&".*$ ]]; then + # found another continuation line, add exclude string and write out line + echo ${line} ${LCOV_EXCL} >> ${nfile} + else + # continuation block ends, write out line + contblock=0 + echo ${line} >> ${nfile} + fi + else + # not in a continuation block, write out line + echo ${line} >> ${nfile} + if [[ $line =~ ^\s*.*"&".*$ && ! $line =~ ^\s*( if ).*$ ]]; then + # new continuation block found + contblock=1 + fi + fi + + done + +done diff --git a/configuration/scripts/tests/nothread_suite.ts b/configuration/scripts/tests/nothread_suite.ts index afe1963b3..da1267e86 100644 --- a/configuration/scripts/tests/nothread_suite.ts +++ b/configuration/scripts/tests/nothread_suite.ts @@ -21,11 +21,13 @@ restart gx3 16x1 alt02 restart gx3 8x1 alt03 restart gx3 16x1 alt04 restart gx3 16x1 alt05 +restart gx3 20x1 alt06 restart gx3 18x1 alt01,debug,short restart gx3 20x1 alt02,debug,short restart gx3 24x1 alt03,debug,short smoke gx3 24x1 alt04,debug,short smoke gx3 32x1 alt05,debug,short +smoke gx3 16x1 alt06,debug,short restart gx3 16x1 isotope smoke gx3 6x1 isotope,debug smoke gx3 8x1 fsd1,diag24,run5day,debug @@ -34,8 +36,8 @@ restart gx3 12x1 fsd12,debug,short smoke gx3 20x1 fsd12ww3,diag24,run1day,medium restart gbox128 8x1 short -restart gbox128 16x1 boxdyn,short -restart gbox128 24x1 boxdyn,short,debug +restart gbox128 16x1 boxnodyn,short +restart gbox128 24x1 boxnodyn,short,debug restart gbox128 12x1 boxadv,short smoke gbox128 20x1 boxadv,short,debug restart gbox128 32x1 boxrestore,short diff --git a/configuration/scripts/tests/quick_suite.ts b/configuration/scripts/tests/quick_suite.ts index 9384f0333..48646673d 100644 --- a/configuration/scripts/tests/quick_suite.ts +++ b/configuration/scripts/tests/quick_suite.ts @@ -2,5 +2,5 @@ smoke gx3 8x2 diag1,run5day smoke gx3 1x1 diag1,run1day restart gbox128 8x1 diag1 -restart gx3 4x2 debug,diag1,run5day +restart gx3 4x2 debug,diag1 smoke gx3 4x1 diag1,run5day,thread smoke_gx3_8x2_diag1_run5day diff --git a/configuration/scripts/tests/report_results.csh b/configuration/scripts/tests/report_results.csh index 2eb3731d5..e1f3a7342 100755 --- a/configuration/scripts/tests/report_results.csh +++ b/configuration/scripts/tests/report_results.csh @@ -82,11 +82,11 @@ if 
("${shrepo}" !~ "*cice-consortium*") then endif set noglob -set green = "\![#00C000](https://placehold.it/15/00C000/000000?text=+)" -set red = "\![#F00000](https://placehold.it/15/F00000/000000?text=+)" -set orange = "\![#FFA500](https://placehold.it/15/FFA500/000000?text=+)" -set yellow = "\![#FFE600](https://placehold.it/15/FFE600/000000?text=+)" -set gray = "\![#AAAAAA](https://placehold.it/15/AAAAAA/000000?text=+)" +set green = "\![#00C000](images/00C000.png)" +set red = "\![#F00000](images/F00000.png)" +set orange = "\![#FFA500](images/FFA500.png)" +set yellow = "\![#FFE600](images/FFE600.png)" +set gray = "\![#AAAAAA](images/AAAAAA.png)" unset noglob #============================================================== diff --git a/configuration/scripts/tests/test_unittest.script b/configuration/scripts/tests/test_unittest.script new file mode 100644 index 000000000..0fcd148a6 --- /dev/null +++ b/configuration/scripts/tests/test_unittest.script @@ -0,0 +1,24 @@ + +#---------------------------------------------------- +# Run the CICE model +# cice.run returns -1 if run did not complete successfully + +./cice.run +set res="$status" + +set log_file = `ls -t1 ${ICE_RUNDIR}/cice.runlog* | head -1` + +mv -f ${ICE_CASEDIR}/test_output ${ICE_CASEDIR}/test_output.prev +cat ${ICE_CASEDIR}/test_output.prev | grep -iv "${ICE_TESTNAME} run" >! ${ICE_CASEDIR}/test_output +mv -f ${ICE_CASEDIR}/test_output ${ICE_CASEDIR}/test_output.prev +cat ${ICE_CASEDIR}/test_output.prev | grep -iv "${ICE_TESTNAME} test" >! ${ICE_CASEDIR}/test_output +rm -f ${ICE_CASEDIR}/test_output.prev + +set grade = FAIL +if ( $res == 0 ) then + set grade = PASS +endif + +echo "$grade ${ICE_TESTNAME} run " >> ${ICE_CASEDIR}/test_output +echo "$grade ${ICE_TESTNAME} test " >> ${ICE_CASEDIR}/test_output + diff --git a/configuration/scripts/tests/unittest_suite.ts b/configuration/scripts/tests/unittest_suite.ts new file mode 100644 index 000000000..2e9dcc7cf --- /dev/null +++ b/configuration/scripts/tests/unittest_suite.ts @@ -0,0 +1,4 @@ +# Test Grid PEs Sets BFB-compare +unittest gx3 1x1 helloworld +unittest gx3 1x1 calchk + diff --git a/configuration/tools/convert_restarts.f90 b/configuration/tools/cice4_restart_conversion/convert_restarts.f90 similarity index 100% rename from configuration/tools/convert_restarts.f90 rename to configuration/tools/cice4_restart_conversion/convert_restarts.f90 diff --git a/configuration/tools/jra55_datasets/interp_jra55_ncdf_bilinear.py b/configuration/tools/jra55_datasets/interp_jra55_ncdf_bilinear.py new file mode 100755 index 000000000..6cc796481 --- /dev/null +++ b/configuration/tools/jra55_datasets/interp_jra55_ncdf_bilinear.py @@ -0,0 +1,441 @@ +#! /usr/bin/env python3 + +import xesmf as xe +from netCDF4 import Dataset +import argparse +import os +import numpy as np +from datetime import datetime + +###################################################### +###################################################### +def make_regridder(lon1, lat1, lon2, lat2, method, periodic, grdname, + lon1_b=None, lat1_b=None, lon2_b=None, lat2_b=None): + ''' + make nearest neighbor xESMF regridder object. 
+    input:
+       lon1: source longitudes (degrees)
+       lat1: source latitudes (degrees)
+       lon2: target longitudes (degrees)
+       lat2: target latitudes (degrees)
+       method: regridding method (bilinear, patch, conservative, nearest_s2d)
+       periodic: True if periodic longitudes, False if not
+       grdname: filename for regridder (Ugrid or Tgrid)
+    '''
+    if method != "conservative":
+       # define grids for regridder
+       grid1 = {'lon' : lon1, 'lat' : lat1}
+       grid2 = {'lon' : lon2, 'lat' : lat2}
+
+
+    else:
+       # conservative needs boundary lon/lat
+       grid1 = {'lon' : lon1, 'lat' : lat1,
+                'lon_b' : lon1_b, 'lat_b' : lat1_b}
+
+       grid2 = {'lon' : lon2, 'lat' : lat2,
+                'lon_b' : lon2_b, 'lat_b' : lat2_b}
+
+    # make regridder
+    # here specify reuse_weights=False to re-generate weight file.
+    # if you want to reuse the weight file instead of regenerating it,
+    # check if the file exists and set use_file_weights=True.
+    # see commented out example below
+    use_file_weights=False
+
+    # check if regrid file exists.
+    # If so, reuse file instead of regenerating.
+    # if (os.path.isfile(blin_grid_name)):
+    #    use_file_weights = True
+
+    regridder = xe.Regridder(ds_in=grid1,ds_out=grid2,
+                             method=method,
+                             periodic=periodic,
+                             filename=grdname,
+                             reuse_weights=use_file_weights)
+
+
+    return regridder
+
+#########################################
+#########################################
+def halo_extrapolate(a,ew_bndy_type,ns_bndy_type):
+    '''
+    Extrapolate to 'halo' cell as in CICE code
+    ice_boundary.F90:ice_HaloExtrapolate.
+    inputs:
+     a: array nx+1, ny+1 (nghost/nhalo hard-coded as 1 for now)
+     ew_bndy_type: east/west boundary type (cyclic, regional, etc)
+     ns_bndy_type: north/south boundary type (cyclic, regional, etc)
+
+    return: a with halo applied
+    '''
+
+    # get dimension of a
+    # expected to be 0:nx+nghost, 0:ny+nghost
+    nj, ni = a.shape   # note with Python NetCDF the order is nj, ni
+    # W/E edges
+    if ew_bndy_type == 'cyclic':
+        a[: ,0] = a[:,-2]   # -2, since -1 is ghost cell
+        a[:,-1] = a[:, 1]   #  1, since  0 is ghost cell
+    else:  # if (trim(ew_bndy_type) /= 'cyclic') then
+        a[:, 0] = 2.0*a[:, 1] - a[:, 2]
+        a[:,-1] = 2.0*a[:,-2] - a[:,-3]
+
+    # south edge
+    if ns_bndy_type == 'cyclic':
+        a[0,:] = a[-2,:]   # -2, since -1 is ghost cell
+    else:
+        a[0,:] = 2.0*a[1,:] - a[2,:]
+
+    # north edge treated a little differently, depending
+    # on if bndy type is tripole
+    if ns_bndy_type == 'cyclic':
+        a[-1,:] = a[1,:]   # 1, since 0 is ghost cell
+
+    elif (ns_bndy_type != 'cyclic' and
+          ns_bndy_type != 'tripole' and
+          ns_bndy_type != 'tripoleT'):
+
+        a[-1,:] = 2.0*a[-2,:] - a[-3,:]
+
+    else:
+        pass # do nothing
+
+    # return array with halo updated
+    return a
+
+#########################################
+#########################################
+
+def Tlatlon(ulat,ulon,ew_bndy_type,ns_bndy_type):
+    '''
+    Make TLAT/TLON from ULAT/ULON.
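+    (T-cell centers are computed by averaging the Cartesian x/y/z
+    coordinates of the four surrounding U points on the unit sphere
+    and converting the result back to latitude/longitude)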
+    see ice_grid.F90:Tlatlon for method
+    Inputs:
+      ulat: U grid latitude in degrees
+      ulon: U grid longitude in degrees
+
+    output:
+      tlat in degrees
+      tlon in degrees
+    '''
+
+    # method obtained from ice_grid.F90: subroutine Tlatlon
+    ulatcos = np.cos(np.deg2rad(ulat))
+    ulatsin = np.sin(np.deg2rad(ulat))
+
+    uloncos = np.cos(np.deg2rad(ulon))
+    ulonsin = np.sin(np.deg2rad(ulon))
+
+    # initialize array with nghost=1 on each side
+    nj, ni = ulatcos.shape  # note: Python NetCDF is nj, ni order
+    print("Tlatlon nj, ni", nj, ni)
+
+    nghost = 1
+    workdims = (nj+2*nghost,ni+2*nghost)
+    #print("Tlatlon workdims", workdims)
+
+    ulatcos1 = np.zeros(workdims,dtype='f8')
+    ulatsin1 = np.zeros(workdims,dtype='f8')
+    uloncos1 = np.zeros(workdims,dtype='f8')
+    ulonsin1 = np.zeros(workdims,dtype='f8')
+
+    # fill middle of work arrays
+    ulatcos1[1:nj+1,1:ni+1] = ulatcos
+    ulatsin1[1:nj+1,1:ni+1] = ulatsin
+
+    uloncos1[1:nj+1,1:ni+1] = uloncos
+    ulonsin1[1:nj+1,1:ni+1] = ulonsin
+
+    # fill halos
+    ulatcos1 = halo_extrapolate(ulatcos1,ew_bndy_type,ns_bndy_type)
+    ulatsin1 = halo_extrapolate(ulatsin1,ew_bndy_type,ns_bndy_type)
+    uloncos1 = halo_extrapolate(uloncos1,ew_bndy_type,ns_bndy_type)
+    ulonsin1 = halo_extrapolate(ulonsin1,ew_bndy_type,ns_bndy_type)
+
+    # now do computations as in ice_grid.F90:Tlatlon
+
+    # x, y, z are full 2d
+    x = uloncos1 * ulatcos1
+    y = ulonsin1 * ulatcos1
+    z = ulatsin1
+
+    tx = 0.25*(x[0:nj,  0:ni  ] +  # x1
+               x[0:nj,  1:ni+1] +  # x2
+               x[1:nj+1,0:ni  ] +  # x3
+               x[1:nj+1,1:ni+1])   # x4
+
+    #print("Tlonlat: x.shape", x.shape)
+    #print("Tlonlat: tx.shape", tx.shape)
+
+
+    ty = 0.25*(y[0:nj,  0:ni  ] +  # y1
+               y[0:nj,  1:ni+1] +  # y2
+               y[1:nj+1,0:ni  ] +  # y3
+               y[1:nj+1,1:ni+1])   # y4
+
+
+    tz = 0.25*(z[0:nj,  0:ni  ] +  # z1
+               z[0:nj,  1:ni+1] +  # z2
+               z[1:nj+1,0:ni  ] +  # z3
+               z[1:nj+1,1:ni+1])   # z4
+
+    da = np.sqrt(tx*tx + ty*ty + tz*tz)
+
+    tz = tz/da
+
+    tlon = np.arctan2(ty,tx)
+    tlat = np.arcsin(tz)
+
+    # return tlat, tlon in degrees
+    return np.rad2deg(tlat), np.rad2deg(tlon)
+
+##########################
+##########################
+
+def get_command_line_args():
+    '''
+    argument parser for command line arguments
+    '''
+
+    dstr = "Interpolate JRA55 data"
+    parser = argparse.ArgumentParser(description=dstr)
+
+    # add arguments
+    parser.add_argument("JRADTG", type=str, help="JRA55 file date time group")
+    parser.add_argument("dstgrid", type=str, help="Destination grid file (NetCDF)")
+    parser.add_argument("ncout", type=str, help="Output file name (NetCDF)")
+
+
+    # get the arguments
+    args = parser.parse_args()
+
+    # return values
+    return args.JRADTG, args.dstgrid, args.ncout
+
+
+################################
+################################
+
+def get_jra55_nc_dict():
+    '''
+    Create dictionary that links the NetCDF variable name
+    with the file prefix.
The file prefix is appended by
+    JRADTG from command line
+    '''
+    # specify dictionary with dataset prefix names
+    jra55dict = {"TPRAT_GDS4_SFC_ave3h" : "fcst_phy2m.061_tprat.reg_tl319",  # precip
+                 "DSWRF_GDS4_SFC_ave3h" : "fcst_phy2m.204_dswrf.reg_tl319",  # downward shortwave
+                 "DLWRF_GDS4_SFC_ave3h" : "fcst_phy2m.205_dlwrf.reg_tl319",  # downward longwave
+                 "TMP_GDS4_HTGL"        : "fcst_surf.011_tmp.reg_tl319"   ,  # air temp
+                 "UGRD_GDS4_HTGL"       : "fcst_surf.033_ugrd.reg_tl319"  ,  # u velocity
+                 "VGRD_GDS4_HTGL"       : "fcst_surf.034_vgrd.reg_tl319"  ,  # v velocity
+                 "SPFH_GDS4_HTGL"       : "fcst_surf.051_spfh.reg_tl319"}    # specific humidity
+
+
+    return jra55dict
+
+################################
+################################
+
+def get_jra55_cice_var():
+    '''
+    Make dictionary relating JRA55 NetCDF variables
+    to CICE variables.
+    '''
+
+    # specify output variable names
+    # This is for current CICE expected names
+    # it might be better to change CICE in long run
+    cice_var = {"TPRAT_GDS4_SFC_ave3h" : "ttlpcp",
+                "DSWRF_GDS4_SFC_ave3h" : "glbrad",
+                "DLWRF_GDS4_SFC_ave3h" : "dlwsfc",
+                "TMP_GDS4_HTGL"        : "airtmp",
+                "UGRD_GDS4_HTGL"       : "wndewd",
+                "VGRD_GDS4_HTGL"       : "wndnwd",
+                "SPFH_GDS4_HTGL"       : "spchmd"}
+
+    return cice_var
+
+################################
+################################
+
+def init_ncout(ncout,nc1,llat,llon):
+
+    '''
+    Initialize output NetCDF file
+    with proper units and dimensions.
+    '''
+
+    dsout = Dataset(ncout,'w',format='NETCDF3_64BIT_OFFSET')
+
+    # get dimensions from size of lat
+    (nlat,nlon) = llat.shape
+
+    # create dimensions
+    time  = dsout.createDimension('time',None)  # unlimited
+    dim_j = dsout.createDimension('dim_j',nlat)
+    dim_i = dsout.createDimension('dim_i',nlon)
+
+    # create time variable.
+    # note it is defined as 'times' (with an s) to not conflict
+    # with dimension 'time'
+    times = dsout.createVariable('time','f8',('time',))
+    times.units = nc1['initial_time0_hours'].units
+    times.calendar = 'gregorian'
+
+    # loop over nc1 times
+    dates = []
+    dates.append(nc1['initial_time0_hours'][0] + nc1['forecast_time1'][1])
+    # loop over remaining
+    for h in nc1['initial_time0_hours'][1:-1]:
+       for ft in nc1['forecast_time1'][:]:
+          dates.append(h + ft)
+
+    # include only first forecast_time of last initial time
+    dates.append(nc1['initial_time0_hours'][-1] + nc1['forecast_time1'][0])
+
+    # write dates to file
+    times[:] = dates
+
+    # create LON/LAT variables
+    LON = dsout.createVariable('LON','f8',('dim_j','dim_i',))
+    LON.units = 'degrees_east'
+
+    LAT = dsout.createVariable('LAT','f8',('dim_j','dim_i',))
+    LAT.units = 'degrees_north'
+
+    # write LON, LAT to file
+    LON[:] = llon[:,:]
+    LAT[:] = llat[:,:]
+
+
+    return dsout
+
+################################
+################################
+
+
+# main subroutine
+if __name__ == "__main__":
+
+    # get jra dtg and ncout from command line
+    JRADTG, dstgrid, ncout = get_command_line_args()
+
+    # get jra55 variable/filename prefix dictionary
+    jra55dict = get_jra55_nc_dict()
+
+    # get dictionary linking jra55 variable names
+    # and CICE forcing variable names
+    cice_var = get_jra55_cice_var()
+
+    # read input grid.
+    # use one of the jra55 files.
+    # it is assumed all JRA data are on the same grid
+    fname = f"{jra55dict['TMP_GDS4_HTGL']:s}.{JRADTG:s}.nc"
+    print("opening dataset ", fname)
+    grid1_ds = Dataset(fname,'r',format='NETCDF3_64BIT_OFFSET')
+    lon1 = grid1_ds['g4_lon_3'][:]  # 1D
+    lat1 = grid1_ds['g4_lat_2'][:]  # 1D
+
+    # open destination grid
+    # here it is assumed a CICE NetCDF file.
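+    # (the 2D "lon"/"lat" variable names are hardcoded in the reads below;
+    #  grid values stored in radians are detected and converted to degrees)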
+    # the user can update as appropriate
+    print("Opening ", dstgrid)
+    grid2_ds = Dataset(dstgrid,'r',format='NETCDF3_64BIT_OFFSET')
+    ulon2 = grid2_ds["lon"][:,:]  # 2D. Assumed ULON in degrees
+    ulat2 = grid2_ds["lat"][:,:]  # 2D. Assumed ULAT in degrees
+    if np.max(np.abs(ulat2)) < 10. :
+       ulon2 = np.rad2deg(ulon2)
+       ulat2 = np.rad2deg(ulat2)
+
+    # make tgrid from ugrid
+    ew_bndy_type = 'cyclic'
+    ns_bndy_type = 'open'
+    tlat2, tlon2 = Tlatlon(ulat2,ulon2,ew_bndy_type,ns_bndy_type)
+
+    # make regridders
+    print("making bilinear regridder")
+    method = 'bilinear'
+    periodic = True
+    blin_grid_name = 'bilinear_jra55_gx3.nc'
+    rgrd_bilinear = make_regridder(lon1,lat1,tlon2,tlat2,
+                                   method,periodic,blin_grid_name)
+
+    # setup output dataset by adding lat/lon
+    dsout = init_ncout(ncout,grid1_ds,tlat2,tlon2)
+
+    # no longer need grid1, grid2
+    grid1_ds.close()
+    grid2_ds.close()
+
+    # do the regridding
+    # Loop over all the files using regridder from above
+    # and add to dataout
+    for var, fprefix in jra55dict.items():
+        fname = f"{fprefix:s}.{JRADTG:s}.nc"
+        print("reading ", fname)
+        jra_ds = Dataset(fname,'r',format='NETCDF3_CLASSIC')
+
+        # make output variable
+        data = dsout.createVariable(cice_var[var],'f4',('time','dim_j','dim_i'))
+
+        # do interpolation
+        print("Interpolating ", var)
+
+        if var.find('ave3h') > 0:   # 'ave3h' in var
+           # use bilinear here
+           d = rgrd_bilinear(jra_ds[var][:,:,:,:])
+
+           # write to file in correct time order
+           for t in range(d.shape[0]):
+              for n in range(d.shape[1]):
+                 #print('indx (2*t)+n = ', (2*t)+n)
+                 data[(2*t)+n,:,:] = d[t,n,:,:]
+
+        else:
+           # instantaneous use bilinear
+           d = rgrd_bilinear(jra_ds[var][:,:,:,:])
+
+           # write to file in correct time order.
+           # note need to write 2nd forecast_time first.
+           # in this case first forecast_time is NaN
+           data[0,:,:] = d[0,1,:,:]
+           for t in range(1,d.shape[0]-1):
+              for n in range(d.shape[1]):
+                 #print('indx (2*t)+n-1 = ', (2*t)+n-1)
+                 data[(2*t)+n-1,:,:] = d[t,n,:,:]
+
+           # write first forecast time of last initial time
+           # second forecast time is NAN
+           data[-1,:,:] = d[-1,0,:,:]
+
+        # add coordinates attribute
+        data.coordinates = "LON LAT time"
+        data.long_name = jra_ds[var].long_name
+        data.units = jra_ds[var].units
+
+        precip_factor = 1. / 86400.
+
+        # Convert mm / day to kg/m^2/s.
+        if var.find('PRAT') > 0:
+           data[:] = data[:] * precip_factor
+           data.units = 'kg/m2/s'
+        else:
+           data.units = jra_ds[var].units
+
+        # close jra55 file
+        jra_ds.close()
+
+
+    # write to output file
+    # close output file
+    dsout.close()
+
+    print("Done")
+
diff --git a/configuration/tools/jra55_datasets/make_forcing.csh b/configuration/tools/jra55_datasets/make_forcing.csh new file mode 100755 index 000000000..c57871a25 --- /dev/null +++ b/configuration/tools/jra55_datasets/make_forcing.csh @@ -0,0 +1,49 @@
+#!/bin/csh
+# -----
+# This is a script that worked on NCAR's cheyenne in March, 2021.
+# It converts raw JRA55 datasets to a format that CICE can use.
+# This tool is documented in the CICE user guide.  The
+# tool interpolates to a CICE grid and does things like convert units.
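+# The overall workflow is: link the raw JRA55 data and CICE grid files into
+# a working directory, run the python interpolation script once per quarter,
+# and aggregate the quarterly output into a yearly forcing file with ncrcat.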
+# -----
+# The interp_jra55_ncdf_bilinear.py script was placed in "scripts_dir"
+# The raw JRA55 datasets were placed in "jra55_data_dir"
+# The CICE grid files were placed in "jra55_data_dir"
+# The model output was created in "output_data_dir"
+# -----
+#PBS -N make_forcing
+#PBS -q regular
+#PBS -l select=1:ncpus=4:mpiprocs=4
+#PBS -l walltime=06:00:00
+#PBS -A P93300665
+
+set scripts_dir = "/glade/work/tcraig/cice-consortium/cice.jra55_tool/configuration/tools/jra55_datasets"
+set jra55_data_dir = "/glade/scratch/dbailey/JRA_DATA/"
+set output_data_dir = "/glade/scratch/tcraig/JRA_DATA_output"
+set grid = "gx3"
+set cice_grid_file = "grid_gx3.nc"
+
+module load python/3.7.9
+source /glade/u/apps/opt/ncar_pylib/ncar_pylib.csh default
+module load nco
+
+mkdir -p ${output_data_dir}
+cd ${output_data_dir}
+
+ln -s ${jra55_data_dir}/fcst_*.nc .
+ln -s ${jra55_data_dir}/grid_*.nc .
+
+ln -s ${scripts_dir}/interp_jra55_ncdf_bilinear.py .
+
+#foreach year ( 1995 1996 1997 1998 1999 2000 2001 2002 2003 2004 )
+foreach year ( 1997 )
+
+./interp_jra55_ncdf_bilinear.py ${year}010100_${year}033121 ${cice_grid_file} JRA55_${grid}_03hr_forcing_${year}-q1.nc
+./interp_jra55_ncdf_bilinear.py ${year}040100_${year}063021 ${cice_grid_file} JRA55_${grid}_03hr_forcing_${year}-q2.nc
+./interp_jra55_ncdf_bilinear.py ${year}070100_${year}093021 ${cice_grid_file} JRA55_${grid}_03hr_forcing_${year}-q3.nc
+./interp_jra55_ncdf_bilinear.py ${year}100100_${year}123121 ${cice_grid_file} JRA55_${grid}_03hr_forcing_${year}-q4.nc
+
+ncrcat JRA55_${grid}_03hr_forcing_${year}-??.nc JRA55_${grid}_03hr_forcing_${year}.nc
+
+/bin/rm -f ${jra55_data_dir}/JRA55_${grid}_03hr_forcing_${year}-??.nc
+
+end
diff --git a/doc/source/cice_index.rst b/doc/source/cice_index.rst index 59ddc4122..9e2868947 100644 --- a/doc/source/cice_index.rst +++ b/doc/source/cice_index.rst @@ -139,8 +139,10 @@ either Celsius or Kelvin units).
   "daymo", "number of days in one month", ""
   "daycal", "day number at end of month", ""
   "days_per_year", ":math:`\bullet` number of days in one year", "365"
+   "day_init", ":math:`\bullet` the initial day of the month", ""
   "dbl_kind", "definition of double precision", "selected_real_kind(13)"
-   "dbug", ":math:`\bullet` write extra diagnostics", ".false."
+   "debug_model", "Logical that controls extended model point debugging.", ""
+   "debug_model_step", "Initial timestep for output associated with debug_model.", ""
   "Delta", "function of strain rates (see Section :ref:`dynam`)", "1/s"
   "default_season", "Season from which initial values of forcing are set.", "winter"
   "denom1", "combination of constants for stress equation", ""
@@ -229,6 +231,7 @@ either Celsius or Kelvin units).
   "flw", "incoming longwave radiation", "W/m\ :math:`^2`"
   "flwout", "outgoing longwave radiation", "W/m\ :math:`^2`"
   "fm", "Coriolis parameter * mass in U cell", "kg/s"
+   "forcing_diag", ":math:`\bullet` write extra diagnostics for forcing inputs", ".false."
   "formdrag", ":math:`\bullet` calculate form drag", ""
   "fpond", "fresh water flux to ponds", "kg/m\ :math:`^2`/s"
   "fr_resp", "bgc respiration fraction", "0.05"
@@ -258,9 +261,9 @@
"fswthru_idr", "near IR direct shortwave penetrating to ocean", "W/m\ :math:`^2`" "fswthru_idf", "near IR diffuse shortwave penetrating to ocean", "W/m\ :math:`^2`" "fswthru_ai", "grid-box-mean shortwave penetrating to ocean (fswthru)", "W/m\ :math:`^2`" - "fyear", "current data year", "" - "fyear_final", "last data year", "" - "fyear_init", ":math:`\bullet` initial data year", "" + "fyear", "current forcing data year", "" + "fyear_final", "last forcing data year", "" + "fyear_init", ":math:`\bullet` initial forcing data year", "" "**G**", "", "" "gravit", "gravitational acceleration", "9.80616 m/s\ :math:`^2`" "grid_file", ":math:`\bullet` input file for grid info", "" @@ -313,7 +316,7 @@ either Celsius or Kelvin units). "ice_stderr", "unit number for standard error output", "" "ice_ref_salinity", "reference salinity for ice–ocean exchanges", "4. ppt" "icells", "number of grid cells with specified property (for vectorization)", "" - "iceruf", "ice surface roughness", "5.\ :math:`\times`\ 10\ :math:`^{-4}` m" + "iceruf", ":math:`\bullet` ice surface roughness at atmosphere interface", "5.\ :math:`\times`\ 10\ :math:`^{-4}` m" "icetmask", "ice extent mask (T-cell)", "" "iceumask", "ice extent mask (U-cell)", "" "idate", "the date at the end of the current time step (yyyymmdd)", "" @@ -378,18 +381,21 @@ either Celsius or Kelvin units). "max_blocks", "maximum number of blocks per processor", "" "max_ntrcr", "maximum number of tracers available", "5" "maxraft", "maximum thickness of ice that rafts", "1. m" - "mday", "day of the month", "" + "mday", "model day of the month", "" "meltb", "basal ice melt", "m" "meltl", "lateral ice melt", "m" "melts", "snow melt", "m" "meltt", "top ice melt", "m" "min_salin", "threshold for brine pockets", "0.1 ppt" "mlt_onset", "day of year that surface melt begins", "" - "month", "the month number", "" + "mmonth", "model month number", "" "monthp", "previous month number", "" + "month_init", ":math:`\bullet` the initial month", "" "mps_to_cmpdy", "m per s to cm per day conversion", "8.64\ :math:`\times`\ 10\ :math:`^6`" + "msec", "model seconds elasped into day", "" "mtask", "local processor number that writes debugging data", "" "mu_rdg", ":math:`\bullet` e-folding scale of ridged ice", "" + "myear", "model year", "" "my_task", "task ID for the current processor", "" "**N**", "", "" "n_aero", "number of aerosol species", "" @@ -416,7 +422,8 @@ either Celsius or Kelvin units). "nlt_bgc_[chem]", "ocean sources and sinks for biogeochemistry", "" "nml_filename", "namelist file name", "" "nprocs", ":math:`\bullet` total number of processors", "" - "npt", ":math:`\bullet` total number of time steps (dt)", "" + "npt", ":math:`\bullet` total run length values associate with npt_unit", "" + "npt_unit", "units of the run length, number set by npt", "" "ns_boundary_type", ":math:`\bullet` type of north-south boundary condition", "" "nslyr", "number of snow layers in each category", "" "nspint", "number of solar spectral intervals", "" @@ -443,7 +450,6 @@ either Celsius or Kelvin units). "nvarz", "number of category, vertical grid fields written to history", "" "nx(y)_block", "total number of gridpoints on block in x(y) direction", "" "nx(y)_global", "number of physical gridpoints in x(y) direction, global domain", "" - "nyr", "year number", "" "**O**", "", "" "ocean_bio", "concentrations of bgc constituents in the ocean", "" "oceanmixed_file", ":math:`\bullet` data file containing ocean forcing data", "" @@ -555,8 +561,8 @@ either Celsius or Kelvin units). 
"scale_factor", "scaling factor for shortwave radiation components", "" "seabed_stress", "if true, calculate seabed stress", "F" "seabed_stress_method", "method for calculating seabed stress (‘LKD’ or ‘probabilistic’)", "LKD" - "sec", "seconds elasped into idate", "" "secday", "number of seconds in a day", "86400." + "sec_init", ":math:`\bullet` the initial second", "" "shcoef", "transfer coefficient for sensible heat", "" "shear", "strain rate II component", "1/s" "shlat", "southern latitude of artificial mask edge", "30\ :math:`^\circ`\ N" @@ -596,12 +602,11 @@ either Celsius or Kelvin units). "tarear", "1/tarea", "1/m\ :math:`^2`" "tareas", "area of southern hemisphere T-cells", "m\ :math:`^2`" "tcstr", "string identifying T grid for history variables", "" - "tday", "absolute day number", "" "Tf", "freezing temperature", "C" "Tffresh", "freezing temp of fresh ice", "273.15 K" "tfrz_option", ":math:`\bullet` form of ocean freezing temperature", "" "thinS", "minimum ice thickness for brine tracer", "" - "time", "total elapsed time", "s" + "timesecs", "total elapsed time in seconds", "s" "time_beg", "beginning time for history averages", "" "time_bounds", "beginning and ending time for history averages", "" "time_end", "ending time for history averages", "" @@ -681,7 +686,7 @@ either Celsius or Kelvin units). "**X**", "", "" "**Y**", "", "" "ycycle", ":math:`\bullet` number of years in forcing data cycle", "" - "yday", "day of the year", "" + "yday", "day of the year, computed in the model calendar", "" "yield_curve", "type of yield curve", "ellipse" "yieldstress11(12, 22)", "yield stress tensor components", "" "year_init", ":math:`\bullet` the initial year", "" diff --git a/doc/source/conf.py b/doc/source/conf.py index e876980ab..4cf2f580d 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -62,9 +62,9 @@ # built documents. # # The short X.Y version. -version = u'6.1.4' +version = u'6.2.0' # The full version, including alpha/beta/rc tags. -version = u'6.1.4' +version = u'6.2.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/doc/source/developer_guide/dg_dynamics.rst b/doc/source/developer_guide/dg_dynamics.rst index c94d47b35..47b54bde2 100644 --- a/doc/source/developer_guide/dg_dynamics.rst +++ b/doc/source/developer_guide/dg_dynamics.rst @@ -90,12 +90,9 @@ Time Manager Time manager data is module data in **cicecore/shared/ice_calendar.F90**. Much of the time manager data is public and operated on during the model timestepping. The model timestepping actually takes -place in the **CICE_RunMod.F90** file which is part of the driver code and tends to look like this:: +place in the **CICE_RunMod.F90** file which is part of the driver code. - call ice_step - istep = istep + 1 ! update time step counters - istep1 = istep1 + 1 - time = time + dt ! determine the time and date +The time manager was updated in early 2021. Additional information about the time manager can be found here, :ref:`timemanagerplus` diff --git a/doc/source/developer_guide/dg_forcing.rst b/doc/source/developer_guide/dg_forcing.rst index 90ef843b0..0c0380538 100644 --- a/doc/source/developer_guide/dg_forcing.rst +++ b/doc/source/developer_guide/dg_forcing.rst @@ -120,8 +120,8 @@ Time interpolation coefficients are computed in the **JRA55_data** subroutine. The forcing data is converted to model inputs in the subroutine **prepare_forcing** called in **get_forcing_atmo**. 
To clarify, the JRA55 input data includes
-- uatm = model grid i-direction wind velocity component (m/s)
-- vatm = model grid j-direction wind velocity component (m/s)
+- uatm = T-cell centered, model grid i-direction wind velocity component (m/s)
+- vatm = T-cell centered, model grid j-direction wind velocity component (m/s)
- Tair = air temperature (K)
- Qa = specific humidity (kg/kg)
- flw = incoming longwave radiation (W/m^2)
diff --git a/doc/source/developer_guide/dg_scripts.rst b/doc/source/developer_guide/dg_scripts.rst index da5ef7d24..50853b3ea 100644 --- a/doc/source/developer_guide/dg_scripts.rst +++ b/doc/source/developer_guide/dg_scripts.rst @@ -161,25 +161,25 @@ To add a new test (for example newtest), several files may be needed,
Generating a new test, particularly the **test_newtest.script** usually takes some
iteration before it's working properly.
-.. _dev_compliance:
+.. _dev_validation:
-Code Compliance Script
+Code Validation Script
----------------------
-The code compliance test validates non bit-for-bit model changes.  The directory
-**configuration/scripts/tests/QC** contains scripts related to the compliance testing,
-and this process is described in :ref:`compliance`.  This section will describe a set
-of scripts that test and validate the code compliance process.  This should be done
-when the compliance test or compliance test scripts (i.e., ``cice.t-test.py``) are modified.
-Again, this section **documents a validation process for the compliance scripts**; it does not
-describe to how run the compliance test itself.
+The code validation (aka QC or quality control) test validates non bit-for-bit model changes.  The directory
+**configuration/scripts/tests/QC** contains scripts related to the validation testing,
+and this process is described in :ref:`validation`.  This section will describe a set
+of scripts that test and validate the QC process.  This should be done
+when the QC test or QC test scripts (i.e., ``cice.t-test.py``) are modified.
+Again, this section **documents a validation process for the QC scripts**; it does not
+describe how to run the validation test itself.
-Two scripts have been created to automatically validate the code compliance script.
+Two scripts have been created to automatically validate the QC script.
These scripts are:
* ``gen_qc_cases.csh``, which creates the 4 test cases required for validation,
  builds the executable, and submits to the queue.
-* ``compare_qc_cases.csh``, which runs the code compliance script on three combinations
+* ``compare_qc_cases.csh``, which runs the QC script on three combinations
  of the 4 test cases and outputs whether or not the correct response was received.
The ``gen_qc_cases.csh`` script allows users to pass some arguments similar
@@ -216,7 +216,7 @@ To install the necessary Python packages, the ``pip`` Python utility can be used
check to see if there is any Python module (``module avail python``) that you might
need to load prior to using ``pip``.
-To perform the validation, execute the following commands.
+To perform the QC validation, execute the following commands.
.. code-block:: bash
diff --git a/doc/source/developer_guide/dg_tools.rst b/doc/source/developer_guide/dg_tools.rst new file mode 100644 index 000000000..ba29e0184 --- /dev/null +++ b/doc/source/developer_guide/dg_tools.rst @@ -0,0 +1,150 @@
+:tocdepth: 3
+
+.. _tools:
+
+Tools
+=============
+
+
+..
_cice4restart:
+
+CICE4 restart conversion
+-------------------------
+
+There is a Fortran program in **configuration/tools/cice4_restart_conversion**
+that will help convert a CICE4 restart file into a CICE5 restart file.
+There is a bit of documentation contained in that source code about how
+to build, use, and run the tool.  A few prognostic variables were changed
+from CICE4 to CICE5 which fundamentally altered the fields saved to
+the restart file.  See
+**configuration/tools/cice4_restart_conversion/convert_restarts.f90**
+for additional information.
+
+
+.. _jra55datasettool:
+
+JRA55 forcing datasets
+------------------------
+
+This section describes how to generate JRA55 forcing data for the CICE model.
+Raw JRA55 files have to be interpolated and processed into input files specifically
+for the CICE model.  A tool exists in **configuration/tools/jra55_datasets**
+to support that process.
+The raw JRA55 data is obtained from the NCAR/UCAR Research Data Archive and
+the conversion tools are written in python.
+
+Requirements
+*********************
+
+Python3 is required, and the following
+python packages are required with the tested version number in parentheses.  These
+versions are not necessarily the only versions that work; they just indicate what
+versions were used when the script was recently run.
+
+- python3 (python3.7.9)
+- numpy (1.18.5)
+- netCDF4 (1.5.5)
+- ESMPy (8.0.0)
+- xesmf (0.3.0)
+
+NCO is required for aggregating the output files into yearly files.
+
+- netcdf (4.7.4)
+- nco (4.9.5)
+
+Raw JRA55 forcing data
+*************************
+
+The raw JRA55 forcing data is obtained from the UCAR/NCAR Research Data Archive,
+https://rda.ucar.edu/.  You must first register (free) and then sign in.  The
+"JRA-55 Reanalysis Daily 3-Hourly and 6-Hourly Data" is ds628.0 and can be found here,
+https://rda.ucar.edu/datasets/ds628.0.
+
+The "Data access" tabs will provide a list of product categories.
+The JRA55 data of interest are located in 2 separate products.  Winds, air
+temperature, and specific humidity fields are included in "JRA-55
+3-Hourly Model Resolution 2-Dimensional Instantaneous Diagnostic Fields".
+Precipitation and downward radiation fluxes are found in "JRA-55 3-Hourly
+Model Resolution 2-Dimensional Average Diagnostic Fields".  (Note the
+difference between instantaneous and averaged data products.  There are several
+JRA55 datasets available; you will likely have to scroll down the page to find
+these datasets.)  Data are also available on a coarser 1.25° grid, but the tools
+are best used with the native TL319 JRA55 grid.
+
+The fields needed for CICE are
+
+- specific humidity (3-hourly instantaneous), Qa
+- temperature (3-hourly instantaneous), Tair
+- u-component of wind (3-hourly instantaneous), uatm
+- v-component of wind (3-hourly instantaneous), vatm
+- downward longwave radiation flux (3 hourly average), flw
+- downward solar radiation flux (3 hourly average), fsw
+- total precipitation (3 hourly average), fsnow
+
+To customize the dataset for download, choose the “Get a Subset” option.  Select
+the desired times in the “Temporal Selection” section, then click on desired parameters
+(see list above).  After clicking continue, select Output Format "Converted to NetCDF".
+
+Once the data request is made, an email notification will be sent with a dedicated
+URL that will provide a variety of options for downloading the data remotely.
+The data will be available to download for 5 days.
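+Each raw file is named with a dataset prefix plus the requested date time
+group (DTG).  As a rough illustration, the short Python sketch below mirrors
+the prefix-to-variable mapping hardcoded in **interp_jra55_ncdf_bilinear.py**
+(only two of the seven fields are shown, and the DTG value is illustrative) ::
+
+  # sketch: how the conversion tool composes raw JRA55 filenames
+  jra55dict = {"TMP_GDS4_HTGL"  : "fcst_surf.011_tmp.reg_tl319",    # air temperature
+               "SPFH_GDS4_HTGL" : "fcst_surf.051_spfh.reg_tl319"}   # specific humidity
+  JRADTG = "1996010100_1996033121"   # illustrative date time group
+  for var, prefix in jra55dict.items():
+      print(var, "->", f"{prefix:s}.{JRADTG:s}.nc")
+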
+The raw data consists of multiple files, each containing three months of data for
+one field.
+
+
+Data conversion
+*************************
+
+The script, **configuration/tools/jra55_datasets/interp_jra55_ncdf_bilinear.py**,
+converts the raw data to CICE input files.
+
+The script uses a bilinear regridding algorithm to regrid from the JRA55 grid to
+the CICE grid.  It uses the Python package ‘xesmf’ to generate bilinear
+regridding weights, and these regridding weights are written to the file defined by
+the variable "blin_grid_name" in **interp_jra55_ncdf_bilinear.py**.  This filename
+can be modified by editing **interp_jra55_ncdf_bilinear.py**.
+The weights file can be re-used if interpolating different data on the same grid.
+Additional regridding options are available via xesmf, including ‘conservative’ and
+‘nearest neighbor’, but these methods have not been tested in the current version
+of the scripts.  The reader
+is referred to the xESMF web page for further documentation
+(https://xesmf.readthedocs.io/en/latest/, last accessed 5 NOV 2020).
+
+To use the **interp_jra55_ncdf_bilinear** script, do ::
+
+  python3 interp_jra55_ncdf_bilinear.py -h
+
+to see the latest interface information ::
+
+  usage: interp_jra55_ncdf_bilinear.py [-h] JRADTG gridout ncout
+
+  Interpolate JRA55 data to CICE grid
+
+  positional arguments:
+    JRADTG      JRA55 input file date time group
+    gridout     CICE grid file (NetCDF)
+    ncout       Output NetCDF filename
+
+  optional arguments:
+    -h, --help  show this help message and exit
+
+Sample usage is ::
+
+   ./interp_jra55_ncdf_bilinear.py 1996010100_1996033121 grid_gx3.nc JRA55_gx3_03hr_forcing_1996-q1.nc
+   ./interp_jra55_ncdf_bilinear.py 1996040100_1996063021 grid_gx3.nc JRA55_gx3_03hr_forcing_1996-q2.nc
+   ./interp_jra55_ncdf_bilinear.py 1996070100_1996093021 grid_gx3.nc JRA55_gx3_03hr_forcing_1996-q3.nc
+   ./interp_jra55_ncdf_bilinear.py 1996100100_1996123121 grid_gx3.nc JRA55_gx3_03hr_forcing_1996-q4.nc
+
+In this case, the four quarters of 1996 JRA55 data are interpolated to the gx3 grid.
+NCO can be used to aggregate these files into a single file ::
+
+  ncrcat JRA55_gx3_03hr_forcing_1996-??.nc JRA55_gx3_03hr_forcing_1996.nc
+
+NOTES
+
+- The scripts are designed to read a CICE grid file in netCDF format.  This is the "grid_gx3.nc" file above.  The NetCDF grid variable names are hardcoded in **interp_jra55_ncdf_bilinear.py**.  If you are using a different grid file with different variable names, the script needs to be updated.
+- All files should be placed in a common directory.  This includes the raw JRA55 input files, the CICE grid file, and **interp_jra55_ncdf_bilinear.py**.  The output files will be written to the same directory.
+- The script **configuration/tools/jra55_datasets/make_forcing.csh** was used on the NCAR cheyenne machine in March, 2021 to generate CICE forcing data.  It assumes the raw JRA55 is downloaded, but then sets up the python environment, links all the data in a common directory, runs **interp_jra55_ncdf_bilinear.py** and then aggregates the quarterly data using NCO.
+- The new forcing files can then be defined in the **ice_in** namelist file using the input variables, ``atm_data_type``, ``atm_data_format``, ``atm_data_dir``, ``fyear_init``, and ``ycycle``.  See :ref:`forcing` for more information.
+- The total precipitation field is mm/day in JRA55.
This field is initially read in as snow, but prepare_forcing in **ice_forcing.F90** splits that into rain or snow forcing depending on the air temperature.
+
diff --git a/doc/source/developer_guide/index.rst b/doc/source/developer_guide/index.rst index ab5b2d1e6..6fc3356f4 100644 --- a/doc/source/developer_guide/index.rst +++ b/doc/source/developer_guide/index.rst @@ -17,5 +17,6 @@ Developer Guide
    dg_forcing.rst
    dg_icepack.rst
    dg_scripts.rst
+   dg_tools.rst
    dg_other.rst
diff --git a/doc/source/user_guide/ug_case_settings.rst b/doc/source/user_guide/ug_case_settings.rst index ccf7f0356..44ee6f5b0 100644 --- a/doc/source/user_guide/ug_case_settings.rst +++ b/doc/source/user_guide/ug_case_settings.rst @@ -78,6 +78,7 @@ can be modified as needed.
   "ICE_HSTDIR", "string", "unused", "${ICE_RUNDIR}/history"
   "ICE_LOGDIR", "string", "log directory", "${ICE_CASEDIR}/logs"
   "ICE_DRVOPT", "string", "unused", "standalone/cice"
+   "ICE_TARGET", "string", "build target", "set by cice.setup"
   "ICE_IOTYPE", "string", "I/O format", "set by cice.setup"
   " ", "netcdf", "serial netCDF"
   " ", "pio", "parallel netCDF"
@@ -143,7 +144,9 @@ setup_nml
   "``conserv_check``", "logical", "check conservation", "``.false.``"
   "``cpl_bgc``", "logical", "couple bgc thru driver", "``.false.``"
   "``days_per_year``", "integer", "number of days in a model year", "365"
-   "``dbug``", "logical", "write extra diagnostics", "``.false.``"
+   "``day_init``", "integer", "the initial day of the month if not using restart", "1"
+   "``debug_model``", "logical", "write extended model point diagnostics", "``.false.``"
+   "``debug_model_step``", "integer", "initial timestep to write ``debug_model`` output", "999999999"
   "``diagfreq``", "integer", "frequency of diagnostic output in timesteps", "24"
   "``diag_type``", "``stdout``", "write diagnostic output to stdout", "``stdout``"
   "", "``file``", "write diagnostic output to file", ""
@@ -156,6 +159,7 @@ setup_nml
   "", "``1``", "write restart every ``dumpfreq_n`` time step", ""
   "``dumpfreq_n``", "integer", "write restart frequency with ``dumpfreq``", "1"
   "``dump_last``", "logical", "write restart on last time step of simulation", "``.false.``"
+   "``forcing_diag``", "logical", "write extra diagnostics", "``.false.``"
   "``hist_avg``", "logical", "write time-averaged data", "``.true.``"
   "``histfreq``", "``d``", "write history every ``histfreq_n`` days", "'1','h','d','m','y'"
   "", "``h``", "write history every ``histfreq_n`` hours", ""
@@ -178,8 +182,15 @@ setup_nml
   "``latpnt``", "real", "latitude of (2) diagnostic points", "90.0,-65.0"
   "``lcdf64``", "logical", "use 64-bit netcdf format", "``.false.``"
   "``lonpnt``", "real", "longitude of (2) diagnostic points", "0.0,-45.0"
+   "``month_init``", "integer", "the initial month if not using restart", "1"
   "``ndtd``", "integer", "number of dynamics/advection/ridging/steps per thermo timestep", "1"
-   "``npt``", "integer", "total number of time steps to take", "99999"
+   "``npt``", "integer", "total number of npt_units to run the model", "99999"
+   "``npt_unit``", "``d``", "run ``npt`` days", "1"
+   "", "``h``", "run ``npt`` hours", ""
+   "", "``m``", "run ``npt`` months", ""
+   "", "``s``", "run ``npt`` seconds", ""
+   "", "``y``", "run ``npt`` years", ""
+   "", "``1``", "run ``npt`` timesteps", ""
   "``numin``", "integer", "minimum internal IO unit number", "11"
   "``numax``", "integer", "maximum internal IO unit number", "99"
   "``pointer_file``", "string", "restart pointer filename", "'ice.restart_file'"
@@ -194,6 +205,7 @@ setup_nml
   "``runid``", "string", "label for run (currently
CESM only)", "'unknown'" "``runtype``", "``continue``", "restart using ``pointer_file``", "``initial``" "", "``initial``", "start from ``ice_ic``", "" + "``sec_init``", "integer", "the initial second if not using restart", "0" "``use_leap_years``", "logical", "include leap days", "``.false.``" "``use_restart_time``", "logical", "set initial date using restart file", "``.true.``" "``version_name``", "string", "model version", "'unknown_version_name'" @@ -212,6 +224,7 @@ grid_nml "``bathymetry_file``", "string", "name of bathymetry file to be read", "‘unknown_bathymetry_file’" "``bathymetry_format``", "``default``", "NetCDF depth field", "‘default’" "", "``pop``", "pop thickness file in cm in ascii format", "" + "``close_boundaries``", "logical", "force two gridcell wide land mask on boundaries", "``.false.`` "``dxrect``", "real", "x-direction grid spacing for rectangular grid in cm", "0.0" "``dyrect``", "real", "y-direction grid spacing for rectangular grid in cm", "0.0" "``gridcpl_file``", "string", "input file for coupling grid info", "'unknown_gridcpl_file'" @@ -248,6 +261,7 @@ domain_nml "``add_mpi_barriers``", "logical", "throttle communication", "``.false.``" "``block_size_x``", "integer", "block size in x direction", "-1" "``block_size_y``", "integer", "block size in y direction", "-1" + "``debug_blocks``", "logical", "add additional print statements to debug the block decomposition", "``.false.``" "``distribution_type``", "``cartesian``", "2D cartesian block distribution method", "``cartesian``" "", "``rake``", "redistribute blocks among neighbors", "" "", "``roundrobin``", "1 block per proc until blocks are used", "" @@ -355,6 +369,8 @@ dynamics_nml "", "", "", "" "``advection``", "``remap``", "linear remapping advection scheme", "``remap``" "", "``upwind``", "donor cell advection", "" + "``algo_nonlin``", "``anderson``", "use nonlinear anderson algorithm for implicit solver", "picard" + "", "``picard``", "use picard algorithm", "" "``alphab``", "real", ":math:`\alpha_{b}` factor in :cite:`Lemieux16`", "20.0" "``arlx``", "real", "revised_evp value", "300.0" "``brlx``", "real", "revised_evp value", "300.0" @@ -394,11 +410,11 @@ dynamics_nml "``monitor_pgmres``", "logical", "write velocity norm at each PGMRES iteration", "``.false.``" "``mu_rdg``", "real", "e-folding scale of ridged ice for ``krdg_partic`` = 1 in m^0.5", "3.0" "``ndte``", "integer", "number of EVP subcycles", "120" - "``ortho_type``", "``mgs``", "Use modified Gram-Shchmidt in FGMRES solver", "``mgs``" - "", "``cgs``", "Use classical Gram-Shchmidt in FGMRES solver", "" - "``precond``", "``pgmres``", "Use GMRES as preconditioner for FGMRES solver", "``pgmres``" - "", "``diag``", "Use Jacobi preconditioner for the FGMRES solver", "" + "``ortho_type``", "``cgs``", "Use classical Gram-Shchmidt in FGMRES solver", "``mgs``" + "", "``mgs``", "Use modified Gram-Shchmidt in FGMRES solver", "" + "``precond``", "``diag``", "Use Jacobi preconditioner for the FGMRES solver", "``pgmres``" "", "``ident``", "Don't use a preconditioner for the FGMRES solver", "" + "", "``pgmres``", "Use GMRES as preconditioner for FGMRES solver", "" "``Pstar``", "real", "constant in Hibler strength formula (N/m\ :math:`^2`)", "2.75e4" "``reltol_nonlin``", "real", "relative tolerance for nonlinear solver", "1e-8" "``reltol_fgmres``", "real", "relative tolerance for FGMRES solver", "1e-2" @@ -411,6 +427,7 @@ dynamics_nml "", "``geostropic``", "computed from ocean velocity", "" "``threshold_hw``", "real", "Max water depth for grounding (see 
:cite:`Amundrud04`)", "30." "``yield_curve``", "``ellipse``", "elliptical yield curve", "``ellipse``" + "``use_mean_vrel``", "logical", "Use mean of two previous iterations for vrel in VP", "``.true.``" "", "", "", "" shortwave_nml @@ -495,7 +512,7 @@ forcing_nml "``default_season``", "``summer``", "forcing initial summer values", "``winter``" "", "``winter``", "forcing initial winter values", "" "``emissivity``", "real", "emissivity of snow and ice", "0.985" - "``fbot_xfer_type``", "``Cdn_ocn``", "variabler ocean heat transfer coefficient scheme", "``constant``" + "``fbot_xfer_type``", "``Cdn_ocn``", "variable ocean heat transfer coefficient scheme", "``constant``" "", "``constant``", "constant ocean heat transfer coefficient", "" "``fe_data_type``", "``clim``", "ocean climatology forcing value for iron", "``default``" "", "``default``", "default forcing value for iron", "" @@ -505,6 +522,7 @@ forcing_nml "``ice_data_type``", "``boxslotcyl``", "initialize ice concentration and velocity for :ref:`boxslotcyl` test (:cite:`Zalesak79`)", "``default``" "", "``box2001``", "initialize ice concentration for :ref:`box2001` test (:cite:`Hunke01`)", "" "", "``default``", "no special initialization", "" + "``iceruf``", "real", "ice surface roughness at atmosphere interface", "0.0005" "``l_mpond_fresh``", "``.false.``", "release pond water immediately to ocean", "``.false.``" "", "``true``", "retain (topo) pond water until ponds drain", "" "``natmiter``", "integer", "number of atmo boundary layer iterations", "5" @@ -525,6 +543,7 @@ forcing_nml "``restart_coszen``", "logical", "read/write coszen in restart files", "``.false.``" "``restore_ocn``", "logical", "restore sst to data", "``.false.``" "``restore_ice``", "logical", "restore ice state along lateral boundaries", "``.false.``" + "``rotate_wind``", "logical", "rotate wind from east/north to computation grid", "``.true.``" "``tfrz_option``", "``linear_salt``", "linear functino of salinity (ktherm=1)", "``mushy``" "", "``minus1p8``", "constant ocean freezing temperature (:math:`-1.8^{\circ} C`)", "" "", "``mushy``", "matches mushy-layer thermo (ktherm=2)", "" diff --git a/doc/source/user_guide/ug_implementation.rst b/doc/source/user_guide/ug_implementation.rst index cbfe37b0c..566d10fbc 100644 --- a/doc/source/user_guide/ug_implementation.rst +++ b/doc/source/user_guide/ug_implementation.rst @@ -163,18 +163,19 @@ information to the log file, and if the block size or max blocks is inconsistent with the task and thread size, the model will abort. The code will also print a warning if the maximum number of blocks is too large. Although this is not fatal, it does use extra memory. If ``max_blocks`` is -set to -1, the code will compute a ``max_blocks`` on the fly. +set to -1, the code will compute a tentative ``max_blocks`` on the fly. A loop at the end of routine *create\_blocks* in module **ice\_blocks.F90** will print the locations for all of the blocks on -the global grid if dbug is set to be true. Likewise, a similar loop at +the global grid if the namelist variable ``debug_blocks`` is set to be true. Likewise, a similar loop at the end of routine *create\_local\_block\_ids* in module **ice\_distribution.F90** will print the processor and local block number for each block. With this information, the grid decomposition -into processors and blocks can be ascertained. 
The dbug flag must be
-manually set in the code in each case (independently of the dbug flag in
-**ice\_in**), as there may be hundreds or thousands of blocks to print
-and this information should be needed only rarely. This information is
+into processors and blocks can be ascertained. This ``debug_blocks`` variable
+should be used carefully as there may be hundreds or thousands of blocks to print
+and this information should be needed only rarely. ``debug_blocks``
+can be set to true using the
+``debugblocks`` option with **cice.setup**. This information is
much easier to look at using a debugger such as Totalview.  There is also
an output field that can be activated in `icefields\_nml`, ``f_blkmask``,
that prints out the variable ``blkmask`` to the history file and
@@ -268,8 +269,11 @@ routines, is adopted from POP. The boundary routines perform boundary
communications among processors when MPI is in use and among blocks
whenever there is more than one block per processor.
-Open/cyclic boundary conditions are the default in CICE; the physical
-domain can still be closed using the land mask. In our bipolar,
+Open/cyclic boundary conditions are the default in CICE. Closed boundary
+conditions are not currently supported. The physical
+domain can still be closed using the land mask, and this can be done in
+namelist with the ``close_boundaries`` variable, which forces the mask
+on the boundary to land for a two-gridcell depth. In our bipolar,
displaced-pole grids, one row of grid cells along the north and south
boundaries is located on land, and along east/west domain boundaries not
masked by land, periodic conditions wrap the domain around the globe.
@@ -529,12 +533,72 @@ schemes and the aerosol tracers, and the level-ice pond parameterization
additionally requires the level-ice tracers.
+
+.. _timemanagerplus:
+
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Time Manager and Initialization
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The time manager is an important piece of the CICE model.
+
+.. _timemanager:
+
+****************************
+Time Manager
+****************************
+
+The primary prognostic variables in the time manager are ``myear``,
+``mmonth``, ``mday``, and ``msec``.  These are integers and identify
+the current model year, month, day, and second respectively.
+The model timestep is ``dt`` with units of seconds.  See :ref:`parameters`
+for additional information about choosing an appropriate timestep.
+The internal variables ``istep``, ``istep0``, and ``istep1`` keep
+track of the number of timesteps.  ``istep`` is the counter for
+the current run and is set to 0 at the start of each run.  ``istep0``
+is the step count at the start of a long multi-restart run, and
+``istep1`` is the cumulative step count of a long multi-restart run.
+
+In general, the time manager should be advanced by calling
+*advance\_timestep*.  This subroutine in **ice\_calendar.F90**
+automatically advances the model time by ``dt``.  It also advances
+the istep numbers and calls subroutine *calendar* to update
+additional calendar data.
+
+The namelist variable ``use_restart_time`` specifies whether to
+use the time and step numbers saved on a restart file or whether
+to set the initial model time to the namelist values defined by
+``year_init``, ``month_init``, ``day_init``, and ``sec_init``.
+Normally, ``use_restart_time`` is set to false on the initial run
+and then set to true on subsequent restart runs of the same
+case to allow time to advance thereafter.
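+For example, a minimal sketch of the relevant **setup_nml** entries for an
+initial run starting at a specific model date (the values shown are
+illustrative only) ::
+
+  use_restart_time = .false.
+  year_init  = 2005
+  month_init = 1
+  day_init   = 1
+  sec_init   = 0
+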
More information about
+the restart capability can be found here, :ref:`restartfiles`.
+
+The time manager was updated in early 2021.  The standalone model
+was modified, and some tests were done in a coupled framework after
+modifications to the high level coupling interface.  For some coupled models, the
+coupling interface may need to be updated when updating CICE with the new time manager.
+In particular, the old prognostic variable ``time`` no longer exists in CICE,
+``year_init`` only defines the model initial year, and
+the calendar subroutine is called without any arguments.  One can
+set the namelist variables ``year_init``, ``month_init``, ``day_init``,
+``sec_init``, and ``dt`` in conjunction with ``days_per_year`` and
+``use_leap_years`` to initialize the model date, timestep, and calendar.
+To overwrite the default/namelist settings in the coupling layer,
+set the **ice\_calendar.F90** variables ``myear``, ``mmonth``, ``mday``,
+``msec`` and ``dt`` after the namelists have been read.  Subroutine
+*calendar* should then be called to update all the calendar data.
+Finally, subroutine *advance\_timestep* should be used to advance
+the model time manager.  It advances the step numbers, advances
+time by ``dt``, and updates the calendar data.  The older method
+of manually advancing the steps and adding ``dt`` to ``time`` is
+deprecated.
+
.. _init:
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Initialization and coupling
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
+****************************
+Initialization and Restarts
+****************************
The ice model’s parameters and variables are initialized in several
steps. Many constants and physical parameters are set in
@@ -612,9 +676,9 @@ reset to ‘none.’
.. _parameters:
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+**********************************
Choosing an appropriate time step
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+**********************************
The time step is chosen based on stability of the transport component
(both horizontal and in thickness space) and on resolution of the
@@ -705,6 +769,8 @@ the problem, and ``brlx`` represents the effective subcycling
Model output
~~~~~~~~~~~~
+There are a number of model output streams and formats.
+
.. _history:
*************
for history and restart files, and history and restart file must use the same
io package.  The namelist variable ``history_format`` further refines the
format approach or style for some io packages.
-Model output data is averaged over the period(s) given by ``histfreq`` and
+Model output data can be written as instantaneous or average data as specified
+by the ``hist_avg`` namelist flag.  The data is written at the period(s) given by ``histfreq`` and
``histfreq_n``, and written to binary or netCDF files prepended by ``history_file``
in **ice_in**.  These settings for history files are set in the
**setup_nml** section of **ice_in** (see :ref:`tabnamelist`).
@@
is now a character string corresponding to ``histfreq`` or ‘x’ for none.
files, no matter what the frequency is.) If there are no namelist flags
with a given ``histfreq`` value, or if an element of ``histfreq_n`` is 0, then
no file will be written at that frequency. The output period can be
-discerned from the filenames.
+discerned from the filenames.  All history streams will be either instantaneous
+or averaged as specified by the ``hist_avg`` namelist setting.
For example, in the namelist: ::
-  ``histfreq`` = ’1’, ’h’, ’d’, ’m’, ’y’
-  ``histfreq_n`` = 1, 6, 0, 1, 1
-  ``f_hi`` = ’1’
-  ``f_hs`` = ’h’
-  ``f_Tsfc`` = ’d’
-  ``f_aice`` = ’m’
-  ``f_meltb`` = ’mh’
-  ``f_iage`` = ’x’
+  histfreq = ’1’, ’h’, ’d’, ’m’, ’y’
+  histfreq_n = 1, 6, 0, 1, 1
+  hist_avg = .true.
+  f_hi = ’1’
+  f_hs = ’h’
+  f_Tsfc = ’d’
+  f_aice = ’m’
+  f_meltb = ’mh’
+  f_iage = ’x’
Here, ``hi`` will be written to a file on every timestep, ``hs`` will be
written once every 6 hours, ``aice`` once a month, ``meltb`` once a month AND
@@ -784,6 +853,14 @@ as long as for a single frequency. If you only want monthly output, the
most efficient setting is ``histfreq`` = ’m’,’x’,’x’,’x’,’x’. The code counts
the number of desired streams (``nstreams``) based on ``histfreq``.
+There is no restart capability built into the history implementation.  If the
+model stops in the middle of a history accumulation period, that data is lost
+on restart, and the accumulation is zeroed out at startup.  That means the
+dump frequency (see :ref:`restartfiles`) and history frequency need to be
+somewhat coordinated.  For
+example, if monthly history files are requested, the dump frequency should be
+set to an integer number of months.
+
The history variable names must be unique for netCDF, so in cases where
a variable is written at more than one frequency, the variable name is
appended with the frequency in files after the first one.  In the example
@@ -799,7 +876,7 @@
every 3 months, for example.
If ``write_ic`` is set to true in **ice\_in**, a snapshot of the same set
of history fields at the start of the run will be written to the history
directory in **iceh\_ic.[timeID].nc(da)**. Several history variables are
-hard-coded for instantaneous output regardless of the averaging flag, at
+hard-coded for instantaneous output regardless of the ``hist_avg`` averaging flag, at
the frequency given by their namelist flag.
The normalized principal components of internal ice stress are computed
@@ -908,6 +985,8 @@ The timers use *MPI\_WTIME* for parallel runs and the F90 intrinsic
| 16           | BGC         | biogeochemistry                                    |
+--------------+-------------+----------------------------------------------------+
+.. _restartfiles:
+
*************
Restart files
*************
@@ -937,7 +1016,8 @@ Additional namelist flags provide further control of restart behavior.
of a run when it is otherwise not scheduled to occur. The flag
``use_restart_time`` enables the user to choose to use the model date
provided in the restart files. If ``use_restart_time`` = false then the
-initial model date stamp is determined from the namelist parameters.
+initial model date stamp is determined from the namelist parameters,
+``year_init``, ``month_init``, ``day_init``, and ``sec_init``.
lcdf64 = true sets 64-bit netCDF output, allowing larger file sizes.
Routines for gathering, scattering and (unformatted) reading and writing
@@ -957,5 +1037,6 @@ initialized with no ice. The gx3 case was run for 1 year using the 1997
forcing data provided with the code. The gx1 case was run for 20 years,
so that the date of restart in the file is 1978-01-01. Note that the
restart dates provided in the restart files can be overridden using the
-namelist variables ``use_restart_time``, ``year_init`` and ``istep0``. The
+namelist variables ``use_restart_time``, ``year_init``, ``month_init``,
+``day_init``, and ``sec_init``. The
diff --git a/doc/source/user_guide/ug_running.rst b/doc/source/user_guide/ug_running.rst index 541fa81a4..aca7d4933 100644 --- a/doc/source/user_guide/ug_running.rst +++ b/doc/source/user_guide/ug_running.rst @@ -36,12 +36,19 @@ The Consortium has tested the following compilers at some point, - Intel 17.0.2.174 - Intel 17.0.5.239 - Intel 18.0.1.163 +- Intel 18.0.5 - Intel 19.0.2 - Intel 19.0.3.199 +- Intel 19.1.0.166 +- Intel 19.1.1.217 - PGI 16.10.0 +- PGI 19.9-0 +- PGI 20.1-0 - GNU 6.3.0 - GNU 7.2.0 - GNU 7.3.0 +- GNU 8.3.0 +- GNU 9.3.0 - Cray 8.5.8 - Cray 8.6.4 - NAG 6.2 @@ -54,22 +61,33 @@ The Consortium has tested the following mpi versions, - MPICH 7.6.3 - MPICH 7.7.6 - Intel MPI 18.0.1 +- Intel MPI 18.0.4 +- Intel MPI 2019 Update 6 - MPT 2.14 - MPT 2.17 - MPT 2.18 - MPT 2.19 +- MPT 2.20 +- MPT 2.21 +- mvapich2-2.3.3 - OpenMPI 1.6.5 +- OpenMPI 4.0.2 The NetCDF implementation is relatively general and should work with any version of NetCDF 3 or 4. The Consortium has tested - NetCDF 4.3.0 - NetCDF 4.3.2 - NetCDF 4.4.0 -- NetCDF 4.4.1.1.32 +- NetCDF 4.4.1.1.3 - NetCDF 4.4.1.1 - NetCDF 4.4.2 - NetCDF 4.5.0 +- NetCDF 4.5.2 - NetCDF 4.6.1.3 +- NetCDF 4.6.3 +- NetCDF 4.6.3.2 +- NetCDF 4.7.2 +- NetCDF 4.7.4 Please email the Consortium if this list can be extended. @@ -257,7 +275,7 @@ Some of the options are ``bgcISPOL`` and ``bgcNICE`` specify bgc options -``boxadv``, ``boxdyn``, and ``boxrestore`` are simple box configurations +``boxadv``, ``boxnodyn``, and ``boxrestore`` are simple box configurations ``alt*`` which turns on various combinations of dynamics and physics options for testing @@ -713,7 +731,14 @@ Next, create the "cice" conda environment from the ``environment.yml`` file in t conda env create -f configuration/scripts/machines/environment.yml -This step needs to be done only once. +This step needs to be done only once and will maintain a static conda environment. To update the conda environment later, use + +.. code-block:: bash + + conda env create -f configuration/scripts/machines/environment.yml --force + +This will update the conda environment to the latest software versions. + .. _using_conda_env: @@ -772,7 +797,7 @@ A few notes about the conda configuration: - It is not recommeded to run other test suites than ``quick_suite`` or ``travis_suite`` on a personal computer. - The conda environment is automatically activated when compiling or running the model using the ``./cice.build`` and ``./cice.run`` scripts in the case directory. These scripts source the file ``env.conda_{linux.macos}``, which calls ``conda activate cice``. -- To use the "cice" conda environment with the Python plotting (see :ref:`timeseries`) and quality control scripts (see :ref:`CodeCompliance`), you must manually activate the environment: +- To use the "cice" conda environment with the Python plotting (see :ref:`timeseries`) and quality control (QC) scripts (see :ref:`CodeValidation`), you must manually activate the environment: .. 
@@ -897,7 +922,7 @@ To use the ``timeseries.py`` script, the following requirements must be met:

 * matplotlib Python package
 * datetime Python package

-See :ref:`CodeCompliance` for additional information about how to setup the Python
+See :ref:`CodeValidation` for additional information about how to setup the Python
 environment, but we recommend using ``pip`` as follows: ::

   pip install --user numpy

diff --git a/doc/source/user_guide/ug_testing.rst b/doc/source/user_guide/ug_testing.rst
index 61aa1c05f..5a289db6a 100644
--- a/doc/source/user_guide/ug_testing.rst
+++ b/doc/source/user_guide/ug_testing.rst
@@ -525,7 +525,7 @@ Test Suite Examples
    This will compare to results saved in the baseline [bdir] directory under
    the subdirectory cice.v01a. With the ``--bcmp`` option, the results will be tested
    against prior baselines to verify bit-for-bit, which is an important step prior
-   to approval of many (not all, see :ref:`compliance`) Pull Requests to incorporate code into
+   to approval of many (not all, see :ref:`validation`) Pull Requests to incorporate code into
    the CICE Consortium master code.
    You can use other regression options as well. (``--bdir`` and ``--bgen``)
@@ -625,6 +625,49 @@ Test Suite Examples
    The setenv syntax is for csh/tcsh. In bash, the syntax would be SUITE_BUILD=true.

+.. _unittesting:
+
+Unit Testing
+---------------
+
+Unit testing is supported in the CICE scripts. Unit tests are implemented
+via a distinct top level driver that tests CICE model features explicitly.
+These drivers can be found in **cicecore/drivers/unittest/**. In addition,
+there are some script files that also support the unit testing.
+
+The unit tests build and run very much like the standard CICE model.
+A case is created and model output is saved to the case logs directory.
+Unit tests can be run as part of a test suite and the output is
+compared against an earlier set of output using a simple diff of the
+log files.
+
+For example, to run the existing calendar unit test as a case,
+
+.. code-block:: bash
+
+   ./cice.setup -m onyx -e intel --case calchk01 -p 1x1 -s calchk
+   cd calchk01
+   ./cice.build
+   ./cice.submit
+
+Or to run the existing calendar unit test as a test,
+
+.. code-block:: bash
+
+   ./cice.setup -m onyx -e intel --test unittest -p 1x1 --testid cc01 -s calchk --bgen cice.cc01
+   cd onyx_intel_unittest_gx3_1x1_calchk.cc01/
+   ./cice.build
+   ./cice.submit
+
+To create a new unit test, add a new driver in **cicecore/drivers/unittest**.
+The directory name should be the name of the test.
+Then create the appropriate set_nml or set_env files for the new unittest name
+in **configuration/scripts/options**. In particular, **ICE_DRVOPT** and
+**ICE_TARGET** need to be defined in a set_env file. Finally, edit
+**configuration/scripts/Makefile** and create a target for the unit test.
+The unit tests calchk or helloworld can be used as examples.
+
+
 .. _testreporting:

 Test Reporting
@@ -672,7 +715,10 @@ This argument turns on special compiler flags including reduced optimization
 and invokes the gcov tool. Once runs are complete, either lcov or codecov can be
 used to analyze the results.
 This option is currently only available with the gnu compiler and on a few systems
-with modified Macros files.
+with modified Macros files. In the current implementation, when ``--coverage`` is
+invoked, the sandbox is copied to a new sandbox called something like cice_lcov_yymmdd-hhmmss.
+The source code in the new sandbox is modified slightly to improve coverage statistics +and the full coverage suite is run there. At the present time, the ``--coverage`` flag invokes the lcov analysis automatically by running the **report_lcov.csh** script in the test suite directory. The output @@ -728,9 +774,9 @@ assess test coverage. ..in the future. -.. _compliance: +.. _validation: -Code Compliance Test (non bit-for-bit validation) +Code Validation Test (non bit-for-bit validation) ---------------------------------------------------- A core tenet of CICE dycore and CICE innovations is that they must not change @@ -855,7 +901,7 @@ autocorrelation :math:`r_1`. .. _quadratic: -Quadratic Skill Compliance Test +Quadratic Skill Validation Test ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In addition to the two-stage test of mean sea ice thickness, we also @@ -939,12 +985,12 @@ hemispheres, and must exceed a critical value nominally set to test and the Two-Stage test described in the previous section are provided in :cite:`Hunke18`. -.. _CodeCompliance: +.. _CodeValidation: -Code Compliance Testing Procedure +Code Validation Testing Procedure ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -The CICE code compliance test is performed by running a python script +The CICE code validation (QC) test is performed by running a python script (**configurations/scripts/tests/QC/cice.t-test.py**). In order to run the script, the following requirements must be met: @@ -958,7 +1004,7 @@ QC testing should be carried out using configurations (ie. namelist settings) th exercise the active code modifications. Multiple configurations may need to be tested in some cases. Developers can contact the Consortium for guidance or if there are questions. -In order to generate the files necessary for the compliance test, test cases should be +In order to generate the files necessary for the validation test, test cases should be created with the ``qc`` option (i.e., ``--set qc``) when running cice.setup. This option results in daily, non-averaged history files being written for a 5 year simulation. @@ -970,7 +1016,7 @@ To install the necessary Python packages, the ``pip`` Python utility can be used pip install --user numpy pip install --user matplotlib -To run the compliance test, setup a baseline run with the original baseline model and then +To run the validation test, setup a baseline run with the original baseline model and then a perturbation run based on recent model changes. Use ``--set qc`` in both runs in addition to other settings needed. Then use the QC script to compare history output, diff --git a/doc/source/user_guide/ug_troubleshooting.rst b/doc/source/user_guide/ug_troubleshooting.rst index 9e6f39941..a8a9c2c4d 100644 --- a/doc/source/user_guide/ug_troubleshooting.rst +++ b/doc/source/user_guide/ug_troubleshooting.rst @@ -119,17 +119,24 @@ Several utilities are available that can be helpful when debugging the code. Not all of these will work everywhere in the code, due to possible conflicts in module dependencies. -*debug\_ice* (**CICE.F90**) +*debug\_ice* (**ice\_diagnostics.F90**) A wrapper for *print\_state* that is easily called from numerous - points during the timestepping loop (see - **CICE\_RunMod.F90\_debug**, which can be substituted for - **CICE\_RunMod.F90**). + points during the timestepping loop. *print\_state* (**ice\_diagnostics.F90**) Print the ice state and forcing fields for a given grid cell. 
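The calling pattern for these wrappers is simple; the mock below is a runnable sketch of the call-per-block idiom described above, with a stand-in ``debug_ice`` whose argument list is an assumption, not the verified interface in **ice\_diagnostics.F90**.

.. code-block:: fortran

   program debug_pattern
      ! Mock of the debugging idiom: label a point in the timestepping
      ! loop, then call the wrapper once per block. debug_ice here is a
      ! local stand-in, not the CICE routine.
      implicit none
      integer, parameter :: nblocks = 4
      integer :: iblk
      character(len=32) :: plabeld
      plabeld = 'after thermodynamics'
      do iblk = 1, nblocks
         call debug_ice(iblk, plabeld)
      enddo
   contains
      subroutine debug_ice(iblk, plabel)
         integer, intent(in) :: iblk
         character(len=*), intent(in) :: plabel
         ! The real wrapper would call print_state at the print_points;
         ! this mock just echoes the label and block index.
         write(*,*) trim(plabel), ' block ', iblk
      end subroutine debug_ice
   end program debug_pattern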
-`dbug` = true (**ice\_in**)
-    Print numerous diagnostic quantities.
+`forcing\_diag` = true (**ice\_in**)
+    Print numerous diagnostic quantities associated with input forcing.
+
+`debug\_blocks` = true (**ice\_in**)
+    Print diagnostics during block decomposition and distribution.
+
+`debug\_model` = true (**ice\_in**)
+    Print extended diagnostics for the first point associated with `print\_points`.
+
+`debug\_model\_step` (**ice\_in**)
+    Timestep at which to start printing diagnostics associated with `debug\_model`.

 `print\_global` (**ice\_in**)
     If true, compute and print numerous global sums for energy and mass
@@ -138,11 +145,11 @@ conflicts in module dependencies.

 `print\_points` (**ice\_in**)
     If true, print numerous diagnostic quantities for two grid cells,
-    one near the north pole and one in the Weddell Sea. This utility
+    defined by `lonpnt` and `latpnt` in the namelist file.
+    This utility
     also provides the local grid indices and block and processor numbers
     (`ip`, `jp`, `iblkp`, `mtask`) for these points, which can be used in
-    conjunction with `check\_step`, to call *print\_state*. These flags
-    are set in **ice\_diagnostics.F90**. This option can be fairly slow,
+    calls to *print\_state*. This option can be fairly slow,
     due to gathering data from processors.

 `conserv\_check` = true (**ice\_in**)
diff --git a/icepack b/icepack
index 8bc17e1ee..9a7e22089 160000
--- a/icepack
+++ b/icepack
@@ -1 +1 @@
-Subproject commit 8bc17e1eee235fb0e26857119175990aa0102613
+Subproject commit 9a7e220890f70d2654314cbb023b9137f15d6327

From 9d88d928209d988b1e6cde1240796e479414259b Mon Sep 17 00:00:00 2001
From: Denise Worthen
Date: Thu, 10 Jun 2021 18:08:12 -0400
Subject: [PATCH 37/90] add cice changes for zlvs (#29)

---
 cicecore/cicedynB/general/ice_flux.F90     | 9 ++++++---
 cicecore/cicedynB/general/ice_step_mod.F90 | 3 ++-
 doc/source/cice_index.rst                  | 3 ++-
 3 files changed, 10 insertions(+), 5 deletions(-)

diff --git a/cicecore/cicedynB/general/ice_flux.F90 b/cicecore/cicedynB/general/ice_flux.F90
index 06b371c3c..53b326808 100644
--- a/cicecore/cicedynB/general/ice_flux.F90
+++ b/cicecore/cicedynB/general/ice_flux.F90
@@ -121,7 +121,8 @@ module ice_flux
        ! in from atmosphere (if calc_Tsfc)

       real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: &
-         zlvl    , & ! atm level height (m)
+         zlvl    , & ! atm level height (momentum) (m)
+         zlvs    , & ! atm level height (scalar quantities) (m)
          uatm    , & ! wind velocity components (m/s)
          vatm    , &
          wind    , & ! wind speed (m/s)
@@ -391,7 +392,8 @@ subroutine alloc_flux
          iceumask   (nx_block,ny_block,max_blocks), & ! ice extent mask (U-cell)
          fm         (nx_block,ny_block,max_blocks), & ! Coriolis param. * mass in U-cell (kg/s)
          Tbu        (nx_block,ny_block,max_blocks), & ! factor for seabed stress (landfast ice)
-         zlvl       (nx_block,ny_block,max_blocks), & ! atm level height (m)
+         zlvl       (nx_block,ny_block,max_blocks), & ! atm level height (momentum) (m)
+         zlvs       (nx_block,ny_block,max_blocks), & ! atm level height (scalar quantities) (m)
          uatm       (nx_block,ny_block,max_blocks), & ! wind velocity components (m/s)
          vatm       (nx_block,ny_block,max_blocks), &
          wind       (nx_block,ny_block,max_blocks), & ! wind speed (m/s)
@@ -570,7 +572,8 @@ subroutine init_coupler_flux
      !-----------------------------------------------------------------
      ! fluxes received from atmosphere
      !-----------------------------------------------------------------
-      zlvl  (:,:,:) = c10             ! atm level height (m)
+      zlvl  (:,:,:) = c10             ! atm level height (momentum) (m)
+      zlvs  (:,:,:) = c10             ! 
atm level height (scalar quantities) (m) rhoa (:,:,:) = 1.3_dbl_kind ! air density (kg/m^3) uatm (:,:,:) = c5 ! wind velocity (m/s) vatm (:,:,:) = c5 diff --git a/cicecore/cicedynB/general/ice_step_mod.F90 b/cicecore/cicedynB/general/ice_step_mod.F90 index 29bfdbf0e..d65cf52d3 100644 --- a/cicecore/cicedynB/general/ice_step_mod.F90 +++ b/cicecore/cicedynB/general/ice_step_mod.F90 @@ -171,7 +171,7 @@ subroutine step_therm1 (dt, iblk) use ice_domain_size, only: ncat, nilyr, nslyr, n_iso, n_aero use ice_flux, only: frzmlt, sst, Tf, strocnxT, strocnyT, rside, fbot, Tbot, Tsnice, & meltsn, melttn, meltbn, congeln, snoicen, uatm, vatm, fside, & - wind, rhoa, potT, Qa, zlvl, strax, stray, flatn, fsensn, fsurfn, fcondtopn, & + wind, rhoa, potT, Qa, zlvl, zlvs, strax, stray, flatn, fsensn, fsurfn, fcondtopn, & flw, fsnow, fpond, sss, mlt_onset, frz_onset, fcondbotn, fcondbot, & frain, Tair, strairxT, strairyT, fsurf, fcondtop, fsens, & flat, fswabs, flwout, evap, evaps, evapi, Tref, Qref, Uref, fresh, fsalt, fhocn, & @@ -358,6 +358,7 @@ subroutine step_therm1 (dt, iblk) vatm = vatm (i,j, iblk), & wind = wind (i,j, iblk), & zlvl = zlvl (i,j, iblk), & + zlvs = zlvs (i,j, iblk), & Qa = Qa (i,j, iblk), & Qa_iso = Qa_iso (i,j,:,iblk), & rhoa = rhoa (i,j, iblk), & diff --git a/doc/source/cice_index.rst b/doc/source/cice_index.rst index 9e2868947..69222e10c 100644 --- a/doc/source/cice_index.rst +++ b/doc/source/cice_index.rst @@ -691,7 +691,8 @@ either Celsius or Kelvin units). "yieldstress11(12, 22)", "yield stress tensor components", "" "year_init", ":math:`\bullet` the initial year", "" "**Z**", "", "" - "zlvl", "atmospheric level height", "m" + "zlvl", "atmospheric level height (momentum)", "m" + "zlvs", "atmospheric level height (scalars)", "m" "zref", "reference height for stability", "10. m" "zTrf", "reference height for :math:`T_{ref}`, :math:`Q_{ref}`, :math:`U_{ref}`", "2. 
m" "zvir", "gas constant (water vapor)/gas constant (air) - 1", "0.606" From f3b26524302859f93c7c93b2bcf0e140434cd2e2 Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Thu, 24 Jun 2021 08:32:44 -0400 Subject: [PATCH 38/90] update icepack and pointer --- .gitmodules | 3 ++- icepack | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.gitmodules b/.gitmodules index 8a773d230..b84a13b43 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +1,4 @@ [submodule "icepack"] path = icepack - url = https://github.com/NOAA-EMC/Icepack + #url = https://github.com/NOAA-EMC/Icepack + url = https://github.com/DeniseWorthen/Icepack diff --git a/icepack b/icepack index 9a7e22089..0f0e1b2aa 160000 --- a/icepack +++ b/icepack @@ -1 +1 @@ -Subproject commit 9a7e220890f70d2654314cbb023b9137f15d6327 +Subproject commit 0f0e1b2aada8cb49655b65dbf721cf6549ce7b51 From 55586f764d7260ce9740ca880a9dc0118dca2313 Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Tue, 20 Jul 2021 11:21:12 -0400 Subject: [PATCH 39/90] update icepack and revert gitmodules --- .gitmodules | 3 +-- icepack | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/.gitmodules b/.gitmodules index b84a13b43..8a773d230 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,4 +1,3 @@ [submodule "icepack"] path = icepack - #url = https://github.com/NOAA-EMC/Icepack - url = https://github.com/DeniseWorthen/Icepack + url = https://github.com/NOAA-EMC/Icepack diff --git a/icepack b/icepack index 0f0e1b2aa..41cc89d0a 160000 --- a/icepack +++ b/icepack @@ -1 +1 @@ -Subproject commit 0f0e1b2aada8cb49655b65dbf721cf6549ce7b51 +Subproject commit 41cc89d0afc0494c545adaacd2082cc5f2da6959 From 441f69396b5cc91935cf86019b8700b38f9547fc Mon Sep 17 00:00:00 2001 From: apcraig Date: Tue, 10 Aug 2021 17:04:56 -0600 Subject: [PATCH 40/90] Fix history features - Fix bug in history time axis when sec_init is not zero. - Fix issue with time_beg and time_end uninitialized values. - Add support for averaging with histfreq='1' by allowing histfreq_n to be any value in that case. Extend and clean up construct_filename for history files. More could be done, but wanted to preserve backwards compatibility. - Add new calendar_sec2hms to converts daily seconds to hh:mm:ss. Update the calchk calendar unit tester to check this method - Remove abort test in bcstchk, this was just causing problems in regression testing - Remove known problems documentation about problems writing when istep=1. This issue does not exist anymore with the updated time manager. - Add new tests with hist_avg = false. Add set_nml.histinst. 
--- cicecore/cicedynB/analysis/ice_history.F90 | 16 +-- .../cicedynB/analysis/ice_history_shared.F90 | 97 ++++++++++--------- .../io/io_netcdf/ice_history_write.F90 | 30 +++--- .../io/io_pio2/ice_history_write.F90 | 71 ++++++++------ cicecore/drivers/unittest/bcstchk/bcstchk.F90 | 25 +---- cicecore/drivers/unittest/calchk/calchk.F90 | 30 +++++- cicecore/shared/ice_calendar.F90 | 33 ++++++- configuration/scripts/options/set_nml.histall | 2 +- .../scripts/options/set_nml.histinst | 1 + configuration/scripts/tests/io_suite.ts | 6 ++ doc/source/user_guide/ug_troubleshooting.rst | 3 - 11 files changed, 182 insertions(+), 132 deletions(-) create mode 100644 configuration/scripts/options/set_nml.histinst diff --git a/cicecore/cicedynB/analysis/ice_history.F90 b/cicecore/cicedynB/analysis/ice_history.F90 index f91562449..c50ec7b9f 100644 --- a/cicecore/cicedynB/analysis/ice_history.F90 +++ b/cicecore/cicedynB/analysis/ice_history.F90 @@ -1758,6 +1758,7 @@ subroutine accum_hist (dt) nstrm ! nstreams (1 if writing initial condition) real (kind=dbl_kind) :: & + timedbl , & ! temporary dbl for time bounds ravgct , & ! 1/avgct ravgctz ! 1/avgct @@ -1814,7 +1815,7 @@ subroutine accum_hist (dt) n4Dfcum = n4Dscum + num_avail_hist_fields_4Df ! should equal num_avail_hist_fields_tot do ns = 1,nstreams - if (.not. hist_avg .or. histfreq(ns) == '1') then ! write snapshots + if (.not. hist_avg) then ! write snapshots do n = 1,n2D if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) & a2D(:,:,n,:) = c0 @@ -1862,11 +1863,10 @@ subroutine accum_hist (dt) avgct(ns) = c1 else ! write averages over time histfreq avgct(ns) = avgct(ns) + c1 -! if (avgct(ns) == c1) time_beg(ns) = (time-dt)/int(secday) - if (avgct(ns) == c1) then - time_beg(ns) = (timesecs-dt)/int(secday) - time_beg(ns) = real(time_beg(ns),kind=real_kind) - endif + endif + if (avgct(ns) == c1) then + timedbl = (timesecs-dt)/(secday) + time_beg(ns) = real(timedbl,kind=real_kind) endif enddo @@ -3966,8 +3966,8 @@ subroutine accum_hist (dt) enddo ! iblk !$OMP END PARALLEL DO - time_end(ns) = timesecs/int(secday) - time_end(ns) = real(time_end(ns),kind=real_kind) + timedbl = timesecs/secday + time_end(ns) = real(timedbl,kind=real_kind) !--------------------------------------------------------------- ! write file diff --git a/cicecore/cicedynB/analysis/ice_history_shared.F90 b/cicecore/cicedynB/analysis/ice_history_shared.F90 index 52d268990..f0343f320 100644 --- a/cicecore/cicedynB/analysis/ice_history_shared.F90 +++ b/cicecore/cicedynB/analysis/ice_history_shared.F90 @@ -672,64 +672,67 @@ subroutine construct_filename(ncfile,suffix,ns) iday = mday isec = msec - dt - if (write_ic) isec = msec ! construct filename if (write_ic) then + isec = msec write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & incond_file(1:lenstr(incond_file)),'.',iyear,'-', & - imonth,'-',iday,'-',isec,'.',suffix + imonth,'-',iday,'-',isec,'.',trim(suffix) else - if (hist_avg .and. histfreq(ns) /= '1') then - if (histfreq(ns) == 'h'.or.histfreq(ns) == 'H') then - ! do nothing - elseif (new_year) then - iyear = iyear - 1 - imonth = 12 - iday = daymo(imonth) - elseif (new_month) then - imonth = mmonth - 1 - iday = daymo(imonth) - elseif (new_day) then - iday = iday - 1 - endif - endif - - cstream = '' + if (hist_avg) then + if (histfreq(ns) == '1' .or. histfreq(ns) == 'h'.or.histfreq(ns) == 'H') then + ! 
do nothing + elseif (new_year) then + iyear = iyear - 1 + imonth = 12 + iday = daymo(imonth) + elseif (new_month) then + imonth = mmonth - 1 + iday = daymo(imonth) + elseif (new_day) then + iday = iday - 1 + endif + endif + + cstream = '' !echmod ! this was implemented for CESM but it breaks post-processing software !echmod ! of other groups (including RASM which uses CESMCOUPLED) !echmod if (ns > 1) write(cstream,'(i1.1)') ns-1 - if (histfreq(ns) == '1') then ! instantaneous, write every dt - write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & - history_file(1:lenstr(history_file))//trim(cstream),'_inst.', & - iyear,'-',imonth,'-',iday,'-',msec,'.',suffix - - elseif (hist_avg) then ! write averaged data - - if (histfreq(ns) == 'd'.or.histfreq(ns) == 'D') then ! daily - write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,a)') & - history_file(1:lenstr(history_file))//trim(cstream), & - '.',iyear,'-',imonth,'-',iday,'.',suffix - elseif (histfreq(ns) == 'h'.or.histfreq(ns) == 'H') then ! hourly - write(ncfile,'(a,a,i2.2,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & - history_file(1:lenstr(history_file))//trim(cstream),'_', & - histfreq_n(ns),'h.',iyear,'-',imonth,'-',iday,'-',msec,'.',suffix - elseif (histfreq(ns) == 'm'.or.histfreq(ns) == 'M') then ! monthly - write(ncfile,'(a,a,i4.4,a,i2.2,a,a)') & - history_file(1:lenstr(history_file))//trim(cstream),'.', & - iyear,'-',imonth,'.',suffix - elseif (histfreq(ns) == 'y'.or.histfreq(ns) == 'Y') then ! yearly - write(ncfile,'(a,a,i4.4,a,a)') & - history_file(1:lenstr(history_file))//trim(cstream),'.', & - iyear,'.',suffix - endif + if (hist_avg) then ! write averaged data + if (histfreq(ns) == '1' .and. histfreq_n(ns) == 1) then ! timestep + write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & + history_file(1:lenstr(history_file))//trim(cstream),'_inst.', & + iyear,'-',imonth,'-',iday,'-',msec,'.',trim(suffix) + elseif (histfreq(ns) == '1' .and. histfreq_n(ns) > 1) then ! timestep + write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & + history_file(1:lenstr(history_file))//trim(cstream),'.', & + iyear,'-',imonth,'-',iday,'-',msec,'.',trim(suffix) + elseif (histfreq(ns) == 'h'.or.histfreq(ns) == 'H') then ! hourly + write(ncfile,'(a,a,i2.2,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & + history_file(1:lenstr(history_file))//trim(cstream),'_', & + histfreq_n(ns),'h.',iyear,'-',imonth,'-',iday,'-',msec,'.',trim(suffix) + elseif (histfreq(ns) == 'd'.or.histfreq(ns) == 'D') then ! daily + write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,a)') & + history_file(1:lenstr(history_file))//trim(cstream),'.', & + iyear,'-',imonth,'-',iday,'.',trim(suffix) + elseif (histfreq(ns) == 'm'.or.histfreq(ns) == 'M') then ! monthly + write(ncfile,'(a,a,i4.4,a,i2.2,a,a)') & + history_file(1:lenstr(history_file))//trim(cstream),'.', & + iyear,'-',imonth,'.',trim(suffix) + elseif (histfreq(ns) == 'y'.or.histfreq(ns) == 'Y') then ! yearly + write(ncfile,'(a,a,i4.4,a,a)') & + history_file(1:lenstr(history_file))//trim(cstream),'.', & + iyear,'.',trim(suffix) + endif + + else ! instantaneous + write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & + history_file(1:lenstr(history_file))//trim(cstream),'_inst.', & + iyear,'-',imonth,'-',iday,'-',msec,'.',trim(suffix) + endif - else ! 
instantaneous with histfreq > dt - write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & - history_file(1:lenstr(history_file)),'_inst.', & - iyear,'-',imonth,'-',iday,'-',msec,'.',suffix - endif endif end subroutine construct_filename diff --git a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 index 9c6b30ee1..9fe3a5bfe 100644 --- a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 @@ -48,8 +48,8 @@ subroutine ice_write_hist (ns) use ice_blocks, only: nx_block, ny_block use ice_broadcast, only: broadcast_scalar use ice_calendar, only: msec, timesecs, idate, idate0, write_ic, & - histfreq, days_per_year, use_leap_years, dayyr, & - year_init, month_init, day_init + histfreq, histfreq_n, days_per_year, use_leap_years, dayyr, & + hh_init, mm_init, ss_init use ice_communicate, only: my_task, master_task use ice_domain, only: distrb_info use ice_domain_size, only: nx_global, ny_global, max_nstrm, max_blocks @@ -205,7 +205,6 @@ subroutine ice_write_hist (ns) ! define coordinate variables !----------------------------------------------------------------- -!sgl status = nf90_def_var(ncid,'time',nf90_float,timid,varid) status = nf90_def_var(ncid,'time',nf90_double,timid,varid) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: defining var time') @@ -215,8 +214,9 @@ subroutine ice_write_hist (ns) 'ice Error: time long_name') write(cdate,'(i8.8)') idate0 - write(title,'(a,a,a,a,a,a,a,a)') 'days since ', & - cdate(1:4),'-',cdate(5:6),'-',cdate(7:8),' 00:00:00' + write(title,'(a,a4,a1,a2,a1,a2,a1,i2.2,a1,i2.2,a1,i2.2)') 'days since ', & + cdate(1:4),'-',cdate(5:6),'-',cdate(7:8),' ', & + hh_init,':',mm_init,':',ss_init status = nf90_put_att(ncid,varid,'units',title) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: time units') @@ -258,8 +258,9 @@ subroutine ice_write_hist (ns) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: time_bounds long_name') write(cdate,'(i8.8)') idate0 - write(title,'(a,a,a,a,a,a,a,a)') 'days since ', & - cdate(1:4),'-',cdate(5:6),'-',cdate(7:8),' 00:00:00' + write(title,'(a,a4,a1,a2,a1,a2,a1,i2.2,a1,i2.2,a1,i2.2)') 'days since ', & + cdate(1:4),'-',cdate(5:6),'-',cdate(7:8),' ', & + hh_init,':',mm_init,':',ss_init status = nf90_put_att(ncid,varid,'units',title) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: time_bounds units') @@ -575,7 +576,8 @@ subroutine ice_write_hist (ns) endif endif - if (histfreq(ns) == '1' .or. .not. hist_avg & + if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & + .or..not. hist_avg & .or. n==n_divu(ns) .or. n==n_shear(ns) & ! snapshots .or. n==n_sig1(ns) .or. n==n_sig2(ns) & .or. n==n_sigP(ns) .or. n==n_trsig(ns) & @@ -640,7 +642,8 @@ subroutine ice_write_hist (ns) 'ERROR: defining cell methods for '//avail_hist_fields(n)%vname) endif - if (histfreq(ns) == '1' .or. .not. hist_avg) then + if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & + .or..not. hist_avg) then status = nf90_put_att(ncid,varid,'time_rep','instantaneous') else status = nf90_put_att(ncid,varid,'time_rep','averaged') @@ -881,7 +884,8 @@ subroutine ice_write_hist (ns) 'ERROR: defining cell methods for '//avail_hist_fields(n)%vname) endif - if (histfreq(ns) == '1' .or. .not. hist_avg) then + if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & + .or..not. 
hist_avg) then status = nf90_put_att(ncid,varid,'time_rep','instantaneous') else status = nf90_put_att(ncid,varid,'time_rep','averaged') @@ -942,7 +946,8 @@ subroutine ice_write_hist (ns) 'ERROR: defining cell methods for '//avail_hist_fields(n)%vname) endif - if (histfreq(ns) == '1' .or. .not. hist_avg) then + if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & + .or..not. hist_avg) then status = nf90_put_att(ncid,varid,'time_rep','instantaneous') else status = nf90_put_att(ncid,varid,'time_rep','averaged') @@ -1003,7 +1008,8 @@ subroutine ice_write_hist (ns) 'ERROR: defining cell methods for '//avail_hist_fields(n)%vname) endif - if (histfreq(ns) == '1' .or. .not. hist_avg) then + if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & + .or..not. hist_avg) then status = nf90_put_att(ncid,varid,'time_rep','instantaneous') else status = nf90_put_att(ncid,varid,'time_rep','averaged') diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 index 72a1ed97f..b35b44b1b 100644 --- a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 @@ -42,7 +42,8 @@ subroutine ice_write_hist (ns) use ice_blocks, only: nx_block, ny_block use ice_broadcast, only: broadcast_scalar use ice_calendar, only: msec, timesecs, idate, idate0, write_ic, & - histfreq, days_per_year, use_leap_years, dayyr + histfreq, histfreq_n, days_per_year, use_leap_years, dayyr, & + hh_init, mm_init, ss_init use ice_communicate, only: my_task, master_task use ice_constants, only: c0, c360, spval, spval_dbl use ice_domain, only: distrb_info, nblocks @@ -70,7 +71,6 @@ subroutine ice_write_hist (ns) integer (kind=int_kind), dimension(5) :: dimidcz integer (kind=int_kind), dimension(3) :: dimid_nverts integer (kind=int_kind), dimension(6) :: dimidex - real (kind=real_kind) :: ltime real (kind= dbl_kind) :: ltime2 character (char_len) :: title character (char_len_long) :: ncfile(max_nstrm) @@ -176,7 +176,6 @@ subroutine ice_write_hist (ns) call ice_pio_initdecomp(ndim3=nfsd_hist, ndim4=ncat_hist, iodesc=iodesc4df) ltime2 = timesecs/secday - ltime = real(timesecs/secday,kind=real_kind) ! option of turning on double precision history files lprecision = pio_real @@ -186,7 +185,7 @@ subroutine ice_write_hist (ns) ! define dimensions !----------------------------------------------------------------- - if (hist_avg .and. histfreq(ns) /= '1') then + if (hist_avg) then status = pio_def_dim(File,'d2',2,boundid) endif @@ -205,13 +204,13 @@ subroutine ice_write_hist (ns) ! define coordinate variables: time, time_bounds !----------------------------------------------------------------- -!sgl status = pio_def_var(File,'time',pio_real,(/timid/),varid) status = pio_def_var(File,'time',pio_double,(/timid/),varid) status = pio_put_att(File,varid,'long_name','model time') write(cdate,'(i8.8)') idate0 - write(title,'(a,a,a,a,a,a,a)') 'days since ', & - cdate(1:4),'-',cdate(5:6),'-',cdate(7:8),' 00:00:00' + write(title,'(a,a4,a1,a2,a1,a2,a1,i2.2,a1,i2.2,a1,i2.2)') 'days since ', & + cdate(1:4),'-',cdate(5:6),'-',cdate(7:8),' ', & + hh_init,':',mm_init,':',ss_init status = pio_put_att(File,varid,'units',trim(title)) if (days_per_year == 360) then @@ -224,21 +223,21 @@ subroutine ice_write_hist (ns) call abort_ice(subname//'ERROR: invalid calendar settings') endif - if (hist_avg .and. 
histfreq(ns) /= '1') then + if (hist_avg) then status = pio_put_att(File,varid,'bounds','time_bounds') endif ! Define attributes for time_bounds if hist_avg is true - if (hist_avg .and. histfreq(ns) /= '1') then + if (hist_avg) then dimid2(1) = boundid dimid2(2) = timid -!sgl status = pio_def_var(File,'time_bounds',pio_real,dimid2,varid) status = pio_def_var(File,'time_bounds',pio_double,dimid2,varid) status = pio_put_att(File,varid,'long_name', & 'boundaries for time-averaging interval') write(cdate,'(i8.8)') idate0 - write(title,'(a,a,a,a,a,a,a,a)') 'days since ', & - cdate(1:4),'-',cdate(5:6),'-',cdate(7:8),' 00:00:00' + write(title,'(a,a4,a1,a2,a1,a2,a1,i2.2,a1,i2.2,a1,i2.2)') 'days since ', & + cdate(1:4),'-',cdate(5:6),'-',cdate(7:8),' ', & + hh_init,':',mm_init,':',ss_init status = pio_put_att(File,varid,'units',trim(title)) endif @@ -473,7 +472,7 @@ subroutine ice_write_hist (ns) endif ! Add cell_methods attribute to variables if averaged - if (hist_avg .and. histfreq(ns) /= '1') then + if (hist_avg) then if (TRIM(avail_hist_fields(n)%vname)/='sig1' & .or.TRIM(avail_hist_fields(n)%vname)/='sig2' & .or.TRIM(avail_hist_fields(n)%vname)/='sistreave' & @@ -483,7 +482,8 @@ subroutine ice_write_hist (ns) endif endif - if (histfreq(ns) == '1' .or. .not. hist_avg & + if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & + .or..not. hist_avg) then .or. n==n_divu(ns) .or. n==n_shear(ns) & ! snapshots .or. n==n_sig1(ns) .or. n==n_sig2(ns) & .or. n==n_sigP(ns) .or. n==n_trsig(ns) & @@ -527,11 +527,12 @@ subroutine ice_write_hist (ns) endif ! Add cell_methods attribute to variables if averaged - if (hist_avg .and. histfreq(ns) /= '1') then + if (hist_avg) then status = pio_put_att(File,varid,'cell_methods','time: mean') endif - if (histfreq(ns) == '1' .or. .not. hist_avg) then + if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & + .or..not. hist_avg) then status = pio_put_att(File,varid,'time_rep','instantaneous') else status = pio_put_att(File,varid,'time_rep','averaged') @@ -569,11 +570,12 @@ subroutine ice_write_hist (ns) endif ! Add cell_methods attribute to variables if averaged - if (hist_avg .and. histfreq(ns) /= '1') then + if (hist_avg) then status = pio_put_att(File,varid,'cell_methods','time: mean') endif - if (histfreq(ns) == '1' .or. .not. hist_avg) then + if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & + .or..not. hist_avg) then status = pio_put_att(File,varid,'time_rep','instantaneous') else status = pio_put_att(File,varid,'time_rep','averaged') @@ -611,11 +613,12 @@ subroutine ice_write_hist (ns) endif ! Add cell_methods attribute to variables if averaged - if (hist_avg .and. histfreq(ns) /= '1') then + if (hist_avg) then status = pio_put_att(File,varid,'cell_methods','time: mean') endif - if (histfreq(ns) == '1' .or. .not. hist_avg) then + if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & + .or..not. hist_avg) then status = pio_put_att(File,varid,'time_rep','instantaneous') else status = pio_put_att(File,varid,'time_rep','averaged') @@ -653,11 +656,12 @@ subroutine ice_write_hist (ns) endif ! Add cell_methods attribute to variables if averaged - if (hist_avg .and. histfreq(ns) /= '1') then + if (hist_avg) then status = pio_put_att(File,varid,'cell_methods','time: mean') endif - if (histfreq(ns) == '1' .or. .not. hist_avg) then + if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & + .or..not. 
hist_avg) then status = pio_put_att(File,varid,'time_rep','instantaneous') else status = pio_put_att(File,varid,'time_rep','averaged') @@ -695,11 +699,12 @@ subroutine ice_write_hist (ns) endif ! Add cell_methods attribute to variables if averaged - if (hist_avg .and. histfreq(ns) /= '1') then + if (hist_avg) then status = pio_put_att(File,varid,'cell_methods','time: mean') endif - if (histfreq(ns) == '1' .or. .not. hist_avg) then + if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & + .or..not. hist_avg) then status = pio_put_att(File,varid,'time_rep','instantaneous') else status = pio_put_att(File,varid,'time_rep','averaged') @@ -743,11 +748,12 @@ subroutine ice_write_hist (ns) endif ! Add cell_methods attribute to variables if averaged - if (hist_avg .and. histfreq(ns) /= '1') then + if (hist_avg) then status = pio_put_att(File,varid,'cell_methods','time: mean') endif - if (histfreq(ns) == '1' .or. .not. hist_avg) then + if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & + .or..not. hist_avg) then status = pio_put_att(File,varid,'time_rep','instantaneous') else status = pio_put_att(File,varid,'time_rep','averaged') @@ -786,11 +792,12 @@ subroutine ice_write_hist (ns) endif ! Add cell_methods attribute to variables if averaged - if (hist_avg .and. histfreq(ns) /= '1') then + if (hist_avg) then status = pio_put_att(File,varid,'cell_methods','time: mean') endif - if (histfreq(ns) == '1' .or. .not. hist_avg) then + if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & + .or..not. hist_avg) then status = pio_put_att(File,varid,'time_rep','instantaneous') else status = pio_put_att(File,varid,'time_rep','averaged') @@ -830,11 +837,12 @@ subroutine ice_write_hist (ns) endif ! Add cell_methods attribute to variables if averaged - if (hist_avg .and. histfreq(ns) /= '1') then + if (hist_avg) then status = pio_put_att(File,varid,'cell_methods','time: mean') endif - if (histfreq(ns) == '1' .or. .not. hist_avg) then + if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & + .or..not. hist_avg) then status = pio_put_att(File,varid,'time_rep','instantaneous') else status = pio_put_att(File,varid,'time_rep','averaged') @@ -901,14 +909,13 @@ subroutine ice_write_hist (ns) !----------------------------------------------------------------- status = pio_inq_varid(File,'time',varid) -!sgl status = pio_put_var(File,varid,(/1/),ltime) status = pio_put_var(File,varid,(/1/),ltime2) !----------------------------------------------------------------- ! write time_bounds info !----------------------------------------------------------------- - if (hist_avg .and. histfreq(ns) /= '1') then + if (hist_avg) then status = pio_inq_varid(File,'time_bounds',varid) time_bounds=(/time_beg(ns),time_end(ns)/) bnd_start = (/1,1/) diff --git a/cicecore/drivers/unittest/bcstchk/bcstchk.F90 b/cicecore/drivers/unittest/bcstchk/bcstchk.F90 index 4b723a391..d267f77e6 100644 --- a/cicecore/drivers/unittest/bcstchk/bcstchk.F90 +++ b/cicecore/drivers/unittest/bcstchk/bcstchk.F90 @@ -265,29 +265,8 @@ program bcstchk endif endif - ! Test abort_ice, regardless of test outcome - ! Set doabort to false to support code coverage stats, aborted runs don't seem to generate - ! 
gcov statistics - - call flush_fileunit(6) - call ice_barrier() - if (my_task == master_task) then - write(6,*) ' ' - write(6,*) '==========================================================' - write(6,*) ' ' - write(6,*) 'NOTE: We are testing the abort now so you should see an abort to follow' - write(6,*) 'The BCSTCHK passed, so please ignore the abort' - write(6,*) ' ' - call abort_ice(subname//' Test abort ',file=__FILE__,line=__LINE__, doabort=.false.) - endif - call flush_fileunit(6) - call ice_barrier() - - if (my_task == master_task) then - write(6,*) ' ' - write(6,*) 'BCSTCHK done' - write(6,*) ' ' - endif + ! --------------------------- + ! exit gracefully call end_run() diff --git a/cicecore/drivers/unittest/calchk/calchk.F90 b/cicecore/drivers/unittest/calchk/calchk.F90 index 345782281..6fa99e4dd 100644 --- a/cicecore/drivers/unittest/calchk/calchk.F90 +++ b/cicecore/drivers/unittest/calchk/calchk.F90 @@ -15,13 +15,14 @@ program calchk use ice_calendar, only: init_calendar, calendar use ice_calendar, only: set_date_from_timesecs use ice_calendar, only: calendar_date2time, calendar_time2date - use ice_calendar, only: compute_calendar_data + use ice_calendar, only: compute_calendar_data, calendar_sec2hms implicit none integer(kind=int_kind) :: yearmax integer(kind=int_kind) :: nday,nptc integer(kind=int_kind) :: n,m,ny,nm,nd,nf1,nf2,xadd,nfa,nfb,nfc,ns1,ns2 integer(kind=int_kind) :: yi,mi,di,si + integer(kind=int_kind) :: hh,mm,ss integer(kind=int_kind) :: dyear,dmon,dday,dsec integer(kind=int_kind) :: fyear,fmon,fday,fsec character(len=32) :: calstr,unitstr,signstr @@ -29,7 +30,7 @@ program calchk integer (kind=int_kind) :: tdaycal(months_per_year+1) ! day count per month integer (kind=int_kind) :: tdayyr ! days in year - integer(kind=int_kind), parameter :: ntests = 8 + integer(kind=int_kind), parameter :: ntests = 9 character(len=8) :: errorflag0,errorflag(1:ntests),errorflagtmp character(len=32) :: testname(ntests) integer(kind=int_kind) :: yearv(ntests),monv(ntests),dayv(ntests),secv(ntests),ndayv(ntests) ! computed values @@ -54,10 +55,11 @@ program calchk testname(6) = 'small add/sub update_date' testname(7) = 'special checks' testname(8) = 'calc_timesteps' + testname(9) = 'seconds_to_hms' ! test yearmax years from year 0 -! yearmax = 1000 - yearmax = 100000 + yearmax = 1000 +! yearmax = 100000 ! test 3 calendars do n = 1,3 @@ -561,6 +563,26 @@ program calchk endif enddo + !------------------------- + ! calc hms + !------------------------- + + write(6,*) ' ' + do ns1 = 0,86399 + call calendar_sec2hms(ns1,hh,mm,ss) + if (ns1 < 10 .or. ns1 > 86390 .or. (ns1 > 7195 .and. ns1 < 7205)) then + write(6,'(a,i8,2x,i2.2,a,i2.2,a,i2.2)') ' CHECK9 ',ns1,hh,':',mm,':',ss + endif + enddo + monc(9) = 23 ! hh correct result for 86399 + dayc(9) = 59 ! mm correct result for 86399 + secc(9) = 59 ! ss correct result for 86399 + if (hh /= monc(9) .or. mm /= dayc(9) .or. ss /= secc(9)) then + errorflag(9) = failflag + write(6,*) 'ERROR9: hms expected',ns1,monc(9),dayc(9),secc(9) + write(6,*) 'ERROR9: hms error ',ns1,hh,mm,ss + endif + !------------------------- ! write test results !------------------------- diff --git a/cicecore/shared/ice_calendar.F90 b/cicecore/shared/ice_calendar.F90 index f76b3b30b..7684fef67 100644 --- a/cicecore/shared/ice_calendar.F90 +++ b/cicecore/shared/ice_calendar.F90 @@ -47,6 +47,7 @@ module ice_calendar public :: update_date ! input date and delta date, compute new date public :: calendar_date2time ! 
convert date to time relative to init date public :: calendar_time2date ! convert time to date relative to init date + public :: calendar_sec2hms ! convert seconds to hour, minute, seconds public :: compute_calendar_data ! compute info about calendar for a given year ! private functions @@ -61,8 +62,10 @@ module ice_calendar ice_calendar_360day = '360day' ! 360 day calendar with 30 days per month integer (kind=int_kind), public, parameter :: & - months_per_year = 12, & ! months per year - hours_per_day = 24 ! hours per day + months_per_year = 12, & ! months per year + hours_per_day = 24, & ! hours per day + minutes_per_hour = 60, & ! minutes per hour + seconds_per_minute = 60 ! seconds per minute integer (kind=int_kind), public :: & seconds_per_day , & ! seconds per day @@ -87,6 +90,9 @@ module ice_calendar day_init, & ! initial day of month sec_init , & ! initial seconds ! other stuff + hh_init , & ! initial hour derived from sec_init + mm_init , & ! initial minute derived from sec_init + ss_init , & ! initial second derived from sec_init idate , & ! date (yyyymmdd) idate0 , & ! initial date (yyyymmdd), associated with year_init, month_init, day_init dayyr , & ! number of days in the current year @@ -189,6 +195,7 @@ subroutine init_calendar mmonth=month_init ! month mday=day_init ! day of the month msec=sec_init ! seconds into date + call calendar_sec2hms(sec_init,hh_init,mm_init,ss_init) ! initialize hh,mm,ss _init hour=0 ! computed in calendar, but needs some reasonable initial value istep1 = istep0 ! number of steps at current timestep ! real (dumped) or imagined (use to set calendar) @@ -948,6 +955,28 @@ subroutine calendar_time2date(atimesecs,ayear,amon,aday,asec,year_ref,mon_ref,da end subroutine calendar_time2date +!======================================================================= +! Compute hours, minutes, seconds from seconds + + subroutine calendar_sec2hms(seconds, hh, mm, ss) + + integer(kind=int_kind), intent(in) :: & + seconds ! calendar seconds in day + integer(kind=int_kind), intent(out) :: & + hh, mm, ss ! output hours, minutes, seconds + + character(len=*),parameter :: subname='(calendar_sec2hms)' + + if (seconds >= seconds_per_day) then + write(nu_diag,*) trim(subname),' ERROR seconds >= seconds_per_day, ',seconds,seconds_per_day + call abort_ice(subname//'ERROR: in seconds') + endif + hh = seconds/(seconds_per_hour) + mm = (seconds - hh*seconds_per_hour)/seconds_per_minute + ss = (seconds - hh*seconds_per_hour - mm*seconds_per_minute) + + end subroutine calendar_sec2hms + !======================================================================= ! Compute relative elapsed years, months, days, hours from base time diff --git a/configuration/scripts/options/set_nml.histall b/configuration/scripts/options/set_nml.histall index 758289099..d7a42176e 100644 --- a/configuration/scripts/options/set_nml.histall +++ b/configuration/scripts/options/set_nml.histall @@ -1,5 +1,5 @@ histfreq = 'm','d','1','h','x' - histfreq_n = 1,2,6,4,1 + histfreq_n = 1,2,6,4,5 histfreq_base = 'zero' write_ic = .true. f_tmask = .true. diff --git a/configuration/scripts/options/set_nml.histinst b/configuration/scripts/options/set_nml.histinst new file mode 100644 index 000000000..f2f0995c8 --- /dev/null +++ b/configuration/scripts/options/set_nml.histinst @@ -0,0 +1 @@ +hist_avg = .false. 
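The instantaneous-versus-averaged attribute logic repeated through the history writers above reduces to one predicate; the helper below is a distillation written for this log (``is_instantaneous`` does not exist in CICE), with the operands named as in the diffs.

.. code-block:: fortran

   program snapshot_rule
      ! Sketch: a stream is written as an instantaneous snapshot when
      ! averaging is off, or when it averages over a single timestep.
      implicit none
      write(*,*) is_instantaneous(.true., '1', 1)    ! T: one-step average
      write(*,*) is_instantaneous(.true., '1', 6)    ! F: 6-step averages
      write(*,*) is_instantaneous(.false., 'm', 1)   ! T: hist_avg off
   contains
      logical function is_instantaneous(hist_avg, histfreq, histfreq_n)
         logical, intent(in) :: hist_avg
         character(len=1), intent(in) :: histfreq
         integer, intent(in) :: histfreq_n
         is_instantaneous = (.not. hist_avg) .or. &
                            (histfreq == '1' .and. histfreq_n == 1)
      end function is_instantaneous
   end program snapshot_rule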
diff --git a/configuration/scripts/tests/io_suite.ts b/configuration/scripts/tests/io_suite.ts index 6fe1f589a..4d5129578 100644 --- a/configuration/scripts/tests/io_suite.ts +++ b/configuration/scripts/tests/io_suite.ts @@ -12,6 +12,7 @@ restart gx3 32x1 gx3ncarbulk,bgcz,histall,iobinary,precision8 restart gx3 16x2 gx3ncarbulk,bgcskl,histall,iobinary restart gx3 14x2 gx3ncarbulk,isotope,histall,iobinary,precision8 restart gx3 16x2 gx3ncarbulk,fsd12,histall,iobinary +restart gx3 8x4 gx3ncarbulk,debug,histall,iobinary,precision8,histinst restart gx3 32x1 debug,histall,ionetcdf restart gx3 15x2 alt01,histall,ionetcdf,precision8,cdf64 @@ -24,6 +25,7 @@ restart gx3 30x1 bgcz,histall,ionetcdf restart gx3 15x2 bgcskl,histall,ionetcdf,precision8 restart gx3 31x1 isotope,histall,ionetcdf,cdf64 restart gx3 14x2 fsd12,histall,ionetcdf,precision8 +restart gx3 32x1 debug,histall,ionetcdf,histinst restart gx3 16x2 debug,histall,iopio1,precision8,cdf64 restart gx3 14x2 alt01,histall,iopio1,cdf64 @@ -36,6 +38,7 @@ restart gx3 16x2 bgcz,histall,iopio1,precision8 restart gx3 30x1 bgcskl,histall,iopio1 restart gx3 8x4 isotope,histall,iopio1,precision8,cdf64 restart gx3 12x2 fsd12,histall,iopio1,cdf64 +restart gx3 16x2 debug,histall,iopio1,precision8,cdf64,histinst restart gx3 16x2 debug,histall,iopio2 restart gx3 14x2 alt01,histall,iopio2,precision8,cdf64 @@ -48,6 +51,7 @@ restart gx3 16x2 bgcz,histall,iopio2,cdf64 restart gx3 30x1 bgcskl,histall,iopio2,precision8 restart gx3 8x4 isotope,histall,iopio2 restart gx3 12x2 fsd12,histall,iopio2,precision8,cdf64 +restart gx3 16x2 debug,histall,iopio2,histinst restart gx3 16x2 debug,histall,iopio1p,precision8 restart gx3 14x2 alt01,histall,iopio1p @@ -60,6 +64,7 @@ restart gx3 16x2 bgcz,histall,iopio1p,precision8,cdf64 restart gx3 30x1 bgcskl,histall,iopio1p,cdf64 restart gx3 8x4 isotope,histall,iopio1p,precision8 restart gx3 12x2 fsd12,histall,iopio1p +restart gx3 16x2 debug,histall,iopio1p,precision8,histinst restart gx3 16x2 debug,histall,iopio2p,cdf64 restart gx3 14x2 alt01,histall,iopio2p,precision8 @@ -72,4 +77,5 @@ restart gx3 16x2 bgcz,histall,iopio2p restart gx3 30x1 bgcskl,histall,iopio2p,precision8,cdf64 restart gx3 8x4 isotope,histall,iopio2p,cdf64 restart gx3 12x2 fsd12,histall,iopio2p,precision8 +restart gx3 16x2 debug,histall,iopio2p,cdf64,histinst diff --git a/doc/source/user_guide/ug_troubleshooting.rst b/doc/source/user_guide/ug_troubleshooting.rst index f400673ac..cd8f1acaf 100644 --- a/doc/source/user_guide/ug_troubleshooting.rst +++ b/doc/source/user_guide/ug_troubleshooting.rst @@ -212,9 +212,6 @@ Known bugs - Latitude and longitude fields in the history output may be wrong when using padding. -- History and restart files will not be written on the first timestep in - some cases. - Interpretation of albedos ---------------------------------------- From 15763d8707dce08a8aeeff2d0886d0fa9a310004 Mon Sep 17 00:00:00 2001 From: apcraig Date: Tue, 10 Aug 2021 17:14:25 -0600 Subject: [PATCH 41/90] revert set_nml.histall --- configuration/scripts/options/set_nml.histall | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/configuration/scripts/options/set_nml.histall b/configuration/scripts/options/set_nml.histall index d7a42176e..758289099 100644 --- a/configuration/scripts/options/set_nml.histall +++ b/configuration/scripts/options/set_nml.histall @@ -1,5 +1,5 @@ histfreq = 'm','d','1','h','x' - histfreq_n = 1,2,6,4,5 + histfreq_n = 1,2,6,4,1 histfreq_base = 'zero' write_ic = .true. f_tmask = .true. 
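For the monthly averaged stream that set_nml.histall requests, construct_filename produces names like **iceh.yyyy-mm.nc**; the demo below reproduces just that format (a simplified sketch for this log, with the 'iceh' prefix and '.nc' suffix assumed rather than taken from the namelist).

.. code-block:: fortran

   program histfile_demo
      ! Sketch: build the averaged-monthly history filename the way the
      ! construct_filename write statement in this patch lays it out.
      implicit none
      character(len=64) :: ncfile
      integer :: iyear, imonth
      iyear  = 2005
      imonth = 3
      write(ncfile,'(a,a,i4.4,a,i2.2,a,a)') 'iceh', '.', iyear, '-', imonth, '.', 'nc'
      write(*,*) trim(ncfile)   ! iceh.2005-03.nc
   end program histfile_demo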
From b3364a63d056010504ec09d395f8cdd212985e54 Mon Sep 17 00:00:00 2001 From: apcraig Date: Tue, 10 Aug 2021 22:21:33 -0600 Subject: [PATCH 42/90] fix implementation error --- .../cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 index b35b44b1b..fd20f4c03 100644 --- a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 @@ -483,7 +483,7 @@ subroutine ice_write_hist (ns) endif if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & - .or..not. hist_avg) then + .or..not. hist_avg & .or. n==n_divu(ns) .or. n==n_shear(ns) & ! snapshots .or. n==n_sig1(ns) .or. n==n_sig2(ns) & .or. n==n_sigP(ns) .or. n==n_trsig(ns) & From 96d5851a86cc757751f7d63095d6dde295203d65 Mon Sep 17 00:00:00 2001 From: apcraig Date: Wed, 11 Aug 2021 13:26:01 -0600 Subject: [PATCH 43/90] update model log output in ice_init --- cicecore/cicedynB/general/ice_init.F90 | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/cicecore/cicedynB/general/ice_init.F90 b/cicecore/cicedynB/general/ice_init.F90 index c0c089ac8..6aa9280a7 100644 --- a/cicecore/cicedynB/general/ice_init.F90 +++ b/cicecore/cicedynB/general/ice_init.F90 @@ -1705,13 +1705,13 @@ subroutine input_data write(nu_diag,1023) ' histfreq_n = ', histfreq_n(:) write(nu_diag,1031) ' histfreq_base = ', trim(histfreq_base) write(nu_diag,1011) ' hist_avg = ', hist_avg - if (.not. hist_avg) write(nu_diag,1031) ' History data will be snapshots' + if (.not. hist_avg) write(nu_diag,1039) ' History data will be snapshots' write(nu_diag,1031) ' history_dir = ', trim(history_dir) write(nu_diag,1031) ' history_file = ', trim(history_file) write(nu_diag,1021) ' history_precision= ', history_precision write(nu_diag,1031) ' history_format = ', trim(history_format) if (write_ic) then - write(nu_diag,1031) ' Initial condition will be written in ', & + write(nu_diag,1039) ' Initial condition will be written in ', & trim(incond_dir) endif write(nu_diag,1031) ' dumpfreq = ', trim(dumpfreq) @@ -1878,6 +1878,7 @@ subroutine input_data 1030 format (a20,a14,1x,a) ! 
character
 1031 format (a20,1x,a,a)
 1033 format (a20,1x,6a6)
+ 1039 format (a,1x,a,1x,a,1x,a)

       end subroutine input_data

From 7b5c2b404addb356c75d87f7cb4ae2fa59bfc20b Mon Sep 17 00:00:00 2001
From: apcraig
Date: Thu, 12 Aug 2021 16:57:18 -0600
Subject: [PATCH 44/90] Fix QC issues

- Add netcdf status checks and aborts in ice_read_write.F90
- Check for end of file when reading records in ice_read_write.F90 for
  ice_read_nc methods
- Update set_nml.qc to better specify the test, turn off leap years
  since we're cycling 2005 data
- Add check in cice.t-test.py to make sure there are at least 1825
  files, 5 years of data
- Add QC run to base_suite.ts to verify qc runs to completion and
  possibility to use those results directly for QC validation
- Clean up error messages and some indentation in ice_read_write.F90

---
 .../infrastructure/ice_read_write.F90         | 477 ++++++++++++------
 configuration/scripts/options/set_nml.qc      |  10 +-
 configuration/scripts/tests/QC/cice.t-test.py |   9 +
 configuration/scripts/tests/base_suite.ts     |   1 +
 4 files changed, 353 insertions(+), 144 deletions(-)

diff --git a/cicecore/cicedynB/infrastructure/ice_read_write.F90 b/cicecore/cicedynB/infrastructure/ice_read_write.F90
index d902c62f8..b8b749482 100644
--- a/cicecore/cicedynB/infrastructure/ice_read_write.F90
+++ b/cicecore/cicedynB/infrastructure/ice_read_write.F90
@@ -285,7 +285,7 @@ subroutine ice_read_xyt(nu, nrec, work, atype, diag, &
             read(nu) ((work_g1(i,j),i=1,nx_global),j=1,ny_global)
          endif
       else
-         write(nu_diag,*) ' ERROR: reading unknown atype ',atype
+         write(nu_diag,*) subname,' ERROR: reading unknown atype ',atype
       endif
    endif   ! my_task = master_task
@@ -304,7 +304,7 @@ subroutine ice_read_xyt(nu, nrec, work, atype, diag, &
          amin = minval(work_g1)
          amax = maxval(work_g1, mask = work_g1 /= spval_dbl)
          asum = sum(work_g1, mask = work_g1 /= spval_dbl)
-         write(nu_diag,*) ' read_global ',nu, nrec, amin, amax, asum
+         write(nu_diag,*) subname,' read_global ',nu, nrec, amin, amax, asum
      endif

      !-------------------------------------------------------------------
@@ -433,7 +433,7 @@ subroutine ice_read_xyzt(nu, nrec, work, atype, diag, &
                k=1,nblyr+2)
          endif
       else
-         write(nu_diag,*) ' ERROR: reading unknown atype ',atype
+         write(nu_diag,*) subname,' ERROR: reading unknown atype ',atype
       endif
    endif   ! my_task = master_task
@@ -452,7 +452,7 @@ subroutine ice_read_xyzt(nu, nrec, work, atype, diag, &
          amin = minval(work_g4)
          amax = maxval(work_g4, mask = work_g4 /= spval_dbl)
          asum = sum (work_g4, mask = work_g4 /= spval_dbl)
-         write(nu_diag,*) ' read_global ',nu, nrec, amin, amax, asum
+         write(nu_diag,*) subname,' read_global ',nu, nrec, amin, amax, asum
      endif

      !-------------------------------------------------------------------
@@ -566,7 +566,7 @@ subroutine ice_read_global (nu, nrec, work_g, atype, diag, &
             read(nu) ((work_g(i,j),i=1,nx_global),j=1,ny_global)
          endif
       else
-         write(nu_diag,*) ' ERROR: reading unknown atype ',atype
+         write(nu_diag,*) subname,' ERROR: reading unknown atype ',atype
       endif
    endif   ! 
my_task = master_task @@ -582,7 +582,7 @@ subroutine ice_read_global (nu, nrec, work_g, atype, diag, & amin = minval(work_g) amax = maxval(work_g, mask = work_g /= spval_dbl) asum = sum (work_g, mask = work_g /= spval_dbl) - write(nu_diag,*) ' read_global ',nu, nrec, amin, amax,asum + write(nu_diag,*) subname,' read_global ',nu, nrec, amin, amax,asum endif end subroutine ice_read_global @@ -686,7 +686,7 @@ subroutine ice_read_ext(nu, nrec, work, atype, diag, & read(nu) ((work_g1(i,j),i=1,nx),j=1,ny) endif else - write(nu_diag,*) ' ERROR: reading unknown atype ',atype + write(nu_diag,*) subname,' ERROR: reading unknown atype ',atype endif endif ! my_task = master_task @@ -705,7 +705,7 @@ subroutine ice_read_ext(nu, nrec, work, atype, diag, & amin = minval(work_g1) amax = maxval(work_g1, mask = work_g1 /= spval_dbl) asum = sum (work_g1, mask = work_g1 /= spval_dbl) - write(nu_diag,*) ' read_global ',nu, nrec, amin, amax, asum + write(nu_diag,*) subname,' read_global ',nu, nrec, amin, amax, asum endif !------------------------------------------------------------------- @@ -800,7 +800,7 @@ subroutine ice_write_xyt(nu, nrec, work, atype, diag) elseif (atype == 'ruf8') then write(nu) ((work_g1(i,j),i=1,nx_global),j=1,ny_global) else - write(nu_diag,*) ' ERROR: writing unknown atype ',atype + write(nu_diag,*) subname,' ERROR: writing unknown atype ',atype endif !------------------------------------------------------------------- @@ -810,7 +810,7 @@ subroutine ice_write_xyt(nu, nrec, work, atype, diag) amin = minval(work_g1) amax = maxval(work_g1, mask = work_g1 /= spval_dbl) asum = sum (work_g1, mask = work_g1 /= spval_dbl) - write(nu_diag,*) ' write_global ', nu, nrec, amin, amax, asum + write(nu_diag,*) subname,' write_global ', nu, nrec, amin, amax, asum endif endif ! my_task = master_task @@ -905,7 +905,7 @@ subroutine ice_write_xyzt(nu, nrec, work, atype, diag) write(nu)(((work_g4(i,j,k),i=1,nx_global),j=1,ny_global), & k=1,nblyr+2) else - write(nu_diag,*) ' ERROR: writing unknown atype ',atype + write(nu_diag,*) subname,' ERROR: writing unknown atype ',atype endif !------------------------------------------------------------------- @@ -915,7 +915,7 @@ subroutine ice_write_xyzt(nu, nrec, work, atype, diag) amin = minval(work_g4) amax = maxval(work_g4, mask = work_g4 /= spval_dbl) asum = sum (work_g4, mask = work_g4 /= spval_dbl) - write(nu_diag,*) ' write_global ', nu, nrec, amin, amax, asum + write(nu_diag,*) subname,' write_global ', nu, nrec, amin, amax, asum endif endif ! my_task = master_task @@ -1011,7 +1011,7 @@ subroutine ice_write_ext(nu, nrec, work, atype, diag) elseif (atype == 'ruf8') then write(nu) ((work_g1(i,j),i=1,nx),j=1,ny) else - write(nu_diag,*) ' ERROR: writing unknown atype ',atype + write(nu_diag,*) subname,' ERROR: writing unknown atype ',atype endif !------------------------------------------------------------------- @@ -1021,7 +1021,7 @@ subroutine ice_write_ext(nu, nrec, work, atype, diag) amin = minval(work_g1) amax = maxval(work_g1, mask = work_g1 /= spval_dbl) asum = sum (work_g1, mask = work_g1 /= spval_dbl) - write(nu_diag,*) ' write_global ', nu, nrec, amin, amax, asum + write(nu_diag,*) subname,' write_global ', nu, nrec, amin, amax, asum endif endif ! 
my_task = master_task @@ -1055,14 +1055,15 @@ subroutine ice_open_nc(filename, fid) status = nf90_open(filename, NF90_NOWRITE, fid) if (status /= nf90_noerr) then - call abort_ice (subname//'ERROR: Cannot open '//trim(filename) ) + call abort_ice(subname//' ERROR: Cannot open '//trim(filename), & + file=__FILE__, line=__LINE__) endif endif ! my_task = master_task #else - call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined for '//trim(filename), & - file=__FILE__, line=__LINE__) + call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined for '//trim(filename), & + file=__FILE__, line=__LINE__) fid = -999 ! to satisfy intent(out) attribute #endif end subroutine ice_open_nc @@ -1110,26 +1111,29 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & ! netCDF file diagnostics: integer (kind=int_kind) :: & varid , & ! variable id - status ! status output from netcdf routines -! ndim, nvar, & ! sizes of netcdf file -! id, & ! dimension index -! dimlen ! dimension size + status , & ! status output from netcdf routines + ndims , & ! number of dimensions + dimlen ! dimension size + + integer (kind=int_kind), dimension(10) :: & + dimids ! generic size dimids real (kind=dbl_kind) :: & missingvalue, & amin, amax, asum ! min, max values and sum of input array -! character (char_len) :: & -! dimname ! dimension name - real (kind=dbl_kind), dimension(:,:), allocatable :: & work_g1 integer (kind=int_kind) :: nx, ny + integer (kind=int_kind) :: lnrec ! local value of nrec + real (kind=dbl_kind), dimension(:,:), allocatable :: & work_g2 + lnrec = nrec + if (orca_halogrid .and. .not. present(restart_ext)) then if (my_task == master_task) then allocate(work_g2(nx_global+2,ny_global+1)) @@ -1164,9 +1168,31 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & !------------------------------------------------------------- status = nf90_inq_varid(fid, trim(varname), varid) - if (status /= nf90_noerr) then - call abort_ice (subname//'ERROR: Cannot find variable '//trim(varname) ) + call abort_ice(subname//' ERROR: Cannot find variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + + !------------------------------------------------------------- + ! Check nrec axis size + !------------------------------------------------------------- + + status = nf90_inquire_variable(fid, varid, ndims=ndims, dimids=dimids) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: inquire variable dimids '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + if (ndims > 2) then + status = nf90_inquire_dimension(fid, dimids(3), len=dimlen) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: inquire dimension size 3 '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + if (lnrec > dimlen) then + write(nu_diag,*) subname,' ERROR not enough records, ',trim(varname),lnrec,dimlen + call abort_ice(subname//' ERROR: not enough records '//trim(varname), & + file=__FILE__, line=__LINE__) + endif endif !-------------------------------------------------------------- @@ -1175,13 +1201,21 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & if (orca_halogrid .and. .not. 
present(restart_ext)) then status = nf90_get_var( fid, varid, work_g2, & - start=(/1,1,nrec/), & - count=(/nx_global+2,ny_global+1,1/) ) + start=(/1,1,lnrec/), & + count=(/nx_global+2,ny_global+1,1/)) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif work_g1 = work_g2(2:nx_global+1,1:ny_global) else status = nf90_get_var( fid, varid, work_g1, & - start=(/1,1,nrec/), & - count=(/nx,ny,1/) ) + start=(/1,1,lnrec/), & + count=(/nx,ny,1/)) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif endif status = nf90_get_att(fid, varid, "_FillValue", missingvalue) @@ -1192,19 +1226,19 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & !------------------------------------------------------------------- if (my_task==master_task .and. diag) then - write(nu_diag,*) & - 'ice_read_nc_xy, fid= ',fid, ', nrec = ',nrec, & + write(nu_diag,'(2a,i8,a,i8,2a)') & + subname,' fid= ',fid, ', lnrec = ',lnrec, & ', varname = ',trim(varname) ! status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) -! write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar +! write(nu_diag,*) subname,' ndim= ',ndim,', nvar= ',nvar ! do id=1,ndim ! status = nf90_inquire_dimension(fid,id,name=dimname,len=dimlen) -! write(nu_diag,*) 'Dim name = ',trim(dimname),', size = ',dimlen +! write(nu_diag,*) subname,' Dim name = ',trim(dimname),', size = ',dimlen ! enddo amin = minval(work_g1) amax = maxval(work_g1, mask = work_g1 /= missingvalue) asum = sum (work_g1, mask = work_g1 /= missingvalue) - write(nu_diag,*) ' min, max, sum =', amin, amax, asum, trim(varname) + write(nu_diag,*) subname,' min, max, sum =', amin, amax, asum, trim(varname) endif !------------------------------------------------------------------- @@ -1234,8 +1268,8 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & if (orca_halogrid .and. .not. present(restart_ext)) deallocate(work_g2) #else - call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & - file=__FILE__, line=__LINE__) + call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) #endif end subroutine ice_read_nc_xy @@ -1282,11 +1316,13 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & ! netCDF file diagnostics: integer (kind=int_kind) :: & n, & ! ncat index - varid , & ! variable id - status ! status output from netcdf routines -! ndim, nvar, & ! sizes of netcdf file -! id, & ! dimension index -! dimlen ! size of dimension + varid , & ! variable id + status , & ! status output from netcdf routines + ndims , & ! number of dimensions + dimlen ! dimension size + + integer (kind=int_kind), dimension(10) :: & + dimids ! generic size dimids real (kind=dbl_kind) :: & missingvalue, & ! missing value @@ -1300,9 +1336,13 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & integer (kind=int_kind) :: nx, ny + integer (kind=int_kind) :: lnrec ! local value of nrec + real (kind=dbl_kind), dimension(:,:,:), allocatable :: & work_g2 + lnrec = nrec + if (orca_halogrid .and. .not. 
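!-----------------------------------------------------------------
! Note: the record-axis check introduced in these read routines
! follows one pattern: inquire the variable's dimension ids, look
! up the length of the record dimension, and abort before the read
! if the requested record lnrec exceeds it.  A sketch of that logic
! as a helper is shown below; check_nrec is hypothetical and only
! illustrates what is inlined in each routine (the nf90_* calls are
! the standard netCDF-Fortran API).
!
!   subroutine check_nrec(fid, varid, recdim, lnrec, varname)
!      integer (kind=int_kind), intent(in) :: fid, varid, recdim, lnrec
!      character(len=*), intent(in) :: varname
!      integer (kind=int_kind) :: status, ndims, dimlen
!      integer (kind=int_kind), dimension(10) :: dimids
!      character(len=*), parameter :: subname = '(check_nrec)'
!      status = nf90_inquire_variable(fid, varid, ndims=ndims, dimids=dimids)
!      if (status /= nf90_noerr) call abort_ice(subname// &
!         ' ERROR: inquire variable dimids '//trim(varname), &
!         file=__FILE__, line=__LINE__)
!      if (ndims >= recdim) then
!         status = nf90_inquire_dimension(fid, dimids(recdim), len=dimlen)
!         if (status /= nf90_noerr) call abort_ice(subname// &
!            ' ERROR: inquire dimension size '//trim(varname), &
!            file=__FILE__, line=__LINE__)
!         if (lnrec > dimlen) call abort_ice(subname// &
!            ' ERROR: not enough records '//trim(varname), &
!            file=__FILE__, line=__LINE__)
!      endif
!   end subroutine check_nrec
!-----------------------------------------------------------------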
present(restart_ext)) then if (my_task == master_task) then allocate(work_g2(nx_global+2,ny_global+1,ncat)) @@ -1335,9 +1375,31 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & !------------------------------------------------------------- status = nf90_inq_varid(fid, trim(varname), varid) - if (status /= nf90_noerr) then - call abort_ice (subname//'ERROR: Cannot find variable '//trim(varname) ) + call abort_ice(subname//' ERROR: Cannot find variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + + !------------------------------------------------------------- + ! Check nrec axis size + !------------------------------------------------------------- + + status = nf90_inquire_variable(fid, varid, ndims=ndims, dimids=dimids) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: inquire variable dimids '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + if (ndims > 3) then + status = nf90_inquire_dimension(fid, dimids(4), len=dimlen) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: inquire dimension size 4 '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + if (lnrec > dimlen) then + write(nu_diag,*) subname,' ERROR not enough records, ',trim(varname),lnrec,dimlen + call abort_ice(subname//' ERROR: not enough records '//trim(varname), & + file=__FILE__, line=__LINE__) + endif endif !-------------------------------------------------------------- @@ -1346,13 +1408,21 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & if (orca_halogrid .and. .not. present(restart_ext)) then status = nf90_get_var( fid, varid, work_g2, & - start=(/1,1,1,nrec/), & - count=(/nx_global+2,ny_global+1,ncat,1/) ) + start=(/1,1,1,lnrec/), & + count=(/nx_global+2,ny_global+1,ncat,1/)) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif work_g1 = work_g2(2:nx_global+1,1:ny_global,:) else status = nf90_get_var( fid, varid, work_g1, & - start=(/1,1,1,nrec/), & - count=(/nx,ny,ncat,1/) ) + start=(/1,1,1,lnrec/), & + count=(/nx,ny,ncat,1/)) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif endif status = nf90_get_att(fid, varid, "_FillValue", missingvalue) @@ -1363,20 +1433,20 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & !------------------------------------------------------------------- if (my_task==master_task .and. diag) then - write(nu_diag,*) & - 'ice_read_nc_xyz, fid= ',fid, ', nrec = ',nrec, & + write(nu_diag,'(2a,i8,a,i8,2a)') & + subname,' fid= ',fid, ', lnrec = ',lnrec, & ', varname = ',trim(varname) ! status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) -! write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar +! write(nu_diag,*) subname,' ndim= ',ndim,', nvar= ',nvar ! do id=1,ndim ! status = nf90_inquire_dimension(fid,id,name=dimname,len=dimlen) -! write(nu_diag,*) 'Dim name = ',trim(dimname),', size = ',dimlen +! write(nu_diag,*) subname,' Dim name = ',trim(dimname),', size = ',dimlen ! 
enddo do n=1,ncat amin = minval(work_g1(:,:,n)) amax = maxval(work_g1(:,:,n), mask = work_g1(:,:,n) /= missingvalue) asum = sum (work_g1(:,:,n), mask = work_g1(:,:,n) /= missingvalue) - write(nu_diag,*) ' min, max, sum =', amin, amax, asum, trim(varname) + write(nu_diag,*) subname,' min, max, sum =', amin, amax, asum, trim(varname) enddo endif @@ -1410,8 +1480,8 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & if (orca_halogrid .and. .not. present(restart_ext)) deallocate(work_g2) #else - call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & - file=__FILE__, line=__LINE__) + call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work = c0 ! to satisfy intent(out) attribute #endif end subroutine ice_read_nc_xyz @@ -1465,7 +1535,11 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & ndim, nvar, & ! sizes of netcdf file id, & ! dimension index n, & ! ncat index - dimlen ! size of dimension + ndims, & ! number of dimensions + dimlen ! dimension size + + integer (kind=int_kind), dimension(10) :: & + dimids ! generic size dimids real (kind=dbl_kind) :: & missingvalue, & ! missing value @@ -1479,12 +1553,16 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & integer (kind=int_kind) :: nx, ny + integer (kind=int_kind) :: lnrec ! local value of nrec + character(len=*), parameter :: subname = '(ice_read_nc_xyf)' #ifdef USE_NETCDF real (kind=dbl_kind), dimension(:,:,:), allocatable :: & work_g2 + lnrec = nrec + if (orca_halogrid .and. .not. present(restart_ext)) then if (my_task == master_task) then allocate(work_g2(nx_global+2,ny_global+1,nfreq)) @@ -1517,10 +1595,31 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & !------------------------------------------------------------- status = nf90_inq_varid(fid, trim(varname), varid) - if (status /= nf90_noerr) then - call abort_ice ( & - 'ice_read_nc_xyf: Cannot find variable '//trim(varname) ) + call abort_ice(subname//' ERROR: Cannot find variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + + !------------------------------------------------------------- + ! Check nrec axis size + !------------------------------------------------------------- + + status = nf90_inquire_variable(fid, varid, ndims=ndims, dimids=dimids) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: inquire variable dimids '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + if (ndims > 3) then + status = nf90_inquire_dimension(fid, dimids(4), len=dimlen) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: inquire dimension size 4 '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + if (lnrec > dimlen) then + write(nu_diag,*) subname,' ERROR not enough records, ',trim(varname),lnrec,dimlen + call abort_ice(subname//' ERROR: not enough records '//trim(varname), & + file=__FILE__, line=__LINE__) + endif endif !-------------------------------------------------------------- @@ -1529,13 +1628,21 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & if (orca_halogrid .and. .not. 
present(restart_ext)) then status = nf90_get_var( fid, varid, work_g2, & - start=(/1,1,1,nrec/), & - count=(/nx_global+2,ny_global+1,nfreq,1/) ) + start=(/1,1,1,lnrec/), & + count=(/nx_global+2,ny_global+1,nfreq,1/)) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif work_g1 = work_g2(2:nx_global+1,1:ny_global,:) else status = nf90_get_var( fid, varid, work_g1, & - start=(/1,1,1,nrec/), & - count=(/nx,ny,nfreq,1/) ) + start=(/1,1,1,lnrec/), & + count=(/nx,ny,nfreq,1/)) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif endif status = nf90_get_att(fid, varid, "missing_value", missingvalue) @@ -1546,21 +1653,21 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & !------------------------------------------------------------------- if (my_task==master_task .and. diag) then - write(nu_diag,*) & - 'ice_read_nc_xyf, fid= ',fid, ', nrec = ',nrec, & + write(nu_diag,'(2a,i8,a,i8,2a)') & + subname,' fid= ',fid, ', lnrec = ',lnrec, & ', varname = ',trim(varname) status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) - write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar + write(nu_diag,*) subname,' ndim= ',ndim,', nvar= ',nvar do id=1,ndim status = nf90_inquire_dimension(fid,id,name=dimname,len=dimlen) - write(nu_diag,*) 'Dim name = ',trim(dimname),', size = ',dimlen + write(nu_diag,*) subname,' Dim name = ',trim(dimname),', size = ',dimlen enddo - write(nu_diag,*) 'missingvalue= ',missingvalue + write(nu_diag,*) subname,' missingvalue= ',missingvalue do n = 1, nfreq amin = minval(work_g1(:,:,n)) amax = maxval(work_g1(:,:,n), mask = work_g1(:,:,n) /= missingvalue) asum = sum (work_g1(:,:,n), mask = work_g1(:,:,n) /= missingvalue) - write(nu_diag,*) ' min, max, sum =', amin, amax, asum + write(nu_diag,*) subname,' min, max, sum =', amin, amax, asum enddo endif @@ -1597,8 +1704,8 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & if (orca_halogrid .and. .not. present(restart_ext)) deallocate(work_g2) #else - call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & - file=__FILE__, line=__LINE__) + call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work = c0 ! to satisfy intent(out) attribute #endif @@ -1640,24 +1747,54 @@ subroutine ice_read_nc_point(fid, nrec, varname, work, diag, & status, & ! status output from netcdf routines ndim, nvar, & ! sizes of netcdf file id, & ! dimension index - dimlen ! size of dimension + ndims, & ! number of dimensions + dimlen ! dimension size + + integer (kind=int_kind), dimension(10) :: & + dimids ! generic size dimids real (kind=dbl_kind), dimension(1) :: & workg ! temporary work variable + integer (kind=int_kind) :: lnrec ! local value of nrec + character (char_len) :: & dimname ! dimension name - if (my_task == master_task) then + lnrec = nrec + + if (my_task == master_task) then !------------------------------------------------------------- ! Find out ID of required variable !------------------------------------------------------------- status = nf90_inq_varid(fid, trim(varname), varid) - if (status /= nf90_noerr) then - call abort_ice (subname//'ERROR: Cannot find variable '//trim(varname) ) + call abort_ice(subname//' ERROR: Cannot find variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + + !------------------------------------------------------------- + ! 
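!-----------------------------------------------------------------
! Note: two recurring details in the routines above, summarized
! here.  First, the record number nrec is copied into a local
! lnrec, and lnrec is what the netCDF calls and diagnostics then
! use.  Second, the return status of every nf90_get_var is now
! checked instead of being ignored, following this shape:
!
!   status = nf90_get_var(fid, varid, work_g1, &
!            start=(/1,1,lnrec/), count=(/nx,ny,1/))
!   if (status /= nf90_noerr) then
!      call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), &
!         file=__FILE__, line=__LINE__)
!   endif
!-----------------------------------------------------------------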
Check nrec axis size + !------------------------------------------------------------- + + status = nf90_inquire_variable(fid, varid, ndims=ndims, dimids=dimids) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: inquire variable dimids '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + if (ndims > 0) then + status = nf90_inquire_dimension(fid, dimids(1), len=dimlen) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: inquire dimension size 1 '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + if (lnrec > dimlen) then + write(nu_diag,*) subname,' ERROR not enough records, ',trim(varname),lnrec,dimlen + call abort_ice(subname//' ERROR: not enough records '//trim(varname), & + file=__FILE__, line=__LINE__) + endif endif !-------------------------------------------------------------- @@ -1665,11 +1802,11 @@ subroutine ice_read_nc_point(fid, nrec, varname, work, diag, & !-------------------------------------------------------------- status = nf90_get_var(fid, varid, workg, & - start= (/ nrec /), & - count=(/ 1 /) ) - - if (status /= nf90_noerr) then - call abort_ice (subname//'ERROR: Cannot get variable '//trim(varname) ) + start= (/ lnrec /), & + count=(/ 1 /)) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & + file=__FILE__, line=__LINE__) endif endif ! my_task = master_task @@ -1678,22 +1815,22 @@ subroutine ice_read_nc_point(fid, nrec, varname, work, diag, & !------------------------------------------------------------------- if (my_task==master_task .and. diag) then - write(nu_diag,*) & - 'ice_read_nc_point, fid= ',fid, ', nrec = ',nrec, & + write(nu_diag,'(2a,i8,a,i8,2a)') & + subname,' fid= ',fid, ', lnrec = ',lnrec, & ', varname = ',trim(varname) status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) - write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar + write(nu_diag,*) subname,' ndim= ',ndim,', nvar= ',nvar do id=1,ndim status = nf90_inquire_dimension(fid,id,name=dimname,len=dimlen) - write(nu_diag,*) 'Dim name = ',trim(dimname),', size = ',dimlen + write(nu_diag,*) subname,' Dim name = ',trim(dimname),', size = ',dimlen enddo endif work = workg(1) #else - call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & - file=__FILE__, line=__LINE__) + call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work = c0 ! to satisfy intent(out) attribute #endif end subroutine ice_read_nc_point @@ -1736,16 +1873,25 @@ subroutine ice_read_nc_z(fid, nrec, varname, work, diag, & status, & ! status output from netcdf routines ndim, nvar, & ! sizes of netcdf file id, & ! dimension index - dimlen ! size of dimension + ndims, & ! number of dimensions + dimlen ! dimension size + + integer (kind=int_kind), dimension(10) :: & + dimids ! generic size dimids character (char_len) :: & dimname ! dimension name + + integer (kind=int_kind) :: lnrec ! 
local value of nrec + #endif character(len=*), parameter :: subname = '(ice_read_nc_z)' #ifdef USE_NETCDF + lnrec = nrec + allocate(work_z(nilyr)) if (my_task == master_task) then @@ -1755,9 +1901,31 @@ subroutine ice_read_nc_z(fid, nrec, varname, work, diag, & !------------------------------------------------------------- status = nf90_inq_varid(fid, trim(varname), varid) - if (status /= nf90_noerr) then - call abort_ice (subname//'ERROR: Cannot find variable '//trim(varname) ) + call abort_ice(subname//' ERROR: Cannot find variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + + !------------------------------------------------------------- + ! Check nrec axis size + !------------------------------------------------------------- + + status = nf90_inquire_variable(fid, varid, ndims=ndims, dimids=dimids) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: inquire variable dimids '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + if (ndims > 1) then + status = nf90_inquire_dimension(fid, dimids(2), len=dimlen) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: inquire dimension size 2 '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + if (lnrec > dimlen) then + write(nu_diag,*) subname,' ERROR not enough records, ',trim(varname),lnrec,dimlen + call abort_ice(subname//' ERROR: not enough records '//trim(varname), & + file=__FILE__, line=__LINE__) + endif endif !-------------------------------------------------------------- @@ -1765,9 +1933,12 @@ subroutine ice_read_nc_z(fid, nrec, varname, work, diag, & !-------------------------------------------------------------- status = nf90_get_var( fid, varid, work_z, & - start=(/1,nrec/), & - count=(/nilyr,1/) ) - + start=(/1,lnrec/), & + count=(/nilyr,1/)) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif endif ! my_task = master_task !------------------------------------------------------------------- @@ -1775,14 +1946,14 @@ subroutine ice_read_nc_z(fid, nrec, varname, work, diag, & !------------------------------------------------------------------- if (my_task==master_task .and. diag) then - write(nu_diag,*) & - 'ice_read_nc_z, fid= ',fid, ', nrec = ',nrec, & + write(nu_diag,'(2a,i8,a,i8,2a)') & + subname,' fid= ',fid, ', lnrec = ',lnrec, & ', varname = ',trim(varname) status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) - write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar + write(nu_diag,*) subname,' ndim= ',ndim,', nvar= ',nvar do id=1,ndim status = nf90_inquire_dimension(fid,id,name=dimname,len=dimlen) - write(nu_diag,*) 'Dim name = ',trim(dimname),', size = ',dimlen + write(nu_diag,*) subname,' Dim name = ',trim(dimname),', size = ',dimlen enddo endif @@ -1790,8 +1961,8 @@ subroutine ice_read_nc_z(fid, nrec, varname, work, diag, & deallocate(work_z) #else - call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & - file=__FILE__, line=__LINE__) + call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work = c0 ! to satisfy intent(out) attribute #endif end subroutine ice_read_nc_z @@ -1826,7 +1997,7 @@ subroutine ice_write_nc_xy(fid, nrec, varid, work, diag, & ! local variables - character(len=*), parameter :: subname = '(ice_read_nc_xy)' + character(len=*), parameter :: subname = '(ice_write_nc_xy)' #ifdef USE_NETCDF ! 
netCDF file diagnostics: @@ -1886,7 +2057,7 @@ subroutine ice_write_nc_xy(fid, nrec, varid, work, diag, & status = nf90_put_var( fid, varid, work_g1, & start=(/1,1,nrec/), & - count=(/nx,ny,1/) ) + count=(/nx,ny,1/)) endif ! my_task = master_task @@ -1896,25 +2067,25 @@ subroutine ice_write_nc_xy(fid, nrec, varid, work, diag, & if (my_task==master_task .and. diag) then ! write(nu_diag,*) & -! 'ice_write_nc_xy, fid= ',fid, ', nrec = ',nrec, & +! subname,' fid= ',fid, ', nrec = ',nrec, & ! ', varid = ',varid ! status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) -! write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar +! write(nu_diag,*) subname,' ndim= ',ndim,', nvar= ',nvar ! do id=1,ndim ! status = nf90_inquire_dimension(fid,id,name=dimname,len=dimlen) -! write(nu_diag,*) 'Dim name = ',trim(dimname),', size = ',dimlen +! write(nu_diag,*) subname,' Dim name = ',trim(dimname),', size = ',dimlen ! enddo amin = minval(work_g1) amax = maxval(work_g1, mask = work_g1 /= spval_dbl) asum = sum (work_g1, mask = work_g1 /= spval_dbl) - write(nu_diag,*) ' min, max, sum =', amin, amax, asum, trim(lvarname) + write(nu_diag,*) subname,' min, max, sum =', amin, amax, asum, trim(lvarname) endif deallocate(work_g1) #else - call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & - file=__FILE__, line=__LINE__) + call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) #endif end subroutine ice_write_nc_xy @@ -1949,7 +2120,7 @@ subroutine ice_write_nc_xyz(fid, nrec, varid, work, diag, & ! local variables - character(len=*), parameter :: subname = '(ice_read_nc_xyz)' + character(len=*), parameter :: subname = '(ice_write_nc_xyz)' #ifdef USE_NETCDF ! netCDF file diagnostics: @@ -2016,7 +2187,7 @@ subroutine ice_write_nc_xyz(fid, nrec, varid, work, diag, & status = nf90_put_var( fid, varid, work_g1, & start=(/1,1,1,nrec/), & - count=(/nx,ny,ncat,1/) ) + count=(/nx,ny,ncat,1/)) endif ! my_task = master_task @@ -2026,13 +2197,13 @@ subroutine ice_write_nc_xyz(fid, nrec, varid, work, diag, & if (my_task==master_task .and. diag) then ! write(nu_diag,*) & -! 'ice_write_nc_xyz, fid= ',fid, ', nrec = ',nrec, & +! subname,' fid= ',fid, ', nrec = ',nrec, & ! ', varid = ',varid ! status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) -! write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar +! write(nu_diag,*) subname,' ndim= ',ndim,', nvar= ',nvar ! do id=1,ndim ! status = nf90_inquire_dimension(fid,id,name=dimname,len=dimlen) -! write(nu_diag,*) 'Dim name = ',trim(dimname),', size = ',dimlen +! write(nu_diag,*) subname,' Dim name = ',trim(dimname),', size = ',dimlen ! 
enddo amin = 10000._dbl_kind amax = -10000._dbl_kind @@ -2040,15 +2211,15 @@ subroutine ice_write_nc_xyz(fid, nrec, varid, work, diag, & amin = minval(work_g1(:,:,n)) amax = maxval(work_g1(:,:,n), mask = work_g1(:,:,n) /= spval_dbl) asum = sum (work_g1(:,:,n), mask = work_g1(:,:,n) /= spval_dbl) - write(nu_diag,*) ' min, max, sum =', amin, amax, asum, trim(lvarname) + write(nu_diag,*) subname,' min, max, sum =', amin, amax, asum, trim(lvarname) enddo endif deallocate(work_g1) #else - call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & - file=__FILE__, line=__LINE__) + call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) #endif end subroutine ice_write_nc_xyz @@ -2117,9 +2288,9 @@ subroutine ice_read_global_nc (fid, nrec, varname, work_g, diag) !------------------------------------------------------------- status = nf90_inq_varid(fid, trim(varname), varid) - if (status /= nf90_noerr) then - call abort_ice (subname//'ERROR: Cannot find variable '//trim(varname) ) + call abort_ice(subname//' ERROR: Cannot find variable '//trim(varname), & + file=__FILE__, line=__LINE__) endif !-------------------------------------------------------------- @@ -2129,12 +2300,20 @@ subroutine ice_read_global_nc (fid, nrec, varname, work_g, diag) if (orca_halogrid) then status = nf90_get_var( fid, varid, work_g3, & start=(/1,1,nrec/), & - count=(/nx_global+2,ny_global+1,1/) ) + count=(/nx_global+2,ny_global+1,1/)) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif work_g=work_g3(2:nx_global+1,1:ny_global) else status = nf90_get_var( fid, varid, work_g, & start=(/1,1,nrec/), & - count=(/nx_global,ny_global,1/) ) + count=(/nx_global,ny_global,1/)) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif endif endif ! my_task = master_task @@ -2144,25 +2323,25 @@ subroutine ice_read_global_nc (fid, nrec, varname, work_g, diag) if (my_task == master_task .and. diag) then ! write(nu_diag,*) & -! 'ice_read_global_nc, fid= ',fid, ', nrec = ',nrec, & +! subname,' fid= ',fid, ', nrec = ',nrec, & ! ', varname = ',trim(varname) ! status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) -! write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar +! write(nu_diag,*) subname,' ndim= ',ndim,', nvar= ',nvar ! do id=1,ndim ! status = nf90_inquire_dimension(fid,id,name=dimname,len=dimlen) -! write(nu_diag,*) 'Dim name = ',trim(dimname),', size = ',dimlen +! write(nu_diag,*) subname,' Dim name = ',trim(dimname),', size = ',dimlen ! enddo amin = minval(work_g) amax = maxval(work_g, mask = work_g /= spval_dbl) asum = sum (work_g, mask = work_g /= spval_dbl) - write(nu_diag,*) 'min, max, sum = ', amin, amax, asum, trim(varname) + write(nu_diag,*) subname,' min, max, sum = ', amin, amax, asum, trim(varname) endif if (orca_halogrid) deallocate(work_g3) #else - call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & - file=__FILE__, line=__LINE__) + call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work_g = c0 ! 
to satisfy intent(out) attribute #endif @@ -2190,8 +2369,8 @@ subroutine ice_close_nc(fid) status = nf90_close(fid) endif #else - call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & - file=__FILE__, line=__LINE__) + call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) #endif end subroutine ice_close_nc @@ -2279,9 +2458,9 @@ subroutine ice_read_nc_uv(fid, nrec, nzlev, varname, work, diag, & !------------------------------------------------------------- status = nf90_inq_varid(fid, trim(varname), varid) - if (status /= nf90_noerr) then - call abort_ice (subname//'ERROR: Cannot find variable '//trim(varname) ) + call abort_ice(subname//' ERROR: Cannot find variable '//trim(varname), & + file=__FILE__, line=__LINE__) endif !-------------------------------------------------------------- @@ -2290,7 +2469,11 @@ subroutine ice_read_nc_uv(fid, nrec, nzlev, varname, work, diag, & status = nf90_get_var( fid, varid, work_g1, & start=(/1,1,nzlev,nrec/), & - count=(/nx,ny,1,1/) ) + count=(/nx,ny,1,1/)) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif endif ! my_task = master_task @@ -2302,7 +2485,7 @@ subroutine ice_read_nc_uv(fid, nrec, nzlev, varname, work, diag, & amin = minval(work_g1) amax = maxval(work_g1, mask = work_g1 /= spval_dbl) asum = sum (work_g1, mask = work_g1 /= spval_dbl) - write(nu_diag,*) ' min, max, sum =', amin, amax, asum, trim(varname) + write(nu_diag,*) subname,' min, max, sum =', amin, amax, asum, trim(varname) endif !------------------------------------------------------------------- @@ -2327,8 +2510,8 @@ subroutine ice_read_nc_uv(fid, nrec, nzlev, varname, work, diag, & deallocate(work_g1) #else - call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & - file=__FILE__, line=__LINE__) + call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work = c0 ! to satisfy intent(out) attribute #endif @@ -2380,9 +2563,9 @@ subroutine ice_read_vec_nc (fid, nrec, varname, work_g, diag) !------------------------------------------------------------- status = nf90_inq_varid(fid, trim(varname), varid) - if (status /= nf90_noerr) then - call abort_ice (subname//'ERROR: Cannot find variable '//trim(varname) ) + call abort_ice(subname//' ERROR: Cannot find variable '//trim(varname), & + file=__FILE__, line=__LINE__) endif !-------------------------------------------------------------- @@ -2391,7 +2574,12 @@ subroutine ice_read_vec_nc (fid, nrec, varname, work_g, diag) status = nf90_get_var( fid, varid, work_g, & start=(/1/), & - count=(/nrec/) ) + count=(/nrec/)) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + endif ! my_task = master_task !------------------------------------------------------------------- @@ -2401,12 +2589,12 @@ subroutine ice_read_vec_nc (fid, nrec, varname, work_g, diag) if (my_task == master_task .and. diag) then amin = minval(work_g) amax = maxval(work_g) - write(nu_diag,*) 'min, max, nrec = ', amin, amax, nrec + write(nu_diag,*) subname,' min, max, nrec = ', amin, amax, nrec endif #else - call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & - file=__FILE__, line=__LINE__) + call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work_g = c0 ! 
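!-----------------------------------------------------------------
! Note: every routine in this file is wrapped in the same
! USE_NETCDF cpp guard: the netCDF body compiles only when the
! macro is defined, and the fallback branch aborts with file/line
! context while still assigning the intent(out) arguments so the
! interface remains well-defined.  A minimal sketch of the guard,
! with the routine name ice_example_nc being illustrative only:
!
!   subroutine ice_example_nc(work)
!      real (kind=dbl_kind), intent(out) :: work
!      character(len=*), parameter :: subname = '(ice_example_nc)'
! #ifdef USE_NETCDF
!      ! ... normal netCDF calls fill work here ...
! #else
!      call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', &
!         file=__FILE__, line=__LINE__)
!      work = c0 ! to satisfy intent(out) attribute
! #endif
!   end subroutine ice_example_nc
!-----------------------------------------------------------------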
to satisfy intent(out) attribute #endif @@ -2437,22 +2625,25 @@ subroutine ice_get_ncvarsize(fid,varname,recsize) if (my_task == master_task) then status=nf90_inquire(fid, nDimensions = nDims) if (status /= nf90_noerr) then - call abort_ice (subname//'ERROR: inquire nDimensions' ) + call abort_ice(subname//' ERROR: inquire nDimensions', & + file=__FILE__, line=__LINE__ ) endif do i=1,nDims status = nf90_inquire_dimension(fid,i,name=cvar,len=recsize) if (status /= nf90_noerr) then - call abort_ice (subname//'ERROR: inquire len for variable '//trim(cvar) ) + call abort_ice(subname//' ERROR: inquire len for variable '//trim(cvar), & + file=__FILE__, line=__LINE__) endif if (trim(cvar) == trim(varname)) exit enddo if (trim(cvar) .ne. trim(varname)) then - call abort_ice (subname//'ERROR: Did not find variable '//trim(varname) ) + call abort_ice(subname//' ERROR: Did not find variable '//trim(varname), & + file=__FILE__, line=__LINE__) endif endif #else - call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & - file=__FILE__, line=__LINE__) + call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) recsize = 0 ! to satisfy intent(out) attribute #endif diff --git a/configuration/scripts/options/set_nml.qc b/configuration/scripts/options/set_nml.qc index 2b1528cc5..70ba1b429 100644 --- a/configuration/scripts/options/set_nml.qc +++ b/configuration/scripts/options/set_nml.qc @@ -1,4 +1,12 @@ -npt = 43800 +npt_unit = 'y' +npt = 5 +year_init = 2005 +month_init = 1 +day_init = 1 +sec_init = 0 +use_leap_years = .false. +fyear_init = 2005 +ycycle = 1 dumpfreq = 'm' dumpfreq_n = 12 diagfreq = 24 diff --git a/configuration/scripts/tests/QC/cice.t-test.py b/configuration/scripts/tests/QC/cice.t-test.py index 987175245..6f2c7e89b 100755 --- a/configuration/scripts/tests/QC/cice.t-test.py +++ b/configuration/scripts/tests/QC/cice.t-test.py @@ -57,6 +57,15 @@ def gen_filenames(base_dir, test_dir): " # of files: {}".format(len(files_b))) sys.exit(-1) + if len(files_a) < 1825: + logger.error("Number of output files too small, expecting at least 1825." 
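# Note: 1825 corresponds to one history file per day for the 5-year
# QC run configured in set_nml.qc above (npt_unit = 'y', npt = 5,
# use_leap_years = .false., so 5*365 = 1825 days).  Fewer files than
# that indicates an incomplete run rather than a statistical failure.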
+ \ + " Exiting...\n" + \ + "Baseline directory: {}\n".format(path_a) + \ + " # of files: {}\n".format(len(files_a)) + \ + "Test directory: {}\n".format(path_b) + \ + " # of files: {}".format(len(files_b))) + sys.exit(-1) + logger.info("Number of files: %d", len(files_a)) return path_a, path_b, files_a, files_b diff --git a/configuration/scripts/tests/base_suite.ts b/configuration/scripts/tests/base_suite.ts index 78df9ac81..b94be6219 100644 --- a/configuration/scripts/tests/base_suite.ts +++ b/configuration/scripts/tests/base_suite.ts @@ -4,6 +4,7 @@ smoke gx3 1x1 debug,diag1,run2day smoke gx3 1x4 debug,diag1,run2day smoke gx3 4x1 debug,diag1,run5day restart gx3 8x2 debug +smoke gx1 64x1 qc,medium smoke gx3 8x2 diag24,run1year,medium smoke gx3 7x2 diag1,bigdiag,run1day,diagpt1 decomp gx3 4x2x25x29x5 none From c5794b4b5c6162d2d148215dadaf6b31d119714f Mon Sep 17 00:00:00 2001 From: apcraig Date: Thu, 12 Aug 2021 23:49:05 -0600 Subject: [PATCH 45/90] Update testing - Add prod suite including 10 year gx1prod and qc test - Update unit test compare scripts --- cicecore/drivers/unittest/bcstchk/bcstchk.F90 | 2 +- cicecore/drivers/unittest/calchk/calchk.F90 | 2 +- cicecore/drivers/unittest/helloworld/helloworld.F90 | 2 +- cicecore/drivers/unittest/sumchk/sumchk.F90 | 2 +- configuration/scripts/options/set_nml.run10year | 7 +++++++ configuration/scripts/tests/base_suite.ts | 1 - configuration/scripts/tests/comparelog.csh | 4 ++-- configuration/scripts/tests/prod_suite.ts | 3 +++ 8 files changed, 16 insertions(+), 7 deletions(-) create mode 100644 configuration/scripts/options/set_nml.run10year create mode 100644 configuration/scripts/tests/prod_suite.ts diff --git a/cicecore/drivers/unittest/bcstchk/bcstchk.F90 b/cicecore/drivers/unittest/bcstchk/bcstchk.F90 index d267f77e6..264931780 100644 --- a/cicecore/drivers/unittest/bcstchk/bcstchk.F90 +++ b/cicecore/drivers/unittest/bcstchk/bcstchk.F90 @@ -51,7 +51,7 @@ program bcstchk write(6,*) ' ' write(6,*) '==========================================================' write(6,*) ' ' - write(6,*) 'Running BCSTCHK' + write(6,*) 'RunningUnitTest BCSTCHK' write(6,*) ' ' write(6,*) ' npes = ',npes write(6,*) ' my_task = ',my_task diff --git a/cicecore/drivers/unittest/calchk/calchk.F90 b/cicecore/drivers/unittest/calchk/calchk.F90 index 6fa99e4dd..3c340eb26 100644 --- a/cicecore/drivers/unittest/calchk/calchk.F90 +++ b/cicecore/drivers/unittest/calchk/calchk.F90 @@ -41,7 +41,7 @@ program calchk failflag = 'FAIL' write(6,*) ' ' - write(6,*) 'Running CALCHK' + write(6,*) 'RunningUnitTest CALCHK' write(6,*) ' ' errorflag0 = passflag diff --git a/cicecore/drivers/unittest/helloworld/helloworld.F90 b/cicecore/drivers/unittest/helloworld/helloworld.F90 index 435d5479e..c4e4ae91f 100644 --- a/cicecore/drivers/unittest/helloworld/helloworld.F90 +++ b/cicecore/drivers/unittest/helloworld/helloworld.F90 @@ -1,7 +1,7 @@ program hello_world - write(6,*) 'hello_world' + write(6,*) 'RunningUnitTest hello_world' write(6,*) 'hello_world COMPLETED SUCCESSFULLY' write(6,*) 'hello_world TEST COMPLETED SUCCESSFULLY' diff --git a/cicecore/drivers/unittest/sumchk/sumchk.F90 b/cicecore/drivers/unittest/sumchk/sumchk.F90 index 210ca669f..f314959cb 100644 --- a/cicecore/drivers/unittest/sumchk/sumchk.F90 +++ b/cicecore/drivers/unittest/sumchk/sumchk.F90 @@ -107,7 +107,7 @@ program sumchk write(6,*) ' ' write(6,*) '==========================================================' write(6,*) ' ' - write(6,*) 'Running SUMCHK' + write(6,*) 'RunningUnitTest SUMCHK' write(6,*) ' ' write(6,*) ' 
npes = ',npes write(6,*) ' my_task = ',my_task diff --git a/configuration/scripts/options/set_nml.run10year b/configuration/scripts/options/set_nml.run10year new file mode 100644 index 000000000..cf672e991 --- /dev/null +++ b/configuration/scripts/options/set_nml.run10year @@ -0,0 +1,7 @@ +npt_unit = 'y' +npt = 10 +dumpfreq = 'y' +dumpfreq_n = 12 +diagfreq = 24 +histfreq = 'm','x','x','x','x' + diff --git a/configuration/scripts/tests/base_suite.ts b/configuration/scripts/tests/base_suite.ts index b94be6219..78df9ac81 100644 --- a/configuration/scripts/tests/base_suite.ts +++ b/configuration/scripts/tests/base_suite.ts @@ -4,7 +4,6 @@ smoke gx3 1x1 debug,diag1,run2day smoke gx3 1x4 debug,diag1,run2day smoke gx3 4x1 debug,diag1,run5day restart gx3 8x2 debug -smoke gx1 64x1 qc,medium smoke gx3 8x2 diag24,run1year,medium smoke gx3 7x2 diag1,bigdiag,run1day,diagpt1 decomp gx3 4x2x25x29x5 none diff --git a/configuration/scripts/tests/comparelog.csh b/configuration/scripts/tests/comparelog.csh index d9e4a7a89..af6b2d76e 100755 --- a/configuration/scripts/tests/comparelog.csh +++ b/configuration/scripts/tests/comparelog.csh @@ -58,8 +58,8 @@ if (${filearg} == 1) then cat ${base_data} | grep -A 99999999 "total ice area (km^2)" | grep -e istep1: -e = | grep -iv "min, max, sum" >&! ${base_out} cat ${test_data} | grep -A 99999999 "total ice area (km^2)" | grep -e istep1: -e = | grep -iv "min, max, sum" >&! ${test_out} else - cp -f ${base_data} ${base_out} - cp -f ${test_data} ${test_out} + sed -n '/RunningUnitTest/,$p' ${base_data} >! ${base_out} + sed -n '/RunningUnitTest/,$p' ${test_data} >! ${test_out} endif set basenum = `cat ${base_out} | wc -l` diff --git a/configuration/scripts/tests/prod_suite.ts b/configuration/scripts/tests/prod_suite.ts new file mode 100644 index 000000000..66226b104 --- /dev/null +++ b/configuration/scripts/tests/prod_suite.ts @@ -0,0 +1,3 @@ +# Test Grid PEs Sets BFB-compare +smoke gx1 64x1 qc,medium +smoke gx1 64x2 gx1prod,long,run10year \ No newline at end of file From eaa3c3ab880e75a023e2a83303b04896e7ffbb60 Mon Sep 17 00:00:00 2001 From: apcraig Date: Thu, 12 Aug 2021 23:53:55 -0600 Subject: [PATCH 46/90] update documentation --- doc/source/user_guide/ug_testing.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/source/user_guide/ug_testing.rst b/doc/source/user_guide/ug_testing.rst index 2fadeacd0..ce5c2ef41 100644 --- a/doc/source/user_guide/ug_testing.rst +++ b/doc/source/user_guide/ug_testing.rst @@ -670,6 +670,9 @@ The unit tests calchk or helloworld can be used as examples. The following strings should be written to the log file at the end of the unit test run. The string "COMPLETED SUCCESSFULLY" will indicate the run ran to completion. The string "TEST COMPLETED SUCCESSFULLY" will indicate all the unit testing passed during the run. +The unit test log file output is compared as part of regression testing. The string +"RunningUnitTest" indicates the start of the output to compare. +That string should be written to the log file at the start of the unit test model output. These strings will be queried by the testing scripts and will impact the test reporting. See other unit tests for examples about how these strings could be written. 
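For reference, a minimal unit test satisfying the conventions documented above is
sketched below; the program name mytest is illustrative only (compare
helloworld.F90 earlier in this series), and the start marker feeds the sed
extraction added to comparelog.csh above.

    program mytest
       ! start marker: comparelog.csh extracts output from here
       ! using sed -n '/RunningUnitTest/,$p'
       write(6,*) 'RunningUnitTest MYTEST'
       ! ... perform the unit checks here ...
       ! written only when execution reaches the end of the run
       write(6,*) 'MYTEST COMPLETED SUCCESSFULLY'
       ! written only when all unit checks passed
       write(6,*) 'MYTEST TEST COMPLETED SUCCESSFULLY'
    end program mytest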
From e31ce7e996dedbc050965bfbd226fa4a5da4df1e Mon Sep 17 00:00:00 2001 From: apcraig Date: Fri, 13 Aug 2021 09:00:01 -0600 Subject: [PATCH 47/90] reset calchk to 100000 years --- cicecore/drivers/unittest/calchk/calchk.F90 | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cicecore/drivers/unittest/calchk/calchk.F90 b/cicecore/drivers/unittest/calchk/calchk.F90 index 3c340eb26..d669dbad3 100644 --- a/cicecore/drivers/unittest/calchk/calchk.F90 +++ b/cicecore/drivers/unittest/calchk/calchk.F90 @@ -58,8 +58,8 @@ program calchk testname(9) = 'seconds_to_hms' ! test yearmax years from year 0 - yearmax = 1000 -! yearmax = 100000 +! yearmax = 1000 + yearmax = 100000 ! test 3 calendars do n = 1,3 From 83068c7a3802948d3d89e36b6c871aa155afb684 Mon Sep 17 00:00:00 2001 From: apcraig Date: Fri, 13 Aug 2021 09:24:49 -0600 Subject: [PATCH 48/90] update evp1d test --- configuration/scripts/tests/base_suite.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/configuration/scripts/tests/base_suite.ts b/configuration/scripts/tests/base_suite.ts index 78df9ac81..71b33272d 100644 --- a/configuration/scripts/tests/base_suite.ts +++ b/configuration/scripts/tests/base_suite.ts @@ -9,7 +9,7 @@ smoke gx3 7x2 diag1,bigdiag,run1day,diagpt1 decomp gx3 4x2x25x29x5 none smoke gx3 4x2 diag1,run5day smoke_gx3_8x2_diag1_run5day smoke gx3 4x1 diag1,run5day,thread smoke_gx3_8x2_diag1_run5day -smoke gx3 1x8 diag1,run5day,evp1d smoke_gx3_8x2_diag1_run5day +smoke gx3 1x8 diag1,run5day,evp1d restart gx1 40x4 droundrobin,medium restart tx1 40x4 dsectrobin,medium restart gx3 4x4 none From 4373d3d8ff5767d6379ccaece8788a603170cb4d Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Sun, 15 Aug 2021 11:04:01 -0400 Subject: [PATCH 49/90] update icepack --- .gitmodules | 4 +++- icepack | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.gitmodules b/.gitmodules index 8a773d230..472a87b2e 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +1,5 @@ [submodule "icepack"] path = icepack - url = https://github.com/NOAA-EMC/Icepack + #url = https://github.com/NOAA-EMC/Icepack + url = https://github.com/DeniseWorthen/Icepack + branch = feature/updcice diff --git a/icepack b/icepack index 41cc89d0a..5b0cf5bac 160000 --- a/icepack +++ b/icepack @@ -1 +1 @@ -Subproject commit 41cc89d0afc0494c545adaacd2082cc5f2da6959 +Subproject commit 5b0cf5bacc9018904cfe5df08ab707239cc8e8a2 From aade12430c0155e776084e819d6a33f6241860f7 Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Fri, 20 Aug 2021 08:14:44 -0400 Subject: [PATCH 50/90] update icepack --- icepack | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/icepack b/icepack index 5b0cf5bac..31a575dea 160000 --- a/icepack +++ b/icepack @@ -1 +1 @@ -Subproject commit 5b0cf5bacc9018904cfe5df08ab707239cc8e8a2 +Subproject commit 31a575dea3fdb9f94f548255fd2790d538d3f67a From 7f089d01893c1c229dcf0a70c046ced43c712754 Mon Sep 17 00:00:00 2001 From: Denise Worthen Date: Mon, 23 Aug 2021 08:19:25 -0400 Subject: [PATCH 51/90] add memory profiling (#36) * add profile_memory calls to CICE cap --- .../drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index a832e7bdf..9d650d1ff 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -88,6 +88,7 @@ module ice_comp_nuopc integer :: nthrds ! 
Number of threads to use in this component integer :: dbug = 0 + logical :: profile_memory = .false. integer , parameter :: debug_import = 0 ! internal debug level integer , parameter :: debug_export = 0 ! internal debug level character(*), parameter :: modName = "(ice_comp_nuopc)" @@ -157,6 +158,10 @@ subroutine InitializeP0(gcomp, importState, exportState, clock, rc) type(ESMF_State) :: importState, exportState type(ESMF_Clock) :: clock integer, intent(out) :: rc + + logical :: isPresent, isSet + character(len=64) :: value + character(len=char_len_long) :: logmsg !-------------------------------- rc = ESMF_SUCCESS @@ -166,6 +171,14 @@ subroutine InitializeP0(gcomp, importState, exportState, clock, rc) acceptStringList=(/"IPDv01p"/), rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return + profile_memory = .false. + call NUOPC_CompAttributeGet(gcomp, name="ProfileMemory", value=value, & + isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) profile_memory=(trim(value)=="true") + write(logmsg,*) profile_memory + call ESMF_LogWrite('CICE_cap:ProfileMemory = '//trim(logmsg), ESMF_LOGMSG_INFO) + end subroutine InitializeP0 !=============================================================================== @@ -902,6 +915,8 @@ subroutine ModelAdvance(gcomp, rc) !-------------------------------- rc = ESMF_SUCCESS + if(profile_memory) call ESMF_VMLogMemInfo("Entering CICE Model_ADVANCE: ") + if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) ! query the Component for its clock, importState and exportState @@ -1049,7 +1064,9 @@ subroutine ModelAdvance(gcomp, rc) ! Advance cice and timestep update !-------------------------------- + if(profile_memory) call ESMF_VMLogMemInfo("Entering CICE_Run : ") call CICE_Run() + if(profile_memory) call ESMF_VMLogMemInfo("Leaving CICE_Run : ") !-------------------------------- ! 
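!--------------------------------
! Note: the memory profiling added above is a simple bracketing
! pattern: when the ProfileMemory component attribute is "true",
! the cap logs memory usage on entry and exit of the expensive
! phases via the standard ESMF call ESMF_VMLogMemInfo.  The
! essential shape, taken from the code above, is:
!
!   if (profile_memory) call ESMF_VMLogMemInfo("Entering CICE_Run : ")
!   call CICE_Run()
!   if (profile_memory) call ESMF_VMLogMemInfo("Leaving CICE_Run : ")
!
! so the cost is a single logical test when profiling is disabled.
!--------------------------------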
Create export state @@ -1110,6 +1127,8 @@ subroutine ModelAdvance(gcomp, rc) if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) + if(profile_memory) call ESMF_VMLogMemInfo("Leaving CICE Model_ADVANCE: ") + end subroutine ModelAdvance !=============================================================================== From a1b3375b0fd931ba9082d4ad809d7fedaea82d66 Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Tue, 31 Aug 2021 14:07:50 -0400 Subject: [PATCH 52/90] update icepack --- icepack | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/icepack b/icepack index 31a575dea..29ee0cefc 160000 --- a/icepack +++ b/icepack @@ -1 +1 @@ -Subproject commit 31a575dea3fdb9f94f548255fd2790d538d3f67a +Subproject commit 29ee0cefc35b6d6b00f3f0fd11cb9a1877f20fa6 From 5cb78cdba55029ba5a1755ee76c86a0a7158fc13 Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Wed, 15 Sep 2021 09:15:42 -0400 Subject: [PATCH 53/90] fix rhoa when lowest_temp is 0.0 * provide default value for rhoa when imported temp_height_lowest (Tair) is 0.0 * resolves seg fault when frac_grid=false and do_ca=true --- cicecore/drivers/nuopc/cmeps/ice_import_export.F90 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 index 62ff2727d..f8627d690 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 @@ -582,7 +582,7 @@ subroutine ice_import( importState, rc ) rhoa(i,j,iblk) = inst_pres_height_lowest / & (287.058_ESMF_KIND_R8*(1._ESMF_KIND_R8+0.608_ESMF_KIND_R8*Qa(i,j,iblk))*Tair(i,j,iblk)) else - rhoa(i,j,iblk) = 0._ESMF_KIND_R8 + rhoa(i,j,iblk) = 1.2_ESMF_KIND_R8 endif end do !i end do !j From d0a45a2f7d6ea70442571f17058ccc272083179b Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Thu, 16 Sep 2021 07:44:22 -0400 Subject: [PATCH 54/90] update icepack submodule --- .gitmodules | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/.gitmodules b/.gitmodules index 472a87b2e..8a773d230 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,5 +1,3 @@ [submodule "icepack"] path = icepack - #url = https://github.com/NOAA-EMC/Icepack - url = https://github.com/DeniseWorthen/Icepack - branch = feature/updcice + url = https://github.com/NOAA-EMC/Icepack From 2540695698e1a733af9ce74609365faf4cb35d66 Mon Sep 17 00:00:00 2001 From: Denise Worthen Date: Thu, 16 Sep 2021 08:28:30 -0400 Subject: [PATCH 55/90] Update CICE for latest Consortium master (#38) * Implement advanced snow physics in icepack and CICE * Fix time-stamping of CICE history files * Fix CICE history file precision --- .../cicedynB/analysis/ice_diagnostics.F90 | 85 +- .../cicedynB/analysis/ice_diagnostics_bgc.F90 | 15 +- cicecore/cicedynB/analysis/ice_history.F90 | 226 +- .../cicedynB/analysis/ice_history_fsd.F90 | 2 +- .../cicedynB/analysis/ice_history_pond.F90 | 8 +- .../cicedynB/analysis/ice_history_shared.F90 | 99 +- .../cicedynB/analysis/ice_history_snow.F90 | 430 ++ cicecore/cicedynB/dynamics/ice_dyn_eap.F90 | 12 +- cicecore/cicedynB/dynamics/ice_dyn_evp.F90 | 227 +- cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 | 3906 ++++++++--------- cicecore/cicedynB/dynamics/ice_dyn_shared.F90 | 47 +- cicecore/cicedynB/dynamics/ice_dyn_vp.F90 | 44 +- .../dynamics/ice_transport_driver.F90 | 25 +- cicecore/cicedynB/general/ice_flux.F90 | 7 + cicecore/cicedynB/general/ice_forcing.F90 | 209 +- cicecore/cicedynB/general/ice_init.F90 | 304 +- 
cicecore/cicedynB/general/ice_step_mod.F90 | 220 +- .../infrastructure/comm/mpi/ice_boundary.F90 | 133 +- .../comm/mpi/ice_gather_scatter.F90 | 138 +- .../comm/serial/ice_boundary.F90 | 133 +- .../comm/serial/ice_gather_scatter.F90 | 40 +- .../cicedynB/infrastructure/ice_domain.F90 | 2 +- cicecore/cicedynB/infrastructure/ice_grid.F90 | 54 +- .../infrastructure/ice_read_write.F90 | 765 +++- .../io/io_binary/ice_restart.F90 | 61 +- .../io/io_netcdf/ice_history_write.F90 | 357 +- .../io/io_netcdf/ice_restart.F90 | 15 +- .../io/io_pio2/ice_history_write.F90 | 448 +- .../infrastructure/io/io_pio2/ice_pio.F90 | 66 +- .../infrastructure/io/io_pio2/ice_restart.F90 | 23 +- .../drivers/nuopc/cmeps/ice_import_export.F90 | 2 +- .../drivers/standalone/cice/CICE_InitMod.F90 | 52 +- .../drivers/standalone/cice/CICE_RunMod.F90 | 27 +- cicecore/drivers/unittest/bcstchk/bcstchk.F90 | 32 +- cicecore/drivers/unittest/calchk/calchk.F90 | 33 +- .../unittest/helloworld/helloworld.F90 | 5 +- cicecore/drivers/unittest/sumchk/sumchk.F90 | 7 +- cicecore/shared/ice_arrays_column.F90 | 11 + cicecore/shared/ice_calendar.F90 | 33 +- cicecore/shared/ice_fileunits.F90 | 6 + cicecore/shared/ice_init_column.F90 | 77 +- cicecore/shared/ice_restart_column.F90 | 91 +- cicecore/version.txt | 2 +- configuration/scripts/cice.batch.csh | 17 + configuration/scripts/cice.launch.csh | 6 + configuration/scripts/cice.run.setup.csh | 2 +- configuration/scripts/ice_in | 41 +- .../scripts/machines/Macros.gaea_intel | 56 + .../scripts/machines/Macros.onyx_cray | 2 +- .../scripts/machines/Macros.onyx_gnu | 2 +- configuration/scripts/machines/env.gaea_intel | 34 + configuration/scripts/machines/env.onyx_cray | 13 +- configuration/scripts/machines/env.onyx_gnu | 13 +- configuration/scripts/machines/env.onyx_intel | 13 +- configuration/scripts/options/set_nml.alt04 | 2 +- configuration/scripts/options/set_nml.evp1d | 1 + configuration/scripts/options/set_nml.gx1prod | 4 +- .../scripts/options/set_nml.gx1prod15 | 19 + .../scripts/options/set_nml.histinst | 1 + configuration/scripts/options/set_nml.kevp102 | 1 - configuration/scripts/options/set_nml.qc | 10 +- .../scripts/options/set_nml.run10year | 7 + .../scripts/options/set_nml.snw30percent | 5 + .../scripts/options/set_nml.snwITDrdg | 10 + .../scripts/options/set_nml.snwgrain | 15 + configuration/scripts/tests/QC/cice.t-test.py | 9 + configuration/scripts/tests/base_suite.ts | 9 +- configuration/scripts/tests/comparelog.csh | 4 +- configuration/scripts/tests/io_suite.ts | 6 + configuration/scripts/tests/prod_suite.ts | 4 + configuration/scripts/tests/reprosum_suite.ts | 1 + .../scripts/tests/test_unittest.script | 27 +- doc/source/cice_index.rst | 34 +- doc/source/conf.py | 4 +- doc/source/developer_guide/dg_driver.rst | 11 +- doc/source/developer_guide/dg_dynamics.rst | 38 +- doc/source/developer_guide/dg_forcing.rst | 2 +- doc/source/science_guide/sg_dynamics.rst | 171 +- doc/source/science_guide/sg_tracers.rst | 6 +- doc/source/user_guide/figures/CICE_Bgrid.png | Bin 0 -> 53070 bytes doc/source/user_guide/ug_case_settings.rst | 39 +- doc/source/user_guide/ug_implementation.rst | 39 +- doc/source/user_guide/ug_testing.rst | 9 + doc/source/user_guide/ug_troubleshooting.rst | 3 - icepack | 2 +- 85 files changed, 5657 insertions(+), 3514 deletions(-) create mode 100644 cicecore/cicedynB/analysis/ice_history_snow.F90 mode change 100644 => 100755 cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 mode change 100644 => 100755 cicecore/cicedynB/dynamics/ice_dyn_shared.F90 create mode 100644 
configuration/scripts/machines/Macros.gaea_intel create mode 100755 configuration/scripts/machines/env.gaea_intel create mode 100644 configuration/scripts/options/set_nml.evp1d create mode 100644 configuration/scripts/options/set_nml.gx1prod15 create mode 100644 configuration/scripts/options/set_nml.histinst delete mode 100644 configuration/scripts/options/set_nml.kevp102 create mode 100644 configuration/scripts/options/set_nml.run10year create mode 100644 configuration/scripts/options/set_nml.snw30percent create mode 100644 configuration/scripts/options/set_nml.snwITDrdg create mode 100644 configuration/scripts/options/set_nml.snwgrain create mode 100644 configuration/scripts/tests/prod_suite.ts create mode 100755 doc/source/user_guide/figures/CICE_Bgrid.png diff --git a/cicecore/cicedynB/analysis/ice_diagnostics.F90 b/cicecore/cicedynB/analysis/ice_diagnostics.F90 index 6b9b32301..d4e7066fb 100644 --- a/cicecore/cicedynB/analysis/ice_diagnostics.F90 +++ b/cicecore/cicedynB/analysis/ice_diagnostics.F90 @@ -14,6 +14,7 @@ module ice_diagnostics use ice_communicate, only: my_task, master_task use ice_constants, only: c0, c1 use ice_calendar, only: istep1 + use ice_domain_size, only: nslyr use ice_fileunits, only: nu_diag use ice_fileunits, only: flush_fileunit use ice_exit, only: abort_ice @@ -142,15 +143,19 @@ subroutine runtime_diags (dt) i, j, k, n, iblk, nc, & ktherm, & nt_tsfc, nt_aero, nt_fbri, nt_apnd, nt_hpnd, nt_fsd, & - nt_isosno, nt_isoice + nt_isosno, nt_isoice, nt_rsnw, nt_rhos, nt_smice, nt_smliq logical (kind=log_kind) :: & - tr_pond_topo, tr_brine, tr_iso, tr_aero, calc_Tsfc, tr_fsd + tr_pond_topo, tr_brine, tr_iso, tr_aero, calc_Tsfc, tr_fsd, & + tr_snow, snwgrain real (kind=dbl_kind) :: & rhow, rhos, rhoi, puny, awtvdr, awtidr, awtvdf, awtidf, & rhofresh, lfresh, lvap, ice_ref_salinity, Tffresh + character (len=char_len) :: & + snwredist + ! 
hemispheric state quantities real (kind=dbl_kind) :: & umaxn, hmaxn, shmaxn, arean, snwmxn, extentn, shmaxnt, & @@ -190,7 +195,8 @@ subroutine runtime_diags (dt) pTsfc, pevap, pfswabs, pflwout, pflat, pfsens, & pfsurf, pfcondtop, psst, psss, pTf, hiavg, hsavg, hbravg, & pfhocn, psalt, fsdavg, & - pmeltt, pmeltb, pmeltl, psnoice, pdsnow, pfrazil, pcongel + pmeltt, pmeltb, pmeltl, psnoice, pdsnow, pfrazil, pcongel, & + prsnwavg, prhosavg, psmicetot, psmliqtot, psmtot real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & work1, work2 @@ -199,15 +205,19 @@ subroutine runtime_diags (dt) call icepack_query_parameters(ktherm_out=ktherm, calc_Tsfc_out=calc_Tsfc) call icepack_query_tracer_flags(tr_brine_out=tr_brine, tr_aero_out=tr_aero, & - tr_pond_topo_out=tr_pond_topo, tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) + tr_pond_topo_out=tr_pond_topo, tr_fsd_out=tr_fsd, tr_iso_out=tr_iso, & + tr_snow_out=tr_snow) call icepack_query_tracer_indices(nt_fbri_out=nt_fbri, nt_Tsfc_out=nt_Tsfc, & nt_aero_out=nt_aero, nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, & - nt_fsd_out=nt_fsd,nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) + nt_fsd_out=nt_fsd,nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice, & + nt_rsnw_out=nt_rsnw, nt_rhos_out=nt_rhos, & + nt_smice_out=nt_smice, nt_smliq_out=nt_smliq) call icepack_query_parameters(Tffresh_out=Tffresh, rhos_out=rhos, & rhow_out=rhow, rhoi_out=rhoi, puny_out=puny, & awtvdr_out=awtvdr, awtidr_out=awtidr, awtvdf_out=awtvdf, awtidf_out=awtidf, & rhofresh_out=rhofresh, lfresh_out=lfresh, lvap_out=lvap, & - ice_ref_salinity_out=ice_ref_salinity) + ice_ref_salinity_out=ice_ref_salinity,snwredist_out=snwredist, & + snwgrain_out=snwgrain) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -825,6 +835,27 @@ subroutine runtime_diags (dt) enddo endif endif + if (tr_snow) then ! snow tracer quantities + prsnwavg (n) = c0 ! avg snow grain radius + prhosavg (n) = c0 ! avg snow density + psmicetot(n) = c0 ! total mass of ice in snow (kg/m2) + psmliqtot(n) = c0 ! total mass of liquid in snow (kg/m2) + psmtot (n) = c0 ! total mass of snow volume (kg/m2) + if (vsno(i,j,iblk) > c0) then + do k = 1, nslyr + prsnwavg (n) = prsnwavg (n) + trcr(i,j,nt_rsnw +k-1,iblk) ! snow grain radius + prhosavg (n) = prhosavg (n) + trcr(i,j,nt_rhos +k-1,iblk) ! compacted snow density + psmicetot(n) = psmicetot(n) + trcr(i,j,nt_smice+k-1,iblk) * vsno(i,j,iblk) + psmliqtot(n) = psmliqtot(n) + trcr(i,j,nt_smliq+k-1,iblk) * vsno(i,j,iblk) + end do + endif + psmtot (n) = rhos * vsno(i,j,iblk) ! mass of ice in standard density snow + prsnwavg (n) = prsnwavg (n) / real(nslyr,kind=dbl_kind) ! snow grain radius + prhosavg (n) = prhosavg (n) / real(nslyr,kind=dbl_kind) ! compacted snow density + psmicetot(n) = psmicetot(n) / real(nslyr,kind=dbl_kind) ! mass of ice in snow + psmliqtot(n) = psmliqtot(n) / real(nslyr,kind=dbl_kind) ! mass of liquid in snow + end if + psalt(n) = c0 if (vice(i,j,iblk) /= c0) psalt(n) = work2(i,j,iblk)/vice(i,j,iblk) pTsfc(n) = trcr(i,j,nt_Tsfc,iblk) ! ice/snow sfc temperature pevap(n) = evap(i,j,iblk)*dt/rhoi ! 
sublimation/condensation @@ -876,6 +907,11 @@ subroutine runtime_diags (dt) call broadcast_scalar(pmeltl (n), pmloc(n)) call broadcast_scalar(psnoice (n), pmloc(n)) call broadcast_scalar(pdsnow (n), pmloc(n)) + call broadcast_scalar(psmtot (n), pmloc(n)) + call broadcast_scalar(prsnwavg (n), pmloc(n)) + call broadcast_scalar(prhosavg (n), pmloc(n)) + call broadcast_scalar(psmicetot(n), pmloc(n)) + call broadcast_scalar(psmliqtot(n), pmloc(n)) call broadcast_scalar(pfrazil (n), pmloc(n)) call broadcast_scalar(pcongel (n), pmloc(n)) call broadcast_scalar(pdhi (n), pmloc(n)) @@ -1059,6 +1095,26 @@ subroutine runtime_diags (dt) write(nu_diag,900) 'effective dhi (m) = ',pdhi(1),pdhi(2) write(nu_diag,900) 'effective dhs (m) = ',pdhs(1),pdhs(2) write(nu_diag,900) 'intnl enrgy chng(W/m^2)= ',pde (1),pde (2) + + if (tr_snow) then + if (trim(snwredist) /= 'none') then + write(nu_diag,900) 'avg snow density(kg/m3)= ',prhosavg(1) & + ,prhosavg(2) + endif + if (snwgrain) then + write(nu_diag,900) 'avg snow grain radius = ',prsnwavg(1) & + ,prsnwavg(2) + write(nu_diag,900) 'mass ice in snow(kg/m2)= ',psmicetot(1) & + ,psmicetot(2) + write(nu_diag,900) 'mass liq in snow(kg/m2)= ',psmliqtot(1) & + ,psmliqtot(2) + write(nu_diag,900) 'mass std snow (kg/m2)= ',psmtot(1) & + ,psmtot(2) + write(nu_diag,900) 'max ice+liq (kg/m2)= ',rhow * hsavg(1) & + ,rhow * hsavg(2) + endif + endif + write(nu_diag,*) '----------ocn----------' write(nu_diag,900) 'sst (C) = ',psst(1),psst(2) write(nu_diag,900) 'sss (ppt) = ',psss(1),psss(2) @@ -1596,19 +1652,21 @@ subroutine print_state(plabel,i,j,iblk) rad_to_deg, puny, rhoi, lfresh, rhos, cp_ice integer (kind=int_kind) :: n, k, nt_Tsfc, nt_qice, nt_qsno, nt_fsd, & - nt_isosno, nt_isoice, nt_sice + nt_isosno, nt_isoice, nt_sice, nt_smice, nt_smliq - logical (kind=log_kind) :: tr_fsd, tr_iso + logical (kind=log_kind) :: tr_fsd, tr_iso, tr_snow type (block) :: & this_block ! block information for current block character(len=*), parameter :: subname = '(print_state)' - call icepack_query_tracer_flags(tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) + call icepack_query_tracer_flags(tr_fsd_out=tr_fsd, tr_iso_out=tr_iso, & + tr_snow_out=tr_snow) call icepack_query_tracer_indices(nt_Tsfc_out=nt_Tsfc, nt_qice_out=nt_qice, & nt_qsno_out=nt_qsno, nt_sice_out=nt_sice, nt_fsd_out=nt_fsd, & - nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) + nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice, & + nt_smice_out=nt_smice, nt_smliq_out=nt_smliq) call icepack_query_parameters( & rad_to_deg_out=rad_to_deg, puny_out=puny, rhoi_out=rhoi, lfresh_out=lfresh, & rhos_out=rhos, cp_ice_out=cp_ice) @@ -1638,8 +1696,11 @@ subroutine print_state(plabel,i,j,iblk) endif write(nu_diag,*) 'Tsfcn',trcrn(i,j,nt_Tsfc,n,iblk) if (tr_fsd) write(nu_diag,*) 'afsdn',trcrn(i,j,nt_fsd,n,iblk) ! fsd cat 1 -! if (tr_iso) write(nu_diag,*) 'isosno',trcrn(i,j,nt_isosno,n,iblk) ! isotopes in snow -! if (tr_iso) write(nu_diag,*) 'isoice',trcrn(i,j,nt_isoice,n,iblk) ! isotopes in ice +! layer 1 diagnostics +! if (tr_iso) write(nu_diag,*) 'isosno',trcrn(i,j,nt_isosno,n,iblk) ! isotopes in snow +! if (tr_iso) write(nu_diag,*) 'isoice',trcrn(i,j,nt_isoice,n,iblk) ! isotopes in ice +! if (tr_snow) write(nu_diag,*) 'smice', trcrn(i,j,nt_smice, n,iblk) ! ice mass in snow +! if (tr_snow) write(nu_diag,*) 'smliq', trcrn(i,j,nt_smliq, n,iblk) ! liquid mass in snow write(nu_diag,*) ' ' ! 
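!-----------------------------------------------------------------
! Note: the snow diagnostics above reduce the nslyr vertical snow
! layers of each tracer to a single per-cell number: plain layer
! means for grain radius (nt_rsnw) and density (nt_rhos), and layer
! means of tracer*vsno for the ice and liquid masses in kg/m2.
! When snow is present (vsno > 0), the grain-radius average is
! equivalent to the array expression:
!
!   prsnwavg(n) = SUM( trcr(i,j,nt_rsnw:nt_rsnw+nslyr-1,iblk) ) &
!               / real(nslyr,kind=dbl_kind)
!-----------------------------------------------------------------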
dynamics (transport and/or ridging) causes the floe size distribution to become non-normal diff --git a/cicecore/cicedynB/analysis/ice_diagnostics_bgc.F90 b/cicecore/cicedynB/analysis/ice_diagnostics_bgc.F90 index fa965dfe0..74485a5e2 100644 --- a/cicecore/cicedynB/analysis/ice_diagnostics_bgc.F90 +++ b/cicecore/cicedynB/analysis/ice_diagnostics_bgc.F90 @@ -937,19 +937,18 @@ subroutine zsal_diags enddo if (aice(i,j,iblk) > c0) & psice_rho(n) = psice_rho(n)/aice(i,j,iblk) - if (tr_brine .and. aice(i,j,iblk) > c0) & + if (tr_brine .and. aice(i,j,iblk) > c0) then phinS(n) = trcr(i,j,nt_fbri,iblk)*vice(i,j,iblk)/aice(i,j,iblk) - - if (aicen(i,j,1,iblk)> c0) then - if (tr_brine) phinS1(n) = trcrn(i,j,nt_fbri,1,iblk) & - * vicen(i,j,1,iblk)/aicen(i,j,1,iblk) + phbrn(n) = (c1 - rhosi/rhow)*vice(i,j,iblk)/aice(i,j,iblk) & + - rhos/rhow *vsno(i,j,iblk)/aice(i,j,iblk) + endif + if (tr_brine .and. aicen(i,j,1,iblk)> c0) then + phinS1(n) = trcrn(i,j,nt_fbri,1,iblk) & + * vicen(i,j,1,iblk)/aicen(i,j,1,iblk) pdh_top1(n) = dhbr_top(i,j,1,iblk) pdh_bot1(n) = dhbr_bot(i,j,1,iblk) pdarcy_V1(n) = darcy_V(i,j,1,iblk) endif - if (tr_brine .AND. aice(i,j,iblk) > c0) & - phbrn(n) = (c1 - rhosi/rhow)*vice(i,j,iblk)/aice(i,j,iblk) & - - rhos/rhow *vsno(i,j,iblk)/aice(i,j,iblk) do k = 1, nblyr+1 pbTiz(n,k) = c0 piDin(n,k) = c0 diff --git a/cicecore/cicedynB/analysis/ice_history.F90 b/cicecore/cicedynB/analysis/ice_history.F90 index f91562449..0ecc2ee5a 100644 --- a/cicecore/cicedynB/analysis/ice_history.F90 +++ b/cicecore/cicedynB/analysis/ice_history.F90 @@ -32,7 +32,7 @@ module ice_history use ice_kinds_mod use ice_communicate, only: my_task, master_task use ice_constants, only: c0, c1, c2, c100, c360, c180, & - p001, p25, p5, mps_to_cmpdy, kg_to_g, spval + p001, p25, p5, mps_to_cmpdy, kg_to_g, spval_dbl use ice_fileunits, only: nu_nml, nml_filename, nu_diag, & get_fileunit, release_fileunit, flush_fileunit use ice_exit, only: abort_ice @@ -67,10 +67,11 @@ subroutine init_hist (dt) histfreq_n, nstreams use ice_domain_size, only: max_blocks, max_nstrm, nilyr, nslyr, nblyr, ncat, nfsd use ice_dyn_shared, only: kdyn - use ice_flux, only: mlt_onset, frz_onset, albcnt + use ice_flux, only: mlt_onset, frz_onset, albcnt, snwcnt use ice_history_shared ! 
everything use ice_history_mechred, only: init_hist_mechred_2D, init_hist_mechred_3Dc use ice_history_pond, only: init_hist_pond_2D, init_hist_pond_3Dc + use ice_history_snow, only: init_hist_snow_2D, init_hist_snow_3Dc use ice_history_bgc, only:init_hist_bgc_2D, init_hist_bgc_3Dc, & init_hist_bgc_3Db, init_hist_bgc_3Da use ice_history_drag, only: init_hist_drag_2D @@ -86,7 +87,7 @@ subroutine init_hist (dt) real (kind=dbl_kind) :: rhofresh, Tffresh, secday, rad_to_deg logical (kind=log_kind) :: formdrag logical (kind=log_kind) :: tr_iage, tr_FY, tr_lvl, tr_pond, tr_aero, tr_brine - logical (kind=log_kind) :: tr_fsd + logical (kind=log_kind) :: tr_fsd, tr_snow logical (kind=log_kind) :: skl_bgc, solve_zsal, solve_zbgc, z_tracers integer (kind=int_kind) :: n, ns, ns1, ns2 integer (kind=int_kind), dimension(max_nstrm) :: & @@ -115,7 +116,7 @@ subroutine init_hist (dt) solve_zsal_out=solve_zsal, solve_zbgc_out=solve_zbgc, z_tracers_out=z_tracers) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & tr_lvl_out=tr_lvl, tr_pond_out=tr_pond, tr_aero_out=tr_aero, & - tr_brine_out=tr_brine, tr_fsd_out=tr_fsd) + tr_brine_out=tr_brine, tr_fsd_out=tr_fsd, tr_snow_out=tr_snow) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -1426,6 +1427,9 @@ subroutine init_hist (dt) ! floe size distribution call init_hist_fsd_2D + ! advanced snow physics + call init_hist_snow_2D (dt) + !----------------------------------------------------------------- ! 3D (category) variables looped separately for ordering !----------------------------------------------------------------- @@ -1501,6 +1505,9 @@ subroutine init_hist (dt) ! biogeochemistry call init_hist_bgc_3Dc + ! advanced snow physics + call init_hist_snow_3Dc + !----------------------------------------------------------------- ! 3D (vertical) variables must be looped separately !----------------------------------------------------------------- @@ -1688,6 +1695,7 @@ subroutine init_hist (dt) if (allocated(a4Df)) a4Df(:,:,:,:,:,:) = c0 avgct(:) = c0 albcnt(:,:,:,:) = c0 + snwcnt(:,:,:,:) = c0 if (restart .and. yday >= c2) then ! restarting midyear gives erroneous onset dates @@ -1726,7 +1734,7 @@ subroutine accum_hist (dt) fhocn, fhocn_ai, uatm, vatm, fbot, Tbot, Tsnice, & fswthru_ai, strairx, strairy, strtltx, strtlty, strintx, strinty, & taubx, tauby, strocnx, strocny, fm, daidtt, dvidtt, daidtd, dvidtd, fsurf, & - fcondtop, fcondbot, fsurfn, fcondtopn, flatn, fsensn, albcnt, & + fcondtop, fcondbot, fsurfn, fcondtopn, flatn, fsensn, albcnt, snwcnt, & stressp_1, stressm_1, stress12_1, & stressp_2, & stressp_3, & @@ -1739,6 +1747,8 @@ subroutine accum_hist (dt) use ice_history_bgc, only: accum_hist_bgc use ice_history_mechred, only: accum_hist_mechred use ice_history_pond, only: accum_hist_pond + use ice_history_snow, only: accum_hist_snow, & + f_rhos_cmp, f_rhos_cnt, n_rhos_cmp, n_rhos_cnt use ice_history_drag, only: accum_hist_drag use icepack_intfc, only: icepack_mushy_density_brine, icepack_mushy_liquid_fraction use icepack_intfc, only: icepack_mushy_temperature_mush @@ -1758,6 +1768,7 @@ subroutine accum_hist (dt) nstrm ! nstreams (1 if writing initial condition) real (kind=dbl_kind) :: & + timedbl , & ! temporary dbl for time bounds ravgct , & ! 1/avgct ravgctz ! 
1/avgct @@ -1775,7 +1786,7 @@ subroutine accum_hist (dt) real (kind=dbl_kind) :: Tffresh, rhoi, rhos, rhow, ice_ref_salinity real (kind=dbl_kind) :: rho_ice, rho_ocn, Tice, Sbr, phi, rhob, dfresh, dfsalt logical (kind=log_kind) :: formdrag, skl_bgc - logical (kind=log_kind) :: tr_pond, tr_aero, tr_brine + logical (kind=log_kind) :: tr_pond, tr_aero, tr_brine, tr_snow integer (kind=int_kind) :: ktherm integer (kind=int_kind) :: nt_sice, nt_qice, nt_qsno, nt_iage, nt_FY, nt_Tsfc, & nt_alvl, nt_vlvl @@ -1791,7 +1802,7 @@ subroutine accum_hist (dt) rhow_out=rhow, ice_ref_salinity_out=ice_ref_salinity) call icepack_query_parameters(formdrag_out=formdrag, skl_bgc_out=skl_bgc, ktherm_out=ktherm) call icepack_query_tracer_flags(tr_pond_out=tr_pond, tr_aero_out=tr_aero, & - tr_brine_out=tr_brine) + tr_brine_out=tr_brine, tr_snow_out=tr_snow) call icepack_query_tracer_indices(nt_sice_out=nt_sice, nt_qice_out=nt_qice, & nt_qsno_out=nt_qsno, nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_Tsfc_out=nt_Tsfc, & nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl) @@ -1814,7 +1825,7 @@ subroutine accum_hist (dt) n4Dfcum = n4Dscum + num_avail_hist_fields_4Df ! should equal num_avail_hist_fields_tot do ns = 1,nstreams - if (.not. hist_avg .or. histfreq(ns) == '1') then ! write snapshots + if (.not. hist_avg) then ! write snapshots do n = 1,n2D if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) & a2D(:,:,n,:) = c0 @@ -1862,11 +1873,10 @@ subroutine accum_hist (dt) avgct(ns) = c1 else ! write averages over time histfreq avgct(ns) = avgct(ns) + c1 -! if (avgct(ns) == c1) time_beg(ns) = (time-dt)/int(secday) - if (avgct(ns) == c1) then - time_beg(ns) = (timesecs-dt)/int(secday) - time_beg(ns) = real(time_beg(ns),kind=real_kind) - endif + endif + if (avgct(ns) == c1) then + timedbl = (timesecs-dt)/(secday) + time_beg(ns) = real(timedbl,kind=real_kind) endif enddo @@ -3040,6 +3050,9 @@ subroutine accum_hist (dt) ! floe size distribution call accum_hist_fsd (iblk) + ! advanced snow physics + call accum_hist_snow (iblk) + enddo ! iblk !$OMP END PARALLEL DO @@ -3105,7 +3118,7 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (.not. tmask(i,j,iblk)) then ! mask out land points - a2D(i,j,n,iblk) = spval + a2D(i,j,n,iblk) = spval_dbl else ! convert units a2D(i,j,n,iblk) = avail_hist_fields(n)%cona*a2D(i,j,n,iblk) & * ravgct + avail_hist_fields(n)%conb @@ -3122,7 +3135,7 @@ subroutine accum_hist (dt) a2D(i,j,n_sithick(ns),iblk) = & a2D(i,j,n_sithick(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_sithick(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_sithick(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3135,7 +3148,7 @@ subroutine accum_hist (dt) a2D(i,j,n_siage(ns),iblk) = & a2D(i,j,n_siage(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_siage(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_siage(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3148,7 +3161,7 @@ subroutine accum_hist (dt) a2D(i,j,n_sisnthick(ns),iblk) = & a2D(i,j,n_sisnthick(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_sisnthick(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_sisnthick(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3161,7 +3174,7 @@ subroutine accum_hist (dt) a2D(i,j,n_sitemptop(ns),iblk) = & a2D(i,j,n_sitemptop(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_sitemptop(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_sitemptop(ns),iblk) = spval_dbl enddo ! i enddo ! 
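
! The time-bounds change above routes the calculation through the new
! double-precision temporary timedbl and divides by secday directly, so
! the only precision loss is the final conversion to real_kind. A small
! standalone version of that pattern (variable names follow the diff,
! values are made up):

      program time_bounds_demo
        implicit none
        integer, parameter :: dbl_kind = kind(1.d0), real_kind = kind(1.0)
        real (kind=dbl_kind) :: timesecs, dt, secday, timedbl
        real (kind=real_kind) :: time_beg
        secday   = 86400.0_dbl_kind
        dt       = 3600.0_dbl_kind
        timesecs = 400.0_dbl_kind*secday + dt
        timedbl  = (timesecs - dt)/secday        ! elapsed days, full precision
        time_beg = real(timedbl, kind=real_kind) ! single conversion at the end
        print *, timedbl, time_beg
      end program time_bounds_demo
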
j endif @@ -3174,7 +3187,7 @@ subroutine accum_hist (dt) a2D(i,j,n_sitempsnic(ns),iblk) = & a2D(i,j,n_sitempsnic(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_sitempsnic(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_sitempsnic(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3187,7 +3200,7 @@ subroutine accum_hist (dt) a2D(i,j,n_sitempbot(ns),iblk) = & a2D(i,j,n_sitempbot(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_sitempbot(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_sitempbot(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3200,7 +3213,7 @@ subroutine accum_hist (dt) a2D(i,j,n_siu(ns),iblk) = & a2D(i,j,n_siu(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_siu(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_siu(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3213,7 +3226,7 @@ subroutine accum_hist (dt) a2D(i,j,n_siv(ns),iblk) = & a2D(i,j,n_siv(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_siv(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_siv(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3226,7 +3239,7 @@ subroutine accum_hist (dt) a2D(i,j,n_sistrxdtop(ns),iblk) = & a2D(i,j,n_sistrxdtop(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_sistrxdtop(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_sistrxdtop(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3239,7 +3252,7 @@ subroutine accum_hist (dt) a2D(i,j,n_sistrydtop(ns),iblk) = & a2D(i,j,n_sistrydtop(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_sistrydtop(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_sistrydtop(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3252,7 +3265,7 @@ subroutine accum_hist (dt) a2D(i,j,n_sistrxubot(ns),iblk) = & a2D(i,j,n_sistrxubot(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_sistrxubot(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_sistrxubot(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3265,7 +3278,7 @@ subroutine accum_hist (dt) a2D(i,j,n_sistryubot(ns),iblk) = & a2D(i,j,n_sistryubot(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_sistryubot(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_sistryubot(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3278,7 +3291,7 @@ subroutine accum_hist (dt) a2D(i,j,n_sicompstren(ns),iblk) = & a2D(i,j,n_sicompstren(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_sicompstren(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_sicompstren(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3291,7 +3304,7 @@ subroutine accum_hist (dt) a2D(i,j,n_sispeed(ns),iblk) = & a2D(i,j,n_sispeed(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_sispeed(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_sispeed(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3304,8 +3317,8 @@ subroutine accum_hist (dt) a2D(i,j,n_sialb(ns),iblk) = & a2D(i,j,n_sialb(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_sialb(ns),iblk) = spval - if (albcnt(i,j,iblk,ns) <= puny) a2D(i,j,n_sialb(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_sialb(ns),iblk) = spval_dbl + if (albcnt(i,j,iblk,ns) <= puny) a2D(i,j,n_sialb(ns),iblk) = spval_dbl enddo ! i enddo ! 
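
! Two conventions recur in the averaging code above: land points are
! filled with the double-precision sentinel spval_dbl (replacing the
! single-precision spval), and the CMIP "si*" fields are renormalized
! from all-timestep means to ice-present means by undoing the global
! 1/avgct factor and dividing by the ice-present count instead. A toy
! single-point version (all names and values illustrative):

      program ice_present_mean_demo
        implicit none
        double precision, parameter :: spval_dbl = 1.0d30 ! missing value
        logical :: tmask
        double precision :: asum, avgct, ravgct, ravgip, field
        tmask  = .true.      ! .false. would be a land point
        avgct  = 10.d0       ! samples in the averaging window
        ravgct = 1.d0/avgct
        asum   = 4.d0        ! accumulated sum; ice present 4 of 10 steps
        ravgip = 1.d0/4.d0   ! 1/(ice-present sample count)
        field  = asum*ravgct             ! standard all-timestep mean
        field  = field*avgct*ravgip      ! renormalize to ice-present mean
        if (.not. tmask) field = spval_dbl      ! mask out land points
        if (ravgip == 0.d0) field = spval_dbl   ! no ice at all in window
        print *, field                   ! 1.0 = mean over ice-present steps
      end program ice_present_mean_demo
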
j endif @@ -3318,7 +3331,7 @@ subroutine accum_hist (dt) a2D(i,j,n_siflswdtop(ns),iblk) = & a2D(i,j,n_siflswdtop(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_siflswdtop(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_siflswdtop(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3331,7 +3344,7 @@ subroutine accum_hist (dt) a2D(i,j,n_siflswutop(ns),iblk) = & a2D(i,j,n_siflswutop(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_siflswutop(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_siflswutop(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3344,7 +3357,7 @@ subroutine accum_hist (dt) a2D(i,j,n_siflswdbot(ns),iblk) = & a2D(i,j,n_siflswdbot(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_siflswdbot(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_siflswdbot(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3357,7 +3370,7 @@ subroutine accum_hist (dt) a2D(i,j,n_sifllwdtop(ns),iblk) = & a2D(i,j,n_sifllwdtop(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_sifllwdtop(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_sifllwdtop(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3370,7 +3383,7 @@ subroutine accum_hist (dt) a2D(i,j,n_sifllwutop(ns),iblk) = & a2D(i,j,n_sifllwutop(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_sifllwutop(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_sifllwutop(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3383,7 +3396,7 @@ subroutine accum_hist (dt) a2D(i,j,n_siflsenstop(ns),iblk) = & a2D(i,j,n_siflsenstop(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_siflsenstop(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_siflsenstop(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3396,7 +3409,7 @@ subroutine accum_hist (dt) a2D(i,j,n_siflsensupbot(ns),iblk) = & a2D(i,j,n_siflsensupbot(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_siflsensupbot(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_siflsensupbot(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3409,7 +3422,7 @@ subroutine accum_hist (dt) a2D(i,j,n_sifllatstop(ns),iblk) = & a2D(i,j,n_sifllatstop(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_sifllatstop(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_sifllatstop(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3422,7 +3435,7 @@ subroutine accum_hist (dt) a2D(i,j,n_sipr(ns),iblk) = & a2D(i,j,n_sipr(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_sipr(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_sipr(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3435,7 +3448,7 @@ subroutine accum_hist (dt) a2D(i,j,n_sifb(ns),iblk) = & a2D(i,j,n_sifb(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_sifb(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_sifb(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3448,7 +3461,7 @@ subroutine accum_hist (dt) a2D(i,j,n_siflcondtop(ns),iblk) = & a2D(i,j,n_siflcondtop(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_siflcondtop(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_siflcondtop(ns),iblk) = spval_dbl enddo ! i enddo ! 
j endif @@ -3461,7 +3474,7 @@ subroutine accum_hist (dt) a2D(i,j,n_siflcondbot(ns),iblk) = & a2D(i,j,n_siflcondbot(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_siflcondbot(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_siflcondbot(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3474,7 +3487,7 @@ subroutine accum_hist (dt) a2D(i,j,n_siflsaltbot(ns),iblk) = & a2D(i,j,n_siflsaltbot(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_siflsaltbot(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_siflsaltbot(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3487,7 +3500,7 @@ subroutine accum_hist (dt) a2D(i,j,n_siflfwbot(ns),iblk) = & a2D(i,j,n_siflfwbot(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_siflfwbot(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_siflfwbot(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3500,7 +3513,7 @@ subroutine accum_hist (dt) a2D(i,j,n_siflfwdrain(ns),iblk) = & a2D(i,j,n_siflfwdrain(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_siflfwdrain(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_siflfwdrain(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3513,7 +3526,7 @@ subroutine accum_hist (dt) a2D(i,j,n_sidragtop(ns),iblk) = & a2D(i,j,n_sidragtop(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_sidragtop(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_sidragtop(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3526,7 +3539,7 @@ subroutine accum_hist (dt) a2D(i,j,n_sirdgthick(ns),iblk) = & a2D(i,j,n_sirdgthick(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_sirdgthick(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_sirdgthick(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3539,7 +3552,7 @@ subroutine accum_hist (dt) a2D(i,j,n_siforcetiltx(ns),iblk) = & a2D(i,j,n_siforcetiltx(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_siforcetiltx(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_siforcetiltx(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3552,7 +3565,7 @@ subroutine accum_hist (dt) a2D(i,j,n_siforcetilty(ns),iblk) = & a2D(i,j,n_siforcetilty(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_siforcetilty(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_siforcetilty(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3565,7 +3578,7 @@ subroutine accum_hist (dt) a2D(i,j,n_siforcecoriolx(ns),iblk) = & a2D(i,j,n_siforcecoriolx(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_siforcecoriolx(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_siforcecoriolx(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3578,7 +3591,7 @@ subroutine accum_hist (dt) a2D(i,j,n_siforcecorioly(ns),iblk) = & a2D(i,j,n_siforcecorioly(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_siforcecorioly(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_siforcecorioly(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3591,7 +3604,7 @@ subroutine accum_hist (dt) a2D(i,j,n_siforceintstrx(ns),iblk) = & a2D(i,j,n_siforceintstrx(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_siforceintstrx(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_siforceintstrx(ns),iblk) = spval_dbl enddo ! i enddo ! 
j endif @@ -3604,7 +3617,7 @@ subroutine accum_hist (dt) a2D(i,j,n_siforceintstry(ns),iblk) = & a2D(i,j,n_siforceintstry(ns),iblk)*avgct(ns)*ravgip(i,j) endif - if (ravgip(i,j) == c0) a2D(i,j,n_siforceintstry(ns),iblk) = spval + if (ravgip(i,j) == c0) a2D(i,j,n_siforceintstry(ns),iblk) = spval_dbl enddo ! i enddo ! j endif @@ -3669,7 +3682,38 @@ subroutine accum_hist (dt) enddo ! j endif - endif +! snwcnt averaging is not working correctly +! for now, these history fields will have zeroes includes in the averages +! if (avail_hist_fields(n)%vname(1:8) == 'rhos_cmp') then +! do j = jlo, jhi +! do i = ilo, ihi +! if (tmask(i,j,iblk)) then +! ravgctz = c0 +! if (snwcnt(i,j,iblk,ns) > puny) & +! ravgctz = c1/snwcnt(i,j,iblk,ns) +! if (f_rhos_cmp (1:1) /= 'x' .and. n_rhos_cmp(ns) /= 0) & +! a2D(i,j,n_rhos_cmp(ns),iblk) = & +! a2D(i,j,n_rhos_cmp(ns),iblk)*avgct(ns)*ravgctz +! endif +! enddo ! i +! enddo ! j +! endif +! if (avail_hist_fields(n)%vname(1:8) == 'rhos_cnt') then +! do j = jlo, jhi +! do i = ilo, ihi +! if (tmask(i,j,iblk)) then +! ravgctz = c0 +! if (snwcnt(i,j,iblk,ns) > puny) & +! ravgctz = c1/snwcnt(i,j,iblk,ns) +! if (f_rhos_cnt (1:1) /= 'x' .and. n_rhos_cnt(ns) /= 0) & +! a2D(i,j,n_rhos_cnt(ns),iblk) = & +! a2D(i,j,n_rhos_cnt(ns),iblk)*avgct(ns)*ravgctz +! endif +! enddo ! i +! enddo ! j +! endif + + endif ! avail_hist_fields(n)%vhistfreq == histfreq(ns) enddo ! n do n = 1, num_avail_hist_fields_3Dc @@ -3680,7 +3724,7 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (.not. tmask(i,j,iblk)) then ! mask out land points - a3Dc(i,j,k,n,iblk) = spval + a3Dc(i,j,k,n,iblk) = spval_dbl else ! convert units a3Dc(i,j,k,n,iblk) = avail_hist_fields(nn)%cona*a3Dc(i,j,k,n,iblk) & * ravgct + avail_hist_fields(nn)%conb @@ -3729,7 +3773,7 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (.not. tmask(i,j,iblk)) then ! mask out land points - a3Dz(i,j,k,n,iblk) = spval + a3Dz(i,j,k,n,iblk) = spval_dbl else ! convert units a3Dz(i,j,k,n,iblk) = avail_hist_fields(nn)%cona*a3Dz(i,j,k,n,iblk) & * ravgct + avail_hist_fields(nn)%conb @@ -3746,7 +3790,7 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (.not. tmask(i,j,iblk)) then ! mask out land points - a3Db(i,j,k,n,iblk) = spval + a3Db(i,j,k,n,iblk) = spval_dbl else ! convert units a3Db(i,j,k,n,iblk) = avail_hist_fields(nn)%cona*a3Db(i,j,k,n,iblk) & * ravgct + avail_hist_fields(nn)%conb @@ -3764,7 +3808,7 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (.not. tmask(i,j,iblk)) then ! mask out land points - a3Da(i,j,k,n,iblk) = spval + a3Da(i,j,k,n,iblk) = spval_dbl else ! convert units a3Da(i,j,k,n,iblk) = avail_hist_fields(nn)%cona*a3Da(i,j,k,n,iblk) & * ravgct + avail_hist_fields(nn)%conb @@ -3782,7 +3826,7 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (.not. tmask(i,j,iblk)) then ! mask out land points - a3Df(i,j,k,n,iblk) = spval + a3Df(i,j,k,n,iblk) = spval_dbl else ! convert units a3Df(i,j,k,n,iblk) = avail_hist_fields(nn)%cona*a3Df(i,j,k,n,iblk) & * ravgct + avail_hist_fields(nn)%conb @@ -3801,7 +3845,7 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (.not. tmask(i,j,iblk)) then ! mask out land points - a4Di(i,j,k,ic,n,iblk) = spval + a4Di(i,j,k,ic,n,iblk) = spval_dbl else ! convert units a4Di(i,j,k,ic,n,iblk) = avail_hist_fields(nn)%cona*a4Di(i,j,k,ic,n,iblk) & * ravgct + avail_hist_fields(nn)%conb @@ -3821,7 +3865,7 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (.not. tmask(i,j,iblk)) then ! 
mask out land points - a4Ds(i,j,k,ic,n,iblk) = spval + a4Ds(i,j,k,ic,n,iblk) = spval_dbl else ! convert units a4Ds(i,j,k,ic,n,iblk) = avail_hist_fields(nn)%cona*a4Ds(i,j,k,ic,n,iblk) & * ravgct + avail_hist_fields(nn)%conb @@ -3841,7 +3885,7 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (.not. tmask(i,j,iblk)) then ! mask out land points - a4Df(i,j,k,ic,n,iblk) = spval + a4Df(i,j,k,ic,n,iblk) = spval_dbl else ! convert units a4Df(i,j,k,ic,n,iblk) = avail_hist_fields(nn)%cona*a4Df(i,j,k,ic,n,iblk) & * ravgct + avail_hist_fields(nn)%conb @@ -3871,32 +3915,32 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (.not. tmask(i,j,iblk)) then ! mask out land points - if (n_divu (ns) /= 0) a2D(i,j,n_divu(ns), iblk) = spval - if (n_shear (ns) /= 0) a2D(i,j,n_shear(ns), iblk) = spval - if (n_sig1 (ns) /= 0) a2D(i,j,n_sig1(ns), iblk) = spval - if (n_sig2 (ns) /= 0) a2D(i,j,n_sig2(ns), iblk) = spval - if (n_sigP (ns) /= 0) a2D(i,j,n_sigP(ns), iblk) = spval - if (n_sistreave(ns) /= 0) a2D(i,j,n_sistreave(ns),iblk) = spval - if (n_sistremax(ns) /= 0) a2D(i,j,n_sistremax(ns),iblk) = spval - if (n_mlt_onset(ns) /= 0) a2D(i,j,n_mlt_onset(ns),iblk) = spval - if (n_frz_onset(ns) /= 0) a2D(i,j,n_frz_onset(ns),iblk) = spval - if (n_hisnap (ns) /= 0) a2D(i,j,n_hisnap(ns), iblk) = spval - if (n_aisnap (ns) /= 0) a2D(i,j,n_aisnap(ns), iblk) = spval - if (n_trsig (ns) /= 0) a2D(i,j,n_trsig(ns), iblk) = spval - if (n_iage (ns) /= 0) a2D(i,j,n_iage(ns), iblk) = spval - if (n_FY (ns) /= 0) a2D(i,j,n_FY(ns), iblk) = spval - - if (n_a11 (ns) /= 0) a2D(i,j,n_a11(ns), iblk) = spval - if (n_a12 (ns) /= 0) a2D(i,j,n_a12(ns), iblk) = spval - if (n_e11 (ns) /= 0) a2D(i,j,n_e11(ns), iblk) = spval - if (n_e12 (ns) /= 0) a2D(i,j,n_e12(ns), iblk) = spval - if (n_e22 (ns) /= 0) a2D(i,j,n_e22(ns), iblk) = spval - if (n_s11 (ns) /= 0) a2D(i,j,n_s11(ns), iblk) = spval - if (n_s12 (ns) /= 0) a2D(i,j,n_s12(ns), iblk) = spval - if (n_s22 (ns) /= 0) a2D(i,j,n_s22(ns), iblk) = spval - if (n_yieldstress11 (ns) /= 0) a2D(i,j,n_yieldstress11(ns),iblk) = spval - if (n_yieldstress12 (ns) /= 0) a2D(i,j,n_yieldstress12(ns),iblk) = spval - if (n_yieldstress22 (ns) /= 0) a2D(i,j,n_yieldstress22(ns),iblk) = spval + if (n_divu (ns) /= 0) a2D(i,j,n_divu(ns), iblk) = spval_dbl + if (n_shear (ns) /= 0) a2D(i,j,n_shear(ns), iblk) = spval_dbl + if (n_sig1 (ns) /= 0) a2D(i,j,n_sig1(ns), iblk) = spval_dbl + if (n_sig2 (ns) /= 0) a2D(i,j,n_sig2(ns), iblk) = spval_dbl + if (n_sigP (ns) /= 0) a2D(i,j,n_sigP(ns), iblk) = spval_dbl + if (n_sistreave(ns) /= 0) a2D(i,j,n_sistreave(ns),iblk) = spval_dbl + if (n_sistremax(ns) /= 0) a2D(i,j,n_sistremax(ns),iblk) = spval_dbl + if (n_mlt_onset(ns) /= 0) a2D(i,j,n_mlt_onset(ns),iblk) = spval_dbl + if (n_frz_onset(ns) /= 0) a2D(i,j,n_frz_onset(ns),iblk) = spval_dbl + if (n_hisnap (ns) /= 0) a2D(i,j,n_hisnap(ns), iblk) = spval_dbl + if (n_aisnap (ns) /= 0) a2D(i,j,n_aisnap(ns), iblk) = spval_dbl + if (n_trsig (ns) /= 0) a2D(i,j,n_trsig(ns), iblk) = spval_dbl + if (n_iage (ns) /= 0) a2D(i,j,n_iage(ns), iblk) = spval_dbl + if (n_FY (ns) /= 0) a2D(i,j,n_FY(ns), iblk) = spval_dbl + + if (n_a11 (ns) /= 0) a2D(i,j,n_a11(ns), iblk) = spval_dbl + if (n_a12 (ns) /= 0) a2D(i,j,n_a12(ns), iblk) = spval_dbl + if (n_e11 (ns) /= 0) a2D(i,j,n_e11(ns), iblk) = spval_dbl + if (n_e12 (ns) /= 0) a2D(i,j,n_e12(ns), iblk) = spval_dbl + if (n_e22 (ns) /= 0) a2D(i,j,n_e22(ns), iblk) = spval_dbl + if (n_s11 (ns) /= 0) a2D(i,j,n_s11(ns), iblk) = spval_dbl + if (n_s12 (ns) /= 0) a2D(i,j,n_s12(ns), iblk) = 
spval_dbl + if (n_s22 (ns) /= 0) a2D(i,j,n_s22(ns), iblk) = spval_dbl + if (n_yieldstress11 (ns) /= 0) a2D(i,j,n_yieldstress11(ns),iblk) = spval_dbl + if (n_yieldstress12 (ns) /= 0) a2D(i,j,n_yieldstress12(ns),iblk) = spval_dbl + if (n_yieldstress22 (ns) /= 0) a2D(i,j,n_yieldstress22(ns),iblk) = spval_dbl else if (n_divu (ns) /= 0) a2D(i,j,n_divu(ns),iblk) = & divu (i,j,iblk)*avail_hist_fields(n_divu(ns))%cona @@ -3966,8 +4010,8 @@ subroutine accum_hist (dt) enddo ! iblk !$OMP END PARALLEL DO - time_end(ns) = timesecs/int(secday) - time_end(ns) = real(time_end(ns),kind=real_kind) + timedbl = timesecs/secday + time_end(ns) = real(timedbl,kind=real_kind) !--------------------------------------------------------------- ! write file @@ -3992,10 +4036,12 @@ subroutine accum_hist (dt) if (allocated(a4Df)) a4Df(:,:,:,:,:,:) = c0 avgct(:) = c0 albcnt(:,:,:,:) = c0 + snwcnt(:,:,:,:) = c0 write_ic = .false. ! write initial condition once at most else avgct(ns) = c0 albcnt(:,:,:,ns) = c0 + snwcnt(:,:,:,ns) = c0 endif ! if (write_history(ns)) albcnt(:,:,:,ns) = c0 diff --git a/cicecore/cicedynB/analysis/ice_history_fsd.F90 b/cicecore/cicedynB/analysis/ice_history_fsd.F90 index 43afc1e27..7ad81e7d2 100644 --- a/cicecore/cicedynB/analysis/ice_history_fsd.F90 +++ b/cicecore/cicedynB/analysis/ice_history_fsd.F90 @@ -303,7 +303,7 @@ subroutine accum_hist_fsd (iblk) integer (kind=int_kind) :: & i, j, n, k, & ! loop indices - nt_fsd ! ! fsd tracer index + nt_fsd ! fsd tracer index logical (kind=log_kind) :: tr_fsd real (kind=dbl_kind) :: floeshape, puny diff --git a/cicecore/cicedynB/analysis/ice_history_pond.F90 b/cicecore/cicedynB/analysis/ice_history_pond.F90 index de10eb9fb..182865fec 100644 --- a/cicecore/cicedynB/analysis/ice_history_pond.F90 +++ b/cicecore/cicedynB/analysis/ice_history_pond.F90 @@ -75,15 +75,15 @@ subroutine init_hist_pond_2D logical (kind=log_kind) :: tr_pond character(len=*), parameter :: subname = '(init_hist_pond_2D)' - !----------------------------------------------------------------- - ! read namelist - !----------------------------------------------------------------- - call icepack_query_tracer_flags(tr_pond_out=tr_pond) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) + !----------------------------------------------------------------- + ! read namelist + !----------------------------------------------------------------- + call get_fileunit(nu_nml) if (my_task == master_task) then open (nu_nml, file=nml_filename, status='old',iostat=nml_error) diff --git a/cicecore/cicedynB/analysis/ice_history_shared.F90 b/cicecore/cicedynB/analysis/ice_history_shared.F90 index 52d268990..9b58deeec 100644 --- a/cicecore/cicedynB/analysis/ice_history_shared.F90 +++ b/cicecore/cicedynB/analysis/ice_history_shared.F90 @@ -59,7 +59,7 @@ module ice_history_shared !--------------------------------------------------------------- ! Instructions for adding a field: (search for 'example') - ! Here: + ! Here or in ice_history_[process].F90: ! (1) Add to frequency flags (f_) ! (2) Add to namelist (here and also in ice_in) ! (3) Add to index list @@ -672,64 +672,67 @@ subroutine construct_filename(ncfile,suffix,ns) iday = mday isec = msec - dt - if (write_ic) isec = msec ! 
construct filename if (write_ic) then + isec = msec write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & incond_file(1:lenstr(incond_file)),'.',iyear,'-', & - imonth,'-',iday,'-',isec,'.',suffix + imonth,'-',iday,'-',isec,'.',trim(suffix) else - if (hist_avg .and. histfreq(ns) /= '1') then - if (histfreq(ns) == 'h'.or.histfreq(ns) == 'H') then - ! do nothing - elseif (new_year) then - iyear = iyear - 1 - imonth = 12 - iday = daymo(imonth) - elseif (new_month) then - imonth = mmonth - 1 - iday = daymo(imonth) - elseif (new_day) then - iday = iday - 1 - endif - endif - - cstream = '' + if (hist_avg) then + if (histfreq(ns) == '1' .or. histfreq(ns) == 'h'.or.histfreq(ns) == 'H') then + ! do nothing + elseif (new_year) then + iyear = iyear - 1 + imonth = 12 + iday = daymo(imonth) + elseif (new_month) then + imonth = mmonth - 1 + iday = daymo(imonth) + elseif (new_day) then + iday = iday - 1 + endif + endif + + cstream = '' !echmod ! this was implemented for CESM but it breaks post-processing software !echmod ! of other groups (including RASM which uses CESMCOUPLED) !echmod if (ns > 1) write(cstream,'(i1.1)') ns-1 - if (histfreq(ns) == '1') then ! instantaneous, write every dt - write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & - history_file(1:lenstr(history_file))//trim(cstream),'_inst.', & - iyear,'-',imonth,'-',iday,'-',msec,'.',suffix - - elseif (hist_avg) then ! write averaged data - - if (histfreq(ns) == 'd'.or.histfreq(ns) == 'D') then ! daily - write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,a)') & - history_file(1:lenstr(history_file))//trim(cstream), & - '.',iyear,'-',imonth,'-',iday,'.',suffix - elseif (histfreq(ns) == 'h'.or.histfreq(ns) == 'H') then ! hourly - write(ncfile,'(a,a,i2.2,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & - history_file(1:lenstr(history_file))//trim(cstream),'_', & - histfreq_n(ns),'h.',iyear,'-',imonth,'-',iday,'-',msec,'.',suffix - elseif (histfreq(ns) == 'm'.or.histfreq(ns) == 'M') then ! monthly - write(ncfile,'(a,a,i4.4,a,i2.2,a,a)') & - history_file(1:lenstr(history_file))//trim(cstream),'.', & - iyear,'-',imonth,'.',suffix - elseif (histfreq(ns) == 'y'.or.histfreq(ns) == 'Y') then ! yearly - write(ncfile,'(a,a,i4.4,a,a)') & - history_file(1:lenstr(history_file))//trim(cstream),'.', & - iyear,'.',suffix - endif + if (hist_avg) then ! write averaged data + if (histfreq(ns) == '1' .and. histfreq_n(ns) == 1) then ! timestep + write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & + history_file(1:lenstr(history_file))//trim(cstream),'_inst.', & + iyear,'-',imonth,'-',iday,'-',msec,'.',trim(suffix) + elseif (histfreq(ns) == '1' .and. histfreq_n(ns) > 1) then ! timestep + write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & + history_file(1:lenstr(history_file))//trim(cstream),'.', & + iyear,'-',imonth,'-',iday,'-',msec,'.',trim(suffix) + elseif (histfreq(ns) == 'h'.or.histfreq(ns) == 'H') then ! hourly + write(ncfile,'(a,a,i2.2,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & + history_file(1:lenstr(history_file))//trim(cstream),'_', & + histfreq_n(ns),'h.',iyear,'-',imonth,'-',iday,'-',msec,'.',trim(suffix) + elseif (histfreq(ns) == 'd'.or.histfreq(ns) == 'D') then ! daily + write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,a)') & + history_file(1:lenstr(history_file))//trim(cstream),'.', & + iyear,'-',imonth,'-',iday,'.',trim(suffix) + elseif (histfreq(ns) == 'm'.or.histfreq(ns) == 'M') then ! 
monthly + write(ncfile,'(a,a,i4.4,a,i2.2,a,a)') & + history_file(1:lenstr(history_file))//trim(cstream),'.', & + iyear,'-',imonth,'.',trim(suffix) + elseif (histfreq(ns) == 'y'.or.histfreq(ns) == 'Y') then ! yearly + write(ncfile,'(a,a,i4.4,a,a)') & + history_file(1:lenstr(history_file))//trim(cstream),'.', & + iyear,'.',trim(suffix) + endif + + else ! instantaneous + write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & + history_file(1:lenstr(history_file))//trim(cstream),'_inst.', & + iyear,'-',imonth,'-',iday,'-',msec,'.',trim(suffix) + endif - else ! instantaneous with histfreq > dt - write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & - history_file(1:lenstr(history_file)),'_inst.', & - iyear,'-',imonth,'-',iday,'-',msec,'.',suffix - endif endif end subroutine construct_filename diff --git a/cicecore/cicedynB/analysis/ice_history_snow.F90 b/cicecore/cicedynB/analysis/ice_history_snow.F90 new file mode 100644 index 000000000..5a590af2b --- /dev/null +++ b/cicecore/cicedynB/analysis/ice_history_snow.F90 @@ -0,0 +1,430 @@ +!======================================================================= + +! Snow tracer history output + + module ice_history_snow + + use ice_kinds_mod + use ice_constants, only: c0, c1, mps_to_cmpdy + use ice_domain_size, only: max_nstrm, nslyr + use ice_fileunits, only: nu_nml, nml_filename, & + get_fileunit, release_fileunit + use ice_fileunits, only: nu_diag + use ice_exit, only: abort_ice + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc, only: icepack_query_parameters, & + icepack_query_tracer_flags, icepack_query_tracer_indices + + implicit none + private + public :: accum_hist_snow, init_hist_snow_2D, init_hist_snow_3Dc + + !--------------------------------------------------------------- + ! flags: write to output file if true or histfreq value + !--------------------------------------------------------------- + + character (len=max_nstrm), public :: & + f_smassice = 'm', f_smassicen = 'x', & + f_smassliq = 'm', f_smassliqn = 'x', & + f_rhos_cmp = 'm', f_rhos_cmpn = 'x', & + f_rhos_cnt = 'm', f_rhos_cntn = 'x', & + f_rsnw = 'm', f_rsnwn = 'x', & + f_meltsliq = 'm', f_fsloss = 'x' + + !--------------------------------------------------------------- + ! namelist variables + !--------------------------------------------------------------- + + namelist / icefields_snow_nml / & + f_smassice, f_smassicen, & + f_smassliq, f_smassliqn, & + f_rhos_cmp, f_rhos_cmpn, & + f_rhos_cnt, f_rhos_cntn, & + f_rsnw, f_rsnwn, & + f_meltsliq, f_fsloss + + !--------------------------------------------------------------- + ! field indices + !--------------------------------------------------------------- + + integer (kind=int_kind), dimension(max_nstrm), public :: & + n_smassice, n_smassicen, & + n_smassliq, n_smassliqn, & + n_rhos_cmp, n_rhos_cmpn, & + n_rhos_cnt, n_rhos_cntn, & + n_rsnw, n_rsnwn, & + n_meltsliq, n_fsloss + +!======================================================================= + + contains + +!======================================================================= + + subroutine init_hist_snow_2D (dt) + + use ice_broadcast, only: broadcast_scalar + use ice_calendar, only: nstreams, histfreq + use ice_communicate, only: my_task, master_task + use ice_history_shared, only: tstr2D, tcstr, define_hist_field + use ice_fileunits, only: nu_nml, nml_filename, & + get_fileunit, release_fileunit + + real (kind=dbl_kind), intent(in) :: & + dt ! 
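
! The averaged-history branch above encodes the averaging period in the
! file name; for example the monthly case produces names like
! iceh.2005-03.nc. A standalone sketch using the same edit descriptors
! as the diff (the history_file and suffix values are illustrative):

      program fname_demo
        implicit none
        character(len=64) :: ncfile
        character(len=*), parameter :: history_file = 'iceh', suffix = 'nc'
        integer :: iyear, imonth
        iyear  = 2005
        imonth = 3
        write(ncfile,'(a,a,i4.4,a,i2.2,a,a)') &
           trim(history_file), '.', iyear, '-', imonth, '.', trim(suffix)
        print *, trim(ncfile)   ! iceh.2005-03.nc
      end program fname_demo
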
time step + + integer (kind=int_kind) :: ns + integer (kind=int_kind) :: nml_error ! namelist i/o error flag + real (kind=dbl_kind) :: rhofresh, secday + logical (kind=log_kind) :: tr_snow + character(len=*), parameter :: subname = '(init_hist_snow_2D)' + + call icepack_query_tracer_flags(tr_snow_out=tr_snow) + call icepack_query_parameters(rhofresh_out=rhofresh,secday_out=secday) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + if (tr_snow) then + + !----------------------------------------------------------------- + ! read namelist + !----------------------------------------------------------------- + + call get_fileunit(nu_nml) + if (my_task == master_task) then + open (nu_nml, file=nml_filename, status='old',iostat=nml_error) + if (nml_error /= 0) then + nml_error = -1 + else + nml_error = 1 + endif + do while (nml_error > 0) + read(nu_nml, nml=icefields_snow_nml,iostat=nml_error) + end do + if (nml_error == 0) close(nu_nml) + endif + call release_fileunit(nu_nml) + + call broadcast_scalar(nml_error, master_task) + if (nml_error /= 0) then + close (nu_nml) + call abort_ice('ice: error reading icefields_snow_nml') + endif + + else ! .not. tr_snow + f_smassice = 'x' + f_smassliq = 'x' + f_rhos_cmp = 'x' + f_rhos_cnt = 'x' + f_rsnw = 'x' + f_smassicen= 'x' + f_smassliqn= 'x' + f_rhos_cmpn= 'x' + f_rhos_cntn= 'x' + f_rsnwn = 'x' + f_meltsliq = 'x' + f_fsloss = 'x' + endif + + call broadcast_scalar (f_smassice, master_task) + call broadcast_scalar (f_smassliq, master_task) + call broadcast_scalar (f_rhos_cmp, master_task) + call broadcast_scalar (f_rhos_cnt, master_task) + call broadcast_scalar (f_rsnw, master_task) + call broadcast_scalar (f_smassicen,master_task) + call broadcast_scalar (f_smassliqn,master_task) + call broadcast_scalar (f_rhos_cmpn,master_task) + call broadcast_scalar (f_rhos_cntn,master_task) + call broadcast_scalar (f_rsnwn, master_task) + call broadcast_scalar (f_meltsliq, master_task) + call broadcast_scalar (f_fsloss, master_task) + + if (tr_snow) then + + ! 2D variables + do ns = 1, nstreams + if (histfreq(ns) /= 'x') then + + if (f_smassice(1:1) /= 'x') & + call define_hist_field(n_smassice,"smassice","kg/m^2",tstr2D, tcstr, & + "ice mass per unit area in snow", & + "none", c1, c0, & + ns, f_smassice) + + if (f_smassliq(1:1) /= 'x') & + call define_hist_field(n_smassliq,"smassliq","kg/m^2",tstr2D, tcstr, & + "liquid mass per unit area in snow", & + "none", c1, c0, & + ns, f_smassliq) + + if (f_rhos_cmp(1:1) /= 'x') & + call define_hist_field(n_rhos_cmp,"rhos_cmp","kg/m^3",tstr2D, tcstr, & + "snow density: compaction", & + "none", c1, c0, & + ns, f_rhos_cmp) + + if (f_rhos_cnt(1:1) /= 'x') & + call define_hist_field(n_rhos_cnt,"rhos_cnt","kg/m^3",tstr2D, tcstr, & + "snow density: content", & + "none", c1, c0, & + ns, f_rhos_cnt) + + if (f_rsnw(1:1) /= 'x') & + call define_hist_field(n_rsnw,"rsnw","10^-6 m",tstr2D, tcstr, & + "average snow grain radius", & + "none", c1, c0, & + ns, f_rsnw) + + if (f_meltsliq(1:1) /= 'x') & + call define_hist_field(n_meltsliq,"meltsliq","kg/m^2/s",tstr2D, tcstr, & + "snow liquid contribution to meltponds", & + "none", c1/dt, c0, & + ns, f_meltsliq) + + if (f_fsloss(1:1) /= 'x') & + call define_hist_field(n_fsloss,"fsloss","kg/m^2/s",tstr2D, tcstr, & + "rate of snow loss to leads (liquid)", & + "none", c1, c0, & + ns, f_fsloss) + + endif ! histfreq(ns) /= 'x' + enddo ! nstreams + endif ! 
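
! The namelist handling above follows the standard CICE pattern: only the
! master task opens the input file and reads icefields_snow_nml, then each
! flag is broadcast so all MPI ranks agree. A pared-down sketch of that
! master-read/broadcast idiom (the file name, namelist group, and flag are
! hypothetical; CICE wraps mpi_bcast as broadcast_scalar):

      program nml_bcast_demo
        use mpi
        implicit none
        character(len=1) :: f_demo
        integer :: ierr, rank, nml_error
        namelist / demo_nml / f_demo
        call mpi_init(ierr)
        call mpi_comm_rank(mpi_comm_world, rank, ierr)
        f_demo = 'x'                        ! default: field switched off
        if (rank == 0) then                 ! master task reads the file
           open (10, file='ice_in_demo', status='old', iostat=nml_error)
           if (nml_error == 0) read (10, nml=demo_nml, iostat=nml_error)
           if (nml_error == 0) close (10)
        endif
        call mpi_bcast(f_demo, 1, mpi_character, 0, mpi_comm_world, ierr)
        call mpi_finalize(ierr)
      end program nml_bcast_demo
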
tr_snow + + end subroutine init_hist_snow_2D + +!======================================================================= + + subroutine init_hist_snow_3Dc + + use ice_calendar, only: nstreams, histfreq + use ice_history_shared, only: tstr3Dc, tcstr, define_hist_field + + integer (kind=int_kind) :: ns + logical (kind=log_kind) :: tr_snow + character(len=*), parameter :: subname = '(init_hist_pond_3Dc)' + + call icepack_query_tracer_flags(tr_snow_out=tr_snow) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + if (tr_snow) then + + ! 3D (category) variables must be looped separately + do ns = 1, nstreams + if (histfreq(ns) /= 'x') then + + if (f_smassicen(1:1) /= 'x') & + call define_hist_field(n_smassicen,"smassicen","kg/m^2",tstr3Dc, tcstr, & + "ice mass per unit area in snow, category", & + "none", c1, c0, & + ns, f_smassicen) + + if (f_smassliqn(1:1) /= 'x') & + call define_hist_field(n_smassliqn,"smassliqn","kg/m^2",tstr3Dc, tcstr, & + "liquid mass per unit area in snow, category", & + "none", c1, c0, & + ns, f_smassliqn) + + if (f_rhos_cmpn(1:1) /= 'x') & + call define_hist_field(n_rhos_cmpn,"rhos_cmpn","kg/m^3",tstr3Dc, tcstr, & + "snow density: compaction, category", & + "none", c1, c0, & + ns, f_rhos_cmpn) + + if (f_rhos_cntn(1:1) /= 'x') & + call define_hist_field(n_rhos_cntn,"rhos_cntn","kg/m^3",tstr3Dc, tcstr, & + "snow density: content, category", & + "none", c1, c0, & + ns, f_rhos_cntn) + + if (f_rsnwn(1:1) /= 'x') & + call define_hist_field(n_rsnwn,"rsnwn","10^-6 m",tstr3Dc, tcstr, & + "average snow grain radius, category", & + "none", c1, c0, & + ns, f_rsnwn) + + endif ! histfreq(ns) /= 'x' + enddo ! ns + + endif ! tr_snow + + end subroutine init_hist_snow_3Dc + +!======================================================================= + +! accumulate average ice quantities or snapshots + + subroutine accum_hist_snow (iblk) + + use ice_arrays_column, only: meltsliq + use ice_blocks, only: block, nx_block, ny_block + use ice_domain, only: blocks_ice + use ice_flux, only: fsloss + use ice_history_shared, only: n2D, a2D, a3Dc, ncat_hist, & + accum_hist_field, nzslyr + use ice_state, only: vsno, vsnon, trcr, trcrn + + integer (kind=int_kind), intent(in) :: & + iblk ! block index + + ! local variables + + integer (kind=int_kind) :: & + i, j, k, n + + integer (kind=int_kind) :: & + nt_smice, nt_smliq, nt_rhos, nt_rsnw + + logical (kind=log_kind) :: tr_snow + + real (kind=dbl_kind), dimension (nx_block,ny_block) :: & + worka + + real (kind=dbl_kind), dimension (nx_block,ny_block,ncat_hist) :: & + workb + + character(len=*), parameter :: subname = '(accum_hist_snow)' + + !--------------------------------------------------------------- + ! 
increment field + !--------------------------------------------------------------- + + call icepack_query_tracer_flags(tr_snow_out=tr_snow) + call icepack_query_tracer_indices(nt_smice_out=nt_smice, & + nt_smliq_out=nt_smliq, nt_rhos_out=nt_rhos, nt_rsnw_out=nt_rsnw) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + if (allocated(a2D)) then + if (tr_snow) then + + if (f_smassice(1:1)/= 'x') then + worka(:,:) = c0 + do k = 1, nzslyr + worka(:,:) = worka(:,:) & + + trcr(:,:,nt_smice+k-1,iblk) + enddo + worka(:,:) = worka(:,:) * vsno(:,:,iblk) / real(nslyr,kind=dbl_kind) + call accum_hist_field(n_smassice, iblk, worka, a2D) + endif + if (f_smassliq(1:1)/= 'x') then + worka(:,:) = c0 + do k = 1, nzslyr + worka(:,:) = worka(:,:) & + + trcr(:,:,nt_smliq+k-1,iblk) + enddo + worka(:,:) = worka(:,:) * vsno(:,:,iblk) / real(nslyr,kind=dbl_kind) + call accum_hist_field(n_smassliq, iblk, worka, a2D) + endif + if (f_rhos_cmp(1:1)/= 'x') then + worka(:,:) = c0 + do k = 1, nzslyr + worka(:,:) = worka(:,:) & + + trcr(:,:,nt_rhos+k-1,iblk) + enddo + worka(:,:) = worka(:,:) / real(nslyr,kind=dbl_kind) + call accum_hist_field(n_rhos_cmp, iblk, worka, a2D) + endif + if (f_rhos_cnt(1:1)/= 'x') then + worka(:,:) = c0 + do k = 1, nzslyr + worka(:,:) = worka(:,:) & + + trcr(:,:,nt_smice+k-1,iblk) & + + trcr(:,:,nt_smliq+k-1,iblk) + enddo + worka(:,:) = worka(:,:) / real(nslyr,kind=dbl_kind) + call accum_hist_field(n_rhos_cnt, iblk, worka, a2D) + endif + if (f_rsnw(1:1)/= 'x') then + worka(:,:) = c0 + do k = 1, nzslyr + worka(:,:) = worka(:,:) & + + trcr(:,:,nt_rsnw+k-1,iblk) + enddo + worka(:,:) = worka(:,:) / real(nslyr,kind=dbl_kind) + call accum_hist_field(n_rsnw, iblk, worka, a2D) + endif + if (f_meltsliq(1:1)/= 'x') & + call accum_hist_field(n_meltsliq, iblk, & + meltsliq(:,:,iblk), a2D) + if (f_fsloss(1:1)/= 'x') & + call accum_hist_field(n_fsloss, iblk, & + fsloss(:,:,iblk), a2D) + + endif ! allocated(a2D) + + ! 
3D category fields + if (allocated(a3Dc)) then + if (f_smassicen(1:1)/= 'x') then + workb(:,:,:) = c0 + do n = 1, ncat_hist + do k = 1, nzslyr + workb(:,:,n) = workb(:,:,n) & + + trcrn(:,:,nt_smice+k-1,n,iblk) + enddo + workb(:,:,n) = workb(:,:,n) & + * vsnon(:,:,n,iblk) / real(nslyr,kind=dbl_kind) + enddo + call accum_hist_field(n_smassicen-n2D, iblk, ncat_hist, workb, a3Dc) + endif + if (f_smassliqn(1:1)/= 'x') then + workb(:,:,:) = c0 + do n = 1, ncat_hist + do k = 1, nzslyr + workb(:,:,n) = workb(:,:,n) & + + trcrn(:,:,nt_smliq+k-1,n,iblk) + enddo + workb(:,:,n) = workb(:,:,n) & + * vsnon(:,:,n,iblk) / real(nslyr,kind=dbl_kind) + enddo + call accum_hist_field(n_smassliqn-n2D, iblk, ncat_hist, workb, a3Dc) + endif + if (f_rhos_cmpn(1:1)/= 'x') then + workb(:,:,:) = c0 + do n = 1, ncat_hist + do k = 1, nzslyr + workb(:,:,n) = workb(:,:,n) & + + trcrn(:,:,nt_rhos+k-1,n,iblk) + enddo + workb(:,:,n) = workb(:,:,n) / real(nslyr,kind=dbl_kind) + enddo + call accum_hist_field(n_rhos_cmpn-n2D, iblk, ncat_hist, workb, a3Dc) + endif + if (f_rhos_cntn(1:1)/= 'x') then + workb(:,:,:) = c0 + do n = 1, ncat_hist + do k = 1, nzslyr + workb(:,:,n) = workb(:,:,n) & + + trcrn(:,:,nt_smice+k-1,n,iblk) & + + trcrn(:,:,nt_smliq+k-1,n,iblk) + enddo + workb(:,:,n) = workb(:,:,n) / real(nslyr,kind=dbl_kind) + enddo + call accum_hist_field(n_rhos_cntn-n2D, iblk, ncat_hist, workb, a3Dc) + endif + if (f_rsnwn(1:1)/= 'x') then + workb(:,:,:) = c0 + do n = 1, ncat_hist + do k = 1, nzslyr + workb(:,:,n) = workb(:,:,n) & + + trcrn(:,:,nt_rsnw+k-1,n,iblk) + enddo + workb(:,:,n) = workb(:,:,n) / real(nslyr,kind=dbl_kind) + enddo + call accum_hist_field(n_rsnwn-n2D, iblk, ncat_hist, workb, a3Dc) + endif + endif ! allocated(a3Dc) + + endif ! tr_snow + + end subroutine accum_hist_snow + +!======================================================================= + + end module ice_history_snow + +!======================================================================= diff --git a/cicecore/cicedynB/dynamics/ice_dyn_eap.F90 b/cicecore/cicedynB/dynamics/ice_dyn_eap.F90 index 2face07c2..9c52bb888 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_eap.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_eap.F90 @@ -1206,12 +1206,12 @@ subroutine stress_eap (nx_block, ny_block, & vvel , & ! y-component of velocity (m/s) dxt , & ! width of T-cell through the middle (m) dyt , & ! height of T-cell through the middle (m) - dxhy , & ! 0.5*(HTE - HTE) - dyhx , & ! 0.5*(HTN - HTN) - cyp , & ! 1.5*HTE - 0.5*HTE - cxp , & ! 1.5*HTN - 0.5*HTN - cym , & ! 0.5*HTE - 1.5*HTE - cxm , & ! 0.5*HTN - 1.5*HTN + dxhy , & ! 0.5*(HTE - HTW) + dyhx , & ! 0.5*(HTN - HTS) + cyp , & ! 1.5*HTE - 0.5*HTW + cxp , & ! 1.5*HTN - 0.5*HTS + cym , & ! 0.5*HTE - 1.5*HTW + cxm , & ! 0.5*HTN - 1.5*HTS tarear ! 
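
! The comment fixes in the dynamics declarations above clarify which cell
! edges enter the grid metric terms: HTE/HTN are the east/north edge
! lengths, and the second operand is the neighboring cell's edge (written
! HTW/HTS). A literal transcription of the corrected formulas, with
! made-up edge lengths:

      program metric_demo
        implicit none
        double precision :: HTE, HTW, HTN, HTS
        double precision :: dxhy, dyhx, cyp, cxp, cym, cxm
        HTE = 1.00d3; HTW = 0.95d3; HTN = 1.10d3; HTS = 1.05d3
        dxhy = 0.5d0*(HTE - HTW)
        dyhx = 0.5d0*(HTN - HTS)
        cyp  = 1.5d0*HTE - 0.5d0*HTW
        cxp  = 1.5d0*HTN - 0.5d0*HTS
        cym  = 0.5d0*HTE - 1.5d0*HTW
        cxm  = 0.5d0*HTN - 1.5d0*HTS
        print *, dxhy, dyhx, cyp, cxp, cym, cxm
      end program metric_demo
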
1/tarea real (kind=dbl_kind), dimension (nx_block,ny_block), intent(inout) :: & diff --git a/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 b/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 index 2206e0de7..276c8bb79 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 @@ -34,7 +34,7 @@ module ice_dyn_evp use ice_kinds_mod - use ice_communicate, only: my_task + use ice_communicate, only: my_task, master_task use ice_constants, only: field_loc_center, field_loc_NEcorner, & field_type_scalar, field_type_vector use ice_constants, only: c0, p027, p055, p111, p166, & @@ -88,14 +88,14 @@ subroutine evp (dt) stress12_1, stress12_2, stress12_3, stress12_4 use ice_grid, only: tmask, umask, dxt, dyt, dxhy, dyhx, cxp, cyp, cxm, cym, & tarear, uarear, tinyarea, to_ugrid, t2ugrid_vector, u2tgrid_vector, & - grid_type, HTE, HTN + grid_type use ice_state, only: aice, vice, vsno, uvel, vvel, divu, shear, & aice_init, aice0, aicen, vicen, strength use ice_timers, only: timer_dynamics, timer_bound, & ice_timer_start, ice_timer_stop, timer_evp_1d, timer_evp_2d use ice_dyn_evp_1d, only: ice_dyn_evp_1d_copyin, ice_dyn_evp_1d_kernel, & ice_dyn_evp_1d_copyout - use ice_dyn_shared, only: kevp_kernel, stack_velocity_field, unstack_velocity_field + use ice_dyn_shared, only: evp_algorithm, stack_velocity_field, unstack_velocity_field real (kind=dbl_kind), intent(in) :: & dt ! time step @@ -331,7 +331,7 @@ subroutine evp (dt) if (seabed_stress) then - !$OMP PARALLEL DO PRIVATE(iblk) + !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks if ( seabed_stress_method == 'LKD' ) then @@ -351,118 +351,115 @@ subroutine evp (dt) hwater(:,:,iblk), Tbu(:,:,iblk)) endif - enddo + enddo !$OMP END PARALLEL DO endif + call ice_timer_start(timer_evp_2d) - if (kevp_kernel > 0) then - if (first_time .and. my_task == 0) then - write(nu_diag,'(2a,i6)') subname,' Entering kevp_kernel version ',kevp_kernel - first_time = .false. - endif - if (trim(grid_type) == 'tripole') then - call abort_ice(trim(subname)//' Kernel not tested on tripole grid. Set kevp_kernel=0') - endif - call ice_dyn_evp_1d_copyin( & - nx_block,ny_block,nblocks,nx_global+2*nghost,ny_global+2*nghost, & - HTE,HTN, & -!v1 dxhy,dyhx,cyp,cxp,cym,cxm,tinyarea, & -!v1 waterx,watery, & - icetmask, iceumask, & - cdn_ocn,aiu,uocn,vocn,forcex,forcey,Tbu, & - umassdti,fm,uarear,tarear,strintx,strinty,uvel_init,vvel_init,& - strength,uvel,vvel,dxt,dyt, & - stressp_1 ,stressp_2, stressp_3, stressp_4, & - stressm_1 ,stressm_2, stressm_3, stressm_4, & - stress12_1,stress12_2,stress12_3,stress12_4 ) - if (kevp_kernel == 2) then - call ice_timer_start(timer_evp_1d) - call ice_dyn_evp_1d_kernel() - call ice_timer_stop(timer_evp_1d) -!v1 else if (kevp_kernel == 1) then -!v1 call evp_kernel_v1() - else - if (my_task == 0) write(nu_diag,*) subname,' ERROR: kevp_kernel = ',kevp_kernel - call abort_ice(subname//' kevp_kernel not supported.') - endif - call ice_dyn_evp_1d_copyout( & - nx_block,ny_block,nblocks,nx_global+2*nghost,ny_global+2*nghost,& -!strocn uvel,vvel, strocnx,strocny, strintx,strinty, & - uvel,vvel, strintx,strinty, & - stressp_1, stressp_2, stressp_3, stressp_4, & - stressm_1, stressm_2, stressm_3, stressm_4, & - stress12_1,stress12_2,stress12_3,stress12_4, & - divu,rdg_conv,rdg_shear,shear,taubx,tauby ) - - else ! kevp_kernel == 0 (Standard CICE) - - do ksub = 1,ndte ! subcycling - - !----------------------------------------------------------------- - ! 
stress tensor equation, total surface stress - !----------------------------------------------------------------- - - !$TCXOMP PARALLEL DO PRIVATE(iblk,strtmp) - do iblk = 1, nblocks -! if (trim(yield_curve) == 'ellipse') then - call stress (nx_block, ny_block, & - ksub, icellt(iblk), & - indxti (:,iblk), indxtj (:,iblk), & - uvel (:,:,iblk), vvel (:,:,iblk), & - dxt (:,:,iblk), dyt (:,:,iblk), & - dxhy (:,:,iblk), dyhx (:,:,iblk), & - cxp (:,:,iblk), cyp (:,:,iblk), & - cxm (:,:,iblk), cym (:,:,iblk), & - tarear (:,:,iblk), tinyarea (:,:,iblk), & - strength (:,:,iblk), & - stressp_1 (:,:,iblk), stressp_2 (:,:,iblk), & - stressp_3 (:,:,iblk), stressp_4 (:,:,iblk), & - stressm_1 (:,:,iblk), stressm_2 (:,:,iblk), & - stressm_3 (:,:,iblk), stressm_4 (:,:,iblk), & - stress12_1(:,:,iblk), stress12_2(:,:,iblk), & - stress12_3(:,:,iblk), stress12_4(:,:,iblk), & - shear (:,:,iblk), divu (:,:,iblk), & - rdg_conv (:,:,iblk), rdg_shear (:,:,iblk), & - strtmp (:,:,:) ) -! endif ! yield_curve - - !----------------------------------------------------------------- - ! momentum equation - !----------------------------------------------------------------- - - call stepu (nx_block, ny_block, & - icellu (iblk), Cdn_ocn (:,:,iblk), & - indxui (:,iblk), indxuj (:,iblk), & - ksub, & - aiu (:,:,iblk), strtmp (:,:,:), & - uocn (:,:,iblk), vocn (:,:,iblk), & - waterx (:,:,iblk), watery (:,:,iblk), & - forcex (:,:,iblk), forcey (:,:,iblk), & - umassdti (:,:,iblk), fm (:,:,iblk), & - uarear (:,:,iblk), & - strintx (:,:,iblk), strinty (:,:,iblk), & - taubx (:,:,iblk), tauby (:,:,iblk), & - uvel_init(:,:,iblk), vvel_init(:,:,iblk),& - uvel (:,:,iblk), vvel (:,:,iblk), & - Tbu (:,:,iblk)) - enddo - !$TCXOMP END PARALLEL DO + if (evp_algorithm == "shared_mem_1d" ) then - call stack_velocity_field(uvel, vvel, fld2) - call ice_timer_start(timer_bound) - if (maskhalo_dyn) then - call ice_HaloUpdate (fld2, halo_info_mask, & - field_loc_NEcorner, field_type_vector) - else - call ice_HaloUpdate (fld2, halo_info, & - field_loc_NEcorner, field_type_vector) + if (first_time .and. my_task == master_task) then + write(nu_diag,'(3a)') subname,' Entering evp_algorithm version ',evp_algorithm + first_time = .false. endif - call ice_timer_stop(timer_bound) - call unstack_velocity_field(fld2, uvel, vvel) + if (trim(grid_type) == 'tripole') then + call abort_ice(trim(subname)//' & + & Kernel not tested on tripole grid. Set evp_algorithm=standard_2d') + endif + + call ice_dyn_evp_1d_copyin( & + nx_block,ny_block,nblocks,nx_global+2*nghost,ny_global+2*nghost, & + icetmask, iceumask, & + cdn_ocn,aiu,uocn,vocn,forcex,forcey,Tbu, & + umassdti,fm,uarear,tarear,strintx,strinty,uvel_init,vvel_init,& + strength,uvel,vvel,dxt,dyt, & + stressp_1 ,stressp_2, stressp_3, stressp_4, & + stressm_1 ,stressm_2, stressm_3, stressm_4, & + stress12_1,stress12_2,stress12_3,stress12_4 ) + call ice_timer_start(timer_evp_1d) + call ice_dyn_evp_1d_kernel() + call ice_timer_stop(timer_evp_1d) + call ice_dyn_evp_1d_copyout( & + nx_block,ny_block,nblocks,nx_global+2*nghost,ny_global+2*nghost,& +!strocn uvel,vvel, strocnx,strocny, strintx,strinty, & + uvel,vvel, strintx,strinty, & + stressp_1, stressp_2, stressp_3, stressp_4, & + stressm_1, stressm_2, stressm_3, stressm_4, & + stress12_1,stress12_2,stress12_3,stress12_4, & + divu,rdg_conv,rdg_shear,shear,taubx,tauby ) + + else ! evp_algorithm == standard_2d (Standard CICE) + + do ksub = 1,ndte ! subcycling + + !----------------------------------------------------------------- + ! 
stress tensor equation, total surface stress + !----------------------------------------------------------------- + + !$TCXOMP PARALLEL DO PRIVATE(iblk,strtmp) + do iblk = 1, nblocks + +! if (trim(yield_curve) == 'ellipse') then + call stress (nx_block, ny_block, & + ksub, icellt(iblk), & + indxti (:,iblk), indxtj (:,iblk), & + uvel (:,:,iblk), vvel (:,:,iblk), & + dxt (:,:,iblk), dyt (:,:,iblk), & + dxhy (:,:,iblk), dyhx (:,:,iblk), & + cxp (:,:,iblk), cyp (:,:,iblk), & + cxm (:,:,iblk), cym (:,:,iblk), & + tarear (:,:,iblk), tinyarea (:,:,iblk), & + strength (:,:,iblk), & + stressp_1 (:,:,iblk), stressp_2 (:,:,iblk), & + stressp_3 (:,:,iblk), stressp_4 (:,:,iblk), & + stressm_1 (:,:,iblk), stressm_2 (:,:,iblk), & + stressm_3 (:,:,iblk), stressm_4 (:,:,iblk), & + stress12_1(:,:,iblk), stress12_2(:,:,iblk), & + stress12_3(:,:,iblk), stress12_4(:,:,iblk), & + shear (:,:,iblk), divu (:,:,iblk), & + rdg_conv (:,:,iblk), rdg_shear (:,:,iblk), & + strtmp (:,:,:) ) +! endif ! yield_curve + + !----------------------------------------------------------------- + ! momentum equation + !----------------------------------------------------------------- + + call stepu (nx_block, ny_block, & + icellu (iblk), Cdn_ocn (:,:,iblk), & + indxui (:,iblk), indxuj (:,iblk), & + ksub, & + aiu (:,:,iblk), strtmp (:,:,:), & + uocn (:,:,iblk), vocn (:,:,iblk), & + waterx (:,:,iblk), watery (:,:,iblk), & + forcex (:,:,iblk), forcey (:,:,iblk), & + umassdti (:,:,iblk), fm (:,:,iblk), & + uarear (:,:,iblk), & + strintx (:,:,iblk), strinty (:,:,iblk), & + taubx (:,:,iblk), tauby (:,:,iblk), & + uvel_init(:,:,iblk), vvel_init(:,:,iblk),& + uvel (:,:,iblk), vvel (:,:,iblk), & + Tbu (:,:,iblk)) + + enddo + !$TCXOMP END PARALLEL DO + + call stack_velocity_field(uvel, vvel, fld2) + call ice_timer_start(timer_bound) + if (maskhalo_dyn) then + call ice_HaloUpdate (fld2, halo_info_mask, & + field_loc_NEcorner, field_type_vector) + else + call ice_HaloUpdate (fld2, halo_info, & + field_loc_NEcorner, field_type_vector) + endif + call ice_timer_stop(timer_bound) + call unstack_velocity_field(fld2, uvel, vvel) - enddo ! subcycling - endif ! kevp_kernel + enddo ! subcycling + endif ! evp_algorithm + call ice_timer_stop(timer_evp_2d) deallocate(fld2) @@ -610,12 +607,12 @@ subroutine stress (nx_block, ny_block, & vvel , & ! y-component of velocity (m/s) dxt , & ! width of T-cell through the middle (m) dyt , & ! height of T-cell through the middle (m) - dxhy , & ! 0.5*(HTE - HTE) - dyhx , & ! 0.5*(HTN - HTN) - cyp , & ! 1.5*HTE - 0.5*HTE - cxp , & ! 1.5*HTN - 0.5*HTN - cym , & ! 0.5*HTE - 1.5*HTE - cxm , & ! 0.5*HTN - 1.5*HTN + dxhy , & ! 0.5*(HTE - HTW) + dyhx , & ! 0.5*(HTN - HTS) + cyp , & ! 1.5*HTE - 0.5*HTW + cxp , & ! 1.5*HTN - 0.5*HTS + cym , & ! 0.5*HTE - 1.5*HTW + cxm , & ! 0.5*HTN - 1.5*HTS tarear , & ! 1/tarea tinyarea ! puny*tarea diff --git a/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 b/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 old mode 100644 new mode 100755 index 78469cc86..c691453cb --- a/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 @@ -1,2135 +1,1941 @@ -! ice_dyn_evp_1d +!======================================================================= ! -! Contained 3 Fortran modules, -! * dmi_omp -! * bench_v2 -! * ice_dyn_evp_1d -! These were merged into one module, ice_dyn_evp_1d to support some -! coupled build systems. +! Elastic-viscous-plastic sea ice dynamics model (1D implementations) +! Computes ice velocity and deformation ! -! Modules used for: -! 
* convert 2D arrays into 1D vectors -! * Do stress/stepu/halo_update interations -! * convert 1D vectors into 2D matrices -! -! Call from ice_dyn_evp.F90: -! call ice_dyn_evp_1d_copyin(...) -! call ice_dyn_evp_1d_kernel() -! call ice_dyn_evp_1d_copyout(...) -! -! * REAL4 internal version: -! mv evp_kernel1d.F90 evp_kernel1d_r8.F90 -! cat evp_kernel1d_r8.F90 | sed s/DBL_KIND/REAL_KIND/g > evp_kernel1d.F90 -! -! * !v1 : a) "dxhy,dyhx,cyp,cxp,cym,cxm,tinyarea" input variables is replaced by -! "HTE,HTN"->"HTE,HTN,HTEm1,HTNm1" and variables are calculated in-line -! b) "waterx,watery" is calculated using existing input "uocn,vocn" -! -! Jacob Weismann Poulsen (JWP), Mads Hvid Ribergaard (MHRI), DMI -!=============================================================================== +! authors: Jacob Weismann Poulsen, DMI +! Mads Hvid Ribergaard, DMI -!=============================================================================== - -!-- One dimension representation of EVP 2D arrays used for EVP kernels module ice_dyn_evp_1d - use ice_kinds_mod - use ice_fileunits, only: nu_diag - use ice_exit, only: abort_ice - use icepack_intfc, only: icepack_query_parameters - use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted - - implicit none - private - public :: ice_dyn_evp_1d_copyin, ice_dyn_evp_1d_copyout, ice_dyn_evp_1d_kernel - - interface ice_dyn_evp_1d_copyin -! module procedure evp_copyin_v1 - module procedure evp_copyin_v2 - end interface - - interface ice_dyn_evp_1d_kernel -! module procedure evp_kernel_v1 - module procedure evp_kernel_v2 - end interface - - interface ice_dyn_evp_1d_copyout - module procedure evp_copyout - end interface - - interface convert_2d_1d -! module procedure convert_2d_1d_v1 - module procedure convert_2d_1d_v2 - end interface - - integer(kind=int_kind) :: & - NA_len, NAVEL_len - logical(kind=log_kind), dimension(:), allocatable :: & - skipucell - integer(kind=int_kind), dimension(:), allocatable :: & - ee,ne,se,nw,sw,sse,indi,indj,indij , halo_parent - real (kind=dbl_kind), dimension(:), allocatable :: & - cdn_ocn,aiu,uocn,vocn,forcex,forcey,Tbu,tarear, & - umassdti,fm,uarear,strintx,strinty,uvel_init,vvel_init - real (kind=dbl_kind), dimension(:), allocatable :: & - strength,uvel,vvel,dxt,dyt, & -!v1 dxhy,dyhx,cyp,cxp,cym,cxm,tinyarea, & -!v1 waterx,watery, & - stressp_1, stressp_2, stressp_3, stressp_4, & - stressm_1, stressm_2, stressm_3, stressm_4, & - stress12_1,stress12_2,stress12_3,stress12_4, & - divu,rdg_conv,rdg_shear,shear,taubx,tauby - real (kind=DBL_KIND), dimension(:), allocatable :: & - str1, str2, str3, str4, str5, str6, str7, str8 - real (kind=dbl_kind), dimension(:), allocatable :: & - HTE,HTN, & - HTEm1,HTNm1 - logical(kind=log_kind),parameter :: dbug = .false. - - -!--- dmi_omp --------------------------- - interface domp_get_domain - module procedure domp_get_domain_rlu - end interface - - INTEGER, PARAMETER :: JPIM = SELECTED_INT_KIND(9) - integer(int_kind) :: domp_iam, domp_nt + use ice_kinds_mod + use ice_fileunits, only : nu_diag + use ice_exit, only : abort_ice + use icepack_intfc, only : icepack_query_parameters + use icepack_intfc, only : icepack_warnings_flush, & + icepack_warnings_aborted + implicit none + private + public :: ice_dyn_evp_1d_copyin, ice_dyn_evp_1d_copyout, & + ice_dyn_evp_1d_kernel + + integer(kind=int_kind) :: NA_len, NAVEL_len, domp_iam, domp_nt #if defined (_OPENMP) - ! Please note, this constant will create a compiler info for a constant - ! 
expression in IF statements: - real(kind=dbl_kind) :: rdomp_iam, rdomp_nt - !$OMP THREADPRIVATE(domp_iam,domp_nt,rdomp_iam,rdomp_nt) + real(kind=dbl_kind) :: rdomp_iam, rdomp_nt + !$OMP THREADPRIVATE(domp_iam, domp_nt, rdomp_iam, rdomp_nt) #endif -!--- dmi_omp --------------------------- - -!--- bench_v2 -------------------------- - interface evp1d_stress - module procedure stress_i - module procedure stress_l - end interface - interface evp1d_stepu - module procedure stepu_iter - module procedure stepu_last - end interface -!--- bench_v2 -------------------------- + logical(kind=log_kind), dimension(:), allocatable :: skiptcell, skipucell + integer(kind=int_kind), dimension(:), allocatable :: ee, ne, se, & + nw, sw, sse, indi, indj, indij, halo_parent + real(kind=dbl_kind), dimension(:), allocatable :: cdn_ocn, aiu, & + uocn, vocn, forcex, forcey, Tbu, tarear, umassdti, fm, uarear, & + strintx, strinty, uvel_init, vvel_init, strength, uvel, vvel, & + dxt, dyt, stressp_1, stressp_2, stressp_3, stressp_4, stressm_1, & + stressm_2, stressm_3, stressm_4, stress12_1, stress12_2, & + stress12_3, stress12_4, divu, rdg_conv, rdg_shear, shear, taubx, & + tauby, str1, str2, str3, str4, str5, str6, str7, str8, HTE, HTN, & + HTEm1, HTNm1 + integer, parameter :: JPIM = selected_int_kind(9) + + interface evp1d_stress + module procedure stress_iter + module procedure stress_last + end interface + + interface evp1d_stepu + module procedure stepu_iter + module procedure stepu_last + end interface + +!======================================================================= + +contains + +!======================================================================= + + subroutine domp_init +#if defined (_OPENMP) - contains + use omp_lib, only : omp_get_thread_num, omp_get_num_threads +#endif -!=============================================================================== -!former module dmi_omp + implicit none - subroutine domp_init(nt_out) + character(len=*), parameter :: subname = '(domp_init)' + !$OMP PARALLEL DEFAULT(none) #if defined (_OPENMP) - use omp_lib, only : omp_get_thread_num, omp_get_num_threads + domp_iam = omp_get_thread_num() + rdomp_iam = real(domp_iam, dbl_kind) + domp_nt = omp_get_num_threads() + rdomp_nt = real(domp_nt, dbl_kind) +#else + domp_iam = 0 + domp_nt = 1 #endif + !$OMP END PARALLEL - integer(int_kind), intent(out) :: nt_out + end subroutine domp_init - character(len=*), parameter :: subname = '(domp_init)' - !--------------------------------------- +!======================================================================= - !$OMP PARALLEL DEFAULT(none) + subroutine domp_get_domain(lower, upper, d_lower, d_upper) #if defined (_OPENMP) - domp_iam = omp_get_thread_num() - rdomp_iam = real(domp_iam,dbl_kind) - domp_nt = omp_get_num_threads() - rdomp_nt = real(domp_nt,dbl_kind) -#else - domp_iam = 0 - domp_nt = 1 -#endif - !$OMP END PARALLEL - - if (dbug) then -#if defined (_OPENACC) - write(nu_diag,'(2a)') subname,' Build with openACC support' -!#elif defined (_OPENMP) -! write(nu_diag,'(2a)') subname,' Build with openMP support' -!#else -! write(nu_diag,'(2a)') subname,' Build without openMP and openACC support' + + use omp_lib, only : omp_in_parallel + use ice_constants, only : p5 #endif - !- echo #threads: - if (domp_nt > 1) then - write(nu_diag,'(2a,i5,a)') subname,' Running openMP with ', domp_nt, ' threads' - else + implicit none + + integer(kind=JPIM), intent(in) :: lower, upper + integer(kind=JPIM), intent(out) :: d_lower, d_upper + + ! 
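+      ! NOTE: domp_get_domain splits the inclusive index range
+      ! lower:upper evenly across the available OpenMP threads. As a
+      ! worked example, with 4 threads and the range 1:100 the threads
+      ! receive 1:25, 26:50, 51:75 and 76:100; with a single thread, or
+      ! outside a parallel region, the full range is returned unchanged.
+
+      ! 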
local variables #if defined (_OPENMP) - write(nu_diag,'(2a)') subname,' Running openMP with a single thread' -#else - write(nu_diag,'(2a)') subname,' Running without openMP' -#endif - endif - endif - !- return value of #threads: - nt_out = domp_nt + real(kind=dbl_kind) :: dlen +#endif - end subroutine domp_init - -!---------------------------------------------------------------------------- + character(len=*), parameter :: subname = '(domp_get_domain)' - subroutine domp_get_domain_rlu(lower,upper,d_lower,d_upper) + ! proper action in "null" case + if (upper <= 0 .or. upper < lower) then + d_lower = 0 + d_upper = -1 + return + end if + ! proper action in serial case + d_lower = lower + d_upper = upper #if defined (_OPENMP) - use omp_lib, only : omp_in_parallel - use ice_constants, only: p5 + + if (omp_in_parallel()) then + dlen = real((upper - lower + 1), dbl_kind) + d_lower = lower + floor(((rdomp_iam * dlen + p5) / rdomp_nt), JPIM) + d_upper = lower - 1 + floor(((rdomp_iam * dlen + dlen + p5) / rdomp_nt), JPIM) + end if #endif - integer(KIND=JPIM), intent(in) :: lower,upper - integer(KIND=JPIM), intent(out) :: d_lower,d_upper + end subroutine domp_get_domain + +!======================================================================= + + subroutine stress_iter(NA_len, ee, ne, se, lb, ub, uvel, vvel, dxt, & + dyt, hte, htn, htem1, htnm1, strength, stressp_1, stressp_2, & + stressp_3, stressp_4, stressm_1, stressm_2, stressm_3, & + stressm_4, stress12_1, stress12_2, stress12_3, stress12_4, str1, & + str2, str3, str4, str5, str6, str7, str8, skiptcell) + + use ice_kinds_mod + use ice_constants, only : p027, p055, p111, p166, p222, p25, & + p333, p5, c1p5, c1 + use ice_dyn_shared, only : ecci, denom1, arlx1i, Ktens, revp + + implicit none + + integer(kind=int_kind), intent(in) :: NA_len, lb, ub + integer(kind=int_kind), dimension(:), intent(in), contiguous :: & + ee, ne, se + real(kind=dbl_kind), dimension(:), intent(in), contiguous :: & + strength, uvel, vvel, dxt, dyt, hte, htn, htem1, htnm1 + logical(kind=log_kind), intent(in), dimension(:) :: skiptcell + real(kind=dbl_kind), dimension(:), intent(inout), contiguous :: & + stressp_1, stressp_2, stressp_3, stressp_4, stressm_1, & + stressm_2, stressm_3, stressm_4, stress12_1, stress12_2, & + stress12_3, stress12_4 + real(kind=dbl_kind), dimension(:), intent(out), contiguous :: & + str1, str2, str3, str4, str5, str6, str7, str8 + + ! 
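+      ! NOTE: the 1D solver replaces the (i,j) stencil of the 2D code
+      ! with indirect addressing: ee, ne and se hold precomputed 1D
+      ! indices of neighbouring grid points, and skiptcell flags cells
+      ! that carry no ice so the loop below can cycle past them.
+
+      ! 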
local variables
+
+      integer(kind=int_kind) :: iw, il, iu
+      real(kind=dbl_kind) :: puny, divune, divunw, divuse, divusw, &
+         tensionne, tensionnw, tensionse, tensionsw, shearne, shearnw, &
+         shearse, shearsw, Deltane, Deltanw, Deltase, Deltasw, c0ne, &
+         c0nw, c0se, c0sw, c1ne, c1nw, c1se, c1sw, ssigpn, ssigps, &
+         ssigpe, ssigpw, ssigmn, ssigms, ssigme, ssigmw, ssig12n, &
+         ssig12s, ssig12e, ssig12w, ssigp1, ssigp2, ssigm1, ssigm2, &
+         ssig121, ssig122, csigpne, csigpnw, csigpse, csigpsw, &
+         csigmne, csigmnw, csigmse, csigmsw, csig12ne, csig12nw, &
+         csig12se, csig12sw, str12ew, str12we, str12ns, str12sn, &
+         strp_tmp, strm_tmp, tmp_uvel_ee, tmp_vvel_se, tmp_vvel_ee, &
+         tmp_vvel_ne, tmp_uvel_ne, tmp_uvel_se, dxhy, dyhx, cxp, cyp, &
+         cxm, cym, tinyarea, tmparea
+
+      character(len=*), parameter :: subname = '(stress_iter)'
+
+      call icepack_query_parameters(puny_out=puny)
+      call icepack_warnings_flush(nu_diag)
+      if (icepack_warnings_aborted()) then
+         call abort_ice(error_message=subname, file=__FILE__, &
+            line=__LINE__)
+      end if
+
+#ifdef _OPENACC
+      !$acc parallel &
+      !$acc present(ee, ne, se, strength, uvel, vvel, dxt, dyt, hte, &
+      !$acc    htn, htem1, htnm1, str1, str2, str3, str4, str5, str6, &
+      !$acc    str7, str8, stressp_1, stressp_2, stressp_3, stressp_4, &
+      !$acc    stressm_1, stressm_2, stressm_3, stressm_4, stress12_1, &
+      !$acc    stress12_2, stress12_3, stress12_4, skiptcell)
+      !$acc loop
+      do iw = 1, NA_len
+#else
+      call domp_get_domain(lb, ub, il, iu)
+      do iw = il, iu
+#endif
+
+         if (skiptcell(iw)) cycle
+
+         tmparea = dxt(iw) * dyt(iw) ! necessary to split calc of tinyarea; otherwise not binary identical
+         tinyarea = puny * tmparea
+         dxhy = p5 * (hte(iw) - htem1(iw))
+         dyhx = p5 * (htn(iw) - htnm1(iw))
+         cxp = c1p5 * htn(iw) - p5 * htnm1(iw)
+         cyp = c1p5 * hte(iw) - p5 * htem1(iw)
+         cxm = -(c1p5 * htnm1(iw) - p5 * htn(iw))
+         cym = -(c1p5 * htem1(iw) - p5 * hte(iw))
+
+         !--------------------------------------------------------------
+         ! strain rates
+         ! NOTE: these are actually strain rates * area (m^2/s)
+         !--------------------------------------------------------------
+
+         tmp_uvel_ne = uvel(ne(iw))
+         tmp_uvel_se = uvel(se(iw))
+         tmp_uvel_ee = uvel(ee(iw))
+
+         tmp_vvel_ee = vvel(ee(iw))
+         tmp_vvel_se = vvel(se(iw))
+         tmp_vvel_ne = vvel(ne(iw))
+         ! divergence = e_11 + e_22
+         divune = cyp * uvel(iw) - dyt(iw) * tmp_uvel_ee &
+                + cxp * vvel(iw) - dxt(iw) * tmp_vvel_se
+         divunw = cym * tmp_uvel_ee + dyt(iw) * uvel(iw) &
+                + cxp * tmp_vvel_ee - dxt(iw) * tmp_vvel_ne
+         divusw = cym * tmp_uvel_ne + dyt(iw) * tmp_uvel_se &
+                + cxm * tmp_vvel_ne + dxt(iw) * tmp_vvel_ee
+         divuse = cyp * tmp_uvel_se - dyt(iw) * tmp_uvel_ne &
+                + cxm * tmp_vvel_se + dxt(iw) * vvel(iw)
+
+         ! tension strain rate = e_11 - e_22
+         tensionne = -cym * uvel(iw) - dyt(iw) * tmp_uvel_ee &
+                   + cxm * vvel(iw) + dxt(iw) * tmp_vvel_se
+         tensionnw = -cyp * tmp_uvel_ee + dyt(iw) * uvel(iw) &
+                   + cxm * tmp_vvel_ee + dxt(iw) * tmp_vvel_ne
+         tensionsw = -cyp * tmp_uvel_ne + dyt(iw) * tmp_uvel_se &
+                   + cxp * tmp_vvel_ne - dxt(iw) * tmp_vvel_ee
+         tensionse = -cym * tmp_uvel_se - dyt(iw) * tmp_uvel_ne &
+                   + cxp * tmp_vvel_se - dxt(iw) * vvel(iw)
+
+         ! 
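+         ! NOTE: tmparea and tinyarea above are formed in two separate
+         ! multiplies because floating-point multiplication is not
+         ! associative; a different grouping of puny, dxt and dyt could
+         ! change the last bit and break bit-for-bit agreement with the
+         ! standard 2D solver.
+
+         ! 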
shearing strain rate = 2 * e_12 + shearne = -cym * vvel(iw) - dyt(iw) * tmp_vvel_ee & + - cxm * uvel(iw) - dxt(iw) * tmp_uvel_se + shearnw = -cyp * tmp_vvel_ee + dyt(iw) * vvel(iw) & + - cxm * tmp_uvel_ee - dxt(iw) * tmp_uvel_ne + shearsw = -cyp * tmp_vvel_ne + dyt(iw) * tmp_vvel_se & + - cxp * tmp_uvel_ne + dxt(iw) * tmp_uvel_ee + shearse = -cym * tmp_vvel_se - dyt(iw) * tmp_vvel_ne & + - cxp * tmp_uvel_se + dxt(iw) * uvel(iw) + + ! Delta (in the denominator of zeta and eta) + Deltane = sqrt(divune**2 + ecci * (tensionne**2 + shearne**2)) + Deltanw = sqrt(divunw**2 + ecci * (tensionnw**2 + shearnw**2)) + Deltasw = sqrt(divusw**2 + ecci * (tensionsw**2 + shearsw**2)) + Deltase = sqrt(divuse**2 + ecci * (tensionse**2 + shearse**2)) + + !-------------------------------------------------------------- + ! replacement pressure/Delta (kg/s) + ! save replacement pressure for principal stress calculation + !-------------------------------------------------------------- + + c0ne = strength(iw) / max(Deltane, tinyarea) + c0nw = strength(iw) / max(Deltanw, tinyarea) + c0sw = strength(iw) / max(Deltasw, tinyarea) + c0se = strength(iw) / max(Deltase, tinyarea) + + c1ne = c0ne * arlx1i + c1nw = c0nw * arlx1i + c1sw = c0sw * arlx1i + c1se = c0se * arlx1i + + c0ne = c1ne * ecci + c0nw = c1nw * ecci + c0sw = c1sw * ecci + c0se = c1se * ecci + + !-------------------------------------------------------------- + ! the stresses (kg/s^2) + ! (1) northeast, (2) northwest, (3) southwest, (4) southeast + !-------------------------------------------------------------- + + stressp_1(iw) = (stressp_1(iw) * (c1 - arlx1i * revp) & + + c1ne * (divune * (c1 + Ktens) - Deltane * (c1 - Ktens))) * denom1 + stressp_2(iw) = (stressp_2(iw) * (c1 - arlx1i * revp) & + + c1nw * (divunw * (c1 + Ktens) - Deltanw * (c1 - Ktens))) * denom1 + stressp_3(iw) = (stressp_3(iw) * (c1 - arlx1i * revp) & + + c1sw * (divusw * (c1 + Ktens) - Deltasw * (c1 - Ktens))) * denom1 + stressp_4(iw) = (stressp_4(iw) * (c1 - arlx1i * revp) & + + c1se * (divuse * (c1 + Ktens) - Deltase * (c1 - Ktens))) * denom1 + + stressm_1(iw) = (stressm_1(iw) * (c1 - arlx1i * revp) + c0ne * tensionne * (c1 + Ktens)) * denom1 + stressm_2(iw) = (stressm_2(iw) * (c1 - arlx1i * revp) + c0nw * tensionnw * (c1 + Ktens)) * denom1 + stressm_3(iw) = (stressm_3(iw) * (c1 - arlx1i * revp) + c0sw * tensionsw * (c1 + Ktens)) * denom1 + stressm_4(iw) = (stressm_4(iw) * (c1 - arlx1i * revp) + c0se * tensionse * (c1 + Ktens)) * denom1 + + stress12_1(iw) = (stress12_1(iw) * (c1 - arlx1i * revp) + c0ne * shearne * p5 * (c1 + Ktens)) * denom1 + stress12_2(iw) = (stress12_2(iw) * (c1 - arlx1i * revp) + c0nw * shearnw * p5 * (c1 + Ktens)) * denom1 + stress12_3(iw) = (stress12_3(iw) * (c1 - arlx1i * revp) + c0sw * shearsw * p5 * (c1 + Ktens)) * denom1 + stress12_4(iw) = (stress12_4(iw) * (c1 - arlx1i * revp) + c0se * shearse * p5 * (c1 + Ktens)) * denom1 + + !-------------------------------------------------------------- + ! combinations of the stresses for the momentum equation + ! 
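+         ! NOTE: the p-constants used below are fixed weights from
+         ! ice_constants (p027 = 1/36, p055 = 1/18, p111 = 1/9,
+         ! p166 = 1/6, p222 = 2/9, p25 = 1/4, p333 = 1/3, p5 = 1/2)
+         ! arising from the bilinear discretization of the stresses.
+         ! 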
(kg/s^2) + !-------------------------------------------------------------- + + ssigpn = stressp_1(iw) + stressp_2(iw) + ssigps = stressp_3(iw) + stressp_4(iw) + ssigpe = stressp_1(iw) + stressp_4(iw) + ssigpw = stressp_2(iw) + stressp_3(iw) + ssigp1 = (stressp_1(iw) + stressp_3(iw)) * p055 + ssigp2 = (stressp_2(iw) + stressp_4(iw)) * p055 + + ssigmn = stressm_1(iw) + stressm_2(iw) + ssigms = stressm_3(iw) + stressm_4(iw) + ssigme = stressm_1(iw) + stressm_4(iw) + ssigmw = stressm_2(iw) + stressm_3(iw) + ssigm1 = (stressm_1(iw) + stressm_3(iw)) * p055 + ssigm2 = (stressm_2(iw) + stressm_4(iw)) * p055 + + ssig12n = stress12_1(iw) + stress12_2(iw) + ssig12s = stress12_3(iw) + stress12_4(iw) + ssig12e = stress12_1(iw) + stress12_4(iw) + ssig12w = stress12_2(iw) + stress12_3(iw) + ssig121 = (stress12_1(iw) + stress12_3(iw)) * p111 + ssig122 = (stress12_2(iw) + stress12_4(iw)) * p111 + + csigpne = p111 * stressp_1(iw) + ssigp2 + p027 * stressp_3(iw) + csigpnw = p111 * stressp_2(iw) + ssigp1 + p027 * stressp_4(iw) + csigpsw = p111 * stressp_3(iw) + ssigp2 + p027 * stressp_1(iw) + csigpse = p111 * stressp_4(iw) + ssigp1 + p027 * stressp_2(iw) + + csigmne = p111 * stressm_1(iw) + ssigm2 + p027 * stressm_3(iw) + csigmnw = p111 * stressm_2(iw) + ssigm1 + p027 * stressm_4(iw) + csigmsw = p111 * stressm_3(iw) + ssigm2 + p027 * stressm_1(iw) + csigmse = p111 * stressm_4(iw) + ssigm1 + p027 * stressm_2(iw) + + csig12ne = p222 * stress12_1(iw) + ssig122 + p055 * stress12_3(iw) + csig12nw = p222 * stress12_2(iw) + ssig121 + p055 * stress12_4(iw) + csig12sw = p222 * stress12_3(iw) + ssig122 + p055 * stress12_1(iw) + csig12se = p222 * stress12_4(iw) + ssig121 + p055 * stress12_2(iw) + + str12ew = p5 * dxt(iw) * (p333 * ssig12e + p166 * ssig12w) + str12we = p5 * dxt(iw) * (p333 * ssig12w + p166 * ssig12e) + str12ns = p5 * dyt(iw) * (p333 * ssig12n + p166 * ssig12s) + str12sn = p5 * dyt(iw) * (p333 * ssig12s + p166 * ssig12n) + + !-------------------------------------------------------------- + ! for dF/dx (u momentum) + !-------------------------------------------------------------- + + strp_tmp = p25 * dyt(iw) * (p333 * ssigpn + p166 * ssigps) + strm_tmp = p25 * dyt(iw) * (p333 * ssigmn + p166 * ssigms) + + ! northeast (i,j) + str1(iw) = -strp_tmp - strm_tmp - str12ew & + + dxhy * (-csigpne + csigmne) + dyhx * csig12ne + + ! northwest (i+1,j) + str2(iw) = strp_tmp + strm_tmp - str12we & + + dxhy * (-csigpnw + csigmnw) + dyhx * csig12nw + + strp_tmp = p25 * dyt(iw) * (p333 * ssigps + p166 * ssigpn) + strm_tmp = p25 * dyt(iw) * (p333 * ssigms + p166 * ssigmn) + + ! southeast (i,j+1) + str3(iw) = -strp_tmp - strm_tmp + str12ew & + + dxhy * (-csigpse + csigmse) + dyhx * csig12se + + ! southwest (i+1,j+1) + str4(iw) = strp_tmp + strm_tmp + str12we & + + dxhy * (-csigpsw + csigmsw) + dyhx * csig12sw + + !-------------------------------------------------------------- + ! for dF/dy (v momentum) + !-------------------------------------------------------------- + + strp_tmp = p25 * dxt(iw) * (p333 * ssigpe + p166 * ssigpw) + strm_tmp = p25 * dxt(iw) * (p333 * ssigme + p166 * ssigmw) + + ! northeast (i,j) + str5(iw) = -strp_tmp + strm_tmp - str12ns & + - dyhx * (csigpne + csigmne) + dxhy * csig12ne + + ! southeast (i,j+1) + str6(iw) = strp_tmp - strm_tmp - str12sn & + - dyhx * (csigpse + csigmse) + dxhy * csig12se + + strp_tmp = p25 * dxt(iw) * (p333 * ssigpw + p166 * ssigpe) + strm_tmp = p25 * dxt(iw) * (p333 * ssigmw + p166 * ssigme) + + ! 
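+         ! NOTE: str1-str4 collect the corner contributions to dF/dx
+         ! (u momentum) and str5-str8 those to dF/dy (v momentum);
+         ! stepu gathers them at each velocity point through the nw,
+         ! sw and sse neighbour index arrays.
+
+         ! 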
northwest (i+1,j) + str7(iw) = -strp_tmp + strm_tmp + str12ns & + - dyhx * (csigpnw + csigmnw) + dxhy * csig12nw + + ! southwest (i+1,j+1) + str8(iw) = strp_tmp - strm_tmp + str12sn & + - dyhx * (csigpsw + csigmsw) + dxhy * csig12sw + + end do +#ifdef _OPENACC + !$acc end parallel +#endif - ! proper action in "null" cases: - if (upper <= 0 .or. upper < lower) then - d_lower = 0 - d_upper = -1 - return - endif + end subroutine stress_iter + +!======================================================================= + + subroutine stress_last(NA_len, ee, ne, se, lb, ub, uvel, vvel, dxt, & + dyt, hte, htn, htem1, htnm1, strength, stressp_1, stressp_2, & + stressp_3, stressp_4, stressm_1, stressm_2, stressm_3, & + stressm_4, stress12_1, stress12_2, stress12_3, stress12_4, str1, & + str2, str3, str4, str5, str6, str7, str8, skiptcell, tarear, divu, & + rdg_conv, rdg_shear, shear) + + use ice_kinds_mod + use ice_constants, only : p027, p055, p111, p166, p222, p25, & + p333, p5, c1p5, c1, c0 + use ice_dyn_shared, only : ecci, denom1, arlx1i, Ktens, revp + + implicit none + + integer(kind=int_kind), intent(in) :: NA_len, lb, ub + integer(kind=int_kind), dimension(:), intent(in), contiguous :: & + ee, ne, se + real(kind=dbl_kind), dimension(:), intent(in), contiguous :: & + strength, uvel, vvel, dxt, dyt, hte, htn, htem1, htnm1, tarear + logical(kind=log_kind), intent(in), dimension(:) :: skiptcell + real(kind=dbl_kind), dimension(:), intent(inout), contiguous :: & + stressp_1, stressp_2, stressp_3, stressp_4, stressm_1, & + stressm_2, stressm_3, stressm_4, stress12_1, stress12_2, & + stress12_3, stress12_4 + real(kind=dbl_kind), dimension(:), intent(out), contiguous :: & + str1, str2, str3, str4, str5, str6, str7, str8, divu, & + rdg_conv, rdg_shear, shear + + ! local variables + + integer(kind=int_kind) :: iw, il, iu + real(kind=dbl_kind) :: puny, divune, divunw, divuse, divusw, & + tensionne, tensionnw, tensionse, tensionsw, shearne, shearnw, & + shearse, shearsw, Deltane, Deltanw, Deltase, Deltasw, c0ne, & + c0nw, c0se, c0sw, c1ne, c1nw, c1se, c1sw, ssigpn, ssigps, & + ssigpe, ssigpw, ssigmn, ssigms, ssigme, ssigmw, ssig12n, & + ssig12s, ssig12e, ssig12w, ssigp1, ssigp2, ssigm1, ssigm2, & + ssig121, ssig122, csigpne, csigpnw, csigpse, csigpsw, & + csigmne, csigmnw, csigmse, csigmsw, csig12ne, csig12nw, & + csig12se, csig12sw, str12ew, str12we, str12ns, str12sn, & + strp_tmp, strm_tmp, tmp_uvel_ee, tmp_vvel_se, tmp_vvel_ee, & + tmp_vvel_ne, tmp_uvel_ne, tmp_uvel_se, dxhy, dyhx, cxp, cyp, & + cxm, cym, tinyarea, tmparea + + character(len=*), parameter :: subname = '(stress_last)' + + call icepack_query_parameters(puny_out=puny) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) then + call abort_ice(error_message=subname, file=__FILE__, & + line=__LINE__) + end if - ! 
proper action in serial sections
-      d_lower = lower
-      d_upper = upper
+#ifdef _OPENACC
+      !$acc parallel &
+      !$acc present(ee, ne, se, strength, uvel, vvel, dxt, dyt, hte, &
+      !$acc    htn, htem1, htnm1, str1, str2, str3, str4, str5, str6, &
+      !$acc    str7, str8, stressp_1, stressp_2, stressp_3, stressp_4, &
+      !$acc    stressm_1, stressm_2, stressm_3, stressm_4, stress12_1, &
+      !$acc    stress12_2, stress12_3, stress12_4, tarear, divu, &
+      !$acc    rdg_conv, rdg_shear, shear, skiptcell)
+      !$acc loop
+      do iw = 1, NA_len
+#else
+      call domp_get_domain(lb, ub, il, iu)
+      do iw = il, iu
+#endif

-#if defined (_OPENMP)
-      if (omp_in_parallel()) then
-         dlen = real(upper-lower+1, dbl_kind)
-         d_lower = lower + floor((rdomp_iam*dlen+p5)/rdomp_nt, JPIM)
-         d_upper = lower -1 + floor((rdomp_iam*dlen+dlen+p5)/rdomp_nt, JPIM)
-      endif
-#endif

+         if (skiptcell(iw)) cycle
+
+         tmparea = dxt(iw) * dyt(iw) ! necessary to split calc of tinyarea; otherwise not binary identical
+         tinyarea = puny * tmparea
+         dxhy = p5 * (hte(iw) - htem1(iw))
+         dyhx = p5 * (htn(iw) - htnm1(iw))
+         cxp = c1p5 * htn(iw) - p5 * htnm1(iw)
+         cyp = c1p5 * hte(iw) - p5 * htem1(iw)
+         cxm = -(c1p5 * htnm1(iw) - p5 * htn(iw))
+         cym = -(c1p5 * htem1(iw) - p5 * hte(iw))
+
+         !--------------------------------------------------------------
+         ! strain rates
+         ! NOTE: these are actually strain rates * area (m^2/s)
+         !--------------------------------------------------------------
+
+         tmp_uvel_ne = uvel(ne(iw))
+         tmp_uvel_se = uvel(se(iw))
+         tmp_uvel_ee = uvel(ee(iw))
+
+         tmp_vvel_ee = vvel(ee(iw))
+         tmp_vvel_se = vvel(se(iw))
+         tmp_vvel_ne = vvel(ne(iw))
+
+         ! divergence = e_11 + e_22
+         divune = cyp * uvel(iw) - dyt(iw) * tmp_uvel_ee &
+                + cxp * vvel(iw) - dxt(iw) * tmp_vvel_se
+         divunw = cym * tmp_uvel_ee + dyt(iw) * uvel(iw) &
+                + cxp * tmp_vvel_ee - dxt(iw) * tmp_vvel_ne
+         divusw = cym * tmp_uvel_ne + dyt(iw) * tmp_uvel_se &
+                + cxm * tmp_vvel_ne + dxt(iw) * tmp_vvel_ee
+         divuse = cyp * tmp_uvel_se - dyt(iw) * tmp_uvel_ne &
+                + cxm * tmp_vvel_se + dxt(iw) * vvel(iw)
+
+         ! tension strain rate = e_11 - e_22
+         tensionne = -cym * uvel(iw) - dyt(iw) * tmp_uvel_ee &
+                   + cxm * vvel(iw) + dxt(iw) * tmp_vvel_se
+         tensionnw = -cyp * tmp_uvel_ee + dyt(iw) * uvel(iw) &
+                   + cxm * tmp_vvel_ee + dxt(iw) * tmp_vvel_ne
+         tensionsw = -cyp * tmp_uvel_ne + dyt(iw) * tmp_uvel_se &
+                   + cxp * tmp_vvel_ne - dxt(iw) * tmp_vvel_ee
+         tensionse = -cym * tmp_uvel_se - dyt(iw) * tmp_uvel_ne &
+                   + cxp * tmp_vvel_se - dxt(iw) * vvel(iw)
+
+         ! shearing strain rate = 2 * e_12
+         shearne = -cym * vvel(iw) - dyt(iw) * tmp_vvel_ee &
+                 - cxm * uvel(iw) - dxt(iw) * tmp_uvel_se
+         shearnw = -cyp * tmp_vvel_ee + dyt(iw) * vvel(iw) &
+                 - cxm * tmp_uvel_ee - dxt(iw) * tmp_uvel_ne
+         shearsw = -cyp * tmp_vvel_ne + dyt(iw) * tmp_vvel_se &
+                 - cxp * tmp_uvel_ne + dxt(iw) * tmp_uvel_ee
+         shearse = -cym * tmp_vvel_se - dyt(iw) * tmp_vvel_ne &
+                 - cxp * tmp_uvel_se + dxt(iw) * uvel(iw)
+
+         ! Delta (in the denominator of zeta and eta)
+         Deltane = sqrt(divune**2 + ecci * (tensionne**2 + shearne**2))
+         Deltanw = sqrt(divunw**2 + ecci * (tensionnw**2 + shearnw**2))
+         Deltasw = sqrt(divusw**2 + ecci * (tensionsw**2 + shearsw**2))
+         Deltase = sqrt(divuse**2 + ecci * (tensionse**2 + shearse**2))
+
+         !--------------------------------------------------------------
+         ! on last subcycle, save quantities for mechanical
+         ! redistribution
+         !--------------------------------------------------------------
+
+         divu(iw) = p25 * (divune + divunw + divuse + divusw) * tarear(iw)
+         rdg_conv(iw) = -min(divu(iw), c0) ! 
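+         ! NOTE: rdg_conv keeps only the convergent part of the flow
+         ! and rdg_shear (below) the remaining shear deformation; both
+         ! feed the mechanical redistribution (ridging) scheme.
+         ! 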
TODO: Could move outside the entire kernel + rdg_shear(iw) = p5 * (p25 * (Deltane + Deltanw + Deltase + Deltasw) * tarear(iw) - abs(divu(iw))) + + ! diagnostic only + ! shear = sqrt(tension**2 + shearing**2) + shear(iw) = p25 * tarear(iw) * sqrt((tensionne + tensionnw + tensionse + tensionsw)**2 & + + (shearne + shearnw + shearse + shearsw)**2) + + !-------------------------------------------------------------- + ! replacement pressure/Delta (kg/s) + ! save replacement pressure for principal stress calculation + !-------------------------------------------------------------- + + c0ne = strength(iw) / max(Deltane, tinyarea) + c0nw = strength(iw) / max(Deltanw, tinyarea) + c0sw = strength(iw) / max(Deltasw, tinyarea) + c0se = strength(iw) / max(Deltase, tinyarea) + + c1ne = c0ne * arlx1i + c1nw = c0nw * arlx1i + c1sw = c0sw * arlx1i + c1se = c0se * arlx1i + + c0ne = c1ne * ecci + c0nw = c1nw * ecci + c0sw = c1sw * ecci + c0se = c1se * ecci + + !-------------------------------------------------------------- + ! the stresses (kg/s^2) + ! (1) northeast, (2) northwest, (3) southwest, (4) southeast + !-------------------------------------------------------------- + + stressp_1(iw) = (stressp_1(iw) * (c1 - arlx1i * revp) & + + c1ne * (divune * (c1 + Ktens) - Deltane * (c1 - Ktens))) * denom1 + stressp_2(iw) = (stressp_2(iw) * (c1 - arlx1i * revp) & + + c1nw * (divunw * (c1 + Ktens) - Deltanw * (c1 - Ktens))) * denom1 + stressp_3(iw) = (stressp_3(iw) * (c1 - arlx1i * revp) & + + c1sw * (divusw * (c1 + Ktens) - Deltasw * (c1 - Ktens))) * denom1 + stressp_4(iw) = (stressp_4(iw) * (c1 - arlx1i * revp) & + + c1se * (divuse * (c1 + Ktens) - Deltase * (c1 - Ktens))) * denom1 + + stressm_1(iw) = (stressm_1(iw) * (c1 - arlx1i * revp) + c0ne * tensionne * (c1 + Ktens)) * denom1 + stressm_2(iw) = (stressm_2(iw) * (c1 - arlx1i * revp) + c0nw * tensionnw * (c1 + Ktens)) * denom1 + stressm_3(iw) = (stressm_3(iw) * (c1 - arlx1i * revp) + c0sw * tensionsw * (c1 + Ktens)) * denom1 + stressm_4(iw) = (stressm_4(iw) * (c1 - arlx1i * revp) + c0se * tensionse * (c1 + Ktens)) * denom1 + + stress12_1(iw) = (stress12_1(iw) * (c1 - arlx1i * revp) + c0ne * shearne * p5 * (c1 + Ktens)) * denom1 + stress12_2(iw) = (stress12_2(iw) * (c1 - arlx1i * revp) + c0nw * shearnw * p5 * (c1 + Ktens)) * denom1 + stress12_3(iw) = (stress12_3(iw) * (c1 - arlx1i * revp) + c0sw * shearsw * p5 * (c1 + Ktens)) * denom1 + stress12_4(iw) = (stress12_4(iw) * (c1 - arlx1i * revp) + c0se * shearse * p5 * (c1 + Ktens)) * denom1 + + !-------------------------------------------------------------- + ! combinations of the stresses for the momentum equation + ! 
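+         ! NOTE: from here stress_last repeats the stress combinations
+         ! of stress_iter unchanged; the two routines differ only in
+         ! the deformation diagnostics saved above on the final
+         ! subcycle.
+         ! 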
(kg/s^2) + !-------------------------------------------------------------- + + ssigpn = stressp_1(iw) + stressp_2(iw) + ssigps = stressp_3(iw) + stressp_4(iw) + ssigpe = stressp_1(iw) + stressp_4(iw) + ssigpw = stressp_2(iw) + stressp_3(iw) + ssigp1 = (stressp_1(iw) + stressp_3(iw)) * p055 + ssigp2 = (stressp_2(iw) + stressp_4(iw)) * p055 + + ssigmn = stressm_1(iw) + stressm_2(iw) + ssigms = stressm_3(iw) + stressm_4(iw) + ssigme = stressm_1(iw) + stressm_4(iw) + ssigmw = stressm_2(iw) + stressm_3(iw) + ssigm1 = (stressm_1(iw) + stressm_3(iw)) * p055 + ssigm2 = (stressm_2(iw) + stressm_4(iw)) * p055 + + ssig12n = stress12_1(iw) + stress12_2(iw) + ssig12s = stress12_3(iw) + stress12_4(iw) + ssig12e = stress12_1(iw) + stress12_4(iw) + ssig12w = stress12_2(iw) + stress12_3(iw) + ssig121 = (stress12_1(iw) + stress12_3(iw)) * p111 + ssig122 = (stress12_2(iw) + stress12_4(iw)) * p111 + + csigpne = p111 * stressp_1(iw) + ssigp2 + p027 * stressp_3(iw) + csigpnw = p111 * stressp_2(iw) + ssigp1 + p027 * stressp_4(iw) + csigpsw = p111 * stressp_3(iw) + ssigp2 + p027 * stressp_1(iw) + csigpse = p111 * stressp_4(iw) + ssigp1 + p027 * stressp_2(iw) + + csigmne = p111 * stressm_1(iw) + ssigm2 + p027 * stressm_3(iw) + csigmnw = p111 * stressm_2(iw) + ssigm1 + p027 * stressm_4(iw) + csigmsw = p111 * stressm_3(iw) + ssigm2 + p027 * stressm_1(iw) + csigmse = p111 * stressm_4(iw) + ssigm1 + p027 * stressm_2(iw) + + csig12ne = p222 * stress12_1(iw) + ssig122 + p055 * stress12_3(iw) + csig12nw = p222 * stress12_2(iw) + ssig121 + p055 * stress12_4(iw) + csig12sw = p222 * stress12_3(iw) + ssig122 + p055 * stress12_1(iw) + csig12se = p222 * stress12_4(iw) + ssig121 + p055 * stress12_2(iw) + + str12ew = p5 * dxt(iw) * (p333 * ssig12e + p166 * ssig12w) + str12we = p5 * dxt(iw) * (p333 * ssig12w + p166 * ssig12e) + str12ns = p5 * dyt(iw) * (p333 * ssig12n + p166 * ssig12s) + str12sn = p5 * dyt(iw) * (p333 * ssig12s + p166 * ssig12n) + + !-------------------------------------------------------------- + ! for dF/dx (u momentum) + !-------------------------------------------------------------- + + strp_tmp = p25 * dyt(iw) * (p333 * ssigpn + p166 * ssigps) + strm_tmp = p25 * dyt(iw) * (p333 * ssigmn + p166 * ssigms) + + ! northeast (i,j) + str1(iw) = -strp_tmp - strm_tmp - str12ew & + + dxhy * (-csigpne + csigmne) + dyhx * csig12ne + + ! northwest (i+1,j) + str2(iw) = strp_tmp + strm_tmp - str12we & + + dxhy * (-csigpnw + csigmnw) + dyhx * csig12nw + + strp_tmp = p25 * dyt(iw) * (p333 * ssigps + p166 * ssigpn) + strm_tmp = p25 * dyt(iw) * (p333 * ssigms + p166 * ssigmn) + + ! southeast (i,j+1) + str3(iw) = -strp_tmp - strm_tmp + str12ew & + + dxhy * (-csigpse + csigmse) + dyhx * csig12se + + ! southwest (i+1,j+1) + str4(iw) = strp_tmp + strm_tmp + str12we & + + dxhy * (-csigpsw + csigmsw) + dyhx * csig12sw + + !-------------------------------------------------------------- + ! for dF/dy (v momentum) + !-------------------------------------------------------------- + + strp_tmp = p25 * dxt(iw) * (p333 * ssigpe + p166 * ssigpw) + strm_tmp = p25 * dxt(iw) * (p333 * ssigme + p166 * ssigmw) + + ! northeast (i,j) + str5(iw) = -strp_tmp + strm_tmp - str12ns & + - dyhx * (csigpne + csigmne) + dxhy * csig12ne + + ! southeast (i,j+1) + str6(iw) = strp_tmp - strm_tmp - str12sn & + - dyhx * (csigpse + csigmse) + dxhy * csig12se + + strp_tmp = p25 * dxt(iw) * (p333 * ssigpw + p166 * ssigpe) + strm_tmp = p25 * dxt(iw) * (p333 * ssigmw + p166 * ssigme) + + ! 
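+         ! NOTE: on the final subcycle the str1-str8 sums are consumed
+         ! by stepu_last, which additionally stores the internal stress
+         ! (strintx, strinty) and seabed stress (taubx, tauby)
+         ! diagnostics.
+
+         ! 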
northwest (i+1,j) + str7(iw) = -strp_tmp + strm_tmp + str12ns & + - dyhx * (csigpnw + csigmnw) + dxhy * csig12nw + + ! southwest (i+1,j+1) + str8(iw) = strp_tmp - strm_tmp + str12sn & + - dyhx * (csigpsw + csigmsw) + dxhy * csig12sw + + end do +#ifdef _OPENACC + !$acc end parallel #endif - if (.false.) then - write(nu_diag,'(2a,i3,a,2i10)') subname,' openMP thread ', domp_iam, & - ' handles range: ', d_lower, d_upper - endif + end subroutine stress_last - end subroutine domp_get_domain_rlu +!======================================================================= -!---------------------------------------------------------------------------- + subroutine stepu_iter(NA_len, rhow, lb, ub, Cw, aiu, uocn, vocn, & + forcex, forcey, umassdti, fm, uarear, Tbu, uvel_init, vvel_init, & + uvel, vvel, str1, str2, str3, str4, str5, str6, str7, str8, nw, & + sw, sse, skipucell) - subroutine domp_get_thread_no (tnum) + use ice_kinds_mod + use ice_constants, only : c0, c1 + use ice_dyn_shared, only : brlx, revp, u0, cosw, sinw - implicit none - integer(int_kind), intent(out) :: tnum - character(len=*), parameter :: subname = '(domp_get_thread_no)' + implicit none - tnum = domp_iam + 1 + integer(kind=int_kind), intent(in) :: NA_len, lb, ub + real(kind=dbl_kind), intent(in) :: rhow + logical(kind=log_kind), intent(in), dimension(:) :: skipucell + integer(kind=int_kind), dimension(:), intent(in), contiguous :: & + nw, sw, sse + real(kind=dbl_kind), dimension(:), intent(in), contiguous :: & + uvel_init, vvel_init, aiu, forcex, forcey, umassdti, Tbu, & + uocn, vocn, fm, uarear, Cw, str1, str2, str3, str4, str5, & + str6, str7, str8 + real(kind=dbl_kind), dimension(:), intent(inout), contiguous :: & + uvel, vvel - end subroutine domp_get_thread_no + ! local variables -!---------------------------------------------------------------------------- + integer(kind=int_kind) :: iw, il, iu + real(kind=dbl_kind) :: uold, vold, vrel, cca, ccb, ab2, cc1, & + cc2, taux, tauy, Cb, tmp_str2_nw, tmp_str3_sse, tmp_str4_sw, & + tmp_str6_sse, tmp_str7_nw, tmp_str8_sw, waterx, watery, & + tmp_strintx, tmp_strinty -!former end module dmi_omp + character(len=*), parameter :: subname = '(stepu_iter)' -!=============================================================================== +#ifdef _OPENACC + !$acc parallel & + !$acc present(Cw, aiu, uocn, vocn, forcex, forcey, umassdti, fm, & + !$acc uarear, Tbu, uvel_init, vvel_init, nw, sw, sse, skipucell, & + !$acc str1, str2, str3, str4, str5, str6, str7, str8, uvel, & + !$acc vvel) + !$acc loop + do iw = 1, NA_len +#else + call domp_get_domain(lb, ub, il, iu) + do iw = il, iu +#endif -!former module bench_v2 + if (skipucell(iw)) cycle -!---------------------------------------------------------------------------- + uold = uvel(iw) + vold = vvel(iw) - subroutine stress_i(NA_len, & - ee,ne,se,lb,ub,uvel,vvel,dxt,dyt, & - hte,htn,htem1,htnm1, & - strength,stressp_1,stressp_2,stressp_3,stressp_4, & - stressm_1,stressm_2,stressm_3,stressm_4,stress12_1, & - stress12_2,stress12_3,stress12_4,str1,str2,str3,str4,str5, & - str6,str7,str8) + vrel = aiu(iw) * rhow * Cw(iw) * sqrt((uocn(iw) - uold)**2 + (vocn(iw) - vold)**2) - use ice_kinds_mod - use ice_constants, only: p027, p055, p111, p166, p222, p25, p333, p5, c1p5, c1 - use ice_dyn_shared, only: ecci, denom1, arlx1i, Ktens, revp + waterx = uocn(iw) * cosw - vocn(iw) * sinw * sign(c1, fm(iw)) + watery = vocn(iw) * cosw + uocn(iw) * sinw * sign(c1, fm(iw)) - implicit none + taux = vrel * waterx + tauy = vrel * watery - integer (kind=int_kind), 
intent(in) :: NA_len - integer (kind=int_kind), intent(in) :: lb,ub - integer (kind=int_kind), dimension(:), intent(in), contiguous :: ee,ne,se - real (kind=dbl_kind), dimension(:), intent(in), contiguous :: & - strength, uvel, vvel, dxt, dyt, & - hte,htn,htem1,htnm1 - real (kind=dbl_kind), dimension(:), intent(inout), contiguous :: & - stressp_1,stressp_2, stressp_3, stressp_4, stressm_1, stressm_2, & - stressm_3,stressm_4, stress12_1,stress12_2,stress12_3, stress12_4 - real (kind=DBL_KIND), dimension(:), intent(out), contiguous :: & - str1,str2,str3,str4,str5,str6,str7,str8 + Cb = Tbu(iw) / (sqrt(uold**2 + vold**2) + u0) - !-- local variables + cca = (brlx + revp) * umassdti(iw) + vrel * cosw + Cb + ccb = fm(iw) + sign(c1, fm(iw)) * vrel * sinw - integer (kind=int_kind) :: iw,il,iu - real (kind=dbl_kind) :: & - puny - real (kind=DBL_KIND) :: & - divune, divunw, divuse, divusw,tensionne, tensionnw, tensionse, tensionsw, & - shearne, shearnw, shearse, shearsw, Deltane, Deltanw, Deltase, Deltasw , & - c0ne, c0nw, c0se, c0sw, c1ne, c1nw, c1se, c1sw , & - ssigpn, ssigps, ssigpe, ssigpw, ssigmn, ssigms, ssigme, ssigmw , & - ssig12n, ssig12s, ssig12e, ssig12w, ssigp1, ssigp2,ssigm1, ssigm2,ssig121, & - ssig122, csigpne, csigpnw, csigpse, csigpsw,csigmne, csigmnw, csigmse , & - csigmsw, csig12ne, csig12nw, csig12se, csig12sw, str12ew, str12we,str12ns, & - str12sn, strp_tmp, strm_tmp, tmp_uvel_ee, tmp_vvel_se, tmp_vvel_ee, & - tmp_vvel_ne, tmp_uvel_ne, tmp_uvel_se - real (kind=DBL_KIND) :: dxhy,dyhx,cxp,cyp,cxm,cym,tinyarea - - character(len=*), parameter :: subname = '(stress_i)' - !--------------------------------------- + ab2 = cca**2 + ccb**2 - call icepack_query_parameters(puny_out=puny) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) + tmp_str2_nw = str2(nw(iw)) + tmp_str3_sse = str3(sse(iw)) + tmp_str4_sw = str4(sw(iw)) + tmp_str6_sse = str6(sse(iw)) + tmp_str7_nw = str7(nw(iw)) + tmp_str8_sw = str8(sw(iw)) -#ifdef _OPENACC - !$acc parallel & - !$acc present(ee,ne,se,strength,uvel,vvel,dxt,dyt, & - !$acc hte, htn, htem1, htnm1, & - !$acc str1,str2,str3,str4,str5,str6,str7,str8, & - !$acc stressp_1,stressp_2,stressp_3,stressp_4, & - !$acc stressm_1,stressm_2,stressm_3,stressm_4, & - !$acc stress12_1,stress12_2,stress12_3,stress12_4) - !$acc loop - do iw = 1,NA_len -#else - call domp_get_domain(lb,ub,il,iu) - do iw = il, iu -#endif - tinyarea = puny*dxt(iw)*dyt(iw) - dxhy = p5*(hte(iw) - htem1(iw)) - dyhx = p5*(htn(iw) - htnm1(iw)) - cxp = c1p5*htn(iw) - p5*htnm1(iw) - cyp = c1p5*hte(iw) - p5*htem1(iw) - cxm = -(c1p5*htnm1(iw) - p5*htn(iw)) - cym = -(c1p5*htem1(iw) - p5*hte(iw)) - - !----------------------------------------------------------------- - ! strain rates - ! NOTE these are actually strain rates * area (m^2/s) - !----------------------------------------------------------------- - ! divergence = e_11 + e_22 - tmp_uvel_ee = uvel(ee(iw)) - tmp_vvel_ee = vvel(ee(iw)) - - tmp_vvel_se = vvel(se(iw)) - tmp_uvel_se = uvel(se(iw)) - - ! ne - divune = cyp*uvel(iw) - dyt(iw)*tmp_uvel_ee & - + cxp*vvel(iw) - dxt(iw)*tmp_vvel_se - ! tension strain rate = e_11 - e_22 - tensionne = -cym*uvel(iw) - dyt(iw)*tmp_uvel_ee & - + cxm*vvel(iw) + dxt(iw)*tmp_vvel_se - ! shearing strain rate = 2*e_12 - shearne = -cym*vvel(iw) - dyt(iw)*tmp_vvel_ee & - - cxm*uvel(iw) - dxt(iw)*tmp_uvel_se - ! Delta (in the denominator of zeta, eta) - Deltane = sqrt(divune**2 + ecci*(tensionne**2 + shearne**2)) - - ! 
These two can move after ne block - ! - tmp_uvel_ne = uvel(ne(iw)) - tmp_vvel_ne = vvel(ne(iw)) - - ! nw - divunw = cym*tmp_uvel_ee + dyt(iw)*uvel(iw) & - + cxp*tmp_vvel_ee - dxt(iw)*tmp_vvel_ne - tensionnw = -cyp*tmp_uvel_ee + dyt(iw)*uvel(iw) & - + cxm*tmp_vvel_ee + dxt(iw)*tmp_vvel_ne - shearnw = -cyp*tmp_vvel_ee + dyt(iw)*vvel(iw) & - - cxm*tmp_uvel_ee - dxt(iw)*tmp_uvel_ne - Deltanw = sqrt(divunw**2 + ecci*(tensionnw**2 + shearnw**2)) - - ! sw - divusw = cym*tmp_uvel_ne + dyt(iw)*tmp_uvel_se & - + cxm*tmp_vvel_ne + dxt(iw)*tmp_vvel_ee - tensionsw = -cyp*tmp_uvel_ne + dyt(iw)*tmp_uvel_se & - + cxp*tmp_vvel_ne - dxt(iw)*tmp_vvel_ee - shearsw = -cyp*tmp_vvel_ne + dyt(iw)*tmp_vvel_se & - - cxp*tmp_uvel_ne + dxt(iw)*tmp_uvel_ee - Deltasw = sqrt(divusw**2 + ecci*(tensionsw**2 + shearsw**2)) - - ! se - divuse = cyp*tmp_uvel_se - dyt(iw)*tmp_uvel_ne & - + cxm*tmp_vvel_se + dxt(iw)*vvel(iw) - tensionse = -cym*tmp_uvel_se - dyt(iw)*tmp_uvel_ne & - + cxp*tmp_vvel_se - dxt(iw)*vvel(iw) - shearse = -cym*tmp_vvel_se - dyt(iw)*tmp_vvel_ne & - - cxp*tmp_uvel_se + dxt(iw)*uvel(iw) - Deltase = sqrt(divuse**2 + ecci*(tensionse**2 + shearse**2)) - - !----------------------------------------------------------------- - ! replacement pressure/Delta ! kg/s - ! save replacement pressure for principal stress calculation - !----------------------------------------------------------------- - c0ne = strength(iw)/max(Deltane,tinyarea) - c0nw = strength(iw)/max(Deltanw,tinyarea) - c0sw = strength(iw)/max(Deltasw,tinyarea) - c0se = strength(iw)/max(Deltase,tinyarea) - - c1ne = c0ne*arlx1i - c1nw = c0nw*arlx1i - c1sw = c0sw*arlx1i - c1se = c0se*arlx1i - - c0ne = c1ne*ecci - c0nw = c1nw*ecci - c0sw = c1sw*ecci - c0se = c1se*ecci - - !----------------------------------------------------------------- - ! the stresses ! kg/s^2 - ! (1) northeast, (2) northwest, (3) southwest, (4) southeast - !----------------------------------------------------------------- - - stressp_1(iw) = (stressp_1(iw)*(c1-arlx1i*revp) + c1ne*(divune*(c1+Ktens) - Deltane*(c1-Ktens))) * denom1 - stressp_2(iw) = (stressp_2(iw)*(c1-arlx1i*revp) + c1nw*(divunw*(c1+Ktens) - Deltanw*(c1-Ktens))) * denom1 - stressp_3(iw) = (stressp_3(iw)*(c1-arlx1i*revp) + c1sw*(divusw*(c1+Ktens) - Deltasw*(c1-Ktens))) * denom1 - stressp_4(iw) = (stressp_4(iw)*(c1-arlx1i*revp) + c1se*(divuse*(c1+Ktens) - Deltase*(c1-Ktens))) * denom1 - - stressm_1(iw) = (stressm_1(iw)*(c1-arlx1i*revp) + c0ne*tensionne*(c1+Ktens)) * denom1 - stressm_2(iw) = (stressm_2(iw)*(c1-arlx1i*revp) + c0nw*tensionnw*(c1+Ktens)) * denom1 - stressm_3(iw) = (stressm_3(iw)*(c1-arlx1i*revp) + c0sw*tensionsw*(c1+Ktens)) * denom1 - stressm_4(iw) = (stressm_4(iw)*(c1-arlx1i*revp) + c0se*tensionse*(c1+Ktens)) * denom1 - - stress12_1(iw) = (stress12_1(iw)*(c1-arlx1i*revp) + c0ne*shearne*p5*(c1+Ktens)) * denom1 - stress12_2(iw) = (stress12_2(iw)*(c1-arlx1i*revp) + c0nw*shearnw*p5*(c1+Ktens)) * denom1 - stress12_3(iw) = (stress12_3(iw)*(c1-arlx1i*revp) + c0sw*shearsw*p5*(c1+Ktens)) * denom1 - stress12_4(iw) = (stress12_4(iw)*(c1-arlx1i*revp) + c0se*shearse*p5*(c1+Ktens)) * denom1 - - !----------------------------------------------------------------- - ! combinations of the stresses for the momentum equation ! 
kg/s^2 - !----------------------------------------------------------------- - - ssigpn = stressp_1(iw) + stressp_2(iw) - ssigps = stressp_3(iw) + stressp_4(iw) - ssigpe = stressp_1(iw) + stressp_4(iw) - ssigpw = stressp_2(iw) + stressp_3(iw) - ssigp1 =(stressp_1(iw) + stressp_3(iw))*p055 - ssigp2 =(stressp_2(iw) + stressp_4(iw))*p055 - - ssigmn = stressm_1(iw) + stressm_2(iw) - ssigms = stressm_3(iw) + stressm_4(iw) - ssigme = stressm_1(iw) + stressm_4(iw) - ssigmw = stressm_2(iw) + stressm_3(iw) - ssigm1 =(stressm_1(iw) + stressm_3(iw))*p055 - ssigm2 =(stressm_2(iw) + stressm_4(iw))*p055 - - ssig12n = stress12_1(iw) + stress12_2(iw) - ssig12s = stress12_3(iw) + stress12_4(iw) - ssig12e = stress12_1(iw) + stress12_4(iw) - ssig12w = stress12_2(iw) + stress12_3(iw) - ssig121 =(stress12_1(iw) + stress12_3(iw))*p111 - ssig122 =(stress12_2(iw) + stress12_4(iw))*p111 - - csigpne = p111*stressp_1(iw) + ssigp2 + p027*stressp_3(iw) - csigpnw = p111*stressp_2(iw) + ssigp1 + p027*stressp_4(iw) - csigpsw = p111*stressp_3(iw) + ssigp2 + p027*stressp_1(iw) - csigpse = p111*stressp_4(iw) + ssigp1 + p027*stressp_2(iw) - - csigmne = p111*stressm_1(iw) + ssigm2 + p027*stressm_3(iw) - csigmnw = p111*stressm_2(iw) + ssigm1 + p027*stressm_4(iw) - csigmsw = p111*stressm_3(iw) + ssigm2 + p027*stressm_1(iw) - csigmse = p111*stressm_4(iw) + ssigm1 + p027*stressm_2(iw) - - csig12ne = p222*stress12_1(iw) + ssig122 + p055*stress12_3(iw) - csig12nw = p222*stress12_2(iw) + ssig121 + p055*stress12_4(iw) - csig12sw = p222*stress12_3(iw) + ssig122 + p055*stress12_1(iw) - csig12se = p222*stress12_4(iw) + ssig121 + p055*stress12_2(iw) - - str12ew = p5*dxt(iw)*(p333*ssig12e + p166*ssig12w) - str12we = p5*dxt(iw)*(p333*ssig12w + p166*ssig12e) - str12ns = p5*dyt(iw)*(p333*ssig12n + p166*ssig12s) - str12sn = p5*dyt(iw)*(p333*ssig12s + p166*ssig12n) - - !----------------------------------------------------------------- - ! for dF/dx (u momentum) - !----------------------------------------------------------------- - strp_tmp = p25*dyt(iw)*(p333*ssigpn + p166*ssigps) - strm_tmp = p25*dyt(iw)*(p333*ssigmn + p166*ssigms) - - ! northeast (iw) - str1(iw) = -strp_tmp - strm_tmp - str12ew & - + dxhy*(-csigpne + csigmne) + dyhx*csig12ne - - ! northwest (i+1,j) - str2(iw) = strp_tmp + strm_tmp - str12we & - + dxhy*(-csigpnw + csigmnw) + dyhx*csig12nw - - strp_tmp = p25*dyt(iw)*(p333*ssigps + p166*ssigpn) - strm_tmp = p25*dyt(iw)*(p333*ssigms + p166*ssigmn) - - ! southeast (i,j+1) - str3(iw) = -strp_tmp - strm_tmp + str12ew & - + dxhy*(-csigpse + csigmse) + dyhx*csig12se - - ! southwest (i+1,j+1) - str4(iw) = strp_tmp + strm_tmp + str12we & - + dxhy*(-csigpsw + csigmsw) + dyhx*csig12sw - - !----------------------------------------------------------------- - ! for dF/dy (v momentum) - !----------------------------------------------------------------- - strp_tmp = p25*dxt(iw)*(p333*ssigpe + p166*ssigpw) - strm_tmp = p25*dxt(iw)*(p333*ssigme + p166*ssigmw) - - ! northeast (i,j) - str5(iw) = -strp_tmp + strm_tmp - str12ns & - - dyhx*(csigpne + csigmne) + dxhy*csig12ne - - ! southeast (i,j+1) - str6(iw) = strp_tmp - strm_tmp - str12sn & - - dyhx*(csigpse + csigmse) + dxhy*csig12se - - strp_tmp = p25*dxt(iw)*(p333*ssigpw + p166*ssigpe) - strm_tmp = p25*dxt(iw)*(p333*ssigmw + p166*ssigme) - - ! northwest (i+1,j) - str7(iw) = -strp_tmp + strm_tmp + str12ns & - - dyhx*(csigpnw + csigmnw) + dxhy*csig12nw - - ! 
southwest (i+1,j+1) - str8(iw) = strp_tmp - strm_tmp + str12sn & - - dyhx*(csigpsw + csigmsw) + dxhy*csig12sw - enddo -#ifdef _OPENACC - !$acc end parallel -#endif + tmp_strintx = uarear(iw) * (str1(iw) + tmp_str2_nw + tmp_str3_sse + tmp_str4_sw) + tmp_strinty = uarear(iw) * (str5(iw) + tmp_str6_sse + tmp_str7_nw + tmp_str8_sw) - end subroutine stress_i - -!---------------------------------------------------------------------------- - - subroutine stress_l(NA_len, tarear, & - ee,ne,se,lb,ub,uvel,vvel,dxt,dyt, & - hte,htn,htem1,htnm1, & - strength,stressp_1,stressp_2,stressp_3,stressp_4, & - stressm_1,stressm_2,stressm_3,stressm_4,stress12_1, & - stress12_2,stress12_3,stress12_4, & - divu,rdg_conv,rdg_shear,shear, & - str1,str2,str3,str4,str5,str6,str7,str8 ) - - use ice_kinds_mod - use ice_constants, only: p027, p055, p111, p166, p222, p25, p333, p5, c1p5, c0, c1 - use ice_dyn_shared, only: ecci, denom1, arlx1i, Ktens, revp - - implicit none - - integer (kind=int_kind), intent(in) :: NA_len - integer (kind=int_kind), intent(in) :: lb,ub - integer (kind=int_kind), dimension(:), intent(in), contiguous :: ee,ne,se - real (kind=dbl_kind), dimension(:), intent(in), contiguous :: & - strength, uvel, vvel, dxt, dyt, tarear, & - hte,htn,htem1,htnm1 - real (kind=dbl_kind), dimension(:), intent(inout), contiguous :: & - stressp_1,stressp_2, stressp_3, stressp_4, stressm_1, stressm_2, & - stressm_3,stressm_4, stress12_1,stress12_2,stress12_3, stress12_4 - real (kind=DBL_KIND), dimension(:), intent(out), contiguous :: & - str1,str2,str3,str4,str5,str6,str7,str8 - real (kind=dbl_kind), dimension(:), intent(out), contiguous :: & - divu,rdg_conv,rdg_shear,shear - - !-- local variables - - integer (kind=int_kind) :: iw,il,iu - real (kind=dbl_kind) :: & - puny - real (kind=DBL_KIND) :: & - divune, divunw, divuse, divusw,tensionne, tensionnw, tensionse, tensionsw, & - shearne, shearnw, shearse, shearsw, Deltane, Deltanw, Deltase, Deltasw , & - c0ne, c0nw, c0se, c0sw, c1ne, c1nw, c1se, c1sw , & - ssigpn, ssigps, ssigpe, ssigpw, ssigmn, ssigms, ssigme, ssigmw , & - ssig12n, ssig12s, ssig12e, ssig12w, ssigp1, ssigp2,ssigm1, ssigm2,ssig121, & - ssig122, csigpne, csigpnw, csigpse, csigpsw,csigmne, csigmnw, csigmse , & - csigmsw, csig12ne, csig12nw, csig12se, csig12sw, str12ew, str12we,str12ns, & - str12sn, strp_tmp, strm_tmp, tmp_uvel_ee, tmp_vvel_se, tmp_vvel_ee, & - tmp_vvel_ne, tmp_uvel_ne, tmp_uvel_se - real (kind=DBL_KIND) :: dxhy,dyhx,cxp,cyp,cxm,cym,tinyarea - - character(len=*), parameter :: subname = '(stress_l)' - !--------------------------------------- - - call icepack_query_parameters(puny_out=puny) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) + cc1 = tmp_strintx + forcex(iw) + taux & + + umassdti(iw) * (brlx * uold + revp * uvel_init(iw)) + cc2 = tmp_strinty + forcey(iw) + tauy & + + umassdti(iw) * (brlx * vold + revp * vvel_init(iw)) -#ifdef _OPENACC - !$acc parallel & - !$acc present(ee,ne,se,strength,uvel,vvel,dxt,dyt,tarear, & - !$acc hte,htn,htem1,htnm1, & - !$acc str1,str2,str3,str4,str5,str6,str7,str8, & - !$acc stressp_1,stressp_2,stressp_3,stressp_4, & - !$acc stressm_1,stressm_2,stressm_3,stressm_4, & - !$acc stress12_1,stress12_2,stress12_3,stress12_4, & - !$acc divu,rdg_conv,rdg_shear,shear) - !$acc loop - do iw = 1,NA_len -#else - call domp_get_domain(lb,ub,il,iu) - do iw = il, iu -#endif - tinyarea = puny*dxt(iw)*dyt(iw) - dxhy = p5*(hte(iw) - htem1(iw)) - dyhx = p5*(htn(iw) - 
htnm1(iw)) - cxp = c1p5*htn(iw) - p5*htnm1(iw) - cyp = c1p5*hte(iw) - p5*htem1(iw) - cxm = -(c1p5*htnm1(iw) - p5*htn(iw)) - cym = -(c1p5*htem1(iw) - p5*hte(iw)) - - !----------------------------------------------------------------- - ! strain rates - ! NOTE these are actually strain rates * area (m^2/s) - !----------------------------------------------------------------- - ! divergence = e_11 + e_22 - tmp_uvel_ee = uvel(ee(iw)) - tmp_vvel_se = vvel(se(iw)) - tmp_vvel_ee = vvel(ee(iw)) - tmp_vvel_ne = vvel(ne(iw)) - tmp_uvel_ne = uvel(ne(iw)) - tmp_uvel_se = uvel(se(iw)) - - divune = cyp*uvel(iw) - dyt(iw)*tmp_uvel_ee & - + cxp*vvel(iw) - dxt(iw)*tmp_vvel_se - divunw = cym*tmp_uvel_ee + dyt(iw)*uvel(iw) & - + cxp*tmp_vvel_ee - dxt(iw)*tmp_vvel_ne - divusw = cym*tmp_uvel_ne + dyt(iw)*tmp_uvel_se & - + cxm*tmp_vvel_ne + dxt(iw)*tmp_vvel_ee - divuse = cyp*tmp_uvel_se - dyt(iw)*tmp_uvel_ne & - + cxm*tmp_vvel_se + dxt(iw)*vvel(iw) - - ! tension strain rate = e_11 - e_22 - tensionne = -cym*uvel(iw) - dyt(iw)*tmp_uvel_ee & - + cxm*vvel(iw) + dxt(iw)*tmp_vvel_se - tensionnw = -cyp*tmp_uvel_ee + dyt(iw)*uvel(iw) & - + cxm*tmp_vvel_ee + dxt(iw)*tmp_vvel_ne - tensionsw = -cyp*tmp_uvel_ne + dyt(iw)*tmp_uvel_se & - + cxp*tmp_vvel_ne - dxt(iw)*tmp_vvel_ee - tensionse = -cym*tmp_uvel_se - dyt(iw)*tmp_uvel_ne & - + cxp*tmp_vvel_se - dxt(iw)*vvel(iw) - - ! shearing strain rate = 2*e_12 - shearne = -cym*vvel(iw) - dyt(iw)*tmp_vvel_ee & - - cxm*uvel(iw) - dxt(iw)*tmp_uvel_se - shearnw = -cyp*tmp_vvel_ee + dyt(iw)*vvel(iw) & - - cxm*tmp_uvel_ee - dxt(iw)*tmp_uvel_ne - shearsw = -cyp*tmp_vvel_ne + dyt(iw)*tmp_vvel_se & - - cxp*tmp_uvel_ne + dxt(iw)*tmp_uvel_ee - shearse = -cym*tmp_vvel_se - dyt(iw)*tmp_vvel_ne & - - cxp*tmp_uvel_se + dxt(iw)*uvel(iw) - - ! Delta (in the denominator of zeta, eta) - Deltane = sqrt(divune**2 + ecci*(tensionne**2 + shearne**2)) - Deltanw = sqrt(divunw**2 + ecci*(tensionnw**2 + shearnw**2)) - Deltase = sqrt(divuse**2 + ecci*(tensionse**2 + shearse**2)) - Deltasw = sqrt(divusw**2 + ecci*(tensionsw**2 + shearsw**2)) - - !----------------------------------------------------------------- - ! on last subcycle, save quantities for mechanical redistribution - !----------------------------------------------------------------- - divu(iw) = p25*(divune + divunw + divuse + divusw) * tarear(iw) - rdg_conv(iw) = -min(divu(iw),c0) ! Could move outside the entire "kernel" - rdg_shear(iw) = p5*( p25*(Deltane + Deltanw + Deltase + Deltasw) * tarear(iw) -abs(divu(iw)) ) - - ! diagnostic only - ! shear = sqrt(tension**2 + shearing**2) - shear(iw) = p25*tarear(iw)*sqrt( & - (tensionne + tensionnw + tensionse + tensionsw)**2 & - + (shearne + shearnw + shearse + shearsw)**2) - - !----------------------------------------------------------------- - ! replacement pressure/Delta ! kg/s - ! save replacement pressure for principal stress calculation - !----------------------------------------------------------------- - c0ne = strength(iw)/max(Deltane,tinyarea) - c0nw = strength(iw)/max(Deltanw,tinyarea) - c0sw = strength(iw)/max(Deltasw,tinyarea) - c0se = strength(iw)/max(Deltase,tinyarea) - - c1ne = c0ne*arlx1i - c1nw = c0nw*arlx1i - c1sw = c0sw*arlx1i - c1se = c0se*arlx1i - - c0ne = c1ne*ecci - c0nw = c1nw*ecci - c0sw = c1sw*ecci - c0se = c1se*ecci - - !----------------------------------------------------------------- - ! the stresses ! kg/s^2 - ! 
(1) northeast, (2) northwest, (3) southwest, (4) southeast - !----------------------------------------------------------------- - - stressp_1(iw) = (stressp_1(iw)*(c1-arlx1i*revp) + c1ne*(divune*(c1+Ktens) - Deltane*(c1-Ktens))) * denom1 - stressp_2(iw) = (stressp_2(iw)*(c1-arlx1i*revp) + c1nw*(divunw*(c1+Ktens) - Deltanw*(c1-Ktens))) * denom1 - stressp_3(iw) = (stressp_3(iw)*(c1-arlx1i*revp) + c1sw*(divusw*(c1+Ktens) - Deltasw*(c1-Ktens))) * denom1 - stressp_4(iw) = (stressp_4(iw)*(c1-arlx1i*revp) + c1se*(divuse*(c1+Ktens) - Deltase*(c1-Ktens))) * denom1 - - stressm_1(iw) = (stressm_1(iw)*(c1-arlx1i*revp) + c0ne*tensionne*(c1+Ktens)) * denom1 - stressm_2(iw) = (stressm_2(iw)*(c1-arlx1i*revp) + c0nw*tensionnw*(c1+Ktens)) * denom1 - stressm_3(iw) = (stressm_3(iw)*(c1-arlx1i*revp) + c0sw*tensionsw*(c1+Ktens)) * denom1 - stressm_4(iw) = (stressm_4(iw)*(c1-arlx1i*revp) + c0se*tensionse*(c1+Ktens)) * denom1 - - stress12_1(iw) = (stress12_1(iw)*(c1-arlx1i*revp) + c0ne*shearne*p5*(c1+Ktens)) * denom1 - stress12_2(iw) = (stress12_2(iw)*(c1-arlx1i*revp) + c0nw*shearnw*p5*(c1+Ktens)) * denom1 - stress12_3(iw) = (stress12_3(iw)*(c1-arlx1i*revp) + c0sw*shearsw*p5*(c1+Ktens)) * denom1 - stress12_4(iw) = (stress12_4(iw)*(c1-arlx1i*revp) + c0se*shearse*p5*(c1+Ktens)) * denom1 - - !----------------------------------------------------------------- - ! combinations of the stresses for the momentum equation ! kg/s^2 - !----------------------------------------------------------------- - - ssigpn = stressp_1(iw) + stressp_2(iw) - ssigps = stressp_3(iw) + stressp_4(iw) - ssigpe = stressp_1(iw) + stressp_4(iw) - ssigpw = stressp_2(iw) + stressp_3(iw) - ssigp1 =(stressp_1(iw) + stressp_3(iw))*p055 - ssigp2 =(stressp_2(iw) + stressp_4(iw))*p055 - - ssigmn = stressm_1(iw) + stressm_2(iw) - ssigms = stressm_3(iw) + stressm_4(iw) - ssigme = stressm_1(iw) + stressm_4(iw) - ssigmw = stressm_2(iw) + stressm_3(iw) - ssigm1 =(stressm_1(iw) + stressm_3(iw))*p055 - ssigm2 =(stressm_2(iw) + stressm_4(iw))*p055 - - ssig12n = stress12_1(iw) + stress12_2(iw) - ssig12s = stress12_3(iw) + stress12_4(iw) - ssig12e = stress12_1(iw) + stress12_4(iw) - ssig12w = stress12_2(iw) + stress12_3(iw) - ssig121 =(stress12_1(iw) + stress12_3(iw))*p111 - ssig122 =(stress12_2(iw) + stress12_4(iw))*p111 - - csigpne = p111*stressp_1(iw) + ssigp2 + p027*stressp_3(iw) - csigpnw = p111*stressp_2(iw) + ssigp1 + p027*stressp_4(iw) - csigpsw = p111*stressp_3(iw) + ssigp2 + p027*stressp_1(iw) - csigpse = p111*stressp_4(iw) + ssigp1 + p027*stressp_2(iw) - - csigmne = p111*stressm_1(iw) + ssigm2 + p027*stressm_3(iw) - csigmnw = p111*stressm_2(iw) + ssigm1 + p027*stressm_4(iw) - csigmsw = p111*stressm_3(iw) + ssigm2 + p027*stressm_1(iw) - csigmse = p111*stressm_4(iw) + ssigm1 + p027*stressm_2(iw) - - csig12ne = p222*stress12_1(iw) + ssig122 + p055*stress12_3(iw) - csig12nw = p222*stress12_2(iw) + ssig121 + p055*stress12_4(iw) - csig12sw = p222*stress12_3(iw) + ssig122 + p055*stress12_1(iw) - csig12se = p222*stress12_4(iw) + ssig121 + p055*stress12_2(iw) - - str12ew = p5*dxt(iw)*(p333*ssig12e + p166*ssig12w) - str12we = p5*dxt(iw)*(p333*ssig12w + p166*ssig12e) - str12ns = p5*dyt(iw)*(p333*ssig12n + p166*ssig12s) - str12sn = p5*dyt(iw)*(p333*ssig12s + p166*ssig12n) - - !----------------------------------------------------------------- - ! for dF/dx (u momentum) - !----------------------------------------------------------------- - strp_tmp = p25*dyt(iw)*(p333*ssigpn + p166*ssigps) - strm_tmp = p25*dyt(iw)*(p333*ssigmn + p166*ssigms) - - ! 
northeast (iw) - str1(iw) = -strp_tmp - strm_tmp - str12ew & - + dxhy*(-csigpne + csigmne) + dyhx*csig12ne - - ! northwest (i+1,j) - str2(iw) = strp_tmp + strm_tmp - str12we & - + dxhy*(-csigpnw + csigmnw) + dyhx*csig12nw - - strp_tmp = p25*dyt(iw)*(p333*ssigps + p166*ssigpn) - strm_tmp = p25*dyt(iw)*(p333*ssigms + p166*ssigmn) - - ! southeast (i,j+1) - str3(iw) = -strp_tmp - strm_tmp + str12ew & - + dxhy*(-csigpse + csigmse) + dyhx*csig12se - - ! southwest (i+1,j+1) - str4(iw) = strp_tmp + strm_tmp + str12we & - + dxhy*(-csigpsw + csigmsw) + dyhx*csig12sw - - !----------------------------------------------------------------- - ! for dF/dy (v momentum) - !----------------------------------------------------------------- - strp_tmp = p25*dxt(iw)*(p333*ssigpe + p166*ssigpw) - strm_tmp = p25*dxt(iw)*(p333*ssigme + p166*ssigmw) - - ! northeast (i,j) - str5(iw) = -strp_tmp + strm_tmp - str12ns & - - dyhx*(csigpne + csigmne) + dxhy*csig12ne - - ! southeast (i,j+1) - str6(iw) = strp_tmp - strm_tmp - str12sn & - - dyhx*(csigpse + csigmse) + dxhy*csig12se - - strp_tmp = p25*dxt(iw)*(p333*ssigpw + p166*ssigpe) - strm_tmp = p25*dxt(iw)*(p333*ssigmw + p166*ssigme) - - ! northwest (i+1,j) - str7(iw) = -strp_tmp + strm_tmp + str12ns & - - dyhx*(csigpnw + csigmnw) + dxhy*csig12nw - - ! southwest (i+1,j+1) - str8(iw) = strp_tmp - strm_tmp + str12sn & - - dyhx*(csigpsw + csigmsw) + dxhy*csig12sw - enddo -#ifdef _OPENACC - !$acc end parallel -#endif - end subroutine stress_l - -!---------------------------------------------------------------------------- - - subroutine stepu_iter(NA_len,rhow, & - lb,ub,Cw,aiu,uocn,vocn,forcex,forcey,umassdti,fm,uarear,Tbu, & - uvel_init,vvel_init,uvel,vvel, & - str1,str2,str3,str4,str5,str6,str7,str8, nw,sw,se,skipme) - - use ice_kinds_mod - use ice_dyn_shared, only: brlx, revp - use ice_constants, only: c0, c1 - - implicit none - - integer (kind=int_kind), intent(in) :: NA_len - real (kind=dbl_kind), intent(in) :: rhow - integer(kind=int_kind),intent(in) :: lb,ub - logical(kind=log_kind),intent(in), dimension(:) :: skipme - integer(kind=int_kind),dimension(:), intent(in), contiguous :: nw,sw,se - real(kind=dbl_kind),dimension(:), intent(in), contiguous :: & - uvel_init, vvel_init, aiu, forcex, forcey, umassdti, Tbu, & - uocn, vocn, fm, uarear,Cw - real(kind=DBL_KIND),dimension(:), intent(in), contiguous :: & - str1,str2,str3,str4,str5,str6,str7,str8 - real(kind=dbl_kind),dimension(:), intent(inout), contiguous :: & - uvel,vvel - real (kind=dbl_kind), parameter :: & - cosw = c1 , & ! cos(ocean turning angle) ! turning angle = 0 - sinw = c0 - - !-- local variables - - integer (kind=int_kind) :: iw,il,iu - real (kind=dbl_kind) :: uold, vold, vrel,cca,ccb,ab2,cc1,cc2,taux,tauy,Cb - real (kind=dbl_kind) :: tmp_str2_nw,tmp_str3_se,tmp_str4_sw, tmp_strintx - real (kind=dbl_kind) :: tmp_str6_se,tmp_str7_nw,tmp_str8_sw, tmp_strinty - real (kind=dbl_kind) :: waterx,watery - real (kind=dbl_kind) :: u0 = 5.e-5_dbl_kind ! 
residual velocity for seabed stress (m/s) - - character(len=*), parameter :: subname = '(stepu_iter)' - !--------------------------------------- + uvel(iw) = (cca * cc1 + ccb * cc2) / ab2 + vvel(iw) = (cca * cc2 - ccb * cc1) / ab2 + end do #ifdef _OPENACC - !$acc parallel & - !$acc present(Cw,aiu,uocn,vocn,forcex,forcey,umassdti,fm,uarear,Tbu, & - !$acc uvel_init,vvel_init,nw,sw,se,skipme, & - !$acc str1,str2,str3,str4,str5,str6,str7,str8,uvel,vvel) - !$acc loop - do iw = 1,NA_len -#else - call domp_get_domain(lb,ub,il,iu) - do iw = il, iu -#endif - if (skipme(iw)) cycle - uold = uvel(iw) - vold = vvel(iw) - vrel = aiu(iw)*rhow*Cw(iw)*sqrt((uocn(iw)-uold)**2+(vocn(iw)-vold)**2) - waterx = uocn(iw)*cosw - vocn(iw)*sinw*sign(c1,fm(iw)) - watery = vocn(iw)*cosw + uocn(iw)*sinw*sign(c1,fm(iw)) - taux = vrel*waterx - tauy = vrel*watery - Cb = Tbu(iw) / (sqrt(uold**2 + vold**2) + u0) - cca = (brlx + revp)*umassdti(iw) + vrel * cosw + Cb - ccb = fm(iw) + sign(c1,fm(iw)) * vrel * sinw - ab2 = cca**2 + ccb**2 - ! southeast(i,j+1) = se - ! northwest(i+1,j) = nw - ! southwest(i+1,j+1) = sw - tmp_str2_nw = str2(nw(iw)) - tmp_str3_se = str3(se(iw)) - tmp_str4_sw = str4(sw(iw)) - tmp_str6_se = str6(se(iw)) - tmp_str7_nw = str7(nw(iw)) - tmp_str8_sw = str8(sw(iw)) - - tmp_strintx = uarear(iw)*(str1(iw)+tmp_str2_nw+tmp_str3_se+tmp_str4_sw) - tmp_strinty = uarear(iw)*(str5(iw)+tmp_str6_se+tmp_str7_nw+tmp_str8_sw) - cc1 = tmp_strintx + forcex(iw) + taux & - + umassdti(iw)*(brlx*uold + revp*uvel_init(iw)) - cc2 = tmp_strinty + forcey(iw) + tauy & - + umassdti(iw)*(brlx*vold + revp*vvel_init(iw)) - uvel(iw) = (cca*cc1 + ccb*cc2) / ab2 - vvel(iw) = (cca*cc2 - ccb*cc1) / ab2 - enddo -#ifdef _OPENACC - !$acc end parallel + !$acc end parallel #endif - end subroutine stepu_iter - -!---------------------------------------------------------------------------- - - subroutine stepu_last(NA_len, rhow, & - lb,ub,Cw,aiu,uocn,vocn,forcex,forcey,umassdti,fm,uarear,Tbu, & - strintx,strinty,taubx,tauby, & - uvel_init,vvel_init,uvel,vvel, & - str1,str2,str3,str4,str5,str6,str7,str8, nw,sw,se,skipme) - - use ice_kinds_mod - use ice_constants, only: c0, c1 - use ice_dyn_shared, only: brlx, revp, seabed_stress - - implicit none - - integer (kind=int_kind), intent(in) :: NA_len - real (kind=dbl_kind), intent(in) :: rhow - logical(kind=log_kind),intent(in), dimension(:) :: skipme - integer(kind=int_kind),intent(in) :: lb,ub - integer(kind=int_kind),dimension(:), intent(in), contiguous :: nw,sw,se - real(kind=dbl_kind),dimension(:), intent(in), contiguous :: & - uvel_init, vvel_init, aiu, forcex, forcey, umassdti, Tbu, & - uocn, vocn, fm, uarear,Cw - real(kind=DBL_KIND),dimension(:), intent(in), contiguous :: & - str1,str2,str3,str4,str5,str6,str7,str8 - real(kind=dbl_kind),dimension(:), intent(inout), contiguous :: & - uvel,vvel, strintx,strinty, taubx,tauby - real (kind=dbl_kind), parameter :: & - cosw = c1 , & ! cos(ocean turning angle) ! turning angle = 0 - sinw = c0 - - !-- local variables - - integer (kind=int_kind) :: iw,il,iu - real (kind=dbl_kind) :: uold, vold, vrel,cca,ccb,ab2,cc1,cc2,taux,tauy,Cb - real (kind=dbl_kind) :: tmp_str2_nw,tmp_str3_se,tmp_str4_sw - real (kind=dbl_kind) :: tmp_str6_se,tmp_str7_nw,tmp_str8_sw - real (kind=dbl_kind) :: waterx,watery - real (kind=dbl_kind) :: u0 = 5.e-5_dbl_kind ! 
residual velocity for seabed stress (m/s) - - character(len=*), parameter :: subname = '(stepu_last)' - !--------------------------------------- + end subroutine stepu_iter + +!======================================================================= + + subroutine stepu_last(NA_len, rhow, lb, ub, Cw, aiu, uocn, vocn, & + forcex, forcey, umassdti, fm, uarear, Tbu, uvel_init, vvel_init, & + uvel, vvel, str1, str2, str3, str4, str5, str6, str7, str8, nw, & + sw, sse, skipucell, strintx, strinty, taubx, tauby) + + use ice_kinds_mod + use ice_constants, only : c0, c1 + use ice_dyn_shared, only : brlx, revp, u0, cosw, sinw, & + seabed_stress + + implicit none + + integer(kind=int_kind), intent(in) :: NA_len, lb, ub + real(kind=dbl_kind), intent(in) :: rhow + logical(kind=log_kind), intent(in), dimension(:) :: skipucell + integer(kind=int_kind), dimension(:), intent(in), contiguous :: & + nw, sw, sse + real(kind=dbl_kind), dimension(:), intent(in), contiguous :: & + uvel_init, vvel_init, aiu, forcex, forcey, umassdti, Tbu, & + uocn, vocn, fm, uarear, Cw, str1, str2, str3, str4, str5, & + str6, str7, str8 + real(kind=dbl_kind), dimension(:), intent(inout), contiguous :: & + uvel, vvel, strintx, strinty, taubx, tauby + + ! local variables + + integer(kind=int_kind) :: iw, il, iu + real(kind=dbl_kind) :: uold, vold, vrel, cca, ccb, ab2, cc1, & + cc2, taux, tauy, Cb, tmp_str2_nw, tmp_str3_sse, tmp_str4_sw, & + tmp_str6_sse, tmp_str7_nw, tmp_str8_sw, waterx, watery + + character(len=*), parameter :: subname = '(stepu_last)' #ifdef _OPENACC - !$acc parallel & - !$acc present(Cw,aiu,uocn,vocn,forcex,forcey,umassdti,fm,uarear,Tbu, & - !$acc strintx,strinty,taubx,tauby,uvel_init,vvel_init,nw,sw,se,skipme, & - !$acc str1,str2,str3,str4,str5,str6,str7,str8,uvel,vvel ) - !$acc loop - do iw = 1,NA_len + !$acc parallel & + !$acc present(Cw, aiu, uocn, vocn, forcex, forcey, umassdti, fm, & + !$acc uarear, Tbu, uvel_init, vvel_init, nw, sw, sse, skipucell, & + !$acc str1, str2, str3, str4, str5, str6, str7, str8, uvel, & + !$acc vvel, strintx, strinty, taubx, tauby) + !$acc loop + do iw = 1, NA_len #else - call domp_get_domain(lb,ub,il,iu) - do iw = il, iu + call domp_get_domain(lb, ub, il, iu) + do iw = il, iu #endif - if (skipme(iw)) cycle - uold = uvel(iw) - vold = vvel(iw) - vrel = aiu(iw)*rhow*Cw(iw)*sqrt((uocn(iw)-uold)**2+(vocn(iw)-vold)**2) - waterx = uocn(iw)*cosw - vocn(iw)*sinw*sign(c1,fm(iw)) - watery = vocn(iw)*cosw + uocn(iw)*sinw*sign(c1,fm(iw)) - taux = vrel*waterx - tauy = vrel*watery - Cb = Tbu(iw) / (sqrt(uold**2 + vold**2) + u0) - cca = (brlx + revp)*umassdti(iw) + vrel * cosw + Cb - ccb = fm(iw) + sign(c1,fm(iw)) * vrel * sinw - ab2 = cca**2 + ccb**2 - ! southeast(i,j+1) = se - ! northwest(i+1,j) = nw - ! southwest(i+1,j+1) = sw - tmp_str2_nw = str2(nw(iw)) - tmp_str3_se = str3(se(iw)) - tmp_str4_sw = str4(sw(iw)) - tmp_str6_se = str6(se(iw)) - tmp_str7_nw = str7(nw(iw)) - tmp_str8_sw = str8(sw(iw)) - - strintx(iw) = uarear(iw)*(str1(iw)+tmp_str2_nw+tmp_str3_se+tmp_str4_sw) - strinty(iw) = uarear(iw)*(str5(iw)+tmp_str6_se+tmp_str7_nw+tmp_str8_sw) - cc1 = strintx(iw) + forcex(iw) + taux & - + umassdti(iw)*(brlx*uold + revp*uvel_init(iw)) - cc2 = strinty(iw) + forcey(iw) + tauy & - + umassdti(iw)*(brlx*vold + revp*vvel_init(iw)) - uvel(iw) = (cca*cc1 + ccb*cc2) / ab2 - vvel(iw) = (cca*cc2 - ccb*cc1) / ab2 - ! 
calculate seabed stress component for outputs
-         if ( seabed_stress ) then
-          taubx(iw) = -uvel(iw)*Tbu(iw) / (sqrt(uold**2 + vold**2) + u0)
-          tauby(iw) = -vvel(iw)*Tbu(iw) / (sqrt(uold**2 + vold**2) + u0)
-         endif
-      enddo
+
+         if (skipucell(iw)) cycle
+
+         uold = uvel(iw)
+         vold = vvel(iw)
+
+         vrel = aiu(iw) * rhow * Cw(iw) * sqrt((uocn(iw) - uold)**2 + (vocn(iw) - vold)**2)
+
+         waterx = uocn(iw) * cosw - vocn(iw) * sinw * sign(c1, fm(iw))
+         watery = vocn(iw) * cosw + uocn(iw) * sinw * sign(c1, fm(iw))
+
+         taux = vrel * waterx
+         tauy = vrel * watery
+
+         Cb = Tbu(iw) / (sqrt(uold**2 + vold**2) + u0)
+
+         ! the velocity update is an implicit solve of the 2x2 system
+         !    cca * u - ccb * v = cc1
+         !    ccb * u + cca * v = cc2
+         ! whose determinant is ab2 = cca**2 + ccb**2
+         cca = (brlx + revp) * umassdti(iw) + vrel * cosw + Cb
+         ccb = fm(iw) + sign(c1, fm(iw)) * vrel * sinw
+
+         ab2 = cca**2 + ccb**2
+
+         tmp_str2_nw = str2(nw(iw))
+         tmp_str3_sse = str3(sse(iw))
+         tmp_str4_sw = str4(sw(iw))
+         tmp_str6_sse = str6(sse(iw))
+         tmp_str7_nw = str7(nw(iw))
+         tmp_str8_sw = str8(sw(iw))
+
+         strintx(iw) = uarear(iw) * (str1(iw) + tmp_str2_nw + tmp_str3_sse + tmp_str4_sw)
+         strinty(iw) = uarear(iw) * (str5(iw) + tmp_str6_sse + tmp_str7_nw + tmp_str8_sw)
+
+         cc1 = strintx(iw) + forcex(iw) + taux &
+             + umassdti(iw) * (brlx * uold + revp * uvel_init(iw))
+         cc2 = strinty(iw) + forcey(iw) + tauy &
+             + umassdti(iw) * (brlx * vold + revp * vvel_init(iw))
+
+         uvel(iw) = (cca * cc1 + ccb * cc2) / ab2
+         vvel(iw) = (cca * cc2 - ccb * cc1) / ab2
+
+         ! calculate seabed stress component for outputs
+         if (seabed_stress) then
+            taubx(iw) = -uvel(iw) * Tbu(iw) / (sqrt(uold**2 + vold**2) + u0)
+            tauby(iw) = -vvel(iw) * Tbu(iw) / (sqrt(uold**2 + vold**2) + u0)
+         end if
+
+      end do
#ifdef _OPENACC
-      !$acc end parallel
+      !$acc end parallel
#endif
-      end subroutine stepu_last
+   end subroutine stepu_last

-!----------------------------------------------------------------------------
+!=======================================================================

-      subroutine evp1d_halo_update(NAVEL_len,lb,ub,uvel,vvel, halo_parent)
+   subroutine evp1d_halo_update(NAVEL_len, lb, ub, uvel, vvel, &
+      halo_parent)

-      use ice_kinds_mod
+      use ice_kinds_mod

-      implicit none
+      implicit none

-      integer (kind=int_kind), intent(in) :: NAVEL_len
-      integer(kind=int_kind),intent(in) :: lb,ub
-      integer(kind=int_kind),dimension(:), intent(in), contiguous :: halo_parent
-      real(kind=dbl_kind),dimension(:), intent(inout), contiguous :: uvel,vvel
+      integer(kind=int_kind), intent(in) :: NAVEL_len, lb, ub
+      integer(kind=int_kind), dimension(:), intent(in), contiguous :: &
+         halo_parent
+      real(kind=dbl_kind), dimension(:), intent(inout), contiguous :: &
+         uvel, vvel

-      !-- local variables
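+      ! each active halo point copies uvel/vvel from its parent
+      ! point; halo_parent(iw) == 0 marks points without a parent,
+      ! which are left unchanged (see the cycle statements below)
+
+      ! 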
local variables - integer (kind=int_kind) :: iw,il,iu + integer (kind=int_kind) :: iw, il, iu - character(len=*), parameter :: subname = '(evp1d_halo_update)' - !--------------------------------------- + character(len=*), parameter :: subname = '(evp1d_halo_update)' #ifdef _OPENACC - !$acc parallel & - !$acc present(uvel,vvel) & - !$acc loop - do iw = 1,NAVEL_len + !$acc parallel & + !$acc present(uvel, vvel) & + !$acc loop + do iw = 1, NAVEL_len + if (halo_parent(iw) == 0) cycle + uvel(iw) = uvel(halo_parent(iw)) + vvel(iw) = vvel(halo_parent(iw)) + end do + !$acc end parallel #else - call domp_get_domain(lb,ub,il,iu) - do iw = il, iu -#endif - if (halo_parent(iw)==0) cycle - uvel(iw) = uvel(halo_parent(iw)) - vvel(iw) = vvel(halo_parent(iw)) - enddo -#ifdef _OPENACC - !$acc end parallel + call domp_get_domain(lb, ub, il, iu) + do iw = il, iu + if (halo_parent(iw) == 0) cycle + uvel(iw) = uvel(halo_parent(iw)) + vvel(iw) = vvel(halo_parent(iw)) + end do + call domp_get_domain(ub + 1, NAVEL_len, il, iu) + do iw = il, iu + if (halo_parent(iw) == 0) cycle + uvel(iw) = uvel(halo_parent(iw)) + vvel(iw) = vvel(halo_parent(iw)) + end do #endif - end subroutine evp1d_halo_update - -!---------------------------------------------------------------------------- - -!former end module bench_v2 - -!=============================================================================== -!---------------------------------------------------------------------------- - - subroutine alloc1d(na) - - implicit none - - integer(kind=int_kind),intent(in) :: na - integer(kind=int_kind) :: ierr,nb - - character(len=*), parameter :: subname = '(alloc1d)' - !--------------------------------------- - - nb=na - allocate( & - ! U+T cells - ! Helper index for neighbours - indj(1:na),indi(1:na), & - ee(1:na),ne(1:na),se(1:na), & - nw(1:nb),sw(1:nb),sse(1:nb), & - skipucell(1:na), & - ! Grid distances: HTE,HTN + "-1 neighbours" - HTE(1:na),HTN(1:na), & - HTEm1(1:na),HTNm1(1:na), & - ! T cells -!v1 dxhy(1:na),dyhx(1:na),cyp(1:na),cxp(1:na),cym(1:na),cxm(1:na),tinyarea(1:na),& - strength(1:na),dxt(1:na),dyt(1:na), tarear(1:na), & - stressp_1(1:na), stressp_2(1:na), stressp_3(1:na), stressp_4(1:na), & - stressm_1(1:na), stressm_2(1:na), stressm_3(1:na), stressm_4(1:na), & - stress12_1(1:na),stress12_2(1:na),stress12_3(1:na),stress12_4(1:na),& - divu(1:na),rdg_conv(1:na),rdg_shear(1:na),shear(1:na), & - ! 
U cells -!v1 waterx(1:nb),watery(1:nb), & - cdn_ocn(1:nb),aiu(1:nb),uocn(1:nb),vocn(1:nb), & - forcex(1:nb),forcey(1:nb),Tbu(1:nb), & - umassdti(1:nb),fm(1:nb),uarear(1:nb), & - strintx(1:nb),strinty(1:nb), & - uvel_init(1:nb),vvel_init(1:nb), & - taubx(1:nb),tauby(1:nb), & - stat=ierr) - - if (ierr/=0) call abort_ice(subname//': ERROR allocating 1D') - - end subroutine alloc1d - -!---------------------------------------------------------------------------- - - subroutine alloc1d_navel(navel) - - implicit none - - integer(kind=int_kind),intent(in) :: navel - integer(kind=int_kind) :: ierr - - character(len=*), parameter :: subname = '(alloc1d_navel)' - !--------------------------------------- - - allocate( & - uvel(1:navel),vvel(1:navel), indij(1:navel), halo_parent(1:navel), & - str1(1:navel),str2(1:navel),str3(1:navel),str4(1:navel), & - str5(1:navel),str6(1:navel),str7(1:navel),str8(1:navel), & - stat=ierr) - if (ierr/=0) call abort_ice(subname// ': Error allocating 1D navel') - - end subroutine alloc1d_navel - -!---------------------------------------------------------------------------- - - subroutine dealloc1d - - implicit none - - integer(kind=int_kind) :: ierr - - character(len=*), parameter :: subname = '(dealloc1d)' - !--------------------------------------- - - deallocate( & - ! U+T cells - ! Helper index for neighbours - indj,indi, & - ee,ne,se, & - nw,sw,sse, & - skipucell, & - ! T cells - strength,dxt,dyt,tarear, & - stressp_1, stressp_2, stressp_3, stressp_4, & - stressm_1, stressm_2, stressm_3, stressm_4, & - stress12_1,stress12_2,stress12_3,stress12_4,& - str1, str2,str3,str4, & - str5, str6,str7,str8, & - divu,rdg_conv,rdg_shear,shear, & - ! U cells - cdn_ocn,aiu,uocn,vocn, & - forcex,forcey,Tbu, & - umassdti,fm,uarear, & - strintx,strinty, & - uvel_init,vvel_init, & - taubx,tauby, & - ! NAVEL - uvel,vvel, indij, halo_parent, & - stat=ierr) - - if (ierr/=0) call abort_ice(subname//': Error de-allocating 1D') - -!v1 if (allocated(tinyarea)) then -!v1 deallocate( & -!v1 dxhy,dyhx,cyp,cxp,cym,cxm,tinyarea, & -!v1 waterx,watery, & -!v1 stat=ierr) -!v1 if (ierr/=0) call abort_ice(subname//': Error de-allocating 1D, v1') -!v1 endif - - if (allocated(HTE)) then - deallocate( & - ! 
Grid distances: HTE,HTN + "-1 neighbours" - HTE,HTN, HTEm1,HTNm1, & - stat=ierr) - if (ierr/=0) call abort_ice(subname//': Error de-allocating 1D, v2') - endif - - end subroutine dealloc1d - -!---------------------------------------------------------------------------- - - subroutine evp_copyin_v2(nx,ny,nblk,nx_glob,ny_glob, & - I_HTE,I_HTN, & -!v1 I_dxhy,I_dyhx,I_cyp,I_cxp,I_cym,I_cxm,I_tinyarea, & -!v1 I_waterx,I_watery, & - I_icetmask,I_iceumask, & - I_cdn_ocn,I_aiu,I_uocn,I_vocn,I_forcex,I_forcey,I_Tbu, & - I_umassdti,I_fm,I_uarear,I_tarear,I_strintx,I_strinty,I_uvel_init,I_vvel_init, & - I_strength,I_uvel,I_vvel,I_dxt,I_dyt, & - I_stressp_1 ,I_stressp_2, I_stressp_3, I_stressp_4, & - I_stressm_1 ,I_stressm_2, I_stressm_3, I_stressm_4, & - I_stress12_1,I_stress12_2,I_stress12_3,I_stress12_4 ) - - use ice_gather_scatter, only: gather_global_ext - use ice_domain, only: distrb_info - use ice_communicate, only: my_task, master_task - - implicit none - - integer(int_kind), intent(in) :: nx, ny, nblk, nx_glob, ny_glob - integer (kind=int_kind),dimension (nx,ny,nblk), intent(in) :: I_icetmask - logical (kind=log_kind),dimension (nx,ny,nblk), intent(in) :: I_iceumask - real (kind=dbl_kind), dimension(nx,ny,nblk), intent(in) :: & - I_HTE,I_HTN, & -!v1 I_dxhy,I_dyhx,I_cyp,I_cxp,I_cym,I_cxm,I_tinyarea, & -!v1 I_waterx,I_watery, & - I_cdn_ocn,I_aiu,I_uocn,I_vocn,I_forcex,I_forcey,I_Tbu, & - I_umassdti,I_fm,I_uarear,I_tarear,I_strintx,I_strinty,I_uvel_init,I_vvel_init, & - I_strength,I_uvel,I_vvel,I_dxt,I_dyt, & - I_stressp_1, I_stressp_2, I_stressp_3, I_stressp_4, & - I_stressm_1, I_stressm_2, I_stressm_3, I_stressm_4, & - I_stress12_1,I_stress12_2,I_stress12_3,I_stress12_4 - - !-- local variables - - integer (kind=int_kind),dimension (nx_glob,ny_glob) :: G_icetmask - logical (kind=log_kind),dimension (nx_glob,ny_glob) :: G_iceumask - real (kind=dbl_kind), dimension(nx_glob,ny_glob) :: & - G_HTE,G_HTN, & -!v1 G_dxhy,G_dyhx,G_cyp,G_cxp,G_cym,G_cxm,G_tinyarea, & -!v1 G_waterx,G_watery, & - G_cdn_ocn,G_aiu,G_uocn,G_vocn,G_forcex,G_forcey,G_Tbu, & - G_umassdti,G_fm,G_uarear,G_tarear,G_strintx,G_strinty,G_uvel_init,G_vvel_init, & - G_strength,G_uvel,G_vvel,G_dxt,G_dyt, & - G_stressp_1, G_stressp_2, G_stressp_3, G_stressp_4, & - G_stressm_1, G_stressm_2, G_stressm_3, G_stressm_4, & - G_stress12_1,G_stress12_2,G_stress12_3,G_stress12_4 - integer(kind=int_kind) :: na, navel - - character(len=*), parameter :: subname = '(evp_copyin_v2)' - !--------------------------------------- - !-- Gather data into one single block -- - - call gather_global_ext(G_icetmask, I_icetmask, master_task, distrb_info) - call gather_global_ext(G_iceumask, I_iceumask, master_task, distrb_info) - call gather_global_ext(G_HTE, I_HTE, master_task, distrb_info) - call gather_global_ext(G_HTN, I_HTN, master_task, distrb_info) -!v1 call gather_global_ext(G_dxhy, I_dxhy, master_task, distrb_info) -!v1 call gather_global_ext(G_dyhx, I_dyhx, master_task, distrb_info) -!v1 call gather_global_ext(G_cyp, I_cyp, master_task, distrb_info) -!v1 call gather_global_ext(G_cxp, I_cxp, master_task, distrb_info) -!v1 call gather_global_ext(G_cym, I_cym, master_task, distrb_info) -!v1 call gather_global_ext(G_cxm, I_cxm, master_task, distrb_info) -!v1 call gather_global_ext(G_tinyarea, I_tinyarea, master_task, distrb_info) -!v1 call gather_global_ext(G_waterx, I_waterx, master_task, distrb_info) -!v1 call gather_global_ext(G_watery, I_watery, master_task, distrb_info) - call gather_global_ext(G_cdn_ocn, I_cdn_ocn, master_task, distrb_info) - call 
gather_global_ext(G_aiu, I_aiu, master_task, distrb_info) - call gather_global_ext(G_uocn, I_uocn, master_task, distrb_info) - call gather_global_ext(G_vocn, I_vocn, master_task, distrb_info) - call gather_global_ext(G_forcex, I_forcex, master_task, distrb_info) - call gather_global_ext(G_forcey, I_forcey, master_task, distrb_info) - call gather_global_ext(G_Tbu, I_Tbu, master_task, distrb_info) - call gather_global_ext(G_umassdti, I_umassdti, master_task, distrb_info) - call gather_global_ext(G_fm, I_fm, master_task, distrb_info) - call gather_global_ext(G_uarear, I_uarear, master_task, distrb_info) - call gather_global_ext(G_tarear, I_tarear, master_task, distrb_info) - call gather_global_ext(G_strintx, I_strintx, master_task, distrb_info) - call gather_global_ext(G_strinty, I_strinty, master_task, distrb_info) - call gather_global_ext(G_uvel_init, I_uvel_init, master_task, distrb_info) - call gather_global_ext(G_vvel_init, I_vvel_init, master_task, distrb_info) - call gather_global_ext(G_strength, I_strength, master_task, distrb_info) - call gather_global_ext(G_uvel, I_uvel, master_task, distrb_info) - call gather_global_ext(G_vvel, I_vvel, master_task, distrb_info) - call gather_global_ext(G_dxt, I_dxt, master_task, distrb_info) - call gather_global_ext(G_dyt, I_dyt, master_task, distrb_info) - call gather_global_ext(G_stressp_1, I_stressp_1, master_task, distrb_info) - call gather_global_ext(G_stressp_2, I_stressp_2, master_task, distrb_info) - call gather_global_ext(G_stressp_3, I_stressp_3, master_task, distrb_info) - call gather_global_ext(G_stressp_4, I_stressp_4, master_task, distrb_info) - call gather_global_ext(G_stressm_1, I_stressm_1, master_task, distrb_info) - call gather_global_ext(G_stressm_2, I_stressm_2, master_task, distrb_info) - call gather_global_ext(G_stressm_3, I_stressm_3, master_task, distrb_info) - call gather_global_ext(G_stressm_4, I_stressm_4, master_task, distrb_info) - call gather_global_ext(G_stress12_1, I_stress12_1, master_task, distrb_info) - call gather_global_ext(G_stress12_2, I_stress12_2, master_task, distrb_info) - call gather_global_ext(G_stress12_3, I_stress12_3, master_task, distrb_info) - call gather_global_ext(G_stress12_4, I_stress12_4, master_task, distrb_info) - - !-- All calculations has to be done on the master-task -- - - if (my_task == master_task) then - !-- Find number of active points and allocate vectors -- - call calc_na(nx_glob,ny_glob,na,G_icetmask) - call alloc1d(na) - call calc_2d_indices(nx_glob,ny_glob,na, G_icetmask, G_iceumask) - call calc_navel(nx_glob,ny_glob,na,navel) - call alloc1d_navel(navel) -!MHRI !$OMP PARALLEL DEFAULT(shared) - call numainit(1,na,navel) -!MHRI !$OMP END PARALLEL - ! Remap 2d to 1d and fill in - call convert_2d_1d(nx_glob,ny_glob,na,navel, & - G_HTE,G_HTN, & -!v1 G_dxhy,G_dyhx,G_cyp,G_cxp,G_cym,G_cxm,G_tinyarea, & -!v1 G_waterx,G_watery, & - G_cdn_ocn,G_aiu,G_uocn,G_vocn,G_forcex,G_forcey,G_Tbu, & - G_umassdti,G_fm,G_uarear,G_tarear,G_strintx,G_strinty,G_uvel_init,G_vvel_init, & - G_strength,G_uvel,G_vvel,G_dxt,G_dyt, & - G_stressp_1, G_stressp_2, G_stressp_3, G_stressp_4, & - G_stressm_1, G_stressm_2, G_stressm_3, G_stressm_4, & - G_stress12_1,G_stress12_2,G_stress12_3,G_stress12_4 ) - call calc_halo_parent(nx_glob,ny_glob,na,navel, G_icetmask) - NA_len=na - NAVEL_len=navel - endif - - !-- write check -!if (1 == 1) then -! write(nu_diag,*) subname,' MHRI: INDICES start:' -! write(nu_diag,*) 'na,navel ', na,navel -! write(nu_diag,*) 'Min/max ee', minval(ee(1:na)), maxval(ee(1:na)) -! 
write(nu_diag,*) 'Min/max ne', minval(ne(1:na)), maxval(ne(1:na)) -! write(nu_diag,*) 'Min/max se', minval(se(1:na)), maxval(se(1:na)) -! write(nu_diag,*) 'Min/max nw', minval(nw(1:na)), maxval(nw(1:na)) -! write(nu_diag,*) 'Min/max sw', minval(sw(1:na)), maxval(sw(1:na)) -! write(nu_diag,*) 'Min/max sse', minval(sse(1:na)), maxval(sse(1:na)) -! write(nu_diag,*) subname,' MHRI: INDICES end:' -!endif - - end subroutine evp_copyin_v2 - -!---------------------------------------------------------------------------- - - subroutine evp_copyout(nx,ny,nblk,nx_glob,ny_glob, & - I_uvel,I_vvel, I_strintx,I_strinty, & - I_stressp_1, I_stressp_2, I_stressp_3, I_stressp_4, & - I_stressm_1, I_stressm_2, I_stressm_3, I_stressm_4, & - I_stress12_1,I_stress12_2,I_stress12_3,I_stress12_4, & - I_divu,I_rdg_conv,I_rdg_shear,I_shear,I_taubx,I_tauby ) - - use ice_constants, only : c0 - use ice_gather_scatter, only: scatter_global_ext - use ice_domain, only: distrb_info - use ice_communicate, only: my_task, master_task - - implicit none - - integer(int_kind), intent(in) :: nx,ny,nblk, nx_glob,ny_glob - real(dbl_kind), dimension(nx,ny,nblk), intent(out) :: & - I_uvel,I_vvel, I_strintx,I_strinty, & - I_stressp_1, I_stressp_2, I_stressp_3, I_stressp_4, & - I_stressm_1, I_stressm_2, I_stressm_3, I_stressm_4, & - I_stress12_1,I_stress12_2,I_stress12_3,I_stress12_4, & - I_divu,I_rdg_conv, I_rdg_shear,I_shear, I_taubx,I_tauby - - !-- local variables - - real(dbl_kind), dimension(nx_glob,ny_glob) :: & - G_uvel,G_vvel, G_strintx,G_strinty, & - G_stressp_1, G_stressp_2, G_stressp_3, G_stressp_4, & - G_stressm_1, G_stressm_2, G_stressm_3, G_stressm_4, & - G_stress12_1,G_stress12_2,G_stress12_3,G_stress12_4, & - G_divu,G_rdg_conv, G_rdg_shear,G_shear, G_taubx,G_tauby - integer(int_kind) :: i,j,iw, nx_block - - character(len=*), parameter :: subname = '(evp_copyout)' - !--------------------------------------- - ! Remap 1d to 2d and fill in - nx_block=nx_glob ! Total block size in x-dir - - if (my_task == master_task) then - G_uvel = c0 - G_vvel = c0 - G_strintx = c0 - G_strinty = c0 - G_stressp_1 = c0 - G_stressp_2 = c0 - G_stressp_3 = c0 - G_stressp_4 = c0 - G_stressm_1 = c0 - G_stressm_2 = c0 - G_stressm_3 = c0 - G_stressm_4 = c0 - G_stress12_1 = c0 - G_stress12_2 = c0 - G_stress12_3 = c0 - G_stress12_4 = c0 - G_divu = c0 - G_rdg_conv = c0 - G_rdg_shear = c0 - G_shear = c0 - G_taubx = c0 - G_tauby = c0 - !$OMP PARALLEL PRIVATE(iw,i,j) - do iw=1,NAVEL_len - j=int((indij(iw)-1)/(nx_block))+1 - i=indij(iw)-(j-1)*nx_block - G_uvel(i,j) = uvel(iw) - G_vvel(i,j) = vvel(iw) - enddo - !$OMP END PARALLEL - !$OMP PARALLEL PRIVATE(iw,i,j) - do iw=1,NA_len - i=indi(iw) - j=indj(iw) -! G_uvel(i,j) = uvel(iw) ! done above -! G_vvel(i,j) = vvel(iw) ! 
done above - G_strintx(i,j) = strintx(iw) - G_strinty(i,j) = strinty(iw) - G_stressp_1(i,j) = stressp_1(iw) - G_stressp_2(i,j) = stressp_2(iw) - G_stressp_3(i,j) = stressp_3(iw) - G_stressp_4(i,j) = stressp_4(iw) - G_stressm_1(i,j) = stressm_1(iw) - G_stressm_2(i,j) = stressm_2(iw) - G_stressm_3(i,j) = stressm_3(iw) - G_stressm_4(i,j) = stressm_4(iw) - G_stress12_1(i,j) = stress12_1(iw) - G_stress12_2(i,j) = stress12_2(iw) - G_stress12_3(i,j) = stress12_3(iw) - G_stress12_4(i,j) = stress12_4(iw) - G_divu(i,j) = divu(iw) - G_rdg_conv(i,j) = rdg_conv(iw) - G_rdg_shear(i,j) = rdg_shear(iw) - G_shear(i,j) = shear(iw) - G_taubx(i,j) = taubx(iw) - G_tauby(i,j) = tauby(iw) - enddo - !$OMP END PARALLEL - call dealloc1d() - endif - - !-- Scatter data into blocks -- - !-- has to be done on all tasks -- - - call scatter_global_ext(I_uvel, G_uvel, master_task, distrb_info) - call scatter_global_ext(I_vvel, G_vvel, master_task, distrb_info) - call scatter_global_ext(I_strintx, G_strintx, master_task, distrb_info) - call scatter_global_ext(I_strinty, G_strinty, master_task, distrb_info) - call scatter_global_ext(I_stressp_1, G_stressp_1, master_task, distrb_info) - call scatter_global_ext(I_stressp_2, G_stressp_2, master_task, distrb_info) - call scatter_global_ext(I_stressp_3, G_stressp_3, master_task, distrb_info) - call scatter_global_ext(I_stressp_4, G_stressp_4, master_task, distrb_info) - call scatter_global_ext(I_stressm_1, G_stressm_1, master_task, distrb_info) - call scatter_global_ext(I_stressm_2, G_stressm_2, master_task, distrb_info) - call scatter_global_ext(I_stressm_3, G_stressm_3, master_task, distrb_info) - call scatter_global_ext(I_stressm_4, G_stressm_4, master_task, distrb_info) - call scatter_global_ext(I_stress12_1, G_stress12_1, master_task, distrb_info) - call scatter_global_ext(I_stress12_2, G_stress12_2, master_task, distrb_info) - call scatter_global_ext(I_stress12_3, G_stress12_3, master_task, distrb_info) - call scatter_global_ext(I_stress12_4, G_stress12_4, master_task, distrb_info) - call scatter_global_ext(I_divu, G_divu, master_task, distrb_info) - call scatter_global_ext(I_rdg_conv, G_rdg_conv, master_task, distrb_info) - call scatter_global_ext(I_rdg_shear, G_rdg_shear, master_task, distrb_info) - call scatter_global_ext(I_shear, G_shear, master_task, distrb_info) - call scatter_global_ext(I_taubx, G_taubx, master_task, distrb_info) - call scatter_global_ext(I_tauby, G_tauby, master_task, distrb_info) - - end subroutine evp_copyout - -!---------------------------------------------------------------------------- - - subroutine evp_kernel_v2 - - use ice_constants, only : c0 - use ice_dyn_shared, only: ndte - use ice_communicate, only: my_task, master_task - implicit none - - real(kind=dbl_kind) :: rhow - integer (kind=int_kind) :: i, nthreads - integer (kind=int_kind) :: na,nb,navel - - character(len=*), parameter :: subname = '(evp_kernel_v2)' - !--------------------------------------- - !-- All calculations has to be done on one single node (choose master-task) -- - - if (my_task == master_task) then - - !- Read constants... - call icepack_query_parameters(rhow_out=rhow) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - na=NA_len - nb=NA_len - navel=NAVEL_len - - !- Initialize openmp --------------------------------------------------------- - call domp_init(nthreads) ! 
ought to be called from main - - !- Initialize timers --------------------------------------------------------- - str1=c0 - str2=c0 - str3=c0 - str4=c0 - str5=c0 - str6=c0 - str7=c0 - str8=c0 - - if (ndte<2) call abort_ice(subname//' ERROR: ndte must be 2 or higher for this kernel') - - !$OMP PARALLEL PRIVATE(i) - do i = 1, ndte-1 - call evp1d_stress(NA_len, & - ee,ne,se,1,na,uvel,vvel,dxt,dyt, & - hte,htn,htem1,htnm1, & - strength,stressp_1,stressp_2,stressp_3,stressp_4, & - stressm_1,stressm_2,stressm_3,stressm_4,stress12_1, & - stress12_2,stress12_3,stress12_4,str1,str2,str3, & - str4,str5,str6,str7,str8) - !$OMP BARRIER - call evp1d_stepu(NA_len, rhow, & - 1,nb,cdn_ocn,aiu,uocn,vocn,forcex,forcey,umassdti,fm,uarear,Tbu,& - uvel_init,vvel_init,uvel,vvel, & - str1,str2,str3,str4,str5,str6,str7,str8, nw,sw,sse,skipucell) - !$OMP BARRIER - call evp1d_halo_update(NA_len,1,navel,uvel,vvel, halo_parent) - !$OMP BARRIER - enddo - - call evp1d_stress(NA_len, tarear, & - ee,ne,se,1,na,uvel,vvel,dxt,dyt, & - hte,htn,htem1,htnm1, & - strength,stressp_1,stressp_2,stressp_3,stressp_4, & - stressm_1,stressm_2,stressm_3,stressm_4,stress12_1, & - stress12_2,stress12_3,stress12_4, & - divu,rdg_conv,rdg_shear,shear, & - str1,str2,str3,str4,str5,str6,str7,str8) - !$OMP BARRIER - call evp1d_stepu(NA_len, rhow, & - 1,nb,cdn_ocn,aiu,uocn,vocn,forcex,forcey,umassdti,fm,uarear,Tbu,& - strintx,strinty,taubx,tauby, & - uvel_init,vvel_init,uvel,vvel, & - str1,str2,str3,str4,str5,str6,str7,str8, nw,sw,sse,skipucell) - !$OMP BARRIER - call evp1d_halo_update(NA_len,1,navel,uvel,vvel, halo_parent) + end subroutine evp1d_halo_update + +!======================================================================= + + subroutine alloc1d(na) + + implicit none + + integer(kind=int_kind), intent(in) :: na + + ! local variables + + integer(kind=int_kind) :: ierr + + character(len=*), parameter :: subname = '(alloc1d)' + + allocate( & + ! helper indices for neighbours + indj(1:na), indi(1:na), ee(1:na), ne(1:na), se(1:na), & + nw(1:na), sw(1:na), sse(1:na), skipucell(1:na), & + skiptcell(1:na), & + ! grid distances and their "-1 neighbours" + HTE(1:na), HTN(1:na), HTEm1(1:na), HTNm1(1:na), & + ! T cells + strength(1:na), dxt(1:na), dyt(1:na), tarear(1:na), & + stressp_1(1:na), stressp_2(1:na), stressp_3(1:na), & + stressp_4(1:na), stressm_1(1:na), stressm_2(1:na), & + stressm_3(1:na), stressm_4(1:na), stress12_1(1:na), & + stress12_2(1:na), stress12_3(1:na), stress12_4(1:na), & + divu(1:na), rdg_conv(1:na), rdg_shear(1:na), shear(1:na), & + ! U cells + cdn_ocn(1:na), aiu(1:na), uocn(1:na), vocn(1:na), & + forcex(1:na), forcey(1:na), Tbu(1:na), umassdti(1:na), & + fm(1:na), uarear(1:na), strintx(1:na), strinty(1:na), & + uvel_init(1:na), vvel_init(1:na), taubx(1:na), tauby(1:na), & + ! error handling + stat=ierr) + + if (ierr /= 0) call abort_ice(subname & + // ' ERROR: could not allocate 1D arrays') + + end subroutine alloc1d + +!======================================================================= + + subroutine alloc1d_navel(navel) + + implicit none + + integer(kind=int_kind), intent(in) :: navel + + ! 
local variables + + integer(kind=int_kind) :: ierr + + character(len=*), parameter :: subname = '(alloc1d_navel)' + + allocate(uvel(1:navel), vvel(1:navel), indij(1:navel), & + halo_parent(1:navel), str1(1:navel), str2(1:navel), & + str3(1:navel), str4(1:navel), str5(1:navel), str6(1:navel), & + str7(1:navel), str8(1:navel), stat=ierr) + + if (ierr /= 0) call abort_ice(subname & + // ' ERROR: could not allocate 1D arrays') + + end subroutine alloc1d_navel + +!======================================================================= + + subroutine dealloc1d + + implicit none + + ! local variables + + integer(kind=int_kind) :: ierr + + character(len=*), parameter :: subname = '(dealloc1d)' + + deallocate( & + ! helper indices for neighbours + indj, indi, ee, ne, se, nw, sw, sse, skipucell, skiptcell, & + ! grid distances and their "-1 neighbours" + HTE, HTN, HTEm1, HTNm1, & + ! T cells + strength, dxt, dyt, tarear, stressp_1, stressp_2, stressp_3, & + stressp_4, stressm_1, stressm_2, stressm_3, stressm_4, & + stress12_1, stress12_2, stress12_3, stress12_4, str1, str2, & + str3, str4, str5, str6, str7, str8, divu, rdg_conv, & + rdg_shear, shear, & + ! U cells + cdn_ocn, aiu, uocn, vocn, forcex, forcey, Tbu, umassdti, fm, & + uarear, strintx, strinty, uvel_init, vvel_init, taubx, tauby, & + uvel, vvel, indij, halo_parent, & + ! error handling + stat=ierr) + + if (ierr /= 0) call abort_ice(subname & + // ' ERROR: could not deallocate 1D arrays') + + end subroutine dealloc1d + +!======================================================================= + + subroutine ice_dyn_evp_1d_copyin(nx, ny, nblk, nx_glob, ny_glob, & + I_icetmask, I_iceumask, I_cdn_ocn, I_aiu, I_uocn, I_vocn, & + I_forcex, I_forcey, I_Tbu, I_umassdti, I_fm, I_uarear, I_tarear, & + I_strintx, I_strinty, I_uvel_init, I_vvel_init, I_strength, & + I_uvel, I_vvel, I_dxt, I_dyt, I_stressp_1, I_stressp_2, & + I_stressp_3, I_stressp_4, I_stressm_1, I_stressm_2, I_stressm_3, & + I_stressm_4, I_stress12_1, I_stress12_2, I_stress12_3, & + I_stress12_4) + + use ice_gather_scatter, only : gather_global_ext + use ice_domain, only : distrb_info + use ice_communicate, only : my_task, master_task + use ice_grid, only : G_HTE, G_HTN + use ice_constants, only : c0 + + implicit none + + integer(int_kind), intent(in) :: nx, ny, nblk, nx_glob, ny_glob + logical(kind=log_kind), dimension(nx, ny, nblk), intent(in) :: & + I_iceumask + integer(kind=int_kind), dimension(nx, ny, nblk), intent(in) :: & + I_icetmask + real(kind=dbl_kind), dimension(nx, ny, nblk), intent(in) :: & + I_cdn_ocn, I_aiu, I_uocn, I_vocn, I_forcex, I_forcey, I_Tbu, & + I_umassdti, I_fm, I_uarear, I_tarear, I_strintx, I_strinty, & + I_uvel_init, I_vvel_init, I_strength, I_uvel, I_vvel, I_dxt, & + I_dyt, I_stressp_1, I_stressp_2, I_stressp_3, I_stressp_4, & + I_stressm_1, I_stressm_2, I_stressm_3, I_stressm_4, & + I_stress12_1, I_stress12_2, I_stress12_3, I_stress12_4 + + ! 
local variables
+
+      logical(kind=log_kind), dimension(nx_glob, ny_glob) :: &
+         G_iceumask
+      integer(kind=int_kind), dimension(nx_glob, ny_glob) :: &
+         G_icetmask
+      real(kind=dbl_kind), dimension(nx_glob, ny_glob) :: &
+         G_cdn_ocn, G_aiu, G_uocn, G_vocn, G_forcex, G_forcey, G_Tbu, &
+         G_umassdti, G_fm, G_uarear, G_tarear, G_strintx, G_strinty, &
+         G_uvel_init, G_vvel_init, G_strength, G_uvel, G_vvel, G_dxt, &
+         G_dyt, G_stressp_1, G_stressp_2, G_stressp_3, G_stressp_4, &
+         G_stressm_1, G_stressm_2, G_stressm_3, G_stressm_4, &
+         G_stress12_1, G_stress12_2, G_stress12_3, G_stress12_4
+
+      character(len=*), parameter :: &
+         subname = '(ice_dyn_evp_1d_copyin)'
+
+      call gather_global_ext(G_icetmask,   I_icetmask,   master_task, distrb_info )
+      call gather_global_ext(G_iceumask,   I_iceumask,   master_task, distrb_info )
+      call gather_global_ext(G_cdn_ocn,    I_cdn_ocn,    master_task, distrb_info )
+      call gather_global_ext(G_aiu,        I_aiu,        master_task, distrb_info )
+      call gather_global_ext(G_uocn,       I_uocn,       master_task, distrb_info )
+      call gather_global_ext(G_vocn,       I_vocn,       master_task, distrb_info )
+      call gather_global_ext(G_forcex,     I_forcex,     master_task, distrb_info )
+      call gather_global_ext(G_forcey,     I_forcey,     master_task, distrb_info )
+      call gather_global_ext(G_Tbu,        I_Tbu,        master_task, distrb_info )
+      call gather_global_ext(G_umassdti,   I_umassdti,   master_task, distrb_info )
+      call gather_global_ext(G_fm,         I_fm,         master_task, distrb_info )
+      call gather_global_ext(G_uarear,     I_uarear,     master_task, distrb_info )
+      call gather_global_ext(G_tarear,     I_tarear,     master_task, distrb_info )
+      call gather_global_ext(G_strintx,    I_strintx,    master_task, distrb_info )
+      call gather_global_ext(G_strinty,    I_strinty,    master_task, distrb_info )
+      call gather_global_ext(G_uvel_init,  I_uvel_init,  master_task, distrb_info )
+      call gather_global_ext(G_vvel_init,  I_vvel_init,  master_task, distrb_info )
+      call gather_global_ext(G_strength,   I_strength,   master_task, distrb_info )
+      call gather_global_ext(G_uvel,       I_uvel,       master_task, distrb_info, c0)
+      call gather_global_ext(G_vvel,       I_vvel,       master_task, distrb_info, c0)
+      call gather_global_ext(G_dxt,        I_dxt,        master_task, distrb_info )
+      call gather_global_ext(G_dyt,        I_dyt,        master_task, distrb_info )
+      call gather_global_ext(G_stressp_1,  I_stressp_1,  master_task, distrb_info )
+      call gather_global_ext(G_stressp_2,  I_stressp_2,  master_task, distrb_info )
+      call gather_global_ext(G_stressp_3,  I_stressp_3,  master_task, distrb_info )
+      call gather_global_ext(G_stressp_4,  I_stressp_4,  master_task, distrb_info )
+      call gather_global_ext(G_stressm_1,  I_stressm_1,  master_task, distrb_info )
+      call gather_global_ext(G_stressm_2,  I_stressm_2,  master_task, distrb_info )
+      call gather_global_ext(G_stressm_3,  I_stressm_3,  master_task, distrb_info )
+      call gather_global_ext(G_stressm_4,  I_stressm_4,  master_task, distrb_info )
+      call gather_global_ext(G_stress12_1, I_stress12_1, master_task, distrb_info )
+      call gather_global_ext(G_stress12_2, I_stress12_2, master_task, distrb_info )
+      call gather_global_ext(G_stress12_3, I_stress12_3, master_task, distrb_info )
+      call gather_global_ext(G_stress12_4, I_stress12_4, master_task, distrb_info )
+
+      ! all calculations are done on the master task
+      if (my_task == master_task) then
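+
+         ! master-task set-up: count the active ice points (calc_na),
+         ! allocate the 1D work arrays, record the 2D (i,j) index of
+         ! every point, extend the point set with the neighbour points
+         ! needed for the finite differences (calc_navel), remap the
+         ! gathered 2D fields into 1D vectors (convert_2d_1d) and set
+         ! up the halo-parent indices (calc_halo_parent)
+
+         ! 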
find number of active points and allocate 1D vectors + call calc_na(nx_glob, ny_glob, NA_len, G_icetmask, G_iceumask) + call alloc1d(NA_len) + call calc_2d_indices(nx_glob, ny_glob, NA_len, G_icetmask, G_iceumask) + call calc_navel(nx_glob, ny_glob, NA_len, NAVEL_len) + call alloc1d_navel(NAVEL_len) + ! initialize OpenMP. FIXME: ought to be called from main + call domp_init() + !$OMP PARALLEL DEFAULT(shared) + call numainit(1, NA_len, NAVEL_len) + !$OMP END PARALLEL + ! map 2D arrays to 1D arrays + call convert_2d_1d(nx_glob, ny_glob, NA_len, NAVEL_len, & + G_HTE, G_HTN, G_cdn_ocn, G_aiu, G_uocn, G_vocn, G_forcex, & + G_forcey, G_Tbu, G_umassdti, G_fm, G_uarear, G_tarear, & + G_strintx, G_strinty, G_uvel_init, G_vvel_init, & + G_strength, G_uvel, G_vvel, G_dxt, G_dyt, G_stressp_1, & + G_stressp_2, G_stressp_3, G_stressp_4, G_stressm_1, & + G_stressm_2, G_stressm_3, G_stressm_4, G_stress12_1, & + G_stress12_2, G_stress12_3, G_stress12_4) + call calc_halo_parent(nx_glob, ny_glob, NA_len, NAVEL_len, G_icetmask) + end if + + end subroutine ice_dyn_evp_1d_copyin + +!======================================================================= + + subroutine ice_dyn_evp_1d_copyout(nx, ny, nblk, nx_glob, ny_glob, & + I_uvel, I_vvel, I_strintx, I_strinty, I_stressp_1, I_stressp_2, & + I_stressp_3, I_stressp_4, I_stressm_1, I_stressm_2, I_stressm_3, & + I_stressm_4, I_stress12_1, I_stress12_2, I_stress12_3, & + I_stress12_4, I_divu, I_rdg_conv, I_rdg_shear, I_shear, I_taubx, & + I_tauby) + + use ice_constants, only : c0 + use ice_gather_scatter, only : scatter_global_ext + use ice_domain, only : distrb_info + use ice_communicate, only : my_task, master_task + + implicit none + + integer(int_kind), intent(in) :: nx, ny, nblk, nx_glob, ny_glob + real(dbl_kind), dimension(nx, ny, nblk), intent(out) :: I_uvel, & + I_vvel, I_strintx, I_strinty, I_stressp_1, I_stressp_2, & + I_stressp_3, I_stressp_4, I_stressm_1, I_stressm_2, & + I_stressm_3, I_stressm_4, I_stress12_1, I_stress12_2, & + I_stress12_3, I_stress12_4, I_divu, I_rdg_conv, I_rdg_shear, & + I_shear, I_taubx, I_tauby + + ! local variables + + integer(int_kind) :: iw, lo, up, j, i + real(dbl_kind), dimension(nx_glob, ny_glob) :: G_uvel, G_vvel, & + G_strintx, G_strinty, G_stressp_1, G_stressp_2, G_stressp_3, & + G_stressp_4, G_stressm_1, G_stressm_2, G_stressm_3, & + G_stressm_4, G_stress12_1, G_stress12_2, G_stress12_3, & + G_stress12_4, G_divu, G_rdg_conv, G_rdg_shear, G_shear, & + G_taubx, G_tauby + + character(len=*), parameter :: & + subname = '(ice_dyn_evp_1d_copyout)' + + ! remap 1D arrays into 2D arrays + if (my_task == master_task) then + + G_uvel = c0 + G_vvel = c0 + G_strintx = c0 + G_strinty = c0 + G_stressp_1 = c0 + G_stressp_2 = c0 + G_stressp_3 = c0 + G_stressp_4 = c0 + G_stressm_1 = c0 + G_stressm_2 = c0 + G_stressm_3 = c0 + G_stressm_4 = c0 + G_stress12_1 = c0 + G_stress12_2 = c0 + G_stress12_3 = c0 + G_stress12_4 = c0 + G_divu = c0 + G_rdg_conv = c0 + G_rdg_shear = c0 + G_shear = c0 + G_taubx = c0 + G_tauby = c0 + + !$OMP PARALLEL PRIVATE(iw, lo, up, j, i) + call domp_get_domain(1, NA_len, lo, up) + do iw = lo, up + ! get 2D indices + i = indi(iw) + j = indj(iw) + ! 
remap
+            G_strintx(i, j) = strintx(iw)
+            G_strinty(i, j) = strinty(iw)
+            G_stressp_1(i, j) = stressp_1(iw)
+            G_stressp_2(i, j) = stressp_2(iw)
+            G_stressp_3(i, j) = stressp_3(iw)
+            G_stressp_4(i, j) = stressp_4(iw)
+            G_stressm_1(i, j) = stressm_1(iw)
+            G_stressm_2(i, j) = stressm_2(iw)
+            G_stressm_3(i, j) = stressm_3(iw)
+            G_stressm_4(i, j) = stressm_4(iw)
+            G_stress12_1(i, j) = stress12_1(iw)
+            G_stress12_2(i, j) = stress12_2(iw)
+            G_stress12_3(i, j) = stress12_3(iw)
+            G_stress12_4(i, j) = stress12_4(iw)
+            G_divu(i, j) = divu(iw)
+            G_rdg_conv(i, j) = rdg_conv(iw)
+            G_rdg_shear(i, j) = rdg_shear(iw)
+            G_shear(i, j) = shear(iw)
+            G_taubx(i, j) = taubx(iw)
+            G_tauby(i, j) = tauby(iw)
+            G_uvel(i, j) = uvel(iw)
+            G_vvel(i, j) = vvel(iw)
+         end do
+         call domp_get_domain(NA_len + 1, NAVEL_len, lo, up)
+         do iw = lo, up
+            ! get 2D indices
+            j = int((indij(iw) - 1) / (nx_glob)) + 1
+            i = indij(iw) - (j - 1) * nx_glob
+            ! remap
+            G_uvel(i, j) = uvel(iw)
+            G_vvel(i, j) = vvel(iw)
+         end do
+         !$OMP END PARALLEL
+
+         call dealloc1d()
+
+      end if
+
+      ! scatter data (must be called on all tasks)
+      call scatter_global_ext(I_uvel, G_uvel, master_task, distrb_info)
+      call scatter_global_ext(I_vvel, G_vvel, master_task, distrb_info)
+      call scatter_global_ext(I_strintx, G_strintx, master_task, distrb_info)
+      call scatter_global_ext(I_strinty, G_strinty, master_task, distrb_info)
+      call scatter_global_ext(I_stressp_1, G_stressp_1, master_task, distrb_info)
+      call scatter_global_ext(I_stressp_2, G_stressp_2, master_task, distrb_info)
+      call scatter_global_ext(I_stressp_3, G_stressp_3, master_task, distrb_info)
+      call scatter_global_ext(I_stressp_4, G_stressp_4, master_task, distrb_info)
+      call scatter_global_ext(I_stressm_1, G_stressm_1, master_task, distrb_info)
+      call scatter_global_ext(I_stressm_2, G_stressm_2, master_task, distrb_info)
+      call scatter_global_ext(I_stressm_3, G_stressm_3, master_task, distrb_info)
+      call scatter_global_ext(I_stressm_4, G_stressm_4, master_task, distrb_info)
+      call scatter_global_ext(I_stress12_1, G_stress12_1, master_task, distrb_info)
+      call scatter_global_ext(I_stress12_2, G_stress12_2, master_task, distrb_info)
+      call scatter_global_ext(I_stress12_3, G_stress12_3, master_task, distrb_info)
+      call scatter_global_ext(I_stress12_4, G_stress12_4, master_task, distrb_info)
+      call scatter_global_ext(I_divu, G_divu, master_task, distrb_info)
+      call scatter_global_ext(I_rdg_conv, G_rdg_conv, master_task, distrb_info)
+      call scatter_global_ext(I_rdg_shear, G_rdg_shear, master_task, distrb_info)
+      call scatter_global_ext(I_shear, G_shear, master_task, distrb_info)
+      call scatter_global_ext(I_taubx, G_taubx, master_task, distrb_info)
+      call scatter_global_ext(I_tauby, G_tauby, master_task, distrb_info)
+
+   end subroutine ice_dyn_evp_1d_copyout
+
+!=======================================================================
+
+   subroutine ice_dyn_evp_1d_kernel
+
+      use ice_constants, only : c0
+      use ice_dyn_shared, only : ndte
+      use ice_communicate, only : my_task, master_task
+
+      implicit none
+
+      ! local variables
+
+      real(kind=dbl_kind) :: rhow
+      integer(kind=int_kind) :: ksub
+
+      character(len=*), parameter :: &
+         subname = '(ice_dyn_evp_1d_kernel)'
+
+      ! all calculations are done on the master task
+      if (my_task == master_task) then
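+
+         ! EVP subcycling: ndte-1 regular subcycles of
+         ! stress -> momentum (stepu) -> halo update, then one final
+         ! subcycle that additionally returns the diagnostics (divu,
+         ! rdg_conv, rdg_shear, shear) and the stress/seabed-stress
+         ! outputs (strintx, strinty, taubx, tauby)
+
+         ! 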
read constants + call icepack_query_parameters(rhow_out = rhow) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) then + call abort_ice(error_message=subname, file=__FILE__, & + line=__LINE__) + end if + + if (ndte < 2) call abort_ice(subname & + // ' ERROR: ndte must be 2 or higher for this kernel') + + !$OMP PARALLEL PRIVATE(ksub) + do ksub = 1, ndte - 1 + call evp1d_stress(NA_len, ee, ne, se, 1, NA_len, uvel, & + vvel, dxt, dyt, hte, htn, htem1, htnm1, strength, & + stressp_1, stressp_2, stressp_3, stressp_4, stressm_1, & + stressm_2, stressm_3, stressm_4, stress12_1, & + stress12_2, stress12_3, stress12_4, str1, str2, str3, & + str4, str5, str6, str7, str8, skiptcell) + !$OMP BARRIER + call evp1d_stepu(NA_len, rhow, 1, NA_len, cdn_ocn, aiu, & + uocn, vocn, forcex, forcey, umassdti, fm, uarear, Tbu, & + uvel_init, vvel_init, uvel, vvel, str1, str2, str3, & + str4, str5, str6, str7, str8, nw, sw, sse, skipucell) + !$OMP BARRIER + call evp1d_halo_update(NAVEL_len, 1, NA_len, uvel, vvel, & + halo_parent) + !$OMP BARRIER + end do + + call evp1d_stress(NA_len, ee, ne, se, 1, NA_len, uvel, vvel, & + dxt, dyt, hte, htn, htem1, htnm1, strength, stressp_1, & + stressp_2, stressp_3, stressp_4, stressm_1, stressm_2, & + stressm_3, stressm_4, stress12_1, stress12_2, stress12_3, & + stress12_4, str1, str2, str3, str4, str5, str6, str7, & + str8, skiptcell, tarear, divu, rdg_conv, rdg_shear, shear) + !$OMP BARRIER + call evp1d_stepu(NA_len, rhow, 1, NA_len, cdn_ocn, aiu, uocn, & + vocn, forcex, forcey, umassdti, fm, uarear, Tbu, & + uvel_init, vvel_init, uvel, vvel, str1, str2, str3, str4, & + str5, str6, str7, str8, nw, sw, sse, skipucell, strintx, & + strinty, taubx, tauby) + !$OMP BARRIER + call evp1d_halo_update(NAVEL_len, 1, NA_len, uvel, vvel, & + halo_parent) + !$OMP END PARALLEL + + end if ! master task + + end subroutine ice_dyn_evp_1d_kernel + +!======================================================================= + + subroutine calc_na(nx, ny, na, icetmask, iceumask) + ! Calculate number of active points + + use ice_blocks, only : nghost + + implicit none + + integer(kind=int_kind), intent(in) :: nx, ny + integer(kind=int_kind), dimension(nx, ny), intent(in) :: & + icetmask + logical(kind=log_kind), dimension(nx, ny), intent(in) :: & + iceumask + integer(kind=int_kind), intent(out) :: na + + ! local variables + + integer(kind=int_kind) :: i, j + + character(len=*), parameter :: subname = '(calc_na)' + + na = 0 + ! NOTE: T mask includes northern and eastern ghost cells + do j = 1 + nghost, ny + do i = 1 + nghost, nx + if (icetmask(i,j) == 1 .or. iceumask(i,j)) na = na + 1 + end do + end do + + end subroutine calc_na + +!======================================================================= + + subroutine calc_2d_indices(nx, ny, na, icetmask, iceumask) + + use ice_blocks, only : nghost + + implicit none + + integer(kind=int_kind), intent(in) :: nx, ny, na + integer(kind=int_kind), dimension(nx, ny), intent(in) :: & + icetmask + logical(kind=log_kind), dimension(nx, ny), intent(in) :: & + iceumask + + ! local variables + + integer(kind=int_kind) :: i, j, Nmaskt + + character(len=*), parameter :: subname = '(calc_2d_indices)' + + skipucell(:) = .false. + skiptcell(:) = .false. + indi = 0 + indj = 0 + Nmaskt = 0 + ! NOTE: T mask includes northern and eastern ghost cells + do j = 1 + nghost, ny + do i = 1 + nghost, nx + if (icetmask(i,j) == 1 .or. 
iceumask(i,j)) then + Nmaskt = Nmaskt + 1 + indi(Nmaskt) = i + indj(Nmaskt) = j + if (icetmask(i,j) /= 1) skiptcell(Nmaskt) = .true. + if (.not. iceumask(i,j)) skipucell(Nmaskt) = .true. + ! NOTE: U mask does not include northern and eastern + ! ghost cells. Skip northern and eastern ghost cells + if (i == nx) skipucell(Nmaskt) = .true. + if (j == ny) skipucell(Nmaskt) = .true. + end if + end do + end do + + end subroutine calc_2d_indices + +!======================================================================= + + subroutine calc_navel(nx_block, ny_block, na, navel) + ! Calculate number of active points, including halo points + + implicit none + + integer(kind=int_kind), intent(in) :: nx_block, ny_block, na + integer(kind=int_kind), intent(out) :: navel + + ! local variables + + integer(kind=int_kind) :: iw, i, j + integer(kind=int_kind), dimension(1:na) :: Iin, Iee, Ine, Ise, & + Inw, Isw, Isse + integer(kind=int_kind), dimension(1:7 * na) :: util1, util2 + + character(len=*), parameter :: subname = '(calc_navel)' + + ! calculate additional 1D indices used for finite differences + do iw = 1, na + ! get 2D indices + i = indi(iw) + j = indj(iw) + ! calculate 1D indices + Iin(iw) = i + (j - 1) * nx_block ! ( 0, 0) target point + Iee(iw) = i - 1 + (j - 1) * nx_block ! (-1, 0) + Ine(iw) = i - 1 + (j - 2) * nx_block ! (-1, -1) + Ise(iw) = i + (j - 2) * nx_block ! ( 0, -1) + Inw(iw) = i + 1 + (j - 1) * nx_block ! (+1, 0) + Isw(iw) = i + 1 + (j - 0) * nx_block ! (+1, +1) + Isse(iw) = i + (j - 0) * nx_block ! ( 0, +1) + end do + + ! find number of points needed for finite difference calculations + call union(Iin, Iee, na, na, util1, i ) + call union(util1, Ine, i, na, util2, j ) + call union(util2, Ise, j, na, util1, i ) + call union(util1, Inw, i, na, util2, j ) + call union(util2, Isw, j, na, util1, i ) + call union(util1, Isse, i, na, util2, navel) + + end subroutine calc_navel + +!======================================================================= + + subroutine convert_2d_1d(nx, ny, na, navel, I_HTE, I_HTN, & + I_cdn_ocn, I_aiu, I_uocn, I_vocn, I_forcex, I_forcey, I_Tbu, & + I_umassdti, I_fm, I_uarear, I_tarear, I_strintx, I_strinty, & + I_uvel_init, I_vvel_init, I_strength, I_uvel, I_vvel, I_dxt, & + I_dyt, I_stressp_1, I_stressp_2, I_stressp_3, I_stressp_4, & + I_stressm_1, I_stressm_2, I_stressm_3, I_stressm_4, & + I_stress12_1, I_stress12_2, I_stress12_3, I_stress12_4) + + implicit none + + integer(kind=int_kind), intent(in) :: nx, ny, na, navel + real (kind=dbl_kind), dimension(nx, ny), intent(in) :: I_HTE, & + I_HTN, I_cdn_ocn, I_aiu, I_uocn, I_vocn, I_forcex, I_forcey, & + I_Tbu, I_umassdti, I_fm, I_uarear, I_tarear, I_strintx, & + I_strinty, I_uvel_init, I_vvel_init, I_strength, I_uvel, & + I_vvel, I_dxt, I_dyt, I_stressp_1, I_stressp_2, I_stressp_3, & + I_stressp_4, I_stressm_1, I_stressm_2, I_stressm_3, & + I_stressm_4, I_stress12_1, I_stress12_2, I_stress12_3, & + I_stress12_4 + + ! local variables + + integer(kind=int_kind) :: iw, lo, up, j, i, nachk + integer(kind=int_kind), dimension(1:na) :: Iin, Iee, Ine, Ise, & + Inw, Isw, Isse + integer(kind=int_kind), dimension(1:7 * na) :: util1, util2 + + character(len=*), parameter :: subname = '(convert_2d_1d)' + + ! calculate additional 1D indices used for finite differences + do iw = 1, na + ! get 2D indices + i = indi(iw) + j = indj(iw) + ! calculate 1D indices + Iin(iw) = i + (j - 1) * nx ! ( 0, 0) target point + Iee(iw) = i - 1 + (j - 1) * nx ! (-1, 0) + Ine(iw) = i - 1 + (j - 2) * nx ! (-1,-1) + Ise(iw) = i + (j - 2) * nx ! 
( 0,-1) + Inw(iw) = i + 1 + (j - 1) * nx ! (+1, 0) + Isw(iw) = i + 1 + (j - 0) * nx ! (+1,+1) + Isse(iw) = i + (j - 0) * nx ! ( 0,+1) + end do + + ! find number of points needed for finite difference calculations + call union(Iin, Iee, na, na, util1, i ) + call union(util1, Ine, i, na, util2, j ) + call union(util2, Ise, j, na, util1, i ) + call union(util1, Inw, i, na, util2, j ) + call union(util2, Isw, j, na, util1, i ) + call union(util1, Isse, i, na, util2, nachk) + + ! index vector with sorted target points + do iw = 1, na + indij(iw) = Iin(iw) + end do + + ! sorted additional points + call setdiff(util2, Iin, navel, na, util1, j) + do iw = na + 1, navel + indij(iw) = util1(iw - na) + end do + + ! indices for additional points needed for uvel and vvel + call findXinY(Iee, indij, na, navel, ee) + call findXinY(Ine, indij, na, navel, ne) + call findXinY(Ise, indij, na, navel, se) + call findXinY(Inw, indij, na, navel, nw) + call findXinY(Isw, indij, na, navel, sw) + call findXinY(Isse, indij, na, navel, sse) + + !$OMP PARALLEL PRIVATE(iw, lo, up, j, i) + ! write 1D arrays from 2D arrays (target points) + call domp_get_domain(1, na, lo, up) + do iw = lo, up + ! get 2D indices + i = indi(iw) + j = indj(iw) + ! map + uvel(iw) = I_uvel(i, j) + vvel(iw) = I_vvel(i, j) + cdn_ocn(iw) = I_cdn_ocn(i, j) + aiu(iw) = I_aiu(i, j) + uocn(iw) = I_uocn(i, j) + vocn(iw) = I_vocn(i, j) + forcex(iw) = I_forcex(i, j) + forcey(iw) = I_forcey(i, j) + Tbu(iw) = I_Tbu(i, j) + umassdti(iw) = I_umassdti(i, j) + fm(iw) = I_fm(i, j) + tarear(iw) = I_tarear(i, j) + uarear(iw) = I_uarear(i, j) + strintx(iw) = I_strintx(i, j) + strinty(iw) = I_strinty(i, j) + uvel_init(iw) = I_uvel_init(i, j) + vvel_init(iw) = I_vvel_init(i, j) + strength(iw) = I_strength(i, j) + dxt(iw) = I_dxt(i, j) + dyt(iw) = I_dyt(i, j) + stressp_1(iw) = I_stressp_1(i, j) + stressp_2(iw) = I_stressp_2(i, j) + stressp_3(iw) = I_stressp_3(i, j) + stressp_4(iw) = I_stressp_4(i, j) + stressm_1(iw) = I_stressm_1(i, j) + stressm_2(iw) = I_stressm_2(i, j) + stressm_3(iw) = I_stressm_3(i, j) + stressm_4(iw) = I_stressm_4(i, j) + stress12_1(iw) = I_stress12_1(i, j) + stress12_2(iw) = I_stress12_2(i, j) + stress12_3(iw) = I_stress12_3(i, j) + stress12_4(iw) = I_stress12_4(i, j) + HTE(iw) = I_HTE(i, j) + HTN(iw) = I_HTN(i, j) + HTEm1(iw) = I_HTE(i - 1, j) + HTNm1(iw) = I_HTN(i, j - 1) + end do + ! write 1D arrays from 2D arrays (additional points) + call domp_get_domain(na + 1, navel, lo, up) + do iw = lo, up + ! get 2D indices + j = int((indij(iw) - 1) / (nx)) + 1 + i = indij(iw) - (j - 1) * nx + ! map + uvel(iw) = I_uvel(i, j) + vvel(iw) = I_vvel(i, j) + end do !$OMP END PARALLEL - endif - - end subroutine evp_kernel_v2 - -!---------------------------------------------------------------------------- - - subroutine calc_na(nx,ny,na,icetmask) - ! Calculate number of active points (na) - use ice_blocks, only: nghost - - implicit none - - integer(int_kind),intent(in) :: nx,ny - integer(int_kind),intent(out) :: na - integer (kind=int_kind),dimension (nx,ny), intent(in) :: icetmask - integer(int_kind) :: i,j - - character(len=*), parameter :: subname = '(calc_na)' - !--------------------------------------- - - na = 0 -! Note: The icellt mask includes north and east ghost cells. (ice_dyn_shared.F90) - do j = 1+nghost, ny ! -nghost - do i = 1+nghost, nx ! 
-nghost - if (icetmask(i,j)==1) then - na=na+1 - endif - enddo - enddo - - end subroutine calc_na - -!---------------------------------------------------------------------------- - - subroutine calc_2d_indices(nx,ny,na,icetmask,iceumask) - - use ice_blocks, only: nghost - - implicit none - - integer(int_kind),intent(in) :: nx,ny,na - integer (kind=int_kind),dimension (nx,ny), intent(in) :: icetmask - logical (kind=log_kind),dimension (nx,ny), intent(in) :: iceumask - integer(int_kind) :: i,j,Nmaskt - - character(len=*), parameter :: subname = '(calc_2d_indices)' - !--------------------------------------- - - skipucell(:)=.false. - indi=0 - indj=0 - Nmaskt=0 -! Note: The icellt mask includes north and east ghost cells. (ice_dyn_shared.F90) - do j = 1+nghost, ny ! -nghost - do i = 1+nghost, nx ! -nghost - if (icetmask(i,j)==1) then - Nmaskt=Nmaskt+1 - indi(Nmaskt) = i - indj(Nmaskt) = j - ! Umask do NOT include north/east ghost cells ... skip these as well - if (iceumask(i,j) .eqv. .false. ) skipucell(Nmaskt) = .true. - if (i==nx) skipucell(Nmaskt) = .true. - if (j==ny) skipucell(Nmaskt) = .true. - endif - enddo - enddo - if (Nmaskt.ne.na) then - write(nu_diag,*) subname,' Nmaskt,na: ',Nmaskt,na - call abort_ice(subname//': ERROR Problem Nmaskt != na') - endif - if (Nmaskt==0) then - write(nu_diag,*) subname,' WARNING: NO ICE' - endif - - end subroutine calc_2d_indices - -!---------------------------------------------------------------------------- - - subroutine calc_navel(nx_block,ny_block,na,navel) - ! Calculate number of active points including needed halo points (navel) - - implicit none - - integer(int_kind),intent(in) :: nx_block,ny_block,na - integer(int_kind),intent(out) :: navel - - integer(int_kind) :: iw,i,j - integer(int_kind),dimension(1:na) :: Iin,Iee,Ine,Ise,Inw,Isw,Isse - integer(int_kind),dimension(1:7*na) :: util1,util2 - - character(len=*), parameter :: subname = '(calc_navel)' - - !--------------------------------------- - ! Additional indices used for finite differences (FD) - do iw=1,na - i=indi(iw) - j=indj(iw) - Iin(iw) = i + (j-1)*nx_block ! ( 0, 0) Target point - Iee(iw) = i-1 + (j-1)*nx_block ! (-1, 0) - Ine(iw) = i-1 + (j-2)*nx_block ! (-1,-1) - Ise(iw) = i + (j-2)*nx_block ! ( 0,-1) - Inw(iw) = i+1 + (j-1)*nx_block ! (+1, 0) - Isw(iw) = i+1 + (j-0)*nx_block ! (+1,+1) - Isse(iw)= i + (j-0)*nx_block ! ( 0,+1) - enddo - - !-- Find number of points needed for finite difference calculations - call union(Iin, Iee,na,na,util1,i) - call union(util1,Ine, i,na,util2,j) - call union(util2,Ise, j,na,util1,i) - call union(util1,Inw, i,na,util2,j) - call union(util2,Isw, j,na,util1,i) - call union(util1,Isse,i,na,util2,navel) - - !-- Check bounds - do iw=1,navel - if (util2(iw)>nx_block*ny_block .or. util2(iw)<1) then - write(nu_diag,*) subname,' nx_block,ny_block,nx_block*ny_block: ',nx_block,ny_block,nx_block*ny_block - write(nu_diag,*) subname,' na,navel,iw,util2(iw): ',na,navel,iw,util2(iw) - call abort_ice(subname//': Problem with boundary. 
Check halo zone values') - endif - enddo - - end subroutine calc_navel - -!---------------------------------------------------------------------------- - - subroutine convert_2d_1d_v2(nx,ny, na,navel, & - I_HTE,I_HTN, & -!v1 I_dxhy,I_dyhx,I_cyp,I_cxp,I_cym,I_cxm,I_tinyarea, & -!v1 I_waterx,I_watery, & - I_cdn_ocn,I_aiu,I_uocn,I_vocn,I_forcex,I_forcey,I_Tbu, & - I_umassdti,I_fm,I_uarear,I_tarear,I_strintx,I_strinty, & - I_uvel_init,I_vvel_init, & - I_strength,I_uvel,I_vvel,I_dxt,I_dyt, & - I_stressp_1 ,I_stressp_2, I_stressp_3, I_stressp_4, & - I_stressm_1 ,I_stressm_2, I_stressm_3, I_stressm_4, & - I_stress12_1,I_stress12_2,I_stress12_3,I_stress12_4 ) - - implicit none - - integer(int_kind),intent(in) :: nx,ny,na,navel - real (kind=dbl_kind), dimension(nx,ny), intent(in) :: & - I_HTE,I_HTN, & -!v1 I_dxhy,I_dyhx,I_cyp,I_cxp,I_cym,I_cxm,I_tinyarea, & -!v1 I_waterx,I_watery, & - I_cdn_ocn,I_aiu,I_uocn,I_vocn,I_forcex,I_forcey,I_Tbu, & - I_umassdti,I_fm,I_uarear,I_tarear,I_strintx,I_strinty, & - I_uvel_init,I_vvel_init, & - I_strength,I_uvel,I_vvel,I_dxt,I_dyt, & - I_stressp_1 ,I_stressp_2, I_stressp_3, I_stressp_4, & - I_stressm_1 ,I_stressm_2, I_stressm_3, I_stressm_4, & - I_stress12_1,I_stress12_2,I_stress12_3,I_stress12_4 - - integer(int_kind) :: iw,i,j, nx_block - integer(int_kind),dimension(1:na) :: Iin,Iee,Ine,Ise,Inw,Isw,Isse - integer(int_kind),dimension(1:7*na) :: util1,util2 - integer(int_kind) :: nachk - - character(len=*), parameter :: subname = '(convert_2d_1d_v2)' - - !--------------------------------------- - ! Additional indices used for finite differences (FD) - nx_block=nx ! Total block size in x-dir - do iw=1,na - i=indi(iw) - j=indj(iw) - Iin(iw) = i + (j-1)*nx_block ! ( 0, 0) Target point - Iee(iw) = i-1 + (j-1)*nx_block ! (-1, 0) - Ine(iw) = i-1 + (j-2)*nx_block ! (-1,-1) - Ise(iw) = i + (j-2)*nx_block ! ( 0,-1) - Inw(iw) = i+1 + (j-1)*nx_block ! (+1, 0) - Isw(iw) = i+1 + (j-0)*nx_block ! (+1,+1) - Isse(iw)= i + (j-0)*nx_block ! ( 0,+1) - enddo - - !-- Find number of points needed for finite difference calculations - call union(Iin, Iee,na,na,util1,i) - call union(util1,Ine, i,na,util2,j) - call union(util2,Ise, j,na,util1,i) - call union(util1,Inw, i,na,util2,j) - call union(util2,Isw, j,na,util1,i) - call union(util1,Isse,i,na,util2,nachk) - - if (nachk .ne. navel) then - write(nu_diag,*) subname,' ERROR: navel badly chosen: na,navel,nachk = ',na,navel,nachk - call abort_ice(subname//': ERROR: navel badly chosen') - endif - - ! indij: vector with target points (sorted) ... - do iw=1,na - indij(iw)=Iin(iw) - enddo - - ! indij: ... followed by extra points (sorted) - call setdiff(util2,Iin,navel,na,util1,j) - do iw=na+1,navel - indij(iw)=util1(iw-na) - enddo - - !-- Create indices for additional points needed for uvel,vvel: - call findXinY(Iee ,indij,na,navel, ee) - call findXinY(Ine ,indij,na,navel, ne) - call findXinY(Ise ,indij,na,navel, se) - call findXinY(Inw ,indij,na,navel, nw) - call findXinY(Isw ,indij,na,navel, sw) - call findXinY(Isse,indij,na,navel,sse) - - !-- write check -!if (1 == 2) then -! write(nu_diag,*) subname,' MHRI: INDICES start:' -! write(nu_diag,*) 'Min/max ee', minval(ee), maxval(ee) -! write(nu_diag,*) 'Min/max ne', minval(ne), maxval(ne) -! write(nu_diag,*) 'Min/max se', minval(se), maxval(se) -! write(nu_diag,*) 'Min/max nw', minval(nw), maxval(nw) -! write(nu_diag,*) 'Min/max sw', minval(sw), maxval(sw) -! write(nu_diag,*) 'Min/max sse',minval(sse),maxval(sse) -! write(nu_diag,*) subname,' MHRI: INDICES end:' -!endif - - ! 
Write 1D data from 2D: Here only extra FD part, the rest follows... - !$OMP PARALLEL DO PRIVATE(iw,i,j) - do iw=na+1,navel - j=int((indij(iw)-1)/(nx_block))+1 - i=indij(iw)-(j-1)*nx_block - uvel(iw)= I_uvel(i,j) - vvel(iw)= I_vvel(i,j) - enddo - !$OMP END PARALLEL DO - - ! Write 1D data from 2D - !$OMP PARALLEL DO PRIVATE(iw,i,j) - do iw=1,na - i=indi(iw) - j=indj(iw) - uvel(iw)= I_uvel(i,j) - vvel(iw)= I_vvel(i,j) - cdn_ocn(iw)= I_cdn_ocn(i,j) - aiu(iw)= I_aiu(i,j) - uocn(iw)= I_uocn(i,j) - vocn(iw)= I_vocn(i,j) - forcex(iw)= I_forcex(i,j) - forcey(iw)= I_forcey(i,j) - Tbu(iw)= I_Tbu(i,j) - umassdti(iw)= I_umassdti(i,j) - fm(iw)= I_fm(i,j) - tarear(iw)= I_tarear(i,j) - uarear(iw)= I_uarear(i,j) - strintx(iw)= I_strintx(i,j) - strinty(iw)= I_strinty(i,j) - uvel_init(iw)= I_uvel_init(i,j) - vvel_init(iw)= I_vvel_init(i,j) - strength(iw)= I_strength(i,j) - dxt(iw)= I_dxt(i,j) - dyt(iw)= I_dyt(i,j) - stressp_1(iw)= I_stressp_1(i,j) - stressp_2(iw)= I_stressp_2(i,j) - stressp_3(iw)= I_stressp_3(i,j) - stressp_4(iw)= I_stressp_4(i,j) - stressm_1(iw)= I_stressm_1(i,j) - stressm_2(iw)= I_stressm_2(i,j) - stressm_3(iw)= I_stressm_3(i,j) - stressm_4(iw)= I_stressm_4(i,j) - stress12_1(iw)=I_stress12_1(i,j) - stress12_2(iw)=I_stress12_2(i,j) - stress12_3(iw)=I_stress12_3(i,j) - stress12_4(iw)=I_stress12_4(i,j) -!v1 dxhy(iw)= I_dxhy(i,j) -!v1 dyhx(iw)= I_dyhx(i,j) -!v1 cyp(iw)= I_cyp(i,j) -!v1 cxp(iw)= I_cxp(i,j) -!v1 cym(iw)= I_cym(i,j) -!v1 cxm(iw)= I_cxm(i,j) -!v1 tinyarea(iw)= I_tinyarea(i,j) -!v1 waterx(iw)= I_waterx(i,j) -!v1 watery(iw)= I_watery(i,j) - HTE(iw) = I_HTE(i,j) - HTN(iw) = I_HTN(i,j) - HTEm1(iw) = I_HTE(i-1,j) - HTNm1(iw) = I_HTN(i,j-1) - enddo - !$OMP END PARALLEL DO - - end subroutine convert_2d_1d_v2 - -!---------------------------------------------------------------------------- - - subroutine calc_halo_parent(nx,ny,na,navel, I_icetmask) - - implicit none - - integer(kind=int_kind),intent(in) :: nx,ny,na,navel - integer(kind=int_kind), dimension(nx,ny), intent(in) :: I_icetmask - - integer(kind=int_kind) :: iw,i,j !,masku,maskt - integer(kind=int_kind),dimension(1:navel) :: Ihalo - - character(len=*), parameter :: subname = '(calc_halo_parent)' - - !--------------------------------------- - ! Indices for halo update: - ! 0: no halo point - ! >0: index for halo point parent. Finally related to indij vector - ! TODO: ONLY for nghost==1 - ! TODO: ONLY for circular grids - NOT tripole grids - - Ihalo(:)=0 - halo_parent(:)=0 - - !$OMP PARALLEL DO PRIVATE(iw,i,j) - do iw=1,navel - j=int((indij(iw)-1)/(nx))+1 - i=indij(iw)-(j-1)*nx - ! If within ghost-zone: - if (i==nx .and. I_icetmask( 2,j)==1) Ihalo(iw)= 2+ (j-1)*nx - if (i==1 .and. I_icetmask(nx-1,j)==1) Ihalo(iw)=(nx-1)+ (j-1)*nx - if (j==ny .and. I_icetmask(i, 2)==1) Ihalo(iw)= i+ nx - if (j==1 .and. I_icetmask(i,ny-1)==1) Ihalo(iw)= i+(ny-2)*nx - enddo - !$OMP END PARALLEL DO - - ! Relate halo indices to indij vector - call findXinY_halo(Ihalo,indij,navel,navel,halo_parent) - - !-- write check -!if (1 == 1) then -! integer(kind=int_kind) :: iiw,ii,jj !,masku,maskt MHRI -! write(nu_diag,*) subname,' MHRI: halo boundary start:' -! do iw=1,navel -! if (halo_parent(iw)>0) then -! iiw=halo_parent(iw) -! j=int((indij(iiw)-1)/(nx))+1 -! i=indij(iiw)-(j-1)*nx -! ii=i -! jj=j -! j=int((indij(iw)-1)/(nx))+1 -! i=indij(iw)-(j-1)*nx -! write(nu_diag,*)iw,i,j,iiw,ii,jj -! endif -! enddo -! 
write(nu_diag,*) subname,' MHRI: halo boundary end:' -!endif - - end subroutine calc_halo_parent - -!---------------------------------------------------------------------------- - - subroutine union(x,y,nx,ny,xy,nxy) - ! Find union (xy) of two sorted integer vectors (x and y) - ! ie. Combined values of the two vectors with no repetitions. - !use ice_kinds_mod - - implicit none - - integer (int_kind) :: i,j,k - integer (int_kind),intent(in) :: nx,ny - integer (int_kind),intent(in) :: x(1:nx),y(1:ny) - integer (int_kind),intent(out) :: xy(1:nx+ny) - integer (int_kind),intent(out) :: nxy - - character(len=*), parameter :: subname = '(union)' - - !--------------------------------------- - - i=1 - j=1 - k=1 - do while (i<=nx .and. j<=ny) - if (x(i)y(j)) then - xy(k)=y(j) - j=j+1 - else !if (x(i)==y(j)) then - xy(k)=x(i) - i=i+1 - j=j+1 - endif - k=k+1 - enddo - - ! The rest - do while (i<=nx) - xy(k)=x(i) - i=i+1 - k=k+1 - enddo - do while (j<=ny) - xy(k)=y(j) - j=j+1 - k=k+1 - enddo - nxy=k-1 - - end subroutine union - -!---------------------------------------------------------------------------- - - subroutine setdiff(x,y,nx,ny,xy,nxy) - ! Find element (xy) of two sorted integer vectors (x and y) - ! that are in x, but not in y ... or in y, but not in x - !use ice_kinds_mod - - implicit none - - integer (int_kind) :: i,j,k - integer (int_kind),intent(in) :: nx,ny - integer (int_kind),intent(in) :: x(1:nx),y(1:ny) - integer (int_kind),intent(out) :: xy(1:nx+ny) - integer (int_kind),intent(out) :: nxy - - character(len=*), parameter :: subname = '(setdiff)' - !--------------------------------------- - - i=1 - j=1 - k=1 - do while (i<=nx .and. j<=ny) - if (x(i)y(j)) then - xy(k)=y(j) - j=j+1 - k=k+1 - else !if (x(i)==y(j)) then - i=i+1 - j=j+1 - endif - enddo - - ! The rest - do while (i<=nx) - xy(k)=x(i) - i=i+1 - k=k+1 - enddo - do while (j<=ny) - xy(k)=y(j) - j=j+1 - k=k+1 - enddo - nxy=k-1 - - end subroutine setdiff - -!---------------------------------------------------------------------------- - - subroutine findXinY(x,y,nx,ny,indx) - ! Find indx vector so that x(1:na)=y(indx(1:na)) - ! - ! Conditions: - ! * EVERY item in x is found in y. - ! * x(1:nx) is a sorted integer vector. - ! * y(1:ny) consists of two sorted integer vectors: - ! [y(1:nx) ; y(nx+1:ny)] - ! * ny>=nx - ! Return: indx(1:na) - ! - !use ice_kinds_mod - - implicit none - - integer (int_kind),intent(in) :: nx,ny - integer (int_kind),intent(in) :: x(1:nx),y(1:ny) - integer (int_kind),intent(out) :: indx(1:nx) - integer (int_kind) :: i,j1,j2 - - character(len=*), parameter :: subname = '(findXinY)' - !--------------------------------------- - - i=1 - j1=1 - j2=nx+1 - do while (i<=nx) - if (x(i)==y(j1)) then - indx(i)=j1 - i=i+1 - j1=j1+1 - else if (x(i)==y(j2)) then - indx(i)=j2 - i=i+1 - j2=j2+1 - else if (x(i)>y(j1) ) then !.and. j1y(j2) ) then !.and. j2=nx - ! Return: indx(1:na) - ! - !use ice_kinds_mod - - implicit none - - integer (int_kind),intent(in) :: nx,ny - integer (int_kind),intent(in) :: x(1:nx),y(1:ny) - integer (int_kind),intent(out) :: indx(1:nx) - integer (int_kind) :: i,j1,nloop - - character(len=*), parameter :: subname = '(findXinY_halo)' - !--------------------------------------- - - nloop=1 - i=1 - j1=int((ny+1)/2) ! initial guess in the middle - do while (i<=nx) - if (x(i)==0) then - indx(i)=0 - i=i+1 - nloop=1 - else if (x(i)==y(j1)) then - indx(i)=j1 - i=i+1 - j1=j1+1 - if (j1>ny) j1=int((ny+1)/2) ! 
initial guess in the middle - nloop=1 - else if (x(i)y(j1) ) then - j1=j1+1 - if (j1>ny) then - j1=1 - nloop=nloop+1 - if (nloop>2) then - ! Stop for inf. loop. This check should not be necessary for halo - write(nu_diag,*) subname,' nx,ny: ',nx,ny - write(nu_diag,*) subname,' i,j1: ',i,j1 - write(nu_diag,*) subname,' x(i),y(j1): ',x(i),y(j1) - call abort_ice(subname//': ERROR too many loops') - endif - endif - endif - end do - - end subroutine findXinY_halo - -!---------------------------------------------------------------------------- - - subroutine numainit(l,u,uu) - - use ice_constants, only: c0 - - implicit none - - integer(kind=int_kind),intent(in) :: l,u,uu - - integer(kind=int_kind) :: lo,up - - character(len=*), parameter :: subname = '(numainit)' - !--------------------------------------- - - call domp_get_domain(l,u,lo,up) - ee(lo:up)=0 - ne(lo:up)=0 - se(lo:up)=0 - sse(lo:up)=0 - nw(lo:up)=0 - sw(lo:up)=0 - halo_parent(lo:up)=0 - strength(lo:up)=c0 - uvel(lo:up)=c0 - vvel(lo:up)=c0 - uvel_init(lo:up)=c0 - vvel_init(lo:up)=c0 - uocn(lo:up)=c0 - vocn(lo:up)=c0 - dxt(lo:up)=c0 - dyt(lo:up)=c0 - HTE(lo:up)=c0 - HTN(lo:up)=c0 - HTEm1(lo:up)=c0 - HTNm1(lo:up)=c0 -!v1 dxhy(lo:up)=c0 -!v1 dyhx(lo:up)=c0 -!v1 cyp(lo:up)=c0 -!v1 cxp(lo:up)=c0 -!v1 cym(lo:up)=c0 -!v1 cxm(lo:up)=c0 -!v1 tinyarea(lo:up)=c0 - stressp_1(lo:up)=c0 - stressp_2(lo:up)=c0 - stressp_3(lo:up)=c0 - stressp_4(lo:up)=c0 - stressm_1(lo:up)=c0 - stressm_2(lo:up)=c0 - stressm_3(lo:up)=c0 - stressm_4(lo:up)=c0 - stress12_1(lo:up)=c0 - stress12_2(lo:up)=c0 - stress12_3(lo:up)=c0 - stress12_4(lo:up)=c0 - tarear(lo:up)=c0 - Tbu(lo:up)=c0 - taubx(lo:up)=c0 - tauby(lo:up)=c0 - divu(lo:up)=c0 - rdg_conv(lo:up)=c0 - rdg_shear(lo:up)=c0 - shear(lo:up)=c0 - str1(lo:up)=c0 - str2(lo:up)=c0 - str3(lo:up)=c0 - str4(lo:up)=c0 - str5(lo:up)=c0 - str6(lo:up)=c0 - str7(lo:up)=c0 - str8(lo:up)=c0 - call domp_get_domain(u+1,uu,lo,up) - halo_parent(lo:up)=0 - uvel(lo:up)=c0 - vvel(lo:up)=c0 - str1(lo:up)=c0 - str2(lo:up)=c0 - str3(lo:up)=c0 - str4(lo:up)=c0 - str5(lo:up)=c0 - str6(lo:up)=c0 - str7(lo:up)=c0 - str8(lo:up)=c0 - - end subroutine numainit - -!---------------------------------------------------------------------------- -!=============================================================================== + end subroutine convert_2d_1d -end module ice_dyn_evp_1d +!======================================================================= + + subroutine calc_halo_parent(nx, ny, na, navel, I_icetmask) + + implicit none + integer(kind=int_kind), intent(in) :: nx, ny, na, navel + integer(kind=int_kind), dimension(nx, ny), intent(in) :: & + I_icetmask + + ! local variables + + integer(kind=int_kind) :: iw, i, j + integer(kind=int_kind), dimension(1:navel) :: Ihalo + + character(len=*), parameter :: subname = '(calc_halo_parent)' + + !----------------------------------------------------------------- + ! Indices for halo update: + ! 0: no halo point + ! >0: index for halo point parent, related to indij vector + ! + ! TODO: Implement for nghost > 1 + ! TODO: Implement for tripole grids + !----------------------------------------------------------------- + + Ihalo(:) = 0 + halo_parent(:) = 0 + + do iw = 1, navel + j = int((indij(iw) - 1) / (nx)) + 1 + i = indij(iw) - (j - 1) * nx + ! if within ghost zone + if (i == nx .and. I_icetmask(2, j) == 1) Ihalo(iw) = 2 + (j - 1) * nx + if (i == 1 .and. I_icetmask(nx - 1, j) == 1) Ihalo(iw) = (nx - 1) + (j - 1) * nx + if (j == ny .and. I_icetmask(i, 2) == 1) Ihalo(iw) = i + nx + if (j == 1 .and. 
I_icetmask(i, ny - 1) == 1) Ihalo(iw) = i + (ny - 2) * nx + end do + + ! relate halo indices to indij vector + call findXinY_halo(Ihalo, indij, navel, navel, halo_parent) + + end subroutine calc_halo_parent + +!======================================================================= + + subroutine union(x, y, nx, ny, xy, nxy) + ! Find union (xy) of two sorted integer vectors (x and y), i.e. + ! combined values of the two vectors with no repetitions + + implicit none + + integer(int_kind), intent(in) :: nx, ny + integer(int_kind), intent(in) :: x(1:nx), y(1:ny) + integer(int_kind), intent(out) :: xy(1:nx + ny) + integer(int_kind), intent(out) :: nxy + + ! local variables + + integer(int_kind) :: i, j, k + + character(len=*), parameter :: subname = '(union)' + + i = 1 + j = 1 + k = 1 + do while (i <= nx .and. j <= ny) + if (x(i) < y(j)) then + xy(k) = x(i) + i = i + 1 + else if (x(i) > y(j)) then + xy(k) = y(j) + j = j + 1 + else + xy(k) = x(i) + i = i + 1 + j = j + 1 + end if + k = k + 1 + end do + + ! the rest + do while (i <= nx) + xy(k) = x(i) + i = i + 1 + k = k + 1 + end do + do while (j <= ny) + xy(k) = y(j) + j = j + 1 + k = k + 1 + end do + nxy = k - 1 + + end subroutine union + +!======================================================================= + + subroutine setdiff(x, y, nx, ny, xy, nxy) + ! Find element (xy) of two sorted integer vectors (x and y) that + ! are in x, but not in y, or in y, but not in x + + implicit none + + integer(kind=int_kind), intent(in) :: nx, ny + integer(kind=int_kind), intent(in) :: x(1:nx), y(1:ny) + integer(kind=int_kind), intent(out) :: xy(1:nx + ny) + integer(kind=int_kind), intent(out) :: nxy + + ! local variables + + integer(kind=int_kind) :: i, j, k + + character(len=*), parameter :: subname = '(setdiff)' + + i = 1 + j = 1 + k = 1 + do while (i <= nx .and. j <= ny) + if (x(i) < y(j)) then + xy(k) = x(i) + i = i + 1 + k = k + 1 + else if (x(i) > y(j)) then + xy(k) = y(j) + j = j + 1 + k = k + 1 + else + i = i + 1 + j = j + 1 + end if + end do + + ! the rest + do while (i <= nx) + xy(k) = x(i) + i = i + 1 + k = k + 1 + end do + do while (j <= ny) + xy(k) = y(j) + j = j + 1 + k = k + 1 + end do + nxy = k - 1 + + end subroutine setdiff + +!======================================================================== + + subroutine findXinY(x, y, nx, ny, indx) + ! Find indx vector so that x(1:na) = y(indx(1:na)) + ! + ! Conditions: + ! * EVERY item in x is found in y + ! * x(1:nx) is a sorted integer vector + ! * y(1:ny) consists of two sorted integer vectors: + ! [y(1:nx); y(nx + 1:ny)] + ! * ny >= nx + ! + ! Return: indx(1:na) + + implicit none + + integer (kind=int_kind), intent(in) :: nx, ny + integer (kind=int_kind), intent(in) :: x(1:nx), y(1:ny) + integer (kind=int_kind), intent(out) :: indx(1:nx) + + ! local variables + + integer (kind=int_kind) :: i, j1, j2 + + character(len=*), parameter :: subname = '(findXinY)' + + i = 1 + j1 = 1 + j2 = nx + 1 + do while (i <= nx) + if (x(i) == y(j1)) then + indx(i) = j1 + i = i + 1 + j1 = j1 + 1 + else if (x(i) == y(j2)) then + indx(i) = j2 + i = i + 1 + j2 = j2 + 1 + else if (x(i) > y(j1)) then + j1 = j1 + 1 + else if (x(i) > y(j2)) then + j2 = j2 + 1 + else + call abort_ice(subname & + // ': ERROR: conditions not met') + end if + end do + + end subroutine findXinY + +!======================================================================= + + subroutine findXinY_halo(x, y, nx, ny, indx) + ! Find indx vector so that x(1:na) = y(indx(1:na)) + ! + ! Conditions: + ! * EVERY item in x is found in y, + ! 
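
Both set routines above assume sorted input and complete in a single O(nx+ny) merge pass. A hypothetical test driver for them (it assumes union and setdiff were exported from ice_dyn_evp_1d, which this patch keeps private, and that int_kind matches the default integer kind):

      program set_ops_demo
         use ice_dyn_evp_1d, only: union, setdiff ! assumed public for the demo
         implicit none
         integer :: xy(5), nxy
         call union((/1, 3, 5/), (/3, 4/), 3, 2, xy, nxy)
         print *, xy(1:nxy)   ! union:   1 3 4 5
         call setdiff((/1, 3, 5/), (/3, 4/), 3, 2, xy, nxy)
         print *, xy(1:nxy)   ! setdiff: 1 4 5
      end program set_ops_demo
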
except for x == 0, where indx = 0 is returned + ! * x(1:nx) is a non-sorted integer vector + ! * y(1:ny) is a sorted integer vector + ! * ny >= nx + ! + ! Return: indx(1:na) + + implicit none + + integer (kind=int_kind), intent(in) :: nx, ny + integer (kind=int_kind), intent(in) :: x(1:nx), y(1:ny) + integer (kind=int_kind), intent(out) :: indx(1:nx) + + ! local variables + + integer (kind=int_kind) :: i, j1, nloop + + character(len=*), parameter :: subname = '(findXinY_halo)' + + nloop = 1 + i = 1 + j1 = int((ny + 1) / 2) ! initial guess in the middle + do while (i <= nx) + if (x(i) == 0) then + indx(i) = 0 + i = i + 1 + nloop = 1 + else if (x(i) == y(j1)) then + indx(i) = j1 + i = i + 1 + j1 = j1 + 1 + ! initial guess in the middle + if (j1 > ny) j1 = int((ny + 1) / 2) + nloop = 1 + else if (x(i) < y(j1)) then + j1 = 1 + else if (x(i) > y(j1)) then + j1 = j1 + 1 + if (j1 > ny) then + j1 = 1 + nloop = nloop + 1 + if (nloop > 2) then + ! stop for infinite loop. This check should not be + ! necessary for halo + call abort_ice(subname // ' ERROR: too many loops') + end if + end if + end if + end do + + end subroutine findXinY_halo + +!======================================================================= + + subroutine numainit(l, u, uu) + + use ice_constants, only : c0 + + implicit none + + integer(kind=int_kind), intent(in) :: l, u, uu + + ! local variables + + integer(kind=int_kind) :: lo, up + + character(len=*), parameter :: subname = '(numainit)' + + call domp_get_domain(l, u, lo, up) + ee(lo:up) = 0 + ne(lo:up) = 0 + se(lo:up) = 0 + sse(lo:up) = 0 + nw(lo:up) = 0 + sw(lo:up) = 0 + halo_parent(lo:up) = 0 + strength(lo:up) = c0 + uvel(lo:up) = c0 + vvel(lo:up) = c0 + uvel_init(lo:up) = c0 + vvel_init(lo:up) = c0 + uocn(lo:up) = c0 + vocn(lo:up) = c0 + dxt(lo:up) = c0 + dyt(lo:up) = c0 + HTE(lo:up) = c0 + HTN(lo:up) = c0 + HTEm1(lo:up) = c0 + HTNm1(lo:up) = c0 + stressp_1(lo:up) = c0 + stressp_2(lo:up) = c0 + stressp_3(lo:up) = c0 + stressp_4(lo:up) = c0 + stressm_1(lo:up) = c0 + stressm_2(lo:up) = c0 + stressm_3(lo:up) = c0 + stressm_4(lo:up) = c0 + stress12_1(lo:up) = c0 + stress12_2(lo:up) = c0 + stress12_3(lo:up) = c0 + stress12_4(lo:up) = c0 + tarear(lo:up) = c0 + Tbu(lo:up) = c0 + taubx(lo:up) = c0 + tauby(lo:up) = c0 + divu(lo:up) = c0 + rdg_conv(lo:up) = c0 + rdg_shear(lo:up) = c0 + shear(lo:up) = c0 + str1(lo:up) = c0 + str2(lo:up) = c0 + str3(lo:up) = c0 + str4(lo:up) = c0 + str5(lo:up) = c0 + str6(lo:up) = c0 + str7(lo:up) = c0 + str8(lo:up) = c0 + + call domp_get_domain(u + 1, uu, lo, up) + halo_parent(lo:up) = 0 + uvel(lo:up) = c0 + vvel(lo:up) = c0 + str1(lo:up) = c0 + str2(lo:up) = c0 + str3(lo:up) = c0 + str4(lo:up) = c0 + str5(lo:up) = c0 + str6(lo:up) = c0 + str7(lo:up) = c0 + str8(lo:up) = c0 + + end subroutine numainit + +!======================================================================= + +end module ice_dyn_evp_1d diff --git a/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 b/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 old mode 100644 new mode 100755 index f3685ed61..bb65f122c --- a/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 @@ -40,13 +40,11 @@ module ice_dyn_shared ssh_stress ! 'geostrophic' or 'coupled' logical (kind=log_kind), public :: & - revised_evp ! if true, use revised evp procedure + revised_evp ! if true, use revised evp procedure - integer (kind=int_kind), public :: & - kevp_kernel ! 0 = 2D org version - ! 1 = 1D representation raw (not implemented) - ! 
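
For users, the upshot of the ice_dyn_shared.F90 hunk that begins here, together with the matching validation change in ice_init.F90 further down, is that the integer kevp_kernel selector is retired in favor of a string. A sketch of the corresponding ice_in migration (the 102 value is the old testing alias noted in the removed check below; namelist input comments shown for readability):

      &dynamics_nml
          ! kevp_kernel = 102               ! before: integer selector (102 tested the 1D code)
          evp_algorithm = 'standard_2d'     ! after: default 2D solver
          ! evp_algorithm = 'shared_mem_1d' ! after: 1D shared-memory solver, no MPI
      /
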
2 = 1D + calculate distances inline (implemented) - ! 3 = 1D + calculate distances inline + real*4 internal (not implemented yet) + character (len=char_len), public :: & + evp_algorithm ! standard_2d = 2D org version (standard) + ! shared_mem_1d = 1d without mpi call and refactorization to 1d ! other EVP parameters character (len=char_len), public :: & @@ -55,12 +53,12 @@ module ice_dyn_shared ! LKD: Lemieux et al. 2015, probabilistic: Dupont et al. in prep. real (kind=dbl_kind), parameter, public :: & - eyc = 0.36_dbl_kind, & - ! coefficient for calculating the parameter E - cosw = c1 , & ! cos(ocean turning angle) ! turning angle = 0 - sinw = c0 , & ! sin(ocean turning angle) ! turning angle = 0 - a_min = p001, & ! minimum ice area - m_min = p01 ! minimum ice mass (kg/m^2) + eyc = 0.36_dbl_kind, & ! coefficient for calculating the parameter E + u0 = 5e-5_dbl_kind, & ! residual velocity for seabed stress (m/s) + cosw = c1 , & ! cos(ocean turning angle) ! turning angle = 0 + sinw = c0 , & ! sin(ocean turning angle) ! turning angle = 0 + a_min = p001 , & ! minimum ice area + m_min = p01 ! minimum ice mass (kg/m^2) real (kind=dbl_kind), public :: & revp , & ! 0 for classic EVP, 1 for revised EVP @@ -91,12 +89,11 @@ module ice_dyn_shared seabed_stress ! if true, seabed stress for landfast on real (kind=dbl_kind), public :: & - k1, & ! 1st free parameter for seabed1 grounding parameterization - k2, & ! second free parameter (N/m^3) for seabed1 grounding parametrization - alphab, & ! alphab=Cb factor in Lemieux et al 2015 - threshold_hw, & ! max water depth for grounding + k1 , & ! 1st free parameter for seabed1 grounding parameterization + k2 , & ! second free parameter (N/m^3) for seabed1 grounding parametrization + alphab , & ! alphab=Cb factor in Lemieux et al 2015 + threshold_hw ! max water depth for grounding ! see keel data from Amundrud et al. 2004 (JGR) - u0 = 5e-5_dbl_kind ! residual velocity for seabed stress (m/s) !======================================================================= @@ -1204,10 +1201,10 @@ subroutine deformations (nx_block, ny_block, & vvel , & ! y-component of velocity (m/s) dxt , & ! width of T-cell through the middle (m) dyt , & ! height of T-cell through the middle (m) - cyp , & ! 1.5*HTE - 0.5*HTE - cxp , & ! 1.5*HTN - 0.5*HTN - cym , & ! 0.5*HTE - 1.5*HTE - cxm , & ! 0.5*HTN - 1.5*HTN + cyp , & ! 1.5*HTE - 0.5*HTW + cxp , & ! 1.5*HTN - 0.5*HTS + cym , & ! 0.5*HTE - 1.5*HTW + cxm , & ! 0.5*HTN - 1.5*HTS tarear ! 1/tarea real (kind=dbl_kind), dimension (nx_block,ny_block), & @@ -1305,10 +1302,10 @@ subroutine strain_rates (nx_block, ny_block, & vvel , & ! y-component of velocity (m/s) dxt , & ! width of T-cell through the middle (m) dyt , & ! height of T-cell through the middle (m) - cyp , & ! 1.5*HTE - 0.5*HTE - cxp , & ! 1.5*HTN - 0.5*HTN - cym , & ! 0.5*HTE - 1.5*HTE - cxm ! 0.5*HTN - 1.5*HTN + cyp , & ! 1.5*HTE - 0.5*HTW + cxp , & ! 1.5*HTN - 0.5*HTS + cym , & ! 0.5*HTE - 1.5*HTW + cxm ! 0.5*HTN - 1.5*HTS real (kind=dbl_kind), intent(out):: & ! at each corner : divune, divunw, divuse, divusw , & ! divergence diff --git a/cicecore/cicedynB/dynamics/ice_dyn_vp.F90 b/cicecore/cicedynB/dynamics/ice_dyn_vp.F90 index 457a73ade..860865dba 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_vp.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_vp.F90 @@ -1149,12 +1149,12 @@ subroutine calc_zeta_dPr (nx_block, ny_block, & vvel , & ! y-component of velocity (m/s) dxt , & ! width of T-cell through the middle (m) dyt , & ! height of T-cell through the middle (m) - dxhy , & ! 
0.5*(HTE - HTE) - dyhx , & ! 0.5*(HTN - HTN) - cyp , & ! 1.5*HTE - 0.5*HTE - cxp , & ! 1.5*HTN - 0.5*HTN - cym , & ! 0.5*HTE - 1.5*HTE - cxm , & ! 0.5*HTN - 1.5*HTN + dxhy , & ! 0.5*(HTE - HTW) + dyhx , & ! 0.5*(HTN - HTS) + cyp , & ! 1.5*HTE - 0.5*HTW + cxp , & ! 1.5*HTN - 0.5*HTS + cym , & ! 0.5*HTE - 1.5*HTW + cxm , & ! 0.5*HTN - 1.5*HTS tinyarea ! min_strain_rate*tarea real (kind=dbl_kind), dimension(nx_block,ny_block,4), intent(out) :: & @@ -1335,10 +1335,10 @@ subroutine stress_vp (nx_block , ny_block , & vvel , & ! y-component of velocity (m/s) dxt , & ! width of T-cell through the middle (m) dyt , & ! height of T-cell through the middle (m) - cyp , & ! 1.5*HTE - 0.5*HTE - cxp , & ! 1.5*HTN - 0.5*HTN - cym , & ! 0.5*HTE - 1.5*HTE - cxm ! 0.5*HTN - 1.5*HTN + cyp , & ! 1.5*HTE - 0.5*HTW + cxp , & ! 1.5*HTN - 0.5*HTS + cym , & ! 0.5*HTE - 1.5*HTW + cxm ! 0.5*HTN - 1.5*HTS real (kind=dbl_kind), dimension(nx_block,ny_block,4), intent(in) :: & zetaD ! 2*zeta @@ -1555,12 +1555,12 @@ subroutine matvec (nx_block, ny_block, & real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & dxt , & ! width of T-cell through the middle (m) dyt , & ! height of T-cell through the middle (m) - dxhy , & ! 0.5*(HTE - HTE) - dyhx , & ! 0.5*(HTN - HTN) - cyp , & ! 1.5*HTE - 0.5*HTE - cxp , & ! 1.5*HTN - 0.5*HTN - cym , & ! 0.5*HTE - 1.5*HTE - cxm ! 0.5*HTN - 1.5*HTN + dxhy , & ! 0.5*(HTE - HTW) + dyhx , & ! 0.5*(HTN - HTS) + cyp , & ! 1.5*HTE - 0.5*HTW + cxp , & ! 1.5*HTN - 0.5*HTS + cym , & ! 0.5*HTE - 1.5*HTW + cxm ! 0.5*HTN - 1.5*HTS real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & uvel , & ! x-component of velocity (m/s) @@ -2004,12 +2004,12 @@ subroutine formDiag_step1 (nx_block, ny_block, & real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & dxt , & ! width of T-cell through the middle (m) dyt , & ! height of T-cell through the middle (m) - dxhy , & ! 0.5*(HTE - HTE) - dyhx , & ! 0.5*(HTN - HTN) - cyp , & ! 1.5*HTE - 0.5*HTE - cxp , & ! 1.5*HTN - 0.5*HTN - cym , & ! 0.5*HTE - 1.5*HTE - cxm ! 0.5*HTN - 1.5*HTN + dxhy , & ! 0.5*(HTE - HTW) + dyhx , & ! 0.5*(HTN - HTS) + cyp , & ! 1.5*HTE - 0.5*HTW + cxp , & ! 1.5*HTN - 0.5*HTS + cym , & ! 0.5*HTE - 1.5*HTW + cxm ! 0.5*HTN - 1.5*HTS real (kind=dbl_kind), dimension(nx_block,ny_block,4), intent(in) :: & zetaD ! 
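
All of the comment corrections in this file follow one convention: HTW of cell (i,j) is HTE of its western neighbor, and HTS is HTN of its southern neighbor. Schematically, per the companion definitions in ice_grid.F90 (restated here from memory for orientation only, not part of this patch):

      ! dxhy(i,j) = p5*(HTE(i,j) - HTE(i-1,j))     ! 0.5*(HTE - HTW)
      ! dyhx(i,j) = p5*(HTN(i,j) - HTN(i,j-1))     ! 0.5*(HTN - HTS)
      ! cyp(i,j)  = c1p5*HTE(i,j) - p5*HTE(i-1,j)  ! 1.5*HTE - 0.5*HTW
      ! cxp(i,j)  = c1p5*HTN(i,j) - p5*HTN(i,j-1)  ! 1.5*HTN - 0.5*HTS
      ! cym(i,j)  = p5*HTE(i,j) - c1p5*HTE(i-1,j)  ! 0.5*HTE - 1.5*HTW
      ! cxm(i,j)  = p5*HTN(i,j) - c1p5*HTN(i,j-1)  ! 0.5*HTN - 1.5*HTS
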
2*zeta diff --git a/cicecore/cicedynB/dynamics/ice_transport_driver.F90 b/cicecore/cicedynB/dynamics/ice_transport_driver.F90 index e3da6390b..f2dff2367 100644 --- a/cicecore/cicedynB/dynamics/ice_transport_driver.F90 +++ b/cicecore/cicedynB/dynamics/ice_transport_driver.F90 @@ -85,7 +85,9 @@ subroutine init_transport integer (kind=int_kind) :: ntrcr, nt_Tsfc, nt_qice, nt_qsno, & nt_sice, nt_fbri, nt_iage, nt_FY, nt_alvl, nt_vlvl, & - nt_apnd, nt_hpnd, nt_ipnd, nt_fsd, nt_isosno, nt_isoice, nt_bgc_Nit, nt_bgc_S + nt_apnd, nt_hpnd, nt_ipnd, nt_fsd, & + nt_smice, nt_smliq, nt_rhos, nt_rsnw, & + nt_isosno, nt_isoice, nt_bgc_Nit, nt_bgc_S character(len=*), parameter :: subname = '(init_transport)' @@ -94,9 +96,12 @@ subroutine init_transport call icepack_query_tracer_sizes(ntrcr_out=ntrcr) call icepack_query_tracer_indices(nt_Tsfc_out=nt_Tsfc, nt_qice_out=nt_qice, & nt_qsno_out=nt_qsno, nt_sice_out=nt_sice, nt_fbri_out=nt_fbri, & - nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_alvl_out=nt_alvl, nt_fsd_out=nt_fsd, & - nt_vlvl_out=nt_vlvl, nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, & - nt_ipnd_out=nt_ipnd, nt_bgc_Nit_out=nt_bgc_Nit, nt_bgc_S_out=nt_bgc_S, & + nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_fsd_out=nt_fsd, & + nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & + nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, nt_ipnd_out=nt_ipnd, & + nt_smice_out=nt_smice, nt_smliq_out=nt_smliq, nt_rhos_out=nt_rhos, & + nt_rsnw_out=nt_rsnw, & + nt_bgc_Nit_out=nt_bgc_Nit, nt_bgc_S_out=nt_bgc_S, & nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -195,6 +200,18 @@ subroutine init_transport if (nt-k==nt_ipnd) & write(nu_diag,1000) 'nt_ipnd ',nt,depend(nt),tracer_type(nt),& has_dependents(nt) + if (nt-k==nt_smice) & + write(nu_diag,1000) 'nt_smice ',nt,depend(nt),tracer_type(nt),& + has_dependents(nt) + if (nt-k==nt_smliq) & + write(nu_diag,1000) 'nt_smliq ',nt,depend(nt),tracer_type(nt),& + has_dependents(nt) + if (nt-k==nt_rhos) & + write(nu_diag,1000) 'nt_rhos ',nt,depend(nt),tracer_type(nt),& + has_dependents(nt) + if (nt-k==nt_rsnw) & + write(nu_diag,1000) 'nt_rsnw ',nt,depend(nt),tracer_type(nt),& + has_dependents(nt) if (nt-k==nt_fsd) & write(nu_diag,1000) 'nt_fsd ',nt,depend(nt),tracer_type(nt),& has_dependents(nt) diff --git a/cicecore/cicedynB/general/ice_flux.F90 b/cicecore/cicedynB/general/ice_flux.F90 index bcc7305ff..23fb9df63 100644 --- a/cicecore/cicedynB/general/ice_flux.F90 +++ b/cicecore/cicedynB/general/ice_flux.F90 @@ -218,6 +218,7 @@ module ice_flux fresh , & ! fresh water flux to ocean (kg/m^2/s) fsalt , & ! salt flux to ocean (kg/m^2/s) fhocn , & ! net heat flux to ocean (W/m^2) + fsloss , & ! rate of snow loss to leads (kg/m^2/s) fswthru , & ! shortwave penetrating to ocean (W/m^2) fswthru_vdr , & ! vis dir shortwave penetrating to ocean (W/m^2) fswthru_vdf , & ! vis dif shortwave penetrating to ocean (W/m^2) @@ -294,6 +295,10 @@ module ice_flux fsensn, & ! category sensible heat flux flatn ! category latent heat flux + real (kind=dbl_kind), & + dimension (:,:,:,:), allocatable, public :: & + snwcnt ! counter for presence of snow + ! As above but these remain grid box mean values i.e. they are not ! divided by aice at end of ice_dynamics. These are used in ! CICE_IN_NEMO for coupling and also for generating @@ -448,6 +453,7 @@ subroutine alloc_flux fresh (nx_block,ny_block,max_blocks), & ! fresh water flux to ocean (kg/m^2/s) fsalt (nx_block,ny_block,max_blocks), & ! 
salt flux to ocean (kg/m^2/s) fhocn (nx_block,ny_block,max_blocks), & ! net heat flux to ocean (W/m^2) + fsloss (nx_block,ny_block,max_blocks), & ! rate of snow loss to leads (kg/m^2/s) fswthru (nx_block,ny_block,max_blocks), & ! shortwave penetrating to ocean (W/m^2) fswthru_vdr (nx_block,ny_block,max_blocks), & ! vis dir shortwave penetrating to ocean (W/m^2) fswthru_vdf (nx_block,ny_block,max_blocks), & ! vis dif shortwave penetrating to ocean (W/m^2) @@ -525,6 +531,7 @@ subroutine alloc_flux fsensn (nx_block,ny_block,ncat,max_blocks), & ! category sensible heat flux flatn (nx_block,ny_block,ncat,max_blocks), & ! category latent heat flux albcnt (nx_block,ny_block,max_blocks,max_nstrm), & ! counter for zenith angle + snwcnt (nx_block,ny_block,max_blocks,max_nstrm), & ! counter for snow salinz (nx_block,ny_block,nilyr+1,max_blocks), & ! initial salinity profile (ppt) Tmltz (nx_block,ny_block,nilyr+1,max_blocks), & ! initial melting temperature (^oC) stat=ierr) diff --git a/cicecore/cicedynB/general/ice_forcing.F90 b/cicecore/cicedynB/general/ice_forcing.F90 index a71e6dd17..84bf1d461 100755 --- a/cicecore/cicedynB/general/ice_forcing.F90 +++ b/cicecore/cicedynB/general/ice_forcing.F90 @@ -41,7 +41,7 @@ module ice_forcing field_type_vector, field_loc_NEcorner use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc, only: icepack_sea_freezing_temperature - use icepack_intfc, only: icepack_init_wave + use icepack_intfc, only: icepack_init_wave, icepack_init_parameters use icepack_intfc, only: icepack_query_tracer_indices, icepack_query_parameters implicit none @@ -50,7 +50,8 @@ module ice_forcing get_forcing_atmo, get_forcing_ocn, get_wave_spec, & read_clim_data, read_clim_data_nc, & interpolate_data, interp_coeff_monthly, & - read_data_nc_point, interp_coeff + read_data_nc_point, interp_coeff, & + init_snowtable integer (kind=int_kind), public :: & ycycle , & ! number of years in forcing cycle, set by namelist @@ -166,6 +167,16 @@ module ice_forcing integer (kind=int_kind), public :: & Njday_atm ! Number of atm forcing timesteps + character (len=char_len_long), public :: & + snw_filename ! filename for snow lookup table + + character (char_len), public :: & + snw_rhos_fname , & ! snow table 1d rhos field name + snw_Tgrd_fname , & ! snow table 1d Tgrd field name + snw_T_fname , & ! snow table 1d T field name + snw_tau_fname , & ! snow table 3d tau field name + snw_kappa_fname, & ! snow table 3d kappa field name + snw_drdt0_fname ! snow table 3d drdt0 field name ! PRIVATE: @@ -5398,7 +5409,199 @@ end subroutine get_wave_spec !======================================================================= - end module ice_forcing +! initial snow aging lookup table +! +! Dry snow metamorphism table +! snicar_drdt_bst_fit_60_c070416.nc +! Flanner (file metadata units mislabelled) +! drdsdt0 (10^-6 m/hr) tau (10^-6 m) +! + subroutine init_snowtable + + use ice_broadcast, only: broadcast_array, broadcast_scalar + integer (kind=int_kind) :: & + idx_T_max , & ! Table dimensions + idx_rhos_max, & + idx_Tgrd_max + real (kind=dbl_kind), allocatable :: & + snowage_rhos (:), & + snowage_Tgrd (:), & + snowage_T (:), & + snowage_tau (:,:,:), & + snowage_kappa(:,:,:), & + snowage_drdt0(:,:,:) + + ! local variables + + logical (kind=log_kind) :: diag = .false. + + integer (kind=int_kind) :: & + fid ! file id for netCDF file + + character (char_len) :: & + snw_aging_table, & ! aging table setting + fieldname ! field name in netcdf file + + integer (kind=int_kind) :: & + j, k ! 
indices + + character(len=*), parameter :: subname = '(init_snowtable)' + + !----------------------------------------------------------------- + ! read table of snow aging parameters + !----------------------------------------------------------------- + + call icepack_query_parameters(snw_aging_table_out=snw_aging_table, & + isnw_rhos_out=idx_rhos_max, isnw_Tgrd_out=idx_Tgrd_max, isnw_T_out=idx_T_max) + + if (my_task == master_task) then + write (nu_diag,*) ' ' + write (nu_diag,*) 'Snow aging file:', trim(snw_filename) + endif + + if (snw_aging_table == 'snicar') then + ! just read the 3d data and pass it in + + call ice_open_nc(snw_filename,fid) + + allocate(snowage_tau (idx_rhos_max, idx_Tgrd_max, idx_T_max)) + allocate(snowage_kappa(idx_rhos_max, idx_Tgrd_max, idx_T_max)) + allocate(snowage_drdt0(idx_rhos_max, idx_Tgrd_max, idx_T_max)) + + fieldname = trim(snw_tau_fname) + call ice_read_nc(fid,fieldname,snowage_tau, diag, & + idx_rhos_max,idx_Tgrd_max,idx_T_max) + fieldname = trim(snw_kappa_fname) + call ice_read_nc(fid,fieldname,snowage_kappa,diag, & + idx_rhos_max,idx_Tgrd_max,idx_T_max) + fieldname = trim(snw_drdt0_fname) + call ice_read_nc(fid,fieldname,snowage_drdt0,diag, & + idx_rhos_max,idx_Tgrd_max,idx_T_max) + + call ice_close_nc(fid) + + call broadcast_array(snowage_tau , master_task) + call broadcast_array(snowage_kappa, master_task) + call broadcast_array(snowage_drdt0, master_task) + + if (my_task == master_task) then + write(nu_diag,*) subname,' ' + write(nu_diag,*) subname,' Successfully read snow aging properties:' + write(nu_diag,*) subname,' snw_aging_table = ',trim(snw_aging_table) + write(nu_diag,*) subname,' idx_rhos_max = ',idx_rhos_max + write(nu_diag,*) subname,' idx_Tgrd_max = ',idx_Tgrd_max + write(nu_diag,*) subname,' idx_T_max = ',idx_T_max + write(nu_diag,*) subname,' Data at rhos, Tgrd, T at first index ' + write(nu_diag,*) subname,' snoage_tau (1,1,1) = ',snowage_tau (1,1,1) + write(nu_diag,*) subname,' snoage_kappa (1,1,1) = ',snowage_kappa(1,1,1) + write(nu_diag,*) subname,' snoage_drdt0 (1,1,1) = ',snowage_drdt0(1,1,1) + write(nu_diag,*) subname,' Data at rhos, Tgrd, T at max index' + write(nu_diag,*) subname,' snoage_tau (max,max,max) = ',snowage_tau (idx_rhos_max, idx_Tgrd_max, idx_T_max) + write(nu_diag,*) subname,' snoage_kappa (max,max,max) = ',snowage_kappa(idx_rhos_max, idx_Tgrd_max, idx_T_max) + write(nu_diag,*) subname,' snoage_drdt0 (max,max,max) = ',snowage_drdt0(idx_rhos_max, idx_Tgrd_max, idx_T_max) + endif + + call icepack_init_parameters( & + snowage_tau_in = snowage_tau, & + snowage_kappa_in = snowage_kappa, & + snowage_drdt0_in = snowage_drdt0 ) + + deallocate(snowage_tau) + deallocate(snowage_kappa) + deallocate(snowage_drdt0) + + else + ! 
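
The else branch that continues below handles the snw_aging_table = 'file' case, where the table dimensions themselves come from the 1D variables named in the snow_nml group (introduced later in this patch, in ice_init.F90). A hypothetical snow_nml fragment for that mode (the path and field names are placeholders, not shipped defaults):

      &snow_nml
          snw_aging_table = 'file'
          snw_filename    = 'snow_aging_table.nc' ! placeholder path
          snw_rhos_fname  = 'snowage_rhos'
          snw_Tgrd_fname  = 'snowage_Tgrd'
          snw_T_fname     = 'snowage_T'
          snw_tau_fname   = 'snowage_tau'
          snw_kappa_fname = 'snowage_kappa'
          snw_drdt0_fname = 'snowage_drdt0'
      /
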
read everything and pass it in + + call ice_open_nc(snw_filename,fid) + + fieldname = trim(snw_rhos_fname) + call ice_get_ncvarsize(fid,fieldname,idx_rhos_max) + fieldname = trim(snw_Tgrd_fname) + call ice_get_ncvarsize(fid,fieldname,idx_Tgrd_max) + fieldname = trim(snw_T_fname) + call ice_get_ncvarsize(fid,fieldname,idx_T_max) + + call broadcast_scalar(idx_rhos_max, master_task) + call broadcast_scalar(idx_Tgrd_max, master_task) + call broadcast_scalar(idx_T_max , master_task) + + allocate(snowage_rhos (idx_rhos_max)) + allocate(snowage_Tgrd (idx_Tgrd_max)) + allocate(snowage_T (idx_T_max)) + allocate(snowage_tau (idx_rhos_max, idx_Tgrd_max, idx_T_max)) + allocate(snowage_kappa(idx_rhos_max, idx_Tgrd_max, idx_T_max)) + allocate(snowage_drdt0(idx_rhos_max, idx_Tgrd_max, idx_T_max)) + + fieldname = trim(snw_rhos_fname) + call ice_read_nc(fid,fieldname,snowage_rhos, diag, & + idx_rhos_max) + fieldname = trim(snw_Tgrd_fname) + call ice_read_nc(fid,fieldname,snowage_Tgrd, diag, & + idx_Tgrd_max) + fieldname = trim(snw_T_fname) + call ice_read_nc(fid,fieldname,snowage_T, diag, & + idx_T_max) + + fieldname = trim(snw_tau_fname) + call ice_read_nc(fid,fieldname,snowage_tau, diag, & + idx_rhos_max,idx_Tgrd_max,idx_T_max) + fieldname = trim(snw_kappa_fname) + call ice_read_nc(fid,fieldname,snowage_kappa,diag, & + idx_rhos_max,idx_Tgrd_max,idx_T_max) + fieldname = trim(snw_drdt0_fname) + call ice_read_nc(fid,fieldname,snowage_drdt0,diag, & + idx_rhos_max,idx_Tgrd_max,idx_T_max) + + call ice_close_nc(fid) + + call broadcast_array(snowage_rhos , master_task) + call broadcast_array(snowage_Tgrd , master_task) + call broadcast_array(snowage_T , master_task) + call broadcast_array(snowage_tau , master_task) + call broadcast_array(snowage_kappa, master_task) + call broadcast_array(snowage_drdt0, master_task) + + if (my_task == master_task) then + write(nu_diag,*) subname,' ' + write(nu_diag,*) subname,' Successfully read snow aging properties:' + write(nu_diag,*) subname,' idx_rhos_max = ',idx_rhos_max + write(nu_diag,*) subname,' idx_Tgrd_max = ',idx_Tgrd_max + write(nu_diag,*) subname,' idx_T_max = ',idx_T_max + write(nu_diag,*) subname,' Data at rhos, Tgrd, T = ',snowage_rhos(1),snowage_Tgrd(1),snowage_T(1) + write(nu_diag,*) subname,' snoage_tau (1,1,1) = ',snowage_tau (1,1,1) + write(nu_diag,*) subname,' snoage_kappa (1,1,1) = ',snowage_kappa(1,1,1) + write(nu_diag,*) subname,' snoage_drdt0 (1,1,1) = ',snowage_drdt0(1,1,1) + write(nu_diag,*) subname,' Data at rhos, Tgrd, T = ',snowage_rhos(idx_rhos_max),snowage_Tgrd(idx_Tgrd_max),snowage_T(idx_T_max) + write(nu_diag,*) subname,' snoage_tau (max,max,max) = ',snowage_tau (idx_rhos_max, idx_Tgrd_max, idx_T_max) + write(nu_diag,*) subname,' snoage_kappa (max,max,max) = ',snowage_kappa(idx_rhos_max, idx_Tgrd_max, idx_T_max) + write(nu_diag,*) subname,' snoage_drdt0 (max,max,max) = ',snowage_drdt0(idx_rhos_max, idx_Tgrd_max, idx_T_max) + endif + + call icepack_init_parameters( & + isnw_t_in = idx_T_max, & + isnw_Tgrd_in = idx_Tgrd_max, & + isnw_rhos_in = idx_rhos_max, & + snowage_rhos_in = snowage_rhos, & + snowage_Tgrd_in = snowage_Tgrd, & + snowage_T_in = snowage_T, & + snowage_tau_in = snowage_tau, & + snowage_kappa_in = snowage_kappa, & + snowage_drdt0_in = snowage_drdt0 ) + + deallocate(snowage_rhos) + deallocate(snowage_Tgrd) + deallocate(snowage_T) + deallocate(snowage_tau) + deallocate(snowage_kappa) + deallocate(snowage_drdt0) + + endif + + end subroutine init_snowtable !======================================================================= + end 
module ice_forcing + +!======================================================================= diff --git a/cicecore/cicedynB/general/ice_init.F90 b/cicecore/cicedynB/general/ice_init.F90 index b896c3bb9..3d102217a 100644 --- a/cicecore/cicedynB/general/ice_init.F90 +++ b/cicecore/cicedynB/general/ice_init.F90 @@ -74,7 +74,7 @@ subroutine input_data use ice_arrays_column, only: oceanmixed_ice use ice_restart_column, only: restart_age, restart_FY, restart_lvl, & restart_pond_cesm, restart_pond_lvl, restart_pond_topo, restart_aero, & - restart_fsd, restart_iso + restart_fsd, restart_iso, restart_snow use ice_restart_shared, only: & restart, restart_ext, restart_coszen, restart_dir, restart_file, pointer_file, & runid, runtype, use_restart_time, restart_format, lcdf64 @@ -91,15 +91,19 @@ subroutine input_data bgc_data_type, & ocn_data_type, ocn_data_dir, wave_spec_file, & oceanmixed_file, restore_ocn, trestore, & - ice_data_type + ice_data_type, & + snw_filename, & + snw_tau_fname, snw_kappa_fname, snw_drdt0_fname, & + snw_rhos_fname, snw_Tgrd_fname, snw_T_fname use ice_arrays_column, only: bgc_data_dir, fe_data_type use ice_grid, only: grid_file, gridcpl_file, kmt_file, & bathymetry_file, use_bathymetry, & bathymetry_format, & grid_type, grid_format, & - dxrect, dyrect + dxrect, dyrect, & + pgl_global_ext use ice_dyn_shared, only: ndte, kdyn, revised_evp, yield_curve, & - kevp_kernel, & + evp_algorithm, & seabed_stress, seabed_stress_method, & k1, k2, alphab, threshold_hw, & Ktens, e_ratio, coriolis, ssh_stress, & @@ -128,19 +132,21 @@ subroutine input_data mu_rdg, hs0, dpscale, rfracmin, rfracmax, pndaspect, hs1, hp1, & a_rapid_mode, Rac_rapid_mode, aspect_rapid_mode, dSdt_slow_mode, & phi_c_slow_mode, phi_i_mushy, kalg, atmiter_conv, Pstar, Cstar, & - sw_frac, sw_dtemp, floediam, hfrazilmin, iceruf, iceruf_ocn + sw_frac, sw_dtemp, floediam, hfrazilmin, iceruf, iceruf_ocn, & + rsnw_fall, rsnw_tmax, rhosnew, rhosmin, rhosmax, & + windmin, drhosdwind, snwlvlfac integer (kind=int_kind) :: ktherm, kstrength, krdg_partic, krdg_redist, natmiter, & kitd, kcatbound, ktransport character (len=char_len) :: shortwave, albedo_type, conduct, fbot_xfer_type, & - tfrz_option, frzpnd, atmbndy, wave_spec_type + tfrz_option, frzpnd, atmbndy, wave_spec_type, snwredist, snw_aging_table logical (kind=log_kind) :: calc_Tsfc, formdrag, highfreq, calc_strair, wave_spec, & - sw_redist, calc_dragio + sw_redist, calc_dragio, use_smliq_pnd, snwgrain logical (kind=log_kind) :: tr_iage, tr_FY, tr_lvl, tr_pond - logical (kind=log_kind) :: tr_iso, tr_aero, tr_fsd + logical (kind=log_kind) :: tr_iso, tr_aero, tr_fsd, tr_snow logical (kind=log_kind) :: tr_pond_cesm, tr_pond_lvl, tr_pond_topo integer (kind=int_kind) :: numin, numax ! 
unit number limits @@ -187,6 +193,7 @@ subroutine input_data tr_pond_cesm, restart_pond_cesm, & tr_pond_lvl, restart_pond_lvl, & tr_pond_topo, restart_pond_topo, & + tr_snow, restart_snow, & tr_iso, restart_iso, & tr_aero, restart_aero, & tr_fsd, restart_fsd, & @@ -201,7 +208,7 @@ subroutine input_data namelist /dynamics_nml/ & kdyn, ndte, revised_evp, yield_curve, & - kevp_kernel, & + evp_algorithm, & brlx, arlx, ssh_stress, & advection, coriolis, kridge, ktransport, & kstrength, krdg_partic, krdg_redist, mu_rdg, & @@ -227,6 +234,13 @@ subroutine input_data rfracmin, rfracmax, pndaspect, hs1, & hp1 + namelist /snow_nml/ & + snwredist, snwgrain, rsnw_fall, rsnw_tmax, & + rhosnew, rhosmin, rhosmax, snwlvlfac, & + windmin, drhosdwind, use_smliq_pnd, snw_aging_table,& + snw_filename, snw_rhos_fname, snw_Tgrd_fname,snw_T_fname, & + snw_tau_fname, snw_kappa_fname, snw_drdt0_fname + namelist /forcing_nml/ & formdrag, atmbndy, calc_strair, calc_Tsfc, & highfreq, natmiter, atmiter_conv, calc_dragio, & @@ -329,7 +343,8 @@ subroutine input_data kdyn = 1 ! type of dynamics (-1, 0 = off, 1 = evp, 2 = eap, 3 = vp) ndtd = 1 ! dynamic time steps per thermodynamic time step ndte = 120 ! subcycles per dynamics timestep: ndte=dt_dyn/dte - kevp_kernel = 0 ! EVP kernel (0 = 2D, >0: 1D. Only ver. 2 is implemented yet) + evp_algorithm = 'standard_2d' ! EVP kernel (=standard_2d: standard cice evp; =shared_mem_1d: 1d shared memory and no mpi. if more mpi processors then executed on master + pgl_global_ext = .false. ! if true, init primary grid lengths (global ext.) brlx = 300.0_dbl_kind ! revised_evp values. Otherwise overwritten in ice_dyn_shared arlx = 300.0_dbl_kind ! revised_evp values. Otherwise overwritten in ice_dyn_shared revised_evp = .false. ! if true, use revised procedure for evp dynamics @@ -409,6 +424,25 @@ subroutine input_data rfracmin = 0.15_dbl_kind ! minimum retained fraction of meltwater rfracmax = 0.85_dbl_kind ! maximum retained fraction of meltwater pndaspect = 0.8_dbl_kind ! ratio of pond depth to area fraction + snwredist = 'none' ! type of snow redistribution + snw_aging_table = 'test' ! snow aging lookup table + snw_filename = 'unknown' ! snowtable filename + snw_tau_fname = 'unknown' ! snowtable file tau fieldname + snw_kappa_fname = 'unknown' ! snowtable file kappa fieldname + snw_drdt0_fname = 'unknown' ! snowtable file drdt0 fieldname + snw_rhos_fname = 'unknown' ! snowtable file rhos fieldname + snw_Tgrd_fname = 'unknown' ! snowtable file Tgrd fieldname + snw_T_fname = 'unknown' ! snowtable file T fieldname + snwgrain = .false. ! snow metamorphosis + use_smliq_pnd = .false. ! use liquid in snow for ponds + rsnw_fall = 100.0_dbl_kind ! radius of new snow (10^-6 m) ! advanced snow physics: 54.526 x 10^-6 m + rsnw_tmax = 1500.0_dbl_kind ! maximum snow radius (10^-6 m) + rhosnew = 100.0_dbl_kind ! new snow density (kg/m^3) + rhosmin = 100.0_dbl_kind ! minimum snow density (kg/m^3) + rhosmax = 450.0_dbl_kind ! maximum snow density (kg/m^3) + windmin = 10.0_dbl_kind ! minimum wind speed to compact snow (m/s) + drhosdwind= 27.3_dbl_kind ! wind compaction factor for snow (kg s/m^4) + snwlvlfac = 0.3_dbl_kind ! fractional increase in snow depth for bulk redistribution albicev = 0.78_dbl_kind ! visible ice albedo for h > ahmax albicei = 0.36_dbl_kind ! near-ir ice albedo for h > ahmax albsnowv = 0.98_dbl_kind ! cold snow albedo, visible @@ -472,6 +506,8 @@ subroutine input_data restart_pond_lvl = .false. ! melt ponds restart tr_pond_topo = .false. ! 
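
Collecting the defaults just set, a minimal ice_in fragment that switches the full snow scheme on might read as follows. This is a sketch: snwredist, snwgrain, and use_smliq_pnd default off, the numeric values simply repeat the defaults above, tr_snow must also be enabled in tracer_nml, and snwredist = 'ITDrdg' additionally requires tr_lvl per the consistency checks below.

      &snow_nml
          snwredist       = 'ITDrdg'
          snwgrain        = .true.
          use_smliq_pnd   = .true.
          snw_aging_table = 'test'
          rsnw_fall       = 100.0
          rsnw_tmax       = 1500.0
          rhosnew         = 100.0
          rhosmin         = 100.0
          rhosmax         = 450.0
          windmin         = 10.0
          drhosdwind      = 27.3
          snwlvlfac       = 0.3
      /
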
explicit melt ponds (topographic) restart_pond_topo = .false. ! melt ponds restart + tr_snow = .false. ! advanced snow physics + restart_snow = .false. ! advanced snow physics restart tr_iso = .false. ! isotopes restart_iso = .false. ! isotopes restart tr_aero = .false. ! aerosols @@ -545,6 +581,9 @@ subroutine input_data print*,'Reading ponds_nml' read(nu_nml, nml=ponds_nml,iostat=nml_error) if (nml_error /= 0) exit + print*,'Reading snow_nml' + read(nu_nml, nml=snow_nml,iostat=nml_error) + if (nml_error /= 0) exit print*,'Reading forcing_nml' read(nu_nml, nml=forcing_nml,iostat=nml_error) if (nml_error /= 0) exit @@ -669,7 +708,8 @@ subroutine input_data call broadcast_scalar(kdyn, master_task) call broadcast_scalar(ndtd, master_task) call broadcast_scalar(ndte, master_task) - call broadcast_scalar(kevp_kernel, master_task) + call broadcast_scalar(evp_algorithm, master_task) + call broadcast_scalar(pgl_global_ext, master_task) call broadcast_scalar(brlx, master_task) call broadcast_scalar(arlx, master_task) call broadcast_scalar(revised_evp, master_task) @@ -734,6 +774,25 @@ subroutine input_data call broadcast_scalar(rfracmin, master_task) call broadcast_scalar(rfracmax, master_task) call broadcast_scalar(pndaspect, master_task) + call broadcast_scalar(snwredist, master_task) + call broadcast_scalar(snw_aging_table, master_task) + call broadcast_scalar(snw_filename, master_task) + call broadcast_scalar(snw_tau_fname, master_task) + call broadcast_scalar(snw_kappa_fname, master_task) + call broadcast_scalar(snw_drdt0_fname, master_task) + call broadcast_scalar(snw_rhos_fname, master_task) + call broadcast_scalar(snw_Tgrd_fname, master_task) + call broadcast_scalar(snw_T_fname, master_task) + call broadcast_scalar(snwgrain, master_task) + call broadcast_scalar(use_smliq_pnd, master_task) + call broadcast_scalar(rsnw_fall, master_task) + call broadcast_scalar(rsnw_tmax, master_task) + call broadcast_scalar(rhosnew, master_task) + call broadcast_scalar(rhosmin, master_task) + call broadcast_scalar(rhosmax, master_task) + call broadcast_scalar(windmin, master_task) + call broadcast_scalar(drhosdwind, master_task) + call broadcast_scalar(snwlvlfac, master_task) call broadcast_scalar(albicev, master_task) call broadcast_scalar(albicei, master_task) call broadcast_scalar(albsnowv, master_task) @@ -797,6 +856,8 @@ subroutine input_data call broadcast_scalar(restart_pond_lvl, master_task) call broadcast_scalar(tr_pond_topo, master_task) call broadcast_scalar(restart_pond_topo, master_task) + call broadcast_scalar(tr_snow, master_task) + call broadcast_scalar(restart_snow, master_task) call broadcast_scalar(tr_iso, master_task) call broadcast_scalar(restart_iso, master_task) call broadcast_scalar(tr_aero, master_task) @@ -877,6 +938,7 @@ subroutine input_data restart_pond_cesm = .false. restart_pond_lvl = .false. restart_pond_topo = .false. + restart_snow = .false. ! tcraig, OK to leave as true, needed for boxrestore case ! restart_ext = .false. else @@ -985,6 +1047,59 @@ subroutine input_data abort_list = trim(abort_list)//":8" endif + if (snwredist(1:4) /= 'none' .and. .not. tr_snow) then + if (my_task == master_task) then + write (nu_diag,*) 'ERROR: snwredist on but tr_snow=F' + write (nu_diag,*) 'ERROR: Use tr_snow=T for snow redistribution' + endif + abort_list = trim(abort_list)//":37" + endif + if (snwredist(1:4) == 'bulk' .and. .not. 
tr_lvl) then + if (my_task == master_task) then + write (nu_diag,*) 'ERROR: snwredist=bulk but tr_lvl=F' + write (nu_diag,*) 'ERROR: Use tr_lvl=T for snow redistribution' + endif + abort_list = trim(abort_list)//":38" + endif + if (snwredist(1:6) == 'ITDrdg' .and. .not. tr_lvl) then + if (my_task == master_task) then + write (nu_diag,*) 'ERROR: snwredist=ITDrdg but tr_lvl=F' + write (nu_diag,*) 'ERROR: Use tr_lvl=T for snow redistribution' + endif + abort_list = trim(abort_list)//":39" + endif + if (use_smliq_pnd .and. .not. snwgrain) then + if (my_task == master_task) then + write (nu_diag,*) 'ERROR: use_smliq_pnd = T but' + write (nu_diag,*) 'ERROR: snow metamorphosis not used' + write (nu_diag,*) 'ERROR: Use snwgrain=T with smliq for ponds' + endif + abort_list = trim(abort_list)//":40" + endif + if (use_smliq_pnd .and. .not. tr_snow) then + if (my_task == master_task) then + write (nu_diag,*) 'ERROR: use_smliq_pnd = T but' + write (nu_diag,*) 'ERROR: snow tracers are not active' + write (nu_diag,*) 'ERROR: Use tr_snow=T with smliq for ponds' + endif + abort_list = trim(abort_list)//":41" + endif + if (snwgrain .and. .not. tr_snow) then + if (my_task == master_task) then + write (nu_diag,*) 'ERROR: snwgrain=T but tr_snow=F' + write (nu_diag,*) 'ERROR: Use tr_snow=T for snow metamorphosis' + endif + abort_list = trim(abort_list)//":42" + endif + if (trim(snw_aging_table) /= 'test' .and. & + trim(snw_aging_table) /= 'snicar' .and. & + trim(snw_aging_table) /= 'file') then + if (my_task == master_task) then + write (nu_diag,*) 'ERROR: unknown snw_aging_table = '//trim(snw_aging_table) + endif + abort_list = trim(abort_list)//":43" + endif + if (tr_iso .and. n_iso==0) then if (my_task == master_task) then write(nu_diag,*) subname//' ERROR: isotopes activated but' @@ -1014,7 +1129,7 @@ subroutine input_data if (my_task == master_task) then write(nu_diag,*) subname//' ERROR: nilyr < 1' endif - abort_list = trim(abort_list)//":33" + abort_list = trim(abort_list)//":2" endif if (nslyr < 1) then @@ -1048,6 +1163,13 @@ subroutine input_data abort_list = trim(abort_list)//":10" endif + if (trim(shortwave) /= 'dEdd' .and. snwgrain) then + if (my_task == master_task) then + write (nu_diag,*) 'WARNING: snow grain radius activated but' + write (nu_diag,*) 'WARNING: dEdd shortwave is not.' + endif + endif + if ((rfracmin < -puny .or. rfracmin > c1+puny) .or. & (rfracmax < -puny .or. rfracmax > c1+puny) .or. & (rfracmin > rfracmax)) then @@ -1293,16 +1415,16 @@ subroutine input_data tmpstr2 = ' : revised EVP formulation not used' endif write(nu_diag,1010) ' revised_evp = ', revised_evp,trim(tmpstr2) - - if (kevp_kernel == 0) then - tmpstr2 = ' : original EVP solver' - elseif (kevp_kernel == 2 .or. kevp_kernel == 102) then - tmpstr2 = ' : vectorized EVP solver' + + if (evp_algorithm == 'standard_2d') then + tmpstr2 = ' : standard 2d EVP solver' + elseif (evp_algorithm == 'shared_mem_1d') then + tmpstr2 = ' : vectorized 1d EVP solver' + pgl_global_ext = .true. 
else tmpstr2 = ' : unknown value' endif - write(nu_diag,1020) ' kevp_kernel = ', kevp_kernel,trim(tmpstr2) - + write(nu_diag,1031) ' evp_algorithm = ', trim(evp_algorithm),trim(tmpstr2) write(nu_diag,1020) ' ndtd = ', ndtd, ' : number of dynamics/advection/ridging/steps per thermo timestep' write(nu_diag,1020) ' ndte = ', ndte, ' : number of EVP or EAP subcycles' endif @@ -1652,6 +1774,78 @@ subroutine input_data write(nu_diag,1002) ' rfracmin = ', rfracmin,' : minimum fraction of melt water added to ponds' write(nu_diag,1002) ' rfracmax = ', rfracmax,' : maximum fraction of melt water added to ponds' + write(nu_diag,*) ' ' + write(nu_diag,*) ' Snow redistribution/metamorphism tracers' + write(nu_diag,*) '-----------------------------------------' + if (tr_snow) then + write(nu_diag,1010) ' tr_snow = ', tr_snow, & + ' : advanced snow physics' + if (snwredist(1:4) == 'none') then + write(nu_diag,1030) ' snwredist = ', trim(snwredist), & + ' : Snow redistribution scheme turned off' + else + if (snwredist(1:4) == 'bulk') then + write(nu_diag,1030) ' snwredist = ', trim(snwredist), & + ' : Using bulk snow redistribution scheme' + elseif (snwredist(1:6) == 'ITDrdg') then + write(nu_diag,1030) ' snwredist = ', trim(snwredist), & + ' : Using ridging based snow redistribution scheme' + write(nu_diag,1002) ' rhosnew = ', rhosnew, & + ' : new snow density (kg/m^3)' + write(nu_diag,1002) ' rhosmin = ', rhosmin, & + ' : minimum snow density (kg/m^3)' + write(nu_diag,1002) ' rhosmax = ', rhosmax, & + ' : maximum snow density (kg/m^3)' + write(nu_diag,1002) ' windmin = ', windmin, & + ' : minimum wind speed to compact snow (m/s)' + write(nu_diag,1002) ' drhosdwind = ', drhosdwind, & + ' : wind compaction factor (kg s/m^4)' + endif + write(nu_diag,1002) ' snwlvlfac = ', snwlvlfac, & + ' : fractional increase in snow depth for redistribution on ridges' + endif + if (.not. 
snwgrain) then
+            write(nu_diag,1010) ' snwgrain = ', snwgrain, &
+               ' : Snow metamorphosis turned off'
+         else
+            write(nu_diag,1010) ' snwgrain = ', snwgrain, &
+               ' : Using snow metamorphosis scheme'
+            write(nu_diag,1002) ' rsnw_tmax = ', rsnw_tmax, &
+               ' : maximum snow radius (10^-6 m)'
+         endif
+         write(nu_diag,1002) ' rsnw_fall = ', rsnw_fall, &
+            ' : radius of new snow (10^-6 m)'
+         if (snwgrain) then
+            if (use_smliq_pnd) then
+               tmpstr2 = ' : Using liquid water in snow for melt ponds'
+            else
+               tmpstr2 = ' : NOT using liquid water in snow for melt ponds'
+            endif
+            write(nu_diag,1010) ' use_smliq_pnd = ', use_smliq_pnd, trim(tmpstr2)
+            if (snw_aging_table == 'test') then
+               tmpstr2 = ' : Using 5x5x1 test matrix of internally defined snow aging parameters'
+               write(nu_diag,1030) ' snw_aging_table = ', trim(snw_aging_table),trim(tmpstr2)
+            elseif (snw_aging_table == 'snicar') then
+               tmpstr2 = ' : Reading 3D snow aging parameters from SNICAR file'
+               write(nu_diag,1030) ' snw_aging_table = ', trim(snw_aging_table),trim(tmpstr2)
+               write(nu_diag,1031) ' snw_filename = ',trim(snw_filename)
+               write(nu_diag,1031) ' snw_tau_fname = ',trim(snw_tau_fname)
+               write(nu_diag,1031) ' snw_kappa_fname = ',trim(snw_kappa_fname)
+               write(nu_diag,1031) ' snw_drdt0_fname = ',trim(snw_drdt0_fname)
+            elseif (snw_aging_table == 'file') then
+               tmpstr2 = ' : Reading 1D and 3D snow aging dimensions and parameters from external file'
+               write(nu_diag,1030) ' snw_aging_table = ', trim(snw_aging_table),trim(tmpstr2)
+               write(nu_diag,1031) ' snw_filename = ',trim(snw_filename)
+               write(nu_diag,1031) ' snw_rhos_fname = ',trim(snw_rhos_fname)
+               write(nu_diag,1031) ' snw_Tgrd_fname = ',trim(snw_Tgrd_fname)
+               write(nu_diag,1031) ' snw_T_fname = ',trim(snw_T_fname)
+               write(nu_diag,1031) ' snw_tau_fname = ',trim(snw_tau_fname)
+               write(nu_diag,1031) ' snw_kappa_fname = ',trim(snw_kappa_fname)
+               write(nu_diag,1031) ' snw_drdt0_fname = ',trim(snw_drdt0_fname)
+            endif
+         endif
+      endif
+
       write(nu_diag,*) ' '
       write(nu_diag,*) ' Primary state variables, tracers'
       write(nu_diag,*) ' (excluding biogeochemistry)'
@@ -1665,6 +1859,7 @@ subroutine input_data
       if (tr_pond_lvl) write(nu_diag,1010) ' tr_pond_lvl = ', tr_pond_lvl,' : level-ice pond formulation'
       if (tr_pond_topo) write(nu_diag,1010) ' tr_pond_topo = ', tr_pond_topo,' : topo pond formulation'
       if (tr_pond_cesm) write(nu_diag,1010) ' tr_pond_cesm = ', tr_pond_cesm,' : CESM pond formulation'
+      if (tr_snow) write(nu_diag,1010) ' tr_snow = ', tr_snow,' : advanced snow physics'
       if (tr_iage) write(nu_diag,1010) ' tr_iage = ', tr_iage,' : chronological ice age'
       if (tr_FY) write(nu_diag,1010) ' tr_FY = ', tr_FY,' : first-year ice area'
       if (tr_iso) write(nu_diag,1010) ' tr_iso = ', tr_iso,' : diagnostic isotope tracers'
@@ -1702,13 +1897,13 @@ subroutine input_data
       write(nu_diag,1023) ' histfreq_n = ', histfreq_n(:)
       write(nu_diag,1031) ' histfreq_base = ', trim(histfreq_base)
       write(nu_diag,1011) ' hist_avg = ', hist_avg
-      if (.not. 
hist_avg) write(nu_diag,1039) ' History data will be snapshots' write(nu_diag,1031) ' history_dir = ', trim(history_dir) write(nu_diag,1031) ' history_file = ', trim(history_file) write(nu_diag,1021) ' history_precision= ', history_precision write(nu_diag,1031) ' history_format = ', trim(history_format) if (write_ic) then - write(nu_diag,1031) ' Initial condition will be written in ', & + write(nu_diag,1039) ' Initial condition will be written in ', & trim(incond_dir) endif write(nu_diag,1031) ' dumpfreq = ', trim(dumpfreq) @@ -1786,6 +1981,7 @@ subroutine input_data write(nu_diag,1011) ' restart_pond_cesm= ', restart_pond_cesm write(nu_diag,1011) ' restart_pond_lvl = ', restart_pond_lvl write(nu_diag,1011) ' restart_pond_topo= ', restart_pond_topo + write(nu_diag,1011) ' restart_snow = ', restart_snow write(nu_diag,1011) ' restart_iso = ', restart_iso write(nu_diag,1011) ' restart_aero = ', restart_aero write(nu_diag,1011) ' restart_fsd = ', restart_fsd @@ -1815,19 +2011,11 @@ subroutine input_data abort_list = trim(abort_list)//":20" endif - ! check for valid kevp_kernel - ! tcraig, kevp_kernel=2 is not validated, do not allow use - ! use "102" to test "2" for now - if (kevp_kernel /= 0) then - if (kevp_kernel == 102) then - kevp_kernel = 2 - else - if (my_task == master_task) write(nu_diag,*) subname//' ERROR: kevp_kernel = ',kevp_kernel - if (kevp_kernel == 2) then - if (my_task == master_task) write(nu_diag,*) subname//' kevp_kernel=2 not validated, use kevp_kernel=102 for testing until it is validated' - endif - abort_list = trim(abort_list)//":21" - endif + if (kdyn == 1 .and. & + evp_algorithm /= 'standard_2d' .and. & + evp_algorithm /= 'shared_mem_1d') then + if (my_task == master_task) write(nu_diag,*) subname//' ERROR: unknown evp_algorithm=',trim(evp_algorithm) + abort_list = trim(abort_list)//":21" endif if (abort_list /= "") then @@ -1858,10 +2046,14 @@ subroutine input_data wave_spec_in=wave_spec, nfreq_in=nfreq, & tfrz_option_in=tfrz_option, kalg_in=kalg, fbot_xfer_type_in=fbot_xfer_type, & Pstar_in=Pstar, Cstar_in=Cstar, iceruf_in=iceruf, iceruf_ocn_in=iceruf_ocn, calc_dragio_in=calc_dragio, & + windmin_in=windmin, drhosdwind_in=drhosdwind, & + rsnw_fall_in=rsnw_fall, rsnw_tmax_in=rsnw_tmax, rhosnew_in=rhosnew, & + snwlvlfac_in=snwlvlfac, rhosmin_in=rhosmin, rhosmax_in=rhosmax, & + snwredist_in=snwredist, snwgrain_in=snwgrain, snw_aging_table_in=trim(snw_aging_table), & sw_redist_in=sw_redist, sw_frac_in=sw_frac, sw_dtemp_in=sw_dtemp) call icepack_init_tracer_flags(tr_iage_in=tr_iage, tr_FY_in=tr_FY, & tr_lvl_in=tr_lvl, tr_iso_in=tr_iso, tr_aero_in=tr_aero, & - tr_fsd_in=tr_fsd, tr_pond_in=tr_pond, & + tr_fsd_in=tr_fsd, tr_snow_in=tr_snow, tr_pond_in=tr_pond, & tr_pond_cesm_in=tr_pond_cesm, tr_pond_lvl_in=tr_pond_lvl, tr_pond_topo_in=tr_pond_topo) call icepack_init_tracer_sizes(ncat_in=ncat, nilyr_in=nilyr, nslyr_in=nslyr, nblyr_in=nblyr, & nfsd_in=nfsd, n_algae_in=n_algae, n_iso_in=n_iso, n_aero_in=n_aero, & @@ -1883,6 +2075,7 @@ subroutine input_data 1030 format (a20,a14,1x,a) ! character 1031 format (a20,1x,a,a) 1033 format (a20,1x,6a6) + 1039 format (a,1x,a,1x,a,1x,a) end subroutine input_data @@ -1918,10 +2111,12 @@ subroutine init_state heat_capacity ! 
from icepack integer (kind=int_kind) :: ntrcr - logical (kind=log_kind) :: tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_fsd + logical (kind=log_kind) :: tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero logical (kind=log_kind) :: tr_pond_cesm, tr_pond_lvl, tr_pond_topo + logical (kind=log_kind) :: tr_snow, tr_fsd integer (kind=int_kind) :: nt_Tsfc, nt_sice, nt_qice, nt_qsno, nt_iage, nt_FY integer (kind=int_kind) :: nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd + integer (kind=int_kind) :: nt_smice, nt_smliq, nt_rhos, nt_rsnw integer (kind=int_kind) :: nt_isosno, nt_isoice, nt_aero, nt_fsd type (block) :: & @@ -1934,12 +2129,15 @@ subroutine init_state call icepack_query_parameters(heat_capacity_out=heat_capacity) call icepack_query_tracer_sizes(ntrcr_out=ntrcr) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & - tr_lvl_out=tr_lvl, tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_fsd_out=tr_fsd, & - tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, tr_pond_topo_out=tr_pond_topo) + tr_lvl_out=tr_lvl, tr_iso_out=tr_iso, tr_aero_out=tr_aero, & + tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, tr_pond_topo_out=tr_pond_topo, & + tr_snow_out=tr_snow, tr_fsd_out=tr_fsd) call icepack_query_tracer_indices(nt_Tsfc_out=nt_Tsfc, nt_sice_out=nt_sice, & nt_qice_out=nt_qice, nt_qsno_out=nt_qsno, nt_iage_out=nt_iage, nt_fy_out=nt_fy, & nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, & nt_ipnd_out=nt_ipnd, nt_aero_out=nt_aero, nt_fsd_out=nt_fsd, & + nt_smice_out=nt_smice, nt_smliq_out=nt_smliq, & + nt_rhos_out=nt_rhos, nt_rsnw_out=nt_rsnw, & nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) call icepack_warnings_flush(nu_diag) @@ -2016,6 +2214,14 @@ subroutine init_state trcr_depend(nt_hpnd) = 2+nt_apnd ! melt pond depth trcr_depend(nt_ipnd) = 2+nt_apnd ! refrozen pond lid endif + if (tr_snow) then ! snow-volume-weighted snow tracers + do k = 1, nslyr + trcr_depend(nt_smice + k - 1) = 2 ! ice mass in snow + trcr_depend(nt_smliq + k - 1) = 2 ! liquid mass in snow + trcr_depend(nt_rhos + k - 1) = 2 ! effective snow density + trcr_depend(nt_rsnw + k - 1) = 2 ! snow radius + enddo + endif if (tr_fsd) then do it = 1, nfsd trcr_depend(nt_fsd + it - 1) = 0 ! area-weighted floe size distribution @@ -2246,7 +2452,7 @@ subroutine set_state_var (nx_block, ny_block, & indxi, indxj ! compressed indices for cells with aicen > puny real (kind=dbl_kind) :: & - Tsfc, sum, hbar, puny, rhos, Lfresh, rad_to_deg + Tsfc, sum, hbar, puny, rhos, Lfresh, rad_to_deg, rsnw_fall real (kind=dbl_kind), dimension(ncat) :: & ainit, hinit ! initial area, thickness @@ -2262,22 +2468,26 @@ subroutine set_state_var (nx_block, ny_block, & edge_init_nh = 70._dbl_kind, & ! initial ice edge, N.Hem. (deg) edge_init_sh = -60._dbl_kind ! initial ice edge, S.Hem. 
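! For the trcr_depend assignments in the init_state hunk above, the convention used
! throughout this subroutine is: 0 = weighted by category ice area (aicen),
! 1 = by ice volume (vicen), 2 = by snow volume (vsnon), 2+nt = carried on tracer nt.
! A condensed restatement of the pattern (these lines mirror the surrounding code):
!
!      trcr_depend(nt_alvl)      = 0           ! aicen-weighted
!      trcr_depend(nt_qice+k-1)  = 1           ! vicen-weighted
!      trcr_depend(nt_smice+k-1) = 2           ! vsnon-weighted, as for all four snow tracers
!      trcr_depend(nt_ipnd)      = 2 + nt_apnd ! rides on the pond area tracer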
(deg) - logical (kind=log_kind) :: tr_brine, tr_lvl + logical (kind=log_kind) :: tr_brine, tr_lvl, tr_snow integer (kind=int_kind) :: ntrcr integer (kind=int_kind) :: nt_Tsfc, nt_qice, nt_qsno, nt_sice integer (kind=int_kind) :: nt_fbri, nt_alvl, nt_vlvl + integer (kind=int_kind) :: nt_smice, nt_smliq, nt_rhos, nt_rsnw character(len=*), parameter :: subname='(set_state_var)' !----------------------------------------------------------------- call icepack_query_tracer_sizes(ntrcr_out=ntrcr) - call icepack_query_tracer_flags(tr_brine_out=tr_brine, tr_lvl_out=tr_lvl) + call icepack_query_tracer_flags(tr_brine_out=tr_brine, tr_lvl_out=tr_lvl, & + tr_snow_out=tr_snow) call icepack_query_tracer_indices( nt_Tsfc_out=nt_Tsfc, nt_qice_out=nt_qice, & nt_qsno_out=nt_qsno, nt_sice_out=nt_sice, & - nt_fbri_out=nt_fbri, nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl) + nt_fbri_out=nt_fbri, nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & + nt_smice_out=nt_smice, nt_smliq_out=nt_smliq, & + nt_rhos_out=nt_rhos, nt_rsnw_out=nt_rsnw) call icepack_query_parameters(rhos_out=rhos, Lfresh_out=Lfresh, puny_out=puny, & - rad_to_deg_out=rad_to_deg) + rad_to_deg_out=rad_to_deg, rsnw_fall_out=rsnw_fall) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -2309,6 +2519,14 @@ subroutine set_state_var (nx_block, ny_block, & do k = 1, nslyr trcrn(i,j,nt_qsno+k-1,n) = -rhos * Lfresh enddo + if (tr_snow) then + do k = 1, nslyr + trcrn(i,j,nt_rsnw +k-1,n) = rsnw_fall + trcrn(i,j,nt_rhos +k-1,n) = rhos + trcrn(i,j,nt_smice+k-1,n) = rhos + trcrn(i,j,nt_smliq+k-1,n) = c0 + enddo ! nslyr + endif enddo enddo enddo diff --git a/cicecore/cicedynB/general/ice_step_mod.F90 b/cicecore/cicedynB/general/ice_step_mod.F90 index d65cf52d3..976e95361 100644 --- a/cicecore/cicedynB/general/ice_step_mod.F90 +++ b/cicecore/cicedynB/general/ice_step_mod.F90 @@ -36,7 +36,7 @@ module ice_step_mod private public :: step_therm1, step_therm2, step_dyn_horiz, step_dyn_ridge, & - prep_radiation, step_radiation, ocean_mixed_layer, & + step_snow, prep_radiation, step_radiation, ocean_mixed_layer, & update_state, biogeochemistry, save_init, step_dyn_wave !======================================================================= @@ -163,7 +163,7 @@ subroutine step_therm1 (dt, iblk) Cdn_ocn, Cdn_ocn_skin, Cdn_ocn_floe, Cdn_ocn_keel, Cdn_atm_ratio, & Cdn_atm, Cdn_atm_skin, Cdn_atm_floe, Cdn_atm_rdg, Cdn_atm_pond, & hfreebd, hdraft, hridge, distrdg, hkeel, dkeel, lfloe, dfloe, & - fswsfcn, fswintn, Sswabsn, Iswabsn, & + fswsfcn, fswintn, Sswabsn, Iswabsn, meltsliqn, meltsliq, & fswthrun, fswthrun_vdr, fswthrun_vdf, fswthrun_idr, fswthrun_idf use ice_blocks, only: block, get_block, nx_block, ny_block use ice_calendar, only: yday @@ -172,13 +172,13 @@ subroutine step_therm1 (dt, iblk) use ice_flux, only: frzmlt, sst, Tf, strocnxT, strocnyT, rside, fbot, Tbot, Tsnice, & meltsn, melttn, meltbn, congeln, snoicen, uatm, vatm, fside, & wind, rhoa, potT, Qa, zlvl, zlvs, strax, stray, flatn, fsensn, fsurfn, fcondtopn, & - flw, fsnow, fpond, sss, mlt_onset, frz_onset, fcondbotn, fcondbot, & + flw, fsnow, fpond, sss, mlt_onset, frz_onset, fcondbotn, fcondbot, fsloss, & frain, Tair, strairxT, strairyT, fsurf, fcondtop, fsens, & flat, fswabs, flwout, evap, evaps, evapi, Tref, Qref, Uref, fresh, fsalt, fhocn, & fswthru, fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & meltt, melts, meltb, congel, snoice, & flatn_f, fsensn_f, fsurfn_f, fcondtopn_f, & - send_i2x_per_cat, fswthrun_ai + 
send_i2x_per_cat, fswthrun_ai, dsnow use ice_flux_bgc, only: dsnown, faero_atm, faero_ocn, fiso_atm, fiso_ocn, & Qa_iso, Qref_iso, fiso_evap, HDO_ocn, H2_16O_ocn, H2_18O_ocn use ice_grid, only: lmask_n, lmask_s, tmask @@ -211,11 +211,11 @@ subroutine step_therm1 (dt, iblk) integer (kind=int_kind) :: & ntrcr, nt_apnd, nt_hpnd, nt_ipnd, nt_alvl, nt_vlvl, nt_Tsfc, & nt_iage, nt_FY, nt_qice, nt_sice, nt_aero, nt_qsno, & - nt_isosno, nt_isoice + nt_isosno, nt_isoice, nt_rsnw, nt_smice, nt_smliq logical (kind=log_kind) :: & tr_iage, tr_FY, tr_iso, tr_aero, tr_pond, tr_pond_cesm, & - tr_pond_lvl, tr_pond_topo, calc_Tsfc, highfreq + tr_pond_lvl, tr_pond_topo, calc_Tsfc, highfreq, tr_snow real (kind=dbl_kind) :: & uvel_center, & ! cell-centered velocity, x component (m/s) @@ -228,6 +228,9 @@ subroutine step_therm1 (dt, iblk) real (kind=dbl_kind), dimension(n_iso,ncat) :: & isosno, isoice ! kg/m^2 + real (kind=dbl_kind), dimension(nslyr,ncat) :: & + rsnwn, smicen, smliqn + type (block) :: & this_block ! block information for current block @@ -240,13 +243,15 @@ subroutine step_therm1 (dt, iblk) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_iso_out=tr_iso, & tr_aero_out=tr_aero, tr_pond_out=tr_pond, tr_pond_cesm_out=tr_pond_cesm, & - tr_pond_lvl_out=tr_pond_lvl, tr_pond_topo_out=tr_pond_topo) + tr_pond_lvl_out=tr_pond_lvl, tr_pond_topo_out=tr_pond_topo, & + tr_snow_out=tr_snow) call icepack_query_tracer_indices( & nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, nt_ipnd_out=nt_ipnd, & nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, nt_Tsfc_out=nt_Tsfc, & nt_iage_out=nt_iage, nt_FY_out=nt_FY, & nt_qice_out=nt_qice, nt_sice_out=nt_sice, & nt_aero_out=nt_aero, nt_qsno_out=nt_qsno, & + nt_rsnw_out=nt_rsnw, nt_smice_out=nt_smice, nt_smliq_out=nt_smliq, & nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -256,7 +261,9 @@ subroutine step_therm1 (dt, iblk) prescribed_ice = .false. #endif - isosno (:,:) = c0 + rsnwn (:,:) = c0 + smicen (:,:) = c0 + smliqn (:,:) = c0 isoice (:,:) = c0 aerosno(:,:,:) = c0 aeroice(:,:,:) = c0 @@ -302,6 +309,16 @@ subroutine step_therm1 (dt, iblk) vvel_center = c0 endif ! highfreq + if (tr_snow) then + do n = 1, ncat + do k = 1, nslyr + rsnwn (k,n) = trcrn(i,j,nt_rsnw +k-1,n,iblk) + smicen(k,n) = trcrn(i,j,nt_smice+k-1,n,iblk) + smliqn(k,n) = trcrn(i,j,nt_smliq+k-1,n,iblk) + enddo + enddo + endif ! tr_snow + if (tr_iso) then ! 
trcrn(nt_iso*) has units kg/m^3 do n=1,ncat do k=1,n_iso @@ -350,6 +367,9 @@ subroutine step_therm1 (dt, iblk) ipnd = trcrn (i,j,nt_ipnd,:,iblk), & iage = trcrn (i,j,nt_iage,:,iblk), & FY = trcrn (i,j,nt_FY ,:,iblk), & + rsnwn = rsnwn (:,:), & + smicen = smicen (:,:), & + smliqn = smliqn (:,:), & aerosno = aerosno (:,:,:), & aeroice = aeroice (:,:,:), & isosno = isosno (:,:), & @@ -397,13 +417,14 @@ subroutine step_therm1 (dt, iblk) strocnyT = strocnyT (i,j, iblk), & fbot = fbot (i,j, iblk), & Tbot = Tbot (i,j, iblk), & - Tsnice = Tsnice (i,j, iblk), & + Tsnice = Tsnice (i,j, iblk), & frzmlt = frzmlt (i,j, iblk), & rside = rside (i,j, iblk), & fside = fside (i,j, iblk), & fsnow = fsnow (i,j, iblk), & frain = frain (i,j, iblk), & fpond = fpond (i,j, iblk), & + fsloss = fsloss (i,j, iblk), & fsurf = fsurf (i,j, iblk), & fsurfn = fsurfn (i,j,:,iblk), & fcondtop = fcondtop (i,j, iblk), & @@ -433,10 +454,10 @@ subroutine step_therm1 (dt, iblk) fsalt = fsalt (i,j, iblk), & fhocn = fhocn (i,j, iblk), & fswthru = fswthru (i,j, iblk), & - fswthru_vdr = fswthru_vdr (i,j, iblk),& - fswthru_vdf = fswthru_vdf (i,j, iblk),& - fswthru_idr = fswthru_idr (i,j, iblk),& - fswthru_idf = fswthru_idf (i,j, iblk),& + fswthru_vdr = fswthru_vdr (i,j, iblk), & + fswthru_vdf = fswthru_vdf (i,j, iblk), & + fswthru_idr = fswthru_idr (i,j, iblk), & + fswthru_idf = fswthru_idf (i,j, iblk), & flatn_f = flatn_f (i,j,:,iblk), & fsensn_f = fsensn_f (i,j,:,iblk), & fsurfn_f = fsurfn_f (i,j,:,iblk), & @@ -461,7 +482,10 @@ subroutine step_therm1 (dt, iblk) congeln = congeln (i,j,:,iblk), & snoice = snoice (i,j, iblk), & snoicen = snoicen (i,j,:,iblk), & + dsnow = dsnow (i,j, iblk), & dsnown = dsnown (i,j,:,iblk), & + meltsliq = meltsliq (i,j, iblk), & + meltsliqn = meltsliqn (i,j,:,iblk), & lmask_n = lmask_n (i,j, iblk), & lmask_s = lmask_s (i,j, iblk), & mlt_onset = mlt_onset (i,j, iblk), & @@ -483,6 +507,16 @@ subroutine step_therm1 (dt, iblk) endif + if (tr_snow) then + do n = 1, ncat + do k = 1, nslyr + trcrn(i,j,nt_rsnw +k-1,n,iblk) = rsnwn (k,n) + trcrn(i,j,nt_smice+k-1,n,iblk) = smicen(k,n) + trcrn(i,j,nt_smliq+k-1,n,iblk) = smliqn(k,n) + enddo + enddo + endif ! tr_snow + if (tr_iso) then do n = 1, ncat if (vicen(i,j,n,iblk) > puny) & @@ -685,13 +719,15 @@ subroutine update_state (dt, daidt, dvidt, dagedt, offset) use ice_timers, only: ice_timer_start, ice_timer_stop, timer_bound real (kind=dbl_kind), intent(in) :: & - dt , & ! time step - offset ! d(age)/dt time offset = dt for thermo, 0 for dyn + dt ! time step - real (kind=dbl_kind), dimension(:,:,:), intent(inout) :: & - daidt, & ! change in ice area per time step - dvidt, & ! change in ice volume per time step - dagedt ! change in ice age per time step + real (kind=dbl_kind), dimension(:,:,:), intent(inout), optional :: & + daidt, & ! change in ice area per time step + dvidt, & ! change in ice volume per time step + dagedt ! change in ice age per time step + + real (kind=dbl_kind), intent(in), optional :: & + offset ! d(age)/dt time offset = dt for thermo, 0 for dyn integer (kind=int_kind) :: & iblk, & ! block index @@ -747,6 +783,8 @@ subroutine update_state (dt, daidt, dvidt, dagedt, offset) n_trcr_strata = n_trcr_strata(:), & nt_strata = nt_strata(:,:)) + if (present(offset)) then + !----------------------------------------------------------------- ! Compute thermodynamic area and volume tendencies. 
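! With daidt, dvidt, dagedt and offset now optional, update_state can be called with
! only dt when no tendencies are needed. A minimal sketch of the two call patterns
! this enables (illustrative forms, not the actual call sites):
!
!      call update_state (dt=dt)                             ! aggregate state only
!      call update_state (dt=dt, daidt=daidt, dvidt=dvidt, & ! thermo: tendencies over dt
!                         dagedt=dagedt, offset=dt)
!
! The present(offset) guard assumes offset and the three tendency arrays are always
! supplied together.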
!----------------------------------------------------------------- @@ -762,7 +800,8 @@ subroutine update_state (dt, daidt, dvidt, dagedt, offset) dagedt(i,j,iblk) = (trcr(i,j,nt_iage,iblk) & - dagedt(i,j,iblk)) / dt endif - endif + endif ! tr_iage + endif ! present(offset) enddo ! i enddo ! j @@ -1022,6 +1061,118 @@ subroutine step_dyn_ridge (dt, ndtd, iblk) end subroutine step_dyn_ridge +!======================================================================= +! +! Updates snow tracers +! +! authors: Elizabeth C. Hunke, LANL +! Nicole Jeffery, LANL + + subroutine step_snow (dt, iblk) + + use ice_blocks, only: block, get_block + use ice_calendar, only: nstreams + use ice_domain, only: blocks_ice + use ice_domain_size, only: ncat, nslyr, nilyr + use ice_flux, only: snwcnt, wind, fresh, fhocn, fsloss, fsnow + use ice_state, only: trcrn, vsno, vsnon, vicen, aicen, aice + use icepack_intfc, only: icepack_step_snow + + real (kind=dbl_kind), intent(in) :: & + dt ! time step + + integer (kind=int_kind), intent(in) :: & + iblk ! block index + + ! local variables + + integer (kind=int_kind) :: & + nt_smice, nt_smliq, nt_rsnw, & + nt_Tsfc, nt_qice, nt_sice, nt_qsno, & + nt_alvl, nt_vlvl, nt_rhos + + integer (kind=int_kind) :: & + ilo,ihi,jlo,jhi, & ! beginning and end of physical domain + i, j, & ! horizontal indices + n, & ! category index + ns, & ! history streams index + ipoint ! index for print diagnostic + + real (kind=dbl_kind) :: & + puny + + real (kind=dbl_kind) :: & + fhs ! flag for presence of snow + + character(len=*), parameter :: subname = '(step_snow)' + + type (block) :: & + this_block ! block information for current block + + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + !----------------------------------------------------------------- + ! query icepack values + !----------------------------------------------------------------- + + call icepack_query_parameters(puny_out=puny) + call icepack_query_tracer_indices( & + nt_smice_out=nt_smice, nt_smliq_out=nt_smliq, & + nt_rsnw_out=nt_rsnw, nt_Tsfc_out=nt_Tsfc, & + nt_qice_out=nt_qice, nt_sice_out=nt_sice, nt_qsno_out=nt_qsno, & + nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, nt_rhos_out=nt_rhos) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + !----------------------------------------------------------------- + ! Snow redistribution and metamorphosis + !----------------------------------------------------------------- + + do j = jlo, jhi + do i = ilo, ihi + + call icepack_step_snow (dt, nilyr, & + nslyr, ncat, & + wind (i,j, iblk), & + aice (i,j, iblk), & + aicen(i,j,:,iblk), & + vicen(i,j,:,iblk), & + vsnon(i,j,:,iblk), & + trcrn(i,j,nt_Tsfc,:,iblk), & + trcrn(i,j,nt_qice,:,iblk), & ! top layer only + trcrn(i,j,nt_sice,:,iblk), & ! top layer only + trcrn(i,j,nt_qsno:nt_qsno+nslyr-1,:,iblk), & + trcrn(i,j,nt_alvl,:,iblk), & + trcrn(i,j,nt_vlvl,:,iblk), & + trcrn(i,j,nt_smice:nt_smice+nslyr-1,:,iblk), & + trcrn(i,j,nt_smliq:nt_smliq+nslyr-1,:,iblk), & + trcrn(i,j,nt_rsnw:nt_rsnw+nslyr-1,:,iblk), & + trcrn(i,j,nt_rhos:nt_rhos+nslyr-1,:,iblk), & + fresh (i,j,iblk), & + fhocn (i,j,iblk), & + fsloss (i,j,iblk), & + fsnow (i,j,iblk)) + enddo + enddo + + ! 
increment counter for history averaging + do j = jlo, jhi + do i = ilo, ihi + fhs = c0 + if (vsno(i,j,iblk) > puny) fhs = c1 + do ns = 1, nstreams + snwcnt(i,j,iblk,ns) = snwcnt(i,j,iblk,ns) + fhs + enddo + enddo + enddo + + end subroutine step_snow + !======================================================================= ! ! Computes radiation fields @@ -1067,7 +1218,7 @@ subroutine step_radiation (dt, iblk) this_block ! block information for current block integer (kind=int_kind) :: & - nt_Tsfc, nt_alvl, & + nt_Tsfc, nt_alvl, nt_rsnw, & nt_apnd, nt_hpnd, nt_ipnd, nt_aero, nlt_chl_sw, & ntrcr, nbtrcr, nbtrcr_sw, nt_fbri @@ -1078,13 +1229,14 @@ subroutine step_radiation (dt, iblk) nlt_zaero_sw, nt_zaero logical (kind=log_kind) :: & - tr_bgc_N, tr_zaero, tr_brine, dEdd_algae, modal_aero + tr_bgc_N, tr_zaero, tr_brine, dEdd_algae, modal_aero, snwgrain real (kind=dbl_kind), dimension(ncat) :: & - fbri ! brine height to ice thickness + fbri ! brine height to ice thickness real(kind= dbl_kind), dimension(:,:), allocatable :: & - ztrcr_sw + ztrcr_sw, & ! zaerosols (kg/m^3) and chla (mg/m^3) + rsnow ! snow grain radius tracer (10^-6 m) logical (kind=log_kind) :: & debug, & ! flag for printing debugging information @@ -1099,16 +1251,18 @@ subroutine step_radiation (dt, iblk) call icepack_query_tracer_flags( & tr_brine_out=tr_brine, tr_bgc_N_out=tr_bgc_N, tr_zaero_out=tr_zaero) call icepack_query_tracer_indices( & - nt_Tsfc_out=nt_Tsfc, nt_alvl_out=nt_alvl, & + nt_Tsfc_out=nt_Tsfc, nt_alvl_out=nt_alvl, nt_rsnw_out=nt_rsnw, & nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, nt_ipnd_out=nt_ipnd, nt_aero_out=nt_aero, & nlt_chl_sw_out=nlt_chl_sw, nlt_zaero_sw_out=nlt_zaero_sw, & nt_fbri_out=nt_fbri, nt_zaero_out=nt_zaero, nt_bgc_N_out=nt_bgc_N) - call icepack_query_parameters(dEdd_algae_out=dEdd_algae, modal_aero_out=modal_aero) + call icepack_query_parameters(dEdd_algae_out=dEdd_algae, modal_aero_out=modal_aero, & + snwgrain_out=snwgrain) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) allocate(ztrcr_sw(nbtrcr_sw,ncat)) + allocate(rsnow(nslyr,ncat)) this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo @@ -1130,10 +1284,16 @@ subroutine step_radiation (dt, iblk) write (nu_diag, *) 'my_task = ',my_task enddo ! ipoint endif - fbri(:) = c0 + fbri (:) = c0 ztrcr_sw(:,:) = c0 + rsnow (:,:) = c0 do n = 1, ncat - if (tr_brine) fbri(n) = trcrn(i,j,nt_fbri,n,iblk) + if (tr_brine) fbri(n) = trcrn(i,j,nt_fbri,n,iblk) + if (snwgrain) then + do k = 1, nslyr + rsnow(k,n) = trcrn(i,j,nt_rsnw+k-1,n,iblk) + enddo + endif enddo if (tmask(i,j,iblk)) then @@ -1182,8 +1342,7 @@ subroutine step_radiation (dt, iblk) albpndn =albpndn (i,j,: ,iblk), apeffn =apeffn (i,j,: ,iblk), & snowfracn=snowfracn(i,j,: ,iblk), & dhsn =dhsn (i,j,: ,iblk), ffracn =ffracn(i,j,:,iblk), & - l_print_point=l_print_point) - + rsnow =rsnow (:,:), l_print_point=l_print_point) endif if (dEdd_algae .and. (tr_zaero .or. tr_bgc_N)) then @@ -1202,6 +1361,7 @@ subroutine step_radiation (dt, iblk) file=__FILE__, line=__LINE__) deallocate(ztrcr_sw) + deallocate(rsnow) call ice_timer_stop(timer_sw) ! 
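! A sketch of how the new step_snow is presumably driven, once per thermodynamic time
! step when the snow tracers are active; the tr_snow test, block loop, and trailing
! update_state call are assumptions about the caller (e.g. ice_step) and are not part
! of this diff:
!
!      if (tr_snow) then
!         do iblk = 1, nblocks
!            call step_snow (dt, iblk)
!         enddo
!         call update_state (dt)   ! aggregate-only form enabled by the optional arguments above
!      endif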
shortwave diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90 b/cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90 index 635bbbeb4..3959f12cf 100644 --- a/cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90 +++ b/cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90 @@ -74,7 +74,8 @@ module ice_boundary ice_HaloUpdate, & ice_HaloUpdate_stress, & ice_HaloExtrapolate, & - ice_HaloDestroy + ice_HaloDestroy, & + primary_grid_lengths_global_ext interface ice_HaloUpdate ! generic interface module procedure ice_HaloUpdate2DR8, & @@ -6807,6 +6808,136 @@ subroutine ice_HaloDestroy(halo) endif end subroutine ice_HaloDestroy +!*********************************************************************** + + subroutine primary_grid_lengths_global_ext( & + ARRAY_O, ARRAY_I, ew_boundary_type, ns_boundary_type) + +! This subroutine adds ghost cells to global primary grid lengths array +! ARRAY_I and outputs result to array ARRAY_O + +! Note duplicate implementation of this subroutine in: +! cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 + + use ice_constants, only: c0 + use ice_domain_size, only: nx_global, ny_global + + real (kind=dbl_kind), dimension(:,:), intent(in) :: & + ARRAY_I + + character (*), intent(in) :: & + ew_boundary_type, ns_boundary_type + + real (kind=dbl_kind), dimension(:,:), intent(out) :: & + ARRAY_O + +!----------------------------------------------------------------------- +! +! local variables +! +!----------------------------------------------------------------------- + + integer (kind=int_kind) :: & + ii, io, ji, jo + + character(len=*), parameter :: & + subname = '(primary_grid_lengths_global_ext)' + +!----------------------------------------------------------------------- +! +! add ghost cells to global primary grid lengths array +! +!----------------------------------------------------------------------- + + if (trim(ns_boundary_type) == 'tripole' .or. & + trim(ns_boundary_type) == 'tripoleT') then + call abort_ice(subname//' ERROR: '//ns_boundary_type & + //' boundary type not implemented for configuration') + endif + + do jo = 1,ny_global+2*nghost + ji = -nghost + jo + + !*** Southern ghost cells + + if (ji < 1) then + select case (trim(ns_boundary_type)) + case ('cyclic') + ji = ji + ny_global + case ('open') + ji = nghost - jo + 1 + case ('closed') + ji = 0 + case default + call abort_ice( & + subname//' ERROR: unknown north-south boundary type') + end select + endif + + !*** Northern ghost cells + + if (ji > ny_global) then + select case (trim(ns_boundary_type)) + case ('cyclic') + ji = ji - ny_global + case ('open') + ji = 2 * ny_global - ji + 1 + case ('closed') + ji = 0 + case default + call abort_ice( & + subname//' ERROR: unknown north-south boundary type') + end select + endif + + do io = 1,nx_global+2*nghost + ii = -nghost + io + + !*** Western ghost cells + + if (ii < 1) then + select case (trim(ew_boundary_type)) + case ('cyclic') + ii = ii + nx_global + case ('open') + ii = nghost - io + 1 + case ('closed') + ii = 0 + case default + call abort_ice( & + subname//' ERROR: unknown east-west boundary type') + end select + endif + + !*** Eastern ghost cells + + if (ii > nx_global) then + select case (trim(ew_boundary_type)) + case ('cyclic') + ii = ii - nx_global + case ('open') + ii = 2 * nx_global - ii + 1 + case ('closed') + ii = 0 + case default + call abort_ice( & + subname//' ERROR: unknown east-west boundary type') + end select + endif + + if (ii == 0 .or. 
ji == 0) then + ARRAY_O(io, jo) = c0 + else + ARRAY_O(io, jo) = ARRAY_I(ii, ji) + endif + + enddo + enddo + +!----------------------------------------------------------------------- + + end subroutine primary_grid_lengths_global_ext + !*********************************************************************** end module ice_boundary diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_gather_scatter.F90 b/cicecore/cicedynB/infrastructure/comm/mpi/ice_gather_scatter.F90 index 010a5c8c4..0a58769db 100644 --- a/cicecore/cicedynB/infrastructure/comm/mpi/ice_gather_scatter.F90 +++ b/cicecore/cicedynB/infrastructure/comm/mpi/ice_gather_scatter.F90 @@ -636,6 +636,7 @@ subroutine gather_global_ext_dbl(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) else special_value = spval_dbl endif + ARRAY_G = special_value nx = nx_global + 2*nghost ny = ny_global + 2*nghost @@ -744,92 +745,7 @@ subroutine gather_global_ext_dbl(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) endif endif - !*** fill land blocks with special values - - else if (src_dist%blockLocation(n) == 0) then - - this_block = get_block(n,n) - - ! interior - do j=this_block%jlo,this_block%jhi - do i=this_block%ilo,this_block%ihi - ARRAY_G(this_block%i_glob(i)+nghost, & - this_block%j_glob(j)+nghost) = special_value - end do - end do - -#ifdef CICE_IN_NEMO -!echmod: this code is temporarily wrapped for nemo pending further testing elsewhere - ! fill ghost cells - if (this_block%jblock == 1) then - ! south block - do j=1, nghost - do i=this_block%ilo,this_block%ihi - ARRAY_G(this_block%i_glob(i)+nghost,j) = special_value - end do - end do - if (this_block%iblock == 1) then - ! southwest corner - do j=1, nghost - do i=1, nghost - ARRAY_G(i,j) = special_value - end do - end do - endif - endif - if (this_block%jblock == nblocks_y) then - ! north block - do j=1, nghost - do i=this_block%ilo,this_block%ihi - ARRAY_G(this_block%i_glob(i)+nghost, & - ny_global + nghost + j) = special_value - end do - end do - if (this_block%iblock == nblocks_x) then - ! northeast corner - do j=1, nghost - do i=1, nghost - ARRAY_G(nx-i+1, ny-j+1) = special_value - end do - end do - endif - endif - if (this_block%iblock == 1) then - ! west block - do j=this_block%jlo,this_block%jhi - do i=1, nghost - ARRAY_G(i,this_block%j_glob(j)+nghost) = special_value - end do - end do - if (this_block%jblock == nblocks_y) then - ! northwest corner - do j=1, nghost - do i=1, nghost - ARRAY_G(i, ny-j+1) = special_value - end do - end do - endif - endif - if (this_block%iblock == nblocks_x) then - ! east block - do j=this_block%jlo,this_block%jhi - do i=1, nghost - ARRAY_G(nx_global + nghost + i, & - this_block%j_glob(j)+nghost) = special_value - end do - end do - if (this_block%jblock == 1) then - ! southeast corner - do j=1, nghost - do i=1, nghost - ARRAY_G( nx-i+1,j) = special_value - end do - end do - endif - endif -#endif - - endif + endif ! src_dist%blockLocation end do @@ -939,7 +855,7 @@ subroutine gather_global_ext_dbl(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) ! !----------------------------------------------------------------------- - else + else ! master task allocate(snd_request(nblocks_tot), & snd_status (MPI_STATUS_SIZE, nblocks_tot)) @@ -960,7 +876,7 @@ subroutine gather_global_ext_dbl(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) call MPI_WAITALL(nsends, snd_request, snd_status, ierr) deallocate(snd_request, snd_status) - endif + endif ! 
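! The gather_global_ext changes above replace the explicit land-block fill loops with
! a single whole-array prefill. The idiom, condensed (generic names):
!
!      ARRAY_G = special_value                      ! prefill: land blocks and ghost cells
!      do n = 1, nblocks_tot
!         if (src_dist%blockLocation(n) /= 0) then
!            ! gather or receive ocean blocks only; cells never written keep special_value
!         endif
!      end do
!
! This also gives the integer and logical variants definite defaults (-9999 and
! .false.) in place of the old MHRI placeholder values.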
master task if (add_mpi_barriers) then call ice_barrier() @@ -1028,8 +944,9 @@ subroutine gather_global_ext_int(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) if (present(spc_val)) then special_value = spc_val else - special_value = 0 !MHRI NOTE: 0,1,-999,??? + special_value = -9999 endif + ARRAY_G = special_value nx = nx_global + 2*nghost ny = ny_global + 2*nghost @@ -1138,21 +1055,7 @@ subroutine gather_global_ext_int(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) endif endif - !*** fill land blocks with special values - - else if (src_dist%blockLocation(n) == 0) then - - this_block = get_block(n,n) - - ! interior - do j=this_block%jlo,this_block%jhi - do i=this_block%ilo,this_block%ihi - ARRAY_G(this_block%i_glob(i)+nghost, & - this_block%j_glob(j)+nghost) = special_value - end do - end do - - endif + endif ! src_dist%blockLocation end do @@ -1262,7 +1165,7 @@ subroutine gather_global_ext_int(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) ! !----------------------------------------------------------------------- - else + else ! master task allocate(snd_request(nblocks_tot), & snd_status (MPI_STATUS_SIZE, nblocks_tot)) @@ -1283,7 +1186,7 @@ subroutine gather_global_ext_int(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) call MPI_WAITALL(nsends, snd_request, snd_status, ierr) deallocate(snd_request, snd_status) - endif + endif ! master task if (add_mpi_barriers) then call ice_barrier() @@ -1351,8 +1254,9 @@ subroutine gather_global_ext_log(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) if (present(spc_val)) then special_value = spc_val else - special_value = .false. !MHRI NOTE: .true./.false. ??? + special_value = .false. endif + ARRAY_G = special_value nx = nx_global + 2*nghost ny = ny_global + 2*nghost @@ -1461,21 +1365,7 @@ subroutine gather_global_ext_log(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) endif endif - !*** fill land blocks with special values - - else if (src_dist%blockLocation(n) == 0) then - - this_block = get_block(n,n) - - ! interior - do j=this_block%jlo,this_block%jhi - do i=this_block%ilo,this_block%ihi - ARRAY_G(this_block%i_glob(i)+nghost, & - this_block%j_glob(j)+nghost) = special_value - end do - end do - - endif + endif ! src_dist%blockLocation end do @@ -1585,7 +1475,7 @@ subroutine gather_global_ext_log(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) ! !----------------------------------------------------------------------- - else + else ! master task allocate(snd_request(nblocks_tot), & snd_status (MPI_STATUS_SIZE, nblocks_tot)) @@ -1606,7 +1496,7 @@ subroutine gather_global_ext_log(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) call MPI_WAITALL(nsends, snd_request, snd_status, ierr) deallocate(snd_request, snd_status) - endif + endif ! master task if (add_mpi_barriers) then call ice_barrier() diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 b/cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 index c66cdd13c..f3fffba59 100644 --- a/cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 +++ b/cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 @@ -61,7 +61,8 @@ module ice_boundary ice_HaloUpdate, & ice_HaloUpdate_stress, & ice_HaloExtrapolate, & - ice_HaloDestroy + ice_HaloDestroy, & + primary_grid_lengths_global_ext interface ice_HaloUpdate ! 
generic interface module procedure ice_HaloUpdate2DR8, & @@ -4587,6 +4588,136 @@ subroutine ice_HaloDestroy(halo) end subroutine ice_HaloDestroy +!*********************************************************************** + + subroutine primary_grid_lengths_global_ext( & + ARRAY_O, ARRAY_I, ew_boundary_type, ns_boundary_type) + +! This subroutine adds ghost cells to global primary grid lengths array +! ARRAY_I and outputs result to array ARRAY_O + +! Note duplicate implementation of this subroutine in: +! cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90 + + use ice_constants, only: c0 + use ice_domain_size, only: nx_global, ny_global + + real (kind=dbl_kind), dimension(:,:), intent(in) :: & + ARRAY_I + + character (*), intent(in) :: & + ew_boundary_type, ns_boundary_type + + real (kind=dbl_kind), dimension(:,:), intent(out) :: & + ARRAY_O + +!----------------------------------------------------------------------- +! +! local variables +! +!----------------------------------------------------------------------- + + integer (kind=int_kind) :: & + ii, io, ji, jo + + character(len=*), parameter :: & + subname = '(primary_grid_lengths_global_ext)' + +!----------------------------------------------------------------------- +! +! add ghost cells to global primary grid lengths array +! +!----------------------------------------------------------------------- + + if (trim(ns_boundary_type) == 'tripole' .or. & + trim(ns_boundary_type) == 'tripoleT') then + call abort_ice(subname//' ERROR: '//ns_boundary_type & + //' boundary type not implemented for configuration') + endif + + do jo = 1,ny_global+2*nghost + ji = -nghost + jo + + !*** Southern ghost cells + + if (ji < 1) then + select case (trim(ns_boundary_type)) + case ('cyclic') + ji = ji + ny_global + case ('open') + ji = nghost - jo + 1 + case ('closed') + ji = 0 + case default + call abort_ice( & + subname//' ERROR: unknown north-south boundary type') + end select + endif + + !*** Northern ghost cells + + if (ji > ny_global) then + select case (trim(ns_boundary_type)) + case ('cyclic') + ji = ji - ny_global + case ('open') + ji = 2 * ny_global - ji + 1 + case ('closed') + ji = 0 + case default + call abort_ice( & + subname//' ERROR: unknown north-south boundary type') + end select + endif + + do io = 1,nx_global+2*nghost + ii = -nghost + io + + !*** Western ghost cells + + if (ii < 1) then + select case (trim(ew_boundary_type)) + case ('cyclic') + ii = ii + nx_global + case ('open') + ii = nghost - io + 1 + case ('closed') + ii = 0 + case default + call abort_ice( & + subname//' ERROR: unknown east-west boundary type') + end select + endif + + !*** Eastern ghost cells + + if (ii > nx_global) then + select case (trim(ew_boundary_type)) + case ('cyclic') + ii = ii - nx_global + case ('open') + ii = 2 * nx_global - ii + 1 + case ('closed') + ii = 0 + case default + call abort_ice( & + subname//' ERROR: unknown east-west boundary type') + end select + endif + + if (ii == 0 .or. 
ji == 0) then + ARRAY_O(io, jo) = c0 + else + ARRAY_O(io, jo) = ARRAY_I(ii, ji) + endif + + enddo + enddo + +!----------------------------------------------------------------------- + + end subroutine primary_grid_lengths_global_ext + !*********************************************************************** end module ice_boundary diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_gather_scatter.F90 b/cicecore/cicedynB/infrastructure/comm/serial/ice_gather_scatter.F90 index 418c80f61..4b0bb1f9e 100644 --- a/cicecore/cicedynB/infrastructure/comm/serial/ice_gather_scatter.F90 +++ b/cicecore/cicedynB/infrastructure/comm/serial/ice_gather_scatter.F90 @@ -373,6 +373,7 @@ subroutine gather_global_ext_dbl(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) else special_value = spval_dbl endif + ARRAY_G = special_value nx = nx_global + 2*nghost ny = ny_global + 2*nghost @@ -477,16 +478,7 @@ subroutine gather_global_ext_dbl(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) endif endif - else !*** fill land blocks with special values - - do j=this_block%jlo,this_block%jhi - do i=this_block%ilo,this_block%ihi - ARRAY_G(this_block%i_glob(i)+nghost, & - this_block%j_glob(j)+nghost) = special_value - end do - end do - - endif + endif ! src_dist%blockLocation end do @@ -537,8 +529,9 @@ subroutine gather_global_ext_int(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) if (present(spc_val)) then special_value = spc_val else - special_value = 0 !MHRI: 0,1,999,-999 ?? + special_value = -9999 endif + ARRAY_G = special_value nx = nx_global + 2*nghost ny = ny_global + 2*nghost @@ -643,16 +636,7 @@ subroutine gather_global_ext_int(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) endif endif - else !*** fill land blocks with special values - - do j=this_block%jlo,this_block%jhi - do i=this_block%ilo,this_block%ihi - ARRAY_G(this_block%i_glob(i)+nghost, & - this_block%j_glob(j)+nghost) = special_value - end do - end do - - endif + endif ! src_dist%blockLocation end do @@ -703,8 +687,9 @@ subroutine gather_global_ext_log(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) if (present(spc_val)) then special_value = spc_val else - special_value = .false. !MHRI: true/false + special_value = .false. endif + ARRAY_G = special_value nx = nx_global + 2*nghost ny = ny_global + 2*nghost @@ -809,16 +794,7 @@ subroutine gather_global_ext_log(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) endif endif - else !*** fill land blocks with special values - - do j=this_block%jlo,this_block%jhi - do i=this_block%ilo,this_block%ihi - ARRAY_G(this_block%i_glob(i)+nghost, & - this_block%j_glob(j)+nghost) = special_value - end do - end do - - endif + endif ! src_dist%blockLocation end do diff --git a/cicecore/cicedynB/infrastructure/ice_domain.F90 b/cicecore/cicedynB/infrastructure/ice_domain.F90 index 52f0da850..1dfdd0428 100644 --- a/cicecore/cicedynB/infrastructure/ice_domain.F90 +++ b/cicecore/cicedynB/infrastructure/ice_domain.F90 @@ -441,7 +441,7 @@ subroutine init_domain_distribution(KMTG,ULATG) !---------------------------------------------------------------------- if (distribution_wght == 'latitude') then - flat = NINT(abs(ULATG*rad_to_deg), int_kind) ! linear function + flat = max(NINT(abs(ULATG*rad_to_deg), int_kind),1) ! 
linear function else flat = 1 endif diff --git a/cicecore/cicedynB/infrastructure/ice_grid.F90 b/cicecore/cicedynB/infrastructure/ice_grid.F90 index 2124bbebe..18dbaaefe 100644 --- a/cicecore/cicedynB/infrastructure/ice_grid.F90 +++ b/cicecore/cicedynB/infrastructure/ice_grid.F90 @@ -19,7 +19,8 @@ module ice_grid use ice_kinds_mod use ice_broadcast, only: broadcast_scalar, broadcast_array - use ice_boundary, only: ice_HaloUpdate, ice_HaloExtrapolate + use ice_boundary, only: ice_HaloUpdate, ice_HaloExtrapolate, & + primary_grid_lengths_global_ext use ice_communicate, only: my_task, master_task use ice_blocks, only: block, get_block, nx_block, ny_block, nghost use ice_domain_size, only: nx_global, ny_global, max_blocks @@ -77,13 +78,17 @@ module ice_grid ocn_gridcell_frac ! only relevant for lat-lon grids ! gridcell value of [1 - (land fraction)] (T-cell) + real (kind=dbl_kind), dimension (:,:), allocatable, public :: & + G_HTE , & ! length of eastern edge of T-cell (global ext.) + G_HTN ! length of northern edge of T-cell (global ext.) + real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & - cyp , & ! 1.5*HTE - 0.5*HTE - cxp , & ! 1.5*HTN - 0.5*HTN - cym , & ! 0.5*HTE - 1.5*HTE - cxm , & ! 0.5*HTN - 1.5*HTN - dxhy , & ! 0.5*(HTE - HTE) - dyhx ! 0.5*(HTN - HTN) + cyp , & ! 1.5*HTE(i,j)-0.5*HTW(i,j) = 1.5*HTE(i,j)-0.5*HTE(i-1,j) + cxp , & ! 1.5*HTN(i,j)-0.5*HTS(i,j) = 1.5*HTN(i,j)-0.5*HTN(i,j-1) + cym , & ! 0.5*HTE(i,j)-1.5*HTW(i,j) = 0.5*HTE(i,j)-1.5*HTE(i-1,j) + cxm , & ! 0.5*HTN(i,j)-1.5*HTS(i,j) = 0.5*HTN(i,j)-1.5*HTN(i,j-1) + dxhy , & ! 0.5*(HTE(i,j) - HTW(i,j)) = 0.5*(HTE(i,j) - HTE(i-1,j)) + dyhx ! 0.5*(HTN(i,j) - HTS(i,j)) = 0.5*(HTN(i,j) - HTN(i,j-1)) ! grid dimensions for rectangular grid real (kind=dbl_kind), public :: & @@ -125,7 +130,8 @@ module ice_grid kmt ! ocean topography mask for bathymetry (T-cell) logical (kind=log_kind), public :: & - use_bathymetry ! flag for reading in bathymetry_file + use_bathymetry, & ! flag for reading in bathymetry_file + pgl_global_ext ! flag for init primary grid lengths (global ext.) logical (kind=log_kind), & dimension (:,:,:), allocatable, public :: & @@ -153,6 +159,8 @@ subroutine alloc_grid integer (int_kind) :: ierr + character(len=*), parameter :: subname = '(alloc_grid)' + allocate( & dxt (nx_block,ny_block,max_blocks), & ! width of T-cell through the middle (m) dyt (nx_block,ny_block,max_blocks), & ! height of T-cell through the middle (m) @@ -175,12 +183,12 @@ subroutine alloc_grid ANGLET (nx_block,ny_block,max_blocks), & ! ANGLE converted to T-cells bathymetry(nx_block,ny_block,max_blocks),& ! ocean depth, for grounding keels and bergs (m) ocn_gridcell_frac(nx_block,ny_block,max_blocks),& ! only relevant for lat-lon grids - cyp (nx_block,ny_block,max_blocks), & ! 1.5*HTE - 0.5*HTE - cxp (nx_block,ny_block,max_blocks), & ! 1.5*HTN - 0.5*HTN - cym (nx_block,ny_block,max_blocks), & ! 0.5*HTE - 1.5*HTE - cxm (nx_block,ny_block,max_blocks), & ! 0.5*HTN - 1.5*HTN - dxhy (nx_block,ny_block,max_blocks), & ! 0.5*(HTE - HTE) - dyhx (nx_block,ny_block,max_blocks), & ! 0.5*(HTN - HTN) + cyp (nx_block,ny_block,max_blocks), & ! 1.5*HTE - 0.5*HTW + cxp (nx_block,ny_block,max_blocks), & ! 1.5*HTN - 0.5*HTS + cym (nx_block,ny_block,max_blocks), & ! 0.5*HTE - 1.5*HTW + cxm (nx_block,ny_block,max_blocks), & ! 0.5*HTN - 1.5*HTS + dxhy (nx_block,ny_block,max_blocks), & ! 0.5*(HTE - HTW) + dyhx (nx_block,ny_block,max_blocks), & ! 0.5*(HTN - HTS) xav (nx_block,ny_block,max_blocks), & ! 
mean T-cell value of x yav (nx_block,ny_block,max_blocks), & ! mean T-cell value of y xxav (nx_block,ny_block,max_blocks), & ! mean T-cell value of xx @@ -203,7 +211,15 @@ subroutine alloc_grid mse (2,2,nx_block,ny_block,max_blocks), & msw (2,2,nx_block,ny_block,max_blocks), & stat=ierr) - if (ierr/=0) call abort_ice('(alloc_grid): Out of memory') + if (ierr/=0) call abort_ice(subname//'ERROR: Out of memory') + + if (pgl_global_ext) then + allocate( & + G_HTE(nx_global+2*nghost, ny_global+2*nghost), & ! length of eastern edge of T-cell (global ext.) + G_HTN(nx_global+2*nghost, ny_global+2*nghost), & ! length of northern edge of T-cell (global ext.) + stat=ierr) + if (ierr/=0) call abort_ice(subname//'ERROR: Out of memory') + endif end subroutine alloc_grid @@ -1499,6 +1515,10 @@ subroutine primary_grid_lengths_HTN(work_g) enddo enddo endif + if (pgl_global_ext) then + call primary_grid_lengths_global_ext( & + G_HTN, work_g, ew_boundary_type, ns_boundary_type) + endif call scatter_global(HTN, work_g, master_task, distrb_info, & field_loc_Nface, field_type_scalar) call scatter_global(dxu, work_g2, master_task, distrb_info, & @@ -1573,6 +1593,10 @@ subroutine primary_grid_lengths_HTE(work_g) enddo endif endif + if (pgl_global_ext) then + call primary_grid_lengths_global_ext( & + G_HTE, work_g, ew_boundary_type, ns_boundary_type) + endif call scatter_global(HTE, work_g, master_task, distrb_info, & field_loc_Eface, field_type_scalar) call scatter_global(dyu, work_g2, master_task, distrb_info, & diff --git a/cicecore/cicedynB/infrastructure/ice_read_write.F90 b/cicecore/cicedynB/infrastructure/ice_read_write.F90 index d902c62f8..bf0361cf1 100644 --- a/cicecore/cicedynB/infrastructure/ice_read_write.F90 +++ b/cicecore/cicedynB/infrastructure/ice_read_write.F90 @@ -68,6 +68,9 @@ module ice_read_write ice_read_nc_xyz, & !ice_read_nc_xyf, & ice_read_nc_point, & + ice_read_nc_1D, & + ice_read_nc_2D, & + ice_read_nc_3D, & ice_read_nc_z end interface @@ -285,7 +288,7 @@ subroutine ice_read_xyt(nu, nrec, work, atype, diag, & read(nu) ((work_g1(i,j),i=1,nx_global),j=1,ny_global) endif else - write(nu_diag,*) ' ERROR: reading unknown atype ',atype + write(nu_diag,*) subname,' ERROR: reading unknown atype ',atype endif endif ! my_task = master_task @@ -304,7 +307,7 @@ subroutine ice_read_xyt(nu, nrec, work, atype, diag, & amin = minval(work_g1) amax = maxval(work_g1, mask = work_g1 /= spval_dbl) asum = sum(work_g1, mask = work_g1 /= spval_dbl) - write(nu_diag,*) ' read_global ',nu, nrec, amin, amax, asum + write(nu_diag,*) subname,' read_global ',nu, nrec, amin, amax, asum endif !------------------------------------------------------------------- @@ -433,7 +436,7 @@ subroutine ice_read_xyzt(nu, nrec, work, atype, diag, & k=1,nblyr+2) endif else - write(nu_diag,*) ' ERROR: reading unknown atype ',atype + write(nu_diag,*) subname,' ERROR: reading unknown atype ',atype endif endif ! 
my_task = master_task @@ -452,7 +455,7 @@ subroutine ice_read_xyzt(nu, nrec, work, atype, diag, & amin = minval(work_g4) amax = maxval(work_g4, mask = work_g4 /= spval_dbl) asum = sum (work_g4, mask = work_g4 /= spval_dbl) - write(nu_diag,*) ' read_global ',nu, nrec, amin, amax, asum + write(nu_diag,*) subname,' read_global ',nu, nrec, amin, amax, asum endif !------------------------------------------------------------------- @@ -566,7 +569,7 @@ subroutine ice_read_global (nu, nrec, work_g, atype, diag, & read(nu) ((work_g(i,j),i=1,nx_global),j=1,ny_global) endif else - write(nu_diag,*) ' ERROR: reading unknown atype ',atype + write(nu_diag,*) subname,' ERROR: reading unknown atype ',atype endif endif ! my_task = master_task @@ -582,7 +585,7 @@ subroutine ice_read_global (nu, nrec, work_g, atype, diag, & amin = minval(work_g) amax = maxval(work_g, mask = work_g /= spval_dbl) asum = sum (work_g, mask = work_g /= spval_dbl) - write(nu_diag,*) ' read_global ',nu, nrec, amin, amax,asum + write(nu_diag,*) subname,' read_global ',nu, nrec, amin, amax,asum endif end subroutine ice_read_global @@ -686,7 +689,7 @@ subroutine ice_read_ext(nu, nrec, work, atype, diag, & read(nu) ((work_g1(i,j),i=1,nx),j=1,ny) endif else - write(nu_diag,*) ' ERROR: reading unknown atype ',atype + write(nu_diag,*) subname,' ERROR: reading unknown atype ',atype endif endif ! my_task = master_task @@ -705,7 +708,7 @@ subroutine ice_read_ext(nu, nrec, work, atype, diag, & amin = minval(work_g1) amax = maxval(work_g1, mask = work_g1 /= spval_dbl) asum = sum (work_g1, mask = work_g1 /= spval_dbl) - write(nu_diag,*) ' read_global ',nu, nrec, amin, amax, asum + write(nu_diag,*) subname,' read_global ',nu, nrec, amin, amax, asum endif !------------------------------------------------------------------- @@ -800,7 +803,7 @@ subroutine ice_write_xyt(nu, nrec, work, atype, diag) elseif (atype == 'ruf8') then write(nu) ((work_g1(i,j),i=1,nx_global),j=1,ny_global) else - write(nu_diag,*) ' ERROR: writing unknown atype ',atype + write(nu_diag,*) subname,' ERROR: writing unknown atype ',atype endif !------------------------------------------------------------------- @@ -810,7 +813,7 @@ subroutine ice_write_xyt(nu, nrec, work, atype, diag) amin = minval(work_g1) amax = maxval(work_g1, mask = work_g1 /= spval_dbl) asum = sum (work_g1, mask = work_g1 /= spval_dbl) - write(nu_diag,*) ' write_global ', nu, nrec, amin, amax, asum + write(nu_diag,*) subname,' write_global ', nu, nrec, amin, amax, asum endif endif ! my_task = master_task @@ -905,7 +908,7 @@ subroutine ice_write_xyzt(nu, nrec, work, atype, diag) write(nu)(((work_g4(i,j,k),i=1,nx_global),j=1,ny_global), & k=1,nblyr+2) else - write(nu_diag,*) ' ERROR: writing unknown atype ',atype + write(nu_diag,*) subname,' ERROR: writing unknown atype ',atype endif !------------------------------------------------------------------- @@ -915,7 +918,7 @@ subroutine ice_write_xyzt(nu, nrec, work, atype, diag) amin = minval(work_g4) amax = maxval(work_g4, mask = work_g4 /= spval_dbl) asum = sum (work_g4, mask = work_g4 /= spval_dbl) - write(nu_diag,*) ' write_global ', nu, nrec, amin, amax, asum + write(nu_diag,*) subname,' write_global ', nu, nrec, amin, amax, asum endif endif ! 
my_task = master_task @@ -1011,7 +1014,7 @@ subroutine ice_write_ext(nu, nrec, work, atype, diag) elseif (atype == 'ruf8') then write(nu) ((work_g1(i,j),i=1,nx),j=1,ny) else - write(nu_diag,*) ' ERROR: writing unknown atype ',atype + write(nu_diag,*) subname,' ERROR: writing unknown atype ',atype endif !------------------------------------------------------------------- @@ -1021,7 +1024,7 @@ subroutine ice_write_ext(nu, nrec, work, atype, diag) amin = minval(work_g1) amax = maxval(work_g1, mask = work_g1 /= spval_dbl) asum = sum (work_g1, mask = work_g1 /= spval_dbl) - write(nu_diag,*) ' write_global ', nu, nrec, amin, amax, asum + write(nu_diag,*) subname,' write_global ', nu, nrec, amin, amax, asum endif endif ! my_task = master_task @@ -1055,14 +1058,15 @@ subroutine ice_open_nc(filename, fid) status = nf90_open(filename, NF90_NOWRITE, fid) if (status /= nf90_noerr) then - call abort_ice (subname//'ERROR: Cannot open '//trim(filename) ) + call abort_ice(subname//' ERROR: Cannot open '//trim(filename), & + file=__FILE__, line=__LINE__) endif endif ! my_task = master_task #else - call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined for '//trim(filename), & - file=__FILE__, line=__LINE__) + call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined for '//trim(filename), & + file=__FILE__, line=__LINE__) fid = -999 ! to satisfy intent(out) attribute #endif end subroutine ice_open_nc @@ -1110,26 +1114,29 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & ! netCDF file diagnostics: integer (kind=int_kind) :: & varid , & ! variable id - status ! status output from netcdf routines -! ndim, nvar, & ! sizes of netcdf file -! id, & ! dimension index -! dimlen ! dimension size + status , & ! status output from netcdf routines + ndims , & ! number of dimensions + dimlen ! dimension size + + integer (kind=int_kind), dimension(10) :: & + dimids ! generic size dimids real (kind=dbl_kind) :: & missingvalue, & amin, amax, asum ! min, max values and sum of input array -! character (char_len) :: & -! dimname ! dimension name - real (kind=dbl_kind), dimension(:,:), allocatable :: & work_g1 integer (kind=int_kind) :: nx, ny + integer (kind=int_kind) :: lnrec ! local value of nrec + real (kind=dbl_kind), dimension(:,:), allocatable :: & work_g2 + lnrec = nrec + if (orca_halogrid .and. .not. present(restart_ext)) then if (my_task == master_task) then allocate(work_g2(nx_global+2,ny_global+1)) @@ -1164,9 +1171,31 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & !------------------------------------------------------------- status = nf90_inq_varid(fid, trim(varname), varid) - if (status /= nf90_noerr) then - call abort_ice (subname//'ERROR: Cannot find variable '//trim(varname) ) + call abort_ice(subname//' ERROR: Cannot find variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + + !------------------------------------------------------------- + ! 
Check nrec axis size + !------------------------------------------------------------- + + status = nf90_inquire_variable(fid, varid, ndims=ndims, dimids=dimids) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: inquire variable dimids '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + if (ndims > 2) then + status = nf90_inquire_dimension(fid, dimids(3), len=dimlen) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: inquire dimension size 3 '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + if (lnrec > dimlen) then + write(nu_diag,*) subname,' ERROR not enough records, ',trim(varname),lnrec,dimlen + call abort_ice(subname//' ERROR: not enough records '//trim(varname), & + file=__FILE__, line=__LINE__) + endif endif !-------------------------------------------------------------- @@ -1175,13 +1204,21 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & if (orca_halogrid .and. .not. present(restart_ext)) then status = nf90_get_var( fid, varid, work_g2, & - start=(/1,1,nrec/), & - count=(/nx_global+2,ny_global+1,1/) ) + start=(/1,1,lnrec/), & + count=(/nx_global+2,ny_global+1,1/)) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif work_g1 = work_g2(2:nx_global+1,1:ny_global) else status = nf90_get_var( fid, varid, work_g1, & - start=(/1,1,nrec/), & - count=(/nx,ny,1/) ) + start=(/1,1,lnrec/), & + count=(/nx,ny,1/)) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif endif status = nf90_get_att(fid, varid, "_FillValue", missingvalue) @@ -1192,19 +1229,19 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & !------------------------------------------------------------------- if (my_task==master_task .and. diag) then - write(nu_diag,*) & - 'ice_read_nc_xy, fid= ',fid, ', nrec = ',nrec, & + write(nu_diag,'(2a,i8,a,i8,2a)') & + subname,' fid= ',fid, ', lnrec = ',lnrec, & ', varname = ',trim(varname) ! status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) -! write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar +! write(nu_diag,*) subname,' ndim= ',ndim,', nvar= ',nvar ! do id=1,ndim ! status = nf90_inquire_dimension(fid,id,name=dimname,len=dimlen) -! write(nu_diag,*) 'Dim name = ',trim(dimname),', size = ',dimlen +! write(nu_diag,*) subname,' Dim name = ',trim(dimname),', size = ',dimlen ! enddo amin = minval(work_g1) amax = maxval(work_g1, mask = work_g1 /= missingvalue) asum = sum (work_g1, mask = work_g1 /= missingvalue) - write(nu_diag,*) ' min, max, sum =', amin, amax, asum, trim(varname) + write(nu_diag,*) subname,' min, max, sum =', amin, amax, asum, trim(varname) endif !------------------------------------------------------------------- @@ -1234,8 +1271,8 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & if (orca_halogrid .and. .not. present(restart_ext)) deallocate(work_g2) #else - call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & - file=__FILE__, line=__LINE__) + call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) #endif end subroutine ice_read_nc_xy @@ -1282,27 +1319,33 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & ! netCDF file diagnostics: integer (kind=int_kind) :: & n, & ! ncat index - varid , & ! variable id - status ! status output from netcdf routines -! ndim, nvar, & ! sizes of netcdf file -! id, & ! dimension index -! dimlen ! 
size of dimension + varid , & ! variable id + status , & ! status output from netcdf routines + ndims , & ! number of dimensions + dimlen ! dimension size + + integer (kind=int_kind), dimension(10) :: & + dimids ! generic size dimids real (kind=dbl_kind) :: & missingvalue, & ! missing value amin, amax, asum ! min, max values and sum of input array ! character (char_len) :: & -! dimname ! dimension name +! dimname ! dimension name real (kind=dbl_kind), dimension(:,:,:), allocatable :: & work_g1 integer (kind=int_kind) :: nx, ny + integer (kind=int_kind) :: lnrec ! local value of nrec + real (kind=dbl_kind), dimension(:,:,:), allocatable :: & work_g2 + lnrec = nrec + if (orca_halogrid .and. .not. present(restart_ext)) then if (my_task == master_task) then allocate(work_g2(nx_global+2,ny_global+1,ncat)) @@ -1335,9 +1378,31 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & !------------------------------------------------------------- status = nf90_inq_varid(fid, trim(varname), varid) - if (status /= nf90_noerr) then - call abort_ice (subname//'ERROR: Cannot find variable '//trim(varname) ) + call abort_ice(subname//' ERROR: Cannot find variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + + !------------------------------------------------------------- + ! Check nrec axis size + !------------------------------------------------------------- + + status = nf90_inquire_variable(fid, varid, ndims=ndims, dimids=dimids) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: inquire variable dimids '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + if (ndims > 3) then + status = nf90_inquire_dimension(fid, dimids(4), len=dimlen) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: inquire dimension size 4 '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + if (lnrec > dimlen) then + write(nu_diag,*) subname,' ERROR not enough records, ',trim(varname),lnrec,dimlen + call abort_ice(subname//' ERROR: not enough records '//trim(varname), & + file=__FILE__, line=__LINE__) + endif endif !-------------------------------------------------------------- @@ -1346,13 +1411,21 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & if (orca_halogrid .and. .not. present(restart_ext)) then status = nf90_get_var( fid, varid, work_g2, & - start=(/1,1,1,nrec/), & - count=(/nx_global+2,ny_global+1,ncat,1/) ) + start=(/1,1,1,lnrec/), & + count=(/nx_global+2,ny_global+1,ncat,1/)) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif work_g1 = work_g2(2:nx_global+1,1:ny_global,:) else status = nf90_get_var( fid, varid, work_g1, & - start=(/1,1,1,nrec/), & - count=(/nx,ny,ncat,1/) ) + start=(/1,1,1,lnrec/), & + count=(/nx,ny,ncat,1/)) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif endif status = nf90_get_att(fid, varid, "_FillValue", missingvalue) @@ -1363,20 +1436,20 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & !------------------------------------------------------------------- if (my_task==master_task .and. diag) then - write(nu_diag,*) & - 'ice_read_nc_xyz, fid= ',fid, ', nrec = ',nrec, & + write(nu_diag,'(2a,i8,a,i8,2a)') & + subname,' fid= ',fid, ', lnrec = ',lnrec, & ', varname = ',trim(varname) ! status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) -! write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar +! 
write(nu_diag,*) subname,' ndim= ',ndim,', nvar= ',nvar ! do id=1,ndim ! status = nf90_inquire_dimension(fid,id,name=dimname,len=dimlen) -! write(nu_diag,*) 'Dim name = ',trim(dimname),', size = ',dimlen +! write(nu_diag,*) subname,' Dim name = ',trim(dimname),', size = ',dimlen ! enddo do n=1,ncat amin = minval(work_g1(:,:,n)) amax = maxval(work_g1(:,:,n), mask = work_g1(:,:,n) /= missingvalue) asum = sum (work_g1(:,:,n), mask = work_g1(:,:,n) /= missingvalue) - write(nu_diag,*) ' min, max, sum =', amin, amax, asum, trim(varname) + write(nu_diag,*) subname,' min, max, sum =', amin, amax, asum, trim(varname) enddo endif @@ -1410,8 +1483,8 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & if (orca_halogrid .and. .not. present(restart_ext)) deallocate(work_g2) #else - call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & - file=__FILE__, line=__LINE__) + call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work = c0 ! to satisfy intent(out) attribute #endif end subroutine ice_read_nc_xyz @@ -1465,26 +1538,34 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & ndim, nvar, & ! sizes of netcdf file id, & ! dimension index n, & ! ncat index - dimlen ! size of dimension + ndims, & ! number of dimensions + dimlen ! dimension size + + integer (kind=int_kind), dimension(10) :: & + dimids ! generic size dimids real (kind=dbl_kind) :: & missingvalue, & ! missing value amin, amax, asum ! min, max values and sum of input array character (char_len) :: & - dimname ! dimension name + dimname ! dimension name real (kind=dbl_kind), dimension(:,:,:), allocatable :: & work_g1 integer (kind=int_kind) :: nx, ny + integer (kind=int_kind) :: lnrec ! local value of nrec + character(len=*), parameter :: subname = '(ice_read_nc_xyf)' #ifdef USE_NETCDF real (kind=dbl_kind), dimension(:,:,:), allocatable :: & work_g2 + lnrec = nrec + if (orca_halogrid .and. .not. present(restart_ext)) then if (my_task == master_task) then allocate(work_g2(nx_global+2,ny_global+1,nfreq)) @@ -1517,10 +1598,31 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & !------------------------------------------------------------- status = nf90_inq_varid(fid, trim(varname), varid) - if (status /= nf90_noerr) then - call abort_ice ( & - 'ice_read_nc_xyf: Cannot find variable '//trim(varname) ) + call abort_ice(subname//' ERROR: Cannot find variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + + !------------------------------------------------------------- + ! Check nrec axis size + !------------------------------------------------------------- + + status = nf90_inquire_variable(fid, varid, ndims=ndims, dimids=dimids) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: inquire variable dimids '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + if (ndims > 3) then + status = nf90_inquire_dimension(fid, dimids(4), len=dimlen) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: inquire dimension size 4 '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + if (lnrec > dimlen) then + write(nu_diag,*) subname,' ERROR not enough records, ',trim(varname),lnrec,dimlen + call abort_ice(subname//' ERROR: not enough records '//trim(varname), & + file=__FILE__, line=__LINE__) + endif endif !-------------------------------------------------------------- @@ -1529,13 +1631,21 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & if (orca_halogrid .and. .not. 
present(restart_ext)) then status = nf90_get_var( fid, varid, work_g2, & - start=(/1,1,1,nrec/), & - count=(/nx_global+2,ny_global+1,nfreq,1/) ) + start=(/1,1,1,lnrec/), & + count=(/nx_global+2,ny_global+1,nfreq,1/)) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif work_g1 = work_g2(2:nx_global+1,1:ny_global,:) else status = nf90_get_var( fid, varid, work_g1, & - start=(/1,1,1,nrec/), & - count=(/nx,ny,nfreq,1/) ) + start=(/1,1,1,lnrec/), & + count=(/nx,ny,nfreq,1/)) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif endif status = nf90_get_att(fid, varid, "missing_value", missingvalue) @@ -1546,21 +1656,21 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & !------------------------------------------------------------------- if (my_task==master_task .and. diag) then - write(nu_diag,*) & - 'ice_read_nc_xyf, fid= ',fid, ', nrec = ',nrec, & + write(nu_diag,'(2a,i8,a,i8,2a)') & + subname,' fid= ',fid, ', lnrec = ',lnrec, & ', varname = ',trim(varname) status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) - write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar + write(nu_diag,*) subname,' ndim= ',ndim,', nvar= ',nvar do id=1,ndim status = nf90_inquire_dimension(fid,id,name=dimname,len=dimlen) - write(nu_diag,*) 'Dim name = ',trim(dimname),', size = ',dimlen + write(nu_diag,*) subname,' Dim name = ',trim(dimname),', size = ',dimlen enddo - write(nu_diag,*) 'missingvalue= ',missingvalue + write(nu_diag,*) subname,' missingvalue= ',missingvalue do n = 1, nfreq amin = minval(work_g1(:,:,n)) amax = maxval(work_g1(:,:,n), mask = work_g1(:,:,n) /= missingvalue) asum = sum (work_g1(:,:,n), mask = work_g1(:,:,n) /= missingvalue) - write(nu_diag,*) ' min, max, sum =', amin, amax, asum + write(nu_diag,*) subname,' min, max, sum =', amin, amax, asum enddo endif @@ -1597,8 +1707,8 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & if (orca_halogrid .and. .not. present(restart_ext)) deallocate(work_g2) #else - call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & - file=__FILE__, line=__LINE__) + call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work = c0 ! to satisfy intent(out) attribute #endif @@ -1640,24 +1750,54 @@ subroutine ice_read_nc_point(fid, nrec, varname, work, diag, & status, & ! status output from netcdf routines ndim, nvar, & ! sizes of netcdf file id, & ! dimension index - dimlen ! size of dimension + ndims, & ! number of dimensions + dimlen ! dimension size + + integer (kind=int_kind), dimension(10) :: & + dimids ! generic size dimids real (kind=dbl_kind), dimension(1) :: & workg ! temporary work variable + integer (kind=int_kind) :: lnrec ! local value of nrec + character (char_len) :: & - dimname ! dimension name + dimname ! dimension name - if (my_task == master_task) then + lnrec = nrec + + if (my_task == master_task) then !------------------------------------------------------------- ! 
Find out ID of required variable !------------------------------------------------------------- status = nf90_inq_varid(fid, trim(varname), varid) - if (status /= nf90_noerr) then - call abort_ice (subname//'ERROR: Cannot find variable '//trim(varname) ) + call abort_ice(subname//' ERROR: Cannot find variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + + !------------------------------------------------------------- + ! Check nrec axis size + !------------------------------------------------------------- + + status = nf90_inquire_variable(fid, varid, ndims=ndims, dimids=dimids) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: inquire variable dimids '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + if (ndims > 0) then + status = nf90_inquire_dimension(fid, dimids(1), len=dimlen) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: inquire dimension size 1 '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + if (lnrec > dimlen) then + write(nu_diag,*) subname,' ERROR not enough records, ',trim(varname),lnrec,dimlen + call abort_ice(subname//' ERROR: not enough records '//trim(varname), & + file=__FILE__, line=__LINE__) + endif endif !-------------------------------------------------------------- @@ -1665,11 +1805,11 @@ subroutine ice_read_nc_point(fid, nrec, varname, work, diag, & !-------------------------------------------------------------- status = nf90_get_var(fid, varid, workg, & - start= (/ nrec /), & - count=(/ 1 /) ) - - if (status /= nf90_noerr) then - call abort_ice (subname//'ERROR: Cannot get variable '//trim(varname) ) + start= (/ lnrec /), & + count=(/ 1 /)) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & + file=__FILE__, line=__LINE__) endif endif ! my_task = master_task @@ -1678,28 +1818,299 @@ subroutine ice_read_nc_point(fid, nrec, varname, work, diag, & !------------------------------------------------------------------- if (my_task==master_task .and. diag) then - write(nu_diag,*) & - 'ice_read_nc_point, fid= ',fid, ', nrec = ',nrec, & + write(nu_diag,'(2a,i8,a,i8,2a)') & + subname,' fid= ',fid, ', lnrec = ',lnrec, & ', varname = ',trim(varname) status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) - write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar + write(nu_diag,*) subname,' ndim= ',ndim,', nvar= ',nvar do id=1,ndim status = nf90_inquire_dimension(fid,id,name=dimname,len=dimlen) - write(nu_diag,*) 'Dim name = ',trim(dimname),', size = ',dimlen + write(nu_diag,*) subname,' Dim name = ',trim(dimname),', size = ',dimlen enddo endif work = workg(1) #else - call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & - file=__FILE__, line=__LINE__) + call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work = c0 ! to satisfy intent(out) attribute #endif end subroutine ice_read_nc_point !======================================================================= +! Written by T. Craig + + subroutine ice_read_nc_1D(fid, varname, work, diag, & + xdim) + + use ice_fileunits, only: nu_diag + + integer (kind=int_kind), intent(in) :: & + fid , & ! file id + xdim ! field dimensions + + logical (kind=log_kind), intent(in) :: & + diag ! if true, write diagnostic output + + character (char_len), intent(in) :: & + varname ! field name in netcdf file + + real (kind=dbl_kind), dimension(:), intent(out) :: & + work ! output array + + ! 
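Every status check converted by this patch follows one convention: a leading space before 'ERROR:' in the message, and file=__FILE__, line=__LINE__ so abort_ice can report the failure site. The canonical form, lifted from the hunks above:

    if (status /= nf90_noerr) then
       call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), &
          file=__FILE__, line=__LINE__)
    endif

__FILE__ and __LINE__ are C-preprocessor macros, so this relies on the sources carrying the preprocessed .F90 suffix.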
local variables + + character(len=*), parameter :: subname = '(ice_read_nc_1D)' + +#ifdef USE_NETCDF +! netCDF file diagnostics: + integer (kind=int_kind) :: & + varid, & ! netcdf id for field + status, & ! status output from netcdf routines + ndim, nvar, & ! sizes of netcdf file + dimlen ! size of dimension + + character (char_len) :: & + dimname ! dimension name + + real (kind=dbl_kind), dimension(xdim) :: & + workg ! output array (real, 8-byte) + + !-------------------------------------------------------------- + + if (my_task == master_task) then + + if (size(work,dim=1) < xdim) then + write(nu_diag,*) subname,' work, dim=1 ',size(work,dim=1),xdim + call abort_ice (subname//' ERROR: work array wrong size '//trim(varname), & + file=__FILE__, line=__LINE__ ) + endif + !------------------------------------------------------------- + ! Find out ID of required variable + !------------------------------------------------------------- + + status = nf90_inq_varid(fid, trim(varname), varid) + + if (status /= nf90_noerr) then + call abort_ice (subname//' ERROR: Cannot find variable '//trim(varname), & + file=__FILE__, line=__LINE__ ) + endif + + !-------------------------------------------------------------- + ! Read array + !-------------------------------------------------------------- + status = nf90_get_var( fid, varid, workg, & + start=(/1/), & + count=(/xdim/) ) + work(1:xdim) = workg(1:xdim) + + !------------------------------------------------------------------- + ! optional diagnostics + !------------------------------------------------------------------- + + if (diag) then + write(nu_diag,*) subname, & + ' fid= ',fid, ', xdim = ',xdim, & + ' varname = ',trim(varname) + status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) + write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar + endif + endif +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) + work = c0 ! to satisfy intent(out) attribute +#endif + + end subroutine ice_read_nc_1D + +!======================================================================= + +! Written by T. Craig + + subroutine ice_read_nc_2D(fid, varname, work, diag, & + xdim, ydim) + + use ice_fileunits, only: nu_diag + + integer (kind=int_kind), intent(in) :: & + fid , & ! file id + xdim, ydim ! field dimensions + + logical (kind=log_kind), intent(in) :: & + diag ! if true, write diagnostic output + + character (char_len), intent(in) :: & + varname ! field name in netcdf file + + real (kind=dbl_kind), dimension(:,:), intent(out) :: & + work ! output array + + ! local variables + + character(len=*), parameter :: subname = '(ice_read_nc_2D)' + +#ifdef USE_NETCDF +! netCDF file diagnostics: + integer (kind=int_kind) :: & + varid, & ! netcdf id for field + status, & ! status output from netcdf routines + ndim, nvar, & ! sizes of netcdf file + dimlen ! size of dimension + + character (char_len) :: & + dimname ! dimension name + + real (kind=dbl_kind), dimension(xdim,ydim) :: & + workg ! output array (real, 8-byte) + + !-------------------------------------------------------------- + + if (my_task == master_task) then + + if (size(work,dim=1) < xdim .or. & + size(work,dim=2) < ydim) then + write(nu_diag,*) subname,' work, dim=1 ',size(work,dim=1),xdim + write(nu_diag,*) subname,' work, dim=2 ',size(work,dim=2),ydim + call abort_ice (subname//' ERROR: work array wrong size '//trim(varname), & + file=__FILE__, line=__LINE__ ) + endif + !------------------------------------------------------------- + ! 
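ice_read_nc_1D above fills work on the master task only and performs no scatter, so a caller that needs the values everywhere must broadcast them itself. A hypothetical caller sketched under that assumption (the field name and the broadcast call are illustrative, not patch text):

    real (kind=dbl_kind), dimension(nilyr) :: zlevs
    character (char_len) :: vname

    vname = 'zlvl'                            ! hypothetical field name
    call ice_read_nc_1D(ncid, vname, zlevs, diag=.true., xdim=nilyr)
    call broadcast_array(zlevs, master_task)  ! from ice_broadcast, if needed

Note that varname is declared character(char_len) rather than character(len=*), so callers pass a char_len variable, not a bare literal.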
Find out ID of required variable + !------------------------------------------------------------- + + status = nf90_inq_varid(fid, trim(varname), varid) + + if (status /= nf90_noerr) then + call abort_ice (subname//' ERROR: Cannot find variable '//trim(varname), & + file=__FILE__, line=__LINE__ ) + endif + + !-------------------------------------------------------------- + ! Read array + !-------------------------------------------------------------- + status = nf90_get_var( fid, varid, workg, & + start=(/1,1/), & + count=(/xdim,ydim/) ) + work(1:xdim,1:ydim) = workg(1:xdim, 1:ydim) + + !------------------------------------------------------------------- + ! optional diagnostics + !------------------------------------------------------------------- + + if (diag) then + write(nu_diag,*) subname, & + ' fid= ',fid, ', xdim = ',xdim, & + ' ydim= ', ydim, ' varname = ',trim(varname) + status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) + write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar + endif + endif +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) + work = c0 ! to satisfy intent(out) attribute +#endif + + end subroutine ice_read_nc_2D + +!======================================================================= +!======================================================================= + +! Written by T. Craig + + subroutine ice_read_nc_3D(fid, varname, work, diag, & + xdim, ydim, zdim) + + use ice_fileunits, only: nu_diag + + integer (kind=int_kind), intent(in) :: & + fid , & ! file id + xdim, ydim,zdim ! field dimensions + + logical (kind=log_kind), intent(in) :: & + diag ! if true, write diagnostic output + + character (char_len), intent(in) :: & + varname ! field name in netcdf file + + real (kind=dbl_kind), dimension(:,:,:), intent(out) :: & + work ! output array + + ! local variables + + character(len=*), parameter :: subname = '(ice_read_nc_3D)' + +#ifdef USE_NETCDF +! netCDF file diagnostics: + integer (kind=int_kind) :: & + varid, & ! netcdf id for field + status, & ! status output from netcdf routines + ndim, nvar, & ! sizes of netcdf file + dimlen ! size of dimension + + character (char_len) :: & + dimname ! dimension name + + real (kind=dbl_kind), dimension(xdim,ydim,zdim) :: & + workg ! output array (real, 8-byte) + + !-------------------------------------------------------------- + + if (my_task == master_task) then + + if (size(work,dim=1) < xdim .or. & + size(work,dim=2) < ydim .or. & + size(work,dim=3) < zdim ) then + write(nu_diag,*) subname,' work, dim=1 ',size(work,dim=1),xdim + write(nu_diag,*) subname,' work, dim=2 ',size(work,dim=2),ydim + write(nu_diag,*) subname,' work, dim=3 ',size(work,dim=3),zdim + call abort_ice (subname//' ERROR: work array wrong size '//trim(varname), & + file=__FILE__, line=__LINE__ ) + endif + !------------------------------------------------------------- + ! Find out ID of required variable + !------------------------------------------------------------- + + status = nf90_inq_varid(fid, trim(varname), varid) + + if (status /= nf90_noerr) then + call abort_ice (subname//' ERROR: Cannot find variable '//trim(varname), & + file=__FILE__, line=__LINE__ ) + endif + + !-------------------------------------------------------------- + ! 
Read array + !-------------------------------------------------------------- + status = nf90_get_var( fid, varid, workg, & + start=(/1,1,1/), & + count=(/xdim,ydim,zdim/) ) + work(1:xdim,1:ydim,1:zdim) = workg(1:xdim, 1:ydim, 1:zdim) + + !------------------------------------------------------------------- + ! optional diagnostics + !------------------------------------------------------------------- + + if (diag) then + write(nu_diag,*) subname, & + ' fid= ',fid, ', xdim = ',xdim, & + ' ydim= ', ydim,' zdim = ',zdim, ' varname = ',trim(varname) + status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) + write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar + endif + endif +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) + work = c0 ! to satisfy intent(out) attribute +#endif + + end subroutine ice_read_nc_3D + +!======================================================================= + ! Adapted by Nicole Jeffery, LANL subroutine ice_read_nc_z(fid, nrec, varname, work, diag, & @@ -1736,16 +2147,25 @@ subroutine ice_read_nc_z(fid, nrec, varname, work, diag, & status, & ! status output from netcdf routines ndim, nvar, & ! sizes of netcdf file id, & ! dimension index - dimlen ! size of dimension + ndims, & ! number of dimensions + dimlen ! dimension size + + integer (kind=int_kind), dimension(10) :: & + dimids ! generic size dimids character (char_len) :: & dimname ! dimension name + + integer (kind=int_kind) :: lnrec ! local value of nrec + #endif character(len=*), parameter :: subname = '(ice_read_nc_z)' #ifdef USE_NETCDF + lnrec = nrec + allocate(work_z(nilyr)) if (my_task == master_task) then @@ -1755,9 +2175,31 @@ subroutine ice_read_nc_z(fid, nrec, varname, work, diag, & !------------------------------------------------------------- status = nf90_inq_varid(fid, trim(varname), varid) - if (status /= nf90_noerr) then - call abort_ice (subname//'ERROR: Cannot find variable '//trim(varname) ) + call abort_ice(subname//' ERROR: Cannot find variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + + !------------------------------------------------------------- + ! Check nrec axis size + !------------------------------------------------------------- + + status = nf90_inquire_variable(fid, varid, ndims=ndims, dimids=dimids) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: inquire variable dimids '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + if (ndims > 1) then + status = nf90_inquire_dimension(fid, dimids(2), len=dimlen) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: inquire dimension size 2 '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + if (lnrec > dimlen) then + write(nu_diag,*) subname,' ERROR not enough records, ',trim(varname),lnrec,dimlen + call abort_ice(subname//' ERROR: not enough records '//trim(varname), & + file=__FILE__, line=__LINE__) + endif endif !-------------------------------------------------------------- @@ -1765,9 +2207,12 @@ subroutine ice_read_nc_z(fid, nrec, varname, work, diag, & !-------------------------------------------------------------- status = nf90_get_var( fid, varid, work_z, & - start=(/1,nrec/), & - count=(/nilyr,1/) ) - + start=(/1,lnrec/), & + count=(/nilyr,1/)) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif endif ! 
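The 2D and 3D variants share the 1D reader's contract: the output array may be dimensioned larger than the file field, never smaller, and only the (1:xdim,1:ydim,1:zdim) corner is filled. A hypothetical caller reading into a padded array (names are illustrative, not patch text):

    real (kind=dbl_kind), dimension(nx_pad,ny_pad,ncat) :: field   ! nx_pad >= nx, ny_pad >= ny
    character (char_len) :: vname

    vname = 'swdn3d'                     ! hypothetical field name
    call ice_read_nc_3D(ncid, vname, field, diag=.false., &
                        xdim=nx, ydim=ny, zdim=ncat)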
my_task = master_task !------------------------------------------------------------------- @@ -1775,14 +2220,14 @@ subroutine ice_read_nc_z(fid, nrec, varname, work, diag, & !------------------------------------------------------------------- if (my_task==master_task .and. diag) then - write(nu_diag,*) & - 'ice_read_nc_z, fid= ',fid, ', nrec = ',nrec, & + write(nu_diag,'(2a,i8,a,i8,2a)') & + subname,' fid= ',fid, ', lnrec = ',lnrec, & ', varname = ',trim(varname) status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) - write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar + write(nu_diag,*) subname,' ndim= ',ndim,', nvar= ',nvar do id=1,ndim status = nf90_inquire_dimension(fid,id,name=dimname,len=dimlen) - write(nu_diag,*) 'Dim name = ',trim(dimname),', size = ',dimlen + write(nu_diag,*) subname,' Dim name = ',trim(dimname),', size = ',dimlen enddo endif @@ -1790,8 +2235,8 @@ subroutine ice_read_nc_z(fid, nrec, varname, work, diag, & deallocate(work_z) #else - call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & - file=__FILE__, line=__LINE__) + call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work = c0 ! to satisfy intent(out) attribute #endif end subroutine ice_read_nc_z @@ -1826,7 +2271,7 @@ subroutine ice_write_nc_xy(fid, nrec, varid, work, diag, & ! local variables - character(len=*), parameter :: subname = '(ice_read_nc_xy)' + character(len=*), parameter :: subname = '(ice_write_nc_xy)' #ifdef USE_NETCDF ! netCDF file diagnostics: @@ -1841,7 +2286,7 @@ subroutine ice_write_nc_xy(fid, nrec, varid, work, diag, & character (char_len) :: & lvarname ! variable name -! dimname ! dimension name +! dimname ! dimension name real (kind=dbl_kind), dimension(:,:), allocatable :: & work_g1 @@ -1886,7 +2331,7 @@ subroutine ice_write_nc_xy(fid, nrec, varid, work, diag, & status = nf90_put_var( fid, varid, work_g1, & start=(/1,1,nrec/), & - count=(/nx,ny,1/) ) + count=(/nx,ny,1/)) endif ! my_task = master_task @@ -1896,25 +2341,25 @@ subroutine ice_write_nc_xy(fid, nrec, varid, work, diag, & if (my_task==master_task .and. diag) then ! write(nu_diag,*) & -! 'ice_write_nc_xy, fid= ',fid, ', nrec = ',nrec, & +! subname,' fid= ',fid, ', nrec = ',nrec, & ! ', varid = ',varid ! status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) -! write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar +! write(nu_diag,*) subname,' ndim= ',ndim,', nvar= ',nvar ! do id=1,ndim ! status = nf90_inquire_dimension(fid,id,name=dimname,len=dimlen) -! write(nu_diag,*) 'Dim name = ',trim(dimname),', size = ',dimlen +! write(nu_diag,*) subname,' Dim name = ',trim(dimname),', size = ',dimlen ! enddo amin = minval(work_g1) amax = maxval(work_g1, mask = work_g1 /= spval_dbl) asum = sum (work_g1, mask = work_g1 /= spval_dbl) - write(nu_diag,*) ' min, max, sum =', amin, amax, asum, trim(lvarname) + write(nu_diag,*) subname,' min, max, sum =', amin, amax, asum, trim(lvarname) endif deallocate(work_g1) #else - call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & - file=__FILE__, line=__LINE__) + call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) #endif end subroutine ice_write_nc_xy @@ -1949,7 +2394,7 @@ subroutine ice_write_nc_xyz(fid, nrec, varid, work, diag, & ! local variables - character(len=*), parameter :: subname = '(ice_read_nc_xyz)' + character(len=*), parameter :: subname = '(ice_write_nc_xyz)' #ifdef USE_NETCDF ! 
netCDF file diagnostics: @@ -1965,7 +2410,7 @@ subroutine ice_write_nc_xyz(fid, nrec, varid, work, diag, & character (char_len) :: & lvarname ! variable name -! dimname ! dimension name +! dimname ! dimension name real (kind=dbl_kind), dimension(:,:,:), allocatable :: & work_g1 @@ -2016,7 +2461,7 @@ subroutine ice_write_nc_xyz(fid, nrec, varid, work, diag, & status = nf90_put_var( fid, varid, work_g1, & start=(/1,1,1,nrec/), & - count=(/nx,ny,ncat,1/) ) + count=(/nx,ny,ncat,1/)) endif ! my_task = master_task @@ -2026,13 +2471,13 @@ subroutine ice_write_nc_xyz(fid, nrec, varid, work, diag, & if (my_task==master_task .and. diag) then ! write(nu_diag,*) & -! 'ice_write_nc_xyz, fid= ',fid, ', nrec = ',nrec, & +! subname,' fid= ',fid, ', nrec = ',nrec, & ! ', varid = ',varid ! status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) -! write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar +! write(nu_diag,*) subname,' ndim= ',ndim,', nvar= ',nvar ! do id=1,ndim ! status = nf90_inquire_dimension(fid,id,name=dimname,len=dimlen) -! write(nu_diag,*) 'Dim name = ',trim(dimname),', size = ',dimlen +! write(nu_diag,*) subname,' Dim name = ',trim(dimname),', size = ',dimlen ! enddo amin = 10000._dbl_kind amax = -10000._dbl_kind @@ -2040,15 +2485,15 @@ subroutine ice_write_nc_xyz(fid, nrec, varid, work, diag, & amin = minval(work_g1(:,:,n)) amax = maxval(work_g1(:,:,n), mask = work_g1(:,:,n) /= spval_dbl) asum = sum (work_g1(:,:,n), mask = work_g1(:,:,n) /= spval_dbl) - write(nu_diag,*) ' min, max, sum =', amin, amax, asum, trim(lvarname) + write(nu_diag,*) subname,' min, max, sum =', amin, amax, asum, trim(lvarname) enddo endif deallocate(work_g1) #else - call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & - file=__FILE__, line=__LINE__) + call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) #endif end subroutine ice_write_nc_xyz @@ -2094,7 +2539,7 @@ subroutine ice_read_global_nc (fid, nrec, varname, work_g, diag) amin, amax, asum ! min, max values and sum of input array ! character (char_len) :: & -! dimname ! dimension name +! dimname ! dimension name ! real (kind=dbl_kind), dimension(:,:), allocatable :: & work_g3 @@ -2117,9 +2562,9 @@ subroutine ice_read_global_nc (fid, nrec, varname, work_g, diag) !------------------------------------------------------------- status = nf90_inq_varid(fid, trim(varname), varid) - if (status /= nf90_noerr) then - call abort_ice (subname//'ERROR: Cannot find variable '//trim(varname) ) + call abort_ice(subname//' ERROR: Cannot find variable '//trim(varname), & + file=__FILE__, line=__LINE__) endif !-------------------------------------------------------------- @@ -2129,12 +2574,20 @@ subroutine ice_read_global_nc (fid, nrec, varname, work_g, diag) if (orca_halogrid) then status = nf90_get_var( fid, varid, work_g3, & start=(/1,1,nrec/), & - count=(/nx_global+2,ny_global+1,1/) ) + count=(/nx_global+2,ny_global+1,1/)) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif work_g=work_g3(2:nx_global+1,1:ny_global) else status = nf90_get_var( fid, varid, work_g, & start=(/1,1,nrec/), & - count=(/nx_global,ny_global,1/) ) + count=(/nx_global,ny_global,1/)) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif endif endif ! 
my_task = master_task @@ -2144,25 +2597,25 @@ subroutine ice_read_global_nc (fid, nrec, varname, work_g, diag) if (my_task == master_task .and. diag) then ! write(nu_diag,*) & -! 'ice_read_global_nc, fid= ',fid, ', nrec = ',nrec, & +! subname,' fid= ',fid, ', nrec = ',nrec, & ! ', varname = ',trim(varname) ! status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) -! write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar +! write(nu_diag,*) subname,' ndim= ',ndim,', nvar= ',nvar ! do id=1,ndim ! status = nf90_inquire_dimension(fid,id,name=dimname,len=dimlen) -! write(nu_diag,*) 'Dim name = ',trim(dimname),', size = ',dimlen +! write(nu_diag,*) subname,' Dim name = ',trim(dimname),', size = ',dimlen ! enddo amin = minval(work_g) amax = maxval(work_g, mask = work_g /= spval_dbl) asum = sum (work_g, mask = work_g /= spval_dbl) - write(nu_diag,*) 'min, max, sum = ', amin, amax, asum, trim(varname) + write(nu_diag,*) subname,' min, max, sum = ', amin, amax, asum, trim(varname) endif if (orca_halogrid) deallocate(work_g3) #else - call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & - file=__FILE__, line=__LINE__) + call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work_g = c0 ! to satisfy intent(out) attribute #endif @@ -2190,8 +2643,8 @@ subroutine ice_close_nc(fid) status = nf90_close(fid) endif #else - call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & - file=__FILE__, line=__LINE__) + call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) #endif end subroutine ice_close_nc @@ -2249,7 +2702,7 @@ subroutine ice_read_nc_uv(fid, nrec, nzlev, varname, work, diag, & amin, amax, asum ! min, max values and sum of input array ! character (char_len) :: & -! dimname ! dimension name +! dimname ! dimension name real (kind=dbl_kind), dimension(:,:), allocatable :: & work_g1 @@ -2279,9 +2732,9 @@ subroutine ice_read_nc_uv(fid, nrec, nzlev, varname, work, diag, & !------------------------------------------------------------- status = nf90_inq_varid(fid, trim(varname), varid) - if (status /= nf90_noerr) then - call abort_ice (subname//'ERROR: Cannot find variable '//trim(varname) ) + call abort_ice(subname//' ERROR: Cannot find variable '//trim(varname), & + file=__FILE__, line=__LINE__) endif !-------------------------------------------------------------- @@ -2290,7 +2743,11 @@ subroutine ice_read_nc_uv(fid, nrec, nzlev, varname, work, diag, & status = nf90_get_var( fid, varid, work_g1, & start=(/1,1,nzlev,nrec/), & - count=(/nx,ny,1,1/) ) + count=(/nx,ny,1,1/)) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif endif ! my_task = master_task @@ -2302,7 +2759,7 @@ subroutine ice_read_nc_uv(fid, nrec, nzlev, varname, work, diag, & amin = minval(work_g1) amax = maxval(work_g1, mask = work_g1 /= spval_dbl) asum = sum (work_g1, mask = work_g1 /= spval_dbl) - write(nu_diag,*) ' min, max, sum =', amin, amax, asum, trim(varname) + write(nu_diag,*) subname,' min, max, sum =', amin, amax, asum, trim(varname) endif !------------------------------------------------------------------- @@ -2327,8 +2784,8 @@ subroutine ice_read_nc_uv(fid, nrec, nzlev, varname, work, diag, & deallocate(work_g1) #else - call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & - file=__FILE__, line=__LINE__) + call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work = c0 ! 
to satisfy intent(out) attribute #endif @@ -2380,9 +2837,9 @@ subroutine ice_read_vec_nc (fid, nrec, varname, work_g, diag) !------------------------------------------------------------- status = nf90_inq_varid(fid, trim(varname), varid) - if (status /= nf90_noerr) then - call abort_ice (subname//'ERROR: Cannot find variable '//trim(varname) ) + call abort_ice(subname//' ERROR: Cannot find variable '//trim(varname), & + file=__FILE__, line=__LINE__) endif !-------------------------------------------------------------- @@ -2391,7 +2848,12 @@ subroutine ice_read_vec_nc (fid, nrec, varname, work_g, diag) status = nf90_get_var( fid, varid, work_g, & start=(/1/), & - count=(/nrec/) ) + count=(/nrec/)) + if (status /= nf90_noerr) then + call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & + file=__FILE__, line=__LINE__) + endif + endif ! my_task = master_task !------------------------------------------------------------------- @@ -2401,12 +2863,12 @@ subroutine ice_read_vec_nc (fid, nrec, varname, work_g, diag) if (my_task == master_task .and. diag) then amin = minval(work_g) amax = maxval(work_g) - write(nu_diag,*) 'min, max, nrec = ', amin, amax, nrec + write(nu_diag,*) subname,' min, max, nrec = ', amin, amax, nrec endif #else - call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & - file=__FILE__, line=__LINE__) + call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work_g = c0 ! to satisfy intent(out) attribute #endif @@ -2437,22 +2899,25 @@ subroutine ice_get_ncvarsize(fid,varname,recsize) if (my_task == master_task) then status=nf90_inquire(fid, nDimensions = nDims) if (status /= nf90_noerr) then - call abort_ice (subname//'ERROR: inquire nDimensions' ) + call abort_ice(subname//' ERROR: inquire nDimensions', & + file=__FILE__, line=__LINE__ ) endif do i=1,nDims status = nf90_inquire_dimension(fid,i,name=cvar,len=recsize) if (status /= nf90_noerr) then - call abort_ice (subname//'ERROR: inquire len for variable '//trim(cvar) ) + call abort_ice(subname//' ERROR: inquire len for variable '//trim(cvar), & + file=__FILE__, line=__LINE__) endif if (trim(cvar) == trim(varname)) exit enddo if (trim(cvar) .ne. trim(varname)) then - call abort_ice (subname//'ERROR: Did not find variable '//trim(varname) ) + call abort_ice(subname//' ERROR: Did not find variable '//trim(varname), & + file=__FILE__, line=__LINE__) endif endif #else - call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & - file=__FILE__, line=__LINE__) + call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) recsize = 0 ! 
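ice_get_ncvarsize and ice_read_vec_nc pair naturally on the master task: the first walks the file's dimensions until one matches varname and returns its length, the second then reads exactly that many values. A hypothetical pairing (not patch text):

    integer (kind=int_kind) :: ntime
    real (kind=dbl_kind), dimension(:), allocatable :: times
    character (char_len) :: vname

    vname = 'time'
    call ice_get_ncvarsize(ncid, vname, ntime)
    allocate(times(ntime))
    call ice_read_vec_nc(ncid, ntime, vname, times, diag=.true.)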
to satisfy intent(out) attribute #endif diff --git a/cicecore/cicedynB/infrastructure/io/io_binary/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_binary/ice_restart.F90 index 91d57ea48..a6f42a6a5 100644 --- a/cicecore/cicedynB/infrastructure/io/io_binary/ice_restart.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_binary/ice_restart.F90 @@ -15,11 +15,12 @@ module ice_restart use ice_fileunits, only: nu_diag, nu_rst_pointer use ice_fileunits, only: nu_dump, nu_dump_eap, nu_dump_FY, nu_dump_age use ice_fileunits, only: nu_dump_lvl, nu_dump_pond, nu_dump_hbrine - use ice_fileunits, only: nu_dump_bgc, nu_dump_aero, nu_dump_fsd, nu_dump_iso + use ice_fileunits, only: nu_dump_iso, nu_dump_snow + use ice_fileunits, only: nu_dump_bgc, nu_dump_aero, nu_dump_fsd use ice_fileunits, only: nu_restart, nu_restart_eap, nu_restart_FY, nu_restart_age use ice_fileunits, only: nu_restart_lvl, nu_restart_pond, nu_restart_hbrine use ice_fileunits, only: nu_restart_bgc, nu_restart_aero, nu_restart_fsd - use ice_fileunits, only: nu_restart_iso + use ice_fileunits, only: nu_restart_iso, nu_restart_snow use ice_exit, only: abort_ice use icepack_intfc, only: icepack_query_parameters use icepack_intfc, only: icepack_query_tracer_sizes @@ -57,7 +58,7 @@ subroutine init_restart_read(ice_ic) logical (kind=log_kind) :: & solve_zsal, tr_fsd, & tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_pond_cesm, & - tr_pond_topo, tr_pond_lvl, tr_brine + tr_pond_topo, tr_pond_lvl, tr_brine, tr_snow character(len=char_len_long) :: & filename, filename0 @@ -82,7 +83,8 @@ subroutine init_restart_read(ice_ic) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_lvl_out=tr_lvl, tr_fsd_out=tr_fsd, & tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & - tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, tr_brine_out=tr_brine) + tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, & + tr_snow_out=tr_snow, tr_brine_out=tr_brine) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -285,6 +287,26 @@ subroutine init_restart_read(ice_ic) endif endif + if (tr_snow) then + if (my_task == master_task) then + n = index(filename0,trim(restart_file)) + if (n == 0) call abort_ice(subname//'ERROR: snow restart: filename discrepancy') + string1 = trim(filename0(1:n-1)) + string2 = trim(filename0(n+lenstr(restart_file):lenstr(filename0))) + write(filename,'(a,a,a,a)') & + string1(1:lenstr(string1)), & + restart_file(1:lenstr(restart_file)),'.snow', & + string2(1:lenstr(string2)) + if (restart_ext) then + call ice_open_ext(nu_restart_snow,filename,0) + else + call ice_open(nu_restart_snow,filename,0) + endif + read (nu_restart_snow) iignore,rignore,rignore + write(nu_diag,*) 'Reading ',filename(1:lenstr(filename)) + endif + endif + if (tr_brine) then if (my_task == master_task) then n = index(filename0,trim(restart_file)) @@ -392,7 +414,7 @@ subroutine init_restart_write(filename_spec) logical (kind=log_kind) :: & solve_zsal, tr_fsd, & tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_pond_cesm, & - tr_pond_topo, tr_pond_lvl, tr_brine + tr_pond_topo, tr_pond_lvl, tr_brine, tr_snow integer (kind=int_kind) :: & nbtrcr ! 
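The snow restart read block above has no model date at hand, so it splices the tracer suffix into whatever name the restart pointer supplied. Worked through with hypothetical values:

    ! filename0    = './restart/iced.2005-01-01-00000'   (from the pointer file)
    ! restart_file = 'iced'
    ! index(filename0, 'iced') returns n = 11, so
    !   string1  = './restart/'           ! filename0(1:n-1)
    !   string2  = '.2005-01-01-00000'    ! the tail after 'iced'
    !   filename = './restart/iced.snow.2005-01-01-00000'

The same splice is repeated for every tracer restart stream in this file; only the '.snow' literal changes.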
number of bgc tracers @@ -408,7 +430,8 @@ subroutine init_restart_write(filename_spec) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_lvl_out=tr_lvl, tr_fsd_out=tr_fsd, & tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & - tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, tr_brine_out=tr_brine) + tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, & + tr_snow_out=tr_snow, tr_brine_out=tr_brine) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -599,6 +622,26 @@ subroutine init_restart_write(filename_spec) endif + if (tr_snow) then + + write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & + restart_dir(1:lenstr(restart_dir)), & + restart_file(1:lenstr(restart_file)),'.snow.', & + myear,'-',mmonth,'-',mday,'-',msec + + if (restart_ext) then + call ice_open_ext(nu_dump_snow,filename,0) + else + call ice_open(nu_dump_snow,filename,0) + endif + + if (my_task == master_task) then + write(nu_dump_snow) istep1,timesecs,time_forc + write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) + endif + + endif + if (tr_brine) then write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & @@ -808,7 +851,7 @@ subroutine final_restart() logical (kind=log_kind) :: & solve_zsal, & tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_pond_cesm, & - tr_pond_topo, tr_pond_lvl, tr_brine + tr_pond_topo, tr_pond_lvl, tr_brine, tr_snow integer (kind=int_kind) :: & nbtrcr ! number of bgc tracers @@ -822,7 +865,8 @@ subroutine final_restart() call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_lvl_out=tr_lvl, & tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & - tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, tr_brine_out=tr_brine) + tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, & + tr_snow_out=tr_snow, tr_brine_out=tr_brine) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -838,6 +882,7 @@ subroutine final_restart() if (tr_pond_cesm) close(nu_dump_pond) if (tr_pond_lvl) close(nu_dump_pond) if (tr_pond_topo) close(nu_dump_pond) + if (tr_snow) close(nu_dump_snow) if (tr_brine) close(nu_dump_hbrine) if (solve_zsal .or. nbtrcr > 0) & close(nu_dump_bgc) diff --git a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 index 9c6b30ee1..493a91c1e 100644 --- a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 @@ -48,8 +48,8 @@ subroutine ice_write_hist (ns) use ice_blocks, only: nx_block, ny_block use ice_broadcast, only: broadcast_scalar use ice_calendar, only: msec, timesecs, idate, idate0, write_ic, & - histfreq, days_per_year, use_leap_years, dayyr, & - year_init, month_init, day_init + histfreq, histfreq_n, days_per_year, use_leap_years, dayyr, & + hh_init, mm_init, ss_init use ice_communicate, only: my_task, master_task use ice_domain, only: distrb_info use ice_domain_size, only: nx_global, ny_global, max_nstrm, max_blocks @@ -67,11 +67,9 @@ subroutine ice_write_hist (ns) ! 
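On the write side, init_restart_write stamps the model date into the snow dump name directly rather than splicing. With hypothetical settings the format expands as:

    ! restart_dir = './restart/', restart_file = 'iced'
    ! myear = 2005, mmonth = 1, mday = 1, msec = 0
    ! '(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)' gives
    !   './restart/iced.snow.2005-01-01-00000'
    ! followed by one unformatted header record: istep1, timesecs, time_forc.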
local variables - real (kind=dbl_kind), dimension(:,:), allocatable :: work_g1 - real (kind=real_kind), dimension(:,:), allocatable :: work_gr - real (kind=real_kind), dimension(:,:,:), allocatable :: work_gr3 - real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks) :: & - work1 + real (kind=dbl_kind), dimension(:,:), allocatable :: work_g1 + real (kind=dbl_kind), dimension(:,:,:), allocatable :: work1_3 + real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks) :: work1 integer (kind=int_kind) :: i,k,ic,n,nn, & ncid,status,imtid,jmtid,kmtidi,kmtids,kmtidb, cmtid,timid,varid, & @@ -205,7 +203,6 @@ subroutine ice_write_hist (ns) ! define coordinate variables !----------------------------------------------------------------- -!sgl status = nf90_def_var(ncid,'time',nf90_float,timid,varid) status = nf90_def_var(ncid,'time',nf90_double,timid,varid) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: defining var time') @@ -215,8 +212,9 @@ subroutine ice_write_hist (ns) 'ice Error: time long_name') write(cdate,'(i8.8)') idate0 - write(title,'(a,a,a,a,a,a,a,a)') 'days since ', & - cdate(1:4),'-',cdate(5:6),'-',cdate(7:8),' 00:00:00' + write(title,'(a,a4,a1,a2,a1,a2,a1,i2.2,a1,i2.2,a1,i2.2)') 'days since ', & + cdate(1:4),'-',cdate(5:6),'-',cdate(7:8),' ', & + hh_init,':',mm_init,':',ss_init status = nf90_put_att(ncid,varid,'units',title) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: time units') @@ -258,8 +256,9 @@ subroutine ice_write_hist (ns) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: time_bounds long_name') write(cdate,'(i8.8)') idate0 - write(title,'(a,a,a,a,a,a,a,a)') 'days since ', & - cdate(1:4),'-',cdate(5:6),'-',cdate(7:8),' 00:00:00' + write(title,'(a,a4,a1,a2,a1,a2,a1,i2.2,a1,i2.2,a1,i2.2)') 'days since ', & + cdate(1:4),'-',cdate(5:6),'-',cdate(7:8),' ', & + hh_init,':',mm_init,':',ss_init status = nf90_put_att(ncid,varid,'units',title) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: time_bounds units') @@ -361,20 +360,7 @@ subroutine ice_write_hist (ns) status = nf90_put_att(ncid, varid, 'units', coord_var(i)%units) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: defining units for '//coord_var(i)%short_name) - if (lprecision == nf90_float) then - status = nf90_put_att(ncid,varid,'missing_value',spval) - else - status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) - endif - if (status /= nf90_noerr) call abort_ice(subname// & - 'ERROR: defining missing_value for '//coord_var(i)%short_name) - if (lprecision == nf90_float) then - status = nf90_put_att(ncid,varid,'_FillValue',spval) - else - status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) - endif - if (status /= nf90_noerr) call abort_ice(subname// & - 'ERROR: defining _FillValue for '//coord_var(i)%short_name) + call ice_write_hist_fill(ncid,varid,coord_var(i)%short_name,history_precision) if (coord_var(i)%short_name == 'ULAT') then status = nf90_put_att(ncid,varid,'comment', & 'Latitude of NE corner of T grid cell') @@ -421,18 +407,7 @@ subroutine ice_write_hist (ns) if (status /= nf90_noerr) call abort_ice(subname//'ERROR: tmask units') status = nf90_put_att(ncid,varid,'comment', '0 = land, 1 = ocean') if (status /= nf90_noerr) call abort_ice(subname//'ERROR: tmask comment') - if (lprecision == nf90_float) then - status = nf90_put_att(ncid,varid,'missing_value',spval) - else - status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) - endif - if (status /= nf90_noerr) call abort_ice(subname//'ERROR: defining missing_value for tmask') - 
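The time and time_bounds units strings above now honor a nonzero start time of day: hh_init, mm_init and ss_init replace the hard-wired ' 00:00:00'. With hypothetical values the new format expands as:

    ! idate0 = 20050101, hh_init = 12, mm_init = 30, ss_init = 0
    ! '(a,a4,a1,a2,a1,a2,a1,i2.2,a1,i2.2,a1,i2.2)' gives
    !   'days since 2005-01-01 12:30:00'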
if (lprecision == nf90_float) then - status = nf90_put_att(ncid,varid,'_FillValue',spval) - else - status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) - endif - if (status /= nf90_noerr) call abort_ice(subname//'ERROR: defining _FillValue for tmask') + call ice_write_hist_fill(ncid,varid,'tmask',history_precision) endif if (igrd(n_blkmask)) then @@ -444,18 +419,7 @@ subroutine ice_write_hist (ns) if (status /= nf90_noerr) call abort_ice(subname//'ERROR: blkmask units') status = nf90_put_att(ncid,varid,'comment', 'mytask + iblk/100') if (status /= nf90_noerr) call abort_ice(subname//'ERROR: blkmask comment') - if (lprecision == nf90_float) then - status = nf90_put_att(ncid,varid,'missing_value',spval) - else - status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) - endif - if (status /= nf90_noerr) call abort_ice(subname//'ERROR: defining missing_value for blkmask') - if (lprecision == nf90_float) then - status = nf90_put_att(ncid,varid,'_FillValue',spval) - else - status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) - endif - if (status /= nf90_noerr) call abort_ice(subname//'ERROR: defining _FillValue for blkmask') + call ice_write_hist_fill(ncid,varid,'blkmask',history_precision) endif do i = 3, nvar ! note n_tmask=1, n_blkmask=2 @@ -473,20 +437,7 @@ subroutine ice_write_hist (ns) status = nf90_put_att(ncid, varid, 'coordinates', var(i)%coordinates) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: defining coordinates for '//var(i)%req%short_name) - if (lprecision == nf90_float) then - status = nf90_put_att(ncid,varid,'missing_value',spval) - else - status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) - endif - if (status /= nf90_noerr) call abort_ice(subname// & - 'ERROR: defining missing_value for '//var(i)%req%short_name) - if (lprecision == nf90_float) then - status = nf90_put_att(ncid,varid,'_FillValue',spval) - else - status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) - endif - if (status /= nf90_noerr) call abort_ice(subname// & - 'ERROR: defining _FillValue for '//var(i)%req%short_name) + call ice_write_hist_fill(ncid,varid,var(i)%req%short_name,history_precision) endif enddo @@ -506,20 +457,7 @@ subroutine ice_write_hist (ns) status = nf90_put_att(ncid, varid, 'units', var_nverts(i)%units) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: defining units for '//var_nverts(i)%short_name) - if (lprecision == nf90_float) then - status = nf90_put_att(ncid,varid,'missing_value',spval) - else - status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) - endif - if (status /= nf90_noerr) call abort_ice(subname// & - 'ERROR: defining missing_value for '//var_nverts(i)%short_name) - if (lprecision == nf90_float) then - status = nf90_put_att(ncid,varid,'_FillValue',spval) - else - status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) - endif - if (status /= nf90_noerr) call abort_ice(subname// & - 'ERROR: defining _FillValue for '//var_nverts(i)%short_name) + call ice_write_hist_fill(ncid,varid,var_nverts(i)%short_name,history_precision) endif enddo @@ -545,20 +483,7 @@ subroutine ice_write_hist (ns) avail_hist_fields(n)%vcellmeas) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: defining cell measures for '//avail_hist_fields(n)%vname) - if (lprecision == nf90_float) then - status = nf90_put_att(ncid,varid,'missing_value',spval) - else - status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) - endif - if (status /= nf90_noerr) call abort_ice(subname// & - 'ERROR: defining missing_value for 
'//avail_hist_fields(n)%vname) - if (lprecision == nf90_float) then - status = nf90_put_att(ncid,varid,'_FillValue',spval) - else - status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) - endif - if (status /= nf90_noerr) call abort_ice(subname// & - 'ERROR: defining _FillValue for '//avail_hist_fields(n)%vname) + call ice_write_hist_fill(ncid,varid,avail_hist_fields(n)%vname,history_precision) !----------------------------------------------------------------- ! Add cell_methods attribute to variables if averaged @@ -575,7 +500,8 @@ subroutine ice_write_hist (ns) endif endif - if (histfreq(ns) == '1' .or. .not. hist_avg & + if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & + .or..not. hist_avg & .or. n==n_divu(ns) .or. n==n_shear(ns) & ! snapshots .or. n==n_sig1(ns) .or. n==n_sig2(ns) & .or. n==n_sigP(ns) .or. n==n_trsig(ns) & @@ -616,20 +542,7 @@ subroutine ice_write_hist (ns) avail_hist_fields(n)%vcellmeas) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: defining cell measures for '//avail_hist_fields(n)%vname) - if (lprecision == nf90_float) then - status = nf90_put_att(ncid,varid,'missing_value',spval) - else - status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) - endif - if (status /= nf90_noerr) call abort_ice(subname// & - 'ERROR: defining missing_value for '//avail_hist_fields(n)%vname) - if (lprecision == nf90_float) then - status = nf90_put_att(ncid,varid,'_FillValue',spval) - else - status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) - endif - if (status /= nf90_noerr) call abort_ice(subname// & - 'ERROR: defining _FillValue for '//avail_hist_fields(n)%vname) + call ice_write_hist_fill(ncid,varid,avail_hist_fields(n)%vname,history_precision) !----------------------------------------------------------------- ! Add cell_methods attribute to variables if averaged @@ -640,7 +553,8 @@ subroutine ice_write_hist (ns) 'ERROR: defining cell methods for '//avail_hist_fields(n)%vname) endif - if (histfreq(ns) == '1' .or. .not. hist_avg) then + if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & + .or..not. hist_avg) then status = nf90_put_att(ncid,varid,'time_rep','instantaneous') else status = nf90_put_att(ncid,varid,'time_rep','averaged') @@ -675,20 +589,7 @@ subroutine ice_write_hist (ns) avail_hist_fields(n)%vcellmeas) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: defining cell measures for '//avail_hist_fields(n)%vname) - if (lprecision == nf90_float) then - status = nf90_put_att(ncid,varid,'missing_value',spval) - else - status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) - endif - if (status /= nf90_noerr) call abort_ice(subname// & - 'ERROR: defining missing_value for '//avail_hist_fields(n)%vname) - if (lprecision == nf90_float) then - status = nf90_put_att(ncid,varid,'_FillValue',spval) - else - status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) - endif - if (status /= nf90_noerr) call abort_ice(subname// & - 'ERROR: defining _FillValue for '//avail_hist_fields(n)%vname) + call ice_write_hist_fill(ncid,varid,avail_hist_fields(n)%vname,history_precision) endif enddo ! 
num_avail_hist_fields_3Dz @@ -720,20 +621,7 @@ subroutine ice_write_hist (ns) avail_hist_fields(n)%vcellmeas) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: defining cell measures for '//avail_hist_fields(n)%vname) - if (lprecision == nf90_float) then - status = nf90_put_att(ncid,varid,'missing_value',spval) - else - status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) - endif - if (status /= nf90_noerr) call abort_ice(subname// & - 'ERROR: defining missing_value for '//avail_hist_fields(n)%vname) - if (lprecision == nf90_float) then - status = nf90_put_att(ncid,varid,'_FillValue',spval) - else - status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) - endif - if (status /= nf90_noerr) call abort_ice(subname// & - 'ERROR: defining _FillValue for '//avail_hist_fields(n)%vname) + call ice_write_hist_fill(ncid,varid,avail_hist_fields(n)%vname,history_precision) endif enddo ! num_avail_hist_fields_3Db @@ -765,20 +653,7 @@ subroutine ice_write_hist (ns) avail_hist_fields(n)%vcellmeas) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: defining cell measures for '//avail_hist_fields(n)%vname) - if (lprecision == nf90_float) then - status = nf90_put_att(ncid,varid,'missing_value',spval) - else - status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) - endif - if (status /= nf90_noerr) call abort_ice(subname// & - 'ERROR: defining missing_value for '//avail_hist_fields(n)%vname) - if (lprecision == nf90_float) then - status = nf90_put_att(ncid,varid,'_FillValue',spval) - else - status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) - endif - if (status /= nf90_noerr) call abort_ice(subname// & - 'ERROR: defining _FillValue for '//avail_hist_fields(n)%vname) + call ice_write_hist_fill(ncid,varid,avail_hist_fields(n)%vname,history_precision) endif enddo ! num_avail_hist_fields_3Da @@ -810,20 +685,7 @@ subroutine ice_write_hist (ns) avail_hist_fields(n)%vcellmeas) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: defining cell measures for '//avail_hist_fields(n)%vname) - if (lprecision == nf90_float) then - status = nf90_put_att(ncid,varid,'missing_value',spval) - else - status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) - endif - if (status /= nf90_noerr) call abort_ice(subname// & - 'ERROR: defining missing_value for '//avail_hist_fields(n)%vname) - if (lprecision == nf90_float) then - status = nf90_put_att(ncid,varid,'_FillValue',spval) - else - status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) - endif - if (status /= nf90_noerr) call abort_ice(subname// & - 'ERROR: defining _FillValue for '//avail_hist_fields(n)%vname) + call ice_write_hist_fill(ncid,varid,avail_hist_fields(n)%vname,history_precision) endif enddo ! 
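Every one of the roughly ten-line missing_value/_FillValue blocks removed in the hunks above collapses to the same call:

    call ice_write_hist_fill(ncid, varid, avail_hist_fields(n)%vname, &
                             history_precision)

which is expected to set both attributes to spval or spval_dbl according to history_precision; the helper itself is defined at the end of this file.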
num_avail_hist_fields_3Df @@ -857,20 +719,7 @@ subroutine ice_write_hist (ns) avail_hist_fields(n)%vcellmeas) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: defining cell measures for '//avail_hist_fields(n)%vname) - if (lprecision == nf90_float) then - status = nf90_put_att(ncid,varid,'missing_value',spval) - else - status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) - endif - if (status /= nf90_noerr) call abort_ice(subname// & - 'ERROR: defining missing_value for '//avail_hist_fields(n)%vname) - if (lprecision == nf90_float) then - status = nf90_put_att(ncid,varid,'_FillValue',spval) - else - status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) - endif - if (status /= nf90_noerr) call abort_ice(subname// & - 'ERROR: defining _FillValue for '//avail_hist_fields(n)%vname) + call ice_write_hist_fill(ncid,varid,avail_hist_fields(n)%vname,history_precision) !----------------------------------------------------------------- ! Add cell_methods attribute to variables if averaged @@ -881,7 +730,8 @@ subroutine ice_write_hist (ns) 'ERROR: defining cell methods for '//avail_hist_fields(n)%vname) endif - if (histfreq(ns) == '1' .or. .not. hist_avg) then + if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & + .or..not. hist_avg) then status = nf90_put_att(ncid,varid,'time_rep','instantaneous') else status = nf90_put_att(ncid,varid,'time_rep','averaged') @@ -918,20 +768,7 @@ subroutine ice_write_hist (ns) avail_hist_fields(n)%vcellmeas) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: defining cell measures for '//avail_hist_fields(n)%vname) - if (lprecision == nf90_float) then - status = nf90_put_att(ncid,varid,'missing_value',spval) - else - status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) - endif - if (status /= nf90_noerr) call abort_ice(subname// & - 'ERROR: defining missing_value for '//avail_hist_fields(n)%vname) - if (lprecision == nf90_float) then - status = nf90_put_att(ncid,varid,'_FillValue',spval) - else - status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) - endif - if (status /= nf90_noerr) call abort_ice(subname// & - 'ERROR: defining _FillValue for '//avail_hist_fields(n)%vname) + call ice_write_hist_fill(ncid,varid,avail_hist_fields(n)%vname,history_precision) !----------------------------------------------------------------- ! Add cell_methods attribute to variables if averaged @@ -942,7 +779,8 @@ subroutine ice_write_hist (ns) 'ERROR: defining cell methods for '//avail_hist_fields(n)%vname) endif - if (histfreq(ns) == '1' .or. .not. hist_avg) then + if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & + .or..not. 
hist_avg) then status = nf90_put_att(ncid,varid,'time_rep','instantaneous') else status = nf90_put_att(ncid,varid,'time_rep','averaged') @@ -979,20 +817,7 @@ subroutine ice_write_hist (ns) avail_hist_fields(n)%vcellmeas) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: defining cell measures for '//avail_hist_fields(n)%vname) - if (lprecision == nf90_float) then - status = nf90_put_att(ncid,varid,'missing_value',spval) - else - status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) - endif - if (status /= nf90_noerr) call abort_ice(subname// & - 'ERROR: defining missing_value for '//avail_hist_fields(n)%vname) - if (lprecision == nf90_float) then - status = nf90_put_att(ncid,varid,'_FillValue',spval) - else - status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) - endif - if (status /= nf90_noerr) call abort_ice(subname// & - 'ERROR: defining _FillValue for '//avail_hist_fields(n)%vname) + call ice_write_hist_fill(ncid,varid,avail_hist_fields(n)%vname,history_precision) !----------------------------------------------------------------- ! Add cell_methods attribute to variables if averaged @@ -1003,7 +828,8 @@ subroutine ice_write_hist (ns) 'ERROR: defining cell methods for '//avail_hist_fields(n)%vname) endif - if (histfreq(ns) == '1' .or. .not. hist_avg) then + if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & + .or..not. hist_avg) then status = nf90_put_att(ncid,varid,'time_rep','instantaneous') else status = nf90_put_att(ncid,varid,'time_rep','averaged') @@ -1114,9 +940,7 @@ subroutine ice_write_hist (ns) if (my_task==master_task) then allocate(work_g1(nx_global,ny_global)) - allocate(work_gr(nx_global,ny_global)) else - allocate(work_gr(1,1)) ! to save memory allocate(work_g1(1,1)) endif @@ -1147,11 +971,10 @@ subroutine ice_write_hist (ns) END SELECT if (my_task == master_task) then - work_gr = work_g1 status = nf90_inq_varid(ncid, coord_var(i)%short_name, varid) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: getting varid for '//coord_var(i)%short_name) - status = nf90_put_var(ncid,varid,work_gr) + status = nf90_put_var(ncid,varid,work_g1) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: writing'//coord_var(i)%short_name) endif @@ -1193,11 +1016,10 @@ subroutine ice_write_hist (ns) if (igrd(n_tmask)) then call gather_global(work_g1, hm, master_task, distrb_info) if (my_task == master_task) then - work_gr=work_g1 status = nf90_inq_varid(ncid, 'tmask', varid) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: getting varid for tmask') - status = nf90_put_var(ncid,varid,work_gr) + status = nf90_put_var(ncid,varid,work_g1) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: writing variable tmask') endif @@ -1206,11 +1028,10 @@ subroutine ice_write_hist (ns) if (igrd(n_blkmask)) then call gather_global(work_g1, bm, master_task, distrb_info) if (my_task == master_task) then - work_gr=work_g1 status = nf90_inq_varid(ncid, 'blkmask', varid) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: getting varid for blkmask') - status = nf90_put_var(ncid,varid,work_gr) + status = nf90_put_var(ncid,varid,work_g1) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: writing variable blkmask') endif @@ -1243,31 +1064,28 @@ subroutine ice_write_hist (ns) END SELECT if (my_task == master_task) then - work_gr=work_g1 status = nf90_inq_varid(ncid, var(i)%req%short_name, varid) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: getting varid for '//var(i)%req%short_name) - status = 
nf90_put_var(ncid,varid,work_gr) + status = nf90_put_var(ncid,varid,work_g1) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: writing variable '//var(i)%req%short_name) endif endif enddo - deallocate(work_gr) - !---------------------------------------------------------------- ! Write coordinates of grid box vertices !---------------------------------------------------------------- if (f_bounds) then if (my_task==master_task) then - allocate(work_gr3(nverts,nx_global,ny_global)) + allocate(work1_3(nverts,nx_global,ny_global)) else - allocate(work_gr3(1,1,1)) ! to save memory + allocate(work1_3(1,1,1)) ! to save memory endif - work_gr3(:,:,:) = c0 + work1_3(:,:,:) = c0 work1 (:,:,:) = c0 do i = 1, nvar_verts @@ -1277,25 +1095,25 @@ subroutine ice_write_hist (ns) do ivertex = 1, nverts work1(:,:,:) = lont_bounds(ivertex,:,:,:) call gather_global(work_g1, work1, master_task, distrb_info) - if (my_task == master_task) work_gr3(ivertex,:,:) = work_g1(:,:) + if (my_task == master_task) work1_3(ivertex,:,:) = work_g1(:,:) enddo CASE ('latt_bounds') do ivertex = 1, nverts work1(:,:,:) = latt_bounds(ivertex,:,:,:) call gather_global(work_g1, work1, master_task, distrb_info) - if (my_task == master_task) work_gr3(ivertex,:,:) = work_g1(:,:) + if (my_task == master_task) work1_3(ivertex,:,:) = work_g1(:,:) enddo CASE ('lonu_bounds') do ivertex = 1, nverts work1(:,:,:) = lonu_bounds(ivertex,:,:,:) call gather_global(work_g1, work1, master_task, distrb_info) - if (my_task == master_task) work_gr3(ivertex,:,:) = work_g1(:,:) + if (my_task == master_task) work1_3(ivertex,:,:) = work_g1(:,:) enddo CASE ('latu_bounds') do ivertex = 1, nverts work1(:,:,:) = latu_bounds(ivertex,:,:,:) call gather_global(work_g1, work1, master_task, distrb_info) - if (my_task == master_task) work_gr3(ivertex,:,:) = work_g1(:,:) + if (my_task == master_task) work1_3(ivertex,:,:) = work_g1(:,:) enddo END SELECT @@ -1303,24 +1121,18 @@ subroutine ice_write_hist (ns) status = nf90_inq_varid(ncid, var_nverts(i)%short_name, varid) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: getting varid for '//var_nverts(i)%short_name) - status = nf90_put_var(ncid,varid,work_gr3) + status = nf90_put_var(ncid,varid,work1_3) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: writing variable '//var_nverts(i)%short_name) endif enddo - deallocate(work_gr3) + deallocate(work1_3) endif !----------------------------------------------------------------- ! write variable data !----------------------------------------------------------------- - if (my_task==master_task) then - allocate(work_gr(nx_global,ny_global)) - else - allocate(work_gr(1,1)) ! to save memory - endif - work_gr(:,:) = c0 work_g1(:,:) = c0 do n=1,num_avail_hist_fields_2D @@ -1328,19 +1140,18 @@ subroutine ice_write_hist (ns) call gather_global(work_g1, a2D(:,:,n,:), & master_task, distrb_info) if (my_task == master_task) then - work_gr(:,:) = work_g1(:,:) status = nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: getting varid for '//avail_hist_fields(n)%vname) - status = nf90_put_var(ncid,varid,work_gr(:,:), & + status = nf90_put_var(ncid,varid,work_g1, & count=(/nx_global,ny_global/)) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: writing variable '//avail_hist_fields(n)%vname) endif + endif enddo ! 
num_avail_hist_fields_2D - work_gr(:,:) = c0 work_g1(:,:) = c0 do n = n2D + 1, n3Dccum @@ -1354,13 +1165,12 @@ subroutine ice_write_hist (ns) do k = 1, ncat_hist call gather_global(work_g1, a3Dc(:,:,k,nn,:), & master_task, distrb_info) - work_gr(:,:) = work_g1(:,:) if (my_task == master_task) then status = nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: getting varid for '//avail_hist_fields(n)%vname) - status = nf90_put_var(ncid,varid,work_gr(:,:), & + status = nf90_put_var(ncid,varid,work_g1, & start=(/ 1, 1,k/), & count=(/nx_global,ny_global,1/)) if (status /= nf90_noerr) call abort_ice(subname// & @@ -1370,7 +1180,6 @@ subroutine ice_write_hist (ns) endif enddo ! num_avail_hist_fields_3Dc - work_gr(:,:) = c0 work_g1(:,:) = c0 do n = n3Dccum+1, n3Dzcum @@ -1384,10 +1193,9 @@ subroutine ice_write_hist (ns) do k = 1, nzilyr call gather_global(work_g1, a3Dz(:,:,k,nn,:), & master_task, distrb_info) - work_gr(:,:) = work_g1(:,:) if (my_task == master_task) then - status = nf90_put_var(ncid,varid,work_gr(:,:), & + status = nf90_put_var(ncid,varid,work_g1, & start=(/ 1, 1,k/), & count=(/nx_global,ny_global,1/)) if (status /= nf90_noerr) call abort_ice(subname// & @@ -1397,7 +1205,6 @@ subroutine ice_write_hist (ns) endif enddo ! num_avail_hist_fields_3Dz - work_gr(:,:) = c0 work_g1(:,:) = c0 do n = n3Dzcum+1, n3Dbcum @@ -1411,10 +1218,9 @@ subroutine ice_write_hist (ns) do k = 1, nzblyr call gather_global(work_g1, a3Db(:,:,k,nn,:), & master_task, distrb_info) - work_gr(:,:) = work_g1(:,:) if (my_task == master_task) then - status = nf90_put_var(ncid,varid,work_gr(:,:), & + status = nf90_put_var(ncid,varid,work_g1, & start=(/ 1, 1,k/), & count=(/nx_global,ny_global,1/)) if (status /= nf90_noerr) call abort_ice(subname// & @@ -1424,7 +1230,6 @@ subroutine ice_write_hist (ns) endif enddo ! num_avail_hist_fields_3Db - work_gr(:,:) = c0 work_g1(:,:) = c0 do n = n3Dbcum+1, n3Dacum @@ -1438,10 +1243,9 @@ subroutine ice_write_hist (ns) do k = 1, nzalyr call gather_global(work_g1, a3Da(:,:,k,nn,:), & master_task, distrb_info) - work_gr(:,:) = work_g1(:,:) if (my_task == master_task) then - status = nf90_put_var(ncid,varid,work_gr(:,:), & + status = nf90_put_var(ncid,varid,work_g1, & start=(/ 1, 1,k/), & count=(/nx_global,ny_global,1/)) if (status /= nf90_noerr) call abort_ice(subname// & @@ -1451,7 +1255,6 @@ subroutine ice_write_hist (ns) endif enddo ! num_avail_hist_fields_3Da - work_gr(:,:) = c0 work_g1(:,:) = c0 do n = n3Dacum+1, n3Dfcum @@ -1465,9 +1268,8 @@ subroutine ice_write_hist (ns) do k = 1, nfsd_hist call gather_global(work_g1, a3Df(:,:,k,nn,:), & master_task, distrb_info) - work_gr(:,:) = work_g1(:,:) if (my_task == master_task) then - status = nf90_put_var(ncid,varid,work_gr(:,:), & + status = nf90_put_var(ncid,varid,work_g1, & start=(/ 1, 1,k/), & count=(/nx_global,ny_global,1/)) if (status /= nf90_noerr) call abort_ice(subname// & @@ -1477,7 +1279,6 @@ subroutine ice_write_hist (ns) endif enddo ! 
num_avail_hist_fields_3Df - work_gr(:,:) = c0 work_g1(:,:) = c0 do n = n3Dfcum+1, n4Dicum @@ -1492,9 +1293,8 @@ subroutine ice_write_hist (ns) do k = 1, nzilyr call gather_global(work_g1, a4Di(:,:,k,ic,nn,:), & master_task, distrb_info) - work_gr(:,:) = work_g1(:,:) if (my_task == master_task) then - status = nf90_put_var(ncid,varid,work_gr(:,:), & + status = nf90_put_var(ncid,varid,work_g1, & start=(/ 1, 1,k,ic/), & count=(/nx_global,ny_global,1, 1/)) if (status /= nf90_noerr) call abort_ice(subname// & @@ -1505,7 +1305,6 @@ subroutine ice_write_hist (ns) endif enddo ! num_avail_hist_fields_4Di - work_gr(:,:) = c0 work_g1(:,:) = c0 do n = n4Dicum+1, n4Dscum @@ -1520,9 +1319,8 @@ subroutine ice_write_hist (ns) do k = 1, nzslyr call gather_global(work_g1, a4Ds(:,:,k,ic,nn,:), & master_task, distrb_info) - work_gr(:,:) = work_g1(:,:) if (my_task == master_task) then - status = nf90_put_var(ncid,varid,work_gr(:,:), & + status = nf90_put_var(ncid,varid,work_g1, & start=(/ 1, 1,k,ic/), & count=(/nx_global,ny_global,1, 1/)) if (status /= nf90_noerr) call abort_ice(subname// & @@ -1545,9 +1343,8 @@ subroutine ice_write_hist (ns) do k = 1, nfsd_hist call gather_global(work_g1, a4Df(:,:,k,ic,nn,:), & master_task, distrb_info) - work_gr(:,:) = work_g1(:,:) if (my_task == master_task) then - status = nf90_put_var(ncid,varid,work_gr(:,:), & + status = nf90_put_var(ncid,varid,work_g1, & start=(/ 1, 1,k,ic/), & count=(/nx_global,ny_global,1, 1/)) if (status /= nf90_noerr) call abort_ice(subname// & @@ -1558,7 +1355,6 @@ subroutine ice_write_hist (ns) endif enddo ! num_avail_hist_fields_4Df - deallocate(work_gr) deallocate(work_g1) !----------------------------------------------------------------- @@ -1580,6 +1376,43 @@ subroutine ice_write_hist (ns) end subroutine ice_write_hist +!======================================================================= + + subroutine ice_write_hist_fill(ncid,varid,vname,precision) + + use ice_kinds_mod +#ifdef USE_NETCDF + use netcdf +#endif + + integer (kind=int_kind), intent(in) :: ncid ! netcdf file id + integer (kind=int_kind), intent(in) :: varid ! netcdf var id + character(len=*), intent(in) :: vname ! var name + integer (kind=int_kind), intent(in) :: precision ! precision + + ! 
local variables + + integer (kind=int_kind) :: status + character(len=*), parameter :: subname = '(ice_write_hist_fill)' + + if (precision == 8) then + status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) + else + status = nf90_put_att(ncid,varid,'missing_value',spval) + endif + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining missing_value for '//trim(vname)) + + if (precision == 8) then + status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) + else + status = nf90_put_att(ncid,varid,'_FillValue',spval) + endif + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining _FillValue for '//trim(vname)) + + end subroutine ice_write_hist_fill + !======================================================================= end module ice_history_write diff --git a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 index e744caf09..f6002ff40 100644 --- a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 @@ -145,7 +145,7 @@ subroutine init_restart_write(filename_spec) logical (kind=log_kind) :: & solve_zsal, skl_bgc, z_tracers, tr_fsd, & tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_pond_cesm, & - tr_pond_topo, tr_pond_lvl, tr_brine, & + tr_pond_topo, tr_pond_lvl, tr_brine, tr_snow, & tr_bgc_N, tr_bgc_C, tr_bgc_Nit, & tr_bgc_Sil, tr_bgc_DMS, & tr_bgc_chl, tr_bgc_Am, & @@ -181,7 +181,8 @@ subroutine init_restart_write(filename_spec) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_lvl_out=tr_lvl, tr_fsd_out=tr_fsd, & tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & - tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, tr_brine_out=tr_brine, & + tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, & + tr_snow_out=tr_snow, tr_brine_out=tr_brine, & tr_bgc_N_out=tr_bgc_N, tr_bgc_C_out=tr_bgc_C, tr_bgc_Nit_out=tr_bgc_Nit, & tr_bgc_Sil_out=tr_bgc_Sil, tr_bgc_DMS_out=tr_bgc_DMS, & tr_bgc_chl_out=tr_bgc_chl, tr_bgc_Am_out=tr_bgc_Am, & @@ -480,6 +481,16 @@ subroutine init_restart_write(filename_spec) call define_rest_field(ncid,'qsno'//trim(nchar),dims) enddo + if (tr_snow) then + do k=1,nslyr + write(nchar,'(i3.3)') k + call define_rest_field(ncid,'smice'//trim(nchar),dims) + call define_rest_field(ncid,'smliq'//trim(nchar),dims) + call define_rest_field(ncid, 'rhos'//trim(nchar),dims) + call define_rest_field(ncid, 'rsnw'//trim(nchar),dims) + enddo + endif + if (tr_fsd) then do k=1,nfsd write(nchar,'(i3.3)') k diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 index 72a1ed97f..0e91d42d0 100644 --- a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 @@ -18,6 +18,7 @@ module ice_history_write use ice_kinds_mod + use ice_constants, only: c0, c360, spval, spval_dbl use ice_fileunits, only: nu_diag use ice_exit, only: abort_ice use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted @@ -42,9 +43,9 @@ subroutine ice_write_hist (ns) use ice_blocks, only: nx_block, ny_block use ice_broadcast, only: broadcast_scalar use ice_calendar, only: msec, timesecs, idate, idate0, write_ic, & - histfreq, days_per_year, use_leap_years, dayyr + histfreq, histfreq_n, days_per_year, use_leap_years, dayyr, & + hh_init, mm_init, ss_init use ice_communicate, only: my_task, 
master_task - use ice_constants, only: c0, c360, spval, spval_dbl use ice_domain, only: distrb_info, nblocks use ice_domain_size, only: nx_global, ny_global, max_blocks, max_nstrm use ice_gather_scatter, only: gather_global @@ -70,7 +71,6 @@ subroutine ice_write_hist (ns) integer (kind=int_kind), dimension(5) :: dimidcz integer (kind=int_kind), dimension(3) :: dimid_nverts integer (kind=int_kind), dimension(6) :: dimidex - real (kind=real_kind) :: ltime real (kind= dbl_kind) :: ltime2 character (char_len) :: title character (char_len_long) :: ncfile(max_nstrm) @@ -116,10 +116,15 @@ subroutine ice_write_hist (ns) TYPE(coord_attributes), dimension(nvarz) :: var_nz CHARACTER (char_len), dimension(ncoord) :: coord_bounds - real (kind=dbl_kind), allocatable :: workr2(:,:,:) - real (kind=dbl_kind), allocatable :: workr3(:,:,:,:) - real (kind=dbl_kind), allocatable :: workr4(:,:,:,:,:) - real (kind=dbl_kind), allocatable :: workr3v(:,:,:,:) + real (kind=dbl_kind) , allocatable :: workd2(:,:,:) + real (kind=dbl_kind) , allocatable :: workd3(:,:,:,:) + real (kind=dbl_kind) , allocatable :: workd4(:,:,:,:,:) + real (kind=dbl_kind) , allocatable :: workd3v(:,:,:,:) + + real (kind=real_kind), allocatable :: workr2(:,:,:) + real (kind=real_kind), allocatable :: workr3(:,:,:,:) + real (kind=real_kind), allocatable :: workr4(:,:,:,:,:) + real (kind=real_kind), allocatable :: workr3v(:,:,:,:) character(len=char_len_long) :: & filename @@ -164,19 +169,18 @@ subroutine ice_write_hist (ns) call ice_pio_init(mode='write', filename=trim(filename), File=File, & clobber=.true., cdf64=lcdf64, iotype=iotype) - call ice_pio_initdecomp(iodesc=iodesc2d) - call ice_pio_initdecomp(ndim3=ncat_hist, iodesc=iodesc3dc) - call ice_pio_initdecomp(ndim3=nzilyr, iodesc=iodesc3di) - call ice_pio_initdecomp(ndim3=nzblyr, iodesc=iodesc3db) - call ice_pio_initdecomp(ndim3=nzalyr, iodesc=iodesc3da) - call ice_pio_initdecomp(ndim3=nfsd_hist, iodesc=iodesc3df) - call ice_pio_initdecomp(ndim3=nverts, iodesc=iodesc3dv, inner_dim=.true.) - call ice_pio_initdecomp(ndim3=nzilyr, ndim4=ncat_hist, iodesc=iodesc4di) - call ice_pio_initdecomp(ndim3=nzslyr, ndim4=ncat_hist, iodesc=iodesc4ds) - call ice_pio_initdecomp(ndim3=nfsd_hist, ndim4=ncat_hist, iodesc=iodesc4df) + call ice_pio_initdecomp(iodesc=iodesc2d, precision=history_precision) + call ice_pio_initdecomp(ndim3=ncat_hist, iodesc=iodesc3dc, precision=history_precision) + call ice_pio_initdecomp(ndim3=nzilyr, iodesc=iodesc3di, precision=history_precision) + call ice_pio_initdecomp(ndim3=nzblyr, iodesc=iodesc3db, precision=history_precision) + call ice_pio_initdecomp(ndim3=nzalyr, iodesc=iodesc3da, precision=history_precision) + call ice_pio_initdecomp(ndim3=nfsd_hist, iodesc=iodesc3df, precision=history_precision) + call ice_pio_initdecomp(ndim3=nverts, iodesc=iodesc3dv, inner_dim=.true., precision=history_precision) + call ice_pio_initdecomp(ndim3=nzilyr, ndim4=ncat_hist, iodesc=iodesc4di, precision=history_precision) + call ice_pio_initdecomp(ndim3=nzslyr, ndim4=ncat_hist, iodesc=iodesc4ds, precision=history_precision) + call ice_pio_initdecomp(ndim3=nfsd_hist, ndim4=ncat_hist, iodesc=iodesc4df, precision=history_precision) ltime2 = timesecs/secday - ltime = real(timesecs/secday,kind=real_kind) ! option of turning on double precision history files lprecision = pio_real @@ -186,7 +190,7 @@ subroutine ice_write_hist (ns) ! define dimensions !----------------------------------------------------------------- - if (hist_avg .and. 
histfreq(ns) /= '1') then + if (hist_avg) then status = pio_def_dim(File,'d2',2,boundid) endif @@ -205,13 +209,13 @@ subroutine ice_write_hist (ns) ! define coordinate variables: time, time_bounds !----------------------------------------------------------------- -!sgl status = pio_def_var(File,'time',pio_real,(/timid/),varid) status = pio_def_var(File,'time',pio_double,(/timid/),varid) status = pio_put_att(File,varid,'long_name','model time') write(cdate,'(i8.8)') idate0 - write(title,'(a,a,a,a,a,a,a)') 'days since ', & - cdate(1:4),'-',cdate(5:6),'-',cdate(7:8),' 00:00:00' + write(title,'(a,a4,a1,a2,a1,a2,a1,i2.2,a1,i2.2,a1,i2.2)') 'days since ', & + cdate(1:4),'-',cdate(5:6),'-',cdate(7:8),' ', & + hh_init,':',mm_init,':',ss_init status = pio_put_att(File,varid,'units',trim(title)) if (days_per_year == 360) then @@ -224,21 +228,21 @@ subroutine ice_write_hist (ns) call abort_ice(subname//'ERROR: invalid calendar settings') endif - if (hist_avg .and. histfreq(ns) /= '1') then + if (hist_avg) then status = pio_put_att(File,varid,'bounds','time_bounds') endif ! Define attributes for time_bounds if hist_avg is true - if (hist_avg .and. histfreq(ns) /= '1') then + if (hist_avg) then dimid2(1) = boundid dimid2(2) = timid -!sgl status = pio_def_var(File,'time_bounds',pio_real,dimid2,varid) status = pio_def_var(File,'time_bounds',pio_double,dimid2,varid) status = pio_put_att(File,varid,'long_name', & 'boundaries for time-averaging interval') write(cdate,'(i8.8)') idate0 - write(title,'(a,a,a,a,a,a,a,a)') 'days since ', & - cdate(1:4),'-',cdate(5:6),'-',cdate(7:8),' 00:00:00' + write(title,'(a,a4,a1,a2,a1,a2,a1,i2.2,a1,i2.2,a1,i2.2)') 'days since ', & + cdate(1:4),'-',cdate(5:6),'-',cdate(7:8),' ', & + hh_init,':',mm_init,':',ss_init status = pio_put_att(File,varid,'units',trim(title)) endif @@ -340,13 +344,7 @@ subroutine ice_write_hist (ns) dimid2, varid) status = pio_put_att(File,varid,'long_name',trim(coord_var(i)%long_name)) status = pio_put_att(File, varid, 'units', trim(coord_var(i)%units)) - if (lprecision == pio_real) then - status = pio_put_att(File, varid, 'missing_value', spval) - status = pio_put_att(File, varid,'_FillValue',spval) - else - status = pio_put_att(File, varid, 'missing_value', spval_dbl) - status = pio_put_att(File, varid,'_FillValue',spval_dbl) - endif + call ice_write_hist_fill(File,varid,coord_var(i)%short_name,history_precision) if (coord_var(i)%short_name == 'ULAT') then status = pio_put_att(File,varid,'comment', & trim('Latitude of NE corner of T grid cell')) @@ -378,13 +376,7 @@ subroutine ice_write_hist (ns) status = pio_def_var(File, 'tmask', lprecision, dimid2, varid) status = pio_put_att(File,varid, 'long_name', 'ocean grid mask') status = pio_put_att(File, varid, 'coordinates', 'TLON TLAT') - if (lprecision == pio_real) then - status = pio_put_att(File, varid, 'missing_value', spval) - status = pio_put_att(File, varid,'_FillValue',spval) - else - status = pio_put_att(File, varid, 'missing_value', spval_dbl) - status = pio_put_att(File, varid,'_FillValue',spval_dbl) - endif + call ice_write_hist_fill(File,varid,'tmask',history_precision) status = pio_put_att(File,varid,'comment', '0 = land, 1 = ocean') endif if (igrd(n_blkmask)) then @@ -392,13 +384,7 @@ subroutine ice_write_hist (ns) status = pio_put_att(File,varid, 'long_name', 'ice grid block mask') status = pio_put_att(File, varid, 'coordinates', 'TLON TLAT') status = pio_put_att(File,varid,'comment', 'mytask + iblk/100') - if (lprecision == pio_real) then - status = pio_put_att(File, varid, 
'missing_value', spval) - status = pio_put_att(File, varid,'_FillValue',spval) - else - status = pio_put_att(File, varid, 'missing_value', spval_dbl) - status = pio_put_att(File, varid,'_FillValue',spval_dbl) - endif + call ice_write_hist_fill(File,varid,'blkmask',history_precision) endif do i = 3, nvar ! note: n_tmask=1, n_blkmask=2 @@ -408,13 +394,7 @@ subroutine ice_write_hist (ns) status = pio_put_att(File,varid, 'long_name', trim(var(i)%req%long_name)) status = pio_put_att(File, varid, 'units', trim(var(i)%req%units)) status = pio_put_att(File, varid, 'coordinates', trim(var(i)%coordinates)) - if (lprecision == pio_real) then - status = pio_put_att(File, varid, 'missing_value', spval) - status = pio_put_att(File, varid,'_FillValue',spval) - else - status = pio_put_att(File, varid, 'missing_value', spval_dbl) - status = pio_put_att(File, varid,'_FillValue',spval_dbl) - endif + call ice_write_hist_fill(File,varid,var(i)%req%short_name,history_precision) endif enddo @@ -430,13 +410,7 @@ subroutine ice_write_hist (ns) pio_put_att(File,varid, 'long_name', trim(var_nverts(i)%long_name)) status = & pio_put_att(File, varid, 'units', trim(var_nverts(i)%units)) - if (lprecision == pio_real) then - status = pio_put_att(File, varid, 'missing_value', spval) - status = pio_put_att(File, varid,'_FillValue',spval) - else - status = pio_put_att(File, varid, 'missing_value', spval_dbl) - status = pio_put_att(File, varid,'_FillValue',spval_dbl) - endif + call ice_write_hist_fill(File,varid,var_nverts(i)%short_name,history_precision) endif enddo @@ -464,16 +438,10 @@ subroutine ice_write_hist (ns) trim(avail_hist_fields(n)%vcoord)) status = pio_put_att(File,varid,'cell_measures', & trim(avail_hist_fields(n)%vcellmeas)) - if (lprecision == pio_real) then - status = pio_put_att(File, varid, 'missing_value', spval) - status = pio_put_att(File, varid,'_FillValue',spval) - else - status = pio_put_att(File, varid, 'missing_value', spval_dbl) - status = pio_put_att(File, varid,'_FillValue',spval_dbl) - endif + call ice_write_hist_fill(File,varid,avail_hist_fields(n)%vname,history_precision) ! Add cell_methods attribute to variables if averaged - if (hist_avg .and. histfreq(ns) /= '1') then + if (hist_avg) then if (TRIM(avail_hist_fields(n)%vname)/='sig1' & .or.TRIM(avail_hist_fields(n)%vname)/='sig2' & .or.TRIM(avail_hist_fields(n)%vname)/='sistreave' & @@ -483,7 +451,8 @@ subroutine ice_write_hist (ns) endif endif - if (histfreq(ns) == '1' .or. .not. hist_avg & + if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & + .or..not. hist_avg & .or. n==n_divu(ns) .or. n==n_shear(ns) & ! snapshots .or. n==n_sig1(ns) .or. n==n_sig2(ns) & .or. n==n_sigP(ns) .or. n==n_trsig(ns) & @@ -518,20 +487,15 @@ subroutine ice_write_hist (ns) trim(avail_hist_fields(n)%vcoord)) status = pio_put_att(File,varid,'cell_measures', & trim(avail_hist_fields(n)%vcellmeas)) - if (lprecision == pio_real) then - status = pio_put_att(File, varid, 'missing_value', spval) - status = pio_put_att(File, varid,'_FillValue',spval) - else - status = pio_put_att(File, varid, 'missing_value', spval_dbl) - status = pio_put_att(File, varid,'_FillValue',spval_dbl) - endif + call ice_write_hist_fill(File,varid,avail_hist_fields(n)%vname,history_precision) ! Add cell_methods attribute to variables if averaged - if (hist_avg .and. histfreq(ns) /= '1') then + if (hist_avg) then status = pio_put_att(File,varid,'cell_methods','time: mean') endif - if (histfreq(ns) == '1' .or. .not. hist_avg) then + if ((histfreq(ns) == '1' .and. 
histfreq_n(ns) == 1) & + .or..not. hist_avg) then status = pio_put_att(File,varid,'time_rep','instantaneous') else status = pio_put_att(File,varid,'time_rep','averaged') @@ -560,20 +524,15 @@ subroutine ice_write_hist (ns) trim(avail_hist_fields(n)%vcoord)) status = pio_put_att(File,varid,'cell_measures', & trim(avail_hist_fields(n)%vcellmeas)) - if (lprecision == pio_real) then - status = pio_put_att(File, varid, 'missing_value', spval) - status = pio_put_att(File, varid,'_FillValue',spval) - else - status = pio_put_att(File, varid, 'missing_value', spval_dbl) - status = pio_put_att(File, varid,'_FillValue',spval_dbl) - endif + call ice_write_hist_fill(File,varid,avail_hist_fields(n)%vname,history_precision) ! Add cell_methods attribute to variables if averaged - if (hist_avg .and. histfreq(ns) /= '1') then + if (hist_avg) then status = pio_put_att(File,varid,'cell_methods','time: mean') endif - if (histfreq(ns) == '1' .or. .not. hist_avg) then + if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & + .or..not. hist_avg) then status = pio_put_att(File,varid,'time_rep','instantaneous') else status = pio_put_att(File,varid,'time_rep','averaged') @@ -602,20 +561,15 @@ subroutine ice_write_hist (ns) trim(avail_hist_fields(n)%vcoord)) status = pio_put_att(File,varid,'cell_measures', & trim(avail_hist_fields(n)%vcellmeas)) - if (lprecision == pio_real) then - status = pio_put_att(File, varid, 'missing_value', spval) - status = pio_put_att(File, varid,'_FillValue',spval) - else - status = pio_put_att(File, varid, 'missing_value', spval_dbl) - status = pio_put_att(File, varid,'_FillValue',spval_dbl) - endif + call ice_write_hist_fill(File,varid,avail_hist_fields(n)%vname,history_precision) ! Add cell_methods attribute to variables if averaged - if (hist_avg .and. histfreq(ns) /= '1') then + if (hist_avg) then status = pio_put_att(File,varid,'cell_methods','time: mean') endif - if (histfreq(ns) == '1' .or. .not. hist_avg) then + if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & + .or..not. hist_avg) then status = pio_put_att(File,varid,'time_rep','instantaneous') else status = pio_put_att(File,varid,'time_rep','averaged') @@ -644,20 +598,15 @@ subroutine ice_write_hist (ns) trim(avail_hist_fields(n)%vcoord)) status = pio_put_att(File,varid,'cell_measures', & trim(avail_hist_fields(n)%vcellmeas)) - if (lprecision == pio_real) then - status = pio_put_att(File, varid, 'missing_value', spval) - status = pio_put_att(File, varid,'_FillValue',spval) - else - status = pio_put_att(File, varid, 'missing_value', spval_dbl) - status = pio_put_att(File, varid,'_FillValue',spval_dbl) - endif + call ice_write_hist_fill(File,varid,avail_hist_fields(n)%vname,history_precision) ! Add cell_methods attribute to variables if averaged - if (hist_avg .and. histfreq(ns) /= '1') then + if (hist_avg) then status = pio_put_att(File,varid,'cell_methods','time: mean') endif - if (histfreq(ns) == '1' .or. .not. hist_avg) then + if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & + .or..not. 
hist_avg) then status = pio_put_att(File,varid,'time_rep','instantaneous') else status = pio_put_att(File,varid,'time_rep','averaged') @@ -686,20 +635,15 @@ subroutine ice_write_hist (ns) trim(avail_hist_fields(n)%vcoord)) status = pio_put_att(File,varid,'cell_measures', & trim(avail_hist_fields(n)%vcellmeas)) - if (lprecision == pio_real) then - status = pio_put_att(File, varid, 'missing_value', spval) - status = pio_put_att(File, varid,'_FillValue',spval) - else - status = pio_put_att(File, varid, 'missing_value', spval_dbl) - status = pio_put_att(File, varid,'_FillValue',spval_dbl) - endif + call ice_write_hist_fill(File,varid,avail_hist_fields(n)%vname,history_precision) ! Add cell_methods attribute to variables if averaged - if (hist_avg .and. histfreq(ns) /= '1') then + if (hist_avg) then status = pio_put_att(File,varid,'cell_methods','time: mean') endif - if (histfreq(ns) == '1' .or. .not. hist_avg) then + if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & + .or..not. hist_avg) then status = pio_put_att(File,varid,'time_rep','instantaneous') else status = pio_put_att(File,varid,'time_rep','averaged') @@ -734,20 +678,15 @@ subroutine ice_write_hist (ns) trim(avail_hist_fields(n)%vcoord)) status = pio_put_att(File,varid,'cell_measures', & trim(avail_hist_fields(n)%vcellmeas)) - if (lprecision == pio_real) then - status = pio_put_att(File, varid, 'missing_value', spval) - status = pio_put_att(File, varid,'_FillValue',spval) - else - status = pio_put_att(File, varid, 'missing_value', spval_dbl) - status = pio_put_att(File, varid,'_FillValue',spval_dbl) - endif + call ice_write_hist_fill(File,varid,avail_hist_fields(n)%vname,history_precision) ! Add cell_methods attribute to variables if averaged - if (hist_avg .and. histfreq(ns) /= '1') then + if (hist_avg) then status = pio_put_att(File,varid,'cell_methods','time: mean') endif - if (histfreq(ns) == '1' .or. .not. hist_avg) then + if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & + .or..not. hist_avg) then status = pio_put_att(File,varid,'time_rep','instantaneous') else status = pio_put_att(File,varid,'time_rep','averaged') @@ -777,20 +716,15 @@ subroutine ice_write_hist (ns) trim(avail_hist_fields(n)%vcoord)) status = pio_put_att(File,varid,'cell_measures', & trim(avail_hist_fields(n)%vcellmeas)) - if (lprecision == pio_real) then - status = pio_put_att(File, varid, 'missing_value', spval) - status = pio_put_att(File, varid,'_FillValue',spval) - else - status = pio_put_att(File, varid, 'missing_value', spval_dbl) - status = pio_put_att(File, varid,'_FillValue',spval_dbl) - endif + call ice_write_hist_fill(File,varid,avail_hist_fields(n)%vname,history_precision) ! Add cell_methods attribute to variables if averaged - if (hist_avg .and. histfreq(ns) /= '1') then + if (hist_avg) then status = pio_put_att(File,varid,'cell_methods','time: mean') endif - if (histfreq(ns) == '1' .or. .not. hist_avg) then + if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & + .or..not. 
hist_avg) then status = pio_put_att(File,varid,'time_rep','instantaneous') else status = pio_put_att(File,varid,'time_rep','averaged') @@ -821,20 +755,15 @@ subroutine ice_write_hist (ns) trim(avail_hist_fields(n)%vcoord)) status = pio_put_att(File,varid,'cell_measures', & trim(avail_hist_fields(n)%vcellmeas)) - if (lprecision == pio_real) then - status = pio_put_att(File, varid, 'missing_value', spval) - status = pio_put_att(File, varid,'_FillValue',spval) - else - status = pio_put_att(File, varid, 'missing_value', spval_dbl) - status = pio_put_att(File, varid,'_FillValue',spval_dbl) - endif + call ice_write_hist_fill(File,varid,avail_hist_fields(n)%vname,history_precision) ! Add cell_methods attribute to variables if averaged - if (hist_avg .and. histfreq(ns) /= '1') then + if (hist_avg) then status = pio_put_att(File,varid,'cell_methods','time: mean') endif - if (histfreq(ns) == '1' .or. .not. hist_avg) then + if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & + .or..not. hist_avg) then status = pio_put_att(File,varid,'time_rep','instantaneous') else status = pio_put_att(File,varid,'time_rep','averaged') @@ -901,14 +830,13 @@ subroutine ice_write_hist (ns) !----------------------------------------------------------------- status = pio_inq_varid(File,'time',varid) -!sgl status = pio_put_var(File,varid,(/1/),ltime) status = pio_put_var(File,varid,(/1/),ltime2) !----------------------------------------------------------------- ! write time_bounds info !----------------------------------------------------------------- - if (hist_avg .and. histfreq(ns) /= '1') then + if (hist_avg) then status = pio_inq_varid(File,'time_bounds',varid) time_bounds=(/time_beg(ns),time_end(ns)/) bnd_start = (/1,1/) @@ -921,6 +849,7 @@ subroutine ice_write_hist (ns) ! write coordinate variables !----------------------------------------------------------------- + allocate(workd2(nx_block,ny_block,nblocks)) allocate(workr2(nx_block,ny_block,nblocks)) do i = 1,ncoord @@ -928,16 +857,22 @@ subroutine ice_write_hist (ns) SELECT CASE (coord_var(i)%short_name) CASE ('TLON') ! Convert T grid longitude from -180 -> 180 to 0 to 360 - workr2(:,:,:) = mod(tlon(:,:,1:nblocks)*rad_to_deg + c360, c360) + workd2(:,:,:) = mod(tlon(:,:,1:nblocks)*rad_to_deg + c360, c360) CASE ('TLAT') - workr2(:,:,:) = tlat(:,:,1:nblocks)*rad_to_deg + workd2(:,:,:) = tlat(:,:,1:nblocks)*rad_to_deg CASE ('ULON') - workr2(:,:,:) = ulon(:,:,1:nblocks)*rad_to_deg + workd2(:,:,:) = ulon(:,:,1:nblocks)*rad_to_deg CASE ('ULAT') - workr2(:,:,:) = ulat(:,:,1:nblocks)*rad_to_deg + workd2(:,:,:) = ulat(:,:,1:nblocks)*rad_to_deg END SELECT - call pio_write_darray(File, varid, iodesc2d, & - workr2, status, fillval=spval_dbl) + if (history_precision == 8) then + call pio_write_darray(File, varid, iodesc2d, & + workd2, status, fillval=spval_dbl) + else + workr2 = workd2 + call pio_write_darray(File, varid, iodesc2d, & + workr2, status, fillval=spval) + endif enddo ! 
Extra dimensions (NCAT, NFSD, VGRD*) @@ -981,33 +916,39 @@ subroutine ice_write_hist (ns) if (igrd(i)) then SELECT CASE (var(i)%req%short_name) CASE ('tmask') - workr2 = hm(:,:,1:nblocks) + workd2 = hm(:,:,1:nblocks) CASE ('blkmask') - workr2 = bm(:,:,1:nblocks) + workd2 = bm(:,:,1:nblocks) CASE ('tarea') - workr2 = tarea(:,:,1:nblocks) + workd2 = tarea(:,:,1:nblocks) CASE ('uarea') - workr2 = uarea(:,:,1:nblocks) + workd2 = uarea(:,:,1:nblocks) CASE ('dxu') - workr2 = dxu(:,:,1:nblocks) + workd2 = dxu(:,:,1:nblocks) CASE ('dyu') - workr2 = dyu(:,:,1:nblocks) + workd2 = dyu(:,:,1:nblocks) CASE ('dxt') - workr2 = dxt(:,:,1:nblocks) + workd2 = dxt(:,:,1:nblocks) CASE ('dyt') - workr2 = dyt(:,:,1:nblocks) + workd2 = dyt(:,:,1:nblocks) CASE ('HTN') - workr2 = HTN(:,:,1:nblocks) + workd2 = HTN(:,:,1:nblocks) CASE ('HTE') - workr2 = HTE(:,:,1:nblocks) + workd2 = HTE(:,:,1:nblocks) CASE ('ANGLE') - workr2 = ANGLE(:,:,1:nblocks) + workd2 = ANGLE(:,:,1:nblocks) CASE ('ANGLET') - workr2 = ANGLET(:,:,1:nblocks) + workd2 = ANGLET(:,:,1:nblocks) END SELECT status = pio_inq_varid(File, var(i)%req%short_name, varid) - call pio_write_darray(File, varid, iodesc2d, & - workr2, status, fillval=spval_dbl) + if (history_precision == 8) then + call pio_write_darray(File, varid, iodesc2d, & + workd2, status, fillval=spval_dbl) + else + workr2 = workd2 + call pio_write_darray(File, varid, iodesc2d, & + workr2, status, fillval=spval) + endif endif enddo @@ -1016,32 +957,40 @@ subroutine ice_write_hist (ns) !---------------------------------------------------------------- if (f_bounds) then + allocate(workd3v(nverts,nx_block,ny_block,nblocks)) allocate(workr3v(nverts,nx_block,ny_block,nblocks)) - workr3v (:,:,:,:) = c0 + workd3v (:,:,:,:) = c0 do i = 1, nvar_verts SELECT CASE (var_nverts(i)%short_name) CASE ('lont_bounds') do ivertex = 1, nverts - workr3v(ivertex,:,:,:) = lont_bounds(ivertex,:,:,1:nblocks) + workd3v(ivertex,:,:,:) = lont_bounds(ivertex,:,:,1:nblocks) enddo CASE ('latt_bounds') do ivertex = 1, nverts - workr3v(ivertex,:,:,:) = latt_bounds(ivertex,:,:,1:nblocks) + workd3v(ivertex,:,:,:) = latt_bounds(ivertex,:,:,1:nblocks) enddo CASE ('lonu_bounds') do ivertex = 1, nverts - workr3v(ivertex,:,:,:) = lonu_bounds(ivertex,:,:,1:nblocks) + workd3v(ivertex,:,:,:) = lonu_bounds(ivertex,:,:,1:nblocks) enddo CASE ('latu_bounds') do ivertex = 1, nverts - workr3v(ivertex,:,:,:) = latu_bounds(ivertex,:,:,1:nblocks) + workd3v(ivertex,:,:,:) = latu_bounds(ivertex,:,:,1:nblocks) enddo END SELECT status = pio_inq_varid(File, var_nverts(i)%short_name, varid) - call pio_write_darray(File, varid, iodesc3dv, & - workr3v, status, fillval=spval_dbl) + if (history_precision == 8) then + call pio_write_darray(File, varid, iodesc3dv, & + workd3v, status, fillval=spval_dbl) + else + workr3v = workd3v + call pio_write_darray(File, varid, iodesc3dv, & + workr3v, status, fillval=spval) + endif enddo + deallocate(workd3v) deallocate(workr3v) endif ! 
f_bounds @@ -1056,20 +1005,28 @@ subroutine ice_write_hist (ns) status = pio_inq_varid(File,avail_hist_fields(n)%vname,varid) if (status /= pio_noerr) call abort_ice(subname// & 'ERROR getting varid for '//avail_hist_fields(n)%vname) - workr2(:,:,:) = a2D(:,:,n,1:nblocks) + workd2(:,:,:) = a2D(:,:,n,1:nblocks) #ifdef CESM1_PIO call pio_setframe(varid, int(1,kind=PIO_OFFSET)) #else call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) #endif - call pio_write_darray(File, varid, iodesc2d,& - workr2, status, fillval=spval_dbl) + if (history_precision == 8) then + call pio_write_darray(File, varid, iodesc2d,& + workd2, status, fillval=spval_dbl) + else + workr2 = workd2 + call pio_write_darray(File, varid, iodesc2d,& + workr2, status, fillval=spval) + endif endif enddo ! num_avail_hist_fields_2D + deallocate(workd2) deallocate(workr2) ! 3D (category) + allocate(workd3(nx_block,ny_block,nblocks,ncat_hist)) allocate(workr3(nx_block,ny_block,nblocks,ncat_hist)) do n = n2D + 1, n3Dccum nn = n - n2D @@ -1079,7 +1036,7 @@ subroutine ice_write_hist (ns) 'ERROR: getting varid for '//avail_hist_fields(n)%vname) do j = 1, nblocks do i = 1, ncat_hist - workr3(:,:,j,i) = a3Dc(:,:,i,nn,j) + workd3(:,:,j,i) = a3Dc(:,:,i,nn,j) enddo enddo #ifdef CESM1_PIO @@ -1087,13 +1044,21 @@ subroutine ice_write_hist (ns) #else call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) #endif - call pio_write_darray(File, varid, iodesc3dc,& - workr3, status, fillval=spval_dbl) + if (history_precision == 8) then + call pio_write_darray(File, varid, iodesc3dc,& + workd3, status, fillval=spval_dbl) + else + workr3 = workd3 + call pio_write_darray(File, varid, iodesc3dc,& + workr3, status, fillval=spval) + endif endif enddo ! num_avail_hist_fields_3Dc + deallocate(workd3) deallocate(workr3) ! 3D (vertical ice) + allocate(workd3(nx_block,ny_block,nblocks,nzilyr)) allocate(workr3(nx_block,ny_block,nblocks,nzilyr)) do n = n3Dccum+1, n3Dzcum nn = n - n3Dccum @@ -1103,7 +1068,7 @@ subroutine ice_write_hist (ns) 'ERROR: getting varid for '//avail_hist_fields(n)%vname) do j = 1, nblocks do i = 1, nzilyr - workr3(:,:,j,i) = a3Dz(:,:,i,nn,j) + workd3(:,:,j,i) = a3Dz(:,:,i,nn,j) enddo enddo #ifdef CESM1_PIO @@ -1111,13 +1076,21 @@ subroutine ice_write_hist (ns) #else call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) #endif - call pio_write_darray(File, varid, iodesc3di,& - workr3, status, fillval=spval_dbl) + if (history_precision == 8) then + call pio_write_darray(File, varid, iodesc3di,& + workd3, status, fillval=spval_dbl) + else + workr3 = workd3 + call pio_write_darray(File, varid, iodesc3di,& + workr3, status, fillval=spval) + endif endif enddo ! num_avail_hist_fields_3Dz + deallocate(workd3) deallocate(workr3) ! 
3D (vertical ice biology) + allocate(workd3(nx_block,ny_block,nblocks,nzblyr)) allocate(workr3(nx_block,ny_block,nblocks,nzblyr)) do n = n3Dzcum+1, n3Dbcum nn = n - n3Dzcum @@ -1127,7 +1100,7 @@ subroutine ice_write_hist (ns) 'ERROR: getting varid for '//avail_hist_fields(n)%vname) do j = 1, nblocks do i = 1, nzblyr - workr3(:,:,j,i) = a3Db(:,:,i,nn,j) + workd3(:,:,j,i) = a3Db(:,:,i,nn,j) enddo enddo #ifdef CESM1_PIO @@ -1135,13 +1108,21 @@ subroutine ice_write_hist (ns) #else call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) #endif - call pio_write_darray(File, varid, iodesc3db,& - workr3, status, fillval=spval_dbl) + if (history_precision == 8) then + call pio_write_darray(File, varid, iodesc3db,& + workd3, status, fillval=spval_dbl) + else + workr3 = workd3 + call pio_write_darray(File, varid, iodesc3db,& + workr3, status, fillval=spval) + endif endif enddo ! num_avail_hist_fields_3Db + deallocate(workd3) deallocate(workr3) ! 3D (vertical snow biology) + allocate(workd3(nx_block,ny_block,nblocks,nzalyr)) allocate(workr3(nx_block,ny_block,nblocks,nzalyr)) do n = n3Dbcum+1, n3Dacum nn = n - n3Dbcum @@ -1151,7 +1132,7 @@ subroutine ice_write_hist (ns) 'ERROR: getting varid for '//avail_hist_fields(n)%vname) do j = 1, nblocks do i = 1, nzalyr - workr3(:,:,j,i) = a3Da(:,:,i,nn,j) + workd3(:,:,j,i) = a3Da(:,:,i,nn,j) enddo enddo #ifdef CESM1_PIO @@ -1159,13 +1140,21 @@ subroutine ice_write_hist (ns) #else call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) #endif - call pio_write_darray(File, varid, iodesc3da,& - workr3, status, fillval=spval_dbl) + if (history_precision == 8) then + call pio_write_darray(File, varid, iodesc3da,& + workd3, status, fillval=spval_dbl) + else + workr3 = workd3 + call pio_write_darray(File, varid, iodesc3da,& + workr3, status, fillval=spval) + endif endif enddo ! num_avail_hist_fields_3Db + deallocate(workd3) deallocate(workr3) ! 3D (fsd) + allocate(workd3(nx_block,ny_block,nblocks,nfsd_hist)) allocate(workr3(nx_block,ny_block,nblocks,nfsd_hist)) do n = n3Dacum+1, n3Dfcum nn = n - n3Dacum @@ -1175,7 +1164,7 @@ subroutine ice_write_hist (ns) 'ERROR: getting varid for '//avail_hist_fields(n)%vname) do j = 1, nblocks do i = 1, nfsd_hist - workr3(:,:,j,i) = a3Df(:,:,i,nn,j) + workd3(:,:,j,i) = a3Df(:,:,i,nn,j) enddo enddo #ifdef CESM1_PIO @@ -1183,12 +1172,20 @@ subroutine ice_write_hist (ns) #else call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) #endif - call pio_write_darray(File, varid, iodesc3df,& - workr3, status, fillval=spval_dbl) + if (history_precision == 8) then + call pio_write_darray(File, varid, iodesc3df,& + workd3, status, fillval=spval_dbl) + else + workr3 = workd3 + call pio_write_darray(File, varid, iodesc3df,& + workr3, status, fillval=spval) + endif endif enddo ! num_avail_hist_fields_3Df + deallocate(workd3) deallocate(workr3) + allocate(workd4(nx_block,ny_block,nblocks,ncat_hist,nzilyr)) allocate(workr4(nx_block,ny_block,nblocks,ncat_hist,nzilyr)) ! 4D (categories, fsd) do n = n3Dfcum+1, n4Dicum @@ -1200,7 +1197,7 @@ subroutine ice_write_hist (ns) do j = 1, nblocks do i = 1, ncat_hist do k = 1, nzilyr - workr4(:,:,j,i,k) = a4Di(:,:,k,i,nn,j) + workd4(:,:,j,i,k) = a4Di(:,:,k,i,nn,j) enddo ! k enddo ! i enddo ! 
j @@ -1209,12 +1206,20 @@ subroutine ice_write_hist (ns) #else call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) #endif - call pio_write_darray(File, varid, iodesc4di,& - workr4, status, fillval=spval_dbl) + if (history_precision == 8) then + call pio_write_darray(File, varid, iodesc4di,& + workd4, status, fillval=spval_dbl) + else + workr4 = workd4 + call pio_write_darray(File, varid, iodesc4di,& + workr4, status, fillval=spval) + endif endif enddo ! num_avail_hist_fields_4Di + deallocate(workd4) deallocate(workr4) + allocate(workd4(nx_block,ny_block,nblocks,ncat_hist,nzslyr)) allocate(workr4(nx_block,ny_block,nblocks,ncat_hist,nzslyr)) ! 4D (categories, vertical ice) do n = n4Dicum+1, n4Dscum @@ -1226,7 +1231,7 @@ subroutine ice_write_hist (ns) do j = 1, nblocks do i = 1, ncat_hist do k = 1, nzslyr - workr4(:,:,j,i,k) = a4Ds(:,:,k,i,nn,j) + workd4(:,:,j,i,k) = a4Ds(:,:,k,i,nn,j) enddo ! k enddo ! i enddo ! j @@ -1235,12 +1240,20 @@ subroutine ice_write_hist (ns) #else call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) #endif - call pio_write_darray(File, varid, iodesc4ds,& - workr4, status, fillval=spval_dbl) + if (history_precision == 8) then + call pio_write_darray(File, varid, iodesc4ds,& + workd4, status, fillval=spval_dbl) + else + workr4 = workd4 + call pio_write_darray(File, varid, iodesc4ds,& + workr4, status, fillval=spval) + endif endif enddo ! num_avail_hist_fields_4Ds + deallocate(workd4) deallocate(workr4) + allocate(workd4(nx_block,ny_block,nblocks,ncat_hist,nfsd_hist)) allocate(workr4(nx_block,ny_block,nblocks,ncat_hist,nfsd_hist)) ! 4D (categories, vertical ice) do n = n4Dscum+1, n4Dfcum @@ -1252,7 +1265,7 @@ subroutine ice_write_hist (ns) do j = 1, nblocks do i = 1, ncat_hist do k = 1, nfsd_hist - workr4(:,:,j,i,k) = a4Df(:,:,k,i,nn,j) + workd4(:,:,j,i,k) = a4Df(:,:,k,i,nn,j) enddo ! k enddo ! i enddo ! j @@ -1261,13 +1274,20 @@ subroutine ice_write_hist (ns) #else call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) #endif - call pio_write_darray(File, varid, iodesc4df,& - workr4, status, fillval=spval_dbl) + if (history_precision == 8) then + call pio_write_darray(File, varid, iodesc4df,& + workd4, status, fillval=spval_dbl) + else + workr4 = workd4 + call pio_write_darray(File, varid, iodesc4df,& + workr4, status, fillval=spval) + endif endif enddo ! num_avail_hist_fields_4Df + deallocate(workd4) deallocate(workr4) -! similarly for num_avail_hist_fields_4Db (define workr4b, iodesc4db) +! similarly for num_avail_hist_fields_4Db (define workd4b, iodesc4db) !----------------------------------------------------------------- @@ -1297,6 +1317,34 @@ subroutine ice_write_hist (ns) end subroutine ice_write_hist +!======================================================================= + + subroutine ice_write_hist_fill(File,varid,vname,precision) + + use ice_kinds_mod + use ice_pio + use pio + + type(file_desc_t) , intent(inout) :: File + type(var_desc_t) , intent(in) :: varid + character(len=*), intent(in) :: vname ! var name + integer (kind=int_kind), intent(in) :: precision ! precision + + ! 
local variables + + integer (kind=int_kind) :: status + character(len=*), parameter :: subname = '(ice_write_hist_fill)' + + if (precision == 8) then + status = pio_put_att(File, varid, 'missing_value', spval_dbl) + status = pio_put_att(File, varid,'_FillValue',spval_dbl) + else + status = pio_put_att(File, varid, 'missing_value', spval) + status = pio_put_att(File, varid,'_FillValue',spval) + endif + + end subroutine ice_write_hist_fill + !======================================================================= end module ice_history_write diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_pio.F90 b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_pio.F90 index 9c65b2ce1..d4149f7bf 100644 --- a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_pio.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_pio.F90 @@ -197,9 +197,10 @@ end subroutine ice_pio_init !================================================================================ - subroutine ice_pio_initdecomp_2d(iodesc) + subroutine ice_pio_initdecomp_2d(iodesc, precision) type(io_desc_t), intent(out) :: iodesc + integer(kind=int_kind), optional, intent(in) :: precision integer (kind=int_kind) :: & iblk,ilo,ihi,jlo,jhi,lon,lat,i,j,n,k @@ -207,8 +208,12 @@ subroutine ice_pio_initdecomp_2d(iodesc) type(block) :: this_block integer(kind=int_kind), pointer :: dof2d(:) + integer(kind=int_kind) :: lprecision character(len=*), parameter :: subname = '(ice_pio_initdecomp_2d)' + lprecision = 8 + if (present(precision)) lprecision = precision + allocate(dof2d(nx_block*ny_block*nblocks)) n=0 @@ -235,8 +240,13 @@ subroutine ice_pio_initdecomp_2d(iodesc) enddo !j end do - call pio_initdecomp(ice_pio_subsystem, pio_double, (/nx_global,ny_global/), & - dof2d, iodesc) + if (lprecision == 8) then + call pio_initdecomp(ice_pio_subsystem, pio_double, (/nx_global,ny_global/), & + dof2d, iodesc) + else + call pio_initdecomp(ice_pio_subsystem, pio_real, (/nx_global,ny_global/), & + dof2d, iodesc) + endif deallocate(dof2d) @@ -244,19 +254,24 @@ end subroutine ice_pio_initdecomp_2d !================================================================================ - subroutine ice_pio_initdecomp_3d (ndim3, iodesc, remap) + subroutine ice_pio_initdecomp_3d (ndim3, iodesc, remap, precision) integer(kind=int_kind), intent(in) :: ndim3 type(io_desc_t), intent(out) :: iodesc logical, optional :: remap + integer(kind=int_kind), optional, intent(in) :: precision integer (kind=int_kind) :: & iblk,ilo,ihi,jlo,jhi,lon,lat,i,j,n,k type(block) :: this_block logical :: lremap integer(kind=int_kind), pointer :: dof3d(:) + integer(kind=int_kind) :: lprecision character(len=*), parameter :: subname = '(ice_pio_initdecomp_2d)' + lprecision = 8 + if (present(precision)) lprecision = precision + allocate(dof3d(nx_block*ny_block*nblocks*ndim3)) lremap=.false. 
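       ! Both optional arguments use the same default-then-override pattern:
       ! lprecision and lremap start at safe defaults (double precision, no
       ! remapping) and change only when the caller actually passes the
       ! argument, as in the history writer's call
       !   call ice_pio_initdecomp(ndim3=ncat_hist, iodesc=iodesc3dc, &
       !                           precision=history_precision)
       ! which selects the precision but leaves the remap default in place.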
if (present(remap)) lremap=remap @@ -313,8 +328,13 @@ subroutine ice_pio_initdecomp_3d (ndim3, iodesc, remap) enddo !ndim3 endif - call pio_initdecomp(ice_pio_subsystem, pio_double, (/nx_global,ny_global,ndim3/), & - dof3d, iodesc) + if (lprecision == 8) then + call pio_initdecomp(ice_pio_subsystem, pio_double, (/nx_global,ny_global,ndim3/), & + dof3d, iodesc) + else + call pio_initdecomp(ice_pio_subsystem, pio_real, (/nx_global,ny_global,ndim3/), & + dof3d, iodesc) + endif deallocate(dof3d) @@ -322,11 +342,12 @@ end subroutine ice_pio_initdecomp_3d !================================================================================ - subroutine ice_pio_initdecomp_3d_inner(ndim3, inner_dim, iodesc) + subroutine ice_pio_initdecomp_3d_inner(ndim3, inner_dim, iodesc, precision) integer(kind=int_kind), intent(in) :: ndim3 logical, intent(in) :: inner_dim type(io_desc_t), intent(out) :: iodesc + integer(kind=int_kind), optional, intent(in) :: precision integer (kind=int_kind) :: & iblk,ilo,ihi,jlo,jhi,lon,lat,i,j,n,k @@ -334,9 +355,12 @@ subroutine ice_pio_initdecomp_3d_inner(ndim3, inner_dim, iodesc) type(block) :: this_block integer(kind=int_kind), pointer :: dof3d(:) - + integer(kind=int_kind) :: lprecision character(len=*), parameter :: subname = '(ice_pio_initdecomp_3d_inner)' + lprecision = 8 + if (present(precision)) lprecision = precision + allocate(dof3d(nx_block*ny_block*nblocks*ndim3)) n=0 @@ -365,8 +389,13 @@ subroutine ice_pio_initdecomp_3d_inner(ndim3, inner_dim, iodesc) enddo !j end do !iblk - call pio_initdecomp(ice_pio_subsystem, pio_double, (/ndim3,nx_global,ny_global/), & - dof3d, iodesc) + if (lprecision == 8) then + call pio_initdecomp(ice_pio_subsystem, pio_double, (/ndim3,nx_global,ny_global/), & + dof3d, iodesc) + else + call pio_initdecomp(ice_pio_subsystem, pio_real, (/ndim3,nx_global,ny_global/), & + dof3d, iodesc) + endif deallocate(dof3d) @@ -374,10 +403,11 @@ end subroutine ice_pio_initdecomp_3d_inner !================================================================================ - subroutine ice_pio_initdecomp_4d (ndim3, ndim4, iodesc) + subroutine ice_pio_initdecomp_4d (ndim3, ndim4, iodesc, precision) integer(kind=int_kind), intent(in) :: ndim3, ndim4 type(io_desc_t), intent(out) :: iodesc + integer(kind=int_kind), optional, intent(in) :: precision integer (kind=int_kind) :: & iblk,ilo,ihi,jlo,jhi,lon,lat,i,j,n,k,l @@ -385,9 +415,12 @@ subroutine ice_pio_initdecomp_4d (ndim3, ndim4, iodesc) type(block) :: this_block integer(kind=int_kind), pointer :: dof4d(:) - + integer(kind=int_kind) :: lprecision character(len=*), parameter :: subname = '(ice_pio_initdecomp_4d)' + lprecision = 8 + if (present(precision)) lprecision = precision + allocate(dof4d(nx_block*ny_block*nblocks*ndim3*ndim4)) n=0 @@ -420,8 +453,13 @@ subroutine ice_pio_initdecomp_4d (ndim3, ndim4, iodesc) enddo !ndim3 enddo !ndim4 - call pio_initdecomp(ice_pio_subsystem, pio_double, & - (/nx_global,ny_global,ndim3,ndim4/), dof4d, iodesc) + if (lprecision == 8) then + call pio_initdecomp(ice_pio_subsystem, pio_double, & + (/nx_global,ny_global,ndim3,ndim4/), dof4d, iodesc) + else + call pio_initdecomp(ice_pio_subsystem, pio_real, & + (/nx_global,ny_global,ndim3,ndim4/), dof4d, iodesc) + endif deallocate(dof4d) diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 index 12d5d8e71..0ec6b7628 100644 --- a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 +++ 
b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 @@ -83,8 +83,8 @@ subroutine init_restart_read(ice_ic) File%fh=-1 call ice_pio_init(mode='read', filename=trim(filename), File=File, iotype=iotype) - call ice_pio_initdecomp(iodesc=iodesc2d) - call ice_pio_initdecomp(ndim3=ncat , iodesc=iodesc3d_ncat,remap=.true.) + call ice_pio_initdecomp(iodesc=iodesc2d, precision=8) + call ice_pio_initdecomp(ndim3=ncat , iodesc=iodesc3d_ncat,remap=.true., precision=8) if (use_restart_time) then status1 = PIO_noerr @@ -151,7 +151,7 @@ subroutine init_restart_write(filename_spec) logical (kind=log_kind) :: & tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_pond_cesm, & - tr_pond_topo, tr_pond_lvl, tr_brine, & + tr_pond_topo, tr_pond_lvl, tr_brine, tr_snow, & tr_bgc_N, tr_bgc_C, tr_bgc_Nit, & tr_bgc_Sil, tr_bgc_DMS, & tr_bgc_chl, tr_bgc_Am, & @@ -187,7 +187,8 @@ subroutine init_restart_write(filename_spec) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_lvl_out=tr_lvl, & tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & - tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, tr_brine_out=tr_brine, & + tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, & + tr_snow_out=tr_snow, tr_brine_out=tr_brine, & tr_bgc_N_out=tr_bgc_N, tr_bgc_C_out=tr_bgc_C, tr_bgc_Nit_out=tr_bgc_Nit, & tr_bgc_Sil_out=tr_bgc_Sil, tr_bgc_DMS_out=tr_bgc_DMS, & tr_bgc_chl_out=tr_bgc_chl, tr_bgc_Am_out=tr_bgc_Am, & @@ -483,6 +484,16 @@ subroutine init_restart_write(filename_spec) call define_rest_field(File,'qsno'//trim(nchar),dims) enddo + if (tr_snow) then + do k=1,nslyr + write(nchar,'(i3.3)') k + call define_rest_field(File,'smice'//trim(nchar),dims) + call define_rest_field(File,'smliq'//trim(nchar),dims) + call define_rest_field(File, 'rhos'//trim(nchar),dims) + call define_rest_field(File, 'rsnw'//trim(nchar),dims) + enddo + endif + if (tr_fsd) then do k=1,nfsd write(nchar,'(i3.3)') k @@ -638,8 +649,8 @@ subroutine init_restart_write(filename_spec) deallocate(dims) status = pio_enddef(File) - call ice_pio_initdecomp(iodesc=iodesc2d) - call ice_pio_initdecomp(ndim3=ncat , iodesc=iodesc3d_ncat, remap=.true.) + call ice_pio_initdecomp(iodesc=iodesc2d, precision=8) + call ice_pio_initdecomp(ndim3=ncat , iodesc=iodesc3d_ncat, remap=.true., precision=8) ! endif ! 
restart_format diff --git a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 index 62ff2727d..f8627d690 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 @@ -582,7 +582,7 @@ subroutine ice_import( importState, rc ) rhoa(i,j,iblk) = inst_pres_height_lowest / & (287.058_ESMF_KIND_R8*(1._ESMF_KIND_R8+0.608_ESMF_KIND_R8*Qa(i,j,iblk))*Tair(i,j,iblk)) else - rhoa(i,j,iblk) = 0._ESMF_KIND_R8 + rhoa(i,j,iblk) = 1.2_ESMF_KIND_R8 endif end do !i end do !j diff --git a/cicecore/drivers/standalone/cice/CICE_InitMod.F90 b/cicecore/drivers/standalone/cice/CICE_InitMod.F90 index 60f71fa8a..363025b9b 100644 --- a/cicecore/drivers/standalone/cice/CICE_InitMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_InitMod.F90 @@ -18,6 +18,7 @@ module CICE_InitMod use icepack_intfc, only: icepack_aggregate use icepack_intfc, only: icepack_init_itd, icepack_init_itd_hist use icepack_intfc, only: icepack_init_fsd_bounds, icepack_init_wave + use icepack_intfc, only: icepack_init_snow use icepack_intfc, only: icepack_configure use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc, only: icepack_query_parameters, icepack_query_tracer_flags, & @@ -76,7 +77,7 @@ subroutine cice_init use ice_flux, only: init_coupler_flux, init_history_therm, & init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & - get_forcing_atmo, get_forcing_ocn, get_wave_spec + get_forcing_atmo, get_forcing_ocn, get_wave_spec, init_snowtable use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & faero_default, faero_optics, alloc_forcing_bgc, fiso_default use ice_grid, only: init_grid1, init_grid2, alloc_grid @@ -90,7 +91,8 @@ subroutine cice_init use ice_transport_driver, only: init_transport logical(kind=log_kind) :: tr_aero, tr_zaero, skl_bgc, z_tracers, & - tr_iso, tr_fsd, wave_spec + tr_iso, tr_fsd, wave_spec, tr_snow + character(len=char_len) :: snw_aging_table character(len=*), parameter :: subname = '(cice_init)' call init_communicate ! initial setup for message passing @@ -162,7 +164,7 @@ subroutine cice_init call ice_HaloRestore_init ! restored boundary conditions call icepack_query_parameters(skl_bgc_out=skl_bgc, z_tracers_out=z_tracers, & - wave_spec_out=wave_spec) + wave_spec_out=wave_spec, snw_aging_table_out=snw_aging_table) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(trim(subname), & file=__FILE__,line= __LINE__) @@ -176,7 +178,7 @@ subroutine cice_init call calc_timesteps ! update timestep counter if not using npt_unit="1" call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_zaero_out=tr_zaero) - call icepack_query_tracer_flags(tr_iso_out=tr_iso) + call icepack_query_tracer_flags(tr_iso_out=tr_iso, tr_snow_out=tr_snow) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(trim(subname), & file=__FILE__,line= __LINE__) @@ -207,8 +209,20 @@ subroutine cice_init call get_forcing_atmo ! atmospheric forcing from data call get_forcing_ocn(dt) ! ocean forcing from data + ! snow aging lookup table initialization + if (tr_snow) then ! advanced snow physics + call icepack_init_snow() + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + if (snw_aging_table(1:4) /= 'test') then + call init_snowtable() + endif + endif + ! 
isotopes if (tr_iso) call fiso_default ! default values + ! aerosols ! if (tr_aero) call faero_data ! data file ! if (tr_zaero) call fzaero_data ! data file (gx1) @@ -235,12 +249,12 @@ subroutine init_restart use ice_calendar, only: calendar use ice_constants, only: c0 use ice_domain, only: nblocks - use ice_domain_size, only: ncat, n_iso, n_aero, nfsd + use ice_domain_size, only: ncat, n_iso, n_aero, nfsd, nslyr use ice_dyn_eap, only: read_restart_eap use ice_dyn_shared, only: kdyn use ice_grid, only: tmask use ice_init, only: ice_ic - use ice_init_column, only: init_age, init_FY, init_lvl, & + use ice_init_column, only: init_age, init_FY, init_lvl, init_snowtracers, & init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & init_isotope, init_aerosol, init_hbrine, init_bgc, init_fsd use ice_restart_column, only: restart_age, read_restart_age, & @@ -248,6 +262,7 @@ subroutine init_restart restart_pond_cesm, read_restart_pond_cesm, & restart_pond_lvl, read_restart_pond_lvl, & restart_pond_topo, read_restart_pond_topo, & + restart_snow, read_restart_snow, & restart_fsd, read_restart_fsd, & restart_iso, read_restart_iso, & restart_aero, read_restart_aero, & @@ -262,12 +277,13 @@ subroutine init_restart iblk ! block index logical(kind=log_kind) :: & tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & - tr_pond_topo, tr_fsd, tr_iso, tr_aero, tr_brine, & + tr_pond_topo, tr_snow, tr_fsd, tr_iso, tr_aero, tr_brine, & skl_bgc, z_tracers, solve_zsal integer(kind=int_kind) :: & ntrcr integer(kind=int_kind) :: & nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, & + nt_smice, nt_smliq, nt_rhos, nt_rsnw, & nt_iage, nt_FY, nt_aero, nt_fsd, nt_isosno, nt_isoice character(len=*), parameter :: subname = '(init_restart)' @@ -282,10 +298,12 @@ subroutine init_restart call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & - tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) + tr_snow_out=tr_snow, tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, nt_ipnd_out=nt_ipnd, & nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_aero_out=nt_aero, nt_fsd_out=nt_fsd, & + nt_smice_out=nt_smice, nt_smliq_out=nt_smliq, & + nt_rhos_out=nt_rhos, nt_rsnw_out=nt_rsnw, & nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -382,6 +400,22 @@ subroutine init_restart enddo ! iblk endif ! .not. restart_pond endif + + ! snow redistribution/metamorphism + if (tr_snow) then + if (trim(runtype) == 'continue') restart_snow = .true. + if (restart_snow) then + call read_restart_snow + else + do iblk = 1, nblocks + call init_snowtracers(trcrn(:,:,nt_smice:nt_smice+nslyr-1,:,iblk), & + trcrn(:,:,nt_smliq:nt_smliq+nslyr-1,:,iblk), & + trcrn(:,:,nt_rhos :nt_rhos +nslyr-1,:,iblk), & + trcrn(:,:,nt_rsnw :nt_rsnw +nslyr-1,:,iblk)) + enddo ! iblk + endif + endif + ! floe size distribution if (tr_fsd) then if (trim(runtype) == 'continue') restart_fsd = .true. @@ -398,7 +432,7 @@ subroutine init_restart if (restart_iso) then call read_restart_iso else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_isotope(trcrn(:,:,nt_isosno:nt_isosno+n_iso-1,:,iblk), & trcrn(:,:,nt_isoice:nt_isoice+n_iso-1,:,iblk)) enddo ! 
iblk diff --git a/cicecore/drivers/standalone/cice/CICE_RunMod.F90 b/cicecore/drivers/standalone/cice/CICE_RunMod.F90 index 08059435f..0fde18e04 100644 --- a/cicecore/drivers/standalone/cice/CICE_RunMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_RunMod.F90 @@ -151,12 +151,13 @@ subroutine ice_step use ice_restart_column, only: write_restart_age, write_restart_FY, & write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & write_restart_pond_topo, write_restart_aero, write_restart_fsd, & - write_restart_iso, write_restart_bgc, write_restart_hbrine + write_restart_iso, write_restart_bgc, write_restart_hbrine, & + write_restart_snow use ice_restart_driver, only: dumpfile use ice_restoring, only: restore_ice, ice_HaloRestore use ice_step_mod, only: prep_radiation, step_therm1, step_therm2, & update_state, step_dyn_horiz, step_dyn_ridge, step_radiation, & - biogeochemistry, save_init, step_dyn_wave + biogeochemistry, save_init, step_dyn_wave, step_snow use ice_timers, only: ice_timer_start, ice_timer_stop, & timer_diags, timer_column, timer_thermo, timer_bound, & timer_hist, timer_readwrite @@ -170,7 +171,7 @@ subroutine ice_step offset ! d(age)/dt time offset logical (kind=log_kind) :: & - tr_iage, tr_FY, tr_lvl, tr_fsd, & + tr_iage, tr_FY, tr_lvl, tr_fsd, tr_snow, & tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec @@ -191,7 +192,7 @@ subroutine ice_step call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & - tr_iso_out=tr_iso, tr_fsd_out=tr_fsd) + tr_iso_out=tr_iso, tr_fsd_out=tr_fsd, tr_snow_out=tr_snow) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -317,17 +318,28 @@ subroutine ice_step enddo endif + call ice_timer_start(timer_column) ! column physics + call ice_timer_start(timer_thermo) ! thermodynamics + !----------------------------------------------------------------- - ! albedo, shortwave radiation + ! snow redistribution and metamorphosis !----------------------------------------------------------------- - call ice_timer_start(timer_column) ! column physics - call ice_timer_start(timer_thermo) ! thermodynamics + if (tr_snow) then ! advanced snow physics + do iblk = 1, nblocks + call step_snow (dt, iblk) + enddo + call update_state (dt) ! clean up + endif !MHRI: CHECK THIS OMP !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks + !----------------------------------------------------------------- + ! 
albedo, shortwave radiation + !----------------------------------------------------------------- + if (ktherm >= 0) call step_radiation (dt, iblk) if (debug_model) then @@ -383,6 +395,7 @@ subroutine ice_step if (tr_pond_cesm) call write_restart_pond_cesm if (tr_pond_lvl) call write_restart_pond_lvl if (tr_pond_topo) call write_restart_pond_topo + if (tr_snow) call write_restart_snow if (tr_fsd) call write_restart_fsd if (tr_iso) call write_restart_iso if (tr_aero) call write_restart_aero diff --git a/cicecore/drivers/unittest/bcstchk/bcstchk.F90 b/cicecore/drivers/unittest/bcstchk/bcstchk.F90 index e5cadc805..264931780 100644 --- a/cicecore/drivers/unittest/bcstchk/bcstchk.F90 +++ b/cicecore/drivers/unittest/bcstchk/bcstchk.F90 @@ -51,7 +51,7 @@ program bcstchk write(6,*) ' ' write(6,*) '==========================================================' write(6,*) ' ' - write(6,*) 'Running BCSTCHK' + write(6,*) 'RunningUnitTest BCSTCHK' write(6,*) ' ' write(6,*) ' npes = ',npes write(6,*) ' my_task = ',my_task @@ -257,36 +257,16 @@ program bcstchk write(6,*) errorflag1(k),stringflag1(k) enddo write(6,*) ' ' + write(6,*) 'BCSTCHK COMPLETED SUCCESSFULLY' if (errorflag0 == passflag) then - write(6,*) 'BCSTCHK COMPLETED SUCCESSFULLY' + write(6,*) 'BCSTCHK TEST COMPLETED SUCCESSFULLY' else - write(6,*) 'BCSTCHK FAILED' + write(6,*) 'BCSTCHK TEST FAILED' endif endif - ! Test abort_ice, regardless of test outcome - ! Set doabort to false to support code coverage stats, aborted runs don't seem to generate - ! gcov statistics - - call flush_fileunit(6) - call ice_barrier() - if (my_task == master_task) then - write(6,*) ' ' - write(6,*) '==========================================================' - write(6,*) ' ' - write(6,*) 'NOTE: We are testing the abort now so you should see an abort to follow' - write(6,*) 'The BCSTCHK passed, so please ignore the abort' - write(6,*) ' ' - call abort_ice(subname//' Test abort ',file=__FILE__,line=__LINE__, doabort=.false.) - endif - call flush_fileunit(6) - call ice_barrier() - - if (my_task == master_task) then - write(6,*) ' ' - write(6,*) 'BCSTCHK done' - write(6,*) ' ' - endif + ! --------------------------- + ! exit gracefully call end_run() diff --git a/cicecore/drivers/unittest/calchk/calchk.F90 b/cicecore/drivers/unittest/calchk/calchk.F90 index 09a297f1f..d669dbad3 100644 --- a/cicecore/drivers/unittest/calchk/calchk.F90 +++ b/cicecore/drivers/unittest/calchk/calchk.F90 @@ -15,13 +15,14 @@ program calchk use ice_calendar, only: init_calendar, calendar use ice_calendar, only: set_date_from_timesecs use ice_calendar, only: calendar_date2time, calendar_time2date - use ice_calendar, only: compute_calendar_data + use ice_calendar, only: compute_calendar_data, calendar_sec2hms implicit none integer(kind=int_kind) :: yearmax integer(kind=int_kind) :: nday,nptc integer(kind=int_kind) :: n,m,ny,nm,nd,nf1,nf2,xadd,nfa,nfb,nfc,ns1,ns2 integer(kind=int_kind) :: yi,mi,di,si + integer(kind=int_kind) :: hh,mm,ss integer(kind=int_kind) :: dyear,dmon,dday,dsec integer(kind=int_kind) :: fyear,fmon,fday,fsec character(len=32) :: calstr,unitstr,signstr @@ -29,7 +30,7 @@ program calchk integer (kind=int_kind) :: tdaycal(months_per_year+1) ! day count per month integer (kind=int_kind) :: tdayyr ! 
days in year - integer(kind=int_kind), parameter :: ntests = 8 + integer(kind=int_kind), parameter :: ntests = 9 character(len=8) :: errorflag0,errorflag(1:ntests),errorflagtmp character(len=32) :: testname(ntests) integer(kind=int_kind) :: yearv(ntests),monv(ntests),dayv(ntests),secv(ntests),ndayv(ntests) ! computed values @@ -40,7 +41,7 @@ program calchk failflag = 'FAIL' write(6,*) ' ' - write(6,*) 'Running CALCHK' + write(6,*) 'RunningUnitTest CALCHK' write(6,*) ' ' errorflag0 = passflag @@ -54,6 +55,7 @@ program calchk testname(6) = 'small add/sub update_date' testname(7) = 'special checks' testname(8) = 'calc_timesteps' + testname(9) = 'seconds_to_hms' ! test yearmax years from year 0 ! yearmax = 1000 @@ -561,6 +563,26 @@ program calchk endif enddo + !------------------------- + ! calc hms + !------------------------- + + write(6,*) ' ' + do ns1 = 0,86399 + call calendar_sec2hms(ns1,hh,mm,ss) + if (ns1 < 10 .or. ns1 > 86390 .or. (ns1 > 7195 .and. ns1 < 7205)) then + write(6,'(a,i8,2x,i2.2,a,i2.2,a,i2.2)') ' CHECK9 ',ns1,hh,':',mm,':',ss + endif + enddo + monc(9) = 23 ! hh correct result for 86399 + dayc(9) = 59 ! mm correct result for 86399 + secc(9) = 59 ! ss correct result for 86399 + if (hh /= monc(9) .or. mm /= dayc(9) .or. ss /= secc(9)) then + errorflag(9) = failflag + write(6,*) 'ERROR9: hms expected',ns1,monc(9),dayc(9),secc(9) + write(6,*) 'ERROR9: hms error ',ns1,hh,mm,ss + endif + !------------------------- ! write test results !------------------------- @@ -579,10 +601,11 @@ program calchk 1002 format(a,i10,1x,a) write(6,*) ' ' + write(6,*) 'CALCHK COMPLETED SUCCESSFULLY' if (errorflag0 == passflag) then - write(6,*) 'CALCHK COMPLETED SUCCESSFULLY' + write(6,*) 'CALCHK TEST COMPLETED SUCCESSFULLY' else - write(6,*) 'CALCHK FAILED' + write(6,*) 'CALCHK TEST FAILED' endif end program diff --git a/cicecore/drivers/unittest/helloworld/helloworld.F90 b/cicecore/drivers/unittest/helloworld/helloworld.F90 index 651436bea..c4e4ae91f 100644 --- a/cicecore/drivers/unittest/helloworld/helloworld.F90 +++ b/cicecore/drivers/unittest/helloworld/helloworld.F90 @@ -1,8 +1,9 @@ program hello_world - write(6,*) 'hello_world' - write(6,*) 'COMPLETED SUCCESSFULLY' + write(6,*) 'RunningUnitTest hello_world' + write(6,*) 'hello_world COMPLETED SUCCESSFULLY' + write(6,*) 'hello_world TEST COMPLETED SUCCESSFULLY' end program diff --git a/cicecore/drivers/unittest/sumchk/sumchk.F90 b/cicecore/drivers/unittest/sumchk/sumchk.F90 index e3b99b59d..f314959cb 100644 --- a/cicecore/drivers/unittest/sumchk/sumchk.F90 +++ b/cicecore/drivers/unittest/sumchk/sumchk.F90 @@ -107,7 +107,7 @@ program sumchk write(6,*) ' ' write(6,*) '==========================================================' write(6,*) ' ' - write(6,*) 'Running SUMCHK' + write(6,*) 'RunningUnitTest SUMCHK' write(6,*) ' ' write(6,*) ' npes = ',npes write(6,*) ' my_task = ',my_task @@ -674,10 +674,11 @@ program sumchk write(6,*) errorflag4(k),stringflag4(k) enddo write(6,*) ' ' + write(6,*) 'SUMCHK COMPLETED SUCCESSFULLY' if (errorflag0 == passflag) then - write(6,*) 'SUMCHK COMPLETED SUCCESSFULLY' + write(6,*) 'SUMCHK TEST COMPLETED SUCCESSFULLY' else - write(6,*) 'SUMCHK FAILED' + write(6,*) 'SUMCHK TEST FAILED' endif write(6,*) ' ' write(6,*) '==========================================================' diff --git a/cicecore/shared/ice_arrays_column.F90 b/cicecore/shared/ice_arrays_column.F90 index 46ea6f62e..dbad4292c 100644 --- a/cicecore/shared/ice_arrays_column.F90 +++ b/cicecore/shared/ice_arrays_column.F90 @@ -67,6 +67,15 @@ module 
ice_arrays_column character (len=35), public, allocatable :: c_hi_range(:) + ! icepack_snow.F90 + real (kind=dbl_kind), public, & + dimension (:,:,:), allocatable :: & + meltsliq ! snow melt mass (kg/m^2/step-->kg/m^2/day) + + real (kind=dbl_kind), public, & + dimension (:,:,:,:), allocatable :: & + meltsliqn ! snow melt mass in category n (kg/m^2) + ! icepack_meltpond_lvl.F90 real (kind=dbl_kind), public, & dimension (:,:,:,:), allocatable :: & @@ -354,6 +363,8 @@ subroutine alloc_arrays_column fzsal_g (nx_block,ny_block,max_blocks), & ! Total gravity drainage flux upNO (nx_block,ny_block,max_blocks), & ! nitrate uptake rate (mmol/m^2/d) times aice upNH (nx_block,ny_block,max_blocks), & ! ammonium uptake rate (mmol/m^2/d) times aice + meltsliq (nx_block,ny_block,max_blocks), & ! snow melt mass (kg/m^2) + meltsliqn (nx_block,ny_block,ncat,max_blocks), & ! snow melt mass in category n (kg/m^2) dhsn (nx_block,ny_block,ncat,max_blocks), & ! depth difference for snow on sea ice and pond ice ffracn (nx_block,ny_block,ncat,max_blocks), & ! fraction of fsurfn used to melt ipond alvdrn (nx_block,ny_block,ncat,max_blocks), & ! visible direct albedo (fraction) diff --git a/cicecore/shared/ice_calendar.F90 b/cicecore/shared/ice_calendar.F90 index f76b3b30b..7684fef67 100644 --- a/cicecore/shared/ice_calendar.F90 +++ b/cicecore/shared/ice_calendar.F90 @@ -47,6 +47,7 @@ module ice_calendar public :: update_date ! input date and delta date, compute new date public :: calendar_date2time ! convert date to time relative to init date public :: calendar_time2date ! convert time to date relative to init date + public :: calendar_sec2hms ! convert seconds to hour, minute, seconds public :: compute_calendar_data ! compute info about calendar for a given year ! private functions @@ -61,8 +62,10 @@ module ice_calendar ice_calendar_360day = '360day' ! 360 day calendar with 30 days per month integer (kind=int_kind), public, parameter :: & - months_per_year = 12, & ! months per year - hours_per_day = 24 ! hours per day + months_per_year = 12, & ! months per year + hours_per_day = 24, & ! hours per day + minutes_per_hour = 60, & ! minutes per hour + seconds_per_minute = 60 ! seconds per minute integer (kind=int_kind), public :: & seconds_per_day , & ! seconds per day @@ -87,6 +90,9 @@ module ice_calendar day_init, & ! initial day of month sec_init , & ! initial seconds ! other stuff + hh_init , & ! initial hour derived from sec_init + mm_init , & ! initial minute derived from sec_init + ss_init , & ! initial second derived from sec_init idate , & ! date (yyyymmdd) idate0 , & ! initial date (yyyymmdd), associated with year_init, month_init, day_init dayyr , & ! number of days in the current year @@ -189,6 +195,7 @@ subroutine init_calendar mmonth=month_init ! month mday=day_init ! day of the month msec=sec_init ! seconds into date + call calendar_sec2hms(sec_init,hh_init,mm_init,ss_init) ! initialize hh,mm,ss _init hour=0 ! computed in calendar, but needs some reasonable initial value istep1 = istep0 ! number of steps at current timestep ! real (dumped) or imagined (use to set calendar) @@ -948,6 +955,28 @@ subroutine calendar_time2date(atimesecs,ayear,amon,aday,asec,year_ref,mon_ref,da end subroutine calendar_time2date +!======================================================================= +! Compute hours, minutes, seconds from seconds + + subroutine calendar_sec2hms(seconds, hh, mm, ss) + + integer(kind=int_kind), intent(in) :: & + seconds ! 
calendar seconds in day + integer(kind=int_kind), intent(out) :: & + hh, mm, ss ! output hours, minutes, seconds + + character(len=*),parameter :: subname='(calendar_sec2hms)' + + if (seconds >= seconds_per_day) then + write(nu_diag,*) trim(subname),' ERROR seconds >= seconds_per_day, ',seconds,seconds_per_day + call abort_ice(subname//'ERROR: in seconds') + endif + hh = seconds/(seconds_per_hour) + mm = (seconds - hh*seconds_per_hour)/seconds_per_minute + ss = (seconds - hh*seconds_per_hour - mm*seconds_per_minute) + + end subroutine calendar_sec2hms + !======================================================================= ! Compute relative elapsed years, months, days, hours from base time diff --git a/cicecore/shared/ice_fileunits.F90 b/cicecore/shared/ice_fileunits.F90 index b6b30d47a..ccb518807 100644 --- a/cicecore/shared/ice_fileunits.F90 +++ b/cicecore/shared/ice_fileunits.F90 @@ -51,6 +51,8 @@ module ice_fileunits nu_restart_lvl, & ! restart input file for level ice tracers nu_dump_pond , & ! dump file for restarting melt pond tracer nu_restart_pond,& ! restart input file for melt pond tracer + nu_dump_snow , & ! dump file for restarting snow redist/metamorph tracers + nu_restart_snow,& ! restart input file for snow redist/metamorph tracers nu_dump_fsd , & ! dump file for restarting floe size distribution nu_restart_fsd, & ! restart input file for floe size distribution nu_dump_iso , & ! dump file for restarting isotope tracers @@ -129,6 +131,8 @@ subroutine init_fileunits call get_fileunit(nu_restart_lvl) call get_fileunit(nu_dump_pond) call get_fileunit(nu_restart_pond) + call get_fileunit(nu_dump_snow) + call get_fileunit(nu_restart_snow) call get_fileunit(nu_dump_fsd) call get_fileunit(nu_restart_fsd) call get_fileunit(nu_dump_iso) @@ -218,6 +222,8 @@ subroutine release_all_fileunits call release_fileunit(nu_restart_lvl) call release_fileunit(nu_dump_pond) call release_fileunit(nu_restart_pond) + call release_fileunit(nu_dump_snow) + call release_fileunit(nu_restart_snow) call release_fileunit(nu_dump_fsd) call release_fileunit(nu_restart_fsd) call release_fileunit(nu_dump_iso) diff --git a/cicecore/shared/ice_init_column.F90 b/cicecore/shared/ice_init_column.F90 index 4f4641467..eff39a464 100644 --- a/cicecore/shared/ice_init_column.F90 +++ b/cicecore/shared/ice_init_column.F90 @@ -46,7 +46,7 @@ module ice_init_column init_age, init_FY, init_lvl, init_fsd, & init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & init_aerosol, init_bgc, init_hbrine, init_zbgc, input_zbgc, & - count_tracers, init_isotope + count_tracers, init_isotope, init_snowtracers ! namelist parameters needed locally @@ -214,8 +214,9 @@ subroutine init_shortwave logical (kind=log_kind) :: & l_print_point, & ! flag to print designated grid point diagnostics debug, & ! if true, print diagnostics - dEdd_algae, & ! from icepack - modal_aero ! from icepack + dEdd_algae, & ! use prognostic chla in dEdd radiation + modal_aero, & ! use modal aerosol optical treatment + snwgrain ! use variable snow radius character (char_len) :: shortwave @@ -225,12 +226,13 @@ subroutine init_shortwave real (kind=dbl_kind), dimension(ncat) :: & fbri ! brine height to ice thickness - real(kind=dbl_kind), allocatable :: & - ztrcr_sw(:,:) ! + real(kind= dbl_kind), dimension(:,:), allocatable :: & + ztrcr_sw, & ! zaerosols (kg/m^3) and chla (mg/m^3) + rsnow ! 
snow grain radius tracer (10^-6 m) logical (kind=log_kind) :: tr_brine, tr_zaero, tr_bgc_n integer (kind=int_kind) :: nt_alvl, nt_apnd, nt_hpnd, nt_ipnd, nt_aero, & - nt_fbri, nt_tsfc, ntrcr, nbtrcr, nbtrcr_sw + nt_fbri, nt_tsfc, ntrcr, nbtrcr, nbtrcr_sw, nt_rsnw integer (kind=int_kind), dimension(icepack_max_algae) :: & nt_bgc_N integer (kind=int_kind), dimension(icepack_max_aero) :: & @@ -243,17 +245,19 @@ subroutine init_shortwave call icepack_query_parameters(shortwave_out=shortwave) call icepack_query_parameters(dEdd_algae_out=dEdd_algae) call icepack_query_parameters(modal_aero_out=modal_aero) + call icepack_query_parameters(snwgrain_out=snwgrain) call icepack_query_tracer_sizes(ntrcr_out=ntrcr, nbtrcr_out=nbtrcr, nbtrcr_sw_out=nbtrcr_sw) call icepack_query_tracer_flags(tr_brine_out=tr_brine, tr_zaero_out=tr_zaero, & tr_bgc_n_out=tr_bgc_n) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, & nt_ipnd_out=nt_ipnd, nt_aero_out=nt_aero, nt_fbri_out=nt_fbri, nt_tsfc_out=nt_tsfc, & - nt_bgc_N_out=nt_bgc_N, nt_zaero_out=nt_zaero) + nt_bgc_N_out=nt_bgc_N, nt_zaero_out=nt_zaero, nt_rsnw_out=nt_rsnw) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__,line= __LINE__) allocate(ztrcr_sw(nbtrcr_sw, ncat)) + allocate(rsnow(nslyr,ncat)) do iblk=1,nblocks @@ -330,8 +334,14 @@ subroutine init_shortwave fbri(:) = c0 ztrcr_sw(:,:) = c0 + rsnow (:,:) = c0 do n = 1, ncat - if (tr_brine) fbri(n) = trcrn(i,j,nt_fbri,n,iblk) + if (tr_brine) fbri(n) = trcrn(i,j,nt_fbri,n,iblk) + if (snwgrain) then + do k = 1, nslyr + rsnow(k,n) = trcrn(i,j,nt_rsnw+k-1,n,iblk) + enddo + endif enddo if (tmask(i,j,iblk)) then @@ -379,6 +389,7 @@ subroutine init_shortwave albpndn=albpndn(i,j,:,iblk), apeffn=apeffn(i,j,:,iblk), & snowfracn=snowfracn(i,j,:,iblk), & dhsn=dhsn(i,j,:,iblk), ffracn=ffracn(i,j,:,iblk), & + rsnow=rsnow(:,:), & l_print_point=l_print_point, & initonly = .true.) endif @@ -475,6 +486,7 @@ subroutine init_shortwave enddo ! iblk deallocate(ztrcr_sw) + deallocate(rsnow) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -587,6 +599,29 @@ end subroutine init_meltponds_topo !======================================================================= +! Initialize snow redistribution/metamorphosis tracers (call prior to reading restart data) + + subroutine init_snowtracers(smice, smliq, rhos_cmp, rsnw) + + real(kind=dbl_kind), dimension(:,:,:,:), intent(out) :: & + smice, smliq, rhos_cmp, rsnw + character(len=*),parameter :: subname='(init_snowtracers)' + + real (kind=dbl_kind) :: & + rsnw_fall, & ! snow grain radius of new fallen snow (10^-6 m) + rhos ! snow density (kg/m^3) + + call icepack_query_parameters(rsnw_fall_out=rsnw_fall, rhos_out=rhos) + + rsnw (:,:,:,:) = rsnw_fall + rhos_cmp(:,:,:,:) = rhos + smice (:,:,:,:) = rhos + smliq (:,:,:,:) = c0 + + end subroutine init_snowtracers + +!======================================================================= + ! 
Initialize floe size distribution tracer (call prior to reading restart data) subroutine init_fsd(floesize) @@ -1776,10 +1811,12 @@ subroutine count_tracers integer (kind=int_kind) :: ntrcr logical (kind=log_kind) :: tr_iage, tr_FY, tr_lvl, tr_pond, tr_aero, tr_fsd + logical (kind=log_kind) :: tr_snow logical (kind=log_kind) :: tr_iso, tr_pond_cesm, tr_pond_lvl, tr_pond_topo integer (kind=int_kind) :: nt_Tsfc, nt_sice, nt_qice, nt_qsno, nt_iage, nt_FY integer (kind=int_kind) :: nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, nt_aero integer (kind=int_kind) :: nt_fsd, nt_isosno, nt_isoice + integer (kind=int_kind) :: nt_smice, nt_smliq, nt_rhos, nt_rsnw integer (kind=int_kind) :: & nbtrcr, nbtrcr_sw, & @@ -1862,7 +1899,7 @@ subroutine count_tracers tr_lvl_out=tr_lvl, tr_aero_out=tr_aero, tr_pond_out=tr_pond, & tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_fsd_out=tr_fsd, & - tr_iso_out=tr_iso, & + tr_snow_out=tr_snow, tr_iso_out=tr_iso, & tr_bgc_Nit_out=tr_bgc_Nit, tr_bgc_Am_out =tr_bgc_Am, tr_bgc_Sil_out=tr_bgc_Sil, & tr_bgc_DMS_out=tr_bgc_DMS, tr_bgc_PON_out=tr_bgc_PON, & tr_bgc_N_out =tr_bgc_N, tr_bgc_C_out =tr_bgc_C, tr_bgc_chl_out=tr_bgc_chl, & @@ -1925,6 +1962,21 @@ subroutine count_tracers endif endif + nt_smice = 0 + nt_smliq = 0 + nt_rhos = 0 + nt_rsnw = 0 + if (tr_snow) then + nt_smice = ntrcr + 1 + ntrcr = ntrcr + nslyr ! mass of ice in nslyr snow layers + nt_smliq = ntrcr + 1 + ntrcr = ntrcr + nslyr ! mass of liquid in nslyr snow layers + nt_rhos = ntrcr + 1 + ntrcr = ntrcr + nslyr ! snow density in nslyr layers + nt_rsnw = ntrcr + 1 + ntrcr = ntrcr + nslyr ! snow grain radius in nslyr layers + endif + nt_fsd = 0 if (tr_fsd) then nt_fsd = ntrcr + 1 ! floe size distribution @@ -2212,7 +2264,7 @@ subroutine count_tracers !tcx, +1 here is the unused tracer, want to get rid of it ntrcr = ntrcr + 1 -!tcx, reset unusaed tracer index, eventually get rid of it. +!tcx, reset unused tracer index, eventually get rid of it. 
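+! For illustration: the extra "+1" tracer above provides a shared dummy
+! slot so that inactive tracer indices stay in bounds. With
+! tr_iage = .false., for example, nt_iage is reset to ntrcr below, and a
+! reference such as trcrn(i,j,nt_iage,n,iblk) reads the unused last
+! tracer rather than addressing index 0.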
if (nt_iage <= 0) nt_iage = ntrcr if (nt_FY <= 0) nt_FY = ntrcr if (nt_alvl <= 0) nt_alvl = ntrcr @@ -2220,6 +2272,10 @@ subroutine count_tracers if (nt_apnd <= 0) nt_apnd = ntrcr if (nt_hpnd <= 0) nt_hpnd = ntrcr if (nt_ipnd <= 0) nt_ipnd = ntrcr + if (nt_smice <= 0) nt_smice = ntrcr + if (nt_smliq <= 0) nt_smliq = ntrcr + if (nt_rhos <= 0) nt_rhos = ntrcr + if (nt_rsnw <= 0) nt_rsnw = ntrcr if (nt_fsd <= 0) nt_fsd = ntrcr if (nt_isosno<= 0) nt_isosno= ntrcr if (nt_isoice<= 0) nt_isoice= ntrcr @@ -2246,6 +2302,7 @@ subroutine count_tracers nt_qice_in=nt_qice, nt_qsno_in=nt_qsno, nt_iage_in=nt_iage, nt_fy_in=nt_fy, & nt_alvl_in=nt_alvl, nt_vlvl_in=nt_vlvl, nt_apnd_in=nt_apnd, nt_hpnd_in=nt_hpnd, & nt_ipnd_in=nt_ipnd, nt_fsd_in=nt_fsd, nt_aero_in=nt_aero, & + nt_smice_in=nt_smice, nt_smliq_in=nt_smliq, nt_rhos_in=nt_rhos, nt_rsnw_in=nt_rsnw, & nt_isosno_in=nt_isosno, nt_isoice_in=nt_isoice, nt_fbri_in=nt_fbri, & nt_bgc_Nit_in=nt_bgc_Nit, nt_bgc_Am_in=nt_bgc_Am, nt_bgc_Sil_in=nt_bgc_Sil, & nt_bgc_DMS_in=nt_bgc_DMS, nt_bgc_PON_in=nt_bgc_PON, nt_bgc_S_in=nt_bgc_S, & diff --git a/cicecore/shared/ice_restart_column.F90 b/cicecore/shared/ice_restart_column.F90 index e819b1098..074b37dbe 100644 --- a/cicecore/shared/ice_restart_column.F90 +++ b/cicecore/shared/ice_restart_column.F90 @@ -12,7 +12,7 @@ module ice_restart_column use ice_communicate, only: my_task, master_task use ice_constants, only: c0, c1, p5 use ice_constants, only: field_loc_center, field_type_scalar - use ice_domain_size, only: ncat, nfsd, nblyr + use ice_domain_size, only: ncat, nslyr, nfsd, nblyr use ice_restart,only: read_restart_field, write_restart_field use ice_exit, only: abort_ice use ice_fileunits, only: nu_diag @@ -32,6 +32,7 @@ module ice_restart_column write_restart_pond_cesm, read_restart_pond_cesm, & write_restart_pond_lvl, read_restart_pond_lvl, & write_restart_pond_topo, read_restart_pond_topo, & + write_restart_snow, read_restart_snow, & write_restart_fsd, read_restart_fsd, & write_restart_iso, read_restart_iso, & write_restart_aero, read_restart_aero, & @@ -45,6 +46,7 @@ module ice_restart_column restart_pond_cesm, & ! if .true., read meltponds restart file restart_pond_lvl , & ! if .true., read meltponds restart file restart_pond_topo, & ! if .true., read meltponds restart file + restart_snow , & ! if .true., read snow tracer restart file restart_fsd , & ! if .true., read floe size restart file restart_iso , & ! if .true., read isotope tracer restart file restart_aero , & ! if .true., read aerosol tracer restart file @@ -483,6 +485,93 @@ end subroutine read_restart_pond_topo !======================================================================= +! Dumps all values needed for restarting snow redistribution/metamorphism +! author Elizabeth C. Hunke, LANL + + subroutine write_restart_snow() + + use ice_fileunits, only: nu_dump_snow + use ice_state, only: trcrn + + ! local variables + + logical (kind=log_kind) :: diag + integer (kind=int_kind) :: nt_smice, nt_smliq, nt_rhos, nt_rsnw, k + character*3 ck + character(len=*),parameter :: subname='(write_restart_snow)' + + call icepack_query_tracer_indices(nt_smice_out=nt_smice, & + nt_smliq_out=nt_smliq, nt_rhos_out=nt_rhos, nt_rsnw_out=nt_rsnw) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + diag = .true. 
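+      ! For illustration: each snow tracer is dumped as one ncat-category
+      ! field per snow layer, tagged with the three-digit layer index built
+      ! in ck below. With nslyr = 3, for example, this writes the fields
+      ! smice001, smice002, smice003, smliq001, ..., rsnw003.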
+ + !----------------------------------------------------------------- + + do k = 1,nslyr + write(ck,'(i3.3)') k + call write_restart_field(nu_dump_snow,0, trcrn(:,:,nt_smice+k-1,:,:), & + 'ruf8','smice'//trim(ck),ncat,diag) + call write_restart_field(nu_dump_snow,0, trcrn(:,:,nt_smliq+k-1,:,:), & + 'ruf8','smliq'//trim(ck),ncat,diag) + call write_restart_field(nu_dump_snow,0, trcrn(:,:,nt_rhos+k-1,:,:), & + 'ruf8','rhos'//trim(ck),ncat,diag) + call write_restart_field(nu_dump_snow,0, trcrn(:,:,nt_rsnw+k-1,:,:), & + 'ruf8','rsnw'//trim(ck),ncat,diag) + enddo + + end subroutine write_restart_snow + +!======================================================================= + +! Reads all values needed for a restart with snow redistribution/metamorphism +! author Elizabeth C. Hunke, LANL + + subroutine read_restart_snow() + + use ice_fileunits, only: nu_restart_snow + use ice_state, only: trcrn + + ! local variables + + logical (kind=log_kind) :: & + diag + integer (kind=int_kind) :: nt_smice, nt_smliq, nt_rhos, nt_rsnw, k + character*3 ck + character(len=*),parameter :: subname='(read_restart_snow)' + + call icepack_query_tracer_indices(nt_smice_out=nt_smice, & + nt_smliq_out=nt_smliq, nt_rhos_out=nt_rhos, nt_rsnw_out=nt_rsnw) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + diag = .true. + + if (my_task == master_task) write(nu_diag,*) subname,'min/max snow tracers' + + do k=1,nslyr + write(ck,'(i3.3)') k + call read_restart_field(nu_restart_snow,0,trcrn(:,:,nt_smice+k-1,:,:), & + 'ruf8','smice'//trim(ck),ncat,diag, & + field_type=field_type_scalar,field_loc=field_loc_center) + call read_restart_field(nu_restart_snow,0,trcrn(:,:,nt_smliq+k-1,:,:), & + 'ruf8','smliq'//trim(ck),ncat,diag, & + field_type=field_type_scalar,field_loc=field_loc_center) + call read_restart_field(nu_restart_snow,0,trcrn(:,:,nt_rhos+k-1,:,:), & + 'ruf8','rhos'//trim(ck),ncat,diag, & + field_type=field_type_scalar,field_loc=field_loc_center) + call read_restart_field(nu_restart_snow,0,trcrn(:,:,nt_rsnw+k-1,:,:), & + 'ruf8','rsnw'//trim(ck),ncat,diag, & + field_type=field_type_scalar,field_loc=field_loc_center) + enddo + + end subroutine read_restart_snow + +!======================================================================= + ! Dumps all values needed for restarting ! author Elizabeth C. 
Hunke, LANL diff --git a/cicecore/version.txt b/cicecore/version.txt index cfd991555..04a90ef1a 100644 --- a/cicecore/version.txt +++ b/cicecore/version.txt @@ -1 +1 @@ -CICE 6.2.0 +CICE 6.3.0 diff --git a/configuration/scripts/cice.batch.csh b/configuration/scripts/cice.batch.csh index 902abb56b..024270039 100755 --- a/configuration/scripts/cice.batch.csh +++ b/configuration/scripts/cice.batch.csh @@ -226,6 +226,23 @@ cat >> ${jobfile} << EOFB #PBS -l walltime=${batchtime} EOFB +else if (${ICE_MACHINE} =~ gaea*) then +cat >> ${jobfile} << EOFB +#SBATCH -J ${ICE_CASENAME} +#SBATCH --partition=batch +#SBATCH --qos=${queue} +#SBATCH --account=nggps_emc +#SBATCH --clusters=c3 +#SBATCH --time=${batchtime} +#SBATCH --nodes=${nnodes} +#SBATCH --ntasks-per-node=${taskpernodelimit} +#SBATCH --cpus-per-task=${nthrds} +#SBATCH -e slurm%j.err +#SBATCH -o slurm%j.out +##SBATCH --mail-type FAIL +##SBATCH --mail-user=xxx@noaa.gov +EOFB + else if (${ICE_MACHINE} =~ hera*) then cat >> ${jobfile} << EOFB #SBATCH -J ${ICE_CASENAME} diff --git a/configuration/scripts/cice.launch.csh b/configuration/scripts/cice.launch.csh index 7d45a387f..40b8996b4 100755 --- a/configuration/scripts/cice.launch.csh +++ b/configuration/scripts/cice.launch.csh @@ -165,6 +165,12 @@ aprun -n ${ntasks} -N ${taskpernodelimit} -d ${nthrds} ./cice >&! \$ICE_RUNLOG_F EOFR endif +#======= +else if (${ICE_MACHINE} =~ gaea*) then +cat >> ${jobfile} << EOFR +srun -n ${ntasks} -c ${nthrds} ./cice >&! \$ICE_RUNLOG_FILE +EOFR + #======= else if (${ICE_MACHINE} =~ hera*) then cat >> ${jobfile} << EOFR diff --git a/configuration/scripts/cice.run.setup.csh b/configuration/scripts/cice.run.setup.csh index ea8efeb03..aa578b5ca 100755 --- a/configuration/scripts/cice.run.setup.csh +++ b/configuration/scripts/cice.run.setup.csh @@ -100,7 +100,7 @@ else echo "Run completed successfully" echo "\`date\` \${0}: Run completed successfully" >> \${ICE_CASEDIR}/README.case else - echo "CICE run did NOT complete" + echo "Run did NOT complete" echo "\`date\` \${0}: CICE run did NOT complete" >> \${ICE_CASEDIR}/README.case exit -1 endif diff --git a/configuration/scripts/ice_in b/configuration/scripts/ice_in index e918a694c..3dec72963 100644 --- a/configuration/scripts/ice_in +++ b/configuration/scripts/ice_in @@ -100,6 +100,8 @@ restart_pond_topo = .false. tr_pond_lvl = .true. restart_pond_lvl = .false. + tr_snow = .false. + restart_snow = .false. tr_iso = .false. restart_iso = .false. tr_aero = .false. @@ -127,7 +129,7 @@ kdyn = 1 ndte = 240 revised_evp = .false. - kevp_kernel = 0 + evp_algorithm = 'standard_2d' brlx = 300.0 arlx = 300.0 advection = 'remap' @@ -197,6 +199,28 @@ pndaspect = 0.8 / +&snow_nml + snwredist = 'none' + snwgrain = .false. + use_smliq_pnd = .false. + rsnw_fall = 100.0 + rsnw_tmax = 1500.0 + rhosnew = 100.0 + rhosmin = 100.0 + rhosmax = 450.0 + windmin = 10.0 + drhosdwind = 27.3 + snwlvlfac = 0.3 + snw_aging_table = 'test' + snw_filename = 'unknown' + snw_rhos_fname = 'unknown' + snw_Tgrd_fname = 'unknown' + snw_T_fname = 'unknown' + snw_tau_fname = 'unknown' + snw_kappa_fname = 'unknown' + snw_drdt0_fname = 'unknown' +/ + &forcing_nml formdrag = .false. 
atmbndy = 'default'
@@ -584,6 +608,21 @@
 f_apeff_ai = 'm'
 /
 
+&icefields_snow_nml
+ f_smassicen = 'x'
+ f_smassliqn = 'x'
+ f_rhos_cmpn = 'x'
+ f_rhos_cntn = 'x'
+ f_rsnwn = 'x'
+ f_smassice = 'm'
+ f_smassliq = 'm'
+ f_rhos_cmp = 'm'
+ f_rhos_cnt = 'm'
+ f_rsnw = 'm'
+ f_meltsliq = 'm'
+ f_fsloss = 'm'
+/
+
 &icefields_bgc_nml
 f_fiso_atm = 'x'
 f_fiso_ocn = 'x'
diff --git a/configuration/scripts/machines/Macros.gaea_intel b/configuration/scripts/machines/Macros.gaea_intel
new file mode 100644
index 000000000..f4c4d2cbe
--- /dev/null
+++ b/configuration/scripts/machines/Macros.gaea_intel
@@ -0,0 +1,56 @@
+#==============================================================================
+# Makefile macros for NOAA gaea, intel compiler
+#==============================================================================
+
+CPP := fpp
+CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS}
+CFLAGS := -c -O2 -fp-model precise -xHost
+
+FIXEDFLAGS := -132
+FREEFLAGS := -FR
+FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -traceback -align array64byte -xHost
+FFLAGS_NOOPT:= -O0
+
+ifeq ($(ICE_BLDDEBUG), true)
+ FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created -link_mpi=dbg
+else
+ FFLAGS += -O2
+endif
+
+SCC := cc
+SFC := ftn
+MPICC := cc
+MPIFC := ftn
+
+ifeq ($(ICE_COMMDIR), mpi)
+ FC := $(MPIFC)
+ CC := $(MPICC)
+else
+ FC := $(SFC)
+ CC := $(SCC)
+endif
+LD:= $(FC)
+
+NETCDF_PATH := $(NETCDF)
+
+PIO_CONFIG_OPTS:= --enable-filesystem-hints=gpfs
+
+#PNETCDF_PATH := $(PNETCDF)
+#PNETCDF_PATH := /glade/u/apps/ch/opt/pio/2.2/mpt/2.15f/intel/17.0.1/lib
+
+INC_NETCDF := $(NETCDF_PATH)/include
+LIB_NETCDF := $(NETCDF_PATH)/lib
+
+#LIB_PNETCDF := $(PNETCDF_PATH)/lib
+#LIB_MPI := $(IMPILIBDIR)
+
+INCLDIR := $(INCLDIR) -I$(INC_NETCDF)
+#SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff -L$(LIB_PNETCDF) -lpnetcdf -lgptl
+SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff
+
+ifeq ($(ICE_THREADED), true)
+ LDFLAGS += -qopenmp
+ CFLAGS += -qopenmp
+ FFLAGS += -qopenmp
+endif
+
diff --git a/configuration/scripts/machines/Macros.onyx_cray b/configuration/scripts/machines/Macros.onyx_cray
index 6753a78e5..c088d1fd4 100644
--- a/configuration/scripts/machines/Macros.onyx_cray
+++ b/configuration/scripts/machines/Macros.onyx_cray
@@ -3,7 +3,7 @@
#==============================================================================
 
CPP := ftn -e P
-CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS}
+CPPDEFS := -DFORTRANUNDERSCORE -DNO_R16 ${ICE_CPPDEFS}
CFLAGS := -c -O2 -h fp0
 
FIXEDFLAGS := -132
diff --git a/configuration/scripts/machines/Macros.onyx_gnu b/configuration/scripts/machines/Macros.onyx_gnu
index 890e29e31..31d0e64aa 100644
--- a/configuration/scripts/machines/Macros.onyx_gnu
+++ b/configuration/scripts/machines/Macros.onyx_gnu
@@ -8,7 +8,7 @@ CFLAGS := -c
 
FIXEDFLAGS := -ffixed-line-length-132
FREEFLAGS := -ffree-form
-FFLAGS := -fconvert=big-endian -fbacktrace -ffree-line-length-none
+FFLAGS := -fconvert=big-endian -fbacktrace -ffree-line-length-none -fallow-argument-mismatch
FFLAGS_NOOPT:= -O0
 
ifeq ($(ICE_BLDDEBUG), true)
diff --git a/configuration/scripts/machines/env.gaea_intel b/configuration/scripts/machines/env.gaea_intel
new file mode 100755
index 000000000..d143270d7
--- /dev/null
+++ b/configuration/scripts/machines/env.gaea_intel
@@ -0,0 +1,34 @@
+#!/bin/csh -f
+
+set inp = "undefined"
+if ($#argv == 1) then
+ set inp = $1
+endif
+
+if ("$inp" != "-nomodules") then
+
+source /lustre/f2/pdata/esrl/gsd/contrib/lua-5.1.4.9/init/init_lmod.csh
+#module list
+module purge
+module load intel +module load cray-mpich +module load cray-netcdf +module load PrgEnv-intel/6.0.5 +module list + +endif + +setenv ICE_MACHINE_MACHNAME gaea +setenv ICE_MACHINE_MACHINFO "Cray Intel SkyLake 6148" +setenv ICE_MACHINE_ENVNAME intel +setenv ICE_MACHINE_ENVINFO "ifort 18.0.5 20180823, cray-mpich, cray-netcdf" +setenv ICE_MACHINE_MAKE gmake +setenv ICE_MACHINE_WKDIR $HOME/scratch/CICE_RUNS +setenv ICE_MACHINE_INPUTDATA /ncrc/home1/Robert.Grumbine/rgdev/CICE_INPUTDATA +setenv ICE_MACHINE_BASELINE $HOME/scratch/CICE_BASELINE +setenv ICE_MACHINE_SUBMIT "sbatch" +setenv ICE_MACHINE_TPNODE 40 +setenv ICE_MACHINE_ACCT P0000000 +setenv ICE_MACHINE_QUEUE "normal" +setenv ICE_MACHINE_BLDTHRDS 1 +setenv ICE_MACHINE_QSTAT "squeue --jobs=" diff --git a/configuration/scripts/machines/env.onyx_cray b/configuration/scripts/machines/env.onyx_cray index b155c1d1e..38785a27d 100755 --- a/configuration/scripts/machines/env.onyx_cray +++ b/configuration/scripts/machines/env.onyx_cray @@ -13,14 +13,14 @@ module unload PrgEnv-cray module unload PrgEnv-gnu module unload PrgEnv-intel module unload PrgEnv-pgi -module load PrgEnv-cray/6.0.4 +module load PrgEnv-cray/6.0.9 module unload cce -module load cce/8.6.4 +module load cce/11.0.2 module unload cray-mpich module unload cray-mpich2 -module load cray-mpich/7.6.3 +module load cray-mpich/7.7.16 module unload netcdf module unload cray-netcdf @@ -28,10 +28,11 @@ module unload cray-hdf5 module unload cray-hdf5-parallel module unload cray-netcdf-hdf5parallel module unload cray-parallel-netcdf -module load cray-netcdf/4.4.1.1.3 -module load cray-hdf5/1.10.0.3 +module load cray-netcdf/4.7.4.0 +module load cray-hdf5/1.12.0.0 module unload cray-libsci +module unload craype-hugepages2M module load craype-broadwell @@ -44,7 +45,7 @@ endif setenv ICE_MACHINE_MACHNAME onyx setenv ICE_MACHINE_MACHINFO "Cray XC40/50 Xeon E5-2699v4 Broadwell" setenv ICE_MACHINE_ENVNAME cray -setenv ICE_MACHINE_ENVINFO "Cray cce/8.6.4, cray-mpich/7.6.3, netcdf/4.4.1.1.3" +setenv ICE_MACHINE_ENVINFO "Cray cce/11.0.2, cray-mpich/7.7.16, netcdf/4.7.4.0" setenv ICE_MACHINE_MAKE gmake setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS setenv ICE_MACHINE_INPUTDATA /p/app/unsupported/RASM/cice_consortium diff --git a/configuration/scripts/machines/env.onyx_gnu b/configuration/scripts/machines/env.onyx_gnu index de7bcc787..699c01559 100755 --- a/configuration/scripts/machines/env.onyx_gnu +++ b/configuration/scripts/machines/env.onyx_gnu @@ -13,14 +13,14 @@ module unload PrgEnv-cray module unload PrgEnv-gnu module unload PrgEnv-intel module unload PrgEnv-pgi -module load PrgEnv-gnu/6.0.4 +module load PrgEnv-gnu/6.0.9 module unload gcc -module load gcc/7.2.0 +module load gcc/10.2.0 module unload cray-mpich module unload cray-mpich2 -module load cray-mpich/7.6.2 +module load cray-mpich/7.7.16 module unload netcdf module unload cray-netcdf @@ -28,10 +28,11 @@ module unload cray-hdf5 module unload cray-hdf5-parallel module unload cray-netcdf-hdf5parallel module unload cray-parallel-netcdf -module load cray-netcdf/4.4.1.1.3 -module load cray-hdf5/1.10.0.3 +module load cray-netcdf/4.7.4.0 +module load cray-hdf5/1.12.0.0 module unload cray-libsci +module unload craype-hugepages2M module load craype-broadwell @@ -44,7 +45,7 @@ endif setenv ICE_MACHINE_MACHNAME onyx setenv ICE_MACHINE_MACHINFO "Cray XC40/50 Xeon E5-2699v4 Broadwell" setenv ICE_MACHINE_ENVNAME gnu -setenv ICE_MACHINE_ENVINFO "GNU Fortran (GCC) 7.2.0 20170814, cray-mpich/7.6.2, netcdf/4.4.1.1.3" +setenv ICE_MACHINE_ENVINFO "GNU Fortran (GCC) 
10.2.0, cray-mpich/7.7.16, netcdf/4.7.4.0" setenv ICE_MACHINE_MAKE gmake setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS setenv ICE_MACHINE_INPUTDATA /p/app/unsupported/RASM/cice_consortium diff --git a/configuration/scripts/machines/env.onyx_intel b/configuration/scripts/machines/env.onyx_intel index df42fe9f8..39f25e8e5 100755 --- a/configuration/scripts/machines/env.onyx_intel +++ b/configuration/scripts/machines/env.onyx_intel @@ -13,14 +13,14 @@ module unload PrgEnv-cray module unload PrgEnv-gnu module unload PrgEnv-intel module unload PrgEnv-pgi -module load PrgEnv-intel/6.0.4 +module load PrgEnv-intel/6.0.9 module unload intel -module load intel/17.0.1.132 +module load intel/19.1.3.304 module unload cray-mpich module unload cray-mpich2 -module load cray-mpich/7.6.2 +module load cray-mpich/7.7.16 module unload netcdf module unload cray-netcdf @@ -28,10 +28,11 @@ module unload cray-hdf5 module unload cray-hdf5-parallel module unload cray-netcdf-hdf5parallel module unload cray-parallel-netcdf -module load cray-netcdf/4.4.1.1.3 -module load cray-hdf5/1.10.0.3 +module load cray-netcdf/4.7.4.0 +module load cray-hdf5/1.12.0.0 module unload cray-libsci +module unload craype-hugepages2M module load craype-broadwell @@ -44,7 +45,7 @@ endif setenv ICE_MACHINE_MACHNAME onyx setenv ICE_MACHINE_MACHINFO "Cray XC40/50 Xeon E5-2699v4 Broadwell" setenv ICE_MACHINE_ENVNAME intel -setenv ICE_MACHINE_ENVINFO "ifort 17.0.1 20161005, cray-mpich/7.6.2, netcdf/4.4.1.1.3" +setenv ICE_MACHINE_ENVINFO "ifort 19.1.3.304, cray-mpich/7.7.16, netcdf/4.7.4.0" setenv ICE_MACHINE_MAKE gmake setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS setenv ICE_MACHINE_INPUTDATA /p/app/unsupported/RASM/cice_consortium diff --git a/configuration/scripts/options/set_nml.alt04 b/configuration/scripts/options/set_nml.alt04 index 53372f124..98eb311cb 100644 --- a/configuration/scripts/options/set_nml.alt04 +++ b/configuration/scripts/options/set_nml.alt04 @@ -17,7 +17,7 @@ sw_frac = 0.9d0 sw_dtemp = 0.02d0 conduct = 'MU71' kdyn = 1 -kevp_kernel = 102 +evp_algorithm = 'shared_mem_1d' fbot_xfer_type = 'Cdn_ocn' shortwave = 'dEdd' formdrag = .true. diff --git a/configuration/scripts/options/set_nml.evp1d b/configuration/scripts/options/set_nml.evp1d new file mode 100644 index 000000000..e7d38e86b --- /dev/null +++ b/configuration/scripts/options/set_nml.evp1d @@ -0,0 +1 @@ +evp_algorithm = 'shared_mem_1d' diff --git a/configuration/scripts/options/set_nml.gx1prod b/configuration/scripts/options/set_nml.gx1prod index eca527a64..94e4bbf89 100644 --- a/configuration/scripts/options/set_nml.gx1prod +++ b/configuration/scripts/options/set_nml.gx1prod @@ -1,11 +1,11 @@ year_init = 2005 use_leap_years = .true. npt_unit = 'y' -npt = 1 +npt = 4 dumpfreq = 'm' dumpfreq_base = 'zero' fyear_init = 2005 -ycycle = 5 +ycycle = 4 ocn_data_dir = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/gx1/CESM/MONTHLY/' use_bathymetry = .true. seabed_stress = .true. diff --git a/configuration/scripts/options/set_nml.gx1prod15 b/configuration/scripts/options/set_nml.gx1prod15 new file mode 100644 index 000000000..edbf5e5de --- /dev/null +++ b/configuration/scripts/options/set_nml.gx1prod15 @@ -0,0 +1,19 @@ +year_init = 1995 +use_leap_years = .true. +npt_unit = 'y' +npt = 15 +dumpfreq = 'm' +dumpfreq_base = 'zero' +fyear_init = 1995 +ycycle = 16 +ocn_data_dir = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/gx1/CESM/MONTHLY/' +use_bathymetry = .true. +seabed_stress = .true. +seabed_stress_method = 'LKD' +ocn_data_type = 'ncar' +ocn_data_format = 'nc' +oceanmixed_ice = .true. 
+oceanmixed_file = 'ocean_forcing_clim_2D_gx1.20210330.nc' +tr_brine = .true. +f_taubx = 'm' +f_tauby = 'm' diff --git a/configuration/scripts/options/set_nml.histinst b/configuration/scripts/options/set_nml.histinst new file mode 100644 index 000000000..f2f0995c8 --- /dev/null +++ b/configuration/scripts/options/set_nml.histinst @@ -0,0 +1 @@ +hist_avg = .false. diff --git a/configuration/scripts/options/set_nml.kevp102 b/configuration/scripts/options/set_nml.kevp102 deleted file mode 100644 index 3a5dc3dbd..000000000 --- a/configuration/scripts/options/set_nml.kevp102 +++ /dev/null @@ -1 +0,0 @@ -kevp_kernel = 102 diff --git a/configuration/scripts/options/set_nml.qc b/configuration/scripts/options/set_nml.qc index 2b1528cc5..70ba1b429 100644 --- a/configuration/scripts/options/set_nml.qc +++ b/configuration/scripts/options/set_nml.qc @@ -1,4 +1,12 @@ -npt = 43800 +npt_unit = 'y' +npt = 5 +year_init = 2005 +month_init = 1 +day_init = 1 +sec_init = 0 +use_leap_years = .false. +fyear_init = 2005 +ycycle = 1 dumpfreq = 'm' dumpfreq_n = 12 diagfreq = 24 diff --git a/configuration/scripts/options/set_nml.run10year b/configuration/scripts/options/set_nml.run10year new file mode 100644 index 000000000..cf672e991 --- /dev/null +++ b/configuration/scripts/options/set_nml.run10year @@ -0,0 +1,7 @@ +npt_unit = 'y' +npt = 10 +dumpfreq = 'y' +dumpfreq_n = 12 +diagfreq = 24 +histfreq = 'm','x','x','x','x' + diff --git a/configuration/scripts/options/set_nml.snw30percent b/configuration/scripts/options/set_nml.snw30percent new file mode 100644 index 000000000..ecf88ad4e --- /dev/null +++ b/configuration/scripts/options/set_nml.snw30percent @@ -0,0 +1,5 @@ +tr_snow = .true. +snwredist = 'bulk' +snwlvlfac = 0.3 +nslyr = 5 + diff --git a/configuration/scripts/options/set_nml.snwITDrdg b/configuration/scripts/options/set_nml.snwITDrdg new file mode 100644 index 000000000..cddeedec3 --- /dev/null +++ b/configuration/scripts/options/set_nml.snwITDrdg @@ -0,0 +1,10 @@ +tr_snow = .true. +snwredist = 'ITDrdg' +nslyr = 5 +rhosnew = 100.0 +rhosmin = 100.0 +rhosmax = 450.0 +windmin = 10.0 +drhosdwind = 27.3 +snwlvlfac = 0.3 + diff --git a/configuration/scripts/options/set_nml.snwgrain b/configuration/scripts/options/set_nml.snwgrain new file mode 100644 index 000000000..653030385 --- /dev/null +++ b/configuration/scripts/options/set_nml.snwgrain @@ -0,0 +1,15 @@ +tr_snow = .true. +snwgrain = .true. +use_smliq_pnd = .true. +rsnw_fall = 54.526 +rsnw_tmax = 1500.0 +snw_aging_table = 'file' +snw_filename = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/snicar_drdt_bst_fit_60_c04262019.nc' +snw_tau_fname = 'snowEmpiricalGrowthParameterTau' +snw_kappa_fname = 'snowEmpiricalGrowthParameterKappa' +snw_drdt0_fname = 'snowPropertyRate' +snw_rhos_fname = 'nGrainAgingSnowDensity' +snw_Tgrd_fname = 'nGrainAgingTempGradient' +snw_T_fname = 'nGrainAgingTemperature' +nslyr = 5 + diff --git a/configuration/scripts/tests/QC/cice.t-test.py b/configuration/scripts/tests/QC/cice.t-test.py index 987175245..6f2c7e89b 100755 --- a/configuration/scripts/tests/QC/cice.t-test.py +++ b/configuration/scripts/tests/QC/cice.t-test.py @@ -57,6 +57,15 @@ def gen_filenames(base_dir, test_dir): " # of files: {}".format(len(files_b))) sys.exit(-1) + if len(files_a) < 1825: + logger.error("Number of output files too small, expecting at least 1825." 
+ \ + " Exiting...\n" + \ + "Baseline directory: {}\n".format(path_a) + \ + " # of files: {}\n".format(len(files_a)) + \ + "Test directory: {}\n".format(path_b) + \ + " # of files: {}".format(len(files_b))) + sys.exit(-1) + logger.info("Number of files: %d", len(files_a)) return path_a, path_b, files_a, files_b diff --git a/configuration/scripts/tests/base_suite.ts b/configuration/scripts/tests/base_suite.ts index 69252f9fb..4da4dd110 100644 --- a/configuration/scripts/tests/base_suite.ts +++ b/configuration/scripts/tests/base_suite.ts @@ -9,9 +9,9 @@ smoke gx3 7x2 diag1,bigdiag,run1day,diagpt1 decomp gx3 4x2x25x29x5 none smoke gx3 4x2 diag1,run5day smoke_gx3_8x2_diag1_run5day smoke gx3 4x1 diag1,run5day,thread smoke_gx3_8x2_diag1_run5day -restart gx1 40x4 droundrobin -restart tx1 40x4 dsectrobin -restart tx1 60x2 droundrobin,maskhalo +smoke gx3 1x8 diag1,run5day,evp1d +restart gx1 40x4 droundrobin,medium +restart tx1 40x4 dsectrobin,medium restart gx3 4x4 none restart gx3 10x4 maskhalo restart gx3 6x2 alt01 @@ -58,6 +58,9 @@ restart gx3 4x2 fsd12,debug,short smoke gx3 8x2 fsd12ww3,diag24,run1day smoke gx3 4x1 isotope,debug restart gx3 8x2 isotope +smoke gx3 4x1 snwITDrdg,snwgrain,icdefault,debug +smoke gx3 4x1 snw30percent,icdefault,debug +restart gx3 8x2 snwITDrdg,icdefault,snwgrain restart gx3 4x4 gx3ncarbulk,iobinary restart gx3 4x4 histall,precision8,cdf64 smoke gx3 30x1 bgcz,histall diff --git a/configuration/scripts/tests/comparelog.csh b/configuration/scripts/tests/comparelog.csh index d9e4a7a89..af6b2d76e 100755 --- a/configuration/scripts/tests/comparelog.csh +++ b/configuration/scripts/tests/comparelog.csh @@ -58,8 +58,8 @@ if (${filearg} == 1) then cat ${base_data} | grep -A 99999999 "total ice area (km^2)" | grep -e istep1: -e = | grep -iv "min, max, sum" >&! ${base_out} cat ${test_data} | grep -A 99999999 "total ice area (km^2)" | grep -e istep1: -e = | grep -iv "min, max, sum" >&! ${test_out} else - cp -f ${base_data} ${base_out} - cp -f ${test_data} ${test_out} + sed -n '/RunningUnitTest/,$p' ${base_data} >! ${base_out} + sed -n '/RunningUnitTest/,$p' ${test_data} >! 
${test_out} endif set basenum = `cat ${base_out} | wc -l` diff --git a/configuration/scripts/tests/io_suite.ts b/configuration/scripts/tests/io_suite.ts index 6fe1f589a..4d5129578 100644 --- a/configuration/scripts/tests/io_suite.ts +++ b/configuration/scripts/tests/io_suite.ts @@ -12,6 +12,7 @@ restart gx3 32x1 gx3ncarbulk,bgcz,histall,iobinary,precision8 restart gx3 16x2 gx3ncarbulk,bgcskl,histall,iobinary restart gx3 14x2 gx3ncarbulk,isotope,histall,iobinary,precision8 restart gx3 16x2 gx3ncarbulk,fsd12,histall,iobinary +restart gx3 8x4 gx3ncarbulk,debug,histall,iobinary,precision8,histinst restart gx3 32x1 debug,histall,ionetcdf restart gx3 15x2 alt01,histall,ionetcdf,precision8,cdf64 @@ -24,6 +25,7 @@ restart gx3 30x1 bgcz,histall,ionetcdf restart gx3 15x2 bgcskl,histall,ionetcdf,precision8 restart gx3 31x1 isotope,histall,ionetcdf,cdf64 restart gx3 14x2 fsd12,histall,ionetcdf,precision8 +restart gx3 32x1 debug,histall,ionetcdf,histinst restart gx3 16x2 debug,histall,iopio1,precision8,cdf64 restart gx3 14x2 alt01,histall,iopio1,cdf64 @@ -36,6 +38,7 @@ restart gx3 16x2 bgcz,histall,iopio1,precision8 restart gx3 30x1 bgcskl,histall,iopio1 restart gx3 8x4 isotope,histall,iopio1,precision8,cdf64 restart gx3 12x2 fsd12,histall,iopio1,cdf64 +restart gx3 16x2 debug,histall,iopio1,precision8,cdf64,histinst restart gx3 16x2 debug,histall,iopio2 restart gx3 14x2 alt01,histall,iopio2,precision8,cdf64 @@ -48,6 +51,7 @@ restart gx3 16x2 bgcz,histall,iopio2,cdf64 restart gx3 30x1 bgcskl,histall,iopio2,precision8 restart gx3 8x4 isotope,histall,iopio2 restart gx3 12x2 fsd12,histall,iopio2,precision8,cdf64 +restart gx3 16x2 debug,histall,iopio2,histinst restart gx3 16x2 debug,histall,iopio1p,precision8 restart gx3 14x2 alt01,histall,iopio1p @@ -60,6 +64,7 @@ restart gx3 16x2 bgcz,histall,iopio1p,precision8,cdf64 restart gx3 30x1 bgcskl,histall,iopio1p,cdf64 restart gx3 8x4 isotope,histall,iopio1p,precision8 restart gx3 12x2 fsd12,histall,iopio1p +restart gx3 16x2 debug,histall,iopio1p,precision8,histinst restart gx3 16x2 debug,histall,iopio2p,cdf64 restart gx3 14x2 alt01,histall,iopio2p,precision8 @@ -72,4 +77,5 @@ restart gx3 16x2 bgcz,histall,iopio2p restart gx3 30x1 bgcskl,histall,iopio2p,precision8,cdf64 restart gx3 8x4 isotope,histall,iopio2p,cdf64 restart gx3 12x2 fsd12,histall,iopio2p,precision8 +restart gx3 16x2 debug,histall,iopio2p,cdf64,histinst diff --git a/configuration/scripts/tests/prod_suite.ts b/configuration/scripts/tests/prod_suite.ts new file mode 100644 index 000000000..8793dfed2 --- /dev/null +++ b/configuration/scripts/tests/prod_suite.ts @@ -0,0 +1,4 @@ +# Test Grid PEs Sets BFB-compare +smoke gx1 64x1 qc,medium +smoke gx1 64x2 gx1prod,long,run10year + diff --git a/configuration/scripts/tests/reprosum_suite.ts b/configuration/scripts/tests/reprosum_suite.ts index d65370e0a..dd6a6d56b 100644 --- a/configuration/scripts/tests/reprosum_suite.ts +++ b/configuration/scripts/tests/reprosum_suite.ts @@ -8,4 +8,5 @@ logbfb gx3 1x20x5x29x80 dsectrobin,diag1,short,reprosum l logbfb gx3 8x2x8x10x20 droundrobin,diag1,reprosum logbfb_gx3_4x2x25x29x4_diag1_dslenderX2_reprosum logbfb gx3 6x2x50x58x1 droundrobin,diag1,reprosum logbfb_gx3_4x2x25x29x4_diag1_dslenderX2_reprosum logbfb gx3 6x2x4x29x18 dspacecurve,diag1,maskhalo,reprosum logbfb_gx3_4x2x25x29x4_diag1_dslenderX2_reprosum +logbfb gx3 17x2x1x1x800 droundrobin,diag1,maskhalo,reprosum logbfb_gx3_4x2x25x29x4_diag1_dslenderX2_reprosum #logbfb gx3 8x2x8x10x20 droundrobin,diag1 logbfb_gx3_4x2x25x29x4_diag1_dslenderX2 diff --git 
a/configuration/scripts/tests/test_unittest.script b/configuration/scripts/tests/test_unittest.script index 5f37b15ac..1db8dfe60 100644 --- a/configuration/scripts/tests/test_unittest.script +++ b/configuration/scripts/tests/test_unittest.script @@ -4,24 +4,33 @@ # cice.run returns -1 if run did not complete successfully ./cice.run -set res="$status" +set rres="$status" set log_file = `ls -t1 ${ICE_RUNDIR}/cice.runlog* | head -1` +grep ' TEST COMPLETED SUCCESSFULLY' ${log_file} +set tres="$status" + mv -f ${ICE_CASEDIR}/test_output ${ICE_CASEDIR}/test_output.prev cat ${ICE_CASEDIR}/test_output.prev | grep -iv "${ICE_TESTNAME} run" >! ${ICE_CASEDIR}/test_output mv -f ${ICE_CASEDIR}/test_output ${ICE_CASEDIR}/test_output.prev cat ${ICE_CASEDIR}/test_output.prev | grep -iv "${ICE_TESTNAME} test" >! ${ICE_CASEDIR}/test_output rm -f ${ICE_CASEDIR}/test_output.prev -set grade = PASS -if ( $res != 0 ) then - set grade = FAIL - echo "$grade ${ICE_TESTNAME} run " >> ${ICE_CASEDIR}/test_output - echo "$grade ${ICE_TESTNAME} test " >> ${ICE_CASEDIR}/test_output - exit 99 +set rgrade = PASS +if ( $rres != 0 ) then + set rgrade = FAIL +endif +set tgrade = PASS +if ( $tres != 0 ) then + set tgrade = FAIL endif -echo "$grade ${ICE_TESTNAME} run " >> ${ICE_CASEDIR}/test_output -echo "$grade ${ICE_TESTNAME} test " >> ${ICE_CASEDIR}/test_output +echo "$rgrade ${ICE_TESTNAME} run " >> ${ICE_CASEDIR}/test_output +echo "$tgrade ${ICE_TESTNAME} test " >> ${ICE_CASEDIR}/test_output + +if ( "$rgrade" == "FAIL" || "$tgrade" == "FAIL") then + echo "ERROR: Test failed" + exit 99 +endif diff --git a/doc/source/cice_index.rst b/doc/source/cice_index.rst index 2efcd0335..0a04b5e26 100644 --- a/doc/source/cice_index.rst +++ b/doc/source/cice_index.rst @@ -168,6 +168,7 @@ either Celsius or Kelvin units). "dms", "dimethyl sulfide concentration", "mmol/m\ :math:`^3`" "dmsp", "dimethyl sulfoniopropionate concentration", "mmol/m\ :math:`^3`" "dpscale", "time scale for flushing in permeable ice", ":math:`1\times 10^{-3}`" + "drhosdwind", "wind compaction factor for snow", "27.3 kg s/m\ :math:`^{4}`" "dragio", "drag coefficient for water on ice", "0.00536" "dSdt_slow_mode", "drainage strength parameter", "" "dsnow", "change in snow thickness", "m" @@ -256,6 +257,7 @@ either Celsius or Kelvin units). "fsnow", "snowfall rate", "kg/m\ :math:`^2`/s" "fsnowrdg", "snow fraction that survives in ridging", "0.5" "fsurf(n)(_f)", "net surface heat flux excluding fcondtop", "W/m\ :math:`^2`" + "fsloss", "rate of snow loss to leads", "kg/m\ :math:`^{2}` s" "fsw", "incoming shortwave radiation", "W/m\ :math:`^2`" "fswabs", "total absorbed shortwave radiation", "W/m\ :math:`^2`" "fswfac", "scaling factor to adjust ice quantities for updated data", "" @@ -393,6 +395,8 @@ either Celsius or Kelvin units). "meltb", "basal ice melt", "m" "meltl", "lateral ice melt", "m" "melts", "snow melt", "m" + "meltsliq", "snow melt mass", "kg/m\ :math:`^{2}`" + "meltsliqn", "snow melt mass in category n", "kg/m\ :math:`^{2}`" "meltt", "top ice melt", "m" "min_salin", "threshold for brine pockets", "0.1 ppt" "mlt_onset", "day of year that surface melt begins", "" @@ -556,14 +560,21 @@ either Celsius or Kelvin units). "rhofresh", "density of fresh water", "1000.0 kg/m\ :math:`^3`" "rhoi", "density of ice", "917. kg/m\ :math:`^3`" "rhos", "density of snow", "330. 
kg/m\ :math:`^3`" + "rhos_cmp", "density of snow due to wind compaction", "kg/m\ :math:`^3`" + "rhos_cnt", "density of ice and liquid content of snow", "kg/m\ :math:`^3`" "rhosi", "average sea ice density (for hbrine tracer)", "940. kg/m\ :math:`^3`" + "rhosmax", "maximum snow density", "450 kg/m\ :math:`^{3}`" + "rhosmin", "minimum snow density", "100 kg/m\ :math:`^{3}`" + "rhosnew", "new snow density", "100 kg/m\ :math:`^{3}`" "rhow", "density of seawater", "1026. kg/m\ :math:`^3`" "rnilyr", "real(nlyr)", "" "rside", "fraction of ice that melts laterally", "" - "rsnw_fresh", "freshly fallen snow grain radius", "100. :math:`\times` 10\ :math:`^{-6}` m" + "rsnw", "snow grain radius", "10\ :math:`^{-6}` m" + "rsnw_fall", "freshly fallen snow grain radius", "100. :math:`\times` 10\ :math:`^{-6}` m" "rsnw_melt", "melting snow grain radius", "1000. :math:`\times` 10\ :math:`^{-6}` m" "rsnw_nonmelt", "nonmelting snow grain radius", "500. :math:`\times` 10\ :math:`^{-6}` m" "rsnw_sig", "standard deviation of snow grain radius", "250. :math:`\times` 10\ :math:`^{-6}` m" + "rsnw_tmax", "maximum snow radius", "1500. :math:`\times` 10\ :math:`^{-6}` m" "runid", "identifier for run", "" "runtype", "type of initialization used", "" "**S**", "", "" @@ -586,6 +597,25 @@ either Celsius or Kelvin units). "snoice", "snow–ice formation", "m" "snowpatch", "length scale for parameterizing nonuniform snow coverage", "0.02 m" "skl_bgc", "biogeochemistry on/off", "" + "smassice", "mass of ice in snow from smice tracer", "kg/m\ :math:`^2`" + "smassliq", "mass of liquid in snow from smliq tracer", "kg/m\ :math:`^2`" + "snowage_drdt0", "initial rate of change of effective snow radius", " " + "snowage_rhos", "snow aging parameter (density)", " " + "snowage_kappa", "snow aging best-fit parameter", " " + "snowage_tau", "snow aging best-fit parameter", " " + "snowage_T", "snow aging parameter (temperature)", " " + "snowage_Tgrd", "snow aging parameter (temperature gradient)", " " + "snw_aging_table", "snow aging lookup table", " " + "snw_filename", "snowtable filename", " " + "snw_tau_fname", "snowtable file tau fieldname", " " + "snw_kappa_fname", "snowtable file kappa fieldname", " " + "snw_drdt0_fname", "snowtable file drdt0 fieldname", " " + "snw_rhos_fname", "snowtable file rhos fieldname", " " + "snw_Tgrd_fname", "snowtable file Tgrd fieldname", " " + "snw_T_fname", "snowtable file T fieldname", " " + "snwgrain", "activate snow metamorphosis", " " + "snwlvlfac", "fractional increase in snow depth for redistribution on ridges", "0.3" + "snwredist", "type of snow redistribution", " " "spval", "special value (single precision)", ":math:`10^{30}`", "" "spval_dbl", "special value (double precision)", ":math:`10^{30}`", "" "ss_tltx(y)", "sea surface in the x(y) direction", "m/m" @@ -666,6 +696,7 @@ either Celsius or Kelvin units). "update_ocn_f", "if true, include frazil ice fluxes in ocean flux fields", "" "use_leap_years", "if true, include leap days", "" "use_restart_time", "if true, use date from restart file", "" + "use_smliq_pnd", "use liquid in snow for ponds", " " "ustar_min", "minimum friction velocity under ice", "" "ucstr", "string identifying U grid for history variables", "" "uvel", "x-component of ice velocity", "m/s" @@ -691,6 +722,7 @@ either Celsius or Kelvin units). 
"wave_spectrum", "wave spectrum", "m\ :math:`^2`/s" "wavefreq", "wave frequencies", "1/s" "wind", "wind speed", "m/s" + "windmin", "minimum wind speed to compact snow", "10 m/s" "write_history", "if true, write history now", "" "write_ic", "if true, write initial conditions", "" "write_restart", "if 1, write restart now", "" diff --git a/doc/source/conf.py b/doc/source/conf.py index 4cf2f580d..099f65403 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -62,9 +62,9 @@ # built documents. # # The short X.Y version. -version = u'6.2.0' +version = u'6.3.0' # The full version, including alpha/beta/rc tags. -version = u'6.2.0' +version = u'6.3.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/doc/source/developer_guide/dg_driver.rst b/doc/source/developer_guide/dg_driver.rst index a10cb319a..637e91b68 100644 --- a/doc/source/developer_guide/dg_driver.rst +++ b/doc/source/developer_guide/dg_driver.rst @@ -65,7 +65,6 @@ The initialize calling sequence looks something like:: call init_thermo_vertical ! initialize vertical thermodynamics call icepack_init_itd(ncat, hin_max) ! ice thickness distribution if (tr_fsd) call icepack_init_fsd_bounds ! floe size distribution - call calendar(time) ! determine the initial date call init_forcing_ocn(dt) ! initialize sss and sst from data call init_state ! initialize the ice state call init_transport ! initialize horizontal transport @@ -74,10 +73,13 @@ The initialize calling sequence looks something like:: call init_diags ! initialize diagnostic output points call init_history_therm ! initialize thermo history variables call init_history_dyn ! initialize dynamic history variables + call calc_timesteps ! update timestep counter if not using npt_unit="1" call init_shortwave ! initialize radiative transfer + call advance_timestep ! advance the time step call init_forcing_atmo ! initialize atmospheric forcing (standalone) if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice call get_forcing* ! read forcing data (standalone) + if (tr_snow) call icepack_init_snow ! advanced snow physics See a **CICE_InitMod.F90** file for the latest. @@ -105,6 +107,13 @@ The run sequence within a time loop looks something like:: call update_state (dt_dyn, daidtd, dvidtd, dagedtd, offset) enddo + if (tr_snow) then ! advanced snow physics + do iblk = 1, nblocks + call step_snow (dt, iblk) + enddo + call update_state (dt) ! clean up + endif + do iblk = 1, nblocks call step_radiation (dt, iblk) call coupling_prep (iblk) diff --git a/doc/source/developer_guide/dg_dynamics.rst b/doc/source/developer_guide/dg_dynamics.rst index 47b54bde2..48dead1cb 100644 --- a/doc/source/developer_guide/dg_dynamics.rst +++ b/doc/source/developer_guide/dg_dynamics.rst @@ -6,7 +6,7 @@ Dynamics ============================ -The CICE **cicecore/** directory consists of the non icepack source code. Within that +The CICE **cicecore/** directory consists of the non icepack source code. Within that directory there are the following subdirectories **cicecore/cicedynB/analysis** contains higher level history and diagnostic routines. @@ -30,28 +30,19 @@ Dynamical Solvers -------------------- The dynamics solvers are found in **cicecore/cicedynB/dynamics/**. A couple of different solvers are -available including EVP, revised EVP, EAP and VP. The dynamics solver is specified in namelist with the -``kdyn`` variable. 
``kdyn=1`` is evp, ``kdyn=2`` is eap, ``kdyn=3`` is VP and revised EVP requires
-the ``revised_evp`` namelist flag be set to true.
-
-Multiple EVP solvers are supported thru the namelist flag ``kevp_kernel``. The standard implementation
-and current default is ``kevp_kernel=0``. In this case, the stress is solved on the regular decomposition
-via subcycling and calls to subroutine ``stress`` and subroutine ``stepu`` with MPI global sums required in each
-subcycling call. With ``kevp_kernel=2``, the data required to compute the stress is gathered to the root
-MPI process and the stress calculation is performed on the root task without any MPI global sums. OpenMP
-parallelism is supported in ``kevp_kernel=2``. The solutions with ``kevp_kernel`` set to 0 or 2 will
-not be bit-for-bit
-identical but should be the same to roundoff and produce the same climate. ``kevp_kernel=2`` may perform
-better for some configurations, some machines, and some pe counts. ``kevp_kernel=2`` is not supported
-with the tripole grid and is still being validated. Until ``kevp_kernel=2`` is fully validated, it will
-abort if set. To override the abort, use value 102 for testing.
+available including EVP, EAP and VP. The dynamics solver is specified in namelist with the
+``kdyn`` variable. ``kdyn=1`` is evp, ``kdyn=2`` is eap, ``kdyn=3`` is VP.
+
+Two alternative implementations of EVP are included. The first alternative is the Revised EVP, triggered when ``revised_evp`` is set to true. The second alternative is the 1d EVP solver, triggered when ``evp_algorithm`` is set to ``shared_mem_1d`` as opposed to the default setting of ``standard_2d``. The solutions with ``evp_algorithm`` set to ``standard_2d`` or ``shared_mem_1d`` will
+not be bit-for-bit identical when compared to each other, because of floating point round-off errors that occur unless strict compiler flags are used. ``evp_algorithm=shared_mem_1d`` is primarily built for OpenMP. If MPI domain splitting is used, the solver will run on the master processor only (see the illustrative namelist sketch below). ``evp_algorithm=shared_mem_1d`` is not supported
+with the tripole grid.


 Transport
 -----------------

-The transport (advection) methods are found in **cicecore/cicedynB/dynamics/**. Two methods are supported,
-upwind and remap. These are set in namelist via the ``advection`` variable.
+The transport (advection) methods are found in **cicecore/cicedynB/dynamics/**. Two methods are supported,
+upwind and remap. These are set in namelist via the ``advection`` variable.
 Transport can be disabled with the ``ktransport`` namelist variable.

@@ -90,7 +81,7 @@ Time Manager

 Time manager data is module data in **cicecore/shared/ice_calendar.F90**. Much of the time manager
 data is public and operated on during the model timestepping. The model timestepping actually takes
-place in the **CICE_RunMod.F90** file which is part of the driver code.
+place in the **CICE_RunMod.F90** file which is part of the driver code.

 The time manager was updated in early 2021. Additional information about the time manager can be found here: :ref:`timemanagerplus`

@@ -100,12 +91,12 @@ Communication
 ------------------

 Two low-level communications packages, mpi and serial, are provided as part of CICE. This software
-provides a middle layer between the model and the underlying libraries. Only the CICE mpi or
+provides a middle layer between the model and the underlying libraries. Only the CICE mpi or
 serial directories are compiled with CICE, not both.
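As a quick editorial illustration of the solver options in the dg_dynamics text above: in a standalone run these choices are made in the ``ice_in`` namelist. The sketch below is a minimal excerpt, assuming the ``kdyn``, ``revised_evp`` and ``evp_algorithm`` options live in the ``dynamics_nml`` group with the defaults named in this patch; check the released ``ice_in`` before relying on it.

    &dynamics_nml
      kdyn          = 1                ! 1 = EVP, 2 = EAP, 3 = VP
      revised_evp   = .false.          ! .true. selects the revised EVP iteration
      evp_algorithm = 'standard_2d'    ! 'shared_mem_1d' selects the 1d OpenMP solver
    /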
-**cicedynB/infrastructure/comm/mpi/**
+**cicedynB/infrastructure/comm/mpi/**
 is based on MPI and provides various methods to do halo updates, global sums, gather/scatter, broadcasts
-and similar using some fairly generic interfaces to isolate the MPI calls in the code.
+and similar using some fairly generic interfaces to isolate the MPI calls in the code.

 **cicedynB/infrastructure/comm/serial/** supports the same interfaces, but operates
 in shared memory mode with no MPI. The serial library will be used, by default in the CICE scripts,
@@ -124,7 +115,7 @@ case. This has to be set before CICE is built.

 **cicedynB/infrastructure/io/io_netcdf/** is the default for the standalone CICE model, and it supports writing history and restart files in netcdf format using
 standard netcdf calls. It does this by writing from and reading to the root task and
-gathering and scattering fields from the root task to support model parallelism.
+gathering and scattering fields from the root task to support model parallelism.

 **cicedynB/infrastructure/io/io_binary/** supports files in binary format using a gather/scatter
 approach and reading to and writing from the root task.
@@ -134,4 +125,3 @@
 is a parallel io library (https://github.com/NCAR/ParallelIO) that supports reading and writing of
 binary and netcdf files through various interfaces including netcdf and pnetcdf. pio is generally
 more parallel in memory even when using serial netcdf than the standard gather/scatter methods,
 and it provides parallel read/write capabilities by optionally linking and using pnetcdf.
-
diff --git a/doc/source/developer_guide/dg_forcing.rst b/doc/source/developer_guide/dg_forcing.rst
index 0c0380538..aea6d8ef6 100644
--- a/doc/source/developer_guide/dg_forcing.rst
+++ b/doc/source/developer_guide/dg_forcing.rst
@@ -180,7 +180,7 @@ constant thereafter. Different conditions can be specified thru the

 .. _box2001forcing:

 Box2001 Atmosphere Forcing
--------------------------
+---------------------------

 The box2001 forcing dataset is generated internally. No files are read. The
 dataset is used to test an idealized box case as defined in :cite:`Hunke01`.
diff --git a/doc/source/science_guide/sg_dynamics.rst b/doc/source/science_guide/sg_dynamics.rst
index ecef531b4..d4e209d8a 100644
--- a/doc/source/science_guide/sg_dynamics.rst
+++ b/doc/source/science_guide/sg_dynamics.rst
@@ -6,10 +6,9 @@ Dynamics
========

 There are different approaches in the CICE code for representing sea ice
-rheology and for solving the sea ice momentum equation. The
-elastic-viscous-plastic (EVP) model represents a modification of the
-standard viscous-plastic (VP) model for sea ice dynamics
-:cite:`Hibler79`. The elastic-anisotropic-plastic (EAP) model,
+rheology and for solving the sea ice momentum equation. The viscous-plastic (VP) model was originally developed by :cite:`Hibler79`;
+the elastic-viscous-plastic (EVP) model :cite:`Hunke97` represents a modification of the
+standard VP model for sea ice dynamics. The elastic-anisotropic-plastic (EAP) model,
 on the other hand, explicitly accounts for the observed sub-continuum
 anisotropy of the sea ice cover :cite:`Wilchinsky06,Weiss09`. If
 ``kdyn`` = 1 in the namelist then the EVP model is used (module
@@ -68,7 +67,7 @@ vertical direction:

 where :math:`m` is the combined mass of ice and snow per unit area and
 :math:`\vec{\tau}_a` and :math:`\vec{\tau}_w` are wind and ocean
-stresses, respectively.
The term :math:`\vec{\tau}_b` is a seabed stress (also referred to as basal stress) that represents the grounding of pressure ridges in shallow water :cite:`Lemieux16`. The mechanical properties of the ice are represented by the internal stress tensor :math:`\sigma_{ij}`. The other two terms on @@ -84,11 +83,11 @@ For clarity, the two components of Equation :eq:`vpmom` are .. math:: \begin{aligned} - m{\partial u\over\partial t} &= {\partial\sigma_{1j}\over\partial x_j} + \tau_{ax} + + m{\partial u\over\partial t} &= {\partial\sigma_{1j}\over\partial x_j} + \tau_{ax} + a_i c_w \rho_w \left|{\bf U}_w - {\bf u}\right| \left[\left(U_w-u\right)\cos\theta - \left(V_w-v\right)\sin\theta\right] -C_bu +mfv - mg{\partial H_\circ\over\partial x}, \\ - m{\partial v\over\partial t} &= {\partial\sigma_{2j}\over\partial x_j} + \tau_{ay} + + m{\partial v\over\partial t} &= {\partial\sigma_{2j}\over\partial x_j} + \tau_{ay} + a_i c_w \rho_w \left|{\bf U}_w - {\bf u}\right| \left[\left(U_w-u\right)\sin\theta + \left(V_w-v\right)\cos\theta\right] -C_bv-mfu - mg{\partial H_\circ\over\partial y}. \end{aligned} @@ -111,14 +110,14 @@ Elastic-Viscous-Plastic The momentum equation is discretized in time as follows, for the classic EVP approach. In the code, -:math:`{\tt vrel}=a_i c_w \rho_w\left|{\bf U}_w - {\bf u}^k\right|` and -:math:`C_b=T_b \left( \sqrt{(u^k)^2+(v^k)^2}+u_0 \right)^{-1}`, +:math:`{\tt vrel}=a_i c_w \rho_w\left|{\bf U}_w - {\bf u}^k\right|` and +:math:`C_b=T_b \left( \sqrt{(u^k)^2+(v^k)^2}+u_0 \right)^{-1}`, where :math:`k` denotes the subcycling step. The following equations illustrate the time discretization and define some of the other variables used in the code. .. math:: - \underbrace{\left({m\over\Delta t_e}+{\tt vrel} \cos\theta\ + C_b \right)}_{\tt cca} u^{k+1} + \underbrace{\left({m\over\Delta t_e}+{\tt vrel} \cos\theta\ + C_b \right)}_{\tt cca} u^{k+1} - \underbrace{\left(mf+{\tt vrel}\sin\theta\right)}_{\tt ccb}v^{k+1} = &\underbrace{{\partial\sigma_{1j}^{k+1}\over\partial x_j}}_{\tt strintx} + \underbrace{\tau_{ax} - mg{\partial H_\circ\over\partial x} }_{\tt forcex} \\ @@ -126,7 +125,7 @@ variables used in the code. :label: umom .. math:: - \underbrace{\left(mf+{\tt vrel}\sin\theta\right)}_{\tt ccb} u^{k+1} + \underbrace{\left(mf+{\tt vrel}\sin\theta\right)}_{\tt ccb} u^{k+1} + \underbrace{\left({m\over\Delta t_e}+{\tt vrel} \cos\theta + C_b \right)}_{\tt cca}v^{k+1} = &\underbrace{{\partial\sigma_{2j}^{k+1}\over\partial x_j}}_{\tt strinty} + \underbrace{\tau_{ay} - mg{\partial H_\circ\over\partial y} }_{\tt forcey} \\ @@ -139,7 +138,7 @@ We solve this system of equations analytically for :math:`u^{k+1}` and :math:`v^{k+1}`. Define .. math:: - \hat{u} = F_u + \tau_{ax} - mg{\partial H_\circ\over\partial x} + {\tt vrel} \left(U_w\cos\theta - V_w\sin\theta\right) + {m\over\Delta t_e}u^k + \hat{u} = F_u + \tau_{ax} - mg{\partial H_\circ\over\partial x} + {\tt vrel} \left(U_w\cos\theta - V_w\sin\theta\right) + {m\over\Delta t_e}u^k :label: cevpuhat .. math:: @@ -169,7 +168,7 @@ where .. math:: b = mf + {\tt vrel}\sin\theta. :label: cevpb - + .. _vp-momentum: Viscous-Plastic @@ -248,52 +247,52 @@ stress are expressed as :math:`\tau_{bx}=C_bu` and coefficient. The two parameterizations differ in their calculation of -the :math:`C_b` coefficients. +the :math:`C_b` coefficients. Note that the user must provide a bathymetry field for using these grounding schemes. 
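As an aside before the two seabed stress parameterizations are described: enabling either scheme is also a namelist choice. A minimal sketch, assuming the ``seabed_stress`` and ``seabed_stress_method`` flags used by recent CICE releases:

    &dynamics_nml
      seabed_stress        = .true.    ! include the seabed (basal) stress term
      seabed_stress_method = 'LKD'     ! linear keel draft; 'probabilistic' selects the ITD-based method
    /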
It is suggested to have a bathymetry field with water depths larger than 5 m that represents well shallow water (less than 30 m) regions such as the Laptev Sea -and the East Siberian Sea. +and the East Siberian Sea. Seabed stress based on linear keel draft (LKD) -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ This parameterization for the seabed stress is described in :cite:`Lemieux16`. It assumes that the largest keel draft varies linearly with the mean thickness in a grid cell (i.e. sea ice volume). The :math:`C_b` coefficients are expressed as .. math:: C_b= k_2 \max [0,(h_u - h_{cu})] e^{-\alpha_b * (1 - a_u)} (\sqrt{u^2+v^2}+u_0)^{-1}, \\ - :label: Cb + :label: Cb -where :math:`k_2` determines the maximum seabed stress that can be sustained by the grounded parameterized ridge(s), :math:`u_0` -is a small residual velocity and :math:`\alpha_b` is a parameter to ensure that the seabed stress quickly drops when -the ice concentration is smaller than 1. In the code, :math:`k_2 \max [0,(h_u - h_{cu})] e^{-\alpha_b * (1 - a_u)}` is defined as -:math:`T_b`. The quantities :math:`h_u`, :math:`a_{u}` and :math:`h_{cu}` are calculated at -the 'u' point based on local ice conditions (surrounding tracer points). They are respectively given by +where :math:`k_2` determines the maximum seabed stress that can be sustained by the grounded parameterized ridge(s), :math:`u_0` +is a small residual velocity and :math:`\alpha_b` is a parameter to ensure that the seabed stress quickly drops when +the ice concentration is smaller than 1. In the code, :math:`k_2 \max [0,(h_u - h_{cu})] e^{-\alpha_b * (1 - a_u)}` is defined as +:math:`T_b`. The quantities :math:`h_u`, :math:`a_{u}` and :math:`h_{cu}` are calculated at +the 'u' point based on local ice conditions (surrounding tracer points). They are respectively given by .. math:: h_u=\max[v_i(i,j),v_i(i+1,j),v_i(i,j+1),v_i(i+1,j+1)], \\ - :label: hu - + :label: hu + .. math:: a_u=\max[a_i(i,j),a_i(i+1,j),a_i(i,j+1),a_i(i+1,j+1)]. \\ - :label: au - + :label: au + .. math:: h_{cu}=a_u h_{wu} / k_1, \\ :label: hcu -where the :math:`a_i` and :math:`v_i` are the total ice concentrations and ice volumes around the :math:`u` point :math:`i,j` and -:math:`k_1` is a parameter that defines the critical ice thickness :math:`h_{cu}` at which the parameterized -ridge(s) reaches the seafloor for a water depth :math:`h_{wu}=\min[h_w(i,j),h_w(i+1,j),h_w(i,j+1),h_w(i+1,j+1)]`. Given the formulation of :math:`C_b` in equation :eq:`Cb`, the seabed stress components are non-zero only -when :math:`h_u > h_{cu}`. +where the :math:`a_i` and :math:`v_i` are the total ice concentrations and ice volumes around the :math:`u` point :math:`i,j` and +:math:`k_1` is a parameter that defines the critical ice thickness :math:`h_{cu}` at which the parameterized +ridge(s) reaches the seafloor for a water depth :math:`h_{wu}=\min[h_w(i,j),h_w(i+1,j),h_w(i,j+1),h_w(i+1,j+1)]`. Given the formulation of :math:`C_b` in equation :eq:`Cb`, the seabed stress components are non-zero only +when :math:`h_u > h_{cu}`. -The maximum seabed stress depends on the weight of the ridge -above hydrostatic balance and the value of :math:`k_2`. It is, however, the parameter :math:`k_1` that has the most notable impact on the simulated extent of landfast ice. -The value of :math:`k_1` can be changed at runtime using the namelist variable ``k1``. +The maximum seabed stress depends on the weight of the ridge +above hydrostatic balance and the value of :math:`k_2`. 
It is, however, the parameter :math:`k_1` that has the most notable impact on the simulated extent of landfast ice. +The value of :math:`k_1` can be changed at runtime using the namelist variable ``k1``. -To prevent unrealistic grounding, :math:`T_b` is set to zero when :math:`h_{wu}` -is larger than 30 m. This maximum value is chosen based on observations of large +To prevent unrealistic grounding, :math:`T_b` is set to zero when :math:`h_{wu}` +is larger than 30 m. This maximum value is chosen based on observations of large keels in the Arctic Ocean :cite:`Amundrud04`. Seabed stress based on probabilistic approach @@ -304,11 +303,11 @@ on the probability of contact between the ice thickness distribution (ITD) and the seabed. Multi-thickness category models such as CICE typically use a few thickness categories (5-10). This crude representation of the ITD does not resolve the tail of the ITD, which is crucial for grounding -events. +events. To represent the tail of the distribution, the simulated ITD is converted to a positively skewed probability function :math:`f(x)` -with :math:`x` the sea ice thickness. The mean and variance are set +with :math:`x` the sea ice thickness. The mean and variance are set equal to the ones of the original ITD. A log-normal distribution is used for :math:`f(x)`. @@ -317,7 +316,7 @@ distribution :math:`b(y)`. The mean of :math:`b(y)` comes from the user's bathym standard deviation :math:`\sigma_b` is currently fixed to 2.5 m. Two possible improvements would be to specify a distribution based on high resolution bathymetry data and to take into account variations of the -water depth due to changes in the sea surface height. +water depth due to changes in the sea surface height. Assuming hydrostatic balance and neglecting the impact of snow, the draft of floating ice of thickness :math:`x` is :math:`D(x)=\rho_i x / \rho_w` where :math:`\rho_i` is the sea ice density. Hence, the probability of contact (:math:`P_c`) between the @@ -337,7 +336,7 @@ and then obtains :math:`T_{bt}` by multiplying :math:`T_{bt}^*` by :math:`e^{-\a To calculate :math:`T_{bt}^*` in equation :eq:`Tbt`, :math:`f(x)` and :math:`b(y)` are discretized using many small categories (100). :math:`f(x)` is discretized between 0 and 50 m while :math:`b(y)` is truncated at plus and minus three :math:`\sigma_b`. :math:`f(x)` is also modified by setting it to zero after a certain percentile of the log-normal distribution. This percentile, which is currently set to 99.7%, notably affects the simulation of landfast ice and is used as a tuning parameter. Its impact is similar to the one of the parameter :math:`k_1` for the LKD method. -:math:`T_b` at the 'u' point is calculated from the 't' point values around it according to +:math:`T_b` at the 'u' point is calculated from the 't' point values around it according to .. math:: T_b=\max[T_{bt}(i,j),T_{bt}(i+1,j),T_{bt}(i,j+1),T_{bt}(i+1,j+1)]. \\ @@ -362,13 +361,13 @@ divergence, :math:`D_D`, and the horizontal tension and shearing strain rates, :math:`D_T` and :math:`D_S` respectively: .. math:: - D_D = \dot{\epsilon}_{11} + \dot{\epsilon}_{22}, + D_D = \dot{\epsilon}_{11} + \dot{\epsilon}_{22}, .. math:: - D_T = \dot{\epsilon}_{11} - \dot{\epsilon}_{22}, + D_T = \dot{\epsilon}_{11} - \dot{\epsilon}_{22}, .. 
math:: - D_S = 2\dot{\epsilon}_{12}, + D_S = 2\dot{\epsilon}_{12}, where @@ -376,12 +375,12 @@ where \dot{\epsilon}_{ij} = {1\over 2}\left({{\partial u_i}\over{\partial x_j}} + {{\partial u_j}\over{\partial x_i}}\right) CICE can output the internal ice pressure which is an important field to support navigation in ice-infested water. -The internal ice pressure (``sigP``) is the average of the normal stresses multiplied by :math:`-1` and +The internal ice pressure (``sigP``) is the average of the normal stresses multiplied by :math:`-1` and is therefore simply equal to :math:`-\sigma_1/2`. -Following the approach of :cite:`Konig10` (see also :cite:`Lemieux16`), the -elliptical yield curve can be modified such that the ice has isotropic tensile strength. -The tensile strength :math:`T_p` is expressed as a fraction of the ice strength :math:`P`, that is :math:`T_p=k_t P` +Following the approach of :cite:`Konig10` (see also :cite:`Lemieux16`), the +elliptical yield curve can be modified such that the ice has isotropic tensile strength. +The tensile strength :math:`T_p` is expressed as a fraction of the ice strength :math:`P`, that is :math:`T_p=k_t P` where :math:`k_t` should be set to a value between 0 and 1 (this can be changed at runtime with the namelist parameter ``Ktens``). The ice strength :math:`P` is a function of the ice thickness distribution as @@ -403,10 +402,10 @@ where :math:`\eta` and :math:`\zeta` are the bulk and shear viscosities. An elliptical yield curve is used, with the viscosities given by .. math:: - \zeta = {P(1+k_t)\over 2\Delta}, + \zeta = {P(1+k_t)\over 2\Delta}, .. math:: - \eta = {P(1+k_t)\over {2\Delta e^2}}, + \eta = {P(1+k_t)\over {2\Delta e^2}}, where @@ -447,7 +446,7 @@ dynamics component is subcycled within the time step, and the elastic parameter :math:`E` is defined in terms of a damping timescale :math:`T` for elastic waves, :math:`\Delta t_e < T < \Delta t`, as -.. math:: +.. math:: E = {\zeta\over T}, where :math:`T=E_\circ\Delta t` and :math:`E_\circ` (eyc) is a tunable @@ -455,7 +454,7 @@ parameter less than one. Including the modification proposed by :cite:`Bouillon1 .. math:: \begin{aligned} - {\partial\sigma_1\over\partial t} + {\sigma_1\over 2T} + {\partial\sigma_1\over\partial t} + {\sigma_1\over 2T} + {P_R(1-k_t)\over 2T} &=& {P(1+k_t)\over 2T\Delta} D_D, \\ {\partial\sigma_2\over\partial t} + {\sigma_2\over 2T} &=& {P(1+k_t)\over 2Te^2\Delta} D_T,\\ @@ -466,14 +465,14 @@ Once discretized in time, these last three equations are written as .. math:: \begin{aligned} - {(\sigma_1^{k+1}-\sigma_1^{k})\over\Delta t_e} + {\sigma_1^{k+1}\over 2T} + {(\sigma_1^{k+1}-\sigma_1^{k})\over\Delta t_e} + {\sigma_1^{k+1}\over 2T} + {P_R^k(1-k_t)\over 2T} &=& {P(1+k_t)\over 2T\Delta^k} D_D^k, \\ {(\sigma_2^{k+1}-\sigma_2^{k})\over\Delta t_e} + {\sigma_2^{k+1}\over 2T} &=& {P(1+k_t)\over 2Te^2\Delta^k} D_T^k,\\ {(\sigma_{12}^{k+1}-\sigma_{12}^{k})\over\Delta t_e} + {\sigma_{12}^{k+1}\over 2T} &=& {P(1+k_t)\over 4Te^2\Delta^k}D_S^k,\end{aligned} - :label: sigdisc - + :label: sigdisc + where :math:`k` denotes again the subcycling step. All coefficients on the left-hand side are constant except for :math:`P_R`. This modification compensates for the decreased efficiency of including @@ -498,7 +497,7 @@ anisotropy of the sea ice cover is accounted for by an additional prognostic variable, the structure tensor :math:`\mathbf{A}` defined by -.. math:: +.. math:: {\mathbf A}=\int_{\mathbb{S}}\vartheta(\mathbf r)\mathbf r\mathbf r d\mathbf r\label{structuretensor}. 
where :math:`\mathbb{S}` is a unit-radius circle; **A** is a unit @@ -517,7 +516,7 @@ components of :math:`\mathbf{A}`, :math:`A_{1}/A_{2}`, are derived from the phenomenological evolution equation for the structure tensor :math:`\mathbf A`, -.. math:: +.. math:: \frac{D\mathbf{A}}{D t}=\mathbf{F}_{iso}(\mathbf{A})+\mathbf{F}_{frac}(\mathbf{A},\boldsymbol\sigma), :label: evolutionA @@ -581,7 +580,7 @@ of two equations: .. math:: \begin{aligned} - \frac{\partial A_{11}}{\partial t}&=&-k_{t}\left(A_{11}-\frac{1}{2}\right)+M_{11} \mbox{,} \\ + \frac{\partial A_{11}}{\partial t}&=&-k_{t}\left(A_{11}-\frac{1}{2}\right)+M_{11} \mbox{,} \\ \frac{\partial A_{12}}{\partial t}&=&-k_{t} A_{12}+M_{12} \mbox{,}\end{aligned} where the first terms on the right hand side correspond to the @@ -618,7 +617,7 @@ but in a continuum-scale sea ice region the floes can possess different orientations in different places and we take the mean sea ice stress over a collection of floes to be given by the average -.. math:: +.. math:: \boldsymbol\sigma^{EAP}(h)=P_{r}(h)\int_{\mathbb{S}}\vartheta(\mathbf r)\left[\boldsymbol\sigma_{r}^{b}(\mathbf r)+ k \boldsymbol\sigma_{s}^{b}(\mathbf r)\right]d\mathbf r :label: stressaverage @@ -633,11 +632,11 @@ efficient, explicit numerical algorithm used to solve the full sea ice momentum balance. We use the analogous EAP stress equations, .. math:: - \frac{\partial \sigma_{1}}{\partial t}+\frac{\sigma_1}{2T} = \frac{\sigma^{EAP}_{1}}{2T} \mbox{,} + \frac{\partial \sigma_{1}}{\partial t}+\frac{\sigma_1}{2T} = \frac{\sigma^{EAP}_{1}}{2T} \mbox{,} :label: EAPsigma1 .. math:: - \frac{\partial \sigma_{2}}{\partial t}+\frac{\sigma_2}{2T} = \frac{\sigma^{EAP}_{2}}{2T} \mbox{,} + \frac{\partial \sigma_{2}}{\partial t}+\frac{\sigma_2}{2T} = \frac{\sigma^{EAP}_{2}}{2T} \mbox{,} :label: EAPsigma2 .. math:: @@ -676,44 +675,44 @@ of the dynamics. Revised approach **************** -The revised EVP approach is based on a pseudo-time iterative scheme :cite:`Lemieux12`, :cite:`Bouillon13`, :cite:`Kimmritz15`. By construction, the revised EVP approach should lead to the VP solution -(given the right numerical parameters and a sufficiently large number of iterations). To do so, the inertial term is formulated such that it matches the backward Euler approach of -implicit solvers and there is an additional term for the pseudo-time iteration. Hence, with the revised approach, the discretized momentum equations :eq:`umom` and :eq:`vmom` become +The revised EVP approach is based on a pseudo-time iterative scheme :cite:`Lemieux12`, :cite:`Bouillon13`, :cite:`Kimmritz15`. By construction, the revised EVP approach should lead to the VP solution +(given the right numerical parameters and a sufficiently large number of iterations). To do so, the inertial term is formulated such that it matches the backward Euler approach of +implicit solvers and there is an additional term for the pseudo-time iteration. Hence, with the revised approach, the discretized momentum equations :eq:`umom` and :eq:`vmom` become .. 
math:: - {\beta^*(u^{k+1}-u^k)\over\Delta t_e} + {m(u^{k+1}-u^n)\over\Delta t} + {\left({\tt vrel} \cos\theta + C_b \right)} u^{k+1} + {\beta^*(u^{k+1}-u^k)\over\Delta t_e} + {m(u^{k+1}-u^n)\over\Delta t} + {\left({\tt vrel} \cos\theta + C_b \right)} u^{k+1} - {\left(mf+{\tt vrel}\sin\theta\right)} v^{k+1} - = {{\partial\sigma_{1j}^{k+1}\over\partial x_j}} + = {{\partial\sigma_{1j}^{k+1}\over\partial x_j}} + {\tau_{ax} - mg{\partial H_\circ\over\partial x} } + {\tt vrel} {\left(U_w\cos\theta-V_w\sin\theta\right)}, :label: umomr .. math:: - {\beta^*(v^{k+1}-v^k)\over\Delta t_e} + {m(v^{k+1}-v^n)\over\Delta t} + {\left({\tt vrel} \cos\theta + C_b \right)}v^{k+1} - + {\left(mf+{\tt vrel}\sin\theta\right)} u^{k+1} - = {{\partial\sigma_{2j}^{k+1}\over\partial x_j}} + {\beta^*(v^{k+1}-v^k)\over\Delta t_e} + {m(v^{k+1}-v^n)\over\Delta t} + {\left({\tt vrel} \cos\theta + C_b \right)}v^{k+1} + + {\left(mf+{\tt vrel}\sin\theta\right)} u^{k+1} + = {{\partial\sigma_{2j}^{k+1}\over\partial x_j}} + {\tau_{ay} - mg{\partial H_\circ\over\partial y} } + {\tt vrel}{\left(U_w\sin\theta+V_w\cos\theta\right)}, :label: vmomr -where :math:`\beta^*` is a numerical parameter and :math:`u^n, v^n` are the components of the previous time level solution. +where :math:`\beta^*` is a numerical parameter and :math:`u^n, v^n` are the components of the previous time level solution. With :math:`\beta=\beta^* \Delta t \left( m \Delta t_e \right)^{-1}` :cite:`Bouillon13`, these equations can be written as - + .. math:: - \underbrace{\left((\beta+1){m\over\Delta t}+{\tt vrel} \cos\theta\ + C_b \right)}_{\tt cca} u^{k+1} + \underbrace{\left((\beta+1){m\over\Delta t}+{\tt vrel} \cos\theta\ + C_b \right)}_{\tt cca} u^{k+1} - \underbrace{\left(mf+{\tt vrel}\sin\theta\right)}_{\tt ccb}v^{k+1} - = \underbrace{{\partial\sigma_{1j}^{k+1}\over\partial x_j}}_{\tt strintx} + = \underbrace{{\partial\sigma_{1j}^{k+1}\over\partial x_j}}_{\tt strintx} + \underbrace{\tau_{ax} - mg{\partial H_\circ\over\partial x} }_{\tt forcex} + {\tt vrel}\underbrace{\left(U_w\cos\theta-V_w\sin\theta\right)}_{\tt waterx} + {m\over\Delta t}(\beta u^k + u^n), :label: umomr2 .. math:: - \underbrace{\left(mf+{\tt vrel}\sin\theta\right)}_{\tt ccb} u^{k+1} + \underbrace{\left(mf+{\tt vrel}\sin\theta\right)}_{\tt ccb} u^{k+1} + \underbrace{\left((\beta+1){m\over\Delta t}+{\tt vrel} \cos\theta + C_b \right)}_{\tt cca}v^{k+1} - = \underbrace{{\partial\sigma_{2j}^{k+1}\over\partial x_j}}_{\tt strinty} + = \underbrace{{\partial\sigma_{2j}^{k+1}\over\partial x_j}}_{\tt strinty} + \underbrace{\tau_{ay} - mg{\partial H_\circ\over\partial y} }_{\tt forcey} + {\tt vrel}\underbrace{\left(U_w\sin\theta+V_w\cos\theta\right)}_{\tt watery} + {m\over\Delta t}(\beta v^k + v^n), - :label: vmomr2 + :label: vmomr2 At this point, the solutions :math:`u^{k+1}` and :math:`v^{k+1}` are obtained in the same manner as for the standard EVP approach (see equations :eq:`cevpuhat` to :eq:`cevpb`). @@ -721,16 +720,26 @@ Introducing another numerical parameter :math:`\alpha=2T \Delta t_e ^{-1}` :cite .. 
math::
   \begin{aligned}
-    {\alpha (\sigma_1^{k+1}-\sigma_1^{k})} + {\sigma_1^{k}}
+    {\alpha (\sigma_1^{k+1}-\sigma_1^{k})} + {\sigma_1^{k}}
     + {P_R^k(1-k_t)} &=& {P(1+k_t)\over \Delta^k} D_D^k, \\
     {\alpha (\sigma_2^{k+1}-\sigma_2^{k})} + {\sigma_2^{k}} &=& {P(1+k_t)\over e^2\Delta^k} D_T^k,\\
     {\alpha (\sigma_{12}^{k+1}-\sigma_{12}^{k})} + {\sigma_{12}^{k}} &=& {P(1+k_t)\over 2e^2\Delta^k}D_S^k,\end{aligned}
-
-where as opposed to the classic EVP, the second term in each equation is at iteration :math:`k` :cite:`Bouillon13`. Also, as opposed to the classic EVP,
-:math:`\Delta t_e` times the number of subcycles (or iterations) does not need to be equal to the advective time step :math:`\Delta t`.
-Finally, as with the classic EVP approach, the stresses are initialized using the previous time level values.
-The revised EVP is activated by setting the namelist parameter ``revised_evp = true``.
-In the code :math:`\alpha` is ``arlx`` and :math:`\beta` is ``brlx``. The values of ``arlx`` and ``brlx`` can be set in the namelist.
+
+where as opposed to the classic EVP, the second term in each equation is at iteration :math:`k` :cite:`Bouillon13`. Also, as opposed to the classic EVP,
+:math:`\Delta t_e` times the number of subcycles (or iterations) does not need to be equal to the advective time step :math:`\Delta t`.
+Finally, as with the classic EVP approach, the stresses are initialized using the previous time level values.
+The revised EVP is activated by setting the namelist parameter ``revised_evp = true``.
+In the code :math:`\alpha` is ``arlx`` and :math:`\beta` is ``brlx``. The values of ``arlx`` and ``brlx`` can be set in the namelist.
 It is recommended to use large values of these parameters and to set :math:`\alpha=\beta` :cite:`Kimmritz15`.
+
+.. _evp1d:
+
+****************
+1d EVP solver
+****************
+
+The standard EVP solver iterates hundreds of times, and each iteration involves MPI communication but only a limited number of calculations. This limits how much the solver can be optimized, since its speed is determined primarily by the communication. The 1d EVP solver avoids this communication by utilizing shared memory, which removes the need for calls to the MPI communicator and thereby improves the potential scalability of the code. Performance is best in a pure shared-memory setup, but the solver is also functional in MPI and hybrid MPI/OpenMP setups, where it runs on the master processor alone.
+
+The scalability of geophysical models is, in general terms, limited by memory usage. To optimize this, the 1d EVP solver solves the same equations outlined in the section :ref:`stress-evp` but transforms all matrices to vectors (1d arrays), which maps better onto the computer hardware. The vectorization and the contiguous placement of arrays in memory make it easier for the compiler to optimize the code and to pass pointers instead of copying the vectors. The 1d solver is not supported for tripole grids, and the code will abort if this combination is attempted.
diff --git a/doc/source/science_guide/sg_tracers.rst b/doc/source/science_guide/sg_tracers.rst
index bbd18eb1f..215c13d08 100644
--- a/doc/source/science_guide/sg_tracers.rst
+++ b/doc/source/science_guide/sg_tracers.rst
@@ -90,6 +90,10 @@ is not in use.
"tr_iso", "n_iso", "vice, vsno", "nt_iso"," " "tr_brine", " ", "vice", "nt_fbri", " " "tr_fsd","nfsd","aice","nt_fsd"," " + "tr_snow","nslyr","vsno","nt_rsnw"," " + " ","nslyr","vsno","nt_rhos"," " + " ","nslyr","vsno","nt_smice"," " + " ","nslyr","vsno","nt_smliq"," " "solve_zsal", "n_trzs", "fbri or (a,v)ice", "nt_bgc_S", " " "tr_bgc_N", "n_algae", "fbri or (a,v)ice", "nt_bgc_N", "nlt_bgc_N" "tr_bgc_Nit", " ", "fbri or (a,v)ice", "nt_bgc_Nit", "nlt_bgc_Nit" @@ -115,4 +119,4 @@ Users may add any number of additional tracers that are transported conservative provided that the dependency ``trcr_depend`` is defined appropriately. See Section :ref:`addtrcr` for guidance on adding tracers. -Please see the `Icepack documentation `_ for additional information about tracers that depend on other tracers, the floe size distribution, age of the ice, aerosols, water isotopes, brine height, and the sea ice ecosystem. +Please see the `Icepack documentation `_ for additional information about tracers that depend on other tracers, the floe size distribution, advanced snow physics, age of the ice, aerosols, water isotopes, brine height, and the sea ice ecosystem. diff --git a/doc/source/user_guide/figures/CICE_Bgrid.png b/doc/source/user_guide/figures/CICE_Bgrid.png new file mode 100755 index 0000000000000000000000000000000000000000..09356a0c6223beccd539393ca28e722c0767c861 GIT binary patch literal 53070 zcmd?R2UJsAyC@ozA|Oo!R5~a~M+}52NU|AkgW;GlU=z-V^iAkH7=G=MQenf-+kfu)qbr*-hw85GW^zbjOGQxF)u}qx~EN zqNBq7hu6%pA`Jqy{<uV>zgB z&k|B{ZnA-ObSIK|#rSqM=VOl1oy8#WP0jIb?5<{cRG4;Hnw?4J)b{R@~n!;|CQaTYIs~8jBM(iyz9$Ox7KnN0uGB|mh8y)*#x-;Y{hL0=Lr*|Kkf6LHeX(*`a>&c(D^>$b7is)*g=XP{MV0bP) ze*4UidTh7fNtAUbH{WwJnt0zf*)qmt>!FiqVgw^UC&E$4C2I+7k=z{VSY(v*e53wJ zL!Db2zqIV=Y;XS3%ty2FhALKRJs%^%DM|F!QsrKyN2~Q2j;XA@hRQQ3WPy;%-9cUH zy%9fVrm)F{UCbDdljK}0eQva&wATdlSsOo|XOOcfjb*EFhnm{Kt4dl;-?Y2C>?}4f zE$=SpLk8Ye+lNWUnCM@DI^LNR(vg_IWmKXV-;!$`P+^lBLfn6i7pxCgI~q0^YU`() zN9}^lq;dKpsDGu+R2!l&!4{LecxX;MCuBYdTrM-%n0(1mBw>cb=AevDe5ho_D8 z@y=ZOMW!igP?5=a3`*%}_@Ll32+6-vO-AkX#ZL8Bm_MdW9Y-^65ch4r++TNb^hOrZ zhowQ7U902bKYld(mQK%c%nITC80;CozfgdGX|?&X4L;k_v=7d+}+)3 zSOX8Lm(f=LK#Vxt=yLOY()raJM>9zZx4TL^ivtY*sTCZ5e`NZf3*z4mv^+Cy!@MlQ z`^75j$-w<*5QK_~)Y{LCjR<|mz5*8wCQ;k(aW`%qC=Ea`t+orLr>B3ulD=M;QpA`O z3gctooxin~x(8RNY-v?_Ch?dHtt0yIaO7ly*WQ3++U^{e^=)1gT~eaJQdJN7z0B)v zlF#jyn>cM^%uq=sjMx4^N&YPL;!M|2MRBnh#O6n{qsnvZPsdu=>u_5^Gd~$}Q@I^) zT{~VF8;QmY5E z{I$fG=J%XOW~r&0|Ep=$VK(&07Dwy=+teRSkUz2#tCVi*lA5Np*+=W&DhxG_uw@rJ z-bT2okU?q&uUc6TDm$XeRjkX+{9;C|&x>pzog}XhCXT^LVf6tI3thDJi`7b3jEV`{{J%Y#ZLY9I?i0T7H-&&KXMY~;|sB2_G}+4O5}zF zb)c2=B1<_2EZV6fl1PIqEhr-Rhmwbz@@YHw=q)6zYv1 z8k0KAX-EHlzmR%~>QTzjgNo`R^N$;G>@|Vq0@@o(izHbVRjBVda#6En|XX_+Vh5mZ2ksNZRJPw7vs?@`VEa`u%59quolqYu zOB4`}MFInk1941fHe~Ne{lq+dLQJ>yGD1SH=+~OY3e5q)v{_0=?b4c5qi6@6Cl@93 z0y1;udQD==3{!AO-l_H_URF*+k8Jf~0}kPqDp;3nG&PMwHio($9L#Xk-(FRXaiKkc z_x6AsLH$E2)T6&=@*R-bH*&7-Pz}8!4?;WG-i!izNV*FJXn$Zg*&2uH8BJ+zNei+5 z0Eh))6PS=c=kNiFdUvz0yLx04ranX~{S^K5w}GVX@b`kYL~tqq;OHi11(BB7zaGWN zuUhtQ8a-+Tb0Zs?Oy8mKsR2||!ROgs(1Wm;E}o2+fV?|Ez-P(>?Go}2tlN5b+=upH4tHa7AwKWqBiC9d87h0dvgxzX| zePQjJhjge5oq@|YtA%h!gF(BrAEi?zbUQ_7SGYlRBDaKIvA$y0c5sQyyVNZq8$Z;9hG<|@uAng&;huoheHE8N0iGuUOyCnNzW&Jv69);+EoXXR zLfG4`ZjJDFume+4EWGpC8HiThb~v>ro6|`s1xV;0T3J`?az;%MrvvCfo#f5rypZ_y zi3}9ZY94?!Gwgw4hv#XV1GwsNF7uMspp)a8+o3@?I0jC&CS|Q-zHb?HnS|#z3ON=H zbTKx~-yb3Gu50BuGz)zbTk5sa>V+qM&v8Bm&#l2i&-RTdmI1}(p@CfF`*;;|>s^(D zNa{=;4#;_vyhEA~?Xok8BcM>#IQT%xN0a^-KMT|n;7*wE@bZuGsYd@i@UYyPE5O9a z%md=D4xY3;oZbODIuCz(D5Q{R90@lai-3tFr~_8cC+l7Ytm@LM1B(Mp_Tcgezr_RU 
zI_@1KmWyJ=9EMX(x9YIX^;~kjTTX^ScW@-Z(4D?exZzocj8kd%CzrLWB!;%W;?nqV=SGuh`B}I z6j(df-4Czr&eR*%4yvPUo;| zq=W_h(==h7$4g*k4$&vcXyfQyu=l87kbI=4@z+7wle`i)5apR}RuK^3Qv0U61y*7H zZgFIowhs=-*Qcq9DL$o)=|pqsO^3<;rdG!a8+Hn$i!Kzq%b&XD0ZX zOE@&6B7l;U(k}8A^T_r1Tr)-vZ6BO9ApwlfJhgVTH7*7hq1S*iJ{LXCPWS`G-~_6v zhY$=9r@9;A(sPrwFG?`Cf=`k+rX^V<`rxm?m7%z5k6~ctk+NX!^5sI+fE*>@)H-Gk;Z z$fvQwZaJGz$R9jO59c9R7iUh!&DOp!s^7lUg>DszM~Ed~u=(qFijB#cWJX_eU5pJ_ zfJ>hBUBz|2|AdYQF7rQ+DR4<=^&+<%cHS%UCPg)ldKh7&kv#ka6-@a4NZ#6{$n73a z*B}G)e#*?ZOu=l5w<>#AN$9y@(WU}dVxBMF0qij-2}Yh{kdMzElUDs^uMykp9@pvJ zdYiR!JH0r<^V)1i19dBLG^+R5kB1S45UeWaU0w#2hhCyd0dVc;BJY zXW9fE+sgmHT@5t%plC;9^5fmc4qT4Pg6kW|(vE+{dc_vHiSQkwtYk=5WTE*NSgpUG zE)HOy8{o*W$k+H@v)9h3%%xRf$$}H_?ts%cb8%MSMmY7GV6;bMSo}@}ggDw08Pu!& zp#`<7!|fJ7%q4FMd>M918zlt7kmh!X!g0NXg1}Hb+sJo^k9lyN$;)0|JMHhvyAm<5 z2S)yGB_tm>^-v$@JH3Aquh$?FzGk*g5dfMCd+=^h#GK8QzCKeXo(!!$)`oIYgU-2b zyik-naM_dQ7y=)`7IPEI1W7-9O^mkI!#fTctpgqHxwOUW(jL*GZ{9kNn(+Pe^t&DD zH)5g>Irl5+o}U&c?jc2)Hp={W9w&E(83L2#eXgu5h&h@m8pGhYr5ukw%Z}W2y#QOb2wfq;e?SpwFM00$#QDGqju3UKA%9 zFmz!%6NN~1cQK+B4fud%LjDbCDnzZhJ=%)+nS1lY^q1-UJ7;`%?ntc52W)t@?E6=V z{Y~06TPr0~Io8vdH*lufxWk1Ic%RT&L%@@zA`WXj^M1YBu43PddzG9jD62>=#B=XOMO_Z&D z^P3z{l(sdgIq#A5FDs#`NJC$9qV^U;TAuOdWLirz(6bM2-w_=lb8I%sXyJJ69`5wa zN?3=dF`~xsx-_qt!8)DIPr&(X>ivhhBhlJ`)zM7i#Z!bw*czCc41L(l!RG7`uIeL@ zF9tNegGCrJ)QJ%dh^j1_6o&$e7pQNJ36g56ZI4{boH*n678c$w44-7k*5>(v4L-(n zioPteRPC??g%3-Wu}=LAwVM7H)V?~h^r<&|tl_+XyI=L)SnHCoP%xjSb*+u3Zr&Sn z+uZq3?~lzB+=`~GZY;Mq<F6t04c=dcSIW|EK;6>JUTT>a z(WRuM+*DdWqb)u3_D`}n3-p1)=bjT$-aQMGue%V{YTCtiDOlTO(P_MyZGJe$?5~BB zQhVtP5n~X&ZdaVpmRkHBDuK?t`{jmtE)tGi<0WC1Fe5zmnjVGd#8IscTEcJDY42k(N46RM)N<@)>Dmc$XPF*2J+$KPX+#e8_~R#bYr%gc*Po28!60gr z5LcfHS~-VvzC(tHEg)0`vaC^JnEoth}jL;3us+*6Ju{xn@H-C4wKBJ2}PnR3ly z6iFaRZP{WgjoRpmoavIwoYwvx0w-T*A(FTuZZeO`Y1e+g)R7)od14><1SiLW=_Nx= z`@VtnqVtDv6L&2Pmy;_FN}zCiRM5G&kIQJ~qalE9AF1Zv6ek97|4cj^8ffLJm@CVSK3jlNDaY^v zl~C*V*Bo5T{-9*4MF|u>JGp!qU>p> z1@VmP!v@;@ym!VNVt Date: Fri, 1 Oct 2021 09:42:49 -0400 Subject: [PATCH 56/90] Use CICE-Consortium/Icepack master (#40) * switch to icepack master at consortium --- .gitmodules | 5 +++-- icepack | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.gitmodules b/.gitmodules index 8a773d230..1868e25dc 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +1,4 @@ [submodule "icepack"] - path = icepack - url = https://github.com/NOAA-EMC/Icepack + path = icepack + url = https://github.com/CICE-Consortium/Icepack + branch = master diff --git a/icepack b/icepack index 29ee0cefc..34c8e688b 160000 --- a/icepack +++ b/icepack @@ -1 +1 @@ -Subproject commit 29ee0cefc35b6d6b00f3f0fd11cb9a1877f20fa6 +Subproject commit 34c8e688bf7f3008cf84093cd703cf8cfe068eda From 8d4a3c626f65e73f20efaeb4d8b7fc5e0771983a Mon Sep 17 00:00:00 2001 From: Denise Worthen Date: Mon, 22 Nov 2021 09:06:56 -0500 Subject: [PATCH 57/90] recreate cap update branch (#42) * add debug_model feature * add required variables and calls for tr_snow --- cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 | 49 +++++++++--- cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 | 80 +++++++++++++++++-- 2 files changed, 113 insertions(+), 16 deletions(-) diff --git a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 index cfca994c3..8b69730b8 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 @@ -8,6 +8,7 @@ module CICE_InitMod use icepack_intfc, only: icepack_aggregate use icepack_intfc, only: icepack_init_itd, icepack_init_itd_hist use icepack_intfc, 
diff --git a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90
index cfca994c3..8b69730b8 100644
--- a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90
+++ b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90
@@ -8,6 +8,7 @@ module CICE_InitMod
   use icepack_intfc, only: icepack_aggregate
   use icepack_intfc, only: icepack_init_itd, icepack_init_itd_hist
   use icepack_intfc, only: icepack_init_fsd_bounds, icepack_init_wave
+  use icepack_intfc, only: icepack_init_snow
   use icepack_intfc, only: icepack_configure
   use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted
   use icepack_intfc, only: icepack_query_parameters, icepack_query_tracer_flags
@@ -83,7 +84,7 @@ subroutine cice_init2()
       use ice_dyn_vp      , only: init_vp
       use ice_flux        , only: init_coupler_flux, init_history_therm
       use ice_flux        , only: init_history_dyn, init_flux_atm, init_flux_ocn
-      use ice_forcing     , only: init_forcing_ocn
+      use ice_forcing     , only: init_forcing_ocn, init_snowtable
       use ice_forcing_bgc , only: get_forcing_bgc, get_atm_bgc
       use ice_forcing_bgc , only: faero_default, faero_optics, alloc_forcing_bgc, fiso_default
       use ice_history     , only: init_hist, accum_hist
@@ -95,7 +96,8 @@ subroutine cice_init2()
       use ice_transport_driver , only: init_transport

       logical(kind=log_kind) :: tr_aero, tr_zaero, skl_bgc, z_tracers
-      logical(kind=log_kind) :: tr_iso, tr_fsd, wave_spec
+      logical(kind=log_kind) :: tr_iso, tr_fsd, wave_spec, tr_snow
+      character(len=char_len) :: snw_aging_table
       character(len=*), parameter :: subname = '(cice_init2)'

      !----------------------------------------------------
@@ -145,7 +147,7 @@ subroutine cice_init2()
       call ice_HaloRestore_init ! restored boundary conditions

       call icepack_query_parameters(skl_bgc_out=skl_bgc, z_tracers_out=z_tracers, &
-         wave_spec_out=wave_spec)
+         wave_spec_out=wave_spec, snw_aging_table_out=snw_aging_table)
       call icepack_warnings_flush(nu_diag)
       if (icepack_warnings_aborted()) call abort_ice(trim(subname), &
          file=__FILE__,line= __LINE__)
@@ -158,7 +160,7 @@ subroutine cice_init2()
       call init_history_dyn     ! initialize dynamic history variables

       call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_zaero_out=tr_zaero)
-      call icepack_query_tracer_flags(tr_iso_out=tr_iso)
+      call icepack_query_tracer_flags(tr_iso_out=tr_iso, tr_snow_out=tr_snow)
       call icepack_warnings_flush(nu_diag)
       if (icepack_warnings_aborted()) call abort_ice(trim(subname), &
          file=__FILE__,line= __LINE__)
@@ -167,6 +169,17 @@ subroutine cice_init2()
          call faero_optics !initialize aerosol optical property tables
       end if

+      ! snow aging lookup table initialization
+      if (tr_snow) then ! advanced snow physics
+         call icepack_init_snow()
+         call icepack_warnings_flush(nu_diag)
+         if (icepack_warnings_aborted()) call abort_ice(error_message=subname, &
+            file=__FILE__, line=__LINE__)
+         if (snw_aging_table(1:4) /= 'test') then
+            call init_snowtable()
+         endif
+      endif
+
       ! Initialize shortwave components using swdn from previous timestep
       ! if restarting. These components will be scaled to current forcing
       ! in prep_radiation.
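The snow-physics branch added above is driven from the ice namelist. A
minimal, illustrative ice_in fragment (the group placement is an assumption
based on the tracer_nml and snow_nml reads elsewhere in this series; per the
code above, a value beginning with 'test' bypasses init_snowtable()):

    &tracer_nml
        tr_snow = .true.
    /
    &snow_nml
        snw_aging_table = 'test'
    /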
@@ -199,12 +212,12 @@ subroutine init_restart() use ice_calendar, only: calendar use ice_constants, only: c0 use ice_domain, only: nblocks - use ice_domain_size, only: ncat, n_iso, n_aero, nfsd + use ice_domain_size, only: ncat, n_iso, n_aero, nfsd, nslyr use ice_dyn_eap, only: read_restart_eap use ice_dyn_shared, only: kdyn use ice_grid, only: tmask use ice_init, only: ice_ic - use ice_init_column, only: init_age, init_FY, init_lvl, & + use ice_init_column, only: init_age, init_FY, init_lvl, init_snowtracers, & init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & init_isotope, init_aerosol, init_hbrine, init_bgc, init_fsd use ice_restart_column, only: restart_age, read_restart_age, & @@ -212,6 +225,7 @@ subroutine init_restart() restart_pond_cesm, read_restart_pond_cesm, & restart_pond_lvl, read_restart_pond_lvl, & restart_pond_topo, read_restart_pond_topo, & + restart_snow, read_restart_snow, & restart_fsd, read_restart_fsd, & restart_iso, read_restart_iso, & restart_aero, read_restart_aero, & @@ -226,12 +240,13 @@ subroutine init_restart() iblk ! block index logical(kind=log_kind) :: & tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & - tr_pond_topo, tr_fsd, tr_iso, tr_aero, tr_brine, & + tr_pond_topo, tr_fsd, tr_iso, tr_aero, tr_brine, tr_snow, & skl_bgc, z_tracers, solve_zsal integer(kind=int_kind) :: & ntrcr integer(kind=int_kind) :: & nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, & + nt_smice, nt_smliq, nt_rhos, nt_rsnw, & nt_iage, nt_FY, nt_aero, nt_fsd, nt_isosno, nt_isoice character(len=*), parameter :: subname = '(init_restart)' @@ -247,10 +262,12 @@ subroutine init_restart() call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & - tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) + tr_snow_out=tr_snow, tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, nt_ipnd_out=nt_ipnd, & nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_aero_out=nt_aero, nt_fsd_out=nt_fsd, & + nt_smice_out=nt_smice, nt_smliq_out=nt_smliq, & + nt_rhos_out=nt_rhos, nt_rsnw_out=nt_rsnw, & nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -347,6 +364,21 @@ subroutine init_restart() enddo ! iblk endif ! .not. restart_pond endif + ! snow redistribution/metamorphism + if (tr_snow) then + if (trim(runtype) == 'continue') restart_snow = .true. + if (restart_snow) then + call read_restart_snow + else + do iblk = 1, nblocks + call init_snowtracers(trcrn(:,:,nt_smice:nt_smice+nslyr-1,:,iblk), & + trcrn(:,:,nt_smliq:nt_smliq+nslyr-1,:,iblk), & + trcrn(:,:,nt_rhos :nt_rhos +nslyr-1,:,iblk), & + trcrn(:,:,nt_rsnw :nt_rsnw +nslyr-1,:,iblk)) + enddo ! iblk + endif + endif + ! floe size distribution if (tr_fsd) then if (trim(runtype) == 'continue') restart_fsd = .true. 
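A note on the tracer slicing used in the init_snowtracers call above: each
of the four snow tracers occupies nslyr consecutive slots in the tracer
dimension of trcrn, one slot per snow layer, starting at the index queried
from Icepack. A hedged sketch of the layout (tracer meanings per the snow
scheme; the actual index values vary with the active tracer set):

    ! trcrn(i,j,1:ntrcr,n,iblk) holds all tracers for thickness category n
    !   nt_smice : nt_smice+nslyr-1   ice mass in snow, layers 1..nslyr
    !   nt_smliq : nt_smliq+nslyr-1   liquid water mass in snow
    !   nt_rhos  : nt_rhos +nslyr-1   snow density
    !   nt_rsnw  : nt_rsnw +nslyr-1   snow grain radius
    ! e.g. layer k of the grain radius tracer is trcrn(:,:,nt_rsnw+k-1,:,iblk)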
@@ -441,7 +473,6 @@ subroutine init_restart() call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - end subroutine init_restart !======================================================================= diff --git a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 index 81fa367c1..219777f6f 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 @@ -110,7 +110,7 @@ subroutine ice_step use ice_boundary, only: ice_HaloUpdate use ice_calendar, only: dt, dt_dyn, ndtd, diagfreq, write_restart, istep use ice_calendar, only: idate, msec - use ice_diagnostics, only: init_mass_diags, runtime_diags + use ice_diagnostics, only: init_mass_diags, runtime_diags, debug_model, debug_ice use ice_diagnostics_bgc, only: hbrine_diags, zsal_diags, bgc_diags use ice_domain, only: halo_info, nblocks use ice_dyn_eap, only: write_restart_eap @@ -123,12 +123,13 @@ subroutine ice_step use ice_restart_column, only: write_restart_age, write_restart_FY, & write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & write_restart_pond_topo, write_restart_aero, write_restart_fsd, & - write_restart_iso, write_restart_bgc, write_restart_hbrine + write_restart_iso, write_restart_bgc, write_restart_hbrine, & + write_restart_snow use ice_restart_driver, only: dumpfile use ice_restoring, only: restore_ice, ice_HaloRestore use ice_step_mod, only: prep_radiation, step_therm1, step_therm2, & update_state, step_dyn_horiz, step_dyn_ridge, step_radiation, & - biogeochemistry, save_init, step_dyn_wave + biogeochemistry, save_init, step_dyn_wave, step_snow use ice_timers, only: ice_timer_start, ice_timer_stop, & timer_diags, timer_column, timer_thermo, timer_bound, & timer_hist, timer_readwrite @@ -144,19 +145,28 @@ subroutine ice_step offset ! d(age)/dt time offset logical (kind=log_kind) :: & - tr_iage, tr_FY, tr_lvl, tr_fsd, & + tr_iage, tr_FY, tr_lvl, tr_fsd, tr_snow, & tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec character(len=*), parameter :: subname = '(ice_step)' + character (len=char_len) :: plabeld + + if (debug_model) then + plabeld = 'beginning time step' + do iblk = 1, nblocks + call debug_ice (iblk, plabeld) + enddo + endif + call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc, skl_bgc_out=skl_bgc, & solve_zsal_out=solve_zsal, z_tracers_out=z_tracers, ktherm_out=ktherm, & wave_spec_out=wave_spec) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & - tr_iso_out=tr_iso, tr_fsd_out=tr_fsd) + tr_iso_out=tr_iso, tr_fsd_out=tr_fsd, tr_snow_out=tr_snow) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -201,15 +211,33 @@ subroutine ice_step !----------------------------------------------------------------- if (calc_Tsfc) call prep_radiation (iblk) + if (debug_model) then + plabeld = 'post prep_radiation' + call debug_ice (iblk, plabeld) + endif !----------------------------------------------------------------- ! thermodynamics and biogeochemistry !----------------------------------------------------------------- call step_therm1 (dt, iblk) ! 
vertical thermodynamics + if (debug_model) then + plabeld = 'post step_therm1' + call debug_ice (iblk, plabeld) + endif + call biogeochemistry (dt, iblk) ! biogeochemistry + if (debug_model) then + plabeld = 'post biogeochemistry' + call debug_ice (iblk, plabeld) + endif + if (.not.prescribed_ice) & call step_therm2 (dt, iblk) ! ice thickness distribution thermo + if (debug_model) then + plabeld = 'post step_therm2' + call debug_ice (iblk, plabeld) + endif endif ! ktherm > 0 @@ -237,6 +265,12 @@ subroutine ice_step ! momentum, stress, transport call step_dyn_horiz (dt_dyn) + if (debug_model) then + plabeld = 'post step_dyn_horiz' + do iblk = 1, nblocks + call debug_ice (iblk, plabeld) + enddo ! iblk + endif ! ridging !$OMP PARALLEL DO PRIVATE(iblk) @@ -244,12 +278,24 @@ subroutine ice_step if (kridge > 0) call step_dyn_ridge (dt_dyn, ndtd, iblk) enddo !$OMP END PARALLEL DO + if (debug_model) then + plabeld = 'post step_dyn_ridge' + do iblk = 1, nblocks + call debug_ice (iblk, plabeld) + enddo ! iblk + endif ! clean up, update tendency diagnostics offset = c0 call update_state (dt_dyn, daidtd, dvidtd, dagedtd, offset) enddo + if (debug_model) then + plabeld = 'post dynamics' + do iblk = 1, nblocks + call debug_ice (iblk, plabeld) + enddo + endif endif ! not prescribed ice @@ -260,18 +306,36 @@ subroutine ice_step call ice_timer_start(timer_column) ! column physics call ice_timer_start(timer_thermo) ! thermodynamics + !----------------------------------------------------------------- + ! snow redistribution and metamorphosis + !----------------------------------------------------------------- + + if (tr_snow) then ! advanced snow physics + do iblk = 1, nblocks + call step_snow (dt, iblk) + enddo + call update_state (dt) ! clean up + endif + !MHRI: CHECK THIS OMP !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks if (ktherm >= 0) call step_radiation (dt, iblk) + if (debug_model) then + plabeld = 'post step_radiation' + call debug_ice (iblk, plabeld) + endif !----------------------------------------------------------------- ! get ready for coupling and the next time step !----------------------------------------------------------------- call coupling_prep (iblk) - + if (debug_model) then + plabeld = 'post coupling_prep' + call debug_ice (iblk, plabeld) + endif enddo ! iblk !$OMP END PARALLEL DO @@ -309,6 +373,7 @@ subroutine ice_step if (tr_pond_cesm) call write_restart_pond_cesm if (tr_pond_lvl) call write_restart_pond_lvl if (tr_pond_topo) call write_restart_pond_topo + if (tr_snow) call write_restart_snow if (tr_fsd) call write_restart_fsd if (tr_iso) call write_restart_iso if (tr_aero) call write_restart_aero @@ -634,11 +699,12 @@ subroutine sfcflux_to_ocn(nx_block, ny_block, & real (kind=dbl_kind) :: & puny, & ! + Lsub, & ! rLsub ! 
1/Lsub character(len=*), parameter :: subname = '(sfcflux_to_ocn)' - call icepack_query_parameters(puny_out=puny) + call icepack_query_parameters(puny_out=puny, Lsub_out=Lsub) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) From abbebab832a434c42f78e794e1266809f9f4618e Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Tue, 30 Nov 2021 05:10:24 -0500 Subject: [PATCH 58/90] remove 2 extraneous lines * remove two log print lines that were removed prior to merge of driver updates to consortium --- cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 4 ---- 1 file changed, 4 deletions(-) diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index 9d650d1ff..cad480dd9 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -915,8 +915,6 @@ subroutine ModelAdvance(gcomp, rc) !-------------------------------- rc = ESMF_SUCCESS - if(profile_memory) call ESMF_VMLogMemInfo("Entering CICE Model_ADVANCE: ") - if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) ! query the Component for its clock, importState and exportState @@ -1127,8 +1125,6 @@ subroutine ModelAdvance(gcomp, rc) if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) - if(profile_memory) call ESMF_VMLogMemInfo("Leaving CICE Model_ADVANCE: ") - end subroutine ModelAdvance !=============================================================================== From 8ff0fb2503a5953d2674937792c476ede46e6c40 Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Tue, 30 Nov 2021 05:11:57 -0500 Subject: [PATCH 59/90] duplicate gitmodule style for icepack --- .gitmodules | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.gitmodules b/.gitmodules index 1868e25dc..22e452f35 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,4 +1,3 @@ [submodule "icepack"] - path = icepack - url = https://github.com/CICE-Consortium/Icepack - branch = master + path = icepack + url = https://github.com/cice-consortium/Icepack From 27dfd1b6b06a2e95fbb10c78c9a6110669d3eb3f Mon Sep 17 00:00:00 2001 From: Denise Worthen Date: Thu, 24 Feb 2022 11:19:10 -0500 Subject: [PATCH 60/90] Update CICE to latest Consortium/main (#45) --- .../cicedynB/analysis/ice_diagnostics.F90 | 2 +- cicecore/cicedynB/analysis/ice_history.F90 | 29 +-- .../cicedynB/analysis/ice_history_bgc.F90 | 31 +-- .../cicedynB/analysis/ice_history_drag.F90 | 30 +-- .../cicedynB/analysis/ice_history_fsd.F90 | 29 +-- .../cicedynB/analysis/ice_history_mechred.F90 | 29 +-- .../cicedynB/analysis/ice_history_pond.F90 | 29 +-- .../cicedynB/analysis/ice_history_snow.F90 | 46 +++-- .../cicedynB/dynamics/ice_transport_remap.F90 | 34 ++-- cicecore/cicedynB/general/ice_init.F90 | 183 +++++++++++++---- .../cicedynB/infrastructure/ice_domain.F90 | 27 +-- .../infrastructure/io/io_pio2/ice_restart.F90 | 11 +- .../drivers/mct/cesm1/ice_prescribed_mod.F90 | 27 ++- cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 | 6 +- cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 | 20 +- .../drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 188 +++++++++++------- .../drivers/nuopc/cmeps/ice_import_export.F90 | 4 +- cicecore/drivers/nuopc/cmeps/ice_mesh_mod.F90 | 7 +- cicecore/shared/ice_init_column.F90 | 27 +-- configuration/scripts/cice.batch.csh | 21 +- configuration/scripts/cice.launch.csh | 2 +- .../scripts/machines/Macros.narwhal_aocc | 59 ++++++ .../scripts/machines/Macros.narwhal_cray | 60 ++++++ 
.../scripts/machines/Macros.narwhal_gnu | 67 +++++++ .../scripts/machines/Macros.narwhal_intel | 57 ++++++ .../scripts/machines/env.narwhal_aocc | 52 +++++ .../scripts/machines/env.narwhal_cray | 53 +++++ .../scripts/machines/env.narwhal_gnu | 52 +++++ .../scripts/machines/env.narwhal_intel | 52 +++++ configuration/scripts/tests/perf_suite.ts | 27 +++ 30 files changed, 964 insertions(+), 297 deletions(-) create mode 100644 configuration/scripts/machines/Macros.narwhal_aocc create mode 100644 configuration/scripts/machines/Macros.narwhal_cray create mode 100644 configuration/scripts/machines/Macros.narwhal_gnu create mode 100644 configuration/scripts/machines/Macros.narwhal_intel create mode 100755 configuration/scripts/machines/env.narwhal_aocc create mode 100755 configuration/scripts/machines/env.narwhal_cray create mode 100755 configuration/scripts/machines/env.narwhal_gnu create mode 100755 configuration/scripts/machines/env.narwhal_intel create mode 100644 configuration/scripts/tests/perf_suite.ts diff --git a/cicecore/cicedynB/analysis/ice_diagnostics.F90 b/cicecore/cicedynB/analysis/ice_diagnostics.F90 index d4e7066fb..23f39634f 100644 --- a/cicecore/cicedynB/analysis/ice_diagnostics.F90 +++ b/cicecore/cicedynB/analysis/ice_diagnostics.F90 @@ -1706,7 +1706,7 @@ subroutine print_state(plabel,i,j,iblk) ! dynamics (transport and/or ridging) causes the floe size distribution to become non-normal ! if (tr_fsd) then ! if (abs(sum(trcrn(i,j,nt_fsd:nt_fsd+nfsd-1,n,iblk))-c1) > puny) & -! print*,'afsdn not normal', & +! write(nu_diag,*) 'afsdn not normal', & ! sum(trcrn(i,j,nt_fsd:nt_fsd+nfsd-1,n,iblk)), & ! trcrn(i,j,nt_fsd:nt_fsd+nfsd-1,n,iblk) ! endif diff --git a/cicecore/cicedynB/analysis/ice_history.F90 b/cicecore/cicedynB/analysis/ice_history.F90 index 4b295b54d..dea07af63 100644 --- a/cicecore/cicedynB/analysis/ice_history.F90 +++ b/cicecore/cicedynB/analysis/ice_history.F90 @@ -93,6 +93,7 @@ subroutine init_hist (dt) integer (kind=int_kind), dimension(max_nstrm) :: & ntmp integer (kind=int_kind) :: nml_error ! namelist i/o error flag + character(len=*), parameter :: subname = '(init_hist)' !----------------------------------------------------------------- @@ -121,25 +122,27 @@ subroutine init_hist (dt) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - call get_fileunit(nu_nml) if (my_task == master_task) then - open (nu_nml, file=nml_filename, status='old',iostat=nml_error) + write(nu_diag,*) subname,' Reading icefields_nml' + + call get_fileunit(nu_nml) + open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then - nml_error = -1 - else - nml_error = 1 + call abort_ice(subname//'ERROR: icefields_nml open file '// & + trim(nml_filename), & + file=__FILE__, line=__LINE__) endif + + nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=icefields_nml,iostat=nml_error) end do - if (nml_error == 0) close(nu_nml) - endif - call release_fileunit(nu_nml) - - call broadcast_scalar(nml_error, master_task) - if (nml_error /= 0) then - close (nu_nml) - call abort_ice(subname//'ERROR: reading icefields_nml') + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: icefields_nml reading ', & + file=__FILE__, line=__LINE__) + endif + close(nu_nml) + call release_fileunit(nu_nml) endif ! 
histfreq options ('1','h','d','m','y') diff --git a/cicecore/cicedynB/analysis/ice_history_bgc.F90 b/cicecore/cicedynB/analysis/ice_history_bgc.F90 index fdb8c4393..8802cf431 100644 --- a/cicecore/cicedynB/analysis/ice_history_bgc.F90 +++ b/cicecore/cicedynB/analysis/ice_history_bgc.F90 @@ -282,7 +282,8 @@ subroutine init_hist_bgc_2D tr_bgc_N, tr_bgc_C, tr_bgc_chl, & tr_bgc_DON, tr_bgc_Fe, tr_bgc_hum, & skl_bgc, solve_zsal, z_tracers - character(len=*), parameter :: subname = '(init_hist_bgc_2D)' + + character(len=*), parameter :: subname = '(init_hist_bgc_2D)' call icepack_query_parameters(skl_bgc_out=skl_bgc, & solve_zsal_out=solve_zsal, z_tracers_out=z_tracers) @@ -303,25 +304,27 @@ subroutine init_hist_bgc_2D ! read namelist !----------------------------------------------------------------- - call get_fileunit(nu_nml) if (my_task == master_task) then - open (nu_nml, file=nml_filename, status='old',iostat=nml_error) + write(nu_diag,*) subname,' Reading icefields_bgc_nml' + + call get_fileunit(nu_nml) + open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then - nml_error = -1 - else - nml_error = 1 + call abort_ice(subname//'ERROR: icefields_bgc_nml open file '// & + trim(nml_filename), & + file=__FILE__, line=__LINE__) endif + + nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=icefields_bgc_nml,iostat=nml_error) end do - if (nml_error == 0) close(nu_nml) - endif - call release_fileunit(nu_nml) - - call broadcast_scalar(nml_error, master_task) - if (nml_error /= 0) then - close (nu_nml) - call abort_ice(subname//'ERROR: reading icefields_bgc_nml') + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: icefields_bgc_nml reading ', & + file=__FILE__, line=__LINE__) + endif + close(nu_nml) + call release_fileunit(nu_nml) endif if (.not. tr_iso) then diff --git a/cicecore/cicedynB/analysis/ice_history_drag.F90 b/cicecore/cicedynB/analysis/ice_history_drag.F90 index 31a92158b..c0a1f99bd 100644 --- a/cicecore/cicedynB/analysis/ice_history_drag.F90 +++ b/cicecore/cicedynB/analysis/ice_history_drag.F90 @@ -68,6 +68,7 @@ subroutine init_hist_drag_2D integer (kind=int_kind) :: ns integer (kind=int_kind) :: nml_error ! namelist i/o error flag logical (kind=log_kind) :: formdrag + character(len=*), parameter :: subname = '(init_hist_drag_2D)' call icepack_query_parameters(formdrag_out=formdrag) @@ -79,26 +80,27 @@ subroutine init_hist_drag_2D ! read namelist !----------------------------------------------------------------- - call get_fileunit(nu_nml) if (my_task == master_task) then - open (nu_nml, file=nml_filename, status='old',iostat=nml_error) + write(nu_diag,*) subname,' Reading icefields_drag_nml' + + call get_fileunit(nu_nml) + open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then - nml_error = -1 - else - nml_error = 1 + call abort_ice(subname//'ERROR: icefields_drag_nml open file '// & + trim(nml_filename), & + file=__FILE__, line=__LINE__) endif + + nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=icefields_drag_nml,iostat=nml_error) - if (nml_error > 0) read(nu_nml,*) ! 
for Nagware compiler end do - if (nml_error == 0) close(nu_nml) - endif - call release_fileunit(nu_nml) - - call broadcast_scalar(nml_error, master_task) - if (nml_error /= 0) then - close (nu_nml) - call abort_ice(subname//'ERROR: reading icefields_drag_nml') + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: icefields_drag_nml reading ', & + file=__FILE__, line=__LINE__) + endif + close(nu_nml) + call release_fileunit(nu_nml) endif call broadcast_scalar (f_Cdn_atm, master_task) diff --git a/cicecore/cicedynB/analysis/ice_history_fsd.F90 b/cicecore/cicedynB/analysis/ice_history_fsd.F90 index 7ad81e7d2..c64ecbefa 100644 --- a/cicecore/cicedynB/analysis/ice_history_fsd.F90 +++ b/cicecore/cicedynB/analysis/ice_history_fsd.F90 @@ -81,6 +81,7 @@ subroutine init_hist_fsd_2D integer (kind=int_kind) :: nml_error ! namelist i/o error flag real (kind=dbl_kind) :: secday logical (kind=log_kind) :: tr_fsd, wave_spec + character(len=*), parameter :: subname = '(init_hist_fsd_2D)' call icepack_query_tracer_flags(tr_fsd_out=tr_fsd) @@ -95,25 +96,27 @@ subroutine init_hist_fsd_2D ! read namelist !----------------------------------------------------------------- - call get_fileunit(nu_nml) if (my_task == master_task) then - open (nu_nml, file=nml_filename, status='old',iostat=nml_error) + write(nu_diag,*) subname,' Reading icefields_fsd_nml' + + call get_fileunit(nu_nml) + open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then - nml_error = -1 - else - nml_error = 1 + call abort_ice(subname//'ERROR: icefields_fsd_nml open file '// & + trim(nml_filename), & + file=__FILE__, line=__LINE__) endif + + nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=icefields_fsd_nml,iostat=nml_error) end do - if (nml_error == 0) close(nu_nml) - endif - call release_fileunit(nu_nml) - - call broadcast_scalar(nml_error, master_task) - if (nml_error /= 0) then - close (nu_nml) - call abort_ice(subname//'ERROR: reading icefields_fsd_nml') + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: icefields_fsd_nml reading ', & + file=__FILE__, line=__LINE__) + endif + close(nu_nml) + call release_fileunit(nu_nml) endif call broadcast_scalar (f_afsd, master_task) diff --git a/cicecore/cicedynB/analysis/ice_history_mechred.F90 b/cicecore/cicedynB/analysis/ice_history_mechred.F90 index a20df5fb0..920a83b47 100644 --- a/cicecore/cicedynB/analysis/ice_history_mechred.F90 +++ b/cicecore/cicedynB/analysis/ice_history_mechred.F90 @@ -89,6 +89,7 @@ subroutine init_hist_mechred_2D integer (kind=int_kind) :: nml_error ! namelist i/o error flag real (kind=dbl_kind) :: secday logical (kind=log_kind) :: tr_lvl + character(len=*), parameter :: subname = '(init_hist_mechred_2D)' call icepack_query_parameters(secday_out=secday) @@ -101,25 +102,27 @@ subroutine init_hist_mechred_2D ! 
read namelist !----------------------------------------------------------------- - call get_fileunit(nu_nml) if (my_task == master_task) then - open (nu_nml, file=nml_filename, status='old',iostat=nml_error) + write(nu_diag,*) subname,' Reading icefields_mechred_nml' + + call get_fileunit(nu_nml) + open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then - nml_error = -1 - else - nml_error = 1 + call abort_ice(subname//'ERROR: icefields_mechred_nml open file '// & + trim(nml_filename), & + file=__FILE__, line=__LINE__) endif + + nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=icefields_mechred_nml,iostat=nml_error) end do - if (nml_error == 0) close(nu_nml) - endif - call release_fileunit(nu_nml) - - call broadcast_scalar(nml_error, master_task) - if (nml_error /= 0) then - close (nu_nml) - call abort_ice(subname//'ERROR: reading icefields_mechred_nml') + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: icefields_mechred_nml reading ', & + file=__FILE__, line=__LINE__) + endif + close(nu_nml) + call release_fileunit(nu_nml) endif if (.not. tr_lvl) then diff --git a/cicecore/cicedynB/analysis/ice_history_pond.F90 b/cicecore/cicedynB/analysis/ice_history_pond.F90 index 182865fec..365bd4410 100644 --- a/cicecore/cicedynB/analysis/ice_history_pond.F90 +++ b/cicecore/cicedynB/analysis/ice_history_pond.F90 @@ -73,6 +73,7 @@ subroutine init_hist_pond_2D integer (kind=int_kind) :: ns integer (kind=int_kind) :: nml_error ! namelist i/o error flag logical (kind=log_kind) :: tr_pond + character(len=*), parameter :: subname = '(init_hist_pond_2D)' call icepack_query_tracer_flags(tr_pond_out=tr_pond) @@ -84,25 +85,27 @@ subroutine init_hist_pond_2D ! read namelist !----------------------------------------------------------------- - call get_fileunit(nu_nml) if (my_task == master_task) then - open (nu_nml, file=nml_filename, status='old',iostat=nml_error) + write(nu_diag,*) subname,' Reading icefields_pond_nml' + + call get_fileunit(nu_nml) + open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then - nml_error = -1 - else - nml_error = 1 + call abort_ice(subname//'ERROR: icefields_pond_nml open file '// & + trim(nml_filename), & + file=__FILE__, line=__LINE__) endif + + nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=icefields_pond_nml,iostat=nml_error) end do - if (nml_error == 0) close(nu_nml) - endif - call release_fileunit(nu_nml) - - call broadcast_scalar(nml_error, master_task) - if (nml_error /= 0) then - close (nu_nml) - call abort_ice(subname//'ERROR: reading icefields_pond_nml') + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: icefields_pond_nml reading ', & + file=__FILE__, line=__LINE__) + endif + close(nu_nml) + call release_fileunit(nu_nml) endif if (.not. tr_pond) then diff --git a/cicecore/cicedynB/analysis/ice_history_snow.F90 b/cicecore/cicedynB/analysis/ice_history_snow.F90 index 5a590af2b..090759759 100644 --- a/cicecore/cicedynB/analysis/ice_history_snow.F90 +++ b/cicecore/cicedynB/analysis/ice_history_snow.F90 @@ -87,30 +87,32 @@ subroutine init_hist_snow_2D (dt) if (tr_snow) then - !----------------------------------------------------------------- - ! 
read namelist - !----------------------------------------------------------------- - - call get_fileunit(nu_nml) - if (my_task == master_task) then - open (nu_nml, file=nml_filename, status='old',iostat=nml_error) - if (nml_error /= 0) then - nml_error = -1 - else + !----------------------------------------------------------------- + ! read namelist + !----------------------------------------------------------------- + + if (my_task == master_task) then + write(nu_diag,*) subname,' Reading icefields_snow_nml' + + call get_fileunit(nu_nml) + open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: icefields_snow_nml open file '// & + trim(nml_filename), & + file=__FILE__, line=__LINE__) + endif + nml_error = 1 + do while (nml_error > 0) + read(nu_nml, nml=icefields_snow_nml,iostat=nml_error) + end do + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: icefields_snow_nml reading ', & + file=__FILE__, line=__LINE__) + endif + close(nu_nml) + call release_fileunit(nu_nml) endif - do while (nml_error > 0) - read(nu_nml, nml=icefields_snow_nml,iostat=nml_error) - end do - if (nml_error == 0) close(nu_nml) - endif - call release_fileunit(nu_nml) - - call broadcast_scalar(nml_error, master_task) - if (nml_error /= 0) then - close (nu_nml) - call abort_ice('ice: error reading icefields_snow_nml') - endif else ! .not. tr_snow f_smassice = 'x' diff --git a/cicecore/cicedynB/dynamics/ice_transport_remap.F90 b/cicecore/cicedynB/dynamics/ice_transport_remap.F90 index 070f3b7ad..89c0609ef 100644 --- a/cicecore/cicedynB/dynamics/ice_transport_remap.F90 +++ b/cicecore/cicedynB/dynamics/ice_transport_remap.F90 @@ -2962,17 +2962,17 @@ subroutine locate_triangles (nx_block, ny_block, & i = indxid(ij) j = indxjd(ij) if (abs(areasum(i,j) - edgearea(i,j)) > eps13*areafac_c(i,j)) then - print*, '' - print*, 'Areas do not add up: m, i, j, edge =', & + write(nu_diag,*) '' + write(nu_diag,*) 'Areas do not add up: m, i, j, edge =', & my_task, i, j, trim(edge) - print*, 'edgearea =', edgearea(i,j) - print*, 'areasum =', areasum(i,j) - print*, 'areafac_c =', areafac_c(i,j) - print*, '' - print*, 'Triangle areas:' + write(nu_diag,*) 'edgearea =', edgearea(i,j) + write(nu_diag,*) 'areasum =', areasum(i,j) + write(nu_diag,*) 'areafac_c =', areafac_c(i,j) + write(nu_diag,*) '' + write(nu_diag,*) 'Triangle areas:' do ng = 1, ngroups ! not vector friendly if (abs(triarea(i,j,ng)) > eps16*abs(areafact(i,j,ng))) then - print*, ng, triarea(i,j,ng) + write(nu_diag,*) ng, triarea(i,j,ng) endif enddo endif @@ -3029,18 +3029,18 @@ subroutine locate_triangles (nx_block, ny_block, & do i = ib, ie if (abs(triarea(i,j,ng)) > puny) then if (abs(xp(i,j,nv,ng)) > p5+puny) then - print*, '' - print*, 'WARNING: xp =', xp(i,j,nv,ng) - print*, 'm, i, j, ng, nv =', my_task, i, j, ng, nv -! print*, 'yil,xdl,xcl,ydl=',yil,xdl,xcl,ydl -! print*, 'yir,xdr,xcr,ydr=',yir,xdr,xcr,ydr -! print*, 'ydm=',ydm + write(nu_diag,*) '' + write(nu_diag,*) 'WARNING: xp =', xp(i,j,nv,ng) + write(nu_diag,*) 'm, i, j, ng, nv =', my_task, i, j, ng, nv +! write(nu_diag,*) 'yil,xdl,xcl,ydl=',yil,xdl,xcl,ydl +! write(nu_diag,*) 'yir,xdr,xcr,ydr=',yir,xdr,xcr,ydr +! write(nu_diag,*) 'ydm=',ydm ! 
stop endif if (abs(yp(i,j,nv,ng)) > p5+puny) then - print*, '' - print*, 'WARNING: yp =', yp(i,j,nv,ng) - print*, 'm, i, j, ng, nv =', my_task, i, j, ng, nv + write(nu_diag,*) '' + write(nu_diag,*) 'WARNING: yp =', yp(i,j,nv,ng) + write(nu_diag,*) 'm, i, j, ng, nv =', my_task, i, j, ng, nv endif endif ! triarea enddo diff --git a/cicecore/cicedynB/general/ice_init.F90 b/cicecore/cicedynB/general/ice_init.F90 index 7485cbe23..e3030ec55 100644 --- a/cicecore/cicedynB/general/ice_init.F90 +++ b/cicecore/cicedynB/general/ice_init.F90 @@ -551,53 +551,154 @@ subroutine input_data nml_filename = 'ice_in'//trim(inst_suffix) #endif - call get_fileunit(nu_nml) - if (my_task == master_task) then - open (nu_nml, file=nml_filename, status='old',iostat=nml_error) + + call get_fileunit(nu_nml) + open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then - nml_error = -1 - else - nml_error = 1 - endif + call abort_ice(subname//'ERROR: open file '// & + trim(nml_filename), & + file=__FILE__, line=__LINE__) + endif + write(nu_diag,*) subname,' Reading setup_nml' + rewind(unit=nu_nml, iostat=nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: setup_nml rewind ', & + file=__FILE__, line=__LINE__) + endif + nml_error = 1 do while (nml_error > 0) - print*,'Reading setup_nml' - read(nu_nml, nml=setup_nml,iostat=nml_error) - if (nml_error /= 0) exit - print*,'Reading grid_nml' - read(nu_nml, nml=grid_nml,iostat=nml_error) - if (nml_error /= 0) exit - print*,'Reading tracer_nml' - read(nu_nml, nml=tracer_nml,iostat=nml_error) - if (nml_error /= 0) exit - print*,'Reading thermo_nml' - read(nu_nml, nml=thermo_nml,iostat=nml_error) - if (nml_error /= 0) exit - print*,'Reading dynamics_nml' - read(nu_nml, nml=dynamics_nml,iostat=nml_error) - if (nml_error /= 0) exit - print*,'Reading shortwave_nml' - read(nu_nml, nml=shortwave_nml,iostat=nml_error) - if (nml_error /= 0) exit - print*,'Reading ponds_nml' - read(nu_nml, nml=ponds_nml,iostat=nml_error) - if (nml_error /= 0) exit - print*,'Reading snow_nml' - read(nu_nml, nml=snow_nml,iostat=nml_error) - if (nml_error /= 0) exit - print*,'Reading forcing_nml' - read(nu_nml, nml=forcing_nml,iostat=nml_error) - if (nml_error /= 0) exit + read(nu_nml, nml=setup_nml,iostat=nml_error) end do - if (nml_error == 0) close(nu_nml) - endif - call broadcast_scalar(nml_error, master_task) - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: reading namelist', & - file=__FILE__, line=__LINE__) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: setup_nml reading ', & + file=__FILE__, line=__LINE__) + endif + + write(nu_diag,*) subname,' Reading grid_nml' + rewind(unit=nu_nml, iostat=nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: grid_nml rewind ', & + file=__FILE__, line=__LINE__) + endif + nml_error = 1 + do while (nml_error > 0) + read(nu_nml, nml=grid_nml,iostat=nml_error) + end do + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: grid_nml reading ', & + file=__FILE__, line=__LINE__) + endif + + write(nu_diag,*) subname,' Reading tracer_nml' + rewind(unit=nu_nml, iostat=nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: tracer_nml rewind ', & + file=__FILE__, line=__LINE__) + endif + nml_error = 1 + do while (nml_error > 0) + read(nu_nml, nml=tracer_nml,iostat=nml_error) + end do + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: tracer_nml reading ', & + file=__FILE__, line=__LINE__) + endif + + write(nu_diag,*) subname,' Reading thermo_nml' + 
rewind(unit=nu_nml, iostat=nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: thermo_nml rewind ', & + file=__FILE__, line=__LINE__) + endif + nml_error = 1 + do while (nml_error > 0) + read(nu_nml, nml=thermo_nml,iostat=nml_error) + end do + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: thermo_nml reading ', & + file=__FILE__, line=__LINE__) + endif + + write(nu_diag,*) subname,' Reading dynamics_nml' + rewind(unit=nu_nml, iostat=nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: dynamics_nml rewind ', & + file=__FILE__, line=__LINE__) + endif + nml_error = 1 + do while (nml_error > 0) + read(nu_nml, nml=dynamics_nml,iostat=nml_error) + end do + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: dynamics_nml reading ', & + file=__FILE__, line=__LINE__) + endif + + write(nu_diag,*) subname,' Reading shortwave_nml' + rewind(unit=nu_nml, iostat=nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: shortwave_nml rewind ', & + file=__FILE__, line=__LINE__) + endif + nml_error = 1 + do while (nml_error > 0) + read(nu_nml, nml=shortwave_nml,iostat=nml_error) + end do + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: shortwave_nml reading ', & + file=__FILE__, line=__LINE__) + endif + + write(nu_diag,*) subname,' Reading ponds_nml' + rewind(unit=nu_nml, iostat=nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: ponds_nml rewind ', & + file=__FILE__, line=__LINE__) + endif + nml_error = 1 + do while (nml_error > 0) + read(nu_nml, nml=ponds_nml,iostat=nml_error) + end do + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: ponds_nml reading ', & + file=__FILE__, line=__LINE__) + endif + + write(nu_diag,*) subname,' Reading snow_nml' + rewind(unit=nu_nml, iostat=nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: snow_nml rewind ', & + file=__FILE__, line=__LINE__) + endif + nml_error = 1 + do while (nml_error > 0) + read(nu_nml, nml=snow_nml,iostat=nml_error) + end do + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: snow_nml reading ', & + file=__FILE__, line=__LINE__) + endif + + write(nu_diag,*) subname,' Reading forcing_nml' + rewind(unit=nu_nml, iostat=nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: forcing_nml rewind ', & + file=__FILE__, line=__LINE__) + endif + nml_error = 1 + do while (nml_error > 0) + read(nu_nml, nml=forcing_nml,iostat=nml_error) + end do + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: forcing_nml reading ', & + file=__FILE__, line=__LINE__) + endif + + close(nu_nml) + call release_fileunit(nu_nml) endif - call release_fileunit(nu_nml) !----------------------------------------------------------------- ! set up diagnostics output and resolve conflicts diff --git a/cicecore/cicedynB/infrastructure/ice_domain.F90 b/cicecore/cicedynB/infrastructure/ice_domain.F90 index 1dfdd0428..c44e896ac 100644 --- a/cicecore/cicedynB/infrastructure/ice_domain.F90 +++ b/cicecore/cicedynB/infrastructure/ice_domain.F90 @@ -161,24 +161,27 @@ subroutine init_domain_blocks nx_global = -1 ! NXGLOB, i-axis size ny_global = -1 ! 
NYGLOB, j-axis size - call get_fileunit(nu_nml) if (my_task == master_task) then - open (nu_nml, file=nml_filename, status='old',iostat=nml_error) + write(nu_diag,*) subname,' Reading domain_nml' + + call get_fileunit(nu_nml) + open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then - nml_error = -1 - else - nml_error = 1 + call abort_ice(subname//'ERROR: domain_nml open file '// & + trim(nml_filename), & + file=__FILE__, line=__LINE__) endif + + nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=domain_nml,iostat=nml_error) end do - if (nml_error == 0) close(nu_nml) - endif - call release_fileunit(nu_nml) - - call broadcast_scalar(nml_error, master_task) - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: error reading domain_nml') + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: domain_nml reading ', & + file=__FILE__, line=__LINE__) + endif + close(nu_nml) + call release_fileunit(nu_nml) endif call broadcast_scalar(nprocs, master_task) diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 index 0ec6b7628..c9e7fdf8a 100644 --- a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 @@ -718,7 +718,8 @@ subroutine read_restart_field(nu,nrec,work,atype,vname,ndim3,diag, & status = pio_inq_varid(File,trim(vname),vardesc) if (status /= PIO_noerr) then - call abort_ice(subname//"ERROR: CICE restart? Missing variable: "//trim(vname)) + call abort_ice(subname// & + "ERROR: CICE restart? Missing variable: "//trim(vname)) endif status = pio_inq_varndims(File, vardesc, ndims) @@ -728,6 +729,10 @@ subroutine read_restart_field(nu,nrec,work,atype,vname,ndim3,diag, & ! if (ndim3 == ncat .and. ncat>1) then if (ndim3 == ncat .and. ndims == 3) then call pio_read_darray(File, vardesc, iodesc3d_ncat, work, status) +!#ifndef CESM1_PIO +!! This only works for PIO2 +! where (work == PIO_FILL_DOUBLE) work = c0 +!#endif if (present(field_loc)) then do n=1,ndim3 call ice_HaloUpdate (work(:,:,n,:), halo_info, & @@ -737,6 +742,10 @@ subroutine read_restart_field(nu,nrec,work,atype,vname,ndim3,diag, & ! elseif (ndim3 == 1) then elseif (ndim3 == 1 .and. ndims == 2) then call pio_read_darray(File, vardesc, iodesc2d, work, status) +!#ifndef CESM1_PIO +!! This only works for PIO2 +! where (work == PIO_FILL_DOUBLE) work = c0 +!#endif if (present(field_loc)) then call ice_HaloUpdate (work(:,:,1,:), halo_info, & field_loc, field_type) diff --git a/cicecore/drivers/mct/cesm1/ice_prescribed_mod.F90 b/cicecore/drivers/mct/cesm1/ice_prescribed_mod.F90 index e068a2892..0868ef2fa 100644 --- a/cicecore/drivers/mct/cesm1/ice_prescribed_mod.F90 +++ b/cicecore/drivers/mct/cesm1/ice_prescribed_mod.F90 @@ -168,23 +168,28 @@ subroutine ice_prescribed_init(compid, gsmap, dom) prescribed_ice_fill = .false. ! true if pice data fill required ! 
read from input file - call get_fileunit(nu_nml) + if (my_task == master_task) then - open (nu_nml, file=nml_filename, status='old',iostat=nml_error) + write(nu_diag,*) subname,' Reading ice_prescribed_nml' + + call get_fileunit(nu_nml) + open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then - nml_error = -1 - else - nml_error = 1 + call abort_ice(subname//'ERROR: ice_prescribed_nml open file '// & + trim(nml_filename), & + file=__FILE__, line=__LINE__) endif + + nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=ice_prescribed_nml,iostat=nml_error) end do - if (nml_error == 0) close(nu_nml) - endif - call release_fileunit(nu_nml) - call broadcast_scalar(nml_error,master_task) - if (nml_error /= 0) then - call abort_ice (subname//' ERROR: Namelist read error in ice_prescribed_mod') + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: ice_prescribed_nml reading ', & + file=__FILE__, line=__LINE__) + endif + close(nu_nml) + call release_fileunit(nu_nml) endif call broadcast_scalar(prescribed_ice,master_task) diff --git a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 index 8b69730b8..338b25050 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 @@ -84,7 +84,7 @@ subroutine cice_init2() use ice_dyn_vp , only: init_vp use ice_flux , only: init_coupler_flux, init_history_therm use ice_flux , only: init_history_dyn, init_flux_atm, init_flux_ocn - use ice_forcing , only: init_forcing_ocn, init_snowtable + use ice_forcing , only: init_snowtable use ice_forcing_bgc , only: get_forcing_bgc, get_atm_bgc use ice_forcing_bgc , only: faero_default, faero_optics, alloc_forcing_bgc, fiso_default use ice_history , only: init_hist, accum_hist @@ -139,9 +139,6 @@ subroutine cice_init2() call calendar() ! determine the initial date - !TODO: - why is this being called when you are using CMEPS? - call init_forcing_ocn(dt) ! initialize sss and sst from data - call init_state ! initialize the ice state call init_transport ! initialize horizontal transport call ice_HaloRestore_init ! restored boundary conditions @@ -388,7 +385,6 @@ subroutine init_restart() call init_fsd(trcrn(:,:,nt_fsd:nt_fsd+nfsd-1,:,:)) endif endif - ! isotopes if (tr_iso) then if (trim(runtype) == 'continue') restart_iso = .true. diff --git a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 index 219777f6f..779adc65d 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 @@ -56,9 +56,9 @@ subroutine CICE_Run tr_iso, tr_aero, tr_zaero, skl_bgc, z_tracers, wave_spec, tr_fsd character(len=*), parameter :: subname = '(CICE_Run)' - !-------------------------------------------------------------------- - ! initialize error code and step timer - !-------------------------------------------------------------------- + !-------------------------------------------------------------------- + ! initialize error code and step timer + !-------------------------------------------------------------------- call ice_timer_start(timer_step) ! start timing entire run @@ -73,13 +73,13 @@ subroutine CICE_Run if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - !-------------------------------------------------------------------- - ! 
timestep loop - !-------------------------------------------------------------------- + !-------------------------------------------------------------------- + ! timestep loop + !-------------------------------------------------------------------- call ice_timer_start(timer_couple) ! atm/ocn coupling - call advance_timestep() ! advance timestep and update calendar data + call advance_timestep() ! advance timestep and update calendar data if (z_tracers) call get_atm_bgc ! biogeochemistry @@ -90,9 +90,9 @@ subroutine CICE_Run call ice_step - !-------------------------------------------------------------------- - ! end of timestep loop - !-------------------------------------------------------------------- + !-------------------------------------------------------------------- + ! end of timestep loop + !-------------------------------------------------------------------- call ice_timer_stop(timer_step) ! end timestepping loop timer diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index cad480dd9..a9d71e479 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -237,6 +237,10 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain character(len=char_len_long) :: diag_filename = 'unset' character(len=char_len_long) :: logmsg + character(len=char_len_long) :: single_column_lnd_domainfile + real(dbl_kind) :: scol_lon + real(dbl_kind) :: scol_lat + real(dbl_kind) :: scol_spval character(len=*), parameter :: subname=trim(modName)//':(InitializeAdvertise) ' !-------------------------------- @@ -376,8 +380,7 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) depressT_in = 0.054_dbl_kind, & Tocnfrz_in = -34.0_dbl_kind*0.054_dbl_kind, & pi_in = SHR_CONST_PI, & - snowpatch_in = 0.005_dbl_kind, & - dragio_in = 0.00536_dbl_kind) + snowpatch_in = 0.005_dbl_kind) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -506,12 +509,67 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) ! First cice initialization phase - before initializing grid info !---------------------------------------------------------------------------- +#ifdef CESMCOUPLED + ! Determine if single column + + call NUOPC_CompAttributeGet(gcomp, name='scol_lon', value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scmlon + call NUOPC_CompAttributeGet(gcomp, name='scol_lat', value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scmlat + call NUOPC_CompAttributeGet(gcomp, name='scol_spval', value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scol_spval + + if (scmlon > scol_spval .and. scmlat > scol_spval) then + call NUOPC_CompAttributeGet(gcomp, name='single_column_lnd_domainfile', & + value=single_column_lnd_domainfile, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (trim(single_column_lnd_domainfile) /= 'UNSET') then + single_column = .true. 
+ else + call abort_ice('single_column_domainfile cannot be null for single column mode') + end if + call NUOPC_CompAttributeGet(gcomp, name='scol_ocnmask', value=cvalue, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scol_mask + call NUOPC_CompAttributeGet(gcomp, name='scol_ocnfrac', value=cvalue, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scol_frac + call NUOPC_CompAttributeGet(gcomp, name='scol_ni', value=cvalue, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scol_ni + call NUOPC_CompAttributeGet(gcomp, name='scol_nj', value=cvalue, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scol_nj + + call ice_mesh_create_scolumn(scmlon, scmlat, ice_mesh, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + scol_valid = (scol_mask == 1) + if (.not. scol_valid) then + write(6,*)'DEBUG: i am here' + ! Advertise fields + call ice_advertise_fields(gcomp, importState, exportState, flds_scalar_name, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call t_stopf ('cice_init_total') + + ! ******************* + ! *** RETURN HERE *** + ! ******************* + RETURN + end if + end if + ! Read the cice namelist as part of the call to cice_init1 + ! Note that if single_column is true and scol_valid is not - will never get here + call t_startf ('cice_init1') call cice_init1 call t_stopf ('cice_init1') -#ifdef CESMCOUPLED ! Form of ocean freezing temperature ! 'minus1p8' = -1.8 C ! 'linear_salt' = -depressT * sss @@ -559,13 +617,20 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) ' must be the same as natmiter from cice namelist ',natmiter call abort_ice(trim(errmsg)) endif + +#else + + ! Read the cice namelist as part of the call to cice_init1 + call t_startf ('cice_init1') + call cice_init1 + call t_stopf ('cice_init1') + #endif + !---------------------------------------------------------------------------- ! Initialize grid info !---------------------------------------------------------------------------- - ! Initialize cice mesh and mask if appropriate - if (single_column .and. scol_valid) then call ice_mesh_init_tlon_tlat_area_hm() else @@ -750,82 +815,43 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) #ifdef CESMCOUPLED - call NUOPC_CompAttributeGet(gcomp, name='scol_lon', value=cvalue, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - read(cvalue,*) scmlon - call NUOPC_CompAttributeGet(gcomp, name='scol_lat', value=cvalue, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - read(cvalue,*) scmlat - call NUOPC_CompAttributeGet(gcomp, name='scol_spval', value=cvalue, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - read(cvalue,*) scol_spval - - if (scmlon > scol_spval .and. scmlat > scol_spval) then - call NUOPC_CompAttributeGet(gcomp, name='single_column_lnd_domainfile', & - value=single_column_lnd_domainfile, rc=rc) + ! if single column is not valid - set all export state fields to zero and return + if (single_column .and. .not. scol_valid) then + write(nu_diag,'(a)')' (ice_comp_nuopc) single column mode point does not contain any ocn/ice '& + //' - setting all export data to 0' + call ice_realize_fields(gcomp, mesh=ice_mesh, & + flds_scalar_name=flds_scalar_name, flds_scalar_num=flds_scalar_num, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (trim(single_column_lnd_domainfile) /= 'UNSET') then - single_column = .true. 
- else - call abort_ice('single_column_domainfile cannot be null for single column mode') - end if - call NUOPC_CompAttributeGet(gcomp, name='scol_ocnmask', value=cvalue, rc=rc) + call ESMF_StateGet(exportState, itemCount=fieldCount, rc=rc) if (chkerr(rc,__LINE__,u_FILE_u)) return - read(cvalue,*) scol_mask - call NUOPC_CompAttributeGet(gcomp, name='scol_ocnfrac', value=cvalue, rc=rc) - if (chkerr(rc,__LINE__,u_FILE_u)) return - read(cvalue,*) scol_frac - call NUOPC_CompAttributeGet(gcomp, name='scol_ni', value=cvalue, rc=rc) - if (chkerr(rc,__LINE__,u_FILE_u)) return - read(cvalue,*) scol_ni - call NUOPC_CompAttributeGet(gcomp, name='scol_nj', value=cvalue, rc=rc) + allocate(lfieldnamelist(fieldCount)) + call ESMF_StateGet(exportState, itemNameList=lfieldnamelist, rc=rc) if (chkerr(rc,__LINE__,u_FILE_u)) return - read(cvalue,*) scol_nj - - call ice_mesh_create_scolumn(scmlon, scmlat, ice_mesh, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - - scol_valid = (scol_mask == 1) - if (.not. scol_valid) then - ! if single column is not valid - set all export state fields to zero and return - write(nu_diag,'(a)')' (ice_comp_nuopc) single column mode point does not contain any ocn/ice '& - //' - setting all export data to 0' - call ice_realize_fields(gcomp, mesh=ice_mesh, & - flds_scalar_name=flds_scalar_name, flds_scalar_num=flds_scalar_num, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - call ESMF_StateGet(exportState, itemCount=fieldCount, rc=rc) - if (chkerr(rc,__LINE__,u_FILE_u)) return - allocate(lfieldnamelist(fieldCount)) - call ESMF_StateGet(exportState, itemNameList=lfieldnamelist, rc=rc) - if (chkerr(rc,__LINE__,u_FILE_u)) return - do n = 1, fieldCount - if (trim(lfieldnamelist(n)) /= flds_scalar_name) then - call ESMF_StateGet(exportState, itemName=trim(lfieldnamelist(n)), field=lfield, rc=rc) - if (chkerr(rc,__LINE__,u_FILE_u)) return - call ESMF_FieldGet(lfield, rank=rank, rc=rc) - if (chkerr(rc,__LINE__,u_FILE_u)) return - if (rank == 2) then - call ESMF_FieldGet(lfield, farrayPtr=fldptr2d, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - fldptr2d(:,:) = 0._dbl_kind - else - call ESMF_FieldGet(lfield, farrayPtr=fldptr1d, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - fldptr1d(:) = 0._dbl_kind - end if + do n = 1, fieldCount + if (trim(lfieldnamelist(n)) /= flds_scalar_name) then + call ESMF_StateGet(exportState, itemName=trim(lfieldnamelist(n)), field=lfield, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + call ESMF_FieldGet(lfield, rank=rank, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + if (rank == 2) then + call ESMF_FieldGet(lfield, farrayPtr=fldptr2d, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + fldptr2d(:,:) = 0._dbl_kind + else + call ESMF_FieldGet(lfield, farrayPtr=fldptr1d, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + fldptr1d(:) = 0._dbl_kind end if - enddo - deallocate(lfieldnamelist) - ! ******************* - ! *** RETURN HERE *** - ! ******************* - RETURN - else - write(nu_diag,'(a,3(f10.5,2x))')' (ice_comp_nuopc) single column mode lon/lat/frac is ',& - scmlon,scmlat,scol_frac - end if + end if + enddo + deallocate(lfieldnamelist) + ! ******************* + ! *** RETURN HERE *** + ! ******************* + RETURN else - single_column = .false. 
+ write(nu_diag,'(a,3(f10.5,2x))')' (ice_comp_nuopc) single column mode lon/lat/frac is ',& + scmlon,scmlat,scol_frac end if #endif @@ -915,6 +941,16 @@ subroutine ModelAdvance(gcomp, rc) !-------------------------------- rc = ESMF_SUCCESS + + call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) + + if (single_column .and. .not. scol_valid) then + ! ******************* + ! *** RETURN HERE *** + ! ******************* + RETURN + end if + if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) ! query the Component for its clock, importState and exportState diff --git a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 index f8627d690..8fe939785 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 @@ -131,7 +131,9 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam write(nu_diag,*)'send_i2x_per_cat = ',send_i2x_per_cat end if if (.not.send_i2x_per_cat) then - deallocate(fswthrun_ai) + if (allocated(fswthrun_ai)) then + deallocate(fswthrun_ai) + end if end if ! Determine if the following attributes are sent by the driver and if so read them in diff --git a/cicecore/drivers/nuopc/cmeps/ice_mesh_mod.F90 b/cicecore/drivers/nuopc/cmeps/ice_mesh_mod.F90 index 17941435d..fffe575de 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_mesh_mod.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_mesh_mod.F90 @@ -639,16 +639,13 @@ subroutine ice_mesh_check(gcomp, ice_mesh, rc) diff_lon = abs(mod(lonMesh(n) - tmplon,360.0)) if (diff_lon > eps_imesh ) then write(6,100)n,lonMesh(n),tmplon, diff_lon - call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) + !call abort_ice(error_message=subname, file=__FILE__, line=__LINE__) end if diff_lat = abs(latMesh(n) - lat(n)) if (diff_lat > eps_imesh) then write(6,101)n,latMesh(n),lat(n), diff_lat - call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) + !call abort_ice(error_message=subname, file=__FILE__, line=__LINE__) end if - enddo enddo enddo diff --git a/cicecore/shared/ice_init_column.F90 b/cicecore/shared/ice_init_column.F90 index eff39a464..5643b4277 100644 --- a/cicecore/shared/ice_init_column.F90 +++ b/cicecore/shared/ice_init_column.F90 @@ -1301,27 +1301,28 @@ subroutine input_zbgc ! read from input file !----------------------------------------------------------------- - call get_fileunit(nu_nml) - if (my_task == master_task) then + write(nu_diag,*) subname,' Reading zbgc_nml' + + call get_fileunit(nu_nml) open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then - nml_error = -1 - else - nml_error = 1 - endif + call abort_ice(subname//'ERROR: zbgc_nml open file '// & + trim(nml_filename), & + file=__FILE__, line=__LINE__) + endif - print*,'Reading zbgc_nml' + nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=zbgc_nml,iostat=nml_error) end do - if (nml_error == 0) close(nu_nml) - endif - call broadcast_scalar(nml_error, master_task) - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: reading zbgc namelist') + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: zbgc_nml reading ', & + file=__FILE__, line=__LINE__) + endif + close(nu_nml) + call release_fileunit(nu_nml) endif - call release_fileunit(nu_nml) !----------------------------------------------------------------- ! 
broadcast diff --git a/configuration/scripts/cice.batch.csh b/configuration/scripts/cice.batch.csh index 024270039..f86b55502 100755 --- a/configuration/scripts/cice.batch.csh +++ b/configuration/scripts/cice.batch.csh @@ -91,7 +91,26 @@ cat >> ${jobfile} << EOFB #PBS -l walltime=${batchtime} EOFB -else if (${ICE_MACHINE} =~ gordon* || ${ICE_MACHINE} =~ conrad* || ${ICE_MACHINE} =~ gaffney* || ${ICE_MACHINE} =~ koehr* || ${ICE_MACHINE} =~ mustang) then +else if (${ICE_MACHINE} =~ gordon* || ${ICE_MACHINE} =~ conrad* || ${ICE_MACHINE} =~ gaffney* || ${ICE_MACHINE} =~ koehr* || ${ICE_MACHINE} =~ mustang*) then +cat >> ${jobfile} << EOFB +#PBS -N ${shortcase} +#PBS -q ${queue} +#PBS -A ${acct} +#PBS -l select=${nnodes}:ncpus=${maxtpn}:mpiprocs=${taskpernode} +#PBS -l walltime=${batchtime} +#PBS -j oe +#PBS -W umask=022 +###PBS -M username@domain.com +###PBS -m be +EOFB + +else if (${ICE_MACHINE} =~ narwhal*) then +if (${runlength} <= 0) then + set batchtime = "00:29:59" + set queue = "debug" +else + set queue = "standard" +endif cat >> ${jobfile} << EOFB #PBS -N ${shortcase} #PBS -q ${queue} diff --git a/configuration/scripts/cice.launch.csh b/configuration/scripts/cice.launch.csh index 40b8996b4..a63c802ed 100755 --- a/configuration/scripts/cice.launch.csh +++ b/configuration/scripts/cice.launch.csh @@ -58,7 +58,7 @@ EOFR endif #======= -else if (${ICE_MACHINE} =~ onyx*) then +else if (${ICE_MACHINE} =~ onyx* || ${ICE_MACHINE} =~ narwhal) then cat >> ${jobfile} << EOFR aprun -n ${ntasks} -N ${taskpernodelimit} -d ${nthrds} ./cice >&! \$ICE_RUNLOG_FILE EOFR diff --git a/configuration/scripts/machines/Macros.narwhal_aocc b/configuration/scripts/machines/Macros.narwhal_aocc new file mode 100644 index 000000000..44b1dc2f6 --- /dev/null +++ b/configuration/scripts/machines/Macros.narwhal_aocc @@ -0,0 +1,59 @@ +#============================================================================== +# Macros file for NAVYDSRC narwhal, aocc compiler +#============================================================================== + +CPP := ftn -E +CPPDEFS := -DNO_R16 -DFORTRANUNDERSCORE ${ICE_CPPDEFS} +CFLAGS := -c -O2 + +FIXEDFLAGS := -ffixed-form +FREEFLAGS := -ffree-form +FFLAGS := -byteswapio +FFLAGS_NOOPT:= -O0 + +ifeq ($(ICE_BLDDEBUG), true) + FFLAGS += -O0 -g -fsanitize=integer-divide-by-zero,float-divide-by-zero,bounds +else + FFLAGS += -O2 +endif + +SCC := cc +SFC := ftn +MPICC := cc +MPIFC := ftn + +ifeq ($(ICE_COMMDIR), mpi) + FC := $(MPIFC) + CC := $(MPICC) +else + FC := $(SFC) + CC := $(SCC) +endif +LD:= $(FC) + +# defined by module +#NETCDF_PATH := $(NETCDF) +#PNETCDF_PATH := $(PNETCDF) +#PNETCDF_PATH := /glade/apps/opt/pnetcdf/1.3.0/intel/default +#LAPACK_LIBDIR := /glade/apps/opt/lapack/3.4.2/intel/12.1.5/lib + +#PIO_CONFIG_OPTS:= --enable-filesystem-hints=gpfs + +INCLDIR := $(INCLDIR) +#INCLDIR += -I$(NETCDF_PATH)/include + +#LIB_NETCDF := $(NETCDF_PATH)/lib +#LIB_PNETCDF := $(PNETCDF_PATH)/lib +#LIB_MPI := $(IMPILIBDIR) +#SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff + +ifeq ($(ICE_THREADED), true) + LDFLAGS += -mp + CFLAGS += -mp + FFLAGS += -mp +else + LDFLAGS += -nomp +# CFLAGS += -nomp + FFLAGS += -nomp +endif + diff --git a/configuration/scripts/machines/Macros.narwhal_cray b/configuration/scripts/machines/Macros.narwhal_cray new file mode 100644 index 000000000..ab0e6378e --- /dev/null +++ b/configuration/scripts/machines/Macros.narwhal_cray @@ -0,0 +1,60 @@ +#============================================================================== +# Macros file for NAVYDSRC narwhal, cray 
compiler +#============================================================================== + +CPP := ftn -e P +CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} +CFLAGS := -c -O2 + +FIXEDFLAGS := -132 +FREEFLAGS := +FFLAGS := -hbyteswapio +FFLAGS_NOOPT:= -O0 +LDFLAGS := -hbyteswapio + +ifeq ($(ICE_BLDDEBUG), true) + FFLAGS += -O0 -hfp0 -g -Rbcdps -Ktrap=fp +else + FFLAGS += -O2 -hfp0 # -eo +endif + +SCC := cc +SFC := ftn +MPICC := cc +MPIFC := ftn + +ifeq ($(ICE_COMMDIR), mpi) + FC := $(MPIFC) + CC := $(MPICC) +else + FC := $(SFC) + CC := $(SCC) +endif +LD:= $(FC) + +# defined by module +#NETCDF_PATH := $(NETCDF) +#PNETCDF_PATH := $(PNETCDF) +#PNETCDF_PATH := /glade/apps/opt/pnetcdf/1.3.0/intel/default +#LAPACK_LIBDIR := /glade/apps/opt/lapack/3.4.2/intel/12.1.5/lib + +#PIO_CONFIG_OPTS:= --enable-filesystem-hints=gpfs + +INCLDIR := $(INCLDIR) +#INCLDIR += -I$(NETCDF_PATH)/include + +#LIB_NETCDF := $(NETCDF_PATH)/lib +#LIB_PNETCDF := $(PNETCDF_PATH)/lib +#LIB_MPI := $(IMPILIBDIR) +#SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff + +ifeq ($(ICE_THREADED), true) + LDFLAGS += -fopenmp + CFLAGS += -fopenmp + FFLAGS += -fopenmp +else + LDFLAGS += -hnoomp +# CFLAGS += -hnoomp + FFLAGS += -hnoomp +endif + diff --git a/configuration/scripts/machines/Macros.narwhal_gnu b/configuration/scripts/machines/Macros.narwhal_gnu new file mode 100644 index 000000000..e980c1e29 --- /dev/null +++ b/configuration/scripts/machines/Macros.narwhal_gnu @@ -0,0 +1,67 @@ +#============================================================================== +# Macros file for NAVYDSRC narwhal, gnu compiler +#============================================================================== + +CPP := ftn -E +CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} +CFLAGS := -c + +FIXEDFLAGS := -ffixed-line-length-132 +FREEFLAGS := -ffree-form +FFLAGS := -fconvert=big-endian -fbacktrace -ffree-line-length-none -fallow-argument-mismatch +FFLAGS_NOOPT:= -O0 + +ifeq ($(ICE_BLDDEBUG), true) + FFLAGS += -O0 -g -fcheck=bounds -finit-real=nan -fimplicit-none -ffpe-trap=invalid,zero,overflow + CFLAGS += -O0 +endif + +ifeq ($(ICE_COVERAGE), true) + FFLAGS += -O0 -g -fprofile-arcs -ftest-coverage + CFLAGS += -O0 -g -coverage + LDFLAGS += -g -ftest-coverage -fprofile-arcs +endif + +ifneq ($(ICE_BLDDEBUG), true) +ifneq ($(ICE_COVERAGE), true) + FFLAGS += -O2 + CFLAGS += -O2 +endif +endif + +SCC := cc +SFC := ftn +MPICC := cc +MPIFC := ftn + +ifeq ($(ICE_COMMDIR), mpi) + FC := $(MPIFC) + CC := $(MPICC) +else + FC := $(SFC) + CC := $(SCC) +endif +LD:= $(FC) + +# defined by module +#NETCDF_PATH := $(NETCDF) +#PNETCDF_PATH := $(PNETCDF) +#PNETCDF_PATH := /glade/apps/opt/pnetcdf/1.3.0/intel/default +#LAPACK_LIBDIR := /glade/apps/opt/lapack/3.4.2/intel/12.1.5/lib + +#PIO_CONFIG_OPTS:= --enable-filesystem-hints=gpfs + +INCLDIR := $(INCLDIR) +#INCLDIR += -I$(NETCDF_PATH)/include + +#LIB_NETCDF := $(NETCDF_PATH)/lib +#LIB_PNETCDF := $(PNETCDF_PATH)/lib +#LIB_MPI := $(IMPILIBDIR) +#SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff + +ifeq ($(ICE_THREADED), true) + LDFLAGS += -fopenmp + CFLAGS += -fopenmp + FFLAGS += -fopenmp +endif + diff --git a/configuration/scripts/machines/Macros.narwhal_intel b/configuration/scripts/machines/Macros.narwhal_intel new file mode 100644 index 000000000..c7c103b24 --- /dev/null +++ b/configuration/scripts/machines/Macros.narwhal_intel @@ -0,0 +1,57 @@ +#============================================================================== +# Macros file for NAVYDSRC narwhal, intel compiler 
+#============================================================================== + +CPP := fpp +CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} +CFLAGS := -c -O2 -fp-model precise -fcommon + +FIXEDFLAGS := -132 +FREEFLAGS := -FR +FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -traceback +# -mcmodel medium -shared-intel +FFLAGS_NOOPT:= -O0 + +ifeq ($(ICE_BLDDEBUG), true) + FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created +# FFLAGS += -O0 -g -check all -fpe0 -ftrapuv -fp-model except -check noarg_temp_created -init=snan,arrays +else + FFLAGS += -O2 +endif + +SCC := cc +SFC := ftn +MPICC := cc +MPIFC := ftn + +ifeq ($(ICE_COMMDIR), mpi) + FC := $(MPIFC) + CC := $(MPICC) +else + FC := $(SFC) + CC := $(SCC) +endif +LD:= $(FC) + +# defined by module +#NETCDF_PATH := $(NETCDF) +#PNETCDF_PATH := $(PNETCDF) +#PNETCDF_PATH := /glade/apps/opt/pnetcdf/1.3.0/intel/default +#LAPACK_LIBDIR := /glade/apps/opt/lapack/3.4.2/intel/12.1.5/lib + +#PIO_CONFIG_OPTS:= --enable-filesystem-hints=gpfs + +INCLDIR := $(INCLDIR) +#INCLDIR += -I$(NETCDF_PATH)/include + +#LIB_NETCDF := $(NETCDF_PATH)/lib +#LIB_PNETCDF := $(PNETCDF_PATH)/lib +#LIB_MPI := $(IMPILIBDIR) +#SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff + +ifeq ($(ICE_THREADED), true) + LDFLAGS += -qopenmp + CFLAGS += -qopenmp + FFLAGS += -qopenmp +endif + diff --git a/configuration/scripts/machines/env.narwhal_aocc b/configuration/scripts/machines/env.narwhal_aocc new file mode 100755 index 000000000..a392f9363 --- /dev/null +++ b/configuration/scripts/machines/env.narwhal_aocc @@ -0,0 +1,52 @@ +#!/bin/csh -f + +set inp = "undefined" +if ($#argv == 1) then + set inp = $1 +endif + +if ("$inp" != "-nomodules") then + +source ${MODULESHOME}/init/csh + +module unload PrgEnv-aocc +module unload PrgEnv-cray +module unload PrgEnv-gnu +module unload PrgEnv-intel +module unload PrgEnv-nvidia +module load PrgEnv-aocc/8.1.0 +module load cray-pals/1.0.17 +module load bct-env/0.1 +module unload aocc +module load aocc/2.2.0.1 +module unload cray-mpich +module load cray-mpich/8.1.5 + +module unload cray-hdf5 +module unload cray-hdf5-parallel +module unload cray-netcdf-hdf5parallel +module unload cray-parallel-netcdf +module unload netcdf +module load cray-netcdf/4.7.4.4 +module load cray-hdf5/1.12.0.4 + +setenv NETCDF_PATH ${NETCDF_DIR} +limit coredumpsize unlimited +limit stacksize unlimited + +endif + +setenv ICE_MACHINE_MACHNAME narwhal +setenv ICE_MACHINE_MACHINFO "Cray EX AMD EPYC 7H12" +setenv ICE_MACHINE_ENVNAME intel +setenv ICE_MACHINE_ENVINFO "aocc_3.0.0-Build#78 2020_12_10 clang/flang 12.0.0, cray-mpich/8.1.9, netcdf/4.7.4.4" +setenv ICE_MACHINE_MAKE gmake +setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS +setenv ICE_MACHINE_INPUTDATA /p/work1/projects/RASM/cice_consortium +setenv ICE_MACHINE_BASELINE $WORKDIR/CICE_BASELINE +setenv ICE_MACHINE_SUBMIT "qsub " +setenv ICE_MACHINE_ACCT P00000000 +setenv ICE_MACHINE_QUEUE "debug" +setenv ICE_MACHINE_TPNODE 128 # tasks per node +setenv ICE_MACHINE_BLDTHRDS 12 +setenv ICE_MACHINE_QSTAT "qstat " diff --git a/configuration/scripts/machines/env.narwhal_cray b/configuration/scripts/machines/env.narwhal_cray new file mode 100755 index 000000000..eb9e42bb2 --- /dev/null +++ b/configuration/scripts/machines/env.narwhal_cray @@ -0,0 +1,53 @@ +#!/bin/csh -f + +set inp = "undefined" +if ($#argv == 1) then + set inp = $1 +endif + +if ("$inp" != "-nomodules") then + +source ${MODULESHOME}/init/csh + +module unload PrgEnv-aocc +module unload PrgEnv-cray +module unload 
PrgEnv-gnu +module unload PrgEnv-intel +module unload PrgEnv-nvidia +module load PrgEnv-cray/8.1.0 +module load cray-pals/1.0.17 +module load bct-env/0.1 +module unload cce +module load cce/12.0.3 +module unload cray-mpich +module load cray-mpich/8.1.9 + +module unload cray-hdf5 +module unload cray-hdf5-parallel +module unload cray-netcdf-hdf5parallel +module unload cray-parallel-netcdf +module unload netcdf +module load cray-netcdf/4.7.4.4 +module load cray-hdf5/1.12.0.4 + +setenv NETCDF_PATH ${NETCDF_DIR} +limit coredumpsize unlimited +limit stacksize unlimited +setenv OMP_WAIT_POLICY passive + +endif + +setenv ICE_MACHINE_MACHNAME narwhal +setenv ICE_MACHINE_MACHINFO "Cray EX AMD EPYC 7H12" +setenv ICE_MACHINE_ENVNAME cray +setenv ICE_MACHINE_ENVINFO "cce 12.0.3, cray-mpich/8.1.9, netcdf/4.7.4.4" +setenv ICE_MACHINE_MAKE gmake +setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS +setenv ICE_MACHINE_INPUTDATA /p/work1/projects/RASM/cice_consortium +setenv ICE_MACHINE_BASELINE $WORKDIR/CICE_BASELINE +setenv ICE_MACHINE_SUBMIT "qsub " +setenv ICE_MACHINE_ACCT P00000000 +setenv ICE_MACHINE_QUEUE "debug" +setenv ICE_MACHINE_TPNODE 128 # tasks per node +setenv ICE_MACHINE_BLDTHRDS 12 +setenv ICE_MACHINE_QSTAT "qstat " diff --git a/configuration/scripts/machines/env.narwhal_gnu b/configuration/scripts/machines/env.narwhal_gnu new file mode 100755 index 000000000..4df81b957 --- /dev/null +++ b/configuration/scripts/machines/env.narwhal_gnu @@ -0,0 +1,52 @@ +#!/bin/csh -f + +set inp = "undefined" +if ($#argv == 1) then + set inp = $1 +endif + +if ("$inp" != "-nomodules") then + +source ${MODULESHOME}/init/csh + +module unload PrgEnv-aocc +module unload PrgEnv-cray +module unload PrgEnv-gnu +module unload PrgEnv-intel +module unload PrgEnv-nvidia +module load PrgEnv-gnu/8.1.0 +module load cray-pals/1.0.17 +module load bct-env/0.1 +module unload gcc +module load gcc/11.2.0 +module unload cray-mpich +module load cray-mpich/8.1.9 + +module unload cray-hdf5 +module unload cray-hdf5-parallel +module unload cray-netcdf-hdf5parallel +module unload cray-parallel-netcdf +module unload netcdf +module load cray-netcdf/4.7.4.4 +module load cray-hdf5/1.12.0.4 + +setenv NETCDF_PATH ${NETCDF_DIR} +limit coredumpsize unlimited +limit stacksize unlimited + +endif + +setenv ICE_MACHINE_MACHNAME narwhal +setenv ICE_MACHINE_MACHINFO "Cray EX AMD EPYC 7H12" +setenv ICE_MACHINE_ENVNAME intel +setenv ICE_MACHINE_ENVINFO "gnu fortran/c 11.2.0, cray-mpich/8.1.9, netcdf/4.7.4.4" +setenv ICE_MACHINE_MAKE gmake +setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS +setenv ICE_MACHINE_INPUTDATA /p/work1/projects/RASM/cice_consortium +setenv ICE_MACHINE_BASELINE $WORKDIR/CICE_BASELINE +setenv ICE_MACHINE_SUBMIT "qsub " +setenv ICE_MACHINE_ACCT P00000000 +setenv ICE_MACHINE_QUEUE "debug" +setenv ICE_MACHINE_TPNODE 128 # tasks per node +setenv ICE_MACHINE_BLDTHRDS 12 +setenv ICE_MACHINE_QSTAT "qstat " diff --git a/configuration/scripts/machines/env.narwhal_intel b/configuration/scripts/machines/env.narwhal_intel new file mode 100755 index 000000000..2cdf4f93c --- /dev/null +++ b/configuration/scripts/machines/env.narwhal_intel @@ -0,0 +1,52 @@ +#!/bin/csh -f + +set inp = "undefined" +if ($#argv == 1) then + set inp = $1 +endif + +if ("$inp" != "-nomodules") then + +source ${MODULESHOME}/init/csh + +module unload PrgEnv-aocc +module unload PrgEnv-cray +module unload PrgEnv-gnu +module unload PrgEnv-intel +module unload PrgEnv-nvidia +module load PrgEnv-intel/8.0.0 +module load cray-pals/1.0.17 +module load bct-env/0.1 +module unload intel 
+module load intel/2021.1 +module unload cray-mpich +module load cray-mpich/8.1.9 + +module unload cray-hdf5 +module unload cray-hdf5-parallel +module unload cray-netcdf-hdf5parallel +module unload cray-parallel-netcdf +module unload netcdf +module load cray-netcdf/4.7.4.4 +module load cray-hdf5/1.12.0.4 + +setenv NETCDF_PATH ${NETCDF_DIR} +limit coredumpsize unlimited +limit stacksize unlimited + +endif + +setenv ICE_MACHINE_MACHNAME narwhal +setenv ICE_MACHINE_MACHINFO "Cray EX AMD EPYC 7H12" +setenv ICE_MACHINE_ENVNAME intel +setenv ICE_MACHINE_ENVINFO "ifort 2021.1 Beta 20201112, cray-mpich/8.1.9, netcdf/4.7.4.4" +setenv ICE_MACHINE_MAKE gmake +setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS +setenv ICE_MACHINE_INPUTDATA /p/work1/projects/RASM/cice_consortium +setenv ICE_MACHINE_BASELINE $WORKDIR/CICE_BASELINE +setenv ICE_MACHINE_SUBMIT "qsub " +setenv ICE_MACHINE_ACCT P00000000 +setenv ICE_MACHINE_QUEUE "debug" +setenv ICE_MACHINE_TPNODE 128 # tasks per node +setenv ICE_MACHINE_BLDTHRDS 12 +setenv ICE_MACHINE_QSTAT "qstat " diff --git a/configuration/scripts/tests/perf_suite.ts b/configuration/scripts/tests/perf_suite.ts new file mode 100644 index 000000000..859b9f21b --- /dev/null +++ b/configuration/scripts/tests/perf_suite.ts @@ -0,0 +1,27 @@ +# Test Grid PEs Sets BFB-compare +smoke gx1 1x1x320x384x1 run2day,droundrobin +smoke gx1 64x1x16x16x8 run2day,droundrobin,thread +sleep 180 +# +smoke gx1 1x1x320x384x1 run2day,droundrobin +smoke gx1 1x1x160x192x4 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day +smoke gx1 1x1x80x96x16 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day +smoke gx1 1x1x40x48x64 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day +smoke gx1 1x1x20x24x256 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day +# +smoke gx1 1x1x16x16x480 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day +smoke gx1 2x1x16x16x240 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day +smoke gx1 4x1x16x16x120 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day +smoke gx1 8x1x16x16x60 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day +smoke gx1 16x1x16x16x30 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day +smoke gx1 32x1x16x16x15 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day +smoke gx1 64x1x16x16x8 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day +smoke gx1 128x1x16x16x4 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day +# +smoke gx1 64x1x16x16x8 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day +smoke gx1 64x1x16x16x8 run2day,droundrobin,thread +smoke gx1 32x2x16x16x16 run2day,droundrobin smoke_gx1_64x1x16x16x8_droundrobin_run2day_thread +smoke gx1 16x4x16x16x32 run2day,droundrobin smoke_gx1_64x1x16x16x8_droundrobin_run2day_thread +smoke gx1 8x8x16x16x64 run2day,droundrobin smoke_gx1_64x1x16x16x8_droundrobin_run2day_thread +smoke gx1 4x16x16x16x128 run2day,droundrobin smoke_gx1_64x1x16x16x8_droundrobin_run2day_thread +# From c660075e50530add27cf920ece06911577612a80 Mon Sep 17 00:00:00 2001 From: Denise Worthen Date: Tue, 10 May 2022 15:28:44 -0400 Subject: [PATCH 61/90] Update CICE to Consortium/main (#48) Update OpenMP directives as needed including validation via new omp_suite. Fixed OpenMP in dynamics. 
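
For orientation, the threading pattern these directive updates converge on is a block loop with any scalar test hoisted outside the parallel region and the schedule deferred to the runtime (see the CICE_RunMod.F90 and ice_grid.F90 hunks below). A minimal self-contained sketch; nblocks and do_block are stand-ins, not CICE code:

   ! Sketch only: stand-ins for the CICE block loop and step_therm1-style calls.
   program block_loop_sketch
      use omp_lib
      implicit none
      integer, parameter :: nblocks = 8        ! stand-in for the block count
      logical, parameter :: do_thermo = .true. ! stand-in for ktherm >= 0
      integer :: iblk

      if (do_thermo) then                      ! test hoisted out of the loop
         !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(RUNTIME)
         do iblk = 1, nblocks
            call do_block(iblk)                ! stand-in for step_therm1(dt, iblk)
         enddo
         !$OMP END PARALLEL DO
      endif

   contains

      subroutine do_block(ib)
         integer, intent(in) :: ib
         write(*,'(a,i3,a,i3)') ' block', ib, ' on thread', omp_get_thread_num()
      end subroutine do_block

   end program block_loop_sketch

With schedule(runtime) the block-to-thread mapping follows the OMP_SCHEDULE environment variable, which is what the new set_env.ompscheds* options vary.
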
Refactored eap puny/pi lookups to improve scalar performance Update Tsfc implementation to make sure land blocks don't set Tsfc to freezing temp Update for sea bed stress calculations --- LICENSE.pdf | Bin 80898 -> 113509 bytes .../cicedynB/analysis/ice_diagnostics.F90 | 11 +- cicecore/cicedynB/analysis/ice_history.F90 | 1 - cicecore/cicedynB/dynamics/ice_dyn_eap.F90 | 198 +++++++----------- cicecore/cicedynB/dynamics/ice_dyn_evp.F90 | 133 +++++------- cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 | 23 +- cicecore/cicedynB/dynamics/ice_dyn_shared.F90 | 126 ++++++----- cicecore/cicedynB/dynamics/ice_dyn_vp.F90 | 66 +++--- .../dynamics/ice_transport_driver.F90 | 46 +++- .../cicedynB/dynamics/ice_transport_remap.F90 | 26 ++- cicecore/cicedynB/general/ice_init.F90 | 13 +- cicecore/cicedynB/general/ice_step_mod.F90 | 26 +-- .../infrastructure/comm/mpi/ice_timers.F90 | 36 +++- .../infrastructure/comm/serial/ice_timers.F90 | 34 ++- cicecore/cicedynB/infrastructure/ice_grid.F90 | 31 +++ .../infrastructure/ice_restart_driver.F90 | 11 + .../cicedynB/infrastructure/ice_restoring.F90 | 6 +- cicecore/drivers/direct/hadgem3/CICE.F90 | 4 +- cicecore/drivers/mct/cesm1/CICE_RunMod.F90 | 1 - cicecore/drivers/mct/cesm1/CICE_copyright.txt | 4 +- .../drivers/nuopc/cmeps/CICE_copyright.txt | 4 +- .../drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 1 - .../drivers/nuopc/cmeps/ice_import_export.F90 | 4 +- cicecore/drivers/nuopc/dmi/CICE.F90 | 4 +- cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 | 4 +- cicecore/drivers/standalone/cice/CICE.F90 | 4 +- .../drivers/standalone/cice/CICE_FinalMod.F90 | 5 +- .../drivers/standalone/cice/CICE_RunMod.F90 | 28 ++- cicecore/version.txt | 2 +- configuration/scripts/cice.batch.csh | 3 +- configuration/scripts/cice.launch.csh | 6 - configuration/scripts/cice.run.setup.csh | 6 +- configuration/scripts/cice.settings | 2 + configuration/scripts/ice_in | 1 + .../scripts/machines/Macros.conrad_cray | 57 ----- .../scripts/machines/Macros.conrad_gnu | 55 ----- .../scripts/machines/Macros.conrad_intel | 56 ----- .../scripts/machines/Macros.conrad_pgi | 55 ----- .../scripts/machines/Macros.gordon_cray | 57 ----- .../scripts/machines/Macros.gordon_gnu | 67 ------ .../scripts/machines/Macros.gordon_intel | 55 ----- .../scripts/machines/Macros.gordon_pgi | 55 ----- .../scripts/machines/env.banting_gnu | 3 + .../scripts/machines/env.banting_intel | 3 + .../scripts/machines/env.cesium_intel | 3 + .../scripts/machines/env.cheyenne_gnu | 6 +- .../scripts/machines/env.cheyenne_intel | 6 +- .../scripts/machines/env.cheyenne_pgi | 6 +- .../scripts/machines/env.compy_intel | 3 + .../scripts/machines/env.conda_linux | 3 + .../scripts/machines/env.conda_macos | 3 + .../scripts/machines/env.conrad_cray | 57 ----- configuration/scripts/machines/env.conrad_gnu | 77 ------- .../scripts/machines/env.conrad_intel | 59 ------ configuration/scripts/machines/env.conrad_pgi | 57 ----- configuration/scripts/machines/env.cori_intel | 1 + configuration/scripts/machines/env.daley_gnu | 3 + .../scripts/machines/env.daley_intel | 3 + configuration/scripts/machines/env.fram_intel | 3 + configuration/scripts/machines/env.freya_gnu | 3 +- .../scripts/machines/env.freya_intel | 1 + configuration/scripts/machines/env.gaea_intel | 3 + .../scripts/machines/env.gaffney_gnu | 1 + .../scripts/machines/env.gaffney_intel | 1 + .../scripts/machines/env.gordon_cray | 57 ----- configuration/scripts/machines/env.gordon_gnu | 57 ----- .../scripts/machines/env.gordon_intel | 59 ------ configuration/scripts/machines/env.gordon_pgi | 57 ----- 
configuration/scripts/machines/env.hera_intel | 3 + .../scripts/machines/env.high_Sierra_gnu | 3 + .../scripts/machines/env.hobart_intel | 3 + configuration/scripts/machines/env.hobart_nag | 3 + .../scripts/machines/env.koehr_intel | 3 + .../scripts/machines/env.millikan_intel | 3 + .../scripts/machines/env.mustang_intel18 | 2 +- .../scripts/machines/env.mustang_intel19 | 2 +- .../scripts/machines/env.mustang_intel20 | 2 +- .../scripts/machines/env.narwhal_aocc | 2 + .../scripts/machines/env.narwhal_cray | 3 +- .../scripts/machines/env.narwhal_gnu | 2 + .../scripts/machines/env.narwhal_intel | 2 + configuration/scripts/machines/env.onyx_cray | 1 + configuration/scripts/machines/env.onyx_gnu | 1 + configuration/scripts/machines/env.onyx_intel | 1 + .../scripts/machines/env.orion_intel | 3 + .../scripts/machines/env.phase3_intel | 3 + .../scripts/machines/env.testmachine_intel | 3 + .../scripts/machines/env.travisCI_gnu | 3 + configuration/scripts/options/set_env.cmplog | 1 + .../scripts/options/set_env.cmplogrest | 1 + configuration/scripts/options/set_env.cmprest | 1 + .../scripts/options/set_env.ompschedd1 | 1 + .../scripts/options/set_env.ompscheds | 1 + .../scripts/options/set_env.ompscheds1 | 1 + configuration/scripts/options/set_env.qcchk | 1 + configuration/scripts/options/set_env.qcchkf | 1 + configuration/scripts/options/set_nml.dt3456s | 1 + .../scripts/options/set_nml.qcnonbfb | 16 -- .../scripts/options/set_nml.timerstats | 1 + configuration/scripts/tests/baseline.script | 54 ++++- configuration/scripts/tests/first_suite.ts | 2 +- configuration/scripts/tests/nothread_suite.ts | 14 +- configuration/scripts/tests/omp_suite.ts | 46 ++++ configuration/scripts/tests/perf_suite.ts | 3 + configuration/scripts/tests/prod_suite.ts | 8 +- configuration/scripts/tests/reprosum_suite.ts | 20 +- .../scripts/tests/test_logbfb.script | 33 --- .../scripts/tests/test_qcchkf.script | 36 ---- doc/source/cice_index.rst | 1 + doc/source/conf.py | 6 +- doc/source/intro/copyright.rst | 2 +- doc/source/user_guide/ug_case_settings.rst | 27 ++- doc/source/user_guide/ug_implementation.rst | 61 ++++-- icepack | 2 +- 114 files changed, 747 insertions(+), 1504 deletions(-) delete mode 100644 configuration/scripts/machines/Macros.conrad_cray delete mode 100644 configuration/scripts/machines/Macros.conrad_gnu delete mode 100644 configuration/scripts/machines/Macros.conrad_intel delete mode 100644 configuration/scripts/machines/Macros.conrad_pgi delete mode 100644 configuration/scripts/machines/Macros.gordon_cray delete mode 100644 configuration/scripts/machines/Macros.gordon_gnu delete mode 100644 configuration/scripts/machines/Macros.gordon_intel delete mode 100644 configuration/scripts/machines/Macros.gordon_pgi delete mode 100755 configuration/scripts/machines/env.conrad_cray delete mode 100755 configuration/scripts/machines/env.conrad_gnu delete mode 100755 configuration/scripts/machines/env.conrad_intel delete mode 100755 configuration/scripts/machines/env.conrad_pgi delete mode 100755 configuration/scripts/machines/env.gordon_cray delete mode 100755 configuration/scripts/machines/env.gordon_gnu delete mode 100755 configuration/scripts/machines/env.gordon_intel delete mode 100755 configuration/scripts/machines/env.gordon_pgi create mode 100644 configuration/scripts/options/set_env.cmplog create mode 100644 configuration/scripts/options/set_env.cmplogrest create mode 100644 configuration/scripts/options/set_env.cmprest create mode 100644 configuration/scripts/options/set_env.ompschedd1 create mode 100644 
configuration/scripts/options/set_env.ompscheds
create mode 100644 configuration/scripts/options/set_env.ompscheds1
create mode 100644 configuration/scripts/options/set_env.qcchk
create mode 100644 configuration/scripts/options/set_env.qcchkf
create mode 100644 configuration/scripts/options/set_nml.dt3456s
delete mode 100644 configuration/scripts/options/set_nml.qcnonbfb
create mode 100644 configuration/scripts/options/set_nml.timerstats
create mode 100644 configuration/scripts/tests/omp_suite.ts
delete mode 100644 configuration/scripts/tests/test_logbfb.script
delete mode 100644 configuration/scripts/tests/test_qcchkf.script
diff --git a/LICENSE.pdf b/LICENSE.pdf
index 5d6b29280111f197c9f13fd29e59e3011d268a74..d98d3da80d4be814224a113d781d15426d4bbf6d 100644
GIT binary patch
delta 28961
[remainder of the LICENSE.pdf base85 binary delta truncated in this copy of the patch]
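
Before the recovered ice_timers hunks below: the timers added there are used with the start/stop bracketing visible elsewhere in this patch, e.g. ice_timer_start(timer_couple,iblk) paired with ice_timer_stop(timer_couple,iblk). A minimal self-contained sketch of that bracketing built on system_clock; the UpdState label mirrors the new timer, but this is not the actual CICE module:

   ! Sketch only: times one bracketed section, in the spirit of
   ! ice_timer_start/ice_timer_stop around the new UpdState timer.
   program timer_sketch
      implicit none
      integer(kind=8) :: count_start, count_stop, count_rate
      real(kind=8)    :: elapsed, work
      integer :: i

      call system_clock(count_rate=count_rate)

      call system_clock(count_start)     ! cf. ice_timer_start(timer_updstate)
      work = 0.0d0
      do i = 1, 1000000                  ! stand-in for the timed work
         work = work + sqrt(real(i, kind=8))
      enddo
      call system_clock(count_stop)      ! cf. ice_timer_stop(timer_updstate)

      elapsed = real(count_stop - count_start, kind=8) / real(count_rate, kind=8)
      write(*,'(a,f10.6,a)') ' UpdState sketch: ', elapsed, ' s'
   end program timer_sketch

Judging from the get_ice_timer and ice_timer_print_all(stats=...) calls, the real timers accumulate over many start/stop pairs and can report per-block statistics; the sketch covers a single pair.
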
[the diff header for cicecore/cicedynB/infrastructure/comm/mpi/ice_timers.F90 was lost in the same truncation; its hunks resume mid-context below]
 #endif
-      call get_ice_timer(timer_evp_1d, '1d-evp', nblocks,distrb_info%nprocs)
-      call get_ice_timer(timer_evp_2d, '2d-evp', nblocks,distrb_info%nprocs)
-!     call get_ice_timer(timer_tmp, ' ',nblocks,distrb_info%nprocs)
+      call get_ice_timer(timer_evp_1d, '1d-evp', nblocks,distrb_info%nprocs)
+      call get_ice_timer(timer_evp_2d, '2d-evp', nblocks,distrb_info%nprocs)
+      call get_ice_timer(timer_updstate, 'UpdState', nblocks,distrb_info%nprocs)
+!     call get_ice_timer(timer_tmp1, 'tmp1', nblocks,distrb_info%nprocs)
+!     call get_ice_timer(timer_tmp2, 'tmp2', nblocks,distrb_info%nprocs)
+!
call get_ice_timer(timer_tmp3, 'tmp3', nblocks,distrb_info%nprocs) +! call get_ice_timer(timer_tmp4, 'tmp4', nblocks,distrb_info%nprocs) +! call get_ice_timer(timer_tmp5, 'tmp5', nblocks,distrb_info%nprocs) +! call get_ice_timer(timer_tmp6, 'tmp6', nblocks,distrb_info%nprocs) +! call get_ice_timer(timer_tmp7, 'tmp7', nblocks,distrb_info%nprocs) +! call get_ice_timer(timer_tmp8, 'tmp8', nblocks,distrb_info%nprocs) +! call get_ice_timer(timer_tmp9, 'tmp9', nblocks,distrb_info%nprocs) !----------------------------------------------------------------------- @@ -333,6 +355,7 @@ subroutine ice_timer_start(timer_id, block_id) character(len=*), parameter :: subname = '(ice_timer_start)' +! if (my_task == master_task) write(nu_diag,*) subname,trim(all_timers(timer_id)%name) !----------------------------------------------------------------------- ! ! if timer is defined, start it up @@ -433,6 +456,7 @@ subroutine ice_timer_stop(timer_id, block_id) character(len=*), parameter :: subname = '(ice_timer_stop)' +! if (my_task == master_task) write(nu_diag,*) subname,trim(all_timers(timer_id)%name) !----------------------------------------------------------------------- ! ! get end cycles diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_timers.F90 b/cicecore/cicedynB/infrastructure/comm/serial/ice_timers.F90 index 4599de42e..b18c35040 100644 --- a/cicecore/cicedynB/infrastructure/comm/serial/ice_timers.F90 +++ b/cicecore/cicedynB/infrastructure/comm/serial/ice_timers.F90 @@ -30,6 +30,9 @@ module ice_timers ice_timer_print_all, & ice_timer_check + logical(log_kind), public :: & + timer_stats ! controls printing of timer statistics + !----------------------------------------------------------------------- ! public timers !----------------------------------------------------------------------- @@ -54,8 +57,18 @@ module ice_timers timer_bgc, &! biogeochemistry timer_forcing, &! forcing timer_evp_1d, &! timer only loop - timer_evp_2d ! timer including conversion 1d/2d -! timer_tmp ! for temporary timings + timer_evp_2d, &! timer including conversion 1d/2d + timer_updstate ! update state +! timer_updstate, &! update state +! timer_tmp1, &! for temporary timings +! timer_tmp2, &! for temporary timings +! timer_tmp3, &! for temporary timings +! timer_tmp4, &! for temporary timings +! timer_tmp5, &! for temporary timings +! timer_tmp6, &! for temporary timings +! timer_tmp7, &! for temporary timings +! timer_tmp8, &! for temporary timings +! timer_tmp9 ! for temporary timings !----------------------------------------------------------------------- ! @@ -187,7 +200,7 @@ subroutine init_ice_timers ! call get_ice_timer(timer_ponds, 'Meltponds',nblocks,distrb_info%nprocs) call get_ice_timer(timer_ridge, 'Ridging', nblocks,distrb_info%nprocs) ! call get_ice_timer(timer_catconv, 'Cat Conv', nblocks,distrb_info%nprocs) - call get_ice_timer(timer_fsd, 'Floe size',nblocks,distrb_info%nprocs) + call get_ice_timer(timer_fsd, 'FloeSize', nblocks,distrb_info%nprocs) call get_ice_timer(timer_couple, 'Coupling', nblocks,distrb_info%nprocs) call get_ice_timer(timer_readwrite,'ReadWrite',nblocks,distrb_info%nprocs) call get_ice_timer(timer_diags, 'Diags ',nblocks,distrb_info%nprocs) @@ -197,7 +210,16 @@ subroutine init_ice_timers call get_ice_timer(timer_forcing, 'Forcing', nblocks,distrb_info%nprocs) call get_ice_timer(timer_evp_1d, '1d-evp', nblocks,distrb_info%nprocs) call get_ice_timer(timer_evp_2d, '2d-evp', nblocks,distrb_info%nprocs) -! 
call get_ice_timer(timer_tmp, ' ',nblocks,distrb_info%nprocs) + call get_ice_timer(timer_updstate, 'UpdState', nblocks,distrb_info%nprocs) +! call get_ice_timer(timer_tmp1, 'tmp1', nblocks,distrb_info%nprocs) +! call get_ice_timer(timer_tmp2, 'tmp2', nblocks,distrb_info%nprocs) +! call get_ice_timer(timer_tmp3, 'tmp3', nblocks,distrb_info%nprocs) +! call get_ice_timer(timer_tmp4, 'tmp4', nblocks,distrb_info%nprocs) +! call get_ice_timer(timer_tmp5, 'tmp5', nblocks,distrb_info%nprocs) +! call get_ice_timer(timer_tmp6, 'tmp6', nblocks,distrb_info%nprocs) +! call get_ice_timer(timer_tmp7, 'tmp7', nblocks,distrb_info%nprocs) +! call get_ice_timer(timer_tmp8, 'tmp8', nblocks,distrb_info%nprocs) +! call get_ice_timer(timer_tmp9, 'tmp9', nblocks,distrb_info%nprocs) !----------------------------------------------------------------------- @@ -341,6 +363,8 @@ subroutine ice_timer_start(timer_id, block_id) character(len=*), parameter :: subname = '(ice_timer_start)' +! if (my_task == master_task) write(nu_diag,*) subname,trim(all_timers(timer_id)%name) + !----------------------------------------------------------------------- ! ! if timer is defined, start it up @@ -444,6 +468,8 @@ subroutine ice_timer_stop(timer_id, block_id) character(len=*), parameter :: subname = '(ice_timer_stop)' +! if (my_task == master_task) write(nu_diag,*) subname,trim(all_timers(timer_id)%name) + !----------------------------------------------------------------------- ! ! get end cycles diff --git a/cicecore/cicedynB/infrastructure/ice_grid.F90 b/cicecore/cicedynB/infrastructure/ice_grid.F90 index 18dbaaefe..2cf7775ab 100644 --- a/cicecore/cicedynB/infrastructure/ice_grid.F90 +++ b/cicecore/cicedynB/infrastructure/ice_grid.F90 @@ -358,6 +358,9 @@ subroutine init_grid2 field_loc_center, field_loc_NEcorner, & field_type_scalar, field_type_vector, field_type_angle use ice_domain_size, only: max_blocks +#if defined (_OPENMP) + use OMP_LIB +#endif integer (kind=int_kind) :: & i, j, iblk, & @@ -376,6 +379,11 @@ subroutine init_grid2 type (block) :: & this_block ! block information for current block +#if defined (_OPENMP) + integer(kind=omp_sched_kind) :: ompsk ! openmp schedule + integer(kind=int_kind) :: ompcs ! openmp schedule count +#endif + character(len=*), parameter :: subname = '(init_grid2)' !----------------------------------------------------------------- @@ -406,6 +414,29 @@ subroutine init_grid2 call rectgrid ! regular rectangular grid endif + !----------------------------------------------------------------- + ! Diagnose OpenMP thread schedule, force order in output + !----------------------------------------------------------------- + +#if defined (_OPENMP) + !$OMP PARALLEL DO ORDERED PRIVATE(iblk) SCHEDULE(runtime) + do iblk = 1, nblocks + if (my_task == master_task) then + !$OMP ORDERED + if (iblk == 1) then + call omp_get_schedule(ompsk,ompcs) + write(nu_diag,*) '' + write(nu_diag,*) subname,' OpenMP runtime thread schedule:' + write(nu_diag,*) subname,' omp schedule = ',ompsk,ompcs + endif + write(nu_diag,*) subname,' block, thread = ',iblk,OMP_GET_THREAD_NUM() + call flush_fileunit(nu_diag) + !$OMP END ORDERED + endif + enddo + !$OMP END PARALLEL DO +#endif + !----------------------------------------------------------------- ! 
T-grid cell and U-grid cell quantities !----------------------------------------------------------------- diff --git a/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 b/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 index 1a5681b38..ebbef60e0 100644 --- a/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 +++ b/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 @@ -416,6 +416,17 @@ subroutine restartfile (ice_ic) enddo !$OMP END PARALLEL DO + ! set Tsfcn to c0 on land + !$OMP PARALLEL DO PRIVATE(iblk,i,j) + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + if (.not. tmask(i,j,iblk)) trcrn(i,j,nt_Tsfc,:,iblk) = c0 + enddo + enddo + enddo + !$OMP END PARALLEL DO + ! for mixed layer model if (oceanmixed_ice) then diff --git a/cicecore/cicedynB/infrastructure/ice_restoring.F90 b/cicecore/cicedynB/infrastructure/ice_restoring.F90 index c7254cd80..f21e50513 100644 --- a/cicecore/cicedynB/infrastructure/ice_restoring.F90 +++ b/cicecore/cicedynB/infrastructure/ice_restoring.F90 @@ -394,7 +394,11 @@ subroutine set_restore_var (nx_block, ny_block, & aicen(i,j,n) = c0 vicen(i,j,n) = c0 vsnon(i,j,n) = c0 - trcrn(i,j,nt_Tsfc,n) = Tf(i,j) ! surface temperature + if (tmask(i,j)) then + trcrn(i,j,nt_Tsfc,n) = Tf(i,j) ! surface temperature + else + trcrn(i,j,nt_Tsfc,n) = c0 ! on land gridcells + endif if (ntrcr >= 2) then do it = 2, ntrcr trcrn(i,j,it,n) = c0 diff --git a/cicecore/drivers/direct/hadgem3/CICE.F90 b/cicecore/drivers/direct/hadgem3/CICE.F90 index b2314240c..b0176e801 100644 --- a/cicecore/drivers/direct/hadgem3/CICE.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE.F90 @@ -1,8 +1,8 @@ !======================================================================= -! Copyright (c) 2021, Triad National Security, LLC +! Copyright (c) 2022, Triad National Security, LLC ! All rights reserved. ! -! Copyright 2021. Triad National Security, LLC. This software was +! Copyright 2022. Triad National Security, LLC. This software was ! produced under U.S. Government contract DE-AC52-06NA25396 for Los ! Alamos National Laboratory (LANL), which is operated by Triad ! National Security, LLC for the U.S. Department of Energy. The U.S. diff --git a/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 b/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 index e9ab0d7e4..610b146a6 100644 --- a/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 +++ b/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 @@ -287,7 +287,6 @@ subroutine ice_step call ice_timer_start(timer_column) ! column physics call ice_timer_start(timer_thermo) ! thermodynamics -!MHRI: CHECK THIS OMP !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks diff --git a/cicecore/drivers/mct/cesm1/CICE_copyright.txt b/cicecore/drivers/mct/cesm1/CICE_copyright.txt index e10da1e77..6eb3c9cca 100644 --- a/cicecore/drivers/mct/cesm1/CICE_copyright.txt +++ b/cicecore/drivers/mct/cesm1/CICE_copyright.txt @@ -1,7 +1,7 @@ -! Copyright (c) 2021, Triad National Security, LLC +! Copyright (c) 2022, Triad National Security, LLC ! All rights reserved. ! -! Copyright 2021. Triad National Security, LLC. This software was +! Copyright 2022. Triad National Security, LLC. This software was ! produced under U.S. Government contract DE-AC52-06NA25396 for Los ! Alamos National Laboratory (LANL), which is operated by Triad ! National Security, LLC for the U.S. Department of Energy. The U.S. 
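
The init_grid2 hunk above reports the active OpenMP schedule from inside an ordered block loop. A stripped-down, self-contained version of that diagnostic (no MPI master_task guard, no CICE types), assuming only the standard omp_lib interface:

   ! Sketch only: the same omp_get_schedule diagnostic, outside CICE.
   program omp_schedule_sketch
      use omp_lib
      implicit none
      integer, parameter :: nblocks = 8
      integer :: iblk
      integer(kind=omp_sched_kind) :: ompsk   ! schedule kind
      integer :: ompcs                        ! chunk size

      !$OMP PARALLEL DO ORDERED PRIVATE(iblk) SCHEDULE(runtime)
      do iblk = 1, nblocks
         !$OMP ORDERED
         if (iblk == 1) then
            call omp_get_schedule(ompsk, ompcs)
            write(*,*) 'omp schedule = ', ompsk, ompcs
         endif
         write(*,*) 'block, thread = ', iblk, omp_get_thread_num()
         !$OMP END ORDERED
      enddo
      !$OMP END PARALLEL DO
   end program omp_schedule_sketch

Built with OpenMP enabled (e.g. gfortran -fopenmp), the OMP_SCHEDULE setting appears here as the reported kind and chunk size, so the run log records which schedule(runtime) decomposition was actually in effect.
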
diff --git a/cicecore/drivers/nuopc/cmeps/CICE_copyright.txt b/cicecore/drivers/nuopc/cmeps/CICE_copyright.txt index e10da1e77..6eb3c9cca 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_copyright.txt +++ b/cicecore/drivers/nuopc/cmeps/CICE_copyright.txt @@ -1,7 +1,7 @@ -! Copyright (c) 2021, Triad National Security, LLC +! Copyright (c) 2022, Triad National Security, LLC ! All rights reserved. ! -! Copyright 2021. Triad National Security, LLC. This software was +! Copyright 2022. Triad National Security, LLC. This software was ! produced under U.S. Government contract DE-AC52-06NA25396 for Los ! Alamos National Laboratory (LANL), which is operated by Triad ! National Security, LLC for the U.S. Department of Energy. The U.S. diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index a9d71e479..8920ea386 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -549,7 +549,6 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) scol_valid = (scol_mask == 1) if (.not. scol_valid) then - write(6,*)'DEBUG: i am here' ! Advertise fields call ice_advertise_fields(gcomp, importState, exportState, flds_scalar_name, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return diff --git a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 index 8fe939785..11d3e6bf0 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 @@ -187,9 +187,9 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam ! in the cmeps esmFldsExchange_xxx_mod.F90 that is model specific ! from atm - black carbon deposition fluxes (3) call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_bcph', ungridded_lbound=1, ungridded_ubound=3) - ! from atm - wet dust deposition frluxes (4 sizes) + ! from atm - wet dust deposition fluxes (4 sizes) call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_dstwet', ungridded_lbound=1, ungridded_ubound=4) - ! from - atm dry dust deposition frluxes (4 sizes) + ! from - atm dry dust deposition fluxes (4 sizes) call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_dstdry', ungridded_lbound=1, ungridded_ubound=4) do n = 1,fldsToIce_num diff --git a/cicecore/drivers/nuopc/dmi/CICE.F90 b/cicecore/drivers/nuopc/dmi/CICE.F90 index 7056e0e5b..d6a28c3ba 100644 --- a/cicecore/drivers/nuopc/dmi/CICE.F90 +++ b/cicecore/drivers/nuopc/dmi/CICE.F90 @@ -1,8 +1,8 @@ !======================================================================= -! Copyright (c) 2021, Triad National Security, LLC +! Copyright (c) 2022, Triad National Security, LLC ! All rights reserved. ! -! Copyright 2021. Triad National Security, LLC. This software was +! Copyright 2022. Triad National Security, LLC. This software was ! produced under U.S. Government contract DE-AC52-06NA25396 for Los ! Alamos National Laboratory (LANL), which is operated by Triad ! National Security, LLC for the U.S. Department of Energy. The U.S. diff --git a/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 b/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 index 2d3e22973..33b9a165c 100644 --- a/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 @@ -496,7 +496,7 @@ subroutine coupling_prep (iblk) enddo enddo - call ice_timer_start(timer_couple) ! atm/ocn coupling + call ice_timer_start(timer_couple,iblk) ! atm/ocn coupling if (oceanmixed_ice) & call ocean_mixed_layer (dt,iblk) ! 
ocean surface fluxes and sst @@ -663,7 +663,7 @@ subroutine coupling_prep (iblk) endif !echmod #endif - call ice_timer_stop(timer_couple) ! atm/ocn coupling + call ice_timer_stop(timer_couple,iblk) ! atm/ocn coupling end subroutine coupling_prep diff --git a/cicecore/drivers/standalone/cice/CICE.F90 b/cicecore/drivers/standalone/cice/CICE.F90 index 7056e0e5b..d6a28c3ba 100644 --- a/cicecore/drivers/standalone/cice/CICE.F90 +++ b/cicecore/drivers/standalone/cice/CICE.F90 @@ -1,8 +1,8 @@ !======================================================================= -! Copyright (c) 2021, Triad National Security, LLC +! Copyright (c) 2022, Triad National Security, LLC ! All rights reserved. ! -! Copyright 2021. Triad National Security, LLC. This software was +! Copyright 2022. Triad National Security, LLC. This software was ! produced under U.S. Government contract DE-AC52-06NA25396 for Los ! Alamos National Laboratory (LANL), which is operated by Triad ! National Security, LLC for the U.S. Department of Energy. The U.S. diff --git a/cicecore/drivers/standalone/cice/CICE_FinalMod.F90 b/cicecore/drivers/standalone/cice/CICE_FinalMod.F90 index a59c210aa..28811c3cd 100644 --- a/cicecore/drivers/standalone/cice/CICE_FinalMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_FinalMod.F90 @@ -31,7 +31,8 @@ module CICE_FinalMod subroutine CICE_Finalize use ice_restart_shared, only: runid - use ice_timers, only: ice_timer_stop, ice_timer_print_all, timer_total + use ice_timers, only: ice_timer_stop, ice_timer_print_all, timer_total, & + timer_stats character(len=*), parameter :: subname = '(CICE_Finalize)' @@ -40,7 +41,7 @@ subroutine CICE_Finalize !------------------------------------------------------------------- call ice_timer_stop(timer_total) ! stop timing entire run - call ice_timer_print_all(stats=.false.) ! print timing information + call ice_timer_print_all(stats=timer_stats) ! print timing information call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & diff --git a/cicecore/drivers/standalone/cice/CICE_RunMod.F90 b/cicecore/drivers/standalone/cice/CICE_RunMod.F90 index 0fde18e04..aef6a1ec1 100644 --- a/cicecore/drivers/standalone/cice/CICE_RunMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_RunMod.F90 @@ -218,10 +218,9 @@ subroutine ice_step call save_init - !$OMP PARALLEL DO PRIVATE(iblk) - do iblk = 1, nblocks - - if (ktherm >= 0) then + if (ktherm >= 0) then + !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) + do iblk = 1, nblocks !----------------------------------------------------------------- ! scale radiation fields @@ -237,7 +236,7 @@ subroutine ice_step !----------------------------------------------------------------- ! thermodynamics and biogeochemistry !----------------------------------------------------------------- - + call step_therm1 (dt, iblk) ! vertical thermodynamics if (debug_model) then @@ -259,10 +258,9 @@ subroutine ice_step call debug_ice (iblk, plabeld) endif - endif ! ktherm > 0 - - enddo ! iblk - !$OMP END PARALLEL DO + enddo + !$OMP END PARALLEL DO + endif ! ktherm > 0 ! clean up, update tendency diagnostics offset = dt @@ -292,7 +290,7 @@ subroutine ice_step endif ! ridging - !$OMP PARALLEL DO PRIVATE(iblk) + !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) do iblk = 1, nblocks if (kridge > 0) call step_dyn_ridge (dt_dyn, ndtd, iblk) enddo @@ -326,14 +324,15 @@ subroutine ice_step !----------------------------------------------------------------- if (tr_snow) then ! 
advanced snow physics + !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) do iblk = 1, nblocks call step_snow (dt, iblk) enddo + !$OMP END PARALLEL DO call update_state (dt) ! clean up endif -!MHRI: CHECK THIS OMP - !$OMP PARALLEL DO PRIVATE(iblk) + !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) do iblk = 1, nblocks !----------------------------------------------------------------- @@ -405,7 +404,6 @@ subroutine ice_step if (kdyn == 2) call write_restart_eap call final_restart endif - call ice_timer_stop(timer_readwrite) ! reading/writing end subroutine ice_step @@ -488,7 +486,7 @@ subroutine coupling_prep (iblk) enddo enddo - call ice_timer_start(timer_couple) ! atm/ocn coupling + call ice_timer_start(timer_couple,iblk) ! atm/ocn coupling if (oceanmixed_ice) & call ocean_mixed_layer (dt,iblk) ! ocean surface fluxes and sst @@ -655,7 +653,7 @@ subroutine coupling_prep (iblk) endif !echmod #endif - call ice_timer_stop(timer_couple) ! atm/ocn coupling + call ice_timer_stop(timer_couple,iblk) ! atm/ocn coupling end subroutine coupling_prep diff --git a/cicecore/version.txt b/cicecore/version.txt index 04a90ef1a..9e5f9f3e1 100644 --- a/cicecore/version.txt +++ b/cicecore/version.txt @@ -1 +1 @@ -CICE 6.3.0 +CICE 6.3.1 diff --git a/configuration/scripts/cice.batch.csh b/configuration/scripts/cice.batch.csh index f86b55502..85191dbf6 100755 --- a/configuration/scripts/cice.batch.csh +++ b/configuration/scripts/cice.batch.csh @@ -91,7 +91,7 @@ cat >> ${jobfile} << EOFB #PBS -l walltime=${batchtime} EOFB -else if (${ICE_MACHINE} =~ gordon* || ${ICE_MACHINE} =~ conrad* || ${ICE_MACHINE} =~ gaffney* || ${ICE_MACHINE} =~ koehr* || ${ICE_MACHINE} =~ mustang*) then +else if (${ICE_MACHINE} =~ gaffney* || ${ICE_MACHINE} =~ koehr* || ${ICE_MACHINE} =~ mustang*) then cat >> ${jobfile} << EOFB #PBS -N ${shortcase} #PBS -q ${queue} @@ -107,7 +107,6 @@ EOFB else if (${ICE_MACHINE} =~ narwhal*) then if (${runlength} <= 0) then set batchtime = "00:29:59" - set queue = "debug" else set queue = "standard" endif diff --git a/configuration/scripts/cice.launch.csh b/configuration/scripts/cice.launch.csh index a63c802ed..cb9068141 100755 --- a/configuration/scripts/cice.launch.csh +++ b/configuration/scripts/cice.launch.csh @@ -63,12 +63,6 @@ cat >> ${jobfile} << EOFR aprun -n ${ntasks} -N ${taskpernodelimit} -d ${nthrds} ./cice >&! \$ICE_RUNLOG_FILE EOFR -#======= -else if (${ICE_MACHINE} =~ gordon* || ${ICE_MACHINE} =~ conrad*) then -cat >> ${jobfile} << EOFR -aprun -n ${ntasks} -N ${taskpernodelimit} -d ${nthrds} ./cice >&! 
\$ICE_RUNLOG_FILE -EOFR - #======= else if (${ICE_MACHINE} =~ cori*) then if (${ICE_COMMDIR} =~ serial*) then diff --git a/configuration/scripts/cice.run.setup.csh b/configuration/scripts/cice.run.setup.csh index aa578b5ca..58c4ebe66 100755 --- a/configuration/scripts/cice.run.setup.csh +++ b/configuration/scripts/cice.run.setup.csh @@ -9,8 +9,6 @@ echo "running cice.run.setup.csh" set jobfile = cice.run set subfile = cice.submit -set nthrds = ${ICE_NTHRDS} - #========================================== # Write the batch code into the job file @@ -43,7 +41,9 @@ set ICE_RUNLOG_FILE = "cice.runlog.\${stamp}" #-------------------------------------------- cd \${ICE_RUNDIR} -setenv OMP_NUM_THREADS ${nthrds} +setenv OMP_NUM_THREADS \${ICE_NTHRDS} +setenv OMP_SCHEDULE "\${ICE_OMPSCHED}" +#setenv OMP_DISPLAY_ENV TRUE cp -f \${ICE_CASEDIR}/ice_in \${ICE_RUNDIR} cp -f \${ICE_CASEDIR}/env.${ICE_MACHCOMP} \${ICE_RUNDIR} diff --git a/configuration/scripts/cice.settings b/configuration/scripts/cice.settings index 1faf2c5be..9b57aab3f 100755 --- a/configuration/scripts/cice.settings +++ b/configuration/scripts/cice.settings @@ -21,6 +21,7 @@ setenv ICE_QUIETMODE false setenv ICE_GRID undefined setenv ICE_NTASKS undefined setenv ICE_NTHRDS undefined +setenv ICE_OMPSCHED "static,1" setenv ICE_TEST undefined setenv ICE_TESTNAME undefined setenv ICE_TESTID undefined @@ -28,6 +29,7 @@ setenv ICE_BASELINE undefined setenv ICE_BASEGEN undefined setenv ICE_BASECOM undefined setenv ICE_BFBCOMP undefined +setenv ICE_BFBTYPE restart setenv ICE_SPVAL undefined setenv ICE_RUNLENGTH -1 setenv ICE_ACCOUNT undefined diff --git a/configuration/scripts/ice_in b/configuration/scripts/ice_in index bb44663eb..7ed806edf 100644 --- a/configuration/scripts/ice_in +++ b/configuration/scripts/ice_in @@ -38,6 +38,7 @@ debug_forcing = .false. print_global = .true. print_points = .true. + timer_stats = .false. conserv_check = .false. latpnt(1) = 90. lonpnt(1) = 0. 
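For context (not part of the patch): the new ICE_OMPSCHED setting above feeds OMP_SCHEDULE, which only takes effect on loops compiled with SCHEDULE(runtime), and the init_grid2 diagnostic earlier in this patch reads the result back with omp_get_schedule. A minimal standalone Fortran sketch of that mechanism, assuming an OpenMP-enabled build (e.g. -fopenmp for gnu, -qopenmp for intel):

! Illustrative sketch only -- not from the CICE sources.
program omp_sched_demo
   use omp_lib
   implicit none
   integer (kind=omp_sched_kind) :: ompsk   ! schedule kind chosen at runtime
   integer :: ompcs                         ! chunk size
   integer :: iblk

   ! reports whatever OMP_SCHEDULE (here, ICE_OMPSCHED) selected
   call omp_get_schedule(ompsk, ompcs)
   write(*,*) 'omp schedule = ', ompsk, ompcs

   ! SCHEDULE(runtime) defers the block distribution to OMP_SCHEDULE,
   ! e.g. "static,1" interleaves blocks across threads round-robin
   !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime)
   do iblk = 1, 8
      write(*,*) 'block, thread = ', iblk, omp_get_thread_num()
   enddo
   !$OMP END PARALLEL DO
end program omp_sched_demo

Running this twice, once with OMP_SCHEDULE="static,1" and once with "dynamic,1", shows the same loop handing out blocks deterministically in the first case and on demand in the second, which is exactly what the ompsched* options added later in this patch exercise.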
diff --git a/configuration/scripts/machines/Macros.conrad_cray b/configuration/scripts/machines/Macros.conrad_cray deleted file mode 100644 index 19ddcb8f5..000000000 --- a/configuration/scripts/machines/Macros.conrad_cray +++ /dev/null @@ -1,57 +0,0 @@ -#============================================================================== -# Macros file for NAVYDSRC conrad, cray compiler -#============================================================================== - -CPP := ftn -e P -CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} -CFLAGS := -c -O2 -h fp0 - -FIXEDFLAGS := -132 -FREEFLAGS := -FFLAGS := -h fp0 -h byteswapio -FFLAGS_NOOPT:= -O0 -LDFLAGS := -h byteswapio - -ifeq ($(ICE_BLDDEBUG), true) - FFLAGS += -O0 -g -Rbcdps -# FFLAGS += -O0 -g -Rbcdps -ei -else - FFLAGS += -O2 -endif - -SCC := cc -SFC := ftn -MPICC := cc -MPIFC := ftn - -ifeq ($(ICE_COMMDIR), mpi) - FC := $(MPIFC) - CC := $(MPICC) -else - FC := $(SFC) - CC := $(SCC) -endif -LD:= $(FC) - -# defined by module -#NETCDF_PATH := $(NETCDF) -#PNETCDF_PATH := $(PNETCDF) -#PNETCDF_PATH := /glade/apps/opt/pnetcdf/1.3.0/intel/default -#LAPACK_LIBDIR := /glade/apps/opt/lapack/3.4.2/intel/12.1.5/lib - -#PIO_CONFIG_OPTS:= --enable-filesystem-hints=gpfs - -INCLDIR := $(INCLDIR) -#INCLDIR += -I$(NETCDF_PATH)/include - -#LIB_NETCDF := $(NETCDF_PATH)/lib -#LIB_PNETCDF := $(PNETCDF_PATH)/lib -#LIB_MPI := $(IMPILIBDIR) -#SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff - -ifeq ($(ICE_THREADED), false) - LDFLAGS += -hnoomp - CFLAGS += -hnoomp - FFLAGS += -hnoomp -endif - diff --git a/configuration/scripts/machines/Macros.conrad_gnu b/configuration/scripts/machines/Macros.conrad_gnu deleted file mode 100644 index 5459d9b6b..000000000 --- a/configuration/scripts/machines/Macros.conrad_gnu +++ /dev/null @@ -1,55 +0,0 @@ -#============================================================================== -# Macros file for NAVYDSRC conrad, gnu compiler -#============================================================================== - -CPP := ftn -E -CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} -CFLAGS := -c -O2 - -FIXEDFLAGS := -ffixed-line-length-132 -FREEFLAGS := -ffree-form -FFLAGS := -fconvert=big-endian -fbacktrace -ffree-line-length-none -FFLAGS_NOOPT:= -O0 - -ifeq ($(ICE_BLDDEBUG), true) - FFLAGS += -O0 -g -fcheck=bounds -finit-real=nan -fimplicit-none -ffpe-trap=invalid,zero,overflow -else - FFLAGS += -O2 -endif - -SCC := cc -SFC := ftn -MPICC := cc -MPIFC := ftn - -ifeq ($(ICE_COMMDIR), mpi) - FC := $(MPIFC) - CC := $(MPICC) -else - FC := $(SFC) - CC := $(SCC) -endif -LD:= $(FC) - -# defined by module -#NETCDF_PATH := $(NETCDF) -#PNETCDF_PATH := $(PNETCDF) -#PNETCDF_PATH := /glade/apps/opt/pnetcdf/1.3.0/intel/default -#LAPACK_LIBDIR := /glade/apps/opt/lapack/3.4.2/intel/12.1.5/lib - -#PIO_CONFIG_OPTS:= --enable-filesystem-hints=gpfs - -INCLDIR := $(INCLDIR) -#INCLDIR += -I$(NETCDF_PATH)/include - -#LIB_NETCDF := $(NETCDF_PATH)/lib -#LIB_PNETCDF := $(PNETCDF_PATH)/lib -#LIB_MPI := $(IMPILIBDIR) -#SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff - -ifeq ($(ICE_THREADED), true) - LDFLAGS += -fopenmp - CFLAGS += -fopenmp - FFLAGS += -fopenmp -endif - diff --git a/configuration/scripts/machines/Macros.conrad_intel b/configuration/scripts/machines/Macros.conrad_intel deleted file mode 100644 index 74a36304d..000000000 --- a/configuration/scripts/machines/Macros.conrad_intel +++ /dev/null @@ -1,56 +0,0 @@ -#============================================================================== -# Macros file for NAVYDSRC conrad, intel compiler 
-#============================================================================== - -CPP := fpp -CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} -CFLAGS := -c -O2 -fp-model precise -xHost - -FIXEDFLAGS := -132 -FREEFLAGS := -FR -FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -traceback -xHost -FFLAGS_NOOPT:= -O0 - -ifeq ($(ICE_BLDDEBUG), true) - FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created -# FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created -init=snan,arrays -else - FFLAGS += -O2 -endif - -SCC := cc -SFC := ftn -MPICC := cc -MPIFC := ftn - -ifeq ($(ICE_COMMDIR), mpi) - FC := $(MPIFC) - CC := $(MPICC) -else - FC := $(SFC) - CC := $(SCC) -endif -LD:= $(FC) - -# defined by module -#NETCDF_PATH := $(NETCDF) -#PNETCDF_PATH := $(PNETCDF) -#PNETCDF_PATH := /glade/apps/opt/pnetcdf/1.3.0/intel/default -#LAPACK_LIBDIR := /glade/apps/opt/lapack/3.4.2/intel/12.1.5/lib - -#PIO_CONFIG_OPTS:= --enable-filesystem-hints=gpfs - -INCLDIR := $(INCLDIR) -#INCLDIR += -I$(NETCDF_PATH)/include - -#LIB_NETCDF := $(NETCDF_PATH)/lib -#LIB_PNETCDF := $(PNETCDF_PATH)/lib -#LIB_MPI := $(IMPILIBDIR) -#SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff - -ifeq ($(ICE_THREADED), true) - LDFLAGS += -qopenmp - CFLAGS += -qopenmp - FFLAGS += -qopenmp -endif - diff --git a/configuration/scripts/machines/Macros.conrad_pgi b/configuration/scripts/machines/Macros.conrad_pgi deleted file mode 100644 index ef0a25ab4..000000000 --- a/configuration/scripts/machines/Macros.conrad_pgi +++ /dev/null @@ -1,55 +0,0 @@ -#============================================================================== -# Macros file for NAVYDSRC conrad, pgi compiler -#============================================================================== - -CPP := pgcc -E -CPPDEFS := -DFORTRANUNDERSCORE -DNO_R16 ${ICE_CPPDEFS} -CFLAGS := -c -O2 -Kieee - -FIXEDFLAGS := -Mextend -FREEFLAGS := -Mfree -FFLAGS := -Kieee -Mbyteswapio -traceback -FFLAGS_NOOPT:= -O0 - -ifeq ($(ICE_BLDDEBUG), true) - FFLAGS += -O0 -g -Mbounds -Mchkptr -else - FFLAGS += -O -g -endif - -SCC := cc -SFC := ftn -MPICC := cc -MPIFC := ftn - -ifeq ($(ICE_COMMDIR), mpi) - FC := $(MPIFC) - CC := $(MPICC) -else - FC := $(SFC) - CC := $(SCC) -endif -LD:= $(FC) - -# defined by module -#NETCDF_PATH := $(NETCDF) -#PNETCDF_PATH := $(PNETCDF) -#PNETCDF_PATH := /glade/apps/opt/pnetcdf/1.3.0/intel/default -#LAPACK_LIBDIR := /glade/apps/opt/lapack/3.4.2/intel/12.1.5/lib - -#PIO_CONFIG_OPTS:= --enable-filesystem-hints=gpfs - -INCLDIR := $(INCLDIR) -#INCLDIR += -I$(NETCDF_PATH)/include - -#LIB_NETCDF := $(NETCDF_PATH)/lib -#LIB_PNETCDF := $(PNETCDF_PATH)/lib -#LIB_MPI := $(IMPILIBDIR) -#SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff - -ifeq ($(ICE_THREADED), true) - LDFLAGS += -mp - CFLAGS += -mp - FFLAGS += -mp -endif - diff --git a/configuration/scripts/machines/Macros.gordon_cray b/configuration/scripts/machines/Macros.gordon_cray deleted file mode 100644 index 6c5032e0d..000000000 --- a/configuration/scripts/machines/Macros.gordon_cray +++ /dev/null @@ -1,57 +0,0 @@ -#============================================================================== -# Macros file for NAVYDSRC gordon, cray compiler -#============================================================================== - -CPP := ftn -e P -CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} -CFLAGS := -c -O2 -h fp0 - -FIXEDFLAGS := -132 -FREEFLAGS := -FFLAGS := -h fp0 -h byteswapio -FFLAGS_NOOPT:= -O0 -LDFLAGS := -h byteswapio - -ifeq ($(ICE_BLDDEBUG), true) - 
FFLAGS += -O0 -g -Rbcdps -# FFLAGS += -O0 -g -Rbcdps -ei -else - FFLAGS += -O2 -endif - -SCC := cc -SFC := ftn -MPICC := cc -MPIFC := ftn - -ifeq ($(ICE_COMMDIR), mpi) - FC := $(MPIFC) - CC := $(MPICC) -else - FC := $(SFC) - CC := $(SCC) -endif -LD:= $(FC) - -# defined by module -#NETCDF_PATH := $(NETCDF) -#PNETCDF_PATH := $(PNETCDF) -#PNETCDF_PATH := /glade/apps/opt/pnetcdf/1.3.0/intel/default -#LAPACK_LIBDIR := /glade/apps/opt/lapack/3.4.2/intel/12.1.5/lib - -#PIO_CONFIG_OPTS:= --enable-filesystem-hints=gpfs - -INCLDIR := $(INCLDIR) -#INCLDIR += -I$(NETCDF_PATH)/include - -#LIB_NETCDF := $(NETCDF_PATH)/lib -#LIB_PNETCDF := $(PNETCDF_PATH)/lib -#LIB_MPI := $(IMPILIBDIR) -#SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff - -ifeq ($(ICE_THREADED), false) - LDFLAGS += -hnoomp - CFLAGS += -hnoomp - FFLAGS += -hnoomp -endif - diff --git a/configuration/scripts/machines/Macros.gordon_gnu b/configuration/scripts/machines/Macros.gordon_gnu deleted file mode 100644 index 8c3e277ab..000000000 --- a/configuration/scripts/machines/Macros.gordon_gnu +++ /dev/null @@ -1,67 +0,0 @@ -#============================================================================== -# Macros file for NAVYDSRC gordon, gnu compiler -#============================================================================== - -CPP := ftn -E -CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} -CFLAGS := -c - -FIXEDFLAGS := -ffixed-line-length-132 -FREEFLAGS := -ffree-form -FFLAGS := -fconvert=big-endian -fbacktrace -ffree-line-length-none -FFLAGS_NOOPT:= -O0 - -ifeq ($(ICE_BLDDEBUG), true) - FFLAGS += -O0 -g -fcheck=bounds -finit-real=nan -fimplicit-none -ffpe-trap=invalid,zero,overflow - CFLAGS += -O0 -endif - -ifeq ($(ICE_COVERAGE), true) - FFLAGS += -O0 -g -fprofile-arcs -ftest-coverage - CFLAGS += -O0 -g -coverage - LDFLAGS += -g -ftest-coverage -fprofile-arcs -endif - -ifneq ($(ICE_BLDDEBUG), true) -ifneq ($(ICE_COVERAGE), true) - FFLAGS += -O2 - CFLAGS += -O2 -endif -endif - -SCC := cc -SFC := ftn -MPICC := cc -MPIFC := ftn - -ifeq ($(ICE_COMMDIR), mpi) - FC := $(MPIFC) - CC := $(MPICC) -else - FC := $(SFC) - CC := $(SCC) -endif -LD:= $(FC) - -# defined by module -#NETCDF_PATH := $(NETCDF) -#PNETCDF_PATH := $(PNETCDF) -#PNETCDF_PATH := /glade/apps/opt/pnetcdf/1.3.0/intel/default -#LAPACK_LIBDIR := /glade/apps/opt/lapack/3.4.2/intel/12.1.5/lib - -#PIO_CONFIG_OPTS:= --enable-filesystem-hints=gpfs - -INCLDIR := $(INCLDIR) -#INCLDIR += -I$(NETCDF_PATH)/include - -#LIB_NETCDF := $(NETCDF_PATH)/lib -#LIB_PNETCDF := $(PNETCDF_PATH)/lib -#LIB_MPI := $(IMPILIBDIR) -#SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff - -ifeq ($(ICE_THREADED), true) - LDFLAGS += -fopenmp - CFLAGS += -fopenmp - FFLAGS += -fopenmp -endif - diff --git a/configuration/scripts/machines/Macros.gordon_intel b/configuration/scripts/machines/Macros.gordon_intel deleted file mode 100644 index 84659d00a..000000000 --- a/configuration/scripts/machines/Macros.gordon_intel +++ /dev/null @@ -1,55 +0,0 @@ -#============================================================================== -# Macros file for NAVYDSRC gordon, intel compiler -#============================================================================== - -CPP := fpp -CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} -CFLAGS := -c -O2 -fp-model precise -xHost - -FIXEDFLAGS := -132 -FREEFLAGS := -FR -FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -traceback -xHost -FFLAGS_NOOPT:= -O0 - -ifeq ($(ICE_BLDDEBUG), true) - FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created -else 
- FFLAGS += -O2 -endif - -SCC := cc -SFC := ftn -MPICC := cc -MPIFC := ftn - -ifeq ($(ICE_COMMDIR), mpi) - FC := $(MPIFC) - CC := $(MPICC) -else - FC := $(SFC) - CC := $(SCC) -endif -LD:= $(FC) - -# defined by module -#NETCDF_PATH := $(NETCDF) -#PNETCDF_PATH := $(PNETCDF) -#PNETCDF_PATH := /glade/apps/opt/pnetcdf/1.3.0/intel/default -#LAPACK_LIBDIR := /glade/apps/opt/lapack/3.4.2/intel/12.1.5/lib - -#PIO_CONFIG_OPTS:= --enable-filesystem-hints=gpfs - -INCLDIR := $(INCLDIR) -#INCLDIR += -I$(NETCDF_PATH)/include - -#LIB_NETCDF := $(NETCDF_PATH)/lib -#LIB_PNETCDF := $(PNETCDF_PATH)/lib -#LIB_MPI := $(IMPILIBDIR) -#SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff - -ifeq ($(ICE_THREADED), true) - LDFLAGS += -qopenmp - CFLAGS += -qopenmp - FFLAGS += -qopenmp -endif - diff --git a/configuration/scripts/machines/Macros.gordon_pgi b/configuration/scripts/machines/Macros.gordon_pgi deleted file mode 100644 index 1190f6eca..000000000 --- a/configuration/scripts/machines/Macros.gordon_pgi +++ /dev/null @@ -1,55 +0,0 @@ -#============================================================================== -# Macros file for NAVYDSRC gordon, pgi compiler -#============================================================================== - -CPP := pgcc -Mcpp -CPPDEFS := -DFORTRANUNDERSCORE -DNO_R16 ${ICE_CPPDEFS} -CFLAGS := -c -O2 -Kieee - -FIXEDFLAGS := -Mextend -FREEFLAGS := -Mfree -FFLAGS := -Kieee -Mbyteswapio -traceback -FFLAGS_NOOPT:= -O0 - -ifeq ($(ICE_BLDDEBUG), true) - FFLAGS += -O0 -g -Mbounds -Mchkptr -else - FFLAGS += -O -g -endif - -SCC := cc -SFC := ftn -MPICC := cc -MPIFC := ftn - -ifeq ($(ICE_COMMDIR), mpi) - FC := $(MPIFC) - CC := $(MPICC) -else - FC := $(SFC) - CC := $(SCC) -endif -LD:= $(FC) - -# defined by module -#NETCDF_PATH := $(NETCDF) -#PNETCDF_PATH := $(PNETCDF) -#PNETCDF_PATH := /glade/apps/opt/pnetcdf/1.3.0/intel/default -#LAPACK_LIBDIR := /glade/apps/opt/lapack/3.4.2/intel/12.1.5/lib - -#PIO_CONFIG_OPTS:= --enable-filesystem-hints=gpfs - -INCLDIR := $(INCLDIR) -#INCLDIR += -I$(NETCDF_PATH)/include - -#LIB_NETCDF := $(NETCDF_PATH)/lib -#LIB_PNETCDF := $(PNETCDF_PATH)/lib -#LIB_MPI := $(IMPILIBDIR) -#SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff - -ifeq ($(ICE_THREADED), true) - LDFLAGS += -mp - CFLAGS += -mp - FFLAGS += -mp -endif - diff --git a/configuration/scripts/machines/env.banting_gnu b/configuration/scripts/machines/env.banting_gnu index 0c0a4abce..997816a9d 100755 --- a/configuration/scripts/machines/env.banting_gnu +++ b/configuration/scripts/machines/env.banting_gnu @@ -19,6 +19,9 @@ module load cray-netcdf # NetCDF module load cray-hdf5 # HDF5 setenv HDF5_USE_FILE_LOCKING FALSE # necessary since data is on an NFS filesystem +# May be needed for OpenMP memory +#setenv OMP_STACKSIZE 64M + endif setenv ICE_MACHINE_MACHNAME banting diff --git a/configuration/scripts/machines/env.banting_intel b/configuration/scripts/machines/env.banting_intel index ac01e4d72..0beeb2618 100755 --- a/configuration/scripts/machines/env.banting_intel +++ b/configuration/scripts/machines/env.banting_intel @@ -14,6 +14,9 @@ module load cray-netcdf # NetCDF module load cray-hdf5 # HDF5 setenv HDF5_USE_FILE_LOCKING FALSE # necessary since data is on an NFS filesystem +# May be needed for OpenMP memory +#setenv OMP_STACKSIZE 64M + endif setenv ICE_MACHINE_MACHNAME banting diff --git a/configuration/scripts/machines/env.cesium_intel b/configuration/scripts/machines/env.cesium_intel index 19209919e..8dabe1645 100755 --- a/configuration/scripts/machines/env.cesium_intel +++ 
b/configuration/scripts/machines/env.cesium_intel @@ -6,6 +6,9 @@ source /fs/ssm/main/opt/intelcomp/intelcomp-2016.1.156/intelcomp_2016.1.156_mult source $ssmuse -d /fs/ssm/main/opt/openmpi/openmpi-1.6.5/intelcomp-2016.1.156 # openmpi source $ssmuse -d /fs/ssm/hpco/tmp/eccc/201402/04/intel-2016.1.150 # netcdf (and openmpi) +# May be needed for OpenMP memory +#setenv OMP_STACKSIZE 64M + setenv ICE_MACHINE_MACHNAME cesium setenv ICE_MACHINE_ENVNAME intel setenv ICE_MACHINE_MAKE colormake-short diff --git a/configuration/scripts/machines/env.cheyenne_gnu b/configuration/scripts/machines/env.cheyenne_gnu index 8ddc443b1..1b79c7f3b 100755 --- a/configuration/scripts/machines/env.cheyenne_gnu +++ b/configuration/scripts/machines/env.cheyenne_gnu @@ -29,8 +29,8 @@ if ($ICE_IOTYPE =~ pio*) then endif endif -if ($?ICE_TEST) then -if ($ICE_TEST =~ qcchk*) then +if ($?ICE_BFBTYPE) then +if ($ICE_BFBTYPE =~ qcchk*) then module load python source /glade/u/apps/opt/ncar_pylib/ncar_pylib.csh default endif @@ -40,6 +40,8 @@ endif limit coredumpsize unlimited limit stacksize unlimited +# May be needed for OpenMP memory +#setenv OMP_STACKSIZE 64M setenv ICE_MACHINE_MACHNAME cheyenne setenv ICE_MACHINE_MACHINFO "SGI ICE XA Xeon E5-2697V4 Broadwell" diff --git a/configuration/scripts/machines/env.cheyenne_intel b/configuration/scripts/machines/env.cheyenne_intel index 28df6647d..f469b3585 100755 --- a/configuration/scripts/machines/env.cheyenne_intel +++ b/configuration/scripts/machines/env.cheyenne_intel @@ -29,8 +29,8 @@ if ($ICE_IOTYPE =~ pio*) then endif endif -if ($?ICE_TEST) then -if ($ICE_TEST =~ qcchk*) then +if ($?ICE_BFBTYPE) then +if ($ICE_BFBTYPE =~ qcchk*) then module load python source /glade/u/apps/opt/ncar_pylib/ncar_pylib.csh default endif @@ -40,6 +40,8 @@ endif limit coredumpsize unlimited limit stacksize unlimited +# May be needed for OpenMP memory +#setenv OMP_STACKSIZE 64M setenv ICE_MACHINE_MACHNAME cheyenne setenv ICE_MACHINE_MACHINFO "SGI ICE XA Xeon E5-2697V4 Broadwell" diff --git a/configuration/scripts/machines/env.cheyenne_pgi b/configuration/scripts/machines/env.cheyenne_pgi index d492129fb..b5cf11a51 100755 --- a/configuration/scripts/machines/env.cheyenne_pgi +++ b/configuration/scripts/machines/env.cheyenne_pgi @@ -29,8 +29,8 @@ if ($ICE_IOTYPE =~ pio*) then endif endif -if ($?ICE_TEST) then -if ($ICE_TEST =~ qcchk*) then +if ($?ICE_BFBTYPE) then +if ($ICE_BFBTYPE =~ qcchk*) then module load python source /glade/u/apps/opt/ncar_pylib/ncar_pylib.csh default endif @@ -40,6 +40,8 @@ endif limit coredumpsize unlimited limit stacksize unlimited +# May be needed for OpenMP memory +#setenv OMP_STACKSIZE 64M setenv ICE_MACHINE_MACHNAME cheyenne setenv ICE_MACHINE_MACHINFO "SGI ICE XA Xeon E5-2697V4 Broadwell" diff --git a/configuration/scripts/machines/env.compy_intel b/configuration/scripts/machines/env.compy_intel index fe3511aa6..6fc273204 100755 --- a/configuration/scripts/machines/env.compy_intel +++ b/configuration/scripts/machines/env.compy_intel @@ -23,6 +23,9 @@ setenv I_MPI_ADJUST_ALLREDUCE 1 limit coredumpsize unlimited limit stacksize unlimited +# May be needed for OpenMP memory +#setenv OMP_STACKSIZE 64M + endif setenv ICE_MACHINE_MACHNAME compy diff --git a/configuration/scripts/machines/env.conda_linux b/configuration/scripts/machines/env.conda_linux index 08cf27724..ae6ea1b79 100755 --- a/configuration/scripts/machines/env.conda_linux +++ b/configuration/scripts/machines/env.conda_linux @@ -24,6 +24,9 @@ if $status then exit 1 endif +# May be needed for OpenMP memory 
+#setenv OMP_STACKSIZE 64M + endif setenv ICE_MACHINE_MACHNAME conda diff --git a/configuration/scripts/machines/env.conda_macos b/configuration/scripts/machines/env.conda_macos index e33eee710..3b3537bf7 100755 --- a/configuration/scripts/machines/env.conda_macos +++ b/configuration/scripts/machines/env.conda_macos @@ -24,6 +24,9 @@ if $status then exit 1 endif +# May be needed for OpenMP memory +#setenv OMP_STACKSIZE 64M + endif setenv ICE_MACHINE_MACHNAME conda diff --git a/configuration/scripts/machines/env.conrad_cray b/configuration/scripts/machines/env.conrad_cray deleted file mode 100755 index 62549a738..000000000 --- a/configuration/scripts/machines/env.conrad_cray +++ /dev/null @@ -1,57 +0,0 @@ -#!/bin/csh -f - -set inp = "undefined" -if ($#argv == 1) then - set inp = $1 -endif - -if ("$inp" != "-nomodules") then - -source /opt/modules/default/init/csh - -module unload PrgEnv-cray -module unload PrgEnv-gnu -module unload PrgEnv-intel -module unload PrgEnv-pgi -module load PrgEnv-cray/5.2.82 - -module unload cce -module load cce/8.5.8 - -module unload cray-mpich -module unload cray-mpich2 -module load cray-mpich/7.5.3 - -module unload netcdf -module unload cray-netcdf -module unload cray-hdf5 -module unload cray-hdf5-parallel -module unload cray-netcdf-hdf5parallel -module unload cray-parallel-netcdf -module load cray-netcdf/4.4.1.1 -module load cray-hdf5/1.10.0.1 - -module unload cray-libsci - -module load craype-haswell - -setenv NETCDF_PATH ${NETCDF_DIR} -limit coredumpsize unlimited -limit stacksize unlimited - -endif - -setenv ICE_MACHINE_MACHNAME conrad -setenv ICE_MACHINE_MACHINFO "Cray XC40 Xeon E5-2698v3 Haswell" -setenv ICE_MACHINE_ENVNAME cray -setenv ICE_MACHINE_ENVINFO "cce 8.5.8, mpich 7.5.3, netcdf 4.4.1.1" -setenv ICE_MACHINE_MAKE gmake -setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS -setenv ICE_MACHINE_INPUTDATA /p/work1/RASM_data/cice_consortium -setenv ICE_MACHINE_BASELINE $WORKDIR/CICE_BASELINE -setenv ICE_MACHINE_SUBMIT "qsub " -setenv ICE_MACHINE_ACCT P00000000 -setenv ICE_MACHINE_QUEUE "debug" -setenv ICE_MACHINE_TPNODE 32 # tasks per node -setenv ICE_MACHINE_BLDTHRDS 4 -setenv ICE_MACHINE_QSTAT "qstat " diff --git a/configuration/scripts/machines/env.conrad_gnu b/configuration/scripts/machines/env.conrad_gnu deleted file mode 100755 index f14ee33a5..000000000 --- a/configuration/scripts/machines/env.conrad_gnu +++ /dev/null @@ -1,77 +0,0 @@ -#!/bin/csh -f - -set inp = "undefined" -if ($#argv == 1) then - set inp = $1 -endif - -if ("$inp" != "-nomodules") then - -source /opt/modules/default/init/csh - -module unload PrgEnv-cray -module unload PrgEnv-gnu -module unload PrgEnv-intel -module unload PrgEnv-pgi -module load PrgEnv-gnu/5.2.82 - -module unload gcc -module load gcc/6.3.0 - -module unload cray-mpich -module unload cray-mpich2 -module load cray-mpich/7.5.3 - -module unload netcdf -module unload cray-netcdf -module unload cray-hdf5 -module unload cray-hdf5-parallel -module unload cray-netcdf-hdf5parallel -module unload cray-parallel-netcdf -module load cray-netcdf/4.4.1.1 -module load cray-hdf5/1.10.0.1 - -module unload cray-libsci - -module load craype-haswell - -setenv NETCDF_PATH ${NETCDF_DIR} -limit coredumpsize unlimited -limit stacksize unlimited - -endif - -setenv ICE_MACHINE_MACHNAME conrad -setenv ICE_MACHINE_MACHINFO "Cray XC40 Xeon E5-2698v3 Haswell" -setenv ICE_MACHINE_ENVNAME gnu -setenv ICE_MACHINE_ENVINFO "GNU Fortran (GCC) 6.3.0 20161221, mpich 7.5.3, netcdf 4.4.1.1" -setenv ICE_MACHINE_MAKE gmake -setenv ICE_MACHINE_WKDIR 
$WORKDIR/CICE_RUNS -setenv ICE_MACHINE_INPUTDATA /p/work1/RASM_data/cice_consortium -setenv ICE_MACHINE_BASELINE $WORKDIR/CICE_BASELINE -setenv ICE_MACHINE_SUBMIT "qsub " -setenv ICE_MACHINE_ACCT P00000000 -setenv ICE_MACHINE_QUEUE "debug" -setenv ICE_MACHINE_TPNODE 32 # tasks per node -setenv ICE_MACHINE_BLDTHRDS 4 -setenv ICE_MACHINE_QSTAT "qstat " - -# For lcov -set lcovpath = "/p/home/apcraig/bin" -set lcovp5l = "/p/home/apcraig/usr/lib/perl5/site_perl/5.10.0/x86_64-linux-thread-multi" - -if ($?PATH) then - if ("$PATH" !~ "*${lcovpath}*") then - setenv PATH ${PATH}:$lcovpath - endif -else - setenv PATH $lcovpath -endif - -if ($?PERL5LIB) then - if ("$PERL5LIB" !~ "*${lcovp5l}*") then - setenv PERL5LIB ${PERL5LIB}:$lcovp5l - endif -else - setenv PERL5LIB $lcovp5l -endif diff --git a/configuration/scripts/machines/env.conrad_intel b/configuration/scripts/machines/env.conrad_intel deleted file mode 100755 index e37ce4b1f..000000000 --- a/configuration/scripts/machines/env.conrad_intel +++ /dev/null @@ -1,59 +0,0 @@ -#!/bin/csh -f - -set inp = "undefined" -if ($#argv == 1) then - set inp = $1 -endif - -if ("$inp" != "-nomodules") then - -source /opt/modules/default/init/csh - -module unload PrgEnv-cray -module unload PrgEnv-gnu -module unload PrgEnv-intel -module unload PrgEnv-pgi -module load PrgEnv-intel/5.2.40 - -module unload intel -module load intel/17.0.2.174 - -module unload cray-mpich -module unload cray-mpich2 -module load cray-mpich/7.3.2 - -module unload netcdf -module unload cray-netcdf -module unload cray-hdf5 -module unload cray-hdf5-parallel -module unload cray-netcdf-hdf5parallel -module unload cray-parallel-netcdf -module load cray-netcdf/4.3.2 -module load cray-hdf5/1.8.13 - -module unload cray-libsci - -module load craype-haswell - -setenv NETCDF_PATH ${NETCDF_DIR} -limit coredumpsize unlimited -limit stacksize unlimited - -endif - -setenv ICE_MACHINE_MACHNAME conrad -setenv ICE_MACHINE_MACHINFO "Cray XC40 Xeon E5-2698v3 Haswell" -setenv ICE_MACHINE_ENVNAME intel -setenv ICE_MACHINE_ENVINFO "ifort 17.0.2 20170213, mpich 7.3.2, netcdf 4.3.2" -setenv ICE_MACHINE_MAKE gmake -setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS -setenv ICE_MACHINE_INPUTDATA /p/work1/RASM_data/cice_consortium -setenv ICE_MACHINE_BASELINE $WORKDIR/CICE_BASELINE -setenv ICE_MACHINE_SUBMIT "qsub " -setenv ICE_MACHINE_ACCT P00000000 -setenv ICE_MACHINE_QUEUE "debug" -setenv ICE_MACHINE_TPNODE 32 # tasks per node -setenv ICE_MACHINE_MAXPES 8000 # maximum total pes (tasks * threads) available -setenv ICE_MACHINE_MAXRUNLENGTH 168 # maximum batch wall time limit in hours (integer) -setenv ICE_MACHINE_BLDTHRDS 4 -setenv ICE_MACHINE_QSTAT "qstat " diff --git a/configuration/scripts/machines/env.conrad_pgi b/configuration/scripts/machines/env.conrad_pgi deleted file mode 100755 index 2e82ea34f..000000000 --- a/configuration/scripts/machines/env.conrad_pgi +++ /dev/null @@ -1,57 +0,0 @@ -#!/bin/csh -f - -set inp = "undefined" -if ($#argv == 1) then - set inp = $1 -endif - -if ("$inp" != "-nomodules") then - -source /opt/modules/default/init/csh - -module unload PrgEnv-cray -module unload PrgEnv-gnu -module unload PrgEnv-intel -module unload PrgEnv-pgi -module load PrgEnv-pgi/5.2.82 - -module unload pgi -module load pgi/16.10.0 - -module unload cray-mpich -module unload cray-mpich2 -module load cray-mpich/7.5.3 - -module unload netcdf -module unload cray-netcdf -module unload cray-hdf5 -module unload cray-hdf5-parallel -module unload cray-netcdf-hdf5parallel -module unload cray-parallel-netcdf -module load 
cray-netcdf/4.4.1.1 -module load cray-hdf5/1.10.0.1 - -module unload cray-libsci - -module load craype-haswell - -setenv NETCDF_PATH ${NETCDF_DIR} -limit coredumpsize unlimited -limit stacksize unlimited - -endif - -setenv ICE_MACHINE_MACHNAME conrad -setenv ICE_MACHINE_MACHINFO "Cray XC40 Xeon E5-2698v3 Haswell" -setenv ICE_MACHINE_ENVNAME pgi -setenv ICE_MACHINE_ENVINFO "pgf90 16.10-0, mpich 7.5.3, netcdf 4.4.1.1" -setenv ICE_MACHINE_MAKE gmake -setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS -setenv ICE_MACHINE_INPUTDATA /p/work1/RASM_data/cice_consortium -setenv ICE_MACHINE_BASELINE $WORKDIR/CICE_BASELINE -setenv ICE_MACHINE_SUBMIT "qsub " -setenv ICE_MACHINE_ACCT ARLAP96070PET -setenv ICE_MACHINE_QUEUE "debug" -setenv ICE_MACHINE_TPNODE 32 # tasks per node -setenv ICE_MACHINE_BLDTHRDS 4 -setenv ICE_MACHINE_QSTAT "qstat " diff --git a/configuration/scripts/machines/env.cori_intel b/configuration/scripts/machines/env.cori_intel index ed01928f4..734b2edf3 100755 --- a/configuration/scripts/machines/env.cori_intel +++ b/configuration/scripts/machines/env.cori_intel @@ -39,6 +39,7 @@ module load craype/2.6.2 setenv NETCDF_PATH ${NETCDF_DIR} setenv OMP_PROC_BIND true setenv OMP_PLACES threads +setenv OMP_STACKSIZE 32M limit coredumpsize unlimited limit stacksize unlimited diff --git a/configuration/scripts/machines/env.daley_gnu b/configuration/scripts/machines/env.daley_gnu index b1e379eb0..25b438e8e 100755 --- a/configuration/scripts/machines/env.daley_gnu +++ b/configuration/scripts/machines/env.daley_gnu @@ -19,6 +19,9 @@ module load cray-netcdf # NetCDF module load cray-hdf5 # HDF5 setenv HDF5_USE_FILE_LOCKING FALSE # necessary since data is on an NFS filesystem +# May be needed for OpenMP memory +#setenv OMP_STACKSIZE 64M + endif setenv ICE_MACHINE_MACHNAME daley diff --git a/configuration/scripts/machines/env.daley_intel b/configuration/scripts/machines/env.daley_intel index 502c71037..49178365b 100755 --- a/configuration/scripts/machines/env.daley_intel +++ b/configuration/scripts/machines/env.daley_intel @@ -14,6 +14,9 @@ module load cray-netcdf # NetCDF module load cray-hdf5 # HDF5 setenv HDF5_USE_FILE_LOCKING FALSE # necessary since data is on an NFS filesystem +# May be needed for OpenMP memory +#setenv OMP_STACKSIZE 64M + endif setenv ICE_MACHINE_MACHNAME daley diff --git a/configuration/scripts/machines/env.fram_intel b/configuration/scripts/machines/env.fram_intel index a7b141479..98edb3a66 100755 --- a/configuration/scripts/machines/env.fram_intel +++ b/configuration/scripts/machines/env.fram_intel @@ -7,6 +7,9 @@ source /fs/ssm/main/opt/intelcomp/intelcomp-2016.1.156/intelcomp_2016.1.156_mult source $ssmuse -d /fs/ssm/main/opt/openmpi/openmpi-1.6.5/intelcomp-2016.1.156 # openmpi source $ssmuse -d /fs/ssm/hpco/tmp/eccc/201402/04/intel-2016.1.150 # netcdf (and openmpi) +# May be needed for OpenMP memory +#setenv OMP_STACKSIZE 64M + setenv ICE_MACHINE_MACHNAME fram setenv ICE_MACHINE_ENVNAME intel setenv ICE_MACHINE_MAKE make diff --git a/configuration/scripts/machines/env.freya_gnu b/configuration/scripts/machines/env.freya_gnu index b655d6dd0..2681e1318 100755 --- a/configuration/scripts/machines/env.freya_gnu +++ b/configuration/scripts/machines/env.freya_gnu @@ -8,7 +8,7 @@ endif if ("$inp" != "-nomodules") then source /opt/modules/default/init/csh # Initialize modules for csh - Clear environment +# Clear environment module rm PrgEnv-intel module rm PrgEnv-cray module rm PrgEnv-gnu @@ -37,3 +37,4 @@ setenv ICE_MACHINE_ACCT P0000000 setenv ICE_MACHINE_QUEUE "development" setenv 
ICE_MACHINE_BLDTHRDS 18 setenv ICE_MACHINE_QSTAT "qstat " +setenv OMP_STACKSIZE 64M diff --git a/configuration/scripts/machines/env.freya_intel b/configuration/scripts/machines/env.freya_intel index dcbc1f8ba..4b45cd9e7 100755 --- a/configuration/scripts/machines/env.freya_intel +++ b/configuration/scripts/machines/env.freya_intel @@ -36,3 +36,4 @@ setenv ICE_MACHINE_ACCT P0000000 setenv ICE_MACHINE_QUEUE "development" setenv ICE_MACHINE_BLDTHRDS 18 setenv ICE_MACHINE_QSTAT "qstat " +setenv OMP_STACKSIZE 64M diff --git a/configuration/scripts/machines/env.gaea_intel b/configuration/scripts/machines/env.gaea_intel index d143270d7..e204c6fff 100755 --- a/configuration/scripts/machines/env.gaea_intel +++ b/configuration/scripts/machines/env.gaea_intel @@ -16,6 +16,9 @@ module load cray-netcdf module load PrgEnv-intel/6.0.5 module list +# May be needed for OpenMP memory +#setenv OMP_STACKSIZE 64M + endif setenv ICE_MACHINE_MACHNAME gaea diff --git a/configuration/scripts/machines/env.gaffney_gnu b/configuration/scripts/machines/env.gaffney_gnu index a63ee2ae4..dd889c5af 100755 --- a/configuration/scripts/machines/env.gaffney_gnu +++ b/configuration/scripts/machines/env.gaffney_gnu @@ -24,6 +24,7 @@ setenv MPI_DSM_DISTRIBUTE 0 setenv KMP_AFFINITY disabled limit coredumpsize unlimited limit stacksize unlimited +setenv OMP_STACKSIZE 64M endif diff --git a/configuration/scripts/machines/env.gaffney_intel b/configuration/scripts/machines/env.gaffney_intel index 9fa11d16e..c7fd0f6b3 100755 --- a/configuration/scripts/machines/env.gaffney_intel +++ b/configuration/scripts/machines/env.gaffney_intel @@ -24,6 +24,7 @@ setenv MPI_DSM_DISTRIBUTE 0 setenv KMP_AFFINITY disabled limit coredumpsize unlimited limit stacksize unlimited +setenv OMP_STACKSIZE 64M endif diff --git a/configuration/scripts/machines/env.gordon_cray b/configuration/scripts/machines/env.gordon_cray deleted file mode 100755 index d8c392d60..000000000 --- a/configuration/scripts/machines/env.gordon_cray +++ /dev/null @@ -1,57 +0,0 @@ -#!/bin/csh -f - -set inp = "undefined" -if ($#argv == 1) then - set inp = $1 -endif - -if ("$inp" != "-nomodules") then - -source /opt/modules/default/init/csh - -module unload PrgEnv-cray -module unload PrgEnv-gnu -module unload PrgEnv-intel -module unload PrgEnv-pgi -module load PrgEnv-cray/5.2.82 - -module unload cce -module load cce/8.5.8 - -module unload cray-mpich -module unload cray-mpich2 -module load cray-mpich/7.5.3 - -module unload netcdf -module unload cray-netcdf -module unload cray-hdf5 -module unload cray-hdf5-parallel -module unload cray-netcdf-hdf5parallel -module unload cray-parallel-netcdf -module load cray-netcdf/4.4.1.1 -module load cray-hdf5/1.10.0.1 - -module unload cray-libsci - -module load craype-haswell - -setenv NETCDF_PATH ${NETCDF_DIR} -limit coredumpsize unlimited -limit stacksize unlimited - -endif - -setenv ICE_MACHINE_MACHNAME gordon -setenv ICE_MACHINE_MACHINFO "Cray XC40 Xeon E5-2698v3 Haswell" -setenv ICE_MACHINE_ENVNAME cray -setenv ICE_MACHINE_ENVINFO "cce 8.5.8, mpich 7.5.3, netcdf 4.4.1.1" -setenv ICE_MACHINE_MAKE gmake -setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS -setenv ICE_MACHINE_INPUTDATA /p/work1/RASM_data/cice_consortium -setenv ICE_MACHINE_BASELINE $WORKDIR/CICE_BASELINE -setenv ICE_MACHINE_SUBMIT "qsub " -setenv ICE_MACHINE_ACCT P00000000 -setenv ICE_MACHINE_QUEUE "debug" -setenv ICE_MACHINE_TPNODE 32 # tasks per node -setenv ICE_MACHINE_BLDTHRDS 4 -setenv ICE_MACHINE_QSTAT "qstat " diff --git a/configuration/scripts/machines/env.gordon_gnu 
b/configuration/scripts/machines/env.gordon_gnu deleted file mode 100755 index d17923bd3..000000000 --- a/configuration/scripts/machines/env.gordon_gnu +++ /dev/null @@ -1,57 +0,0 @@ -#!/bin/csh -f - -set inp = "undefined" -if ($#argv == 1) then - set inp = $1 -endif - -if ("$inp" != "-nomodules") then - -source /opt/modules/default/init/csh - -module unload PrgEnv-cray -module unload PrgEnv-gnu -module unload PrgEnv-intel -module unload PrgEnv-pgi -module load PrgEnv-gnu/5.2.82 - -module unload gcc -module load gcc/6.3.0 - -module unload cray-mpich -module unload cray-mpich2 -module load cray-mpich/7.5.3 - -module unload netcdf -module unload cray-netcdf -module unload cray-hdf5 -module unload cray-hdf5-parallel -module unload cray-netcdf-hdf5parallel -module unload cray-parallel-netcdf -module load cray-netcdf/4.4.1.1 -module load cray-hdf5/1.10.0.1 - -module unload cray-libsci - -module load craype-haswell - -setenv NETCDF_PATH ${NETCDF_DIR} -limit coredumpsize unlimited -limit stacksize unlimited - -endif - -setenv ICE_MACHINE_MACHNAME gordon -setenv ICE_MACHINE_MACHINFO "Cray XC40 Xeon E5-2698v3 Haswell" -setenv ICE_MACHINE_ENVNAME gnu -setenv ICE_MACHINE_ENVINFO "GNU Fortran (GCC) 6.3.0 20161221, mpich 7.5.3, netcdf 4.4.1.1" -setenv ICE_MACHINE_MAKE gmake -setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS -setenv ICE_MACHINE_INPUTDATA /p/work1/RASM_data/cice_consortium -setenv ICE_MACHINE_BASELINE $WORKDIR/CICE_BASELINE -setenv ICE_MACHINE_SUBMIT "qsub " -setenv ICE_MACHINE_ACCT P00000000 -setenv ICE_MACHINE_QUEUE "debug" -setenv ICE_MACHINE_TPNODE 32 # tasks per node -setenv ICE_MACHINE_BLDTHRDS 4 -setenv ICE_MACHINE_QSTAT "qstat " diff --git a/configuration/scripts/machines/env.gordon_intel b/configuration/scripts/machines/env.gordon_intel deleted file mode 100755 index 67aaa9c69..000000000 --- a/configuration/scripts/machines/env.gordon_intel +++ /dev/null @@ -1,59 +0,0 @@ -#!/bin/csh -f - -set inp = "undefined" -if ($#argv == 1) then - set inp = $1 -endif - -if ("$inp" != "-nomodules") then - -source /opt/modules/default/init/csh - -module unload PrgEnv-cray -module unload PrgEnv-gnu -module unload PrgEnv-intel -module unload PrgEnv-pgi -module load PrgEnv-intel/5.2.40 - -module unload intel -module load intel/17.0.2.174 - -module unload cray-mpich -module unload cray-mpich2 -module load cray-mpich/7.3.2 - -module unload netcdf -module unload cray-netcdf -module unload cray-hdf5 -module unload cray-hdf5-parallel -module unload cray-netcdf-hdf5parallel -module unload cray-parallel-netcdf -module load cray-netcdf/4.3.2 -module load cray-hdf5/1.8.13 - -module unload cray-libsci - -module load craype-haswell - -setenv NETCDF_PATH ${NETCDF_DIR} -limit coredumpsize unlimited -limit stacksize unlimited - -endif - -setenv ICE_MACHINE_MACHNAME gordon -setenv ICE_MACHINE_MACHINFO "Cray XC40 Xeon E5-2698v3 Haswell" -setenv ICE_MACHINE_ENVNAME intel -setenv ICE_MACHINE_ENVINFO "ifort 17.0.2 20170213, mpich 7.3.2, netcdf 4.3.2" -setenv ICE_MACHINE_MAKE gmake -setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS -setenv ICE_MACHINE_INPUTDATA /p/work1/RASM_data/cice_consortium -setenv ICE_MACHINE_BASELINE $WORKDIR/CICE_BASELINE -setenv ICE_MACHINE_SUBMIT "qsub " -setenv ICE_MACHINE_ACCT P00000000 -setenv ICE_MACHINE_QUEUE "debug" -setenv ICE_MACHINE_TPNODE 32 # tasks per node -setenv ICE_MACHINE_MAXPES 8000 # maximum total pes (tasks * threads) available -setenv ICE_MACHINE_MAXRUNLENGTH 168 # maximum batch wall time limit in hours (integer) -setenv ICE_MACHINE_BLDTHRDS 4 -setenv ICE_MACHINE_QSTAT "qstat " 
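The OMP_STACKSIZE settings being added to the surrounding env files (64M on most machines, 128M on narwhal below) are worth a note: automatic arrays declared inside a threaded region are replicated per thread, and with most compilers they live on the thread stack, so the default thread stack (often only a few MB) can overflow and segfault once a loop is threaded. A hedged sketch of the failure mode, with made-up array sizes and names:

! Illustrative sketch only -- not from the CICE sources.
subroutine stack_hungry_loop(nblocks, nx, ny)
   implicit none
   integer, intent(in) :: nblocks, nx, ny
   integer :: iblk

   !$OMP PARALLEL DO PRIVATE(iblk)
   do iblk = 1, nblocks
      call work_on_block(nx, ny)   ! each thread allocates its own scratch
   enddo
   !$OMP END PARALLEL DO

contains

   subroutine work_on_block(bx, by)
      integer, intent(in) :: bx, by
      ! automatic array: placed on the calling thread's stack by most
      ! compilers; ~78 MB when bx = by = 320, far beyond a 4 MB default
      real (kind=8) :: scratch(bx, by, 100)
      scratch = 0.0d0
      ! ... per-block computation would go here ...
   end subroutine work_on_block

end subroutine stack_hungry_loop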
diff --git a/configuration/scripts/machines/env.gordon_pgi b/configuration/scripts/machines/env.gordon_pgi deleted file mode 100755 index 5885afb4b..000000000 --- a/configuration/scripts/machines/env.gordon_pgi +++ /dev/null @@ -1,57 +0,0 @@ -#!/bin/csh -f - -set inp = "undefined" -if ($#argv == 1) then - set inp = $1 -endif - -if ("$inp" != "-nomodules") then - -source /opt/modules/default/init/csh - -module unload PrgEnv-cray -module unload PrgEnv-gnu -module unload PrgEnv-intel -module unload PrgEnv-pgi -module load PrgEnv-pgi/5.2.82 - -module unload pgi -module load pgi/16.10.0 - -module unload cray-mpich -module unload cray-mpich2 -module load cray-mpich/7.5.3 - -module unload netcdf -module unload cray-netcdf -module unload cray-hdf5 -module unload cray-hdf5-parallel -module unload cray-netcdf-hdf5parallel -module unload cray-parallel-netcdf -module load cray-netcdf/4.4.1.1 -module load cray-hdf5/1.10.0.1 - -module unload cray-libsci - -module load craype-haswell - -setenv NETCDF_PATH ${NETCDF_DIR} -limit coredumpsize unlimited -limit stacksize unlimited - -endif - -setenv ICE_MACHINE_MACHNAME gordon -setenv ICE_MACHINE_MACHINFO "Cray XC40 Xeon E5-2698v3 Haswell" -setenv ICE_MACHINE_ENVNAME pgi -setenv ICE_MACHINE_ENVINFO "pgf90 16.10-0, mpich 7.5.3, netcdf 4.4.1.1" -setenv ICE_MACHINE_MAKE gmake -setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS -setenv ICE_MACHINE_INPUTDATA /p/work1/RASM_data/cice_consortium -setenv ICE_MACHINE_BASELINE $WORKDIR/CICE_BASELINE -setenv ICE_MACHINE_SUBMIT "qsub " -setenv ICE_MACHINE_ACCT ARLAP96070PET -setenv ICE_MACHINE_QUEUE "debug" -setenv ICE_MACHINE_TPNODE 32 # tasks per node -setenv ICE_MACHINE_BLDTHRDS 4 -setenv ICE_MACHINE_QSTAT "qstat " diff --git a/configuration/scripts/machines/env.hera_intel b/configuration/scripts/machines/env.hera_intel index 7330c3937..a9cf59516 100755 --- a/configuration/scripts/machines/env.hera_intel +++ b/configuration/scripts/machines/env.hera_intel @@ -15,6 +15,9 @@ module load impi/2018.0.4 module load netcdf/4.7.0 #module list +# May be needed for OpenMP memory +#setenv OMP_STACKSIZE 64M + endif setenv ICE_MACHINE_MACHNAME hera diff --git a/configuration/scripts/machines/env.high_Sierra_gnu b/configuration/scripts/machines/env.high_Sierra_gnu index 3845a91aa..0bd31181b 100755 --- a/configuration/scripts/machines/env.high_Sierra_gnu +++ b/configuration/scripts/machines/env.high_Sierra_gnu @@ -1,5 +1,8 @@ #!/bin/csh -f +# May be needed for OpenMP memory +#setenv OMP_STACKSIZE 64M + setenv ICE_MACHINE_MACHNAME high_Sierra setenv ICE_MACHINE_ENVNAME gnu setenv ICE_MACHINE_MAKE make diff --git a/configuration/scripts/machines/env.hobart_intel b/configuration/scripts/machines/env.hobart_intel index 2ab7a3c53..0b6c5b12c 100755 --- a/configuration/scripts/machines/env.hobart_intel +++ b/configuration/scripts/machines/env.hobart_intel @@ -12,6 +12,9 @@ source /usr/share/Modules/init/csh module purge module load compiler/intel/18.0.3 +# May be needed for OpenMP memory +#setenv OMP_STACKSIZE 64M + endif setenv ICE_MACHINE_MACHNAME hobart diff --git a/configuration/scripts/machines/env.hobart_nag b/configuration/scripts/machines/env.hobart_nag index cae8c0fd8..6d22beca9 100755 --- a/configuration/scripts/machines/env.hobart_nag +++ b/configuration/scripts/machines/env.hobart_nag @@ -12,6 +12,9 @@ source /usr/share/Modules/init/csh module purge module load compiler/nag/6.2 +# May be needed for OpenMP memory +#setenv OMP_STACKSIZE 64M + endif setenv ICE_MACHINE_MACHNAME hobart diff --git 
a/configuration/scripts/machines/env.koehr_intel b/configuration/scripts/machines/env.koehr_intel index f4d7cada2..21f124b5f 100755 --- a/configuration/scripts/machines/env.koehr_intel +++ b/configuration/scripts/machines/env.koehr_intel @@ -25,6 +25,9 @@ setenv KMP_AFFINITY disabled limit coredumpsize unlimited limit stacksize unlimited +# May be needed for OpenMP memory +setenv OMP_STACKSIZE 64M + endif setenv ICE_MACHINE_MACHNAME koehr diff --git a/configuration/scripts/machines/env.millikan_intel b/configuration/scripts/machines/env.millikan_intel index 63913166d..c0a7356ad 100755 --- a/configuration/scripts/machines/env.millikan_intel +++ b/configuration/scripts/machines/env.millikan_intel @@ -6,6 +6,9 @@ source /fs/ssm/main/opt/intelcomp/intelcomp-2016.1.156/intelcomp_2016.1.156_mult source $ssmuse -d /fs/ssm/main/opt/openmpi/openmpi-1.6.5/intelcomp-2016.1.156 # openmpi source $ssmuse -d /fs/ssm/hpco/tmp/eccc/201402/04/intel-2016.1.150 # netcdf (and openmpi) +# May be needed for OpenMP memory +#setenv OMP_STACKSIZE 64M + setenv ICE_MACHINE_MACHNAME millikan setenv ICE_MACHINE_ENVNAME intel setenv ICE_MACHINE_MAKE make diff --git a/configuration/scripts/machines/env.mustang_intel18 b/configuration/scripts/machines/env.mustang_intel18 index f420ec7ff..45e5b6518 100755 --- a/configuration/scripts/machines/env.mustang_intel18 +++ b/configuration/scripts/machines/env.mustang_intel18 @@ -21,7 +21,7 @@ module load netcdf-fortran/intel/4.4.2 setenv NETCDF_PATH /app/COST/netcdf-fortran/4.4.2/intel -#setenv OMP_STACKSIZE 256M +setenv OMP_STACKSIZE 64M #setenv MP_LABELIO yes #setenv MP_INFOLEVEL 2 #setenv MP_SHARED_MEMORY yes diff --git a/configuration/scripts/machines/env.mustang_intel19 b/configuration/scripts/machines/env.mustang_intel19 index 0fc0458fd..438bc1111 100755 --- a/configuration/scripts/machines/env.mustang_intel19 +++ b/configuration/scripts/machines/env.mustang_intel19 @@ -21,7 +21,7 @@ module load netcdf-fortran/intel/4.4.2 setenv NETCDF_PATH /app/COST/netcdf-fortran/4.4.2/intel -#setenv OMP_STACKSIZE 256M +setenv OMP_STACKSIZE 64M #setenv MP_LABELIO yes #setenv MP_INFOLEVEL 2 #setenv MP_SHARED_MEMORY yes diff --git a/configuration/scripts/machines/env.mustang_intel20 b/configuration/scripts/machines/env.mustang_intel20 index 00c4a250d..cca0b3019 100755 --- a/configuration/scripts/machines/env.mustang_intel20 +++ b/configuration/scripts/machines/env.mustang_intel20 @@ -21,7 +21,7 @@ module load netcdf-fortran/intel/4.4.2 setenv NETCDF_PATH /app/COST/netcdf-fortran/4.4.2/intel -#setenv OMP_STACKSIZE 256M +setenv OMP_STACKSIZE 64M #setenv MP_LABELIO yes #setenv MP_INFOLEVEL 2 #setenv MP_SHARED_MEMORY yes diff --git a/configuration/scripts/machines/env.narwhal_aocc b/configuration/scripts/machines/env.narwhal_aocc index a392f9363..6d6822f46 100755 --- a/configuration/scripts/machines/env.narwhal_aocc +++ b/configuration/scripts/machines/env.narwhal_aocc @@ -33,6 +33,8 @@ module load cray-hdf5/1.12.0.4 setenv NETCDF_PATH ${NETCDF_DIR} limit coredumpsize unlimited limit stacksize unlimited +setenv OMP_STACKSIZE 128M +setenv OMP_WAIT_POLICY PASSIVE endif diff --git a/configuration/scripts/machines/env.narwhal_cray b/configuration/scripts/machines/env.narwhal_cray index eb9e42bb2..d0fcc9ba7 100755 --- a/configuration/scripts/machines/env.narwhal_cray +++ b/configuration/scripts/machines/env.narwhal_cray @@ -33,7 +33,8 @@ module load cray-hdf5/1.12.0.4 setenv NETCDF_PATH ${NETCDF_DIR} limit coredumpsize unlimited limit stacksize unlimited -setenv OMP_WAIT_POLICY passive +setenv 
OMP_STACKSIZE 128M +setenv OMP_WAIT_POLICY PASSIVE endif diff --git a/configuration/scripts/machines/env.narwhal_gnu b/configuration/scripts/machines/env.narwhal_gnu index 4df81b957..51a272f4e 100755 --- a/configuration/scripts/machines/env.narwhal_gnu +++ b/configuration/scripts/machines/env.narwhal_gnu @@ -33,6 +33,8 @@ module load cray-hdf5/1.12.0.4 setenv NETCDF_PATH ${NETCDF_DIR} limit coredumpsize unlimited limit stacksize unlimited +setenv OMP_STACKSIZE 128M +setenv OMP_WAIT_POLICY PASSIVE endif diff --git a/configuration/scripts/machines/env.narwhal_intel b/configuration/scripts/machines/env.narwhal_intel index 2cdf4f93c..f79d962ff 100755 --- a/configuration/scripts/machines/env.narwhal_intel +++ b/configuration/scripts/machines/env.narwhal_intel @@ -33,6 +33,8 @@ module load cray-hdf5/1.12.0.4 setenv NETCDF_PATH ${NETCDF_DIR} limit coredumpsize unlimited limit stacksize unlimited +setenv OMP_STACKSIZE 128M +setenv OMP_WAIT_POLICY PASSIVE endif diff --git a/configuration/scripts/machines/env.onyx_cray b/configuration/scripts/machines/env.onyx_cray index 38785a27d..e696d1b98 100755 --- a/configuration/scripts/machines/env.onyx_cray +++ b/configuration/scripts/machines/env.onyx_cray @@ -39,6 +39,7 @@ module load craype-broadwell setenv NETCDF_PATH ${NETCDF_DIR} limit coredumpsize unlimited limit stacksize unlimited +setenv OMP_STACKSIZE 64M endif diff --git a/configuration/scripts/machines/env.onyx_gnu b/configuration/scripts/machines/env.onyx_gnu index 699c01559..80ebb8e43 100755 --- a/configuration/scripts/machines/env.onyx_gnu +++ b/configuration/scripts/machines/env.onyx_gnu @@ -39,6 +39,7 @@ module load craype-broadwell setenv NETCDF_PATH ${NETCDF_DIR} limit coredumpsize unlimited limit stacksize unlimited +setenv OMP_STACKSIZE 64M endif diff --git a/configuration/scripts/machines/env.onyx_intel b/configuration/scripts/machines/env.onyx_intel index 39f25e8e5..362454dd4 100755 --- a/configuration/scripts/machines/env.onyx_intel +++ b/configuration/scripts/machines/env.onyx_intel @@ -39,6 +39,7 @@ module load craype-broadwell setenv NETCDF_PATH ${NETCDF_DIR} limit coredumpsize unlimited limit stacksize unlimited +setenv OMP_STACKSIZE 64M endif diff --git a/configuration/scripts/machines/env.orion_intel b/configuration/scripts/machines/env.orion_intel index 95850b6bb..bdfccdd60 100755 --- a/configuration/scripts/machines/env.orion_intel +++ b/configuration/scripts/machines/env.orion_intel @@ -22,6 +22,9 @@ echo " module load netcdf/4.7.2" #module load netcdf/4.7.2 ##module list +# May be needed for OpenMP memory +#setenv OMP_STACKSIZE 64M + endif setenv ICE_MACHINE_MACHNAME orion diff --git a/configuration/scripts/machines/env.phase3_intel b/configuration/scripts/machines/env.phase3_intel index af8dd3e5f..f5e3e4584 100755 --- a/configuration/scripts/machines/env.phase3_intel +++ b/configuration/scripts/machines/env.phase3_intel @@ -13,6 +13,9 @@ module load NetCDF/4.5.0 module load ESMF/7_1_0r module list +# May be needed for OpenMP memory +#setenv OMP_STACKSIZE 64M + setenv ICE_MACHINE_MACHNAME phase3 setenv ICE_MACHINE_ENVNAME intel setenv ICE_MACHINE_MAKE gmake diff --git a/configuration/scripts/machines/env.testmachine_intel b/configuration/scripts/machines/env.testmachine_intel index 5b52f1b07..b6f7c329e 100755 --- a/configuration/scripts/machines/env.testmachine_intel +++ b/configuration/scripts/machines/env.testmachine_intel @@ -1,5 +1,8 @@ #!/bin/csh -f +# May be needed for OpenMP memory +#setenv OMP_STACKSIZE 64M + setenv ICE_MACHINE_MACHNAME testmachine setenv 
ICE_MACHINE_MACHINFO "Undefined" setenv ICE_MACHINE_ENVNAME intel diff --git a/configuration/scripts/machines/env.travisCI_gnu b/configuration/scripts/machines/env.travisCI_gnu index b7a1b6176..aa3c1eec7 100755 --- a/configuration/scripts/machines/env.travisCI_gnu +++ b/configuration/scripts/machines/env.travisCI_gnu @@ -1,5 +1,8 @@ #!/bin/csh -f +# May be needed for OpenMP memory +#setenv OMP_STACKSIZE 64M + setenv ICE_MACHINE_MACHNAME travisCI setenv ICE_MACHINE_MACHINFO "Cloud Computing" setenv ICE_MACHINE_ENVNAME gnu diff --git a/configuration/scripts/options/set_env.cmplog b/configuration/scripts/options/set_env.cmplog new file mode 100644 index 000000000..b59c1cb6d --- /dev/null +++ b/configuration/scripts/options/set_env.cmplog @@ -0,0 +1 @@ +setenv ICE_BFBTYPE log diff --git a/configuration/scripts/options/set_env.cmplogrest b/configuration/scripts/options/set_env.cmplogrest new file mode 100644 index 000000000..118986199 --- /dev/null +++ b/configuration/scripts/options/set_env.cmplogrest @@ -0,0 +1 @@ +setenv ICE_BFBTYPE logrest diff --git a/configuration/scripts/options/set_env.cmprest b/configuration/scripts/options/set_env.cmprest new file mode 100644 index 000000000..7258fa058 --- /dev/null +++ b/configuration/scripts/options/set_env.cmprest @@ -0,0 +1 @@ +setenv ICE_BFBTYPE restart diff --git a/configuration/scripts/options/set_env.ompschedd1 b/configuration/scripts/options/set_env.ompschedd1 new file mode 100644 index 000000000..a4d255f48 --- /dev/null +++ b/configuration/scripts/options/set_env.ompschedd1 @@ -0,0 +1 @@ +setenv ICE_OMPSCHED "dynamic,1" diff --git a/configuration/scripts/options/set_env.ompscheds b/configuration/scripts/options/set_env.ompscheds new file mode 100644 index 000000000..b9a4f58b0 --- /dev/null +++ b/configuration/scripts/options/set_env.ompscheds @@ -0,0 +1 @@ +setenv ICE_OMPSCHED "static" diff --git a/configuration/scripts/options/set_env.ompscheds1 b/configuration/scripts/options/set_env.ompscheds1 new file mode 100644 index 000000000..a9ca4874d --- /dev/null +++ b/configuration/scripts/options/set_env.ompscheds1 @@ -0,0 +1 @@ +setenv ICE_OMPSCHED "static,1" diff --git a/configuration/scripts/options/set_env.qcchk b/configuration/scripts/options/set_env.qcchk new file mode 100644 index 000000000..9b9fbbd2e --- /dev/null +++ b/configuration/scripts/options/set_env.qcchk @@ -0,0 +1 @@ +setenv ICE_BFBTYPE qcchk diff --git a/configuration/scripts/options/set_env.qcchkf b/configuration/scripts/options/set_env.qcchkf new file mode 100644 index 000000000..589e60772 --- /dev/null +++ b/configuration/scripts/options/set_env.qcchkf @@ -0,0 +1 @@ +setenv ICE_BFBTYPE qcchkf diff --git a/configuration/scripts/options/set_nml.dt3456s b/configuration/scripts/options/set_nml.dt3456s new file mode 100644 index 000000000..74e5482d7 --- /dev/null +++ b/configuration/scripts/options/set_nml.dt3456s @@ -0,0 +1 @@ +dt = 3456.0 diff --git a/configuration/scripts/options/set_nml.qcnonbfb b/configuration/scripts/options/set_nml.qcnonbfb deleted file mode 100644 index a965b863c..000000000 --- a/configuration/scripts/options/set_nml.qcnonbfb +++ /dev/null @@ -1,16 +0,0 @@ -dt = 3456.0 -npt_unit = 'y' -npt = 5 -year_init = 2005 -month_init = 1 -day_init = 1 -sec_init = 0 -use_leap_years = .false. -fyear_init = 2005 -ycycle = 1 -dumpfreq = 'm' -dumpfreq_n = 12 -diagfreq = 24 -histfreq = 'd','x','x','x','x' -f_hi = 'd' -hist_avg = .false. 
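Each of the new set_env.* option files above carries a single csh setenv line that cice.setup folds into a case's cice.settings when the option is requested with -s; ICE_BFBTYPE then steers the bfbcomp logic in baseline.script below, and ICE_OMPSCHED feeds the OMP_SCHEDULE runtime setting. A minimal sketch of how one of these options might be invoked (the machine, environment, testid, and pe layout here are illustrative, not taken from this patch series):

    # request a reprosum smoke test whose bfbcomp check compares log files
    # (set_env.cmplog sets ICE_BFBTYPE=log) instead of the default restart files
    ./cice.setup --test smoke --mach gordon --env gnu \
                 --testid t00 --pes 4x2 -s diag1,reprosum,cmplog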
diff --git a/configuration/scripts/options/set_nml.timerstats b/configuration/scripts/options/set_nml.timerstats new file mode 100644 index 000000000..723891b7b --- /dev/null +++ b/configuration/scripts/options/set_nml.timerstats @@ -0,0 +1 @@ +timer_stats = .true. diff --git a/configuration/scripts/tests/baseline.script b/configuration/scripts/tests/baseline.script index ac69d49a0..bb8f50a1f 100644 --- a/configuration/scripts/tests/baseline.script +++ b/configuration/scripts/tests/baseline.script @@ -36,7 +36,7 @@ if (${ICE_BASECOM} != ${ICE_SPVAL}) then ${ICE_CASEDIR}/casescripts/comparelog.csh ${base_file} ${test_file} notcicefile set bfbstatus = $status - else if (${ICE_TEST} =~ qcchk*) then + else if (${ICE_BFBTYPE} =~ qcchk*) then set test_dir = ${ICE_RUNDIR} set base_dir = ${ICE_BASELINE}/${ICE_BASECOM}/${ICE_TESTNAME} ${ICE_SANDBOX}/configuration/scripts/tests/QC/cice.t-test.py ${base_dir} ${test_dir} @@ -151,7 +151,7 @@ if (${ICE_BFBCOMP} != ${ICE_SPVAL}) then endif endif - if (${ICE_TEST} == "logbfb") then + if (${ICE_BFBTYPE} == "log") then set test_file = `ls -1t ${ICE_RUNDIR}/cice.runlog* | head -1` set base_file = `ls -1t ${ICE_RUNDIR}/../${ICE_BFBCOMP}.${ICE_TESTID}/cice.runlog* | head -1` @@ -163,21 +163,61 @@ if (${ICE_BFBCOMP} != ${ICE_SPVAL}) then ${ICE_CASEDIR}/casescripts/comparelog.csh ${base_file} ${test_file} set bfbstatus = $status - else if (${ICE_TEST} =~ qcchk*) then + else if (${ICE_BFBTYPE} == "logrest") then + set test_file = `ls -1t ${ICE_RUNDIR}/cice.runlog* | head -1` + set base_file = `ls -1t ${ICE_RUNDIR}/../${ICE_BFBCOMP}.${ICE_TESTID}/cice.runlog* | head -1` + + echo "" + echo "bfb Log Compare Mode:" + echo "base_file: ${base_file}" + echo "test_file: ${test_file}" + + ${ICE_CASEDIR}/casescripts/comparelog.csh ${base_file} ${test_file} + set bfbstatusl = $status + + set test_dir = ${ICE_RUNDIR}/restart + set base_dir = ${ICE_RUNDIR}/../${ICE_BFBCOMP}.${ICE_TESTID}/restart + + echo "" + echo "bfb Restart Compare Mode:" + echo "base_dir: ${base_dir}" + echo "test_dir: ${test_dir}" + + ${ICE_CASEDIR}/casescripts/comparebfb.csh ${base_dir} ${test_dir} + set bfbstatusr = $status + + if (${bfbstatusl} == ${bfbstatusr}) then + set bfbstatus = ${bfbstatusl} + else if (${bfbstatusl} == 1 || ${bfbstatusr} == 1) then + set bfbstatus = 1 + else if (${bfbstatusl} > ${bfbstatusr}) then + set bfbstatus = ${bfbstatusl} + else + set bfbstatus = ${bfbstatusr} + endif + + echo "bfb log, rest, combined status = ${bfbstatusl},${bfbstatusr},${bfbstatus}" + + else if (${ICE_BFBTYPE} =~ qcchk*) then set test_dir = ${ICE_RUNDIR} set base_dir = ${ICE_RUNDIR}/../${ICE_BFBCOMP}.${ICE_TESTID} + echo "" + echo "qcchk Compare Mode:" + echo "base_dir: ${base_dir}" + echo "test_dir: ${test_dir}" ${ICE_SANDBOX}/configuration/scripts/tests/QC/cice.t-test.py ${base_dir} ${test_dir} set bfbstatus = $status # expecting failure, so switch value - if (${ICE_TEST} =~ qcchkf*) then + if (${ICE_BFBTYPE} == "qcchkf") then @ bfbstatus = 1 - $bfbstatus endif + else set test_dir = ${ICE_RUNDIR}/restart set base_dir = ${ICE_RUNDIR}/../${ICE_BFBCOMP}.${ICE_TESTID}/restart echo "" - echo "bfb Compare Mode:" + echo "bfb Restart Compare Mode:" echo "base_dir: ${base_dir}" echo "test_dir: ${test_dir}" @@ -190,10 +230,10 @@ if (${ICE_BFBCOMP} != ${ICE_SPVAL}) then rm -f ${ICE_CASEDIR}/test_output.prev if (${bfbstatus} == 0) then echo "PASS ${ICE_TESTNAME} bfbcomp ${ICE_BFBCOMP}" >> ${ICE_CASEDIR}/test_output - echo "bfb baseline and test dataset are identical" + echo "bfbcomp baseline and test dataset 
pass" else if (${bfbstatus} == 1) then echo "FAIL ${ICE_TESTNAME} bfbcomp ${ICE_BFBCOMP} different-data" >> ${ICE_CASEDIR}/test_output - echo "bfbcomp and test dataset are different" + echo "bfbcomp baseline and test dataset fail" else if (${bfbstatus} == 2) then echo "MISS ${ICE_TESTNAME} bfbcomp ${ICE_BFBCOMP} missing-data" >> ${ICE_CASEDIR}/test_output echo "Missing data" diff --git a/configuration/scripts/tests/first_suite.ts b/configuration/scripts/tests/first_suite.ts index 31eba9fb7..b42d917ea 100644 --- a/configuration/scripts/tests/first_suite.ts +++ b/configuration/scripts/tests/first_suite.ts @@ -2,5 +2,5 @@ smoke gx3 8x2 diag1,run5day restart gx3 4x2x25x29x4 dslenderX2 smoke gx3 4x2x25x29x4 debug,run2day,dslenderX2 -logbfb gx3 4x2x25x29x4 dslenderX2,diag1,reprosum +smoke gx3 4x2x25x29x4 dslenderX2,diag1,reprosum,cmplog smoke gx3 1x2 run2day diff --git a/configuration/scripts/tests/nothread_suite.ts b/configuration/scripts/tests/nothread_suite.ts index 616741aa2..12fd03662 100644 --- a/configuration/scripts/tests/nothread_suite.ts +++ b/configuration/scripts/tests/nothread_suite.ts @@ -1,7 +1,7 @@ # Test Grid PEs Sets BFB-compare restart gx3 8x1x25x29x2 dslenderX2 -logbfb gx3 8x1x25x29x2 dslenderX2,diag1,reprosum +smoke gx3 8x1x25x29x2 dslenderX2,diag1,reprosum smoke gx3 16x1 diag1,run5day smoke gx3 1x1 debug,diag1,run2day @@ -70,9 +70,9 @@ restart gx3 32x1x5x10x12 drakeX2 restart_gx3_8x1x25x29x2_ restart gx3 16x1x8x10x10 droundrobin,maskhalo restart_gx3_8x1x25x29x2_dslenderX2 restart gx3 4x1x25x29x4 droundrobin restart_gx3_8x1x25x29x2_dslenderX2 -logbfb gx3 1x1x50x58x4 droundrobin,diag1,maskhalo,reprosum logbfb_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum -logbfb gx3 4x1x25x116x1 dslenderX1,diag1,maskhalo,reprosum logbfb_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum -logbfb gx3 20x1x5x29x20 dsectrobin,diag1,short,reprosum logbfb_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum -logbfb gx3 16x1x8x10x10 droundrobin,diag1,reprosum logbfb_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum -logbfb gx3 6x1x50x58x1 droundrobin,diag1,reprosum logbfb_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum -logbfb gx3 12x1x4x29x9 dspacecurve,diag1,maskhalo,reprosum logbfb_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum +smoke gx3 1x1x50x58x4 droundrobin,diag1,maskhalo,reprosum,cmplog smoke_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum +smoke gx3 4x1x25x116x1 dslenderX1,diag1,maskhalo,reprosum,cmplog smoke_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum +smoke gx3 20x1x5x29x20 dsectrobin,diag1,short,reprosum,cmplog smoke_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum +smoke gx3 16x1x8x10x10 droundrobin,diag1,reprosum,cmplog smoke_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum +smoke gx3 6x1x50x58x1 droundrobin,diag1,reprosum,cmplog smoke_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum +smoke gx3 12x1x4x29x9 dspacecurve,diag1,maskhalo,reprosum,cmplog smoke_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum diff --git a/configuration/scripts/tests/omp_suite.ts b/configuration/scripts/tests/omp_suite.ts new file mode 100644 index 000000000..9202b06e5 --- /dev/null +++ b/configuration/scripts/tests/omp_suite.ts @@ -0,0 +1,46 @@ +# Test Grid PEs Sets BFB-compare + +smoke gx3 8x4 diag1,reprosum,run10day +smoke gx3 6x2 alt01,reprosum,run10day +smoke gx3 8x2 alt02,reprosum,run10day +smoke gx3 12x2 alt03,droundrobin,reprosum,run10day +smoke gx3 4x4 alt04,reprosum,run10day +smoke gx3 4x4 alt05,reprosum,run10day +smoke gx3 8x2 alt06,reprosum,run10day +smoke gx3 8x2 bgcz,reprosum,run10day +smoke gx1 15x2 seabedprob,reprosum,run10day +smoke gx3 14x2 fsd12,reprosum,run10day 
+smoke gx3 11x2 isotope,reprosum,run10day +smoke gx3 8x4 snwitdrdg,snwgrain,icdefault,reprosum,run10day +smoke gx3 6x4 dynpicard,reprosum,run10day +smoke gx3 8x3 zsal,reprosum,run10day + +smoke gbox128 8x2 reprosum,run10day +smoke gbox128 12x2 boxnodyn,reprosum,run10day +smoke gbox128 9x2 boxadv,reprosum,run10day +smoke gbox128 14x2 boxrestore,reprosum,run10day +smoke gbox80 4x5 box2001,reprosum,run10day +smoke gbox80 11x3 boxslotcyl,reprosum,run10day + +smoke gx3 4x2 diag1,reprosum,run10day,cmplogrest smoke_gx3_8x4_diag1_reprosum_run10day +smoke gx3 4x1 diag1,reprosum,run10day,cmplogrest,thread smoke_gx3_8x4_diag1_reprosum_run10day +smoke gx3 8x1 alt01,reprosum,run10day,cmplogrest,thread smoke_gx3_6x2_alt01_reprosum_run10day +smoke gx3 16x1 alt02,reprosum,run10day,cmplogrest,thread smoke_gx3_8x2_alt02_reprosum_run10day +smoke gx3 24x1 alt03,reprosum,run10day,cmplogrest,thread smoke_gx3_12x2_alt03_droundrobin_reprosum_run10day +smoke gx3 24x1 alt04,reprosum,run10day,cmplogrest,thread smoke_gx3_4x4_alt04_reprosum_run10day +smoke gx3 14x1 alt05,reprosum,run10day,cmplogrest,thread smoke_gx3_4x4_alt05_reprosum_run10day +smoke gx3 24x1 alt06,reprosum,run10day,cmplogrest,thread smoke_gx3_8x2_alt06_reprosum_run10day +smoke gx3 12x1 bgcz,reprosum,run10day,cmplogrest,thread smoke_gx3_8x2_bgcz_reprosum_run10day +smoke gx1 28x1 seabedprob,reprosum,run10day,cmplogrest,thread smoke_gx1_15x2_reprosum_run10day_seabedprob +smoke gx3 30x1 fsd12,reprosum,run10day,cmplogrest,thread smoke_gx3_14x2_fsd12_reprosum_run10day +smoke gx3 16x1 isotope,reprosum,run10day,cmplogrest,thread smoke_gx3_11x2_isotope_reprosum_run10day +smoke gx3 28x1 snwitdrdg,snwgrain,icdefault,reprosum,run10day,cmplogrest,thread smoke_gx3_8x4_icdefault_reprosum_run10day_snwitdrdg_snwgrain +smoke gx3 18x1 dynpicard,reprosum,run10day,cmplogrest,thread smoke_gx3_6x4_dynpicard_reprosum_run10day +smoke gx3 20x1 zsal,reprosum,run10day,cmplogrest,thread smoke_gx3_8x3_reprosum_run10day_zsal + +smoke gbox128 20x1 reprosum,run10day,cmplogrest,thread smoke_gbox128_8x2_reprosum_run10day +smoke gbox128 16x1 boxnodyn,reprosum,run10day,cmplogrest,thread smoke_gbox128_12x2_boxnodyn_reprosum_run10day +smoke gbox128 14x1 boxadv,reprosum,run10day,cmplogrest,thread smoke_gbox128_9x2_boxadv_reprosum_run10day +smoke gbox128 24x1 boxrestore,reprosum,run10day,cmplogrest,thread smoke_gbox128_14x2_boxrestore_reprosum_run10day +smoke gbox80 19x1 box2001,reprosum,run10day,cmplogrest,thread smoke_gbox80_4x5_box2001_reprosum_run10day +smoke gbox80 8x4 boxslotcyl,reprosum,run10day,cmplogrest,thread smoke_gbox80_11x3_boxslotcyl_reprosum_run10day diff --git a/configuration/scripts/tests/perf_suite.ts b/configuration/scripts/tests/perf_suite.ts index 859b9f21b..9a17d8a55 100644 --- a/configuration/scripts/tests/perf_suite.ts +++ b/configuration/scripts/tests/perf_suite.ts @@ -24,4 +24,7 @@ smoke gx1 32x2x16x16x16 run2day,droundrobin smoke_gx1_64x1x16x16x8 smoke gx1 16x4x16x16x32 run2day,droundrobin smoke_gx1_64x1x16x16x8_droundrobin_run2day_thread smoke gx1 8x8x16x16x64 run2day,droundrobin smoke_gx1_64x1x16x16x8_droundrobin_run2day_thread smoke gx1 4x16x16x16x128 run2day,droundrobin smoke_gx1_64x1x16x16x8_droundrobin_run2day_thread +smoke gx1 32x2x16x16x16 run2day,droundrobin,ompscheds smoke_gx1_64x1x16x16x8_droundrobin_run2day_thread +smoke gx1 32x2x16x16x16 run2day,droundrobin,ompschedd1 smoke_gx1_64x1x16x16x8_droundrobin_run2day_thread +smoke gx1 32x2x16x16x16 run2day,droundrobin,ompscheds1 smoke_gx1_64x1x16x16x8_droundrobin_run2day_thread # diff --git 
a/configuration/scripts/tests/prod_suite.ts b/configuration/scripts/tests/prod_suite.ts index 04982adb1..877fa1ce6 100644 --- a/configuration/scripts/tests/prod_suite.ts +++ b/configuration/scripts/tests/prod_suite.ts @@ -1,6 +1,6 @@ # Test Grid PEs Sets BFB-compare -qcchk gx3 72x1 qc,medium qcchk_gx3_72x1_medium_qc -qcchk gx1 144x1 qc,medium +qcchk gx3 72x1 qc,qcchk,medium qcchk_gx3_72x1_medium_qc_qcchk +qcchk gx1 144x1 qc,qcchk,medium smoke gx1 144x2 gx1prod,long,run10year -qcchkf gx3 72x1 qc,medium,alt02 qcchk_gx3_72x1_medium_qc -qcchk gx3 72x1 qcnonbfb,medium qcchk_gx3_72x1_medium_qc +qcchk gx3 72x1 qc,qcchkf,medium,alt02 qcchk_gx3_72x1_medium_qc_qcchk +qcchk gx3 72x1 qc,qcchk,dt3456s,medium qcchk_gx3_72x1_medium_qc_qcchk diff --git a/configuration/scripts/tests/reprosum_suite.ts b/configuration/scripts/tests/reprosum_suite.ts index a7f3fe5bc..417a7de2e 100644 --- a/configuration/scripts/tests/reprosum_suite.ts +++ b/configuration/scripts/tests/reprosum_suite.ts @@ -1,11 +1,11 @@ # Test Grid PEs Sets BFB-compare -logbfb gx3 4x2x25x29x4 dslenderX2,diag1,reprosum -#logbfb gx3 4x2x25x29x4 dslenderX2,diag1 -logbfb gx3 1x1x50x58x4 droundrobin,diag1,thread,maskhalo,reprosum logbfb_gx3_4x2x25x29x4_diag1_dslenderX2_reprosum -logbfb gx3 4x1x25x116x1 dslenderX1,diag1,thread,maskhalo,reprosum logbfb_gx3_4x2x25x29x4_diag1_dslenderX2_reprosum -logbfb gx3 1x20x5x29x80 dsectrobin,diag1,short,reprosum logbfb_gx3_4x2x25x29x4_diag1_dslenderX2_reprosum -logbfb gx3 8x2x8x10x20 droundrobin,diag1,reprosum logbfb_gx3_4x2x25x29x4_diag1_dslenderX2_reprosum -logbfb gx3 6x2x50x58x1 droundrobin,diag1,reprosum logbfb_gx3_4x2x25x29x4_diag1_dslenderX2_reprosum -logbfb gx3 6x2x4x29x18 dspacecurve,diag1,maskhalo,reprosum logbfb_gx3_4x2x25x29x4_diag1_dslenderX2_reprosum -logbfb gx3 17x2x1x1x800 droundrobin,diag1,maskhalo,reprosum logbfb_gx3_4x2x25x29x4_diag1_dslenderX2_reprosum -#logbfb gx3 8x2x8x10x20 droundrobin,diag1 logbfb_gx3_4x2x25x29x4_diag1_dslenderX2 +smoke gx3 4x2x25x29x4 dslenderX2,diag1,reprosum +#smoke gx3 4x2x25x29x4 dslenderX2,diag1 +smoke gx3 1x1x50x58x4 droundrobin,diag1,thread,maskhalo,reprosum,cmplog smoke_gx3_4x2x25x29x4_diag1_dslenderX2_reprosum +smoke gx3 4x1x25x116x1 dslenderX1,diag1,thread,maskhalo,reprosum,cmplog smoke_gx3_4x2x25x29x4_diag1_dslenderX2_reprosum +smoke gx3 1x20x5x29x80 dsectrobin,diag1,short,reprosum,cmplog smoke_gx3_4x2x25x29x4_diag1_dslenderX2_reprosum +smoke gx3 8x2x8x10x20 droundrobin,diag1,reprosum,cmplog smoke_gx3_4x2x25x29x4_diag1_dslenderX2_reprosum +smoke gx3 6x2x50x58x1 droundrobin,diag1,reprosum,cmplog smoke_gx3_4x2x25x29x4_diag1_dslenderX2_reprosum +smoke gx3 6x2x4x29x18 dspacecurve,diag1,maskhalo,reprosum,cmplog smoke_gx3_4x2x25x29x4_diag1_dslenderX2_reprosum +smoke gx3 17x2x1x1x800 droundrobin,diag1,maskhalo,reprosum,cmplog smoke_gx3_4x2x25x29x4_diag1_dslenderX2_reprosum +#smoke gx3 8x2x8x10x20 droundrobin,diag1,cmplog smoke_gx3_4x2x25x29x4_diag1_dslenderX2 diff --git a/configuration/scripts/tests/test_logbfb.script b/configuration/scripts/tests/test_logbfb.script deleted file mode 100644 index 0ac1ed224..000000000 --- a/configuration/scripts/tests/test_logbfb.script +++ /dev/null @@ -1,33 +0,0 @@ -# This is identical to a smoke test, but triggers bfbcompare with log files instead of restarts -#---------------------------------------------------- -# Run the CICE model -# cice.run returns -1 if run did not complete successfully - -./cice.run -set res="$status" - -set log_file = `ls -t1 ${ICE_RUNDIR}/cice.runlog* | head -1` -set ttimeloop = `grep TimeLoop ${log_file} 
| grep Timer | cut -c 22-32` -set tdynamics = `grep Dynamics ${log_file} | grep Timer | cut -c 22-32` -set tcolumn = `grep Column ${log_file} | grep Timer | cut -c 22-32` -if (${ttimeloop} == "") set ttimeloop = -1 -if (${tdynamics} == "") set tdynamics = -1 -if (${tcolumn} == "") set tcolumn = -1 - -mv -f ${ICE_CASEDIR}/test_output ${ICE_CASEDIR}/test_output.prev -cat ${ICE_CASEDIR}/test_output.prev | grep -iv "${ICE_TESTNAME} run" >! ${ICE_CASEDIR}/test_output -mv -f ${ICE_CASEDIR}/test_output ${ICE_CASEDIR}/test_output.prev -cat ${ICE_CASEDIR}/test_output.prev | grep -iv "${ICE_TESTNAME} test" >! ${ICE_CASEDIR}/test_output -rm -f ${ICE_CASEDIR}/test_output.prev - -set grade = PASS -if ( $res != 0 ) then - set grade = FAIL - echo "$grade ${ICE_TESTNAME} run ${ttimeloop} ${tdynamics} ${tcolumn}" >> ${ICE_CASEDIR}/test_output - echo "$grade ${ICE_TESTNAME} test " >> ${ICE_CASEDIR}/test_output - exit 99 -endif - -echo "$grade ${ICE_TESTNAME} run ${ttimeloop} ${tdynamics} ${tcolumn}" >> ${ICE_CASEDIR}/test_output -echo "$grade ${ICE_TESTNAME} test " >> ${ICE_CASEDIR}/test_output - diff --git a/configuration/scripts/tests/test_qcchkf.script b/configuration/scripts/tests/test_qcchkf.script deleted file mode 100644 index 81b5f05fc..000000000 --- a/configuration/scripts/tests/test_qcchkf.script +++ /dev/null @@ -1,36 +0,0 @@ - -cp ${ICE_SANDBOX}/configuration/scripts/tests/QC/CICE_t_critical_p0.8.nc . -cp ${ICE_SANDBOX}/configuration/scripts/tests/QC/CICE_Lookup_Table_p0.8_n1825.nc . - -#---------------------------------------------------- -# Run the CICE model -# cice.run returns -1 if run did not complete successfully - -./cice.run -set res="$status" - -set log_file = `ls -t1 ${ICE_RUNDIR}/cice.runlog* | head -1` -set ttimeloop = `grep TimeLoop ${log_file} | grep Timer | cut -c 22-32` -set tdynamics = `grep Dynamics ${log_file} | grep Timer | cut -c 22-32` -set tcolumn = `grep Column ${log_file} | grep Timer | cut -c 22-32` -if (${ttimeloop} == "") set ttimeloop = -1 -if (${tdynamics} == "") set tdynamics = -1 -if (${tcolumn} == "") set tcolumn = -1 - -mv -f ${ICE_CASEDIR}/test_output ${ICE_CASEDIR}/test_output.prev -cat ${ICE_CASEDIR}/test_output.prev | grep -iv "${ICE_TESTNAME} run" >! ${ICE_CASEDIR}/test_output -mv -f ${ICE_CASEDIR}/test_output ${ICE_CASEDIR}/test_output.prev -cat ${ICE_CASEDIR}/test_output.prev | grep -iv "${ICE_TESTNAME} test" >! ${ICE_CASEDIR}/test_output -rm -f ${ICE_CASEDIR}/test_output.prev - -set grade = PASS -if ( $res != 0 ) then - set grade = FAIL - echo "$grade ${ICE_TESTNAME} run ${ttimeloop} ${tdynamics} ${tcolumn}" >> ${ICE_CASEDIR}/test_output - echo "$grade ${ICE_TESTNAME} test " >> ${ICE_CASEDIR}/test_output - exit 99 -endif - -echo "$grade ${ICE_TESTNAME} run ${ttimeloop} ${tdynamics} ${tcolumn}" >> ${ICE_CASEDIR}/test_output -echo "$grade ${ICE_TESTNAME} test " >> ${ICE_CASEDIR}/test_output - diff --git a/doc/source/cice_index.rst b/doc/source/cice_index.rst index 85acbece3..d216f7849 100644 --- a/doc/source/cice_index.rst +++ b/doc/source/cice_index.rst @@ -650,6 +650,7 @@ either Celsius or Kelvin units). "Tffresh", "freezing temp of fresh ice", "273.15 K" "tfrz_option", "form of ocean freezing temperature", "" "thinS", "minimum ice thickness for brine tracer", "" + "timer_stats", "logical to turn on extra timer statistics", ".false." 
"timesecs", "total elapsed time in seconds", "s" "time_beg", "beginning time for history averages", "" "time_bounds", "beginning and ending time for history averages", "" diff --git a/doc/source/conf.py b/doc/source/conf.py index 099f65403..8b9aecaa6 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -54,7 +54,7 @@ # General information about the project. project = u'CICE' -copyright = u'2021, Triad National Security, LLC (code) and National Center for Atmospheric Research (documentation)' +copyright = u'2022, Triad National Security, LLC (code) and National Center for Atmospheric Research (documentation)' author = u'CICE-Consortium' # The version info for the project you're documenting, acts as replacement for @@ -62,9 +62,9 @@ # built documents. # # The short X.Y version. -version = u'6.3.0' +version = u'6.3.1' # The full version, including alpha/beta/rc tags. -version = u'6.3.0' +version = u'6.3.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/doc/source/intro/copyright.rst b/doc/source/intro/copyright.rst index f09f6c58d..86b15b8d2 100644 --- a/doc/source/intro/copyright.rst +++ b/doc/source/intro/copyright.rst @@ -5,7 +5,7 @@ Copyright ============================= -© Copyright 2021, Triad National Security LLC. All rights reserved. +© Copyright 2022, Triad National Security LLC. All rights reserved. This software was produced under U.S. Government contract 89233218CNA000001 for Los Alamos National Laboratory (LANL), which is operated by Triad National Security, LLC for the U.S. Department diff --git a/doc/source/user_guide/ug_case_settings.rst b/doc/source/user_guide/ug_case_settings.rst index 23e6951fc..eed9c8c5f 100644 --- a/doc/source/user_guide/ug_case_settings.rst +++ b/doc/source/user_guide/ug_case_settings.rst @@ -81,25 +81,34 @@ can be modified as needed. 
"ICE_TARGET", "string", "build target", "set by cice.setup" "ICE_IOTYPE", "string", "I/O format", "set by cice.setup" " ", "netcdf", "serial netCDF" - " ", "pio", "parallel netCDF" " ", "none", "netCDF library is not available" + " ", "pio", "parallel netCDF" "ICE_CLEANBUILD", "true, false", "automatically clean before building", "true" "ICE_CPPDEFS", "user defined preprocessor macros for build", "null" "ICE_QUIETMODE", "true, false", "reduce build output to the screen", "false" "ICE_GRID", "string (see below)", "grid", "set by cice.setup" - " ", "gx3", "3-deg displace-pole (Greenland) global grid", " " - " ", "gx1", "1-deg displace-pole (Greenland) global grid", " " - " ", "tx1", "1-deg tripole global grid", " " " ", "gbox80", "80x80 box", " " " ", "gbox128", "128x128 box", " " - "ICE_NTASKS", "integer", "number of tasks, must be set to 1", "set by cice.setup" - "ICE_NTHRDS", "integer", "number of threads per task, must be set to 1", "set by cice.setup" + " ", "gbox180", "180x180 box", " " + " ", "gx1", "1-deg displace-pole (Greenland) global grid", " " + " ", "gx3", "3-deg displace-pole (Greenland) global grid", " " + " ", "tx1", "1-deg tripole global grid", " " + "ICE_NTASKS", "integer", "number of MPI tasks", "set by cice.setup" + "ICE_NTHRDS", "integer", "number of threads per task", "set by cice.setup" + "ICE_OMPSCHED", "string", "OpenMP SCHEDULE env setting", "static,1" "ICE_TEST", "string", "test setting if using a test", "set by cice.setup" "ICE_TESTNAME", "string", "test name if using a test", "set by cice.setup" - "ICE_BASELINE", "string", "baseline directory name, associated with cice.setup -bdir ", "set by cice.setup" + "ICE_TESTID", "string", "test name testid", "set by cice.setup" + "ICE_BASELINE", "string", "baseline directory name, associated with cice.setup --bdir ", "set by cice.setup" "ICE_BASEGEN", "string", "baseline directory name for regression generation, associated with cice.setup -bgen ", "set by cice.setup" "ICE_BASECOM", "string", "baseline directory name for regression comparison, associated with cice.setup -bcmp ", "set by cice.setup" - "ICE_BFBCOMP", "string", "location of case for comparison, associated with cice.setup -td", "set by cice.setup" + "ICE_BFBCOMP", "string", "location of case for comparison, associated with cice.setup --bcmp", "set by cice.setup" + "ICE_BFBTYPE", "string", "type and files used in BFBCOMP", "restart" + " ", "log", "log file comparison for bit for bit", " " + " ", "logrest", "log and restart files for bit for bit", " " + " ", "qcchk", "QC test for same climate", " " + " ", "qcchkf", "QC test for different climate", " " + " ", "restart", "restart files for bit for bit", " " "ICE_SPVAL", "string", "special value for cice.settings strings", "set by cice.setup" "ICE_RUNLENGTH", "integer (see below)", "batch run length default", "set by cice.setup" " ", "-1", "15 minutes (default)", " " @@ -111,6 +120,7 @@ can be modified as needed. 
"ICE_ACCOUNT", "string", "batch account number", "set by cice.setup, .cice_proj or by default" "ICE_QUEUE", "string", "batch queue name", "set by cice.setup or by default" "ICE_THREADED", "true, false", "force threading in compile, will always compile threaded if ICE_NTHRDS :math:`> 1`", "false" + "ICE_COMMDIR", "mpi, serial", "specify infrastructure comm version", "set by ICE_NTASKS" "ICE_BLDDEBUG", "true, false", "turn on compile debug flags", "false" "ICE_COVERAGE", "true, false", "turn on code coverage flags", "false" @@ -214,6 +224,7 @@ setup_nml "``runtype``", "``continue``", "restart using ``pointer_file``", "``initial``" "", "``initial``", "start from ``ice_ic``", "" "``sec_init``", "integer", "the initial second if not using restart", "0" + "``timer_stats``", "logical", "controls extra timer output", "``.false.``" "``use_leap_years``", "logical", "include leap days", "``.false.``" "``use_restart_time``", "logical", "set initial date using restart file on initial runtype only", "``.false.``" "``version_name``", "string", "model version", "'unknown_version_name'" diff --git a/doc/source/user_guide/ug_implementation.rst b/doc/source/user_guide/ug_implementation.rst index 650d3d6c7..a74e13ecf 100644 --- a/doc/source/user_guide/ug_implementation.rst +++ b/doc/source/user_guide/ug_implementation.rst @@ -169,7 +169,8 @@ and chooses a block size ``block_size_x`` :math:`\times`\ ``block_size_y``, and ``distribution_type`` in **ice\_in**. That information is used to determine how the blocks are distributed across the processors, and how the processors are -distributed across the grid domain. Recommended combinations of these +distributed across the grid domain. The model is parallelized over blocks +for both MPI and OpenMP. Some suggested combinations for these parameters for best performance are given in Section :ref:`performance`. The script **cice.setup** computes some default decompositions and layouts but the user can overwrite the defaults by manually changing the values in @@ -384,7 +385,8 @@ The user specifies the total number of tasks and threads in **cice.settings** and the block size and decompostion in the namelist file. The main trades offs are the relative efficiency of large square blocks versus model internal load balance -as CICE computation cost is very small for ice-free blocks. +as CICE computation cost is very small for ice-free blocks. The code +is parallelized over blocks for both MPI and OpenMP. Smaller, more numerous blocks provides an opportunity for better load balance by allocating each processor both ice-covered and ice-free blocks. But smaller, more numerous blocks becomes @@ -395,6 +397,18 @@ volume-to-surface ratio important for communication cost. Often 3 to 8 blocks per processor provide the decompositions flexiblity to create reasonable load balance configurations. +Like MPI, load balance +of blocks across threads is important for efficient performance. Most of the OpenMP +threading is implemented with ``SCHEDULE(runtime)``, so the OMP_SCHEDULE env +variable can be used to set the OpenMPI schedule. The default ``OMP_SCHEDULE`` +setting is defined by the +variable ``ICE_OMPSCHE`` in **cice.settings**. ``OMP_SCHEDULE`` values of "STATIC,1" +and "DYNAMIC,1" are worth testing. The OpenMP implementation in +CICE is constantly under review, but users should validate results and +performance on their machine. CICE should be bit-for-bit with different block sizes, +different decompositions, different MPI task counts, and different OpenMP threads. 
+Finally, we recommend setting the ``OMP_STACKSIZE`` env variable to 32M or greater. + The ``distribution_type`` options allow standard cartesian distributions of blocks, redistribution via a ‘rake’ algorithm for improved load balancing across processors, and redistribution based on space-filling @@ -1056,15 +1070,18 @@ Timers are declared and initialized in **ice\_timers.F90**, and the code to be timed is wrapped with calls to *ice\_timer\_start* and *ice\_timer\_stop*. Finally, *ice\_timer\_print* writes the results to the log file. The optional “stats" argument (true/false) prints -additional statistics. Calling *ice\_timer\_print\_all* prints all of +additional statistics. The "stats" argument can be set by the ``timer_stats`` +namelist variable. Calling *ice\_timer\_print\_all* prints all of the timings at once, rather than having to call each individually. Currently, the timers are set up as in :ref:`timers`. Section :ref:`addtimer` contains instructions for adding timers. The timings provided by these timers are not mutually exclusive. For -example, the column timer (5) includes the timings from 6–10, and -subroutine *bound* (timer 15) is called from many different places in -the code, including the dynamics and advection routines. +example, the Column timer includes the timings from several other +timers, while the Bound timer is called from many different places in +the code, including the dynamics and advection routines. The +Dynamics, Advection, and Column timers do not overlap and represent +most of the overall model work. The timers use *MPI\_WTIME* for parallel runs and the F90 intrinsic *system\_clock* for single-processor runs. @@ -1080,35 +1097,41 @@ The timers use *MPI\_WTIME* for parallel runs and the F90 intrinsic +--------------+-------------+----------------------------------------------------+ | 1 | Total | the entire run | +--------------+-------------+----------------------------------------------------+ - | 2 | Step | total minus initialization and exit | + | 2 | Timeloop | total minus initialization and exit | +--------------+-------------+----------------------------------------------------+ - | 3 | Dynamics | EVP | + | 3 | Dynamics | dynamics | +--------------+-------------+----------------------------------------------------+ | 4 | Advection | horizontal transport | +--------------+-------------+----------------------------------------------------+ | 5 | Column | all vertical (column) processes | +--------------+-------------+----------------------------------------------------+ - | 6 | Thermo | vertical thermodynamics | + | 6 | Thermo | vertical thermodynamics, part of Column timer | + +--------------+-------------+----------------------------------------------------+ + | 7 | Shortwave | SW radiation and albedo, part of Thermo timer | + +--------------+-------------+----------------------------------------------------+ + | 8 | Ridging | mechanical redistribution, part of Column timer | + +--------------+-------------+----------------------------------------------------+ + | 9 | FloeSize | floe size, part of Column timer | +--------------+-------------+----------------------------------------------------+ - | 7 | Shortwave | SW radiation and albedo | + | 10 | Coupling | sending/receiving coupler messages | +--------------+-------------+----------------------------------------------------+ - | 8 | Meltponds | melt ponds | + | 11 | ReadWrite | reading/writing files | +--------------+-------------+----------------------------------------------------+ - | 9 | Ridging | 
mechanical redistribution | + | 12 | Diags | diagnostics (log file) | +--------------+-------------+----------------------------------------------------+ - | 10 | Cat Conv | transport in thickness space | + | 13 | History | history output | +--------------+-------------+----------------------------------------------------+ - | 11 | Coupling | sending/receiving coupler messages | + | 14 | Bound | boundary conditions and subdomain communications | +--------------+-------------+----------------------------------------------------+ - | 12 | ReadWrite | reading/writing files | + | 15 | BGC | biogeochemistry, part of Thermo timer | +--------------+-------------+----------------------------------------------------+ - | 13 | Diags | diagnostics (log file) | + | 16 | Forcing | forcing | +--------------+-------------+----------------------------------------------------+ - | 14 | History | history output | + | 17 | 1d-evp | 1d evp, part of Dynamics timer | +--------------+-------------+----------------------------------------------------+ - | 15 | Bound | boundary conditions and subdomain communications | + | 18 | 2d-evp | 2d evp, part of Dynamics timer | +--------------+-------------+----------------------------------------------------+ - | 16 | BGC | biogeochemistry | + | 19 | UpdState | update state | +--------------+-------------+----------------------------------------------------+ .. _restartfiles: diff --git a/icepack b/icepack index 152bd701e..76ecd418d 160000 --- a/icepack +++ b/icepack @@ -1 +1 @@ -Subproject commit 152bd701e0cf3ec4385e5ce81918ba94e7a791cb +Subproject commit 76ecd418d2efad7e74fe35c4ec85f0830923bda6 From 247dc1dfb98aa8c60da58826b4778b353e4c9bb5 Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Sat, 14 May 2022 09:01:12 -0400 Subject: [PATCH 62/90] fix comment, fix env for orion and hera --- cicecore/drivers/nuopc/cmeps/ice_import_export.F90 | 2 +- configuration/scripts/machines/env.hera_intel | 3 --- configuration/scripts/machines/env.orion_intel | 3 --- 3 files changed, 1 insertion(+), 7 deletions(-) diff --git a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 index 182aa68c5..82bf63fe3 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 @@ -189,7 +189,7 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_bcph', ungridded_lbound=1, ungridded_ubound=3) ! from atm - wet dust deposition fluxes (4 sizes) call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_dstwet', ungridded_lbound=1, ungridded_ubound=4) - ! from - atm dry dust deposition fluxes (4 sizes) + ! 
from atm - dry dust deposition fluxes (4 sizes) call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_dstdry', ungridded_lbound=1, ungridded_ubound=4) do n = 1,fldsToIce_num diff --git a/configuration/scripts/machines/env.hera_intel b/configuration/scripts/machines/env.hera_intel index dea59cddc..6698c0c2c 100755 --- a/configuration/scripts/machines/env.hera_intel +++ b/configuration/scripts/machines/env.hera_intel @@ -15,9 +15,6 @@ module load impi/2018.0.4 module load netcdf/4.7.0 #module list -# May be needed for OpenMP memory -#setenv OMP_STACKSIZE 64M - endif # May be needed for OpenMP memory diff --git a/configuration/scripts/machines/env.orion_intel b/configuration/scripts/machines/env.orion_intel index 60238fc1a..7a8e47f5d 100755 --- a/configuration/scripts/machines/env.orion_intel +++ b/configuration/scripts/machines/env.orion_intel @@ -15,9 +15,6 @@ module load impi/2020.2 module load netcdf/4.7.4 ##module list -# May be needed for OpenMP memory -#setenv OMP_STACKSIZE 64M - endif limit coredumpsize unlimited From dd158e2f23ae197e89da9bfc04b1a40e24ffec1c Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Sat, 14 May 2022 13:59:26 +0000 Subject: [PATCH 63/90] replace save_init with step_prep in CICE_RunMod --- cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 index 6f145ab0e..79066e82a 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 @@ -129,7 +129,7 @@ subroutine ice_step use ice_restoring, only: restore_ice, ice_HaloRestore use ice_step_mod, only: prep_radiation, step_therm1, step_therm2, & update_state, step_dyn_horiz, step_dyn_ridge, step_radiation, & - biogeochemistry, save_init, step_dyn_wave, step_snow + biogeochemistry, step_prep, step_dyn_wave, step_snow use ice_timers, only: ice_timer_start, ice_timer_stop, & timer_diags, timer_column, timer_thermo, timer_bound, & timer_hist, timer_readwrite From ae50efe26061704902ead8b377cc71e0a8a7f27e Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Sun, 22 May 2022 16:30:00 -0600 Subject: [PATCH 64/90] fixes for cgrid repro --- cicecore/cicedynB/dynamics/ice_dyn_evp.F90 | 13 +++++++++---- cicecore/cicedynB/infrastructure/ice_grid.F90 | 13 +++++++++++++ cicecore/drivers/nuopc/cmeps/ice_import_export.F90 | 9 ++++----- 3 files changed, 26 insertions(+), 9 deletions(-) diff --git a/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 b/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 index f18e60802..8da31e5f0 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 @@ -316,12 +316,17 @@ subroutine evp (dt) ! 
convert fields from T to U grid !----------------------------------------------------------------- + call ice_HaloUpdate (tmass, halo_info, & + field_loc_center, field_type_scalar) + call ice_HaloUpdate (aice_init, halo_info, & + field_loc_center, field_type_scalar) + call grid_average_X2Y('F', tmass , 'T' , umass , 'U') call grid_average_X2Y('F', aice_init, 'T' , aiu , 'U') - call grid_average_X2Y('S', uocn , grid_ocn_dynu, uocnU , 'U') - call grid_average_X2Y('S', vocn , grid_ocn_dynv, vocnU , 'U') - call grid_average_X2Y('S', ss_tltx , grid_ocn_dynu, ss_tltxU, 'U') - call grid_average_X2Y('S', ss_tlty , grid_ocn_dynv, ss_tltyU, 'U') + call grid_average_X2Y('F', uocn , grid_ocn_dynu, uocnU , 'U') + call grid_average_X2Y('F', vocn , grid_ocn_dynv, vocnU , 'U') + call grid_average_X2Y('F', ss_tltx , grid_ocn_dynu, ss_tltxU, 'U') + call grid_average_X2Y('F', ss_tlty , grid_ocn_dynv, ss_tltyU, 'U') if (grid_ice == 'CD' .or. grid_ice == 'C') then call grid_average_X2Y('F', tmass , 'T' , emass, 'E') diff --git a/cicecore/cicedynB/infrastructure/ice_grid.F90 b/cicecore/cicedynB/infrastructure/ice_grid.F90 index 1892a396e..814cc354a 100644 --- a/cicecore/cicedynB/infrastructure/ice_grid.F90 +++ b/cicecore/cicedynB/infrastructure/ice_grid.F90 @@ -605,6 +605,19 @@ subroutine init_grid2 !----------------------------------------------------------------- call ice_timer_start(timer_bound) + + call ice_HaloUpdate (tarea, halo_info, & + field_loc_center, field_type_scalar, & + fillValue=c1) + call ice_HaloUpdate (uarea, halo_info, & + field_loc_NEcorner, field_type_scalar, & + fillValue=c1) + call ice_HaloUpdate (tarear, halo_info, & + field_loc_center, field_type_scalar, & + fillValue=c1) + call ice_HaloUpdate (uarear, halo_info, & + field_loc_NEcorner, field_type_scalar, & + fillValue=c1) call ice_HaloUpdate (dxhy, halo_info, & field_loc_center, field_type_vector, & fillValue=c1) diff --git a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 index 82bf63fe3..aed530abe 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 @@ -754,7 +754,6 @@ subroutine ice_import( importState, rc ) ! ocean workx = uocn (i,j,iblk) ! currents, m/s worky = vocn (i,j,iblk) - uocn(i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) & ! rotate to align with model i,j + worky*sin(ANGLET(i,j,iblk)) vocn(i,j,iblk) = worky*cos(ANGLET(i,j,iblk)) & @@ -800,10 +799,10 @@ subroutine ice_import( importState, rc ) if (.not.prescribed_ice) then call t_startf ('cice_imp_t2u') - call ice_HaloUpdate(uocn, halo_info, field_loc_center, field_type_scalar) - call ice_HaloUpdate(vocn, halo_info, field_loc_center, field_type_scalar) - call ice_HaloUpdate(ss_tltx, halo_info, field_loc_center, field_type_scalar) - call ice_HaloUpdate(ss_tlty, halo_info, field_loc_center, field_type_scalar) + call ice_HaloUpdate(uocn, halo_info, field_loc_center, field_type_vector) + call ice_HaloUpdate(vocn, halo_info, field_loc_center, field_type_vector) + call ice_HaloUpdate(ss_tltx, halo_info, field_loc_center, field_type_vector) + call ice_HaloUpdate(ss_tlty, halo_info, field_loc_center, field_type_vector) ! 
tcraig, moved to dynamics for consistency !work = uocn !call grid_average_X2Y('F',work,'T',uocn,'U') From cbc6046dc7397c8c49abc2d23394615efd683777 Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Mon, 23 May 2022 19:07:39 -0600 Subject: [PATCH 65/90] remove added haloupdates * baselines pass with these extra halo updates removed --- cicecore/cicedynB/dynamics/ice_dyn_evp.F90 | 5 ----- 1 file changed, 5 deletions(-) diff --git a/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 b/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 index 8da31e5f0..5436044da 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 @@ -316,11 +316,6 @@ subroutine evp (dt) ! convert fields from T to U grid !----------------------------------------------------------------- - call ice_HaloUpdate (tmass, halo_info, & - field_loc_center, field_type_scalar) - call ice_HaloUpdate (aice_init, halo_info, & - field_loc_center, field_type_scalar) - call grid_average_X2Y('F', tmass , 'T' , umass , 'U') call grid_average_X2Y('F', aice_init, 'T' , aiu , 'U') call grid_average_X2Y('F', uocn , grid_ocn_dynu, uocnU , 'U') From 2d5487a2a7cd7de5ffca16db4e6b1c133eef5eb3 Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Mon, 13 Jun 2022 11:56:40 -0600 Subject: [PATCH 66/90] change F->S for ocean velocities and tilts --- cicecore/cicedynB/dynamics/ice_dyn_evp.F90 | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 b/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 index b1c219f2e..866775132 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 @@ -318,10 +318,10 @@ subroutine evp (dt) call grid_average_X2Y('F', tmass , 'T' , umass , 'U') call grid_average_X2Y('F', aice_init, 'T' , aiu , 'U') - call grid_average_X2Y('F', uocn , grid_ocn_dynu, uocnU , 'U') - call grid_average_X2Y('F', vocn , grid_ocn_dynv, vocnU , 'U') - call grid_average_X2Y('F', ss_tltx , grid_ocn_dynu, ss_tltxU, 'U') - call grid_average_X2Y('F', ss_tlty , grid_ocn_dynv, ss_tltyU, 'U') + call grid_average_X2Y('S', uocn , grid_ocn_dynu, uocnU , 'U') + call grid_average_X2Y('S', vocn , grid_ocn_dynv, vocnU , 'U') + call grid_average_X2Y('S', ss_tltx , grid_ocn_dynu, ss_tltxU, 'U') + call grid_average_X2Y('S', ss_tlty , grid_ocn_dynv, ss_tltyU, 'U') if (grid_ice == 'CD' .or. 
grid_ice == 'C') then call grid_average_X2Y('F', tmass , 'T' , emass , 'E') From 26498db14bd99c72e92cf34b5f6dfe2fd4eb36f8 Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Mon, 20 Jun 2022 15:27:46 -0600 Subject: [PATCH 67/90] fix debug failure when grid_ice=C * compiling in debug mode using -init=snan,arrays requires initialization of variables --- cicecore/cicedynB/dynamics/ice_dyn_shared.F90 | 41 ++++++++++++++++--- 1 file changed, 36 insertions(+), 5 deletions(-) diff --git a/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 b/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 index 722022fad..367adebcd 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 @@ -197,7 +197,11 @@ subroutine init_dyn (dt) stressm_1, stressm_2, stressm_3, stressm_4, & stress12_1, stress12_2, stress12_3, stress12_4, & stresspT, stressmT, stress12T, & - stresspU, stressmU, stress12U + stresspU, stressmU, stress12U, & + taubxE, taubyE, strairxE, strairyE, strocnxE, strocnyE, & + strtltxE, strtltyE, strintxE, strintyE, iceEmask, fmE, TbE, & + taubxN, taubyN, strairxN, strairyN, strocnxN, strocnyN, & + strtltxN, strtltyN, strintxN, strintyN, iceNmask, fmN, TbN use ice_state, only: uvel, vvel, uvelE, vvelE, uvelN, vvelN, divu, shear use ice_grid, only: ULAT, NLAT, ELAT, tarea @@ -244,10 +248,37 @@ subroutine init_dyn (dt) uvel(i,j,iblk) = c0 ! m/s vvel(i,j,iblk) = c0 ! m/s if (grid_ice == 'CD' .or. grid_ice == 'C') then ! extra velocity variables - uvelE(i,j,iblk) = c0 - vvelE(i,j,iblk) = c0 - uvelN(i,j,iblk) = c0 - vvelN(i,j,iblk) = c0 + uvelE (i,j,iblk) = c0 + vvelE (i,j,iblk) = c0 + taubxE (i,j,iblk) = c0 + taubyE (i,j,iblk) = c0 + strairxE(i,j,iblk) = c0 + strairyE(i,j,iblk) = c0 + strocnxE(i,j,iblk) = c0 + strocnyE(i,j,iblk) = c0 + strtltxE(i,j,iblk) = c0 + strtltyE(i,j,iblk) = c0 + strintxE(i,j,iblk) = c0 + strintyE(i,j,iblk) = c0 + iceEmask(i,j,iblk) = c0 + fmE (i,j,iblk) = c0 + TbE (i,j,iblk) = c0 + + uvelN (i,j,iblk) = c0 + vvelN (i,j,iblk) = c0 + taubxN (i,j,iblk) = c0 + taubyN (i,j,iblk) = c0 + strairxN(i,j,iblk) = c0 + strairyN(i,j,iblk) = c0 + strocnxN(i,j,iblk) = c0 + strocnyN(i,j,iblk) = c0 + strtltxN(i,j,iblk) = c0 + strtltyN(i,j,iblk) = c0 + strintxN(i,j,iblk) = c0 + strintyN(i,j,iblk) = c0 + iceNmask(i,j,iblk) = c0 + fmN (i,j,iblk) = c0 + TbN (i,j,iblk) = c0 endif ! 
strain rates From 9e2dd69b659b93d75d2672edb6dc5f080069e52b Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Wed, 22 Jun 2022 13:07:56 -0600 Subject: [PATCH 68/90] respond to review comments --- cicecore/cicedynB/dynamics/ice_dyn_shared.F90 | 40 ++------ cicecore/cicedynB/general/ice_flux.F90 | 91 ++++++++++++------- 2 files changed, 65 insertions(+), 66 deletions(-) diff --git a/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 b/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 index 367adebcd..2ecc6e5f6 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 @@ -192,16 +192,12 @@ subroutine init_dyn (dt) use ice_blocks, only: nx_block, ny_block use ice_domain, only: nblocks, halo_dynbundle use ice_domain_size, only: max_blocks - use ice_flux, only: rdg_conv, rdg_shear, iceumask, & + use ice_flux, only: rdg_conv, rdg_shear, iceumask, iceemask, icenmask, & stressp_1, stressp_2, stressp_3, stressp_4, & stressm_1, stressm_2, stressm_3, stressm_4, & stress12_1, stress12_2, stress12_3, stress12_4, & stresspT, stressmT, stress12T, & - stresspU, stressmU, stress12U, & - taubxE, taubyE, strairxE, strairyE, strocnxE, strocnyE, & - strtltxE, strtltyE, strintxE, strintyE, iceEmask, fmE, TbE, & - taubxN, taubyN, strairxN, strairyN, strocnxN, strocnyN, & - strtltxN, strtltyN, strintxN, strintyN, iceNmask, fmN, TbN + stresspU, stressmU, stress12U use ice_state, only: uvel, vvel, uvelE, vvelE, uvelN, vvelN, divu, shear use ice_grid, only: ULAT, NLAT, ELAT, tarea @@ -250,35 +246,8 @@ subroutine init_dyn (dt) if (grid_ice == 'CD' .or. grid_ice == 'C') then ! extra velocity variables uvelE (i,j,iblk) = c0 vvelE (i,j,iblk) = c0 - taubxE (i,j,iblk) = c0 - taubyE (i,j,iblk) = c0 - strairxE(i,j,iblk) = c0 - strairyE(i,j,iblk) = c0 - strocnxE(i,j,iblk) = c0 - strocnyE(i,j,iblk) = c0 - strtltxE(i,j,iblk) = c0 - strtltyE(i,j,iblk) = c0 - strintxE(i,j,iblk) = c0 - strintyE(i,j,iblk) = c0 - iceEmask(i,j,iblk) = c0 - fmE (i,j,iblk) = c0 - TbE (i,j,iblk) = c0 - uvelN (i,j,iblk) = c0 vvelN (i,j,iblk) = c0 - taubxN (i,j,iblk) = c0 - taubyN (i,j,iblk) = c0 - strairxN(i,j,iblk) = c0 - strairyN(i,j,iblk) = c0 - strocnxN(i,j,iblk) = c0 - strocnyN(i,j,iblk) = c0 - strtltxN(i,j,iblk) = c0 - strtltyN(i,j,iblk) = c0 - strintxN(i,j,iblk) = c0 - strintyN(i,j,iblk) = c0 - iceNmask(i,j,iblk) = c0 - fmN (i,j,iblk) = c0 - TbN (i,j,iblk) = c0 endif ! strain rates @@ -342,7 +311,10 @@ subroutine init_dyn (dt) ! ice extent mask on velocity points iceumask(i,j,iblk) = .false. - + if (grid_ice == 'CD' .or. grid_ice == 'C') then + iceemask(i,j,iblk) = .false. + icenmask(i,j,iblk) = .false. + end if enddo ! i enddo ! j enddo ! iblk diff --git a/cicecore/cicedynB/general/ice_flux.F90 b/cicecore/cicedynB/general/ice_flux.F90 index 312891f95..18727b63e 100644 --- a/cicecore/cicedynB/general/ice_flux.F90 +++ b/cicecore/cicedynB/general/ice_flux.F90 @@ -39,7 +39,7 @@ module ice_flux real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & - ! in from atmos (if .not.calc_strair) + ! in from atmos (if .not.calc_strair) strax , & ! wind stress components (N/m^2), on grid_atm_dynu stray , & ! on grid_atm_dynv @@ -48,7 +48,7 @@ module ice_flux vocn , & ! ocean current, y-direction (m/s), on grid_ocn_dynv ss_tltx , & ! sea surface slope, x-direction (m/m), on grid_ocn_dynu ss_tlty , & ! sea surface slope, y-direction, on grid_ocn_dynv - hwater , & ! water depth for seabed stress calc (landfast ice) + hwater , & ! water depth for seabed stress calc (landfast ice) ! 
out to atmosphere strairxT, & ! stress on ice by air, x-direction at T points, computed in icepack @@ -103,7 +103,7 @@ module ice_flux dvirdgdt, & ! rate of ice volume ridged (m/s) opening ! rate of opening due to divergence/shear (1/s) - real (kind=dbl_kind), & + real (kind=dbl_kind), & dimension (:,:,:,:), allocatable, public :: & ! ridging diagnostics in categories dardg1ndt, & ! rate of area loss by ridging ice (1/s) @@ -114,7 +114,7 @@ module ice_flux ardgn, & ! fractional area of ridged ice vrdgn, & ! volume of ridged ice araftn, & ! rafting ice area - vraftn, & ! rafting ice volume + vraftn, & ! rafting ice volume aredistn, & ! redistribution function: fraction of new ridge area vredistn ! redistribution function: fraction of new ridge volume @@ -178,7 +178,7 @@ module ice_flux ! NOTE: when in CICE_IN_NEMO mode, these are gridbox mean fields, ! not per ice area. When in standalone mode, these are per ice area. - real (kind=dbl_kind), & + real (kind=dbl_kind), & dimension (:,:,:,:), allocatable, public :: & fsurfn_f , & ! net flux to top surface, excluding fcondtop fcondtopn_f, & ! downward cond flux at top surface (W m-2) @@ -201,7 +201,7 @@ module ice_flux Tf , & ! freezing temperature (C) qdp , & ! deep ocean heat flux (W/m^2), negative upward hmix , & ! mixed layer depth (m) - daice_da ! data assimilation concentration increment rate + daice_da ! data assimilation concentration increment rate ! (concentration s-1)(only used in hadgem drivers) ! out to atmosphere (if calc_Tsfc) @@ -247,8 +247,8 @@ module ice_flux dimension(:,:,:,:), allocatable, public :: & albcnt ! counter for zenith angle - ! out to ocean - ! (Note CICE_IN_NEMO does not use these for coupling. + ! out to ocean + ! (Note CICE_IN_NEMO does not use these for coupling. ! It uses fresh_ai,fsalt_ai,fhocn_ai and fswthru_ai) real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & fpond , & ! fresh water flux to ponds (kg/m^2/s) @@ -280,7 +280,7 @@ module ice_flux snoicen ! snow-ice formation in category n (m) real (kind=dbl_kind), dimension (:,:,:,:), allocatable, public :: & - keffn_top ! effective thermal conductivity of the top ice layer + keffn_top ! effective thermal conductivity of the top ice layer ! on categories (W/m^2/K) ! quantities passed from ocean mixed layer to atmosphere @@ -324,7 +324,7 @@ module ice_flux frz_onset, &! day of year that freezing begins (congel or frazil) frazil_diag ! frazil ice growth diagnostic (m/step-->cm/day) - real (kind=dbl_kind), & + real (kind=dbl_kind), & dimension (:,:,:,:), allocatable, public :: & fsurfn, & ! category fsurf fcondtopn,& ! category fcondtop @@ -339,7 +339,7 @@ module ice_flux ! As above but these remain grid box mean values i.e. they are not ! divided by aice at end of ice_dynamics. These are used in ! CICE_IN_NEMO for coupling and also for generating - ! ice diagnostics and history files as these are more accurate. + ! ice diagnostics and history files as these are more accurate. ! (The others suffer from problem of incorrect values at grid boxes ! that change from an ice free state to an icy state.) @@ -369,12 +369,12 @@ module ice_flux rside , & ! fraction of ice that melts laterally fside , & ! lateral heat flux (W/m^2) fsw , & ! incoming shortwave radiation (W/m^2) - coszen , & ! cosine solar zenith angle, < 0 for sun below horizon + coszen , & ! cosine solar zenith angle, < 0 for sun below horizon rdg_conv, & ! convergence term for ridging (1/s) rdg_shear ! 
shear term for ridging (1/s) real (kind=dbl_kind), dimension(:,:,:,:), allocatable, public :: & - salinz ,& ! initial salinity profile (ppt) + salinz ,& ! initial salinity profile (ppt) Tmltz ! initial melting temperature (^oC) !======================================================================= @@ -383,7 +383,7 @@ module ice_flux !======================================================================= ! -! Allocate space for all variables +! Allocate space for all variables ! subroutine alloc_flux @@ -393,12 +393,12 @@ subroutine alloc_flux allocate( & strax (nx_block,ny_block,max_blocks), & ! wind stress components (N/m^2) - stray (nx_block,ny_block,max_blocks), & ! + stray (nx_block,ny_block,max_blocks), & ! uocn (nx_block,ny_block,max_blocks), & ! ocean current, x-direction (m/s) vocn (nx_block,ny_block,max_blocks), & ! ocean current, y-direction (m/s) ss_tltx (nx_block,ny_block,max_blocks), & ! sea surface slope, x-direction (m/m) ss_tlty (nx_block,ny_block,max_blocks), & ! sea surface slope, y-direction - hwater (nx_block,ny_block,max_blocks), & ! water depth for seabed stress calc (landfast ice) + hwater (nx_block,ny_block,max_blocks), & ! water depth for seabed stress calc (landfast ice) strairxT (nx_block,ny_block,max_blocks), & ! stress on ice by air, x-direction strairyT (nx_block,ny_block,max_blocks), & ! stress on ice by air, y-direction strocnxT (nx_block,ny_block,max_blocks), & ! ice-ocean stress, x-direction @@ -544,7 +544,7 @@ subroutine alloc_flux rside (nx_block,ny_block,max_blocks), & ! fraction of ice that melts laterally fside (nx_block,ny_block,max_blocks), & ! lateral melt rate (W/m^2) fsw (nx_block,ny_block,max_blocks), & ! incoming shortwave radiation (W/m^2) - coszen (nx_block,ny_block,max_blocks), & ! cosine solar zenith angle, < 0 for sun below horizon + coszen (nx_block,ny_block,max_blocks), & ! cosine solar zenith angle, < 0 for sun below horizon rdg_conv (nx_block,ny_block,max_blocks), & ! convergence term for ridging (1/s) rdg_shear (nx_block,ny_block,max_blocks), & ! shear term for ridging (1/s) dardg1ndt (nx_block,ny_block,ncat,max_blocks), & ! rate of area loss by ridging ice (1/s) @@ -555,7 +555,7 @@ subroutine alloc_flux ardgn (nx_block,ny_block,ncat,max_blocks), & ! fractional area of ridged ice vrdgn (nx_block,ny_block,ncat,max_blocks), & ! volume of ridged ice araftn (nx_block,ny_block,ncat,max_blocks), & ! rafting ice area - vraftn (nx_block,ny_block,ncat,max_blocks), & ! rafting ice volume + vraftn (nx_block,ny_block,ncat,max_blocks), & ! rafting ice volume aredistn (nx_block,ny_block,ncat,max_blocks), & ! redistribution function: fraction of new ridge area vredistn (nx_block,ny_block,ncat,max_blocks), & ! redistribution function: fraction of new ridge volume fsurfn_f (nx_block,ny_block,ncat,max_blocks), & ! net flux to top surface, excluding fcondtop @@ -575,7 +575,7 @@ subroutine alloc_flux flatn (nx_block,ny_block,ncat,max_blocks), & ! category latent heat flux albcnt (nx_block,ny_block,max_blocks,max_nstrm), & ! counter for zenith angle snwcnt (nx_block,ny_block,max_blocks,max_nstrm), & ! counter for snow - salinz (nx_block,ny_block,nilyr+1,max_blocks), & ! initial salinity profile (ppt) + salinz (nx_block,ny_block,nilyr+1,max_blocks), & ! initial salinity profile (ppt) Tmltz (nx_block,ny_block,nilyr+1,max_blocks), & ! initial melting temperature (^oC) stat=ierr) if (ierr/=0) call abort_ice('(alloc_flux): Out of memory') @@ -719,7 +719,7 @@ subroutine init_coupler_flux fcondtopn_f(:,:,:,:) = c0 ! 
conductive heat flux (W/m^2) flatn_f (:,:,:,:) = -1.0_dbl_kind ! latent heat flux (W/m^2) fsensn_f (:,:,:,:) = c0 ! sensible heat flux (W/m^2) - endif ! + endif ! fiso_atm (:,:,:,:) = c0 ! isotope deposition rate (kg/m2/s) faero_atm (:,:,:,:) = c0 ! aerosol deposition rate (kg/m2/s) @@ -762,7 +762,7 @@ subroutine init_coupler_flux flat (:,:,:) = c0 fswabs (:,:,:) = c0 fswint_ai(:,:,:) = c0 - flwout (:,:,:) = -stefan_boltzmann*Tffresh**4 + flwout (:,:,:) = -stefan_boltzmann*Tffresh**4 ! in case atm model diagnoses Tsfc from flwout evap (:,:,:) = c0 evaps (:,:,:) = c0 @@ -816,7 +816,7 @@ subroutine init_coupler_flux coszen (:,:,:) = c0 ! Cosine of the zenith angle fsw (:,:,:) = c0 ! shortwave radiation (W/m^2) - scale_factor(:,:,:) = c1 ! shortwave scaling factor + scale_factor(:,:,:) = c1 ! shortwave scaling factor wind (:,:,:) = sqrt(uatm(:,:,:)**2 & + vatm(:,:,:)**2) ! wind speed, (m/s) Cdn_atm(:,:,:) = (vonkar/log(zref/iceruf)) & @@ -986,8 +986,8 @@ subroutine init_history_therm snowfrac (:,:,:) = c0 frazil_diag (:,:,:) = c0 - ! drag coefficients are computed prior to the atmo_boundary call, - ! during the thermodynamics section + ! drag coefficients are computed prior to the atmo_boundary call, + ! during the thermodynamics section Cdn_ocn(:,:,:) = dragio Cdn_atm(:,:,:) = (vonkar/log(zref/iceruf)) & * (vonkar/log(zref/iceruf)) ! atmo drag for RASM @@ -1023,6 +1023,7 @@ end subroutine init_history_therm subroutine init_history_dyn use ice_state, only: aice, vice, trcr, strength + use ice_grid, only: grid_ice logical (kind=log_kind) :: & tr_iage @@ -1074,6 +1075,32 @@ subroutine init_history_dyn aredistn (:,:,:,:) = c0 vredistn (:,:,:,:) = c0 + if (grid_ice == "CD" .or. grid_ice == "C") then + taubxE (:,:,:) = c0 + taubyE (:,:,:) = c0 + strocnxE (:,:,:) = c0 + strocnyE (:,:,:) = c0 + strairxE (:,:,:) = c0 + strairyE (:,:,:) = c0 + strtltxE (:,:,:) = c0 + strtltyE (:,:,:) = c0 + strintxE (:,:,:) = c0 + strintyE (:,:,:) = c0 + fmE (:,:,:) = c0 + TbE (:,:,:) = c0 + taubxN (:,:,:) = c0 + taubyN (:,:,:) = c0 + strocnxN (:,:,:) = c0 + strocnyN (:,:,:) = c0 + strairxN (:,:,:) = c0 + strairyN (:,:,:) = c0 + strtltxN (:,:,:) = c0 + strtltyN (:,:,:) = c0 + strintxN (:,:,:) = c0 + strintyN (:,:,:) = c0 + fmN (:,:,:) = c0 + TbN (:,:,:) = c0 + end if end subroutine init_history_dyn !======================================================================= @@ -1166,8 +1193,8 @@ subroutine scale_fluxes (nx_block, ny_block, & ! zsalinity fluxes real (kind=dbl_kind), dimension(nx_block,ny_block), intent(inout) :: & - fzsal , & ! salt flux to ocean with prognositic salinity (kg/m2/s) - fzsal_g ! Gravity drainage salt flux to ocean (kg/m2/s) + fzsal , & ! salt flux to ocean with prognositic salinity (kg/m2/s) + fzsal_g ! Gravity drainage salt flux to ocean (kg/m2/s) ! isotopes real (kind=dbl_kind), dimension(nx_block,ny_block,icepack_max_iso), & @@ -1221,8 +1248,8 @@ subroutine scale_fluxes (nx_block, ny_block, & alidr (i,j) = alidr (i,j) * ar alvdf (i,j) = alvdf (i,j) * ar alidf (i,j) = alidf (i,j) * ar - fzsal (i,j) = fzsal (i,j) * ar - fzsal_g (i,j) = fzsal_g (i,j) * ar + fzsal (i,j) = fzsal (i,j) * ar + fzsal_g (i,j) = fzsal_g (i,j) * ar flux_bio (i,j,:) = flux_bio (i,j,:) * ar faero_ocn(i,j,:) = faero_ocn(i,j,:) * ar if (present(Qref_iso )) Qref_iso (i,j,:) = Qref_iso (i,j,:) * ar @@ -1251,10 +1278,10 @@ subroutine scale_fluxes (nx_block, ny_block, & fswthru_idf (i,j) = c0 alvdr (i,j) = c0 ! 
zero out albedo where ice is absent alidr (i,j) = c0 - alvdf (i,j) = c0 + alvdf (i,j) = c0 alidf (i,j) = c0 - fzsal (i,j) = c0 - fzsal_g (i,j) = c0 + fzsal (i,j) = c0 + fzsal_g (i,j) = c0 flux_bio (i,j,:) = c0 faero_ocn(i,j,:) = c0 if (present(Qref_iso )) Qref_iso (i,j,:) = c0 @@ -1265,7 +1292,7 @@ subroutine scale_fluxes (nx_block, ny_block, & enddo ! j ! Scale fluxes for history output - if (present(fsurf) .and. present(fcondtop) ) then + if (present(fsurf) .and. present(fcondtop) ) then do j = 1, ny_block do i = 1, nx_block From cc0f89ce2508a0ff100aaff8535f89969458aa55 Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Wed, 22 Jun 2022 14:13:25 -0600 Subject: [PATCH 69/90] remove inserted whitespace for uvelE,N and vvelE,N --- cicecore/cicedynB/dynamics/ice_dyn_shared.F90 | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 b/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 index 2ecc6e5f6..a30cc1b1c 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 @@ -244,10 +244,10 @@ subroutine init_dyn (dt) uvel(i,j,iblk) = c0 ! m/s vvel(i,j,iblk) = c0 ! m/s if (grid_ice == 'CD' .or. grid_ice == 'C') then ! extra velocity variables - uvelE (i,j,iblk) = c0 - vvelE (i,j,iblk) = c0 - uvelN (i,j,iblk) = c0 - vvelN (i,j,iblk) = c0 + uvelE(i,j,iblk) = c0 + vvelE(i,j,iblk) = c0 + uvelN(i,j,iblk) = c0 + vvelN(i,j,iblk) = c0 endif ! strain rates From 968a0edc8e186ce72ac9d3cb932ad6318bcb6a79 Mon Sep 17 00:00:00 2001 From: Denise Worthen Date: Tue, 30 Aug 2022 12:51:07 -0400 Subject: [PATCH 70/90] Add wave-cice coupling; update to Consortium main (#51) * add wave-ice fields * initialize aicen_init, which turns up as NaN in calc of floediam export * add call to icepack_init_wave to initialize wavefreq and dwavefreq * update to latest consortium main (PR 752) --- cice.setup | 51 +- .../cicedynB/analysis/ice_diagnostics.F90 | 155 ++-- .../cicedynB/analysis/ice_diagnostics_bgc.F90 | 328 ++++---- cicecore/cicedynB/analysis/ice_history.F90 | 478 ++++++------ .../cicedynB/analysis/ice_history_bgc.F90 | 720 +++++++++--------- .../cicedynB/analysis/ice_history_drag.F90 | 50 +- .../cicedynB/analysis/ice_history_fsd.F90 | 13 +- .../cicedynB/analysis/ice_history_mechred.F90 | 6 +- .../cicedynB/analysis/ice_history_pond.F90 | 39 +- .../cicedynB/analysis/ice_history_shared.F90 | 38 +- .../cicedynB/analysis/ice_history_snow.F90 | 13 +- cicecore/cicedynB/dynamics/ice_dyn_eap.F90 | 82 +- cicecore/cicedynB/dynamics/ice_dyn_evp.F90 | 127 +-- cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 | 3 +- cicecore/cicedynB/dynamics/ice_dyn_shared.F90 | 522 ++++++------- cicecore/cicedynB/dynamics/ice_dyn_vp.F90 | 197 ++--- .../dynamics/ice_transport_driver.F90 | 18 +- .../cicedynB/dynamics/ice_transport_remap.F90 | 29 +- cicecore/cicedynB/general/ice_flux.F90 | 104 ++- cicecore/cicedynB/general/ice_flux_bgc.F90 | 70 +- cicecore/cicedynB/general/ice_forcing.F90 | 591 +++++++------- cicecore/cicedynB/general/ice_forcing_bgc.F90 | 144 ++-- cicecore/cicedynB/general/ice_init.F90 | 252 +++--- cicecore/cicedynB/general/ice_state.F90 | 14 +- cicecore/cicedynB/general/ice_step_mod.F90 | 124 +-- .../infrastructure/comm/mpi/ice_boundary.F90 | 594 +++++++-------- .../comm/mpi/ice_communicate.F90 | 2 +- .../infrastructure/comm/mpi/ice_exit.F90 | 2 +- .../comm/mpi/ice_gather_scatter.F90 | 57 +- .../comm/mpi/ice_global_reductions.F90 | 47 +- .../infrastructure/comm/mpi/ice_reprosum.F90 | 322 ++++---- 
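As context for the ice_dyn_shared.F90 hunk above: on the CD and C grids, init_dyn zeroes the extra face-centered velocity components alongside the B-grid corner velocities. A minimal Fortran sketch of that pattern, illustrative only — the loop bounds, c0, and grid_ice are assumed to come from the surrounding CICE modules (ice_blocks, ice_constants, ice_grid), and i, j, iblk are assumed declared as integers:

    do iblk = 1, max_blocks
       do j = 1, ny_block
          do i = 1, nx_block
             uvel(i,j,iblk) = c0   ! m/s, B-grid (corner) velocity
             vvel(i,j,iblk) = c0   ! m/s
             if (grid_ice == 'CD' .or. grid_ice == 'C') then
                ! extra velocity variables on E and N faces
                uvelE(i,j,iblk) = c0
                vvelE(i,j,iblk) = c0
                uvelN(i,j,iblk) = c0
                vvelN(i,j,iblk) = c0
             endif
          enddo
       enddo
    enddo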
.../infrastructure/comm/mpi/ice_timers.F90 | 30 +- .../comm/serial/ice_boundary.F90 | 556 +++++++------- .../comm/serial/ice_gather_scatter.F90 | 12 +- .../comm/serial/ice_global_reductions.F90 | 47 +- .../comm/serial/ice_reprosum.F90 | 320 ++++---- .../infrastructure/comm/serial/ice_timers.F90 | 36 +- .../cicedynB/infrastructure/ice_blocks.F90 | 26 +- .../cicedynB/infrastructure/ice_domain.F90 | 55 +- cicecore/cicedynB/infrastructure/ice_grid.F90 | 146 ++-- .../cicedynB/infrastructure/ice_memusage.F90 | 16 +- .../infrastructure/ice_memusage_gptl.c | 12 +- .../infrastructure/ice_read_write.F90 | 198 +++-- .../infrastructure/ice_restart_driver.F90 | 33 +- .../cicedynB/infrastructure/ice_restoring.F90 | 18 +- .../io/io_binary/ice_history_write.F90 | 4 +- .../io/io_binary/ice_restart.F90 | 40 +- .../io/io_netcdf/ice_history_write.F90 | 17 +- .../io/io_netcdf/ice_restart.F90 | 40 +- .../io/io_pio2/ice_history_write.F90 | 32 +- .../infrastructure/io/io_pio2/ice_pio.F90 | 58 +- .../infrastructure/io/io_pio2/ice_restart.F90 | 30 +- cicecore/drivers/direct/hadgem3/CICE.F90 | 18 +- .../drivers/direct/hadgem3/CICE_InitMod.F90 | 46 +- .../drivers/direct/hadgem3/CICE_RunMod.F90 | 64 +- .../direct/nemo_concepts/CICE_InitMod.F90 | 46 +- .../direct/nemo_concepts/CICE_RunMod.F90 | 64 +- cicecore/drivers/mct/cesm1/CICE_InitMod.F90 | 46 +- cicecore/drivers/mct/cesm1/CICE_RunMod.F90 | 52 +- cicecore/drivers/mct/cesm1/CICE_copyright.txt | 16 +- cicecore/drivers/mct/cesm1/ice_comp_esmf.F90 | 88 +-- cicecore/drivers/mct/cesm1/ice_comp_mct.F90 | 148 ++-- .../drivers/mct/cesm1/ice_cpl_indices.F90 | 36 +- .../drivers/mct/cesm1/ice_import_export.F90 | 90 +-- .../drivers/mct/cesm1/ice_prescribed_mod.F90 | 54 +- cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 | 25 +- cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 | 22 +- .../drivers/nuopc/cmeps/CICE_copyright.txt | 16 +- .../drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 22 +- .../drivers/nuopc/cmeps/ice_import_export.F90 | 112 ++- cicecore/drivers/nuopc/cmeps/ice_mesh_mod.F90 | 4 +- .../nuopc/cmeps/ice_prescribed_mod.F90 | 2 +- cicecore/drivers/nuopc/dmi/CICE.F90 | 18 +- cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 | 46 +- cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 | 54 +- cicecore/drivers/nuopc/dmi/cice_cap.info | 76 +- cicecore/drivers/standalone/cice/CICE.F90 | 18 +- .../drivers/standalone/cice/CICE_FinalMod.F90 | 1 - .../drivers/standalone/cice/CICE_InitMod.F90 | 48 +- .../drivers/standalone/cice/CICE_RunMod.F90 | 54 +- .../unittest/gridavgchk/CICE_InitMod.F90 | 48 +- .../unittest/gridavgchk/gridavgchk.F90 | 4 +- cicecore/drivers/unittest/optargs/optargs.F90 | 246 ++++++ .../drivers/unittest/optargs/optargs_subs.F90 | 148 ++++ .../drivers/unittest/sumchk/CICE_InitMod.F90 | 48 +- cicecore/drivers/unittest/sumchk/sumchk.F90 | 4 +- cicecore/shared/ice_arrays_column.F90 | 84 +- cicecore/shared/ice_calendar.F90 | 13 +- cicecore/shared/ice_constants.F90 | 29 +- cicecore/shared/ice_distribution.F90 | 46 +- cicecore/shared/ice_domain_size.F90 | 4 +- cicecore/shared/ice_fileunits.F90 | 8 +- cicecore/shared/ice_init_column.F90 | 299 ++++---- cicecore/shared/ice_restart_column.F90 | 113 +-- cicecore/shared/ice_restart_shared.F90 | 2 +- cicecore/shared/ice_spacecurve.F90 | 81 +- cicecore/version.txt | 2 +- configuration/scripts/Makefile | 8 +- configuration/scripts/cice.batch.csh | 40 +- configuration/scripts/cice.launch.csh | 13 +- configuration/scripts/cice.settings | 1 + configuration/scripts/ice_in | 4 +- .../scripts/machines/Macros.cheyenne_gnu | 2 +- 
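Regarding the wave-ice items in this commit message: the NaN fix presumably amounts to zero-initializing aicen_init before the floe-diameter export reads it, e.g. (a sketch, assuming the array shape used for other category fields in ice_state):

    aicen_init(:,:,:,:) = c0   ! (nx_block,ny_block,ncat,max_blocks)

and the wavefreq/dwavefreq setup is a one-time startup call to icepack_init_wave; the argument list below is an assumption for illustration, not taken from this patch:

    ! hypothetical call signature; nfreq and wave_spectrum_profile
    ! are assumed names, only icepack_init_wave/wavefreq/dwavefreq
    ! are named in the commit message
    call icepack_init_wave(nfreq, wave_spectrum_profile, &
                           wavefreq, dwavefreq)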
.../scripts/machines/Macros.cheyenne_intel | 2 +- .../scripts/machines/env.badger_intel | 4 +- .../scripts/machines/env.cheyenne_gnu | 5 +- .../scripts/machines/env.cheyenne_intel | 5 +- .../scripts/machines/env.cheyenne_pgi | 5 +- .../scripts/machines/env.gpsc3_intel | 2 +- configuration/scripts/machines/env.ppp5_intel | 2 + configuration/scripts/machines/env.ppp6_gnu | 2 +- .../scripts/machines/env.ppp6_gnu-impi | 2 +- configuration/scripts/machines/env.ppp6_intel | 2 + .../scripts/machines/env.ppp6_intel19 | 2 +- .../scripts/machines/env.robert_intel | 2 + .../scripts/machines/env.underhill_intel | 2 + .../scripts/machines/environment.yml | 2 +- .../scripts/options/set_env.memlarge | 2 + configuration/scripts/options/set_env.memmed | 2 + .../scripts/options/set_env.memsmall | 2 + configuration/scripts/options/set_env.optargs | 2 + configuration/scripts/options/set_nml.alt01 | 3 +- configuration/scripts/options/set_nml.alt02 | 1 - configuration/scripts/options/set_nml.alt03 | 1 - configuration/scripts/options/set_nml.alt04 | 1 - configuration/scripts/options/set_nml.alt05 | 1 - configuration/scripts/options/set_nml.alt07 | 2 +- configuration/scripts/options/set_nml.boxadv | 3 +- .../scripts/options/set_nml.boxnodyn | 3 +- .../scripts/options/set_nml.boxrestore | 3 +- configuration/scripts/options/set_nml.gridcd | 3 + configuration/scripts/options/set_nml.qc | 1 + configuration/scripts/setup_machparams.csh | 64 ++ configuration/scripts/tests/QC/cice.t-test.py | 303 +++++--- configuration/scripts/tests/omp_suite.ts | 14 +- configuration/scripts/tests/qctest.yml | 11 + configuration/scripts/tests/unittest_suite.ts | 1 + .../convert_restarts.f90 | 5 + doc/source/cice_index.rst | 24 +- doc/source/conf.py | 4 +- doc/source/science_guide/sg_tracers.rst | 6 +- doc/source/user_guide/ug_case_settings.rst | 14 +- doc/source/user_guide/ug_implementation.rst | 4 +- doc/source/user_guide/ug_running.rst | 6 +- doc/source/user_guide/ug_testing.rst | 13 +- icepack | 2 +- 146 files changed, 5484 insertions(+), 4507 deletions(-) create mode 100644 cicecore/drivers/unittest/optargs/optargs.F90 create mode 100644 cicecore/drivers/unittest/optargs/optargs_subs.F90 create mode 100644 configuration/scripts/options/set_env.memlarge create mode 100644 configuration/scripts/options/set_env.memmed create mode 100644 configuration/scripts/options/set_env.memsmall create mode 100644 configuration/scripts/options/set_env.optargs create mode 100755 configuration/scripts/setup_machparams.csh create mode 100644 configuration/scripts/tests/qctest.yml diff --git a/cice.setup b/cice.setup index 60c56e5c2..586fe3464 100755 --- a/cice.setup +++ b/cice.setup @@ -489,6 +489,7 @@ else #!/bin/csh -f set nonomatch && rm -f ciceexe.* && unset nonomatch +rm -f suite.jobs set dobuild = true set doreuse = true @@ -637,7 +638,7 @@ EOF set bfbcomp_tmp = `echo $line | cut -d' ' -f5` # Append sets from .ts file to the $sets variable - set sets = "$sets_base,$sets_tmp" + set sets = "$sets_tmp,$sets_base" # Create a new bfbcomp_base variable to store bfbcomp passed to cice.setup # Use bfbcomp_base or bfbcomp_tmp @@ -760,7 +761,7 @@ EOF if (${docase} == 0) then set soptions = "" # Create sorted array and remove duplicates and "none" - set setsarray = `echo ${sets} | sed 's/,/ /g' | fmt -1 | sort -u` + set setsarray = `echo ${sets_tmp} | sed 's/,/ /g' | fmt -1 | sort -u` if ("${setsarray}" != "") then foreach field (${setsarray}) if (${field} != "none") then @@ -768,6 +769,15 @@ EOF endif end endif + # Add options from command line, sort and 
remove duplicates + set soptions_base = "" + set setsarray_base = `echo ${sets_base} | sed 's/,/ /g' | fmt -1 | sort -u` + if ("${setsarray_base}" != "") then + foreach field (${setsarray_base}) + set soptions = ${soptions}"_"${field} + set soptions_base = ${soptions_base}"_"${field} + end + endif # soptions starts with _ set testname_noid = "${machcomp}_${test}_${grid}_${pesx}${soptions}" set testname_base = "${machcomp}_${test}_${grid}_${pesx}${soptions}.${testid}" @@ -776,26 +786,8 @@ EOF if (${dosuite} == 1) then # Add -s flags in cice.setup to bfbcomp name - # Parse bfbcomp test_grid_pes and sets - # Add sets_base and sort unique - # Create fbfbcomp string that should be consistent with base casename - set bfbcomp_regex="\(.*_[0-9x]*\)_\(.*\)" - set bfbcomp_test_grid_pes=`echo ${bfbcomp} | sed "s/${bfbcomp_regex}/\1/"` - set bfbcomp_sets=`echo ${bfbcomp} | sed "s/${bfbcomp_regex}/\2/" | sed 's/_/,/g' ` - set bfbcomp_sets="${bfbcomp_sets},${sets_base}" - set bfbcomp_soptions = "" - # Create sorted array and remove duplicates and "none" - set bfbcomp_setsarray = `echo ${bfbcomp_sets} | sed 's/,/ /g' | fmt -1 | sort -u` - if ("${bfbcomp_setsarray}" != "") then - foreach field (${bfbcomp_setsarray}) - if (${field} != "none") then - set bfbcomp_soptions = ${bfbcomp_soptions}"_"${field} - endif - end - endif - set fbfbcomp = ${spval} if ($bfbcomp != ${spval}) then - set fbfbcomp = ${machcomp}_${bfbcomp_test_grid_pes}${bfbcomp_soptions} + set fbfbcomp = ${machcomp}_${bfbcomp}${soptions_base} endif endif endif @@ -1179,21 +1171,26 @@ echo "-------test--------------" echo "${testname_base}" cd ${testname_base} source ./cice.settings +set bldstat = 0 if (\${dobuild} == true) then if (\${doreuse} == true) then set ciceexe = "../ciceexe.\${ICE_TARGET}.\${ICE_ENVNAME}.\${ICE_COMMDIR}.\${ICE_BLDDEBUG}.\${ICE_THREADED}.\${ICE_IOTYPE}" ./cice.build --exe \${ciceexe} + set bldstat = \${status} if !(-e \${ciceexe}) cp -p \${ICE_RUNDIR}/cice \${ciceexe} else ./cice.build + set bldstat = \${status} endif endif -if (\${dosubmit} == true) then - set jobid = \`./cice.submit\` - echo "\$jobid" - echo "\$jobid \${ICE_TESTNAME} " >> ../suite.jobs -else if (\${dorun} == true) then - ./cice.test +if (\$bldstat == 0) then + if (\${dosubmit} == true) then + set jobid = \`./cice.submit\` + echo "\$jobid" + echo "\$jobid \${ICE_TESTNAME} " >> ../suite.jobs + else if (\${dorun} == true) then + ./cice.test + endif endif cd .. EOF diff --git a/cicecore/cicedynB/analysis/ice_diagnostics.F90 b/cicecore/cicedynB/analysis/ice_diagnostics.F90 index fb9fc5f03..83eb840d6 100644 --- a/cicecore/cicedynB/analysis/ice_diagnostics.F90 +++ b/cicecore/cicedynB/analysis/ice_diagnostics.F90 @@ -11,6 +11,7 @@ module ice_diagnostics use ice_kinds_mod + use ice_blocks, only: nx_block, ny_block use ice_communicate, only: my_task, master_task use ice_constants, only: c0, c1 use ice_calendar, only: istep1 @@ -52,7 +53,7 @@ module ice_diagnostics real (kind=dbl_kind), parameter :: & umax_stab = 1.0_dbl_kind , & ! ice speed threshold for instability (m/s) aice_extmin = 0.15_dbl_kind ! min aice value for ice extent calc - + real (kind=dbl_kind), dimension(npnt), public :: & latpnt , & ! latitude of diagnostic points lonpnt ! 
longitude of diagnostic points @@ -112,7 +113,6 @@ module ice_diagnostics subroutine runtime_diags (dt) use ice_arrays_column, only: floe_rad_c - use ice_blocks, only: nx_block, ny_block use ice_broadcast, only: broadcast_scalar use ice_constants, only: c1, c1000, c2, p001, p5, & field_loc_center, m2_to_km2 @@ -163,7 +163,7 @@ subroutine runtime_diags (dt) etotn, mtotn, micen, msnwn, pmaxn, ketotn, & etots, mtots, mices, msnws, pmaxs, ketots, & urmsn, albtotn, arean_alb, mpndn, ptotn, spondn, & - urmss, albtots, areas_alb, mpnds, ptots, sponds + urmss, albtots, areas_alb, mpnds, ptots, sponds ! hemispheric flux quantities real (kind=dbl_kind) :: & @@ -191,7 +191,7 @@ subroutine runtime_diags (dt) ! fields at diagnostic points real (kind=dbl_kind), dimension(npnt) :: & - paice, pTair, pQa, pfsnow, pfrain, pfsw, pflw, & + paice, pTair, pQa, pfsnow, pfrain, pfsw, pflw, & pTsfc, pevap, pfswabs, pflwout, pflat, pfsens, & pfsurf, pfcondtop, psst, psss, pTf, hiavg, hsavg, hbravg, & pfhocn, psalt, fsdavg, & @@ -202,7 +202,7 @@ subroutine runtime_diags (dt) work1, work2 real (kind=dbl_kind), parameter :: & - maxval_spval = -0.9_dbl_kind*HUGE(0.0_dbl_kind) ! spval to detect + maxval_spval = -0.9_dbl_kind*HUGE(0.0_dbl_kind) ! spval to detect ! undefined values returned from global_maxval. if global_maxval ! is applied to a region that does not exist (for instance ! southern hemisphere in box cases), global_maxval @@ -290,7 +290,7 @@ subroutine runtime_diags (dt) do n = 1, ncat work1(i,j,iblk) = work1(i,j,iblk) & + aicen(i,j,n,iblk) & - * trcrn(i,j,nt_apnd,n,iblk) & + * trcrn(i,j,nt_apnd,n,iblk) & * trcrn(i,j,nt_hpnd,n,iblk) enddo enddo @@ -369,8 +369,8 @@ subroutine runtime_diags (dt) enddo enddo !$OMP END PARALLEL DO - - arean_alb = global_sum(aice, distrb_info, field_loc_center, work2) + + arean_alb = global_sum(aice, distrb_info, field_loc_center, work2) albtotn = global_sum_prod(aice, work1, distrb_info, & field_loc_center, work2) @@ -395,7 +395,7 @@ subroutine runtime_diags (dt) enddo !$OMP END PARALLEL DO - areas_alb = global_sum(aice, distrb_info, field_loc_center, work2) + areas_alb = global_sum(aice, distrb_info, field_loc_center, work2) albtots = global_sum_prod(aice, work1, distrb_info, & field_loc_center, work2) @@ -506,7 +506,7 @@ subroutine runtime_diags (dt) if (pmaxs < maxval_spval) pmaxs = c0 pmaxn = pmaxn / c1000 ! convert to kN/m - pmaxs = pmaxs / c1000 + pmaxs = pmaxs / c1000 if (print_global) then @@ -617,14 +617,14 @@ subroutine runtime_diags (dt) enddo !$OMP END PARALLEL DO - else ! fsurf is computed by atmosphere model + else ! fsurf is computed by atmosphere model !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks do j = 1, ny_block do i = 1, nx_block work1(i,j,iblk) = & - (fsurf(i,j,iblk) - flat(i,j,iblk)) & + (fsurf(i,j,iblk) - flat(i,j,iblk)) & * aice(i,j,iblk) enddo enddo @@ -639,7 +639,7 @@ subroutine runtime_diags (dt) field_loc_center, tarean) fhatms = global_sum(work1, distrb_info, & field_loc_center, tareas) - + !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks do j = 1, ny_block @@ -741,7 +741,7 @@ subroutine runtime_diags (dt) ! total ice, snow and pond mass mtotn = micen + msnwn + mpndn mtots = mices + msnws + mpnds - + ! mass change since beginning of time step delmin = mtotn - totmn delmis = mtots - totms @@ -760,14 +760,14 @@ subroutine runtime_diags (dt) fluxs = c0 if( arean > c0) then ! water associated with frazil ice included in fresh - fluxn = rnn + snn + evpn - sfreshn + fluxn = rnn + snn + evpn - sfreshn if (.not. 
update_ocn_f) then fluxn = fluxn + frzn endif endif if( areas > c0) then ! water associated with frazil ice included in fresh - fluxs = rns + sns + evps - sfreshs + fluxs = rns + sns + evps - sfreshs if (.not. update_ocn_f) then fluxs = fluxs + frzs endif @@ -933,7 +933,7 @@ subroutine runtime_diags (dt) pfsw(n) = fsw(i,j,iblk) ! shortwave radiation pflw(n) = flw(i,j,iblk) ! longwave radiation paice(n) = aice(i,j,iblk) ! ice area - + fsdavg(n) = c0 ! avg floe effective radius hiavg(n) = c0 ! avg snow/ice thickness hsavg(n) = c0 @@ -998,7 +998,7 @@ subroutine runtime_diags (dt) pcongel(n) = congel(i,j,iblk) ! congelation ice pdhi(n) = vice(i,j,iblk) - pdhi(n) ! ice thickness change pdhs(n) = vsno(i,j,iblk) - pdhs(n) ! snow thickness change - pde(n) =-(work1(i,j,iblk)- pde(n))/dt ! ice/snow energy change + pde(n) =-(work1(i,j,iblk)- pde(n))/dt ! ice/snow energy change psst(n) = sst(i,j,iblk) ! sea surface temperature psss(n) = sss(i,j,iblk) ! sea surface salinity pTf(n) = Tf(i,j,iblk) ! freezing temperature @@ -1045,7 +1045,7 @@ subroutine runtime_diags (dt) call broadcast_scalar(psss (n), pmloc(n)) call broadcast_scalar(pTf (n), pmloc(n)) call broadcast_scalar(pfhocn (n), pmloc(n)) - + enddo ! npnt endif ! print_points @@ -1093,7 +1093,7 @@ subroutine runtime_diags (dt) write(nu_diag,901) 'arwt snw mass (kg) = ',msnwn,msnws if (tr_pond_topo) & write(nu_diag,901) 'arwt pnd mass (kg) = ',mpndn,mpnds - + write(nu_diag,901) 'arwt tot mass (kg) = ',mtotn,mtots write(nu_diag,901) 'arwt tot mass chng(kg) = ',delmin,delmis write(nu_diag,901) 'arwt water flux = ',fluxn,fluxs @@ -1249,9 +1249,6 @@ subroutine runtime_diags (dt) endif ! print_points endif ! my_task = master_task - 799 format (27x,a24) - 800 format (a25,2x,f24.17) - 801 format (a25,2x,1pe24.17) 899 format (27x,a24,2x,a24) 900 format (a25,2x,f24.17,2x,f24.17) 901 format (a25,2x,1pe24.17,2x,1pe24.17) @@ -1268,7 +1265,6 @@ end subroutine runtime_diags subroutine init_mass_diags - use ice_blocks, only: nx_block, ny_block use ice_constants, only: field_loc_center use ice_domain, only: distrb_info, nblocks use ice_domain_size, only: n_iso, n_aero, ncat, max_blocks @@ -1387,7 +1383,7 @@ subroutine init_mass_diags do n = 1, ncat work1(i,j,iblk) = work1(i,j,iblk) & + aicen(i,j,n,iblk) & - * trcrn(i,j,nt_apnd,n,iblk) & + * trcrn(i,j,nt_apnd,n,iblk) & * trcrn(i,j,nt_hpnd,n,iblk) enddo enddo @@ -1412,7 +1408,6 @@ end subroutine init_mass_diags subroutine total_energy (work) - use ice_blocks, only: nx_block, ny_block use ice_domain, only: nblocks use ice_domain_size, only: ncat, nilyr, nslyr, max_blocks use ice_grid, only: tmask @@ -1499,7 +1494,6 @@ end subroutine total_energy subroutine total_salt (work) - use ice_blocks, only: nx_block, ny_block use ice_domain, only: nblocks use ice_domain_size, only: ncat, nilyr, max_blocks use ice_grid, only: tmask @@ -1623,7 +1617,7 @@ subroutine init_diags plat(:) = -999._dbl_kind plon(:) = -999._dbl_kind - ! find minimum distance to diagnostic points on this processor + ! find minimum distance to diagnostic points on this processor do n = 1, npnt if (lonpnt(n) > c180) lonpnt(n) = lonpnt(n) - c360 @@ -1638,7 +1632,7 @@ subroutine init_diags ! 
This is computing closest point, Could add a CRITICAL but it's just initialization !!$XXXOMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,latdis,londis,totdis) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -1665,7 +1659,7 @@ subroutine init_diags endif - ! find global minimum distance to diagnostic points + ! find global minimum distance to diagnostic points mindis_g = global_minval(mindis, distrb_info) ! save indices of minimum-distance grid cell @@ -1708,16 +1702,10 @@ end subroutine init_diags subroutine debug_ice(iblk, plabeld) - use ice_kinds_mod - use ice_calendar, only: istep1 - use ice_communicate, only: my_task - use ice_blocks, only: nx_block, ny_block - character (char_len), intent(in) :: plabeld integer (kind=int_kind), intent(in) :: iblk - ! local - integer (kind=int_kind) :: i, j, m + ! local character(len=*), parameter :: subname='(debug_ice)' if (istep1 >= debug_model_step) then @@ -1757,15 +1745,16 @@ subroutine print_state(plabel,i,j,iblk) use ice_blocks, only: block, get_block use ice_domain, only: blocks_ice use ice_domain_size, only: ncat, nilyr, nslyr, nfsd - use ice_state, only: aice0, aicen, vicen, vsnon, uvel, vvel, & + use ice_grid, only: TLAT, TLON + use ice_state, only: aice, aice0, aicen, vicen, vsnon, uvel, vvel, & uvelE, vvelE, uvelN, vvelN, trcrn use ice_flux, only: uatm, vatm, potT, Tair, Qa, flw, frain, fsnow, & fsens, flat, evap, flwout, swvdr, swvdf, swidr, swidf, rhoa, & - frzmlt, sst, sss, Tf, Tref, Qref, Uref, uocn, vocn, strtltx, strtlty + frzmlt, sst, sss, Tf, Tref, Qref, Uref, uocn, vocn, strtltxU, strtltyU character (len=20), intent(in) :: plabel - integer (kind=int_kind), intent(in) :: & + integer (kind=int_kind), intent(in) :: & i, j , & ! horizontal indices iblk ! 
block index @@ -1799,15 +1788,20 @@ subroutine print_state(plabel,i,j,iblk) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) - write(nu_diag,*) subname,plabel - write(nu_diag,*) 'istep1, my_task, i, j, iblk:', & + write(nu_diag,*) subname,' ',trim(plabel) + write(nu_diag,*) subname,' istep1, my_task, i, j, iblk:', & istep1, my_task, i, j, iblk - write(nu_diag,*) 'Global i and j:', & + write(nu_diag,*) subname,' Global block:', this_block%block_id + write(nu_diag,*) subname,' Global i and j:', & this_block%i_glob(i), & - this_block%j_glob(j) + this_block%j_glob(j) + write (nu_diag,*) subname,' Lat, Lon (degrees):', & + TLAT(i,j,iblk)*rad_to_deg, & + TLON(i,j,iblk)*rad_to_deg write(nu_diag,*) ' ' + write(nu_diag,*) 'aice ', aice(i,j,iblk) write(nu_diag,*) 'aice0', aice0(i,j,iblk) do n = 1, ncat write(nu_diag,*) ' ' @@ -1907,14 +1901,14 @@ subroutine print_state(plabel,i,j,iblk) write(nu_diag,*) ' fsnow = ',fsnow(i,j,iblk) write(nu_diag,*) ' ' write(nu_diag,*) 'ocn states and fluxes' - write(nu_diag,*) ' frzmlt = ',frzmlt (i,j,iblk) - write(nu_diag,*) ' sst = ',sst (i,j,iblk) - write(nu_diag,*) ' sss = ',sss (i,j,iblk) - write(nu_diag,*) ' Tf = ',Tf (i,j,iblk) - write(nu_diag,*) ' uocn = ',uocn (i,j,iblk) - write(nu_diag,*) ' vocn = ',vocn (i,j,iblk) - write(nu_diag,*) ' strtltx = ',strtltx(i,j,iblk) - write(nu_diag,*) ' strtlty = ',strtlty(i,j,iblk) + write(nu_diag,*) ' frzmlt = ',frzmlt (i,j,iblk) + write(nu_diag,*) ' sst = ',sst (i,j,iblk) + write(nu_diag,*) ' sss = ',sss (i,j,iblk) + write(nu_diag,*) ' Tf = ',Tf (i,j,iblk) + write(nu_diag,*) ' uocn = ',uocn (i,j,iblk) + write(nu_diag,*) ' vocn = ',vocn (i,j,iblk) + write(nu_diag,*) ' strtltxU= ',strtltxU(i,j,iblk) + write(nu_diag,*) ' strtltyU= ',strtltyU(i,j,iblk) write(nu_diag,*) ' ' write(nu_diag,*) 'srf states and fluxes' write(nu_diag,*) ' Tref = ',Tref (i,j,iblk) @@ -1944,7 +1938,7 @@ subroutine print_points_state(plabel,ilabel) uvelE, vvelE, uvelE, vvelE, trcrn use ice_flux, only: uatm, vatm, potT, Tair, Qa, flw, frain, fsnow, & fsens, flat, evap, flwout, swvdr, swvdf, swidr, swidf, rhoa, & - frzmlt, sst, sss, Tf, Tref, Qref, Uref, uocn, vocn, strtltx, strtlty + frzmlt, sst, sss, Tf, Tref, Qref, Uref, uocn, vocn, strtltxU, strtltyU character (len=*), intent(in),optional :: plabel integer , intent(in),optional :: ilabel @@ -1977,7 +1971,7 @@ subroutine print_points_state(plabel,ilabel) i = piloc(m) j = pjloc(m) iblk = pbloc(m) - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) if (present(ilabel)) then write(llabel,'(i6,a1,i3,a1)') ilabel,':',m,':' @@ -1995,7 +1989,7 @@ subroutine print_points_state(plabel,ilabel) istep1, my_task, i, j, iblk write(nu_diag,*) trim(llabel),'Global i and j=', & this_block%i_glob(i), & - this_block%j_glob(j) + this_block%j_glob(j) write(nu_diag,*) trim(llabel),'aice0=', aice0(i,j,iblk) do n = 1, ncat @@ -2060,14 +2054,14 @@ subroutine print_points_state(plabel,ilabel) write(nu_diag,*) ' fsnow = ',fsnow(i,j,iblk) write(nu_diag,*) ' ' write(nu_diag,*) 'ocn states and fluxes' - write(nu_diag,*) ' frzmlt = ',frzmlt (i,j,iblk) - write(nu_diag,*) ' sst = ',sst (i,j,iblk) - write(nu_diag,*) ' sss = ',sss (i,j,iblk) - write(nu_diag,*) ' Tf = ',Tf (i,j,iblk) - write(nu_diag,*) ' uocn = ',uocn (i,j,iblk) - write(nu_diag,*) ' vocn = ',vocn (i,j,iblk) - write(nu_diag,*) ' strtltx = ',strtltx(i,j,iblk) - write(nu_diag,*) ' 
strtlty = ',strtlty(i,j,iblk) + write(nu_diag,*) ' frzmlt = ',frzmlt (i,j,iblk) + write(nu_diag,*) ' sst = ',sst (i,j,iblk) + write(nu_diag,*) ' sss = ',sss (i,j,iblk) + write(nu_diag,*) ' Tf = ',Tf (i,j,iblk) + write(nu_diag,*) ' uocn = ',uocn (i,j,iblk) + write(nu_diag,*) ' vocn = ',vocn (i,j,iblk) + write(nu_diag,*) ' strtltxU= ',strtltxU(i,j,iblk) + write(nu_diag,*) ' strtltyU= ',strtltyU(i,j,iblk) write(nu_diag,*) ' ' write(nu_diag,*) 'srf states and fluxes' write(nu_diag,*) ' Tref = ',Tref (i,j,iblk) @@ -2089,20 +2083,18 @@ end subroutine print_points_state ! prints error information prior to aborting - subroutine diagnostic_abort(istop, jstop, iblk, istep1, stop_label) + subroutine diagnostic_abort(istop, jstop, iblk, stop_label) use ice_blocks, only: block, get_block - use ice_communicate, only: my_task use ice_domain, only: blocks_ice use ice_grid, only: TLAT, TLON use ice_state, only: aice integer (kind=int_kind), intent(in) :: & istop, jstop, & ! indices of grid cell where model aborts - iblk , & ! block index - istep1 ! time step number + iblk ! block index - character (char_len), intent(in) :: stop_label + character (len=*), intent(in) :: stop_label ! local variables @@ -2118,20 +2110,17 @@ subroutine diagnostic_abort(istop, jstop, iblk, istep1, stop_label) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - this_block = get_block(blocks_ice(iblk),iblk) - - write (nu_diag,*) 'istep1, my_task, iblk =', & - istep1, my_task, iblk - write (nu_diag,*) 'Global block:', this_block%block_id - if (istop > 0 .and. jstop > 0) & - write (nu_diag,*) 'Global i and j:', & - this_block%i_glob(istop), & - this_block%j_glob(jstop) - write (nu_diag,*) 'Lat, Lon:', & - TLAT(istop,jstop,iblk)*rad_to_deg, & - TLON(istop,jstop,iblk)*rad_to_deg - write (nu_diag,*) 'aice:', & - aice(istop,jstop,iblk) + this_block = get_block(blocks_ice(iblk),iblk) + + call flush_fileunit(nu_diag) + if (istop > 0 .and. jstop > 0) then + call print_state(trim(stop_label),istop,jstop,iblk) + else + write (nu_diag,*) subname,' istep1, my_task, iblk =', & + istep1, my_task, iblk + write (nu_diag,*) subname,' Global block:', this_block%block_id + endif + call flush_fileunit(nu_diag) call abort_ice (subname//'ERROR: '//trim(stop_label)) end subroutine diagnostic_abort diff --git a/cicecore/cicedynB/analysis/ice_diagnostics_bgc.F90 b/cicecore/cicedynB/analysis/ice_diagnostics_bgc.F90 index 74485a5e2..f4528dd5d 100644 --- a/cicecore/cicedynB/analysis/ice_diagnostics_bgc.F90 +++ b/cicecore/cicedynB/analysis/ice_diagnostics_bgc.F90 @@ -41,7 +41,7 @@ module ice_diagnostics_bgc ! Nicole Jeffery, LANL subroutine hbrine_diags - + use ice_arrays_column, only: darcy_V use ice_broadcast, only: broadcast_scalar, broadcast_array use ice_diagnostics, only: npnt, print_points, pmloc, piloc, pjloc, pbloc @@ -84,27 +84,27 @@ subroutine hbrine_diags if (my_task == pmloc(n)) then i = piloc(n) j = pjloc(n) - iblk = pbloc(n) - phinS1(n) = c0 - phinS(n) = c0 - pfbri(n) = trcrn(i,j,nt_fbri,1,iblk) + iblk = pbloc(n) + phinS1(n) = c0 + phinS(n) = c0 + pfbri(n) = trcrn(i,j,nt_fbri,1,iblk) pdarcy_V(n) = darcy_V(i,j,1,iblk) if (aice(i,j,iblk) > c0) & phinS(n) = trcr(i,j,nt_fbri,iblk)*vice(i,j,iblk)/aice(i,j,iblk) if (aicen(i,j,1,iblk)> c0) & phinS1(n) = trcrn(i,j,nt_fbri,1,iblk)*vicen(i,j,1,iblk)/& - aicen(i,j,1,iblk) + aicen(i,j,1,iblk) do k = 1,nilyr pSin1(n,k) = trcrn(i,j,nt_sice+k-1,1,iblk) pSin(n,k) = trcr(i,j,nt_sice+k-1,iblk) enddo endif ! 
my_task = pmloc - - call broadcast_array (pSin (n,:), pmloc(n)) - call broadcast_array (pSin1 (n,:), pmloc(n)) - call broadcast_scalar(pfbri (n), pmloc(n)) - call broadcast_scalar(phinS1 (n), pmloc(n)) - call broadcast_scalar(phinS (n), pmloc(n)) + + call broadcast_array (pSin (n,:), pmloc(n)) + call broadcast_array (pSin1 (n,:), pmloc(n)) + call broadcast_scalar(pfbri (n), pmloc(n)) + call broadcast_scalar(phinS1 (n), pmloc(n)) + call broadcast_scalar(phinS (n), pmloc(n)) call broadcast_scalar(pdarcy_V(n), pmloc(n)) enddo ! npnt endif ! print_points @@ -125,22 +125,22 @@ subroutine hbrine_diags write(nu_diag,*) '------ hbrine ------' write(nu_diag,900) 'hbrine, (m) = ',phinS(1),phinS(2) write(nu_diag,900) 'fbri, cat1 (m) = ',pfbri(1),pfbri(2) - write(nu_diag,900) 'hbrine cat1, (m) = ',phinS1(1),phinS1(2) - write(nu_diag,900) 'darcy_V cat1, (m/s)= ',pdarcy_V(1),pdarcy_V(2) - if (ktherm == 2) then + write(nu_diag,900) 'hbrine cat1, (m) = ',phinS1(1),phinS1(2) + write(nu_diag,900) 'darcy_V cat1, (m/s)= ',pdarcy_V(1),pdarcy_V(2) + if (ktherm == 2) then write(nu_diag,*) ' ' write(nu_diag,*) '------ Thermosaline Salinity ------' write(nu_diag,803) 'Sice1(1) cat1 S (ppt)','Sice1(2) cat1 S' write(nu_diag,*) '---------------------------------------------------' - write(nu_diag,802) ((pSin1(n,k),n=1,2), k = 1,nilyr) + write(nu_diag,802) ((pSin1(n,k),n=1,2), k = 1,nilyr) write(nu_diag,*) ' ' write(nu_diag,803) 'Sice(1) bulk S (ppt) ','Sice(2) bulk S' write(nu_diag,*) '---------------------------------------------------' - write(nu_diag,802) ((pSin(n,k),n=1,2), k = 1,nilyr) + write(nu_diag,802) ((pSin(n,k),n=1,2), k = 1,nilyr) write(nu_diag,*) ' ' endif endif ! print_points - endif ! my_task = master_task + endif ! my_task = master_task 802 format (f24.17,2x,f24.17) 803 format (a25,2x,a25) @@ -198,7 +198,7 @@ subroutine bgc_diags integer (kind=int_kind), dimension(icepack_max_doc) :: & nt_bgc_doc, nlt_bgc_DOC integer (kind=int_kind), dimension(icepack_max_don) :: & - nt_bgc_don, nlt_bgc_DON + nt_bgc_don, nlt_bgc_DON integer (kind=int_kind), dimension(icepack_max_aero) :: & nt_zaero, nlt_zaero, nlt_zaero_sw integer (kind=int_kind), dimension(icepack_max_fe) :: & @@ -211,7 +211,7 @@ subroutine bgc_diags real (kind=dbl_kind), dimension(npnt,icepack_max_don) :: & pDON_ac, pDON_sk real (kind=dbl_kind), dimension(npnt,icepack_max_fe ) :: & - pFed_ac, pFed_sk, pFep_ac, pFep_sk + pFed_ac, pFed_sk, pFep_ac, pFep_sk real (kind=dbl_kind), dimension(npnt,icepack_max_aero) :: & pflux_zaero, pflux_snow_zaero, pflux_atm_zaero, & pflux_atm_zaero_s @@ -226,7 +226,7 @@ subroutine bgc_diags real (kind=dbl_kind), dimension(npnt,2,icepack_max_don) :: & pDONs real (kind=dbl_kind), dimension(npnt,2,icepack_max_fe ) :: & - pFeds, pFeps + pFeds, pFeps real (kind=dbl_kind), dimension(npnt,2,icepack_max_aero) :: & pzaeros real (kind=dbl_kind), dimension(npnt,nblyr+1) :: & @@ -240,10 +240,10 @@ subroutine bgc_diags real (kind=dbl_kind), dimension(npnt,nblyr+1,icepack_max_don) :: & pDON real (kind=dbl_kind), dimension(npnt,nblyr+1,icepack_max_fe ) :: & - pFed, pFep - real (kind=dbl_kind), dimension (nblyr+1) :: & + pFed, pFep + real (kind=dbl_kind), dimension (nblyr+1) :: & zspace - real (kind=dbl_kind), dimension (npnt,nslyr+nilyr+2) :: & + real (kind=dbl_kind), dimension (npnt,nslyr+nilyr+2) :: & pchlsw real (kind=dbl_kind), dimension(npnt,nslyr+nilyr+2,icepack_max_aero) :: & pzaerosw @@ -275,7 +275,7 @@ subroutine bgc_diags zspace(:) = c1/real(nblyr,kind=dbl_kind) zspace(1) = zspace(1)*p5 - zspace(nblyr+1) = 
zspace(nblyr+1)*p5 + zspace(nblyr+1) = zspace(nblyr+1)*p5 klev = 1+nilyr+nslyr !----------------------------------------------------------------- @@ -307,26 +307,26 @@ subroutine bgc_diags pNit_ac(n) = c0 if (tr_bgc_N) then do k = 1,n_algae - pN_ac(n,k) = ocean_bio(i,j,nlt_bgc_N(k),iblk) + pN_ac(n,k) = ocean_bio(i,j,nlt_bgc_N(k),iblk) enddo !n_algae endif !tr_bgc_N if (tr_bgc_C) then do k = 1,n_doc - pDOC_ac(n,k) = ocean_bio(i,j,nlt_bgc_DOC(k),iblk) + pDOC_ac(n,k) = ocean_bio(i,j,nlt_bgc_DOC(k),iblk) enddo !n_algae endif !tr_bgc_N if (tr_bgc_DON) then do k = 1,n_don - pDON_ac(n,k) = ocean_bio(i,j,nlt_bgc_DON(k),iblk) - enddo + pDON_ac(n,k) = ocean_bio(i,j,nlt_bgc_DON(k),iblk) + enddo endif if (tr_bgc_Fe ) then - do k = 1,n_fed - pFed_ac (n,k) = ocean_bio(i,j,nlt_bgc_Fed (k),iblk) - enddo - do k = 1,n_fep - pFep_ac (n,k) = ocean_bio(i,j,nlt_bgc_Fep (k),iblk) - enddo + do k = 1,n_fed + pFed_ac (n,k) = ocean_bio(i,j,nlt_bgc_Fed (k),iblk) + enddo + do k = 1,n_fep + pFep_ac (n,k) = ocean_bio(i,j,nlt_bgc_Fep (k),iblk) + enddo endif if (tr_bgc_Nit) & pNit_ac(n) = ocean_bio(i,j,nlt_bgc_Nit,iblk) ! nit(i,j,iblk) @@ -359,10 +359,10 @@ subroutine bgc_diags pDON_sk(n,:) = c0 pFed_sk(n,:) = c0 pFep_sk(n,:) = c0 - - do k = 1,n_algae + + do k = 1,n_algae pN_sk(n,k) = trcr (i,j,nt_bgc_N(k), iblk) - pflux_N(n,k) = flux_bio(i,j,nlt_bgc_N(k), iblk)*mps_to_cmpdy/c100 + pflux_N(n,k) = flux_bio(i,j,nlt_bgc_N(k), iblk)*mps_to_cmpdy/c100 enddo if (tr_bgc_C) then do k = 1,n_doc @@ -375,27 +375,27 @@ subroutine bgc_diags enddo endif if (tr_bgc_Fe ) then - do k = 1,n_fed + do k = 1,n_fed pFed_sk (n,k)= trcr (i,j,nt_bgc_Fed(k), iblk) enddo - do k = 1,n_fep + do k = 1,n_fep pFep_sk (n,k)= trcr (i,j,nt_bgc_Fep(k), iblk) enddo endif if (tr_bgc_Nit) then - pNit_sk(n) = trcr (i,j, nt_bgc_Nit, iblk) - pflux_NO(n) = flux_bio(i,j,nlt_bgc_Nit, iblk)*mps_to_cmpdy/c100 + pNit_sk(n) = trcr (i,j, nt_bgc_Nit, iblk) + pflux_NO(n) = flux_bio(i,j,nlt_bgc_Nit, iblk)*mps_to_cmpdy/c100 endif if (tr_bgc_Am) then pAm_sk(n) = trcr (i,j, nt_bgc_Am, iblk) - pflux_Am(n) = flux_bio(i,j,nlt_bgc_Am, iblk)*mps_to_cmpdy/c100 + pflux_Am(n) = flux_bio(i,j,nlt_bgc_Am, iblk)*mps_to_cmpdy/c100 endif if (tr_bgc_Sil) then - pSil_sk(n) = trcr (i,j, nt_bgc_Sil, iblk) + pSil_sk(n) = trcr (i,j, nt_bgc_Sil, iblk) endif if (tr_bgc_hum) then - phum_sk(n) = trcr (i,j, nt_bgc_hum, iblk) - pflux_hum(n)= flux_bio(i,j,nlt_bgc_hum, iblk)*mps_to_cmpdy/c100 + phum_sk(n) = trcr (i,j, nt_bgc_hum, iblk) + pflux_hum(n)= flux_bio(i,j,nlt_bgc_hum, iblk)*mps_to_cmpdy/c100 endif if (tr_bgc_DMS) then pDMSPp_sk(n) = trcr (i,j,nt_bgc_DMSPp,iblk) @@ -419,26 +419,26 @@ subroutine bgc_diags pflux_atm_zaero(n,:) = c0 pflux_snow_zaero(n,:) = c0 if (tr_bgc_Nit) then - pflux_NO(n) = flux_bio(i,j,nlt_bgc_Nit,iblk)*mps_to_cmpdy/c100 - pflux_atm_NO(n) = fbio_atmice(i,j,nlt_bgc_Nit,iblk)*mps_to_cmpdy/c100 + pflux_NO(n) = flux_bio(i,j,nlt_bgc_Nit,iblk)*mps_to_cmpdy/c100 + pflux_atm_NO(n) = fbio_atmice(i,j,nlt_bgc_Nit,iblk)*mps_to_cmpdy/c100 pflux_snow_NO(n) = fbio_snoice(i,j,nlt_bgc_Nit,iblk)*mps_to_cmpdy/c100 endif if (tr_bgc_Am) then - pflux_Am(n) = flux_bio(i,j,nlt_bgc_Am,iblk)*mps_to_cmpdy/c100 - pflux_atm_Am(n) = fbio_atmice(i,j,nlt_bgc_Am,iblk)*mps_to_cmpdy/c100 + pflux_Am(n) = flux_bio(i,j,nlt_bgc_Am,iblk)*mps_to_cmpdy/c100 + pflux_atm_Am(n) = fbio_atmice(i,j,nlt_bgc_Am,iblk)*mps_to_cmpdy/c100 pflux_snow_Am(n) = fbio_snoice(i,j,nlt_bgc_Am,iblk)*mps_to_cmpdy/c100 - endif + endif if (tr_bgc_hum) then - pflux_hum(n) = flux_bio(i,j,nlt_bgc_hum,iblk)*mps_to_cmpdy/c100 + pflux_hum(n) = 
flux_bio(i,j,nlt_bgc_hum,iblk)*mps_to_cmpdy/c100 endif if (tr_bgc_N) then do k = 1,n_algae - pflux_N(n,k) = flux_bio(i,j,nlt_bgc_N(k),iblk)*mps_to_cmpdy/c100 + pflux_N(n,k) = flux_bio(i,j,nlt_bgc_N(k),iblk)*mps_to_cmpdy/c100 enddo endif if (tr_zaero) then do k = 1,n_zaero - pflux_zaero(n,k) = flux_bio(i,j,nlt_zaero(k),iblk)*mps_to_cmpdy/c100 + pflux_zaero(n,k) = flux_bio(i,j,nlt_zaero(k),iblk)*mps_to_cmpdy/c100 pflux_atm_zaero_s(n,k)= flux_bio_atm(i,j,nlt_zaero(k),iblk)*mps_to_cmpdy/c100 !*aice pflux_atm_zaero(n,k) = fbio_atmice(i,j,nlt_zaero(k),iblk)*mps_to_cmpdy/c100 pflux_snow_zaero(n,k) = fbio_snoice(i,j,nlt_zaero(k),iblk)*mps_to_cmpdy/c100 @@ -465,35 +465,35 @@ subroutine bgc_diags pPON(n,k) = c0 phum(n,k) = c0 pNO(n,k) = c0 - if (tr_bgc_Nit) pNO(n,k) = trcr(i,j,nt_bgc_Nit+k-1,iblk) - if (tr_bgc_Am) pAm(n,k) = trcr(i,j,nt_bgc_Am+k-1,iblk) + if (tr_bgc_Nit) pNO(n,k) = trcr(i,j,nt_bgc_Nit+k-1,iblk) + if (tr_bgc_Am) pAm(n,k) = trcr(i,j,nt_bgc_Am+k-1,iblk) if (tr_bgc_N) then do nn = 1, n_algae pN(n,k,nn) = trcr(i,j,nt_bgc_N(nn)+k-1,iblk) - enddo - endif + enddo + endif if (tr_bgc_C) then do nn = 1, n_doc pDOC(n,k,nn) = trcr(i,j,nt_bgc_DOC(nn)+k-1,iblk) - enddo - endif + enddo + endif if (tr_bgc_DON) then do nn = 1, n_don pDON(n,k,nn) = trcr(i,j,nt_bgc_DON(nn)+k-1,iblk) - enddo - endif + enddo + endif if (tr_bgc_Fe) then do nn = 1, n_fed pFed(n,k,nn) = trcr(i,j,nt_bgc_Fed(nn)+k-1,iblk) - enddo + enddo do nn = 1, n_fep pFep(n,k,nn) = trcr(i,j,nt_bgc_Fep(nn)+k-1,iblk) - enddo - endif + enddo + endif if (tr_zaero) then do nn = 1, n_zaero pzaero(n,k,nn) = trcr(i,j,nt_zaero(nn)+k-1,iblk) - enddo + enddo endif if (tr_bgc_PON) pPON(n,k) = trcr(i,j,nt_bgc_PON+k-1,iblk) if (tr_bgc_hum) phum(n,k) = trcr(i,j,nt_bgc_hum+k-1,iblk) @@ -515,7 +515,7 @@ subroutine bgc_diags pPONs(n,k) = c0 phums(n,k) = c0 pNOs(n,k) = c0 - if (tr_bgc_Nit) pNOs(n,k) = trcr(i,j,nt_bgc_Nit+nblyr+k,iblk) + if (tr_bgc_Nit) pNOs(n,k) = trcr(i,j,nt_bgc_Nit+nblyr+k,iblk) if (tr_bgc_Am) pAms(n,k) = trcr(i,j,nt_bgc_Am+nblyr+k,iblk) if (tr_bgc_N) then do nn = 1, n_algae @@ -533,10 +533,10 @@ subroutine bgc_diags enddo endif if (tr_bgc_Fe ) then - do nn = 1, n_fed + do nn = 1, n_fed pFeds(n,k,nn) = trcr(i,j,nt_bgc_Fed(nn)+nblyr+k,iblk) enddo - do nn = 1, n_fep + do nn = 1, n_fep pFeps(n,k,nn) = trcr(i,j,nt_bgc_Fep(nn)+nblyr+k,iblk) enddo endif @@ -547,7 +547,7 @@ subroutine bgc_diags endif if (tr_bgc_PON)pPONs(n,k) =trcr(i,j,nt_bgc_PON+nblyr+k,iblk) if (tr_bgc_hum)phums(n,k) =trcr(i,j,nt_bgc_hum+nblyr+k,iblk) - enddo !k + enddo !k endif pchlsw(n,:) = c0 pzaerosw(n,:,:) = c0 @@ -560,17 +560,17 @@ subroutine bgc_diags enddo endif enddo - endif ! dEdd_algae + endif ! dEdd_algae endif ! 
my_task = pmloc - - call broadcast_scalar (pNit_ac (n), pmloc(n)) - call broadcast_scalar (pAm_ac (n), pmloc(n)) - call broadcast_scalar (pSil_ac (n), pmloc(n)) - call broadcast_scalar (phum_ac (n), pmloc(n)) - call broadcast_scalar (pDMSP_ac (n), pmloc(n)) - call broadcast_scalar (pDMS_ac (n), pmloc(n)) - call broadcast_scalar (pflux_NO (n), pmloc(n)) - call broadcast_scalar (pflux_Am (n), pmloc(n)) + + call broadcast_scalar (pNit_ac (n), pmloc(n)) + call broadcast_scalar (pAm_ac (n), pmloc(n)) + call broadcast_scalar (pSil_ac (n), pmloc(n)) + call broadcast_scalar (phum_ac (n), pmloc(n)) + call broadcast_scalar (pDMSP_ac (n), pmloc(n)) + call broadcast_scalar (pDMS_ac (n), pmloc(n)) + call broadcast_scalar (pflux_NO (n), pmloc(n)) + call broadcast_scalar (pflux_Am (n), pmloc(n)) call broadcast_scalar (pflux_hum (n), pmloc(n)) call broadcast_array (pN_ac (n,:), pmloc(n)) call broadcast_array (pflux_N (n,:), pmloc(n)) @@ -578,8 +578,8 @@ subroutine bgc_diags call broadcast_array (pDON_ac (n,:), pmloc(n)) call broadcast_array (pFed_ac (n,:), pmloc(n)) call broadcast_array (pFep_ac (n,:), pmloc(n)) - call broadcast_array (pchlsw (n,:), pmloc(n)) - call broadcast_array (pzaerosw (n,:,:), pmloc(n)) + call broadcast_array (pchlsw (n,:), pmloc(n)) + call broadcast_array (pzaerosw (n,:,:), pmloc(n)) if (skl_bgc) then ! skl_bgc call broadcast_array (pN_sk (n,:), pmloc(n)) call broadcast_array (pDOC_sk (n,:), pmloc(n)) @@ -587,24 +587,24 @@ subroutine bgc_diags call broadcast_array (pFed_sk (n,:), pmloc(n)) call broadcast_array (pFep_sk (n,:), pmloc(n)) - call broadcast_scalar(pNit_sk (n), pmloc(n)) - call broadcast_scalar(pAm_sk (n), pmloc(n)) - call broadcast_scalar(pSil_sk (n), pmloc(n)) - call broadcast_scalar(phum_sk (n), pmloc(n)) - call broadcast_scalar(pDMSPp_sk (n), pmloc(n)) - call broadcast_scalar(pDMSPd_sk (n), pmloc(n)) - call broadcast_scalar(pDMS_sk (n), pmloc(n)) + call broadcast_scalar(pNit_sk (n), pmloc(n)) + call broadcast_scalar(pAm_sk (n), pmloc(n)) + call broadcast_scalar(pSil_sk (n), pmloc(n)) + call broadcast_scalar(phum_sk (n), pmloc(n)) + call broadcast_scalar(pDMSPp_sk (n), pmloc(n)) + call broadcast_scalar(pDMSPd_sk (n), pmloc(n)) + call broadcast_scalar(pDMS_sk (n), pmloc(n)) endif !tr_bgc_sk if (z_tracers) then ! 
z_bgc - call broadcast_array (pN_tot (n,:), pmloc(n)) - call broadcast_array (pflux_zaero (n,:), pmloc(n)) - call broadcast_array (pflux_atm_zaero_s(n,:), pmloc(n)) - call broadcast_array (pflux_atm_zaero (n,:), pmloc(n)) + call broadcast_array (pN_tot (n,:), pmloc(n)) + call broadcast_array (pflux_zaero (n,:), pmloc(n)) + call broadcast_array (pflux_atm_zaero_s(n,:), pmloc(n)) + call broadcast_array (pflux_atm_zaero (n,:), pmloc(n)) call broadcast_array (pflux_snow_zaero (n,:), pmloc(n)) - call broadcast_scalar(pflux_atm_NO (n), pmloc(n)) - call broadcast_scalar(pflux_atm_Am (n), pmloc(n)) - call broadcast_scalar(pflux_snow_NO (n), pmloc(n)) + call broadcast_scalar(pflux_atm_NO (n), pmloc(n)) + call broadcast_scalar(pflux_atm_Am (n), pmloc(n)) + call broadcast_scalar(pflux_snow_NO (n), pmloc(n)) call broadcast_scalar(pflux_snow_Am (n), pmloc(n)) call broadcast_scalar(pgrow_net (n), pmloc(n)) call broadcast_array (pzfswin (n,:), pmloc(n)) @@ -623,12 +623,12 @@ subroutine bgc_diags call broadcast_array (pAms (n,:), pmloc(n)) call broadcast_array (pPONs (n,:), pmloc(n)) call broadcast_array (phums (n,:), pmloc(n)) - call broadcast_array (pNs (n,:,:), pmloc(n)) - call broadcast_array (pDOCs (n,:,:), pmloc(n)) - call broadcast_array (pDONs (n,:,:), pmloc(n)) - call broadcast_array (pFeds (n,:,:), pmloc(n)) - call broadcast_array (pFeps (n,:,:), pmloc(n)) - call broadcast_array (pzaeros (n,:,:), pmloc(n)) + call broadcast_array (pNs (n,:,:), pmloc(n)) + call broadcast_array (pDOCs (n,:,:), pmloc(n)) + call broadcast_array (pDONs (n,:,:), pmloc(n)) + call broadcast_array (pFeds (n,:,:), pmloc(n)) + call broadcast_array (pFeps (n,:,:), pmloc(n)) + call broadcast_array (pzaeros (n,:,:), pmloc(n)) endif ! z_tracers enddo ! npnt endif ! print_points @@ -649,14 +649,14 @@ subroutine bgc_diags if (z_tracers) then write(nu_diag,803) 'zfswin(1) PAR ','zfswin(2) PAR ' write(nu_diag,*) '---------------------------------------------------' - write(nu_diag,802) ((pzfswin(n,k),n=1,2), k = 1,nblyr+1) - write(nu_diag,*) ' ' + write(nu_diag,802) ((pzfswin(n,k),n=1,2), k = 1,nblyr+1) + write(nu_diag,*) ' ' write(nu_diag,803) 'Losses: Zoo(1)(mmol/m^3) ','Zoo(2)' write(nu_diag,803) ' Brine Conc. ',' Brine Conc' write(nu_diag,*) '---------------------------------------------------' - write(nu_diag,802) ((pZoo(n,k),n=1,2), k = 1,nblyr+1) - write(nu_diag,*) ' ' - endif + write(nu_diag,802) ((pZoo(n,k),n=1,2), k = 1,nblyr+1) + write(nu_diag,*) ' ' + endif if (tr_bgc_Nit) then write(nu_diag,*) '---------------------------------------------------' write(nu_diag,*) ' nitrate conc. (mmol/m^3) or flux (mmol/m^2/d)' @@ -669,17 +669,17 @@ subroutine bgc_diags write(nu_diag,900) 'snow-ice flux = ',pflux_snow_NO(1),pflux_snow_NO(2) write(nu_diag,*) ' snow + ice conc' write(nu_diag,803) ' nitrate(1)',' nitrate(2)' - write(nu_diag,802) ((pNOs(n,k),n=1,2), k = 1,2) - write(nu_diag,802) ((pNO(n,k),n=1,2), k = 1,nblyr+1) - write(nu_diag,*) ' ' + write(nu_diag,802) ((pNOs(n,k),n=1,2), k = 1,2) + write(nu_diag,802) ((pNO(n,k),n=1,2), k = 1,nblyr+1) + write(nu_diag,*) ' ' endif endif if (tr_bgc_PON .and. z_tracers) then write(nu_diag,*) '---------------------------------------------------' write(nu_diag,*) ' PON snow + ice conc. 
(mmol/m^3)' write(nu_diag,803) ' PON(1)',' PON(2)' - write(nu_diag,802) ((pPONs(n,k),n=1,2), k = 1,2) - write(nu_diag,802) ((pPON(n,k),n=1,2), k = 1,nblyr+1) + write(nu_diag,802) ((pPONs(n,k),n=1,2), k = 1,2) + write(nu_diag,802) ((pPON(n,k),n=1,2), k = 1,nblyr+1) write(nu_diag,*) ' ' endif if (tr_bgc_hum) then @@ -691,8 +691,8 @@ subroutine bgc_diags write(nu_diag,900) 'Bulk ice conc. = ',phum_sk(1),phum_sk(2) elseif (z_tracers) then write(nu_diag,803) ' hum(1)',' hum(2)' - write(nu_diag,802) ((phums(n,k),n=1,2), k = 1,2) - write(nu_diag,802) ((phum(n,k),n=1,2), k = 1,nblyr+1) + write(nu_diag,802) ((phums(n,k),n=1,2), k = 1,2) + write(nu_diag,802) ((phum(n,k),n=1,2), k = 1,nblyr+1) write(nu_diag,*) ' ' endif endif @@ -708,9 +708,9 @@ subroutine bgc_diags write(nu_diag,900) 'snow-ice flux = ',pflux_snow_Am(1),pflux_snow_Am(2) write(nu_diag,*) ' snow + ice conc.' write(nu_diag,803) ' ammonium(1)',' ammonium (2)' - write(nu_diag,802) ((pAms(n,k),n=1,2), k = 1,2) - write(nu_diag,802) ((pAm(n,k),n=1,2), k = 1,nblyr+1) - write(nu_diag,*) ' ' + write(nu_diag,802) ((pAms(n,k),n=1,2), k = 1,2) + write(nu_diag,802) ((pAm(n,k),n=1,2), k = 1,nblyr+1) + write(nu_diag,*) ' ' endif endif if (tr_bgc_N) then @@ -727,9 +727,9 @@ subroutine bgc_diags write(nu_diag,900) 'Tot ice (mmolN/m^2) = ',pN_tot(1,kk),pN_tot(2,kk) write(nu_diag,*) ' snow + ice conc.' write(nu_diag,803) ' algal N(1)',' algal N(2) ' - write(nu_diag,802) ((pNs(n,k,kk),n=1,2), k = 1,2) - write(nu_diag,802) ((pN(n,k,kk),n=1,2), k = 1,nblyr+1) - write(nu_diag,*) ' ' + write(nu_diag,802) ((pNs(n,k,kk),n=1,2), k = 1,2) + write(nu_diag,802) ((pN(n,k,kk),n=1,2), k = 1,nblyr+1) + write(nu_diag,*) ' ' endif enddo endif @@ -744,9 +744,9 @@ subroutine bgc_diags elseif (z_tracers) then write(nu_diag,*) ' snow + ice conc.' write(nu_diag,803) ' DOC(1)',' DOC(2) ' - write(nu_diag,802) ((pDOCs(n,k,kk),n=1,2), k = 1,2) - write(nu_diag,802) ((pDOC(n,k,kk),n=1,2), k = 1,nblyr+1) - write(nu_diag,*) ' ' + write(nu_diag,802) ((pDOCs(n,k,kk),n=1,2), k = 1,2) + write(nu_diag,802) ((pDOC(n,k,kk),n=1,2), k = 1,nblyr+1) + write(nu_diag,*) ' ' endif enddo endif @@ -761,9 +761,9 @@ subroutine bgc_diags elseif (z_tracers) then write(nu_diag,*) ' snow + ice conc.' write(nu_diag,803) ' DON(1)',' DON(2) ' - write(nu_diag,802) ((pDONs(n,k,kk),n=1,2), k = 1,2) - write(nu_diag,802) ((pDON(n,k,kk),n=1,2), k = 1,nblyr+1) - write(nu_diag,*) ' ' + write(nu_diag,802) ((pDONs(n,k,kk),n=1,2), k = 1,2) + write(nu_diag,802) ((pDON(n,k,kk),n=1,2), k = 1,nblyr+1) + write(nu_diag,*) ' ' endif enddo endif @@ -778,9 +778,9 @@ subroutine bgc_diags elseif (z_tracers) then write(nu_diag,*) ' snow + ice conc.' write(nu_diag,803) ' Fed (1)',' Fed (2) ' - write(nu_diag,802) ((pFeds (n,k,kk),n=1,2), k = 1,2) - write(nu_diag,802) ((pFed (n,k,kk),n=1,2), k = 1,nblyr+1) - write(nu_diag,*) ' ' + write(nu_diag,802) ((pFeds (n,k,kk),n=1,2), k = 1,2) + write(nu_diag,802) ((pFed (n,k,kk),n=1,2), k = 1,nblyr+1) + write(nu_diag,*) ' ' endif enddo do kk = 1,n_fep @@ -793,9 +793,9 @@ subroutine bgc_diags elseif (z_tracers) then write(nu_diag,*) ' snow + ice conc.' 
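! Aside (a sketch, not part of this patch): the two-column point
! diagnostics throughout bgc_diags pair a header line using format
! 803 (two a25 strings) with data lines using format 802 (two f24.17
! reals), printing diagnostic points n=1,2 side by side, e.g.
!    write(nu_diag,803) ' Fed (1)',' Fed (2) '
!    write(nu_diag,802) ((pFed(n,k,kk),n=1,2), k = 1,nblyr+1)
! where 802 format (f24.17,2x,f24.17) and 803 format (a25,2x,a25)
! are the statement labels declared at the end of the subroutine.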
write(nu_diag,803) ' Fep (1)',' Fep (2) ' - write(nu_diag,802) ((pFeps (n,k,kk),n=1,2), k = 1,2) - write(nu_diag,802) ((pFep (n,k,kk),n=1,2), k = 1,nblyr+1) - write(nu_diag,*) ' ' + write(nu_diag,802) ((pFeps (n,k,kk),n=1,2), k = 1,2) + write(nu_diag,802) ((pFep (n,k,kk),n=1,2), k = 1,nblyr+1) + write(nu_diag,*) ' ' endif enddo endif @@ -807,7 +807,7 @@ subroutine bgc_diags if (skl_bgc) then write(nu_diag,900) 'Ice DMSPp = ',pDMSPp_sk(1),pDMSPp_sk(2) write(nu_diag,900) 'Ice DMSPd = ',pDMSPd_sk(1),pDMSPd_sk(2) - write(nu_diag,900) 'Ice DMS = ',pDMS_sk(1),pDMS_sk(2) + write(nu_diag,900) 'Ice DMS = ',pDMS_sk(1),pDMS_sk(2) endif endif if (tr_zaero .and. z_tracers) then @@ -821,8 +821,8 @@ subroutine bgc_diags write(nu_diag,900) 'snow-ice flux*aice = ',pflux_snow_zaero(1,kk),pflux_snow_zaero(2,kk) write(nu_diag,*) ' snow + ice conc.' write(nu_diag,803) ' aerosol(1)',' aerosol(2) ' - write(nu_diag,802) ((pzaeros(n,k,kk),n=1,2), k = 1,2) - write(nu_diag,802) ((pzaero(n,k,kk),n=1,2), k = 1,nblyr+1) + write(nu_diag,802) ((pzaeros(n,k,kk),n=1,2), k = 1,2) + write(nu_diag,802) ((pzaero(n,k,kk),n=1,2), k = 1,nblyr+1) write(nu_diag,*) ' ' enddo endif @@ -830,23 +830,22 @@ subroutine bgc_diags if (tr_zaero) then do kk = 1,n_zaero write(nu_diag,*) '---------------------------------------------------' - write(nu_diag,*) ' Cat 1 aerosol conc. (kg/m^3) on delta-Eddington grid ' - write(nu_diag,802) ((pzaerosw(n,k,kk),n=1,2), k = 1,klev +1) + write(nu_diag,*) ' Cat 1 aerosol conc. (kg/m^3) on delta-Eddington grid ' + write(nu_diag,802) ((pzaerosw(n,k,kk),n=1,2), k = 1,klev +1) enddo endif if (tr_bgc_N) then write(nu_diag,*) '---------------------------------------------------' - write(nu_diag,*) ' Cat 1 chl (mg/m^3) on delta-Eddington grid ' - write(nu_diag,802) ((pchlsw(n,k),n=1,2), k = 1,klev +1) + write(nu_diag,*) ' Cat 1 chl (mg/m^3) on delta-Eddington grid ' + write(nu_diag,802) ((pchlsw(n,k),n=1,2), k = 1,klev +1) endif endif endif ! print_points - endif ! my_task = master_task + endif ! my_task = master_task 802 format (f24.17,2x,f24.17) 803 format (a25,2x,a25) 900 format (a25,2x,f24.17,2x,f24.17) - 902 format (a25,10x,f6.1,1x,f6.1,9x,f6.1,1x,f6.1) 1020 format (a30,2x,i6) ! integer end subroutine bgc_diags @@ -878,8 +877,8 @@ subroutine zsal_diags ! fields at diagnostic points real (kind=dbl_kind), dimension(npnt) :: & phinS, phinS1,& - phbrn,pdh_top1,pdh_bot1, psice_rho, pfzsal, & - pfzsal_g, pdarcy_V1 + phbrn,pdh_top1,pdh_bot1, psice_rho, pfzsal, & + pfzsal_g, pdarcy_V1 ! vertical fields of category 1 at diagnostic points for bgc layer model real (kind=dbl_kind), dimension(npnt,nblyr+2) :: & @@ -923,10 +922,10 @@ subroutine zsal_diags j = pjloc(n) iblk = pbloc(n) - pfzsal(n) = fzsal(i,j,iblk) - pfzsal_g(n) = fzsal_g(i,j,iblk) - phinS(n) = c0 - phinS1(n) = c0 + pfzsal(n) = fzsal(i,j,iblk) + pfzsal_g(n) = fzsal_g(i,j,iblk) + phinS(n) = c0 + phinS1(n) = c0 phbrn(n) = c0 psice_rho(n) = c0 pdh_top1(n) = c0 @@ -948,7 +947,7 @@ subroutine zsal_diags pdh_top1(n) = dhbr_top(i,j,1,iblk) pdh_bot1(n) = dhbr_bot(i,j,1,iblk) pdarcy_V1(n) = darcy_V(i,j,1,iblk) - endif + endif do k = 1, nblyr+1 pbTiz(n,k) = c0 piDin(n,k) = c0 @@ -958,7 +957,7 @@ subroutine zsal_diags enddo if (vice(i,j,iblk) > c0) then pbTiz(n,k) = pbTiz(n,k)/vice(i,j,iblk) - piDin(n,k) = piDin(n,k)/vice(i,j,iblk) + piDin(n,k) = piDin(n,k)/vice(i,j,iblk) endif enddo ! 
k do k = 1, nblyr+2 @@ -974,24 +973,24 @@ subroutine zsal_diags enddo do k = 1,nblyr pSin(n,k) = c0 - pSin1(n,k) = c0 - pSin(n,k)= trcr(i,j,nt_bgc_S+k-1,iblk) + pSin1(n,k) = c0 + pSin(n,k)= trcr(i,j,nt_bgc_S+k-1,iblk) if (aicen(i,j,1,iblk) > c0) pSin1(n,k) = trcrn(i,j,nt_bgc_S+k-1,1,iblk) - enddo + enddo do k = 1,nilyr pSice(n,k) = trcr(i,j,nt_sice+k-1,iblk) enddo endif ! my_task = pmloc - call broadcast_scalar(phinS (n), pmloc(n)) - call broadcast_scalar(phinS1 (n), pmloc(n)) - call broadcast_scalar(phbrn (n), pmloc(n)) - call broadcast_scalar(pdh_top1 (n), pmloc(n)) - call broadcast_scalar(pdh_bot1 (n), pmloc(n)) - call broadcast_scalar(psice_rho(n), pmloc(n)) - call broadcast_scalar(pfzsal_g (n), pmloc(n)) - call broadcast_scalar(pdarcy_V1(n), pmloc(n)) - call broadcast_scalar(pfzsal (n), pmloc(n)) + call broadcast_scalar(phinS (n), pmloc(n)) + call broadcast_scalar(phinS1 (n), pmloc(n)) + call broadcast_scalar(phbrn (n), pmloc(n)) + call broadcast_scalar(pdh_top1 (n), pmloc(n)) + call broadcast_scalar(pdh_bot1 (n), pmloc(n)) + call broadcast_scalar(psice_rho(n), pmloc(n)) + call broadcast_scalar(pfzsal_g (n), pmloc(n)) + call broadcast_scalar(pdarcy_V1(n), pmloc(n)) + call broadcast_scalar(pfzsal (n), pmloc(n)) call broadcast_array (pbTiz (n,:), pmloc(n)) call broadcast_array (piDin (n,:), pmloc(n)) call broadcast_array (pphin (n,:), pmloc(n)) @@ -1050,15 +1049,15 @@ subroutine zsal_diags write(nu_diag,*) ' ' write(nu_diag,803) 'zsal(1) cat 1 ','zsal(2) cat 1 ' write(nu_diag,*) '---------------------------------------------------' - write(nu_diag,802) ((pSin1(n,k),n=1,2), k = 1,nblyr) + write(nu_diag,802) ((pSin1(n,k),n=1,2), k = 1,nblyr) write(nu_diag,*) ' ' write(nu_diag,803) 'zsal(1) Avg S ','zsal(2) Avg S ' write(nu_diag,*) '---------------------------------------------------' - write(nu_diag,802) ((pSin(n,k),n=1,2), k = 1,nblyr) + write(nu_diag,802) ((pSin(n,k),n=1,2), k = 1,nblyr) write(nu_diag,*) ' ' write(nu_diag,803) 'Sice(1) Ice S ','Sice(2) Ice S ' write(nu_diag,*) '---------------------------------------------------' - write(nu_diag,802) ((pSice(n,k),n=1,2), k = 1,nilyr) + write(nu_diag,802) ((pSice(n,k),n=1,2), k = 1,nilyr) write(nu_diag,*) ' ' endif ! print_points @@ -1068,7 +1067,6 @@ subroutine zsal_diags 803 format (a25,2x,a25) 900 format (a25,2x,f24.17,2x,f24.17) 902 format (a25,10x,f6.1,1x,f6.1,9x,f6.1,1x,f6.1) - 903 format (a25,5x,i4,1x,i4,1x,i4,1x,i4,7x,i4,1x,i4,1x,i4,1x,i4) end subroutine zsal_diags diff --git a/cicecore/cicedynB/analysis/ice_history.F90 b/cicecore/cicedynB/analysis/ice_history.F90 index 2fc57044e..caaa56295 100644 --- a/cicecore/cicedynB/analysis/ice_history.F90 +++ b/cicecore/cicedynB/analysis/ice_history.F90 @@ -1,24 +1,24 @@ !======================================================================= ! Driver for core history output ! -! The following variables are currently hard-wired as snapshots +! The following variables are currently hard-wired as snapshots ! (instantaneous rather than time-averages): ! divu, shear, sig1, sig2, sigP, trsig, mlt_onset, frz_onset, hisnap, aisnap ! ! Options for histfreq: '1','h','d','m','y','x', where x means that -! output stream will not be used (recommended for efficiency). -! histfreq_n can be any nonnegative integer, where 0 means that the +! output stream will not be used (recommended for efficiency). +! histfreq_n can be any nonnegative integer, where 0 means that the ! corresponding histfreq frequency will not be used. ! The flags (f_) can be set to '1','h','d','m','y' or 'x', where ! 
n means the field will not be written. To output the same field at -! more than one frequency, for instance monthy and daily, set +! more than one frequency, for instance monthy and daily, set ! f_ = 'md'. ! ! authors Tony Craig and Bruce Briegleb, NCAR ! Elizabeth C. Hunke and William H. Lipscomb, LANL ! C. M. Bitz, UW ! -! 2004 WHL: Block structure added +! 2004 WHL: Block structure added ! 2006 ECH: Accepted some CESM code into mainstream CICE ! Added ice_present, aicen, vicen; removed aice1...10, vice1...1. ! Added histfreq_n and histfreq='h' options, removed histfreq='w' @@ -44,7 +44,7 @@ module ice_history implicit none private public :: init_hist, accum_hist - + !======================================================================= contains @@ -294,7 +294,7 @@ subroutine init_hist (dt) f_yieldstress22 = 'x' endif - ! these must be output at the same frequency because of + ! these must be output at the same frequency because of ! cos(zenith angle) averaging if (f_albice(1:1) /= 'x' .and. f_albsni(1:1) /= 'x') f_albice = f_albsni if (f_albsno(1:1) /= 'x') f_albsno = f_albice @@ -686,7 +686,7 @@ subroutine init_hist (dt) call broadcast_scalar (f_e12, master_task) call broadcast_scalar (f_e22, master_task) call broadcast_scalar (f_s11, master_task) - call broadcast_scalar (f_s12, master_task) + call broadcast_scalar (f_s12, master_task) call broadcast_scalar (f_s22, master_task) call broadcast_scalar (f_yieldstress11, master_task) call broadcast_scalar (f_yieldstress12, master_task) @@ -697,13 +697,13 @@ subroutine init_hist (dt) if (histfreq(ns1) /= 'x') then !!!!! begin example -! call define_hist_field(n_example,"example","m",tstr2D, tcstr, & +! call define_hist_field(n_example,"example","m",tstr2D, tcstr, & ! "example: mean ice thickness", & ! "ice volume per unit grid cell area", c1, c0, & ! ns1, f_example) !!!!! 
end example - call define_hist_field(n_hi,"hi","m",tstr2D, tcstr, & + call define_hist_field(n_hi,"hi","m",tstr2D, tcstr, & "grid cell mean ice thickness", & "ice volume per unit grid cell area", c1, c0, & ns1, f_hi) @@ -742,12 +742,12 @@ subroutine init_hist (dt) "sea ice speed", & "vector magnitude on E grid", c1, c0, & ns1, f_icespdE) - + call define_hist_field(n_icedirE,"icedirE","deg",estr2D, ecstr, & "sea ice direction", & "vector direction - coming from on E grid", c1, c0, & ns1, f_icedirE) - + call define_hist_field(n_uvelN,"uvelN","m/s",nstr2D, ncstr, & "ice velocity (x)", & "positive is x direction on N grid", c1, c0, & @@ -762,12 +762,12 @@ subroutine init_hist (dt) "sea ice speed", & "vector magnitude on N grid", c1, c0, & ns1, f_icespdN) - + call define_hist_field(n_icedirN,"icedirN","deg",nstr2D, ncstr, & "sea ice direction", & "vector direction - coming from on N grid", c1, c0, & ns1, f_icedirN) - + call define_hist_field(n_uvel,"uvel","m/s",ustr2D, ucstr, & "ice velocity (x)", & "positive is x direction on U grid", c1, c0, & @@ -777,22 +777,22 @@ subroutine init_hist (dt) "ice velocity (y)", & "positive is y direction on U grid", c1, c0, & ns1, f_vvel) - + call define_hist_field(n_icespd,"icespd","m/s",ustr2D, ucstr, & "sea ice speed", & "vector magnitude", c1, c0, & ns1, f_icespd) - + call define_hist_field(n_icedir,"icedir","deg",ustr2D, ucstr, & "sea ice direction", & "vector direction - coming from", c1, c0, & ns1, f_icedir) - + call define_hist_field(n_uatm,"uatm","m/s",str2D_gau, cstr_gau, & "atm velocity (x)", & "positive is x direction on U grid", c1, c0, & ns1, f_uatm) - + call define_hist_field(n_vatm,"vatm","m/s",str2D_gav, cstr_gav, & "atm velocity (y)", & "positive is y direction on U grid", c1, c0, & @@ -802,67 +802,67 @@ subroutine init_hist (dt) "atmosphere wind speed", & "vector magnitude", c1, c0, & ns1, f_atmspd) - + call define_hist_field(n_atmdir,"atmdir","deg",str2D_gau, cstr_gau, & "atmosphere wind direction", & "vector direction - coming from", c1, c0, & ns1, f_atmdir) - + call define_hist_field(n_sice,"sice","ppt",tstr2D, tcstr, & "bulk ice salinity", & "none", c1, c0, & ns1, f_sice) - + call define_hist_field(n_fswup,"fswup","W/m^2",tstr2D, tcstr, & "upward solar flux", & "positive upward", c1, c0, & ns1, f_fswup) - + call define_hist_field(n_fswdn,"fswdn","W/m^2",tstr2D, tcstr, & "down solar flux", & "positive downward", c1, c0, & ns1, f_fswdn) - + call define_hist_field(n_flwdn,"flwdn","W/m^2",tstr2D, tcstr, & "down longwave flux", & "positive downward", c1, c0, & ns1, f_flwdn) - + call define_hist_field(n_snow,"snow","cm/day",tstr2D, tcstr, & "snowfall rate (cpl)", & "none", mps_to_cmpdy/rhofresh, c0, & ns1, f_snow) - + call define_hist_field(n_snow_ai,"snow_ai","cm/day",tstr2D, tcstr, & "snowfall rate", & "weighted by ice area", mps_to_cmpdy/rhofresh, c0, & ns1, f_snow_ai) - + call define_hist_field(n_rain,"rain","cm/day",tstr2D, tcstr, & "rainfall rate (cpl)", & "none", mps_to_cmpdy/rhofresh, c0, & ns1, f_rain) - + call define_hist_field(n_rain_ai,"rain_ai","cm/day",tstr2D, tcstr, & "rainfall rate", & "weighted by ice area", mps_to_cmpdy/rhofresh, c0, & ns1, f_rain_ai) - + call define_hist_field(n_sst,"sst","C",tstr2D, tcstr, & "sea surface temperature", & "none", c1, c0, & ns1, f_sst) - + call define_hist_field(n_sss,"sss","ppt",tstr2D, tcstr, & "sea surface salinity", & "none", c1, c0, & ns1, f_sss) - + call define_hist_field(n_uocn,"uocn","m/s",str2D_gou, cstr_gou, & "ocean current (x)", & "positive is x direction on U grid", c1, c0, & ns1, 
f_uocn) - + call define_hist_field(n_vocn,"vocn","m/s",str2D_gov, cstr_gov, & "ocean current (y)", & "positive is y direction on U grid", c1, c0, & @@ -872,17 +872,17 @@ subroutine init_hist (dt) "ocean current speed", & "vector magnitude", c1, c0, & ns1, f_ocnspd) - + call define_hist_field(n_ocndir,"ocndir","deg",str2D_gou, cstr_gou, & "ocean current direction", & "vector direction - going to", c1, c0, & ns1, f_ocndir) - + call define_hist_field(n_frzmlt,"frzmlt","W/m^2",tstr2D, tcstr, & "freeze/melt potential", & "if >0, new ice forms; if <0, ice melts", c1, c0, & ns1, f_frzmlt) - + call define_hist_field(n_fswfac,"scale_factor","1",tstr2D, tcstr, & "shortwave scaling factor", & "ratio of netsw new:old", c1, c0, & @@ -897,22 +897,22 @@ subroutine init_hist (dt) "snow/ice/ocn absorbed solar flux (cpl)", & "positive downward", c1, c0, & ns1, f_fswabs) - + call define_hist_field(n_fswabs_ai,"fswabs_ai","W/m^2",tstr2D, tcstr, & "snow/ice/ocn absorbed solar flux", & "weighted by ice area", c1, c0, & ns1, f_fswabs_ai) - + call define_hist_field(n_albsni,"albsni","%",tstr2D, tcstr, & "snow/ice broad band albedo", & "averaged for coszen>0, weighted by aice", c100, c0, & ns1, f_albsni) - + call define_hist_field(n_alvdr,"alvdr","%",tstr2D, tcstr, & "visible direct albedo", & "scaled (divided) by aice", c100, c0, & ns1, f_alvdr) - + call define_hist_field(n_alidr,"alidr","%",tstr2D, tcstr, & "near IR direct albedo", & "scaled (divided) by aice", c100, c0, & @@ -922,7 +922,7 @@ subroutine init_hist (dt) "visible diffuse albedo", & "scaled (divided) by aice", c100, c0, & ns1, f_alvdf) - + call define_hist_field(n_alidf,"alidf","%",tstr2D, tcstr, & "near IR diffuse albedo", & "scaled (divided) by aice", c100, c0, & @@ -932,7 +932,7 @@ subroutine init_hist (dt) "visible direct albedo", & " ", c100, c0, & ns1, f_alvdr_ai) - + call define_hist_field(n_alidr_ai,"alidr_ai","%",tstr2D, tcstr, & "near IR direct albedo", & " ", c100, c0, & @@ -942,7 +942,7 @@ subroutine init_hist (dt) "visible diffuse albedo", & " ", c100, c0, & ns1, f_alvdf_ai) - + call define_hist_field(n_alidf_ai,"alidf_ai","%",tstr2D, tcstr, & "near IR diffuse albedo", & " ", c100, c0, & @@ -952,17 +952,17 @@ subroutine init_hist (dt) "bare ice albedo", & "averaged for coszen>0, weighted by aice", c100, c0, & ns1, f_albice) - + call define_hist_field(n_albsno,"albsno","%",tstr2D, tcstr, & "snow albedo", & "averaged for coszen>0, weighted by aice", c100, c0, & ns1, f_albsno) - + call define_hist_field(n_albpnd,"albpnd","%",tstr2D, tcstr, & "melt pond albedo", & "averaged for coszen>0, weighted by aice", c100, c0, & ns1, f_albpnd) - + call define_hist_field(n_coszen,"coszen","radian",tstr2D, tcstr, & "cosine of the zenith angle", & "negative below horizon", c1, c0, & @@ -972,188 +972,188 @@ subroutine init_hist (dt) "latent heat flux (cpl)", & "positive downward", c1, c0, & ns1, f_flat) - + call define_hist_field(n_flat_ai,"flat_ai","W/m^2",tstr2D, tcstr, & "latent heat flux", & "weighted by ice area", c1, c0, & ns1, f_flat_ai) - + call define_hist_field(n_fsens,"fsens","W/m^2",tstr2D, tcstr, & "sensible heat flux (cpl)", & "positive downward", c1, c0, & ns1, f_fsens) - + call define_hist_field(n_fsens_ai,"fsens_ai","W/m^2",tstr2D, tcstr, & "sensible heat flux", & "weighted by ice area", c1, c0, & ns1, f_fsens_ai) - + call define_hist_field(n_flwup,"flwup","W/m^2",tstr2D, tcstr, & "upward longwave flux (cpl)", & "positive downward", c1, c0, & ns1, f_flwup) - + call define_hist_field(n_flwup_ai,"flwup_ai","W/m^2",tstr2D, tcstr, & "upward 
longwave flux", & "weighted by ice area", c1, c0, & ns1, f_flwup_ai) - + call define_hist_field(n_evap,"evap","cm/day",tstr2D, tcstr, & "evaporative water flux (cpl)", & "none", mps_to_cmpdy/rhofresh, c0, & ns1, f_evap) - + call define_hist_field(n_evap_ai,"evap_ai","cm/day",tstr2D, tcstr, & "evaporative water flux", & "weighted by ice area", mps_to_cmpdy/rhofresh, c0, & ns1, f_evap_ai) - + call define_hist_field(n_Tair,"Tair","C",tstr2D, tcstr, & "air temperature", & "none", c1, -Tffresh, & ns1, f_Tair) - + call define_hist_field(n_Tref,"Tref","C",tstr2D, tcstr, & "2m reference temperature", & "none", c1, -Tffresh, & ns1, f_Tref) - + call define_hist_field(n_Qref,"Qref","g/kg",tstr2D, tcstr, & "2m reference specific humidity", & "none", kg_to_g, c0, & ns1, f_Qref) - + call define_hist_field(n_congel,"congel","cm/day",tstr2D, tcstr, & "congelation ice growth", & "none", mps_to_cmpdy/dt, c0, & ns1, f_congel) - + call define_hist_field(n_frazil,"frazil","cm/day",tstr2D, tcstr, & "frazil ice growth", & "none", mps_to_cmpdy/dt, c0, & ns1, f_frazil) - + call define_hist_field(n_snoice,"snoice","cm/day",tstr2D, tcstr, & "snow-ice formation", & "none", mps_to_cmpdy/dt, c0, & ns1, f_snoice) - + call define_hist_field(n_dsnow,"dsnow","cm/day",tstr2D, tcstr, & "snow formation", & "none", mps_to_cmpdy/dt, c0, & ns1, f_dsnow) - + call define_hist_field(n_meltt,"meltt","cm/day",tstr2D, tcstr, & "top ice melt", & "none", mps_to_cmpdy/dt, c0, & ns1, f_meltt) - + call define_hist_field(n_melts,"melts","cm/day",tstr2D, tcstr, & "top snow melt", & "none", mps_to_cmpdy/dt, c0, & ns1, f_melts) - + call define_hist_field(n_meltb,"meltb","cm/day",tstr2D, tcstr, & "basal ice melt", & "none", mps_to_cmpdy/dt, c0, & ns1, f_meltb) - + call define_hist_field(n_meltl,"meltl","cm/day",tstr2D, tcstr, & "lateral ice melt", & "none", mps_to_cmpdy/dt, c0, & ns1, f_meltl) - + call define_hist_field(n_fresh,"fresh","cm/day",tstr2D, tcstr, & "freshwtr flx ice to ocn (cpl)", & "if positive, ocean gains fresh water", & mps_to_cmpdy/rhofresh, c0, & ns1, f_fresh) - + call define_hist_field(n_fresh_ai,"fresh_ai","cm/day",tstr2D, tcstr, & "freshwtr flx ice to ocn", & "weighted by ice area", mps_to_cmpdy/rhofresh, c0, & ns1, f_fresh_ai) - + call define_hist_field(n_fsalt,"fsalt","kg/m^2/s",tstr2D, tcstr, & "salt flux ice to ocn (cpl)", & "if positive, ocean gains salt", c1, c0, & ns1, f_fsalt) - + call define_hist_field(n_fsalt_ai,"fsalt_ai","kg/m^2/s",tstr2D, tcstr, & "salt flux ice to ocean", & "weighted by ice area", c1, c0, & ns1, f_fsalt_ai) - + call define_hist_field(n_fbot,"fbot","W/m^2",tstr2D, tcstr, & "heat flux ice to ocean (fbot)", & "if positive, ocean gains heat", c1, c0, & ns1, f_fbot) - + call define_hist_field(n_fhocn,"fhocn","W/m^2",tstr2D, tcstr, & "heat flux ice to ocn (cpl)", & "if positive, ocean gains heat", c1, c0, & ns1, f_fhocn) - + call define_hist_field(n_fhocn_ai,"fhocn_ai","W/m^2",tstr2D, tcstr, & "heat flux ice to ocean (fhocn_ai)", & "weighted by ice area", c1, c0, & ns1, f_fhocn_ai) - + call define_hist_field(n_fswthru,"fswthru","W/m^2",tstr2D, tcstr, & "SW thru ice to ocean (cpl)", & "if positive, ocean gains heat", c1, c0, & ns1, f_fswthru) - + call define_hist_field(n_fswthru_ai,"fswthru_ai","W/m^2",tstr2D, tcstr,& "SW flux thru ice to ocean", & "weighted by ice area", c1, c0, & ns1, f_fswthru_ai) - + call define_hist_field(n_strairx,"strairx","N/m^2",ustr2D, ucstr, & "atm/ice stress (x)", & "positive is x direction on U grid", c1, c0, & ns1, f_strairx) - + call 
define_hist_field(n_strairy,"strairy","N/m^2",ustr2D, ucstr, & "atm/ice stress (y)", & "positive is y direction on U grid", c1, c0, & ns1, f_strairy) - + call define_hist_field(n_strtltx,"strtltx","N/m^2",ustr2D, ucstr, & "sea sfc tilt stress (x)", & "none", c1, c0, & ns1, f_strtltx) - + call define_hist_field(n_strtlty,"strtlty","N/m^2",ustr2D, ucstr, & "sea sfc tilt stress (y)", & "none", c1, c0, & ns1, f_strtlty) - + call define_hist_field(n_strcorx,"strcorx","N/m^2",ustr2D, ucstr, & "coriolis stress (x)", & "positive is x direction on U grid", c1, c0, & ns1, f_strcorx) - + call define_hist_field(n_strcory,"strcory","N/m^2",ustr2D, ucstr, & "coriolis stress (y)", & "positive is y direction on U grid", c1, c0, & ns1, f_strcory) - + call define_hist_field(n_strocnx,"strocnx","N/m^2",ustr2D, ucstr, & "ocean/ice stress (x)", & "positive is x direction on U grid", c1, c0, & ns1, f_strocnx) - + call define_hist_field(n_strocny,"strocny","N/m^2",ustr2D, ucstr, & "ocean/ice stress (y)", & "positive is y direction on U grid", c1, c0, & ns1, f_strocny) - + call define_hist_field(n_strintx,"strintx","N/m^2",ustr2D, ucstr, & "internal ice stress (x)", & "positive is x direction on U grid", c1, c0, & ns1, f_strintx) - + call define_hist_field(n_strinty,"strinty","N/m^2",ustr2D, ucstr, & "internal ice stress (y)", & "positive is y direction on U grid", c1, c0, & @@ -1168,92 +1168,92 @@ subroutine init_hist (dt) "seabed (basal) stress (y)", & "positive is y direction on U grid", c1, c0, & ns1, f_tauby) - + call define_hist_field(n_strairxN,"strairxN","N/m^2",nstr2D, ncstr, & "atm/ice stress (x)", & "positive is x direction on N grid", c1, c0, & ns1, f_strairxN) - + call define_hist_field(n_strairyN,"strairyN","N/m^2",nstr2D, ncstr, & "atm/ice stress (y)", & "positive is y direction on N grid", c1, c0, & ns1, f_strairyN) - + call define_hist_field(n_strairxE,"strairxE","N/m^2",estr2D, ecstr, & "atm/ice stress (x)", & "positive is x direction on E grid", c1, c0, & ns1, f_strairxE) - + call define_hist_field(n_strairyE,"strairyE","N/m^2",estr2D, ecstr, & "atm/ice stress (y)", & "positive is y direction on E grid", c1, c0, & ns1, f_strairyE) - + call define_hist_field(n_strtltxN,"strtltxN","N/m^2",nstr2D, ncstr, & "sea sfc tilt stress (x)", & "positive is x direction on N grid", c1, c0, & ns1, f_strtltxN) - + call define_hist_field(n_strtltyN,"strtltyN","N/m^2",nstr2D, ncstr, & "sea sfc tilt stress (y)", & "positive is y direction on N grid", c1, c0, & ns1, f_strtltyN) - + call define_hist_field(n_strtltxE,"strtltxE","N/m^2",estr2D, ecstr, & "sea sfc tilt stress (x)", & "positive is x direction on E grid", c1, c0, & ns1, f_strtltxE) - + call define_hist_field(n_strtltyE,"strtltyE","N/m^2",estr2D, ecstr, & "sea sfc tilt stress (y)", & "positive is y direction on E grid", c1, c0, & ns1, f_strtltyE) - + call define_hist_field(n_strcorxN,"strcorxN","N/m^2",nstr2D, ncstr, & "coriolis stress (x)", & "positive is x direction on N grid", c1, c0, & ns1, f_strcorxN) - + call define_hist_field(n_strcoryN,"strcoryN","N/m^2",nstr2D, ncstr, & "coriolis stress (y)", & "positive is y direction on N grid", c1, c0, & ns1, f_strcoryN) - + call define_hist_field(n_strcorxE,"strcorxE","N/m^2",estr2D, ecstr, & "coriolis stress (x)", & "positive is x direction on E grid", c1, c0, & ns1, f_strcorxE) - + call define_hist_field(n_strcoryE,"strcoryE","N/m^2",estr2D, ecstr, & "coriolis stress (y)", & "positive is y direction on E grid", c1, c0, & ns1, f_strcoryE) - + call define_hist_field(n_strocnxN,"strocnxN","N/m^2",nstr2D, 
ncstr, & "ocean/ice stress (x)", & "positive is x direction on N grid", c1, c0, & ns1, f_strocnxN) - + call define_hist_field(n_strocnyN,"strocnyN","N/m^2",nstr2D, ncstr, & "ocean/ice stress (y)", & "positive is y direction on N grid", c1, c0, & ns1, f_strocnyN) - + call define_hist_field(n_strocnxE,"strocnxE","N/m^2",estr2D, ecstr, & "ocean/ice stress (x)", & "positive is x direction on E grid", c1, c0, & ns1, f_strocnxE) - + call define_hist_field(n_strocnyE,"strocnyE","N/m^2",estr2D, ecstr, & "ocean/ice stress (y)", & "positive is y direction on E grid", c1, c0, & ns1, f_strocnyE) - + call define_hist_field(n_strintxN,"strintxN","N/m^2",nstr2D, ncstr, & "internal ice stress (x)", & "positive is x direction on N grid", c1, c0, & ns1, f_strintxN) - + call define_hist_field(n_strintyN,"strintyN","N/m^2",nstr2D, ncstr, & "internal ice stress (y)", & "positive is y direction on N grid", c1, c0, & @@ -1263,7 +1263,7 @@ subroutine init_hist (dt) "internal ice stress (x)", & "positive is x direction on E grid", c1, c0, & ns1, f_strintxE) - + call define_hist_field(n_strintyE,"strintyE","N/m^2",estr2D, ecstr, & "internal ice stress (y)", & "positive is y direction on E grid", c1, c0, & @@ -1278,7 +1278,7 @@ subroutine init_hist (dt) "seabed (basal) stress (y)", & "positive is y direction on N grid", c1, c0, & ns1, f_taubyN) - + call define_hist_field(n_taubxE,"taubxE","N/m^2",estr2D, ecstr, & "seabed (basal) stress (x)", & "positive is x direction on E grid", c1, c0, & @@ -1288,22 +1288,22 @@ subroutine init_hist (dt) "seabed (basal) stress (y)", & "positive is y direction on E grid", c1, c0, & ns1, f_taubyE) - + call define_hist_field(n_strength,"strength","N/m",tstr2D, tcstr, & "compressive ice strength", & "none", c1, c0, & ns1, f_strength) - + call define_hist_field(n_divu,"divu","%/day",tstr2D, tcstr, & "strain rate (divergence)", & "none", secday*c100, c0, & ns1, f_divu) - + call define_hist_field(n_shear,"shear","%/day",tstr2D, tcstr, & "strain rate (shear)", & "none", secday*c100, c0, & ns1, f_shear) - + select case (grid_ice) case('B') description = ", on U grid (NE corner values)" @@ -1315,42 +1315,42 @@ subroutine init_hist (dt) "norm. principal stress 1", & "sig1 is instantaneous" // trim(description), c1, c0, & ns1, f_sig1) - + call define_hist_field(n_sig2,"sig2","1",ustr2D, ucstr, & "norm. 
principal stress 2", & "sig2 is instantaneous" // trim(description), c1, c0, & ns1, f_sig2) - + call define_hist_field(n_sigP,"sigP","1",ustr2D, ucstr, & "ice pressure", & "sigP is instantaneous" // trim(description), c1, c0, & ns1, f_sigP) - + call define_hist_field(n_dvidtt,"dvidtt","cm/day",tstr2D, tcstr, & "volume tendency thermo", & "none", mps_to_cmpdy, c0, & ns1, f_dvidtt) - + call define_hist_field(n_dvidtd,"dvidtd","cm/day",tstr2D, tcstr, & "volume tendency dynamics", & "none", mps_to_cmpdy, c0, & ns1, f_dvidtd) - + call define_hist_field(n_daidtt,"daidtt","%/day",tstr2D, tcstr, & "area tendency thermo", & "none", secday*c100, c0, & ns1, f_daidtt) - + call define_hist_field(n_daidtd,"daidtd","%/day",tstr2D, tcstr, & "area tendency dynamics", & "none", secday*c100, c0, & ns1, f_daidtd) - + call define_hist_field(n_dagedtt,"dagedtt","day/day",tstr2D, tcstr, & "age tendency thermo", & "excludes time step increment", c1, c0, & ns1, f_dagedtt) - + call define_hist_field(n_dagedtd,"dagedtd","day/day",tstr2D, tcstr, & "age tendency dynamics", & "excludes time step increment", c1, c0, & @@ -1370,22 +1370,22 @@ subroutine init_hist (dt) "ice volume snapshot", & "none", c1, c0, & ns1, f_hisnap) - + call define_hist_field(n_aisnap,"aisnap","1",tstr2D, tcstr, & "ice area snapshot", & "none", c1, c0, & ns1, f_aisnap) - + call define_hist_field(n_trsig,"trsig","N/m",tstr2D, tcstr, & "internal stress tensor trace", & "ice strength approximation", c1, c0, & ns1, f_trsig) - + call define_hist_field(n_icepresent,"ice_present","1",tstr2D, tcstr, & "fraction of time-avg interval that ice is present", & "ice extent flag", c1, c0, & ns1, f_icepresent) - + call define_hist_field(n_fsurf_ai,"fsurf_ai","W/m^2",tstr2D, tcstr, & "net surface heat flux", & "positive downward, excludes conductive flux, weighted by ice area", & @@ -1477,27 +1477,27 @@ subroutine init_hist (dt) "sea ice thickness", & "volume divided by area", c1, c0, & ns1, f_sithick) - + call define_hist_field(n_siage,"siage","s",tstr2D, tcstr, & "sea ice age", & "none", c1, c0, & ns1, f_siage) - + call define_hist_field(n_sisnthick,"sisnthick","m",tstr2D, tcstr, & "sea ice snow thickness", & "snow volume divided by area", c1, c0, & ns1, f_sisnthick) - + call define_hist_field(n_sitemptop,"sitemptop","K",tstr2D, tcstr, & "sea ice surface temperature", & "none", c1, c0, & ns1, f_sitemptop) - + call define_hist_field(n_sitempsnic,"sitempsnic","K",tstr2D, tcstr, & "snow ice interface temperature", & "surface temperature when no snow present", c1, c0, & ns1, f_sitempsnic) - + call define_hist_field(n_sitempbot,"sitempbot","K",tstr2D, tcstr, & "sea ice bottom temperature", & "none", c1, c0, & @@ -1512,37 +1512,37 @@ subroutine init_hist (dt) "ice y velocity component", & "none", c1, c0, & ns1, f_siv) - + call define_hist_field(n_sidmasstranx,"sidmasstranx","kg/s",ustr2D, ucstr, & "x component of snow and sea ice mass transport", & "none", c1, c0, & ns1, f_sidmasstranx) - + call define_hist_field(n_sidmasstrany,"sidmasstrany","kg/s",ustr2D, ucstr, & "y component of snow and sea ice mass transport", & "none", c1, c0, & ns1, f_sidmasstrany) - + call define_hist_field(n_sistrxdtop,"sistrxdtop","N m-2",ustr2D, ucstr, & "x component of atmospheric stress on sea ice", & "none", c1, c0, & ns1, f_sistrxdtop) - + call define_hist_field(n_sistrydtop,"sistrydtop","N m-2",ustr2D, ucstr, & "y component of atmospheric stress on sea ice", & "none", c1, c0, & ns1, f_sistrydtop) - + call define_hist_field(n_sistrxubot,"sistrxubot","N m-2",ustr2D, ucstr, & "x 
component of ocean stress on sea ice", & "none", c1, c0, & ns1, f_sistrxubot) - + call define_hist_field(n_sistryubot,"sistryubot","N m-2",ustr2D, ucstr, & "y component of ocean stress on sea ice", & "none", c1, c0, & ns1, f_sistryubot) - + call define_hist_field(n_sicompstren,"sicompstren","N m-1",tstr2D, tcstr, & "compressive sea ice strength", & "none", c1, c0, & @@ -1557,37 +1557,37 @@ subroutine init_hist (dt) "ice direction", & "vector direction - going to", c1, c0, & ns1, f_sidir) - + call define_hist_field(n_sialb,"sialb","1",tstr2D, tcstr, & "sea ice albedo", & "none", c1, c0, & ns1, f_sialb) - + call define_hist_field(n_sihc,"sihc","J m-2",tstr2D, tcstr, & "sea ice heat content", & "none", c1, c0, & ns1, f_sihc) - + call define_hist_field(n_sisnhc,"sisnhc","J m-2",tstr2D, tcstr, & "snow heat content", & "none", c1, c0, & ns1, f_sisnhc) - + call define_hist_field(n_sidconcth,"sidconcth","1/s",tstr2D, tcstr, & "sea ice area change from thermodynamics", & "none", c1, c0, & ns1, f_sidconcth) - + call define_hist_field(n_sidconcdyn,"sidconcdyn","1/s",tstr2D, tcstr, & "sea ice area change from dynamics", & "none", c1, c0, & ns1, f_sidconcdyn) - + call define_hist_field(n_sidmassth,"sidmassth","kg m-2 s-1",tstr2D, tcstr, & "sea ice mass change from thermodynamics", & "none", c1, c0, & ns1, f_sidmassth) - + call define_hist_field(n_sidmassdyn,"sidmassdyn","kg m-2 s-1",tstr2D, tcstr, & "sea ice mass change from dynamics", & "none", c1, c0, & @@ -1597,37 +1597,37 @@ subroutine init_hist (dt) "sea ice mass change from frazil", & "none", c1, c0, & ns1, f_sidmassgrowthwat) - + call define_hist_field(n_sidmassgrowthbot,"sidmassgrowthbot","kg m-2 s-1",tstr2D, tcstr, & "sea ice mass change from basal growth", & "none", c1, c0, & ns1, f_sidmassgrowthbot) - + call define_hist_field(n_sidmasssi,"sidmasssi","kg m-2 s-1",tstr2D, tcstr, & "sea ice mass change from snow-ice formation", & "none", c1, c0, & ns1, f_sidmasssi) - + call define_hist_field(n_sidmassevapsubl,"sidmassevapsubl","kg m-2 s-1",tstr2D, tcstr, & "sea ice mass change from evaporation and sublimation", & "none", c1, c0, & ns1, f_sidmassevapsubl) - + call define_hist_field(n_sndmasssubl,"sndmassubl","kg m-2 s-1",tstr2D, tcstr, & "snow mass change from evaporation and sublimation", & "none", c1, c0, & ns1, f_sndmasssubl) - + call define_hist_field(n_sidmassmelttop,"sidmassmelttop","kg m-2 s-1",tstr2D, tcstr, & "sea ice mass change top melt", & "none", c1, c0, & ns1, f_sidmassmelttop) - + call define_hist_field(n_sidmassmeltbot,"sidmassmeltbot","kg m-2 s-1",tstr2D, tcstr, & "sea ice mass change bottom melt", & "none", c1, c0, & ns1, f_sidmassmeltbot) - + call define_hist_field(n_sidmasslat,"sidmasslat","kg m-2 s-1",tstr2D, tcstr, & "sea ice mass change lateral melt", & "none", c1, c0, & @@ -1637,37 +1637,37 @@ subroutine init_hist (dt) "snow mass change from snow fall", & "none", c1, c0, & ns1, f_sndmasssnf) - + call define_hist_field(n_sndmassmelt,"sndmassmelt","kg m-2 s-1",tstr2D, tcstr, & "snow mass change from snow melt", & "none", c1, c0, & ns1, f_sndmassmelt) - + call define_hist_field(n_siflswdtop,"siflswdtop","W/m2",tstr2D, tcstr, & "down shortwave flux over sea ice", & "positive downward", c1, c0, & ns1, f_siflswdtop) - + call define_hist_field(n_siflswutop,"siflswutop","W/m2",tstr2D, tcstr, & "upward shortwave flux over sea ice", & "positive downward", c1, c0, & ns1, f_siflswutop) - + call define_hist_field(n_siflswdbot,"siflswdbot","W/m2",tstr2D, tcstr, & "down shortwave flux at bottom of ice", & "positive downward", c1, c0, & 
ns1, f_siflswdbot) - + call define_hist_field(n_sifllwdtop,"sifllwdtop","W/m2",tstr2D, tcstr, & "down longwave flux over sea ice", & "positive downward", c1, c0, & ns1, f_sifllwdtop) - + call define_hist_field(n_sifllwutop,"sifllwutop","W/m2",tstr2D, tcstr, & "upward longwave flux over sea ice", & "positive downward", c1, c0, & ns1, f_sifllwutop) - + call define_hist_field(n_siflsenstop,"siflsenstop","W/m2",tstr2D, tcstr, & "sensible heat flux over sea ice", & "positive downward", c1, c0, & @@ -1677,37 +1677,37 @@ subroutine init_hist (dt) "sensible heat flux at bottom of sea ice", & "positive downward", c1, c0, & ns1, f_siflsensupbot) - + call define_hist_field(n_sifllatstop,"sifllatstop","W/m2",tstr2D, tcstr, & "latent heat flux over sea ice", & "positive downward", c1, c0, & ns1, f_sifllatstop) - + call define_hist_field(n_siflcondtop,"siflcondtop","W/m2",tstr2D, tcstr, & "conductive heat flux at top of sea ice", & "positive downward", c1, c0, & ns1, f_siflcondtop) - + call define_hist_field(n_siflcondbot,"siflcondbot","W/m2",tstr2D, tcstr, & "conductive heat flux at bottom of sea ice", & "positive downward", c1, c0, & ns1, f_siflcondbot) - + call define_hist_field(n_sipr,"sipr","kg m-2 s-1",tstr2D, tcstr, & "rainfall over sea ice", & "none", c1, c0, & ns1, f_sipr) - + call define_hist_field(n_sifb,"sifb","m",tstr2D, tcstr, & "sea ice freeboard above sea level", & "none", c1, c0, & ns1, f_sifb) - + call define_hist_field(n_siflsaltbot,"siflsaltbot","kg m-2 s-1",tstr2D, tcstr, & "salt flux from sea ice", & "positive downward", c1, c0, & ns1, f_siflsaltbot) - + call define_hist_field(n_siflfwbot,"siflfwbot","kg m-2 s-1",tstr2D, tcstr, & "fresh water flux from sea ice", & "positive downward", c1, c0, & @@ -1717,37 +1717,37 @@ subroutine init_hist (dt) "fresh water drainage through sea ice", & "positive downward", c1, c0, & ns1, f_siflfwdrain) - + call define_hist_field(n_sidragtop,"sidragtop","1",tstr2D, tcstr, & "atmospheric drag over sea ice", & "none", c1, c0, & ns1, f_sidragtop) - + call define_hist_field(n_sirdgthick,"sirdgthick","m",tstr2D, tcstr, & "sea ice ridge thickness", & "vrdg divided by ardg", c1, c0, & ns1, f_sirdgthick) - + call define_hist_field(n_siforcetiltx,"siforcetiltx","N m-2",tstr2D, tcstr, & "sea surface tilt term", & "none", c1, c0, & ns1, f_siforcetiltx) - + call define_hist_field(n_siforcetilty,"siforcetilty","N m-2",tstr2D, tcstr, & "sea surface tilt term", & "none", c1, c0, & ns1, f_siforcetilty) - + call define_hist_field(n_siforcecoriolx,"siforcecoriolx","N m-2",tstr2D, tcstr, & "coriolis term", & "none", c1, c0, & ns1, f_siforcecoriolx) - + call define_hist_field(n_siforcecorioly,"siforcecorioly","N m-2",tstr2D, tcstr, & "coriolis term", & "none", c1, c0, & ns1, f_siforcecorioly) - + call define_hist_field(n_siforceintstrx,"siforceintstrx","N m-2",tstr2D, tcstr, & "internal stress term", & "none", c1, c0, & @@ -1762,7 +1762,7 @@ subroutine init_hist (dt) "average normal stress", & "sistreave is instantaneous", c1, c0, & ns1, f_sistreave) - + call define_hist_field(n_sistremax,"sistremax","N m-1",ustr2D, ucstr, & "maximum shear stress", & "sistremax is instantaneous", c1, c0, & @@ -1797,12 +1797,12 @@ subroutine init_hist (dt) do ns1 = 1, nstreams if (histfreq(ns1) /= 'x') then - call define_hist_field(n_aicen,"aicen","1",tstr3Dc, tcstr, & - "ice area, categories","none", c1, c0, & + call define_hist_field(n_aicen,"aicen","1",tstr3Dc, tcstr, & + "ice area, categories","none", c1, c0, & ns1, f_aicen) - call define_hist_field(n_vicen,"vicen","m",tstr3Dc, 
tcstr, & - "ice volume, categories","none", c1, c0, & + call define_hist_field(n_vicen,"vicen","m",tstr3Dc, tcstr, & + "ice volume, categories","none", c1, c0, & ns1, f_vicen) call define_hist_field(n_vsnon,"vsnon","m",tstr3Dc, tcstr, & @@ -1814,29 +1814,29 @@ subroutine init_hist (dt) "snow fraction per unit grid cell area", c1, c0, & ns1, f_snowfracn) - call define_hist_field(n_fsurfn_ai,"fsurfn_ai","W/m^2",tstr3Dc, tcstr, & + call define_hist_field(n_fsurfn_ai,"fsurfn_ai","W/m^2",tstr3Dc, tcstr, & "net surface heat flux, categories","weighted by ice area", c1, c0, & ns1, f_fsurfn_ai) - + call define_hist_field(n_fcondtopn_ai,"fcondtopn_ai","W/m^2",tstr3Dc, tcstr, & "top sfc conductive heat flux, cat","weighted by ice area", c1, c0, & ns1, f_fcondtopn_ai) - call define_hist_field(n_fmelttn_ai,"fmelttn_ai","W/m^2",tstr3Dc, tcstr, & - "net sfc heat flux causing melt, cat","weighted by ice area", c1, c0, & + call define_hist_field(n_fmelttn_ai,"fmelttn_ai","W/m^2",tstr3Dc, tcstr, & + "net sfc heat flux causing melt, cat","weighted by ice area", c1, c0, & ns1, f_fmelttn_ai) - call define_hist_field(n_flatn_ai,"flatn_ai","W/m^2",tstr3Dc, tcstr, & - "latent heat flux, category","weighted by ice area", c1, c0, & + call define_hist_field(n_flatn_ai,"flatn_ai","W/m^2",tstr3Dc, tcstr, & + "latent heat flux, category","weighted by ice area", c1, c0, & ns1, f_flatn_ai) - call define_hist_field(n_fsensn_ai,"fsensn_ai","W/m^2",tstr3Dc, tcstr, & - "sensible heat flux, category","weighted by ice area", c1, c0, & + call define_hist_field(n_fsensn_ai,"fsensn_ai","W/m^2",tstr3Dc, tcstr, & + "sensible heat flux, category","weighted by ice area", c1, c0, & ns1, f_fsensn_ai) call define_hist_field(n_keffn_top,"keffn_top","W/m^2/K",tstr3Dc, tcstr, & "effective thermal conductivity of the top ice layer, categories", & - "multilayer scheme", c1, c0, & + "multilayer scheme", c1, c0, & ns1, f_keffn_top) ! CMIP 3D @@ -1876,16 +1876,16 @@ subroutine init_hist (dt) ! do ns1 = 1, nstreams ! if (histfreq(ns1) /= 'x') then -! call define_hist_field(n_field3dz,"field3dz","1",tstr3Dz, tcstr, & +! call define_hist_field(n_field3dz,"field3dz","1",tstr3Dz, tcstr, & ! "example 3dz field", & ! "vertical profile", c1, c0, & ! ns1, f_field3dz) ! endif ! if (histfreq(ns1) /= 'x') then -! enddo ! ns1 +! enddo ! ns1 ! biogeochemistry - call init_hist_bgc_3Db + call init_hist_bgc_3Db call init_hist_bgc_3Da !----------------------------------------------------------------- @@ -1902,12 +1902,12 @@ subroutine init_hist (dt) do ns1 = 1, nstreams if (histfreq(ns1) /= 'x') then - call define_hist_field(n_Tinz,"Tinz","C",tstr4Di, tcstr, & + call define_hist_field(n_Tinz,"Tinz","C",tstr4Di, tcstr, & "ice internal temperatures on CICE grid", & "vertical profile", c1, c0, & ns1, f_Tinz) - call define_hist_field(n_Sinz,"Sinz","ppt",tstr4Di, tcstr, & + call define_hist_field(n_Sinz,"Sinz","ppt",tstr4Di, tcstr, & "ice internal bulk salinity", & "vertical profile", c1, c0, & ns1, f_Sinz) @@ -1918,7 +1918,7 @@ subroutine init_hist (dt) do ns1 = 1, nstreams if (histfreq(ns1) /= 'x') then - call define_hist_field(n_Tsnz,"Tsnz","C",tstr4Ds, tcstr, & + call define_hist_field(n_Tsnz,"Tsnz","C",tstr4Ds, tcstr, & "snow internal temperatures", & "vertical profile", c1, c0, & ns1, f_Tsnz) @@ -2071,8 +2071,8 @@ subroutine init_hist (dt) if (restart .and. yday >= c2) then ! 
restarting midyear gives erroneous onset dates - mlt_onset = 999._dbl_kind - frz_onset = 999._dbl_kind + mlt_onset = 999._dbl_kind + frz_onset = 999._dbl_kind else mlt_onset = c0 frz_onset = c0 @@ -2104,13 +2104,13 @@ subroutine accum_hist (dt) Tair, Tref, Qref, congel, frazil, frazil_diag, snoice, dsnow, & melts, meltb, meltt, meltl, fresh, fsalt, fresh_ai, fsalt_ai, & fhocn, fhocn_ai, uatm, vatm, fbot, Tbot, Tsnice, fswthru_ai, & - strairx, strairy, strtltx, strtlty, strintx, strinty, & - taubx, tauby, strocnx, strocny, & + strairxU, strairyU, strtltxU, strtltyU, strintxU, strintyU, & + taubxU, taubyU, strocnxU, strocnyU, & strairxN, strairyN, strtltxN, strtltyN, strintxN, strintyN, & taubxN, taubyN, strocnxN, strocnyN, & strairxE, strairyE, strtltxE, strtltyE, strintxE, strintyE, & taubxE, taubyE, strocnxE, strocnyE, & - fm, fmN, fmE, daidtt, dvidtt, daidtd, dvidtd, fsurf, & + fmU, fmN, fmE, daidtt, dvidtt, daidtd, dvidtd, fsurf, & fcondtop, fcondbot, fsurfn, fcondtopn, flatn, fsensn, albcnt, snwcnt, & stressp_1, stressm_1, stress12_1, & stresspT, stressmT, stress12T, & @@ -2150,7 +2150,7 @@ subroutine accum_hist (dt) ravgct , & ! 1/avgct ravgctz ! 1/avgct - real (kind=dbl_kind) :: & + real (kind=dbl_kind) :: & qn , & ! temporary variable for enthalpy sn ! temporary variable for salinity @@ -2208,7 +2208,7 @@ subroutine accum_hist (dt) if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) & a2D(:,:,n,:) = c0 enddo - do n = n2D + 1, n3Dccum + do n = n2D + 1, n3Dccum nn = n - n2D if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) & a3Dc(:,:,:,nn,:) = c0 @@ -2267,7 +2267,7 @@ subroutine accum_hist (dt) !$OMP worka,workb,worka3,Tinz4d,Sinz4d,Tsnz4d) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -2526,31 +2526,31 @@ subroutine accum_hist (dt) call accum_hist_field(n_fswthru, iblk, fswthru(:,:,iblk), a2D) if (f_fswthru_ai(1:1)/= 'x') & call accum_hist_field(n_fswthru_ai,iblk, fswthru_ai(:,:,iblk), a2D) - + if (f_strairx(1:1) /= 'x') & - call accum_hist_field(n_strairx, iblk, strairx(:,:,iblk), a2D) + call accum_hist_field(n_strairx, iblk, strairxU(:,:,iblk), a2D) if (f_strairy(1:1) /= 'x') & - call accum_hist_field(n_strairy, iblk, strairy(:,:,iblk), a2D) + call accum_hist_field(n_strairy, iblk, strairyU(:,:,iblk), a2D) if (f_strtltx(1:1) /= 'x') & - call accum_hist_field(n_strtltx, iblk, strtltx(:,:,iblk), a2D) + call accum_hist_field(n_strtltx, iblk, strtltxU(:,:,iblk), a2D) if (f_strtlty(1:1) /= 'x') & - call accum_hist_field(n_strtlty, iblk, strtlty(:,:,iblk), a2D) + call accum_hist_field(n_strtlty, iblk, strtltyU(:,:,iblk), a2D) if (f_strcorx(1:1) /= 'x') & - call accum_hist_field(n_strcorx, iblk, fm(:,:,iblk)*vvel(:,:,iblk), a2D) + call accum_hist_field(n_strcorx, iblk, fmU(:,:,iblk)*vvel(:,:,iblk), a2D) if (f_strcory(1:1) /= 'x') & - call accum_hist_field(n_strcory, iblk,-fm(:,:,iblk)*uvel(:,:,iblk), a2D) + call accum_hist_field(n_strcory, iblk,-fmU(:,:,iblk)*uvel(:,:,iblk), a2D) if (f_strocnx(1:1) /= 'x') & - call accum_hist_field(n_strocnx, iblk, strocnx(:,:,iblk), a2D) + call accum_hist_field(n_strocnx, iblk, strocnxU(:,:,iblk), a2D) if (f_strocny(1:1) /= 'x') & - call accum_hist_field(n_strocny, iblk, strocny(:,:,iblk), a2D) + call accum_hist_field(n_strocny, iblk, strocnyU(:,:,iblk), a2D) if (f_strintx(1:1) /= 'x') & - call accum_hist_field(n_strintx, iblk, strintx(:,:,iblk), a2D) + call accum_hist_field(n_strintx, iblk, strintxU(:,:,iblk), a2D) 
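
Aside on the idiom in this hunk: each history field is accumulated only when its namelist frequency flag is something other than 'x', and the Coriolis stress has no stored array of its own, so it is built on the fly as fmU*vvel and -fmU*uvel. The following is a minimal, self-contained Fortran sketch of that guard-then-accumulate pattern; the accum helper, the array shapes, and all values are illustrative stand-ins, not CICE's actual accum_hist_field.

      program accum_sketch
         ! Illustrative only: simplified stand-in for CICE's accum_hist_field.
         implicit none
         integer, parameter :: dbl_kind = selected_real_kind(13)
         integer, parameter :: nx = 4, ny = 3
         real (kind=dbl_kind), dimension(nx,ny) :: fmU, uvel, vvel
         real (kind=dbl_kind), dimension(nx,ny) :: a_strcorx, a_strcory
         character (len=1) :: f_strcorx = 'm'  ! any value except 'x' enables the field
         character (len=1) :: f_strcory = 'x'  ! 'x' disables it

         fmU  = 1.4e-4_dbl_kind                ! toy Coriolis parameter * ice mass
         uvel = 0.10_dbl_kind                  ! toy ice velocity components (m/s)
         vvel = 0.05_dbl_kind
         a_strcorx = 0.0_dbl_kind
         a_strcory = 0.0_dbl_kind

         ! guard-then-accumulate, as in the hunk above
         if (f_strcorx(1:1) /= 'x') call accum (a_strcorx,  fmU*vvel)
         if (f_strcory(1:1) /= 'x') call accum (a_strcory, -fmU*uvel)

         print *, 'accumulated strcorx(1,1) =', a_strcorx(1,1)

      contains

         subroutine accum (asum, field)
            ! running time sum; scaled by 1/avgct when the history file is written
            real (kind=dbl_kind), dimension(:,:), intent(inout) :: asum
            real (kind=dbl_kind), dimension(:,:), intent(in)    :: field
            asum = asum + field
         end subroutine accum

      end program accum_sketch

The same flag is also the last argument of the matching define_hist_field call, so registration and accumulation of a field are driven by one namelist setting.
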
if (f_strinty(1:1) /= 'x') & - call accum_hist_field(n_strinty, iblk, strinty(:,:,iblk), a2D) + call accum_hist_field(n_strinty, iblk, strintyU(:,:,iblk), a2D) if (f_taubx(1:1) /= 'x') & - call accum_hist_field(n_taubx, iblk, taubx(:,:,iblk), a2D) + call accum_hist_field(n_taubx, iblk, taubxU(:,:,iblk), a2D) if (f_tauby(1:1) /= 'x') & - call accum_hist_field(n_tauby, iblk, tauby(:,:,iblk), a2D) + call accum_hist_field(n_tauby, iblk, taubyU(:,:,iblk), a2D) if (f_strairxN(1:1) /= 'x') & call accum_hist_field(n_strairxN, iblk, strairxN(:,:,iblk), a2D) if (f_strairyN(1:1) /= 'x') & @@ -2791,7 +2791,7 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (aice_init(i,j,iblk) > puny) & - worka(i,j) = aice(i,j,iblk)*(aice(i,j,iblk)*strairx(i,j,iblk)/aice_init(i,j,iblk)) + worka(i,j) = aice(i,j,iblk)*(aice(i,j,iblk)*strairxU(i,j,iblk)/aice_init(i,j,iblk)) enddo enddo call accum_hist_field(n_sistrxdtop, iblk, worka(:,:), a2D) @@ -2802,7 +2802,7 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (aice_init(i,j,iblk) > puny) & - worka(i,j) = aice(i,j,iblk)*(aice(i,j,iblk)*strairy(i,j,iblk)/aice_init(i,j,iblk)) + worka(i,j) = aice(i,j,iblk)*(aice(i,j,iblk)*strairyU(i,j,iblk)/aice_init(i,j,iblk)) enddo enddo call accum_hist_field(n_sistrydtop, iblk, worka(:,:), a2D) @@ -2813,7 +2813,7 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (aice(i,j,iblk) > puny) & - worka(i,j) = aice(i,j,iblk)*strocnx(i,j,iblk) + worka(i,j) = aice(i,j,iblk)*strocnxU(i,j,iblk) enddo enddo call accum_hist_field(n_sistrxubot, iblk, worka(:,:), a2D) @@ -2824,7 +2824,7 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (aice(i,j,iblk) > puny) & - worka(i,j) = aice(i,j,iblk)*strocny(i,j,iblk) + worka(i,j) = aice(i,j,iblk)*strocnyU(i,j,iblk) enddo enddo call accum_hist_field(n_sistryubot, iblk, worka(:,:), a2D) @@ -3219,7 +3219,7 @@ subroutine accum_hist (dt) if ( ktherm == 2) then dfresh = -rhoi*(frazil(i,j,iblk)-frazil_diag(i,j,iblk))/dt else - dfresh = -rhoi*frazil(i,j,iblk)/dt + dfresh = -rhoi*frazil(i,j,iblk)/dt endif endif dfsalt = ice_ref_salinity*p001*dfresh @@ -3241,7 +3241,7 @@ subroutine accum_hist (dt) if ( ktherm == 2) then dfresh = -rhoi*(frazil(i,j,iblk)-frazil_diag(i,j,iblk))/dt else - dfresh = -rhoi*frazil(i,j,iblk)/dt + dfresh = -rhoi*frazil(i,j,iblk)/dt endif endif worka(i,j) = aice(i,j,iblk)*(fresh(i,j,iblk)+dfresh) @@ -3293,7 +3293,7 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (aice(i,j,iblk) > puny) then - worka(i,j) = aice(i,j,iblk)*strtltx(i,j,iblk) + worka(i,j) = aice(i,j,iblk)*strtltxU(i,j,iblk) endif enddo enddo @@ -3305,7 +3305,7 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (aice(i,j,iblk) > puny) then - worka(i,j) = aice(i,j,iblk)*strtlty(i,j,iblk) + worka(i,j) = aice(i,j,iblk)*strtltyU(i,j,iblk) endif enddo enddo @@ -3317,7 +3317,7 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (aice(i,j,iblk) > puny) then - worka(i,j) = aice(i,j,iblk)*fm(i,j,iblk)*vvel(i,j,iblk) + worka(i,j) = aice(i,j,iblk)*fmU(i,j,iblk)*vvel(i,j,iblk) endif enddo enddo @@ -3329,7 +3329,7 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (aice(i,j,iblk) > puny) then - worka(i,j) = -aice(i,j,iblk)*fm(i,j,iblk)*uvel(i,j,iblk) + worka(i,j) = -aice(i,j,iblk)*fmU(i,j,iblk)*uvel(i,j,iblk) endif enddo enddo @@ -3341,7 +3341,7 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (aice(i,j,iblk) > puny) then - worka(i,j) = aice(i,j,iblk)*strintx(i,j,iblk) + worka(i,j) = aice(i,j,iblk)*strintxU(i,j,iblk) 
endif enddo enddo @@ -3353,7 +3353,7 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (aice(i,j,iblk) > puny) then - worka(i,j) = aice(i,j,iblk)*strinty(i,j,iblk) + worka(i,j) = aice(i,j,iblk)*strintyU(i,j,iblk) endif enddo enddo @@ -3395,7 +3395,7 @@ subroutine accum_hist (dt) if (f_fsensn_ai (1:1) /= 'x') & call accum_hist_field(n_fsensn_ai-n2D, iblk, ncat_hist, & fsensn(:,:,1:ncat_hist,iblk)*aicen_init(:,:,1:ncat_hist,iblk), a3Dc) - ! Calculate surface heat flux that causes melt (calculated by the + ! Calculate surface heat flux that causes melt (calculated by the ! atmos in HadGEM3 so needed for checking purposes) if (f_fmelttn_ai (1:1) /= 'x') & call accum_hist_field(n_fmelttn_ai-n2D, iblk, ncat_hist, & @@ -3484,7 +3484,7 @@ subroutine accum_hist (dt) call accum_hist_field(n_Sinz-n3Dfcum, iblk, nzilyr, ncat_hist, & Sinz4d(:,:,1:nzilyr,1:ncat_hist), a4Di) endif - + endif ! if (allocated(a3Dc)) if (allocated(a4Ds)) then @@ -3504,7 +3504,7 @@ subroutine accum_hist (dt) call accum_hist_field(n_Tsnz-n4Dicum, iblk, nzslyr, ncat_hist, & Tsnz4d(:,:,1:nzslyr,1:ncat_hist), a4Ds) endif - + endif ! if (allocated(a4Ds)) if (allocated(a3Dc) .and. allocated(a2D)) then @@ -3528,7 +3528,7 @@ subroutine accum_hist (dt) enddo endif - endif + endif !--------------------------------------------------------------- ! accumulate other history output !--------------------------------------------------------------- @@ -3569,14 +3569,14 @@ subroutine accum_hist (dt) if (write_history(ns) .or. write_ic) then !--------------------------------------------------------------- - ! Mask out land points and convert units + ! Mask out land points and convert units !--------------------------------------------------------------- ravgct = c1/avgct(ns) !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block, & !$OMP n,nn,ravgctz,ravgip,ravgipn) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -3611,7 +3611,7 @@ subroutine accum_hist (dt) endif do n = 1, num_avail_hist_fields_2D - if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) then + if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) then do j = jlo, jhi do i = ilo, ihi @@ -4125,7 +4125,7 @@ subroutine accum_hist (dt) if (avail_hist_fields(n)%vname(1:6) == 'albice') then do j = jlo, jhi do i = ilo, ihi - if (tmask(i,j,iblk)) then + if (tmask(i,j,iblk)) then ravgctz = c0 if (albcnt(i,j,iblk,ns) > puny) & ravgctz = c1/albcnt(i,j,iblk,ns) @@ -4145,7 +4145,7 @@ subroutine accum_hist (dt) if (avail_hist_fields(n)%vname(1:6) == 'albsni') then do j = jlo, jhi do i = ilo, ihi - if (tmask(i,j,iblk)) then + if (tmask(i,j,iblk)) then ravgctz = c0 if (albcnt(i,j,iblk,ns) > puny) & ravgctz = c1/albcnt(i,j,iblk,ns) @@ -4159,7 +4159,7 @@ subroutine accum_hist (dt) if (avail_hist_fields(n)%vname(1:8) == 'alvdr_ai') then do j = jlo, jhi do i = ilo, ihi - if (tmask(i,j,iblk)) then + if (tmask(i,j,iblk)) then ravgctz = c0 if (albcnt(i,j,iblk,ns) > puny) & ravgctz = c1/albcnt(i,j,iblk,ns) @@ -4216,7 +4216,7 @@ subroutine accum_hist (dt) do n = 1, num_avail_hist_fields_3Dc nn = n2D + n - if (avail_hist_fields(nn)%vhistfreq == histfreq(ns)) then + if (avail_hist_fields(nn)%vhistfreq == histfreq(ns)) then do k = 1, ncat_hist do j = jlo, jhi @@ -4265,7 +4265,7 @@ subroutine accum_hist (dt) do n = 1, num_avail_hist_fields_3Dz nn = n3Dccum + n - if (avail_hist_fields(nn)%vhistfreq == histfreq(ns)) then + if (avail_hist_fields(nn)%vhistfreq 
== histfreq(ns)) then do k = 1, nzilyr do j = jlo, jhi @@ -4283,7 +4283,7 @@ subroutine accum_hist (dt) enddo ! n do n = 1, num_avail_hist_fields_3Db nn = n3Dzcum + n - if (avail_hist_fields(nn)%vhistfreq == histfreq(ns)) then + if (avail_hist_fields(nn)%vhistfreq == histfreq(ns)) then do k = 1, nzblyr do j = jlo, jhi do i = ilo, ihi @@ -4301,7 +4301,7 @@ subroutine accum_hist (dt) do n = 1, num_avail_hist_fields_3Da nn = n3Dbcum + n - if (avail_hist_fields(nn)%vhistfreq == histfreq(ns)) then + if (avail_hist_fields(nn)%vhistfreq == histfreq(ns)) then do k = 1, nzalyr do j = jlo, jhi do i = ilo, ihi @@ -4337,7 +4337,7 @@ subroutine accum_hist (dt) do n = 1, num_avail_hist_fields_4Di nn = n3Dfcum + n - if (avail_hist_fields(nn)%vhistfreq == histfreq(ns)) then + if (avail_hist_fields(nn)%vhistfreq == histfreq(ns)) then do k = 1, nzilyr do ic = 1, ncat_hist do j = jlo, jhi @@ -4357,7 +4357,7 @@ subroutine accum_hist (dt) do n = 1, num_avail_hist_fields_4Ds nn = n4Dicum + n - if (avail_hist_fields(nn)%vhistfreq == histfreq(ns)) then + if (avail_hist_fields(nn)%vhistfreq == histfreq(ns)) then do k = 1, nzslyr do ic = 1, ncat_hist do j = jlo, jhi @@ -4460,7 +4460,7 @@ subroutine accum_hist (dt) if (n_sig2 (ns) /= 0) a2D(i,j,n_sig2(ns),iblk) = & sig2 (i,j,iblk)*avail_hist_fields(n_sig2(ns))%cona if (n_sigP (ns) /= 0) a2D(i,j,n_sigP(ns),iblk) = & - sigP (i,j,iblk)*avail_hist_fields(n_sigP(ns))%cona + sigP (i,j,iblk)*avail_hist_fields(n_sigP(ns))%cona if (n_sistreave(ns) /= 0) a2D(i,j,n_sistreave(ns),iblk) = & p5*(sig1(i,j,iblk)+sig2(i,j,iblk))*avail_hist_fields(n_sistreave(ns))%cona if (n_sistremax(ns) /= 0) a2D(i,j,n_sistremax(ns),iblk) = & @@ -4557,8 +4557,8 @@ subroutine accum_hist (dt) do n = 1,n2D if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) a2D(:,:,n,:) = c0 enddo - do n = n2D + 1, n3Dccum - nn = n - n2D + do n = n2D + 1, n3Dccum + nn = n - n2D if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) a3Dc(:,:,:,nn,:) = c0 enddo do n = n3Dccum + 1, n3Dzcum @@ -4595,7 +4595,7 @@ subroutine accum_hist (dt) !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -4606,13 +4606,13 @@ subroutine accum_hist (dt) do i=ilo,ihi ! reset NH Jan 1 if (lmask_n(i,j,iblk)) mlt_onset(i,j,iblk) = c0 - ! reset SH Jan 1 + ! reset SH Jan 1 if (lmask_s(i,j,iblk)) frz_onset(i,j,iblk) = c0 enddo enddo endif ! new_year - if ( (mmonth .eq. 7) .and. new_month ) then + if ( (mmonth .eq. 7) .and. new_month ) then do j=jlo,jhi do i=ilo,ihi ! reset SH Jul 1 diff --git a/cicecore/cicedynB/analysis/ice_history_bgc.F90 b/cicecore/cicedynB/analysis/ice_history_bgc.F90 index 8802cf431..003e76120 100644 --- a/cicecore/cicedynB/analysis/ice_history_bgc.F90 +++ b/cicecore/cicedynB/analysis/ice_history_bgc.F90 @@ -21,74 +21,74 @@ module ice_history_bgc icepack_query_tracer_indices, icepack_query_parameters, & icepack_query_parameters use ice_domain_size, only: max_nstrm, n_iso, n_aero, & - n_algae, n_dic, n_doc, n_don, n_zaero, n_fed, n_fep + n_algae, n_dic, n_doc, n_don, n_zaero, n_fed, n_fep implicit none private public :: init_hist_bgc_2D, init_hist_bgc_3Dc, & init_hist_bgc_3Db, init_hist_bgc_3Da,& accum_hist_bgc, init_history_bgc - + !--------------------------------------------------------------- ! flags: write to output file if true or histfreq value !--------------------------------------------------------------- ! 
specified in input_templates !-------------------------------------------------------------- - character (len=max_nstrm), public :: & + character (len=max_nstrm), public :: & f_fiso_atm = 'x', f_fiso_ocn = 'x', & f_iso = 'x', & f_faero_atm = 'x', f_faero_ocn = 'x', & f_aero = 'x', & - f_fzsal = 'm', f_fzsal_ai = 'm', & + f_fzsal = 'm', f_fzsal_ai = 'm', & f_fzsal_g = 'm', f_fzsal_g_ai = 'm', & f_zsal = 'x', & - f_fbio = 'x', f_fbio_ai = 'x', & - f_zaero = 'x', f_bgc_S = 'x', & + f_fbio = 'x', f_fbio_ai = 'x', & + f_zaero = 'x', f_bgc_S = 'x', & f_bgc_N = 'x', f_bgc_C = 'x', & f_bgc_DOC = 'x', f_bgc_DIC = 'x', & f_bgc_chl = 'x', f_bgc_Nit = 'x', & f_bgc_Am = 'x', f_bgc_Sil = 'x', & f_bgc_DMSPp = 'x', f_bgc_DMSPd = 'x', & - f_bgc_DMS = 'x', f_bgc_DON = 'x', & + f_bgc_DMS = 'x', f_bgc_DON = 'x', & f_bgc_Fe = 'x', f_bgc_hum = 'x', & f_bgc_PON = 'x', f_bgc_ml = 'x', & - f_upNO = 'x', f_upNH = 'x', & - f_bTin = 'x', f_bphi = 'x', & - f_iDi = 'x', f_iki = 'x', & + f_upNO = 'x', f_upNH = 'x', & + f_bTin = 'x', f_bphi = 'x', & + f_iDi = 'x', f_iki = 'x', & f_fbri = 'x', f_hbri = 'x', & - f_zfswin = 'x', f_grownet = 'x', & - f_bionet = 'x', f_biosnow = 'x', & + f_zfswin = 'x', f_grownet = 'x', & + f_bionet = 'x', f_biosnow = 'x', & f_PPnet = 'x', f_algalpeak = 'x', & f_zbgc_frac = 'x', & !------------------------------------------------ ! specified by combinations of above values !------------------------------------------------- f_bgc_Fed = 'x', f_bgc_Fep = 'x', & - f_DONnet = 'x', & - f_DICnet = 'x', f_DOCnet = 'x', & - f_chlnet = 'x', f_Nitnet = 'x', & - f_Amnet = 'x', f_Cnet = 'x', & - f_Nnet = 'x', f_DMSPpnet = 'x', & - f_DMSPdnet = 'x', f_DMSnet = 'x', & - f_Fednet = 'x', f_Fepnet = 'x', & + f_DONnet = 'x', & + f_DICnet = 'x', f_DOCnet = 'x', & + f_chlnet = 'x', f_Nitnet = 'x', & + f_Amnet = 'x', f_Cnet = 'x', & + f_Nnet = 'x', f_DMSPpnet = 'x', & + f_DMSPdnet = 'x', f_DMSnet = 'x', & + f_Fednet = 'x', f_Fepnet = 'x', & f_Silnet = 'x', f_PONnet = 'x', & - f_zaeronet = 'x', f_humnet = 'x', & - f_chlsnow = 'x', f_Nitsnow = 'x', & - f_Amsnow = 'x', f_Csnow = 'x', & - f_Nsnow = 'x', f_DMSPpsnow = 'x', & - f_DMSPdsnow = 'x', f_DMSsnow = 'x', & - f_Fedsnow = 'x', f_Fepsnow = 'x', & - f_Silsnow = 'x', f_PONsnow = 'x', & + f_zaeronet = 'x', f_humnet = 'x', & + f_chlsnow = 'x', f_Nitsnow = 'x', & + f_Amsnow = 'x', f_Csnow = 'x', & + f_Nsnow = 'x', f_DMSPpsnow = 'x', & + f_DMSPdsnow = 'x', f_DMSsnow = 'x', & + f_Fedsnow = 'x', f_Fepsnow = 'x', & + f_Silsnow = 'x', f_PONsnow = 'x', & f_humsnow = 'x', & - f_DICsnow = 'x', f_DOCsnow = 'x', & + f_DICsnow = 'x', f_DOCsnow = 'x', & f_DONsnow = 'x', f_zaerosnow = 'x', & - f_chlfrac = 'x', f_Nitfrac = 'x', & - f_Amfrac = 'x', & - f_Nfrac = 'x', f_DMSPpfrac = 'x', & - f_DMSPdfrac = 'x', f_DMSfrac = 'x', & - f_Silfrac = 'x', f_PONfrac = 'x', & + f_chlfrac = 'x', f_Nitfrac = 'x', & + f_Amfrac = 'x', & + f_Nfrac = 'x', f_DMSPpfrac = 'x', & + f_DMSPdfrac = 'x', f_DMSfrac = 'x', & + f_Silfrac = 'x', f_PONfrac = 'x', & f_humfrac = 'x', & - f_DICfrac = 'x', f_DOCfrac = 'x', & + f_DICfrac = 'x', f_DOCfrac = 'x', & f_DONfrac = 'x', f_zaerofrac = 'x', & f_Fedfrac = 'x', f_Fepfrac = 'x', & f_fNit = 'x', f_fNit_ai = 'x', & @@ -99,13 +99,13 @@ module ice_history_bgc f_fDON = 'x', f_fDON_ai = 'x', & f_fFed = 'x', f_fFed_ai = 'x', & f_fFep = 'x', f_fFep_ai = 'x', & - f_fSil = 'x', f_fSil_ai = 'x', & - f_fPON = 'x', f_fPON_ai = 'x', & - f_fhum = 'x', f_fhum_ai = 'x', & - f_fDMSPp = 'x', f_fDMSPp_ai = 'x', & - f_fDMSPd = 'x', f_fDMSPd_ai = 'x', & - f_fDMS = 'x', f_fDMS_ai = 'x', 
& - f_fzaero = 'x', f_fzaero_ai = 'x', & + f_fSil = 'x', f_fSil_ai = 'x', & + f_fPON = 'x', f_fPON_ai = 'x', & + f_fhum = 'x', f_fhum_ai = 'x', & + f_fDMSPp = 'x', f_fDMSPp_ai = 'x', & + f_fDMSPd = 'x', f_fDMSPd_ai = 'x', & + f_fDMS = 'x', f_fDMS_ai = 'x', & + f_fzaero = 'x', f_fzaero_ai = 'x', & f_bgc_Sil_ml = 'x', & f_bgc_Nit_ml = 'x', f_bgc_Am_ml = 'x', & f_bgc_DMSP_ml = 'x', f_bgc_DMS_ml = 'x', & @@ -140,12 +140,12 @@ module ice_history_bgc f_bgc_DMS , f_bgc_DON , & f_bgc_Fe , f_bgc_hum , & f_bgc_PON , f_bgc_ml , & - f_upNO , f_upNH , & + f_upNO , f_upNH , & f_bTin , f_bphi , & - f_iDi , f_iki , & + f_iDi , f_iki , & f_fbri , f_hbri , & - f_zfswin , f_grownet , & - f_bionet , f_biosnow , & + f_zfswin , f_grownet , & + f_bionet , f_biosnow , & f_PPnet , f_algalpeak , & f_zbgc_frac @@ -154,9 +154,9 @@ module ice_history_bgc !--------------------------------------------------------------- integer (kind=int_kind), dimension(max_nstrm), public :: & - n_fzsal , n_fzsal_ai , & - n_fzsal_g , n_fzsal_g_ai , & - n_zsal + n_fzsal , n_fzsal_ai , & + n_fzsal_g , n_fzsal_g_ai , & + n_zsal integer(kind=int_kind), dimension(icepack_max_iso,max_nstrm) :: & n_fiso_atm , & @@ -216,7 +216,7 @@ module ice_history_bgc n_bgc_Fed_cat1, n_bgc_Fep_cat1 integer(kind=int_kind), dimension(max_nstrm) :: & - n_bgc_S , & + n_bgc_S , & n_fNit , n_fNit_ai , & n_fAm , n_fAm_ai , & n_fSil , n_fSil_ai , & @@ -233,25 +233,25 @@ module ice_history_bgc n_bgc_hum_ml , & n_bgc_Nit_ml , n_bgc_Am_ml , & n_bgc_DMSP_ml , n_bgc_DMS_ml , & - n_upNO , n_upNH , & + n_upNO , n_upNH , & n_bTin , n_bphi , & n_iDi , n_iki , & n_bgc_PON , & n_fbri , n_hbri , & - n_zfswin , n_Nitnet , & - n_Amnet , n_Silnet , & + n_zfswin , n_Nitnet , & + n_Amnet , n_Silnet , & n_humnet , & - n_DMSPpnet , n_DMSPdnet , & - n_DMSnet , n_PONnet , & + n_DMSPpnet , n_DMSPdnet , & + n_DMSnet , n_PONnet , & n_Nitsnow , n_Amsnow , & n_Silsnow , n_humsnow , & - n_DMSPpsnow , n_DMSPdsnow , & - n_DMSsnow , n_PONsnow , & + n_DMSPpsnow , n_DMSPdsnow , & + n_DMSsnow , n_PONsnow , & n_Nitfrac , n_Amfrac , & n_Silfrac , & n_humfrac , & - n_DMSPpfrac , n_DMSPdfrac , & - n_DMSfrac , n_PONfrac , & + n_DMSPpfrac , n_DMSPdfrac , & + n_DMSfrac , n_PONfrac , & n_grownet , n_PPnet , & n_bgc_Nit_cat1, n_bgc_Am_cat1 , & n_bgc_Sil_cat1, n_bgc_DMSPd_cat1,& @@ -295,7 +295,7 @@ subroutine init_hist_bgc_2D tr_bgc_PON_out=tr_bgc_PON, & tr_bgc_N_out =tr_bgc_N, tr_bgc_C_out =tr_bgc_C, & tr_bgc_chl_out=tr_bgc_chl, tr_bgc_DON_out=tr_bgc_DON, & - tr_bgc_Fe_out =tr_bgc_Fe, tr_bgc_hum_out=tr_bgc_hum ) + tr_bgc_Fe_out =tr_bgc_Fe, tr_bgc_hum_out=tr_bgc_hum ) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -336,14 +336,14 @@ subroutine init_hist_bgc_2D if (.not. tr_aero) then f_faero_atm = 'x' f_faero_ocn = 'x' - f_aero = 'x' + f_aero = 'x' endif - + if (.not. tr_brine) then f_fbri = 'x' f_hbri = 'x' endif - + f_zaeronet = f_bionet f_zaerosnow = f_biosnow f_zaerofrac = f_zbgc_frac @@ -352,7 +352,7 @@ subroutine init_hist_bgc_2D if (.not. tr_zaero) then f_zaero = 'x' - f_fzaero = 'x' + f_fzaero = 'x' f_fzaero_ai = 'x' f_zaeronet = 'x' f_zaerosnow = 'x' @@ -396,7 +396,7 @@ subroutine init_hist_bgc_2D f_DMSPdnet = f_bionet f_DMSnet = f_bionet f_PONnet = f_bionet - + f_Nitsnow = f_biosnow f_Amsnow = f_biosnow f_Nsnow = f_biosnow @@ -466,7 +466,7 @@ subroutine init_hist_bgc_2D f_fDMSPd_ai = f_fbio_ai f_fDMS_ai = f_fbio_ai - if (.not. tr_bgc_N) then + if (.not. 
tr_bgc_N) then f_bgc_N = 'x' f_bgc_N_ml = 'x' f_fN = 'x' @@ -478,8 +478,8 @@ subroutine init_hist_bgc_2D endif f_peakval = f_algalpeak - if (.not. tr_bgc_Nit) then - f_upNO = 'x' + if (.not. tr_bgc_Nit) then + f_upNO = 'x' f_bgc_Nit = 'x' f_bgc_Nit_ml= 'x' f_fNit = 'x' @@ -511,8 +511,8 @@ subroutine init_hist_bgc_2D f_chlsnow = 'x' f_chlfrac = 'x' endif - if (.not. tr_bgc_Am) then - f_upNH = 'x' + if (.not. tr_bgc_Am) then + f_upNH = 'x' f_bgc_Am = 'x' f_bgc_Am_ml = 'x' f_fAm = 'x' @@ -560,8 +560,8 @@ subroutine init_hist_bgc_2D f_DMSfrac = 'x' f_DMSPpfrac = 'x' f_DMSPdfrac = 'x' - endif - if (.not. tr_bgc_DON) then + endif + if (.not. tr_bgc_DON) then f_bgc_DON = 'x' f_bgc_DON_ml = 'x' f_DONsnow = 'x' @@ -569,8 +569,8 @@ subroutine init_hist_bgc_2D f_DONnet = 'x' f_fDON = 'x' f_fDON_ai = 'x' - endif - if (.not. tr_bgc_Fe ) then + endif + if (.not. tr_bgc_Fe ) then f_bgc_Fe = 'x' f_bgc_Fed = 'x' f_bgc_Fed_ml = 'x' @@ -587,7 +587,7 @@ subroutine init_hist_bgc_2D f_fFep = 'x' f_fFep_ai = 'x' endif - if (.not. tr_bgc_PON .or. skl_bgc) then + if (.not. tr_bgc_PON .or. skl_bgc) then f_bgc_PON = 'x' f_PONsnow = 'x' f_PONfrac = 'x' @@ -595,19 +595,19 @@ subroutine init_hist_bgc_2D f_fPON = 'x' f_fPON_ai = 'x' endif - - f_bgc_Nit_cat1 = f_bgc_Nit - f_bgc_Am_cat1 = f_bgc_Am + + f_bgc_Nit_cat1 = f_bgc_Nit + f_bgc_Am_cat1 = f_bgc_Am f_bgc_N_cat1 = f_bgc_N f_bgc_DOC_cat1 = f_bgc_DOC f_bgc_DIC_cat1 = f_bgc_DIC f_bgc_DON_cat1 = f_bgc_DON - f_bgc_Fed_cat1 = f_bgc_Fe - f_bgc_Fep_cat1 = f_bgc_Fe - f_bgc_Sil_cat1 = f_bgc_Sil - f_bgc_hum_cat1 = f_bgc_hum + f_bgc_Fed_cat1 = f_bgc_Fe + f_bgc_Fep_cat1 = f_bgc_Fe + f_bgc_Sil_cat1 = f_bgc_Sil + f_bgc_hum_cat1 = f_bgc_hum f_bgc_DMSPd_cat1 = f_bgc_DMSPd - f_bgc_DMS_cat1 = f_bgc_DMS + f_bgc_DMS_cat1 = f_bgc_DMS f_bgc_PON_cat1 = f_bgc_PON if (solve_zsal) then @@ -711,73 +711,73 @@ subroutine init_hist_bgc_2D call broadcast_scalar (f_bgc_Sil_ml, master_task) call broadcast_scalar (f_bgc_hum_ml, master_task) call broadcast_scalar (f_bgc_DMSP_ml, master_task) - call broadcast_scalar (f_bgc_DMS_ml, master_task) - call broadcast_scalar (f_bgc_DON_ml, master_task) - call broadcast_scalar (f_bgc_Fed_ml, master_task) - call broadcast_scalar (f_bgc_Fep_ml, master_task) - call broadcast_scalar (f_upNO, master_task) - call broadcast_scalar (f_upNH, master_task) + call broadcast_scalar (f_bgc_DMS_ml, master_task) + call broadcast_scalar (f_bgc_DON_ml, master_task) + call broadcast_scalar (f_bgc_Fed_ml, master_task) + call broadcast_scalar (f_bgc_Fep_ml, master_task) + call broadcast_scalar (f_upNO, master_task) + call broadcast_scalar (f_upNH, master_task) call broadcast_scalar (f_bTin, master_task) call broadcast_scalar (f_bphi, master_task) - call broadcast_scalar (f_iDi, master_task) - call broadcast_scalar (f_iki, master_task) - call broadcast_scalar (f_bgc_S, master_task) - call broadcast_scalar (f_zfswin, master_task) - call broadcast_scalar (f_PPnet, master_task) - call broadcast_scalar (f_algalpeak, master_task) - call broadcast_scalar (f_zbgc_frac, master_task) - call broadcast_scalar (f_peakval, master_task) - call broadcast_scalar (f_grownet, master_task) - call broadcast_scalar (f_chlnet, master_task) - call broadcast_scalar (f_Nitnet, master_task) - call broadcast_scalar (f_Nnet, master_task) - call broadcast_scalar (f_Cnet, master_task) - call broadcast_scalar (f_DOCnet, master_task) - call broadcast_scalar (f_DICnet, master_task) - call broadcast_scalar (f_Amnet, master_task) - call broadcast_scalar (f_Silnet, master_task) - call broadcast_scalar (f_humnet, 
master_task) - call broadcast_scalar (f_DMSPpnet, master_task) - call broadcast_scalar (f_DMSPdnet, master_task) - call broadcast_scalar (f_DMSnet, master_task) - call broadcast_scalar (f_PONnet, master_task) - call broadcast_scalar (f_DONnet, master_task) - call broadcast_scalar (f_Fednet, master_task) - call broadcast_scalar (f_Fepnet, master_task) - call broadcast_scalar (f_zaeronet, master_task) - call broadcast_scalar (f_chlsnow, master_task) - call broadcast_scalar (f_Nitsnow, master_task) - call broadcast_scalar (f_Nsnow, master_task) - call broadcast_scalar (f_Csnow, master_task) - call broadcast_scalar (f_DOCsnow, master_task) - call broadcast_scalar (f_DICsnow, master_task) - call broadcast_scalar (f_Amsnow, master_task) - call broadcast_scalar (f_Silsnow, master_task) - call broadcast_scalar (f_humsnow, master_task) - call broadcast_scalar (f_DMSPpsnow, master_task) - call broadcast_scalar (f_DMSPdsnow, master_task) - call broadcast_scalar (f_DMSsnow, master_task) - call broadcast_scalar (f_PONsnow, master_task) - call broadcast_scalar (f_DONsnow, master_task) - call broadcast_scalar (f_Fedsnow, master_task) - call broadcast_scalar (f_Fepsnow, master_task) - call broadcast_scalar (f_zaerosnow, master_task) - call broadcast_scalar (f_chlfrac, master_task) - call broadcast_scalar (f_Nitfrac, master_task) - call broadcast_scalar (f_Nfrac, master_task) - call broadcast_scalar (f_DOCfrac, master_task) - call broadcast_scalar (f_DICfrac, master_task) - call broadcast_scalar (f_Amfrac, master_task) - call broadcast_scalar (f_Silfrac, master_task) - call broadcast_scalar (f_humfrac, master_task) - call broadcast_scalar (f_DMSPpfrac, master_task) - call broadcast_scalar (f_DMSPdfrac, master_task) - call broadcast_scalar (f_DMSfrac, master_task) - call broadcast_scalar (f_PONfrac, master_task) - call broadcast_scalar (f_DONfrac, master_task) - call broadcast_scalar (f_Fedfrac, master_task) - call broadcast_scalar (f_Fepfrac, master_task) - call broadcast_scalar (f_zaerofrac, master_task) + call broadcast_scalar (f_iDi, master_task) + call broadcast_scalar (f_iki, master_task) + call broadcast_scalar (f_bgc_S, master_task) + call broadcast_scalar (f_zfswin, master_task) + call broadcast_scalar (f_PPnet, master_task) + call broadcast_scalar (f_algalpeak, master_task) + call broadcast_scalar (f_zbgc_frac, master_task) + call broadcast_scalar (f_peakval, master_task) + call broadcast_scalar (f_grownet, master_task) + call broadcast_scalar (f_chlnet, master_task) + call broadcast_scalar (f_Nitnet, master_task) + call broadcast_scalar (f_Nnet, master_task) + call broadcast_scalar (f_Cnet, master_task) + call broadcast_scalar (f_DOCnet, master_task) + call broadcast_scalar (f_DICnet, master_task) + call broadcast_scalar (f_Amnet, master_task) + call broadcast_scalar (f_Silnet, master_task) + call broadcast_scalar (f_humnet, master_task) + call broadcast_scalar (f_DMSPpnet, master_task) + call broadcast_scalar (f_DMSPdnet, master_task) + call broadcast_scalar (f_DMSnet, master_task) + call broadcast_scalar (f_PONnet, master_task) + call broadcast_scalar (f_DONnet, master_task) + call broadcast_scalar (f_Fednet, master_task) + call broadcast_scalar (f_Fepnet, master_task) + call broadcast_scalar (f_zaeronet, master_task) + call broadcast_scalar (f_chlsnow, master_task) + call broadcast_scalar (f_Nitsnow, master_task) + call broadcast_scalar (f_Nsnow, master_task) + call broadcast_scalar (f_Csnow, master_task) + call broadcast_scalar (f_DOCsnow, master_task) + call broadcast_scalar (f_DICsnow, 
master_task) + call broadcast_scalar (f_Amsnow, master_task) + call broadcast_scalar (f_Silsnow, master_task) + call broadcast_scalar (f_humsnow, master_task) + call broadcast_scalar (f_DMSPpsnow, master_task) + call broadcast_scalar (f_DMSPdsnow, master_task) + call broadcast_scalar (f_DMSsnow, master_task) + call broadcast_scalar (f_PONsnow, master_task) + call broadcast_scalar (f_DONsnow, master_task) + call broadcast_scalar (f_Fedsnow, master_task) + call broadcast_scalar (f_Fepsnow, master_task) + call broadcast_scalar (f_zaerosnow, master_task) + call broadcast_scalar (f_chlfrac, master_task) + call broadcast_scalar (f_Nitfrac, master_task) + call broadcast_scalar (f_Nfrac, master_task) + call broadcast_scalar (f_DOCfrac, master_task) + call broadcast_scalar (f_DICfrac, master_task) + call broadcast_scalar (f_Amfrac, master_task) + call broadcast_scalar (f_Silfrac, master_task) + call broadcast_scalar (f_humfrac, master_task) + call broadcast_scalar (f_DMSPpfrac, master_task) + call broadcast_scalar (f_DMSPdfrac, master_task) + call broadcast_scalar (f_DMSfrac, master_task) + call broadcast_scalar (f_PONfrac, master_task) + call broadcast_scalar (f_DONfrac, master_task) + call broadcast_scalar (f_Fedfrac, master_task) + call broadcast_scalar (f_Fepfrac, master_task) + call broadcast_scalar (f_zaerofrac, master_task) ! 2D variables @@ -820,28 +820,28 @@ subroutine init_hist_bgc_2D enddo endif - ! zsalinity - + ! zsalinity + call define_hist_field(n_fzsal,"fzsal","kg/m^2/s",tstr2D, tcstr, & "prognostic salt flux ice to ocn (cpl)", & "if positive, ocean gains salt", c1, c0, & ns, f_fzsal) - + call define_hist_field(n_fzsal_ai,"fzsal_ai","kg/m^2/s",tstr2D, tcstr, & "prognostic salt flux ice to ocean", & "weighted by ice area", c1, c0, & ns, f_fzsal_ai) - + call define_hist_field(n_fzsal_g,"fzsal_g","kg/m^2/s",tstr2D, tcstr, & "Gravity drainage salt flux ice to ocn (cpl)", & "if positive, ocean gains salt", c1, c0, & ns, f_fzsal_g) - + call define_hist_field(n_fzsal_g_ai,"fzsal_g_ai","kg/m^2/s",tstr2D, tcstr, & "Gravity drainage salt flux ice to ocean", & "weighted by ice area", c1, c0, & ns, f_fzsal_g_ai) - + call define_hist_field(n_zsal,"zsal_tot","g/m^2",tstr2D, tcstr, & "Total Salt content", & "In ice volume*fbri", c1, c0, & @@ -971,8 +971,8 @@ subroutine init_hist_bgc_2D "skeletal layer: bottom 3 cm", c1, c0, & ns, f_bgc_Fep ) enddo - endif !f_bgc_Fe - + endif !f_bgc_Fe + if (f_bgc_Nit(1:1) /= 'x') & call define_hist_field(n_bgc_Nit,"Nit","mmol/m^2",tstr2D, tcstr, & "Bulk skeletal nutrient (nitrate)", & @@ -1013,7 +1013,7 @@ subroutine init_hist_bgc_2D "Bulk dissolved skl trace gas (DMS)", & "skeletal layer: bottom 3 cm", c1, c0, & ns, f_bgc_DMS) - + endif !skl_bgc ! 
vertical and skeletal layer biogeochemistry @@ -1049,7 +1049,7 @@ subroutine init_hist_bgc_2D enddo endif if (f_bgc_Fed_ml (1:1) /= 'x') then - do n = 1, n_fed + do n = 1, n_fed write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'ml_dFe', trim(nchar) call define_hist_field(n_bgc_Fed_ml (n,:),vname_in,"nM",tstr2D, tcstr, & @@ -1059,7 +1059,7 @@ subroutine init_hist_bgc_2D enddo endif if (f_bgc_Fep_ml (1:1) /= 'x') then - do n = 1, n_fep + do n = 1, n_fep write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'ml_pFe', trim(nchar) call define_hist_field(n_bgc_Fep_ml (n,:),vname_in,"nM",tstr2D, tcstr, & @@ -1097,7 +1097,7 @@ subroutine init_hist_bgc_2D call define_hist_field(n_bgc_hum_ml,"ml_hum","mmol/m^3",tstr2D, tcstr, & "mixed layer humic material (carbon)", & "upper ocean", c1, c0, & - ns, f_bgc_hum_ml) + ns, f_bgc_hum_ml) if (f_bgc_DMSP_ml(1:1) /= 'x') & call define_hist_field(n_bgc_DMSP_ml,"ml_DMSP","mmol/m^3",tstr2D, tcstr, & "mixed layer precursor (DMSP)", & @@ -1108,30 +1108,30 @@ subroutine init_hist_bgc_2D "mixed layer trace gas (DMS)", & "upper ocean", c1, c0, & ns, f_bgc_DMS_ml) - + if (f_fNit(1:1) /= 'x') & call define_hist_field(n_fNit,"fNit","mmol/m^2/s",tstr2D, tcstr, & "nitrate flux ice to ocn (cpl)", & "if positive, ocean gains nitrate", c1, c0, & ns, f_fNit) - + if (f_fNit_ai(1:1) /= 'x') & call define_hist_field(n_fNit_ai,"fNit_ai","mmol/m^2/s",tstr2D, tcstr, & "nitrate flux ice to ocean", & "weighted by ice area", c1, c0, & ns, f_fNit_ai) - + if (f_fAm(1:1) /= 'x') & call define_hist_field(n_fAm,"fAm","mmol/m^2/s",tstr2D, tcstr, & "ammonium flux ice to ocn (cpl)", & "if positive, ocean gains ammonium", c1, c0, & ns, f_fAm) - + if (f_fAm_ai(1:1) /= 'x') & call define_hist_field(n_fAm_ai,"fAm_ai","mmol/m^2/s",tstr2D, tcstr, & "ammonium flux ice to ocean", & "weighted by ice area", c1, c0, & - ns, f_fAm_ai) + ns, f_fAm_ai) if (f_fN(1:1) /= 'x') then do n = 1, n_algae write(nchar,'(i3.3)') n @@ -1171,7 +1171,7 @@ subroutine init_hist_bgc_2D "weighted by ice area", c1, c0, & ns, f_fDOC_ai) enddo - endif + endif if (f_fDIC(1:1) /= 'x') then do n = 1, n_dic write(nchar,'(i3.3)') n @@ -1191,7 +1191,7 @@ subroutine init_hist_bgc_2D "weighted by ice area", c1, c0, & ns, f_fDIC_ai) enddo - endif + endif if (f_fDON(1:1) /= 'x') then do n = 1, n_don write(nchar,'(i3.3)') n @@ -1211,7 +1211,7 @@ subroutine init_hist_bgc_2D "weighted by ice area", c1, c0, & ns, f_fDON_ai) enddo - endif + endif if (f_fFed(1:1) /= 'x') then do n = 1, n_fed write(nchar,'(i3.3)') n @@ -1221,9 +1221,9 @@ subroutine init_hist_bgc_2D "positive to ocean", c1, c0, & ns, f_fFed ) enddo - endif + endif if (f_fFed_ai (1:1) /= 'x') then - do n = 1, n_fed + do n = 1, n_fed write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'fdFe_ai', trim(nchar) call define_hist_field(n_fFed_ai (n,:),vname_in,"umol/m^2/s",tstr2D, tcstr, & @@ -1231,7 +1231,7 @@ subroutine init_hist_bgc_2D "weighted by ice area", c1, c0, & ns, f_fFed_ai ) enddo - endif + endif if (f_fFep(1:1) /= 'x') then do n = 1, n_fep write(nchar,'(i3.3)') n @@ -1243,7 +1243,7 @@ subroutine init_hist_bgc_2D enddo endif if (f_fFep_ai (1:1) /= 'x') then - do n = 1, n_fep + do n = 1, n_fep write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'fpFe_ai', trim(nchar) call define_hist_field(n_fFep_ai (n,:),vname_in,"umol/m^2/s",tstr2D, tcstr, & @@ -1251,25 +1251,25 @@ subroutine init_hist_bgc_2D "weighted by ice area", c1, c0, & ns, f_fFep_ai ) enddo - endif + endif if (f_fSil(1:1) /= 'x') & call define_hist_field(n_fSil,"fSil","mmol/m^2/s",tstr2D, tcstr, & "silicate flux ice to ocn 
(cpl)", & "positive into ocean", c1, c0, & ns, f_fSil) - + if (f_fSil_ai(1:1) /= 'x') & call define_hist_field(n_fSil_ai,"fSil_ai","mmol/m^2/s",tstr2D, tcstr, & "silicate flux ice to ocean", & "weighted by ice area", c1, c0, & ns, f_fSil_ai) - + if (f_fhum(1:1) /= 'x') & call define_hist_field(n_fhum,"fhum","mmol/m^2/s",tstr2D, tcstr, & "humic matter (carbon) flux ice to ocn (cpl)", & "positive into ocean", c1, c0, & ns, f_fhum) - + if (f_fhum_ai(1:1) /= 'x') & call define_hist_field(n_fhum_ai,"fhum_ai","mmol/m^2/s",tstr2D, tcstr, & "humic matter (carbon) flux ice to ocean", & @@ -1336,19 +1336,19 @@ subroutine init_hist_bgc_2D "weighted by brine or skl volume ", c1, c0, & ns, f_grownet) - if (f_upNO(1:1) /= 'x') & + if (f_upNO(1:1) /= 'x') & call define_hist_field(n_upNO,"upNO","mmol/m^2/d",tstr2D, tcstr, & "Tot algal Nit uptake rate", & "weighted by ice area", c1, c0, & ns, f_upNO) - if (f_upNH(1:1) /= 'x') & + if (f_upNH(1:1) /= 'x') & call define_hist_field(n_upNH,"upNH","mmol/m^2/d",tstr2D, tcstr, & "Tot algal Am uptake rate", & "weighted by ice area", c1, c0,& ns, f_upNH) - ! vertical biogeochemistry + ! vertical biogeochemistry if (z_tracers) then if (f_fzaero(1:1) /= 'x') then @@ -1463,7 +1463,7 @@ subroutine init_hist_bgc_2D enddo endif !f_DONnet if (f_Fednet (1:1) /= 'x') then - do n = 1, n_fed + do n = 1, n_fed write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'dFe_net', trim(nchar) call define_hist_field(n_Fednet (n,:),vname_in,"umol/m^2",tstr2D, tcstr, & @@ -1471,9 +1471,9 @@ subroutine init_hist_bgc_2D "weighted by ice area ", c1, c0, & ns, f_Fednet ) enddo - endif !f_Fednet + endif !f_Fednet if (f_Fepnet (1:1) /= 'x') then - do n = 1, n_fep + do n = 1, n_fep write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'pFe_net', trim(nchar) call define_hist_field(n_Fepnet (n,:),vname_in,"umol/m^2",tstr2D, tcstr, & @@ -1481,7 +1481,7 @@ subroutine init_hist_bgc_2D "weighted by ice area ", c1, c0, & ns, f_Fepnet ) enddo - endif !f_Fepnet + endif !f_Fepnet if (f_Nitnet(1:1) /= 'x') & call define_hist_field(n_Nitnet,"Nit_net","mmol/m^2",tstr2D, tcstr, & "Net Nitrate", & @@ -1501,7 +1501,7 @@ subroutine init_hist_bgc_2D call define_hist_field(n_humnet,"hum_net","mmol/m^2",tstr2D, tcstr, & "Net humic material (carbon)", & "weighted by ice area", c1, c0, & - ns, f_humnet) + ns, f_humnet) if (f_DMSPpnet(1:1) /= 'x') & call define_hist_field(n_DMSPpnet,"DMSPp_net","mmol/m^2",tstr2D, tcstr, & "Net DMSPp", & @@ -1524,7 +1524,7 @@ subroutine init_hist_bgc_2D ns, f_PONnet) if (f_zaerosnow(1:1) /= 'x') then - do n = 1, n_zaero + do n = 1, n_zaero write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'zaero_snow', trim(nchar) call define_hist_field(n_zaerosnow(n,:),vname_in,"kg/m^2",tstr2D, tcstr, & @@ -1594,7 +1594,7 @@ subroutine init_hist_bgc_2D enddo endif !f_DONsnow if (f_Fedsnow (1:1) /= 'x') then - do n = 1, n_fed + do n = 1, n_fed write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'dFe_snow', trim(nchar) call define_hist_field(n_Fedsnow (n,:),vname_in,"umol/m^2",tstr2D, tcstr, & @@ -1602,9 +1602,9 @@ subroutine init_hist_bgc_2D "weighted by ice area ", c1, c0, & ns, f_Fedsnow ) enddo - endif !f_Fedsnow + endif !f_Fedsnow if (f_Fepsnow (1:1) /= 'x') then - do n = 1, n_fed + do n = 1, n_fed write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'pFe_snow', trim(nchar) call define_hist_field(n_Fepsnow (n,:),vname_in,"umol/m^2",tstr2D, tcstr, & @@ -1612,7 +1612,7 @@ subroutine init_hist_bgc_2D "weighted by ice area ", c1, c0, & ns, f_Fepsnow ) enddo - endif !f_Fepsnow + endif !f_Fepsnow if (f_Nitsnow(1:1) /= 'x') & 
call define_hist_field(n_Nitsnow,"Nit_snow","mmol/m^2",tstr2D, tcstr, & "Snow Nitrate", & @@ -1632,7 +1632,7 @@ subroutine init_hist_bgc_2D call define_hist_field(n_humsnow,"hum_snow","mmol/m^2",tstr2D, tcstr, & "Snow humic material (carbon)", & "weighted by ice area", c1, c0, & - ns, f_humsnow) + ns, f_humsnow) if (f_DMSPpsnow(1:1) /= 'x') & call define_hist_field(n_DMSPpsnow,"DMSPp_snow","mmol/m^2",tstr2D, tcstr, & "Snow DMSPp", & @@ -1655,7 +1655,7 @@ subroutine init_hist_bgc_2D ns, f_PONsnow) if (f_zaerofrac(1:1) /= 'x') then - do n = 1, n_zaero + do n = 1, n_zaero write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'zaero_frac', trim(nchar) call define_hist_field(n_zaerofrac(n,:),vname_in,"1",tstr2D, tcstr, & @@ -1715,7 +1715,7 @@ subroutine init_hist_bgc_2D enddo endif !f_DONfrac if (f_Fedfrac (1:1) /= 'x') then - do n = 1, n_fed + do n = 1, n_fed write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'dFe_frac', trim(nchar) call define_hist_field(n_Fedfrac (n,:),vname_in,"1",tstr2D, tcstr, & @@ -1723,9 +1723,9 @@ subroutine init_hist_bgc_2D "averaged over depth ", c1, c0, & ns, f_Fedfrac ) enddo - endif !f_Fedfrac + endif !f_Fedfrac if (f_Fepfrac (1:1) /= 'x') then - do n = 1, n_fep + do n = 1, n_fep write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'pFe_frac', trim(nchar) call define_hist_field(n_Fepfrac (n,:),vname_in,"1",tstr2D, tcstr, & @@ -1733,7 +1733,7 @@ subroutine init_hist_bgc_2D "averaged over depth ", c1, c0, & ns, f_Fepfrac ) enddo - endif !f_Fepfrac + endif !f_Fepfrac if (f_Nitfrac(1:1) /= 'x') & call define_hist_field(n_Nitfrac,"Nit_frac","1",tstr2D, tcstr, & "Mobile frac Nitrate", & @@ -1753,7 +1753,7 @@ subroutine init_hist_bgc_2D call define_hist_field(n_humfrac,"hum_frac","1",tstr2D, tcstr, & "Mobile frac humic material", & "averaged over depth", c1, c0, & - ns, f_humfrac) + ns, f_humfrac) if (f_DMSPpfrac(1:1) /= 'x') & call define_hist_field(n_DMSPpfrac,"DMSPp_frac","1",tstr2D, tcstr, & "Mobile frac DMSPp", & @@ -1787,8 +1787,8 @@ subroutine init_hist_bgc_2D endif ! histfreq(ns) /= 'x' enddo ! nstreams - endif ! tr_aero, etc - + endif ! tr_aero, etc + end subroutine init_hist_bgc_2D !======================================================================= @@ -1834,7 +1834,7 @@ subroutine init_hist_bgc_3Db real (kind=dbl_kind) :: secday logical (kind=log_kind) :: solve_zsal, z_tracers character(len=*), parameter :: subname = '(init_hist_bgc_3Db)' - + ! 
biology vertical grid call icepack_query_parameters(secday_out=secday) @@ -1848,7 +1848,7 @@ subroutine init_hist_bgc_3Db do ns = 1, nstreams if (histfreq(ns) /= 'x') then - + if (f_bTin(1:1) /= 'x') & call define_hist_field(n_bTin,"bTizn","C",tstr3Db, tcstr, & "ice internal temperatures on bio grid", & @@ -1859,27 +1859,27 @@ subroutine init_hist_bgc_3Db call define_hist_field(n_bphi,"bphizn","%",tstr3Db, tcstr, & "porosity", "brine volume fraction", c100, c0, & ns, f_bphi) - - if (f_iDi(1:1) /= 'x') & + + if (f_iDi(1:1) /= 'x') & call define_hist_field(n_iDi,"iDin","m^2/d",tstr3Db, tcstr, & "interface diffusivity", "on bio interface grid", secday, c0, & ns, f_iDi) - - if (f_iki(1:1) /= 'x') & + + if (f_iki(1:1) /= 'x') & call define_hist_field(n_iki,"ikin","mm^2",tstr3Db, tcstr, & "permeability", "on bio interface grid", 1.0e6_dbl_kind, c0, & ns, f_iki) - + if (f_bgc_S(1:1) /= 'x') & call define_hist_field(n_bgc_S,"bgc_S","ppt",tstr3Db, tcstr, & "bulk salinity", "on bio grid", c1, c0, & ns, f_bgc_S) - + if (f_zfswin(1:1) /= 'x') & call define_hist_field(n_zfswin,"zfswin","W/m^2",tstr3Db, tcstr, & "internal ice PAR", "on bio interface grid", c1, c0, & ns, f_zfswin) - + endif ! histfreq(ns) /= 'x' enddo ! ns @@ -1903,8 +1903,8 @@ subroutine accum_hist_bgc (iblk) use ice_flux, only: sss use ice_flux_bgc, only: fiso_atm, fiso_ocn, faero_atm, faero_ocn, & flux_bio, flux_bio_ai, fzsal_ai, fzsal_g_ai - use ice_history_shared, only: n2D, a2D, a3Dc, & - n3Dzcum, n3Dbcum, a3Db, a3Da, & + use ice_history_shared, only: n2D, a2D, a3Dc, & + n3Dzcum, n3Dbcum, a3Db, a3Da, & ncat_hist, accum_hist_field, nzblyr, nzalyr use ice_state, only: trcrn, trcr, aicen, aice, vicen @@ -1914,24 +1914,24 @@ subroutine accum_hist_bgc (iblk) ! local variables integer (kind=int_kind) :: & - i, j, n, k, & ! loop indices + i, j, n, k, & ! loop indices ilo,ihi,jlo,jhi ! beginning and end of physical domain - real (kind=dbl_kind), dimension (nx_block,ny_block,nblyr+4) :: & + real (kind=dbl_kind), dimension (nx_block,ny_block,nblyr+4) :: & workz, workz2 - real (kind=dbl_kind) :: & + real (kind=dbl_kind) :: & maxv, rhos, rhoi, rhow, puny, sk_l - real (kind=dbl_kind), dimension (nblyr+1) :: & + real (kind=dbl_kind), dimension (nblyr+1) :: & workv - real (kind=dbl_kind), dimension (nx_block,ny_block) :: & + real (kind=dbl_kind), dimension (nx_block,ny_block) :: & workni, worknj - integer (kind=int_kind), dimension (1) :: & + integer (kind=int_kind), dimension (1) :: & worki - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & workii logical (kind=log_kind) :: & @@ -1949,9 +1949,9 @@ subroutine accum_hist_bgc (iblk) integer (kind=int_kind), dimension(icepack_max_aero) :: & nlt_zaero_sw ! points to aerosol in trcrn_sw - + integer (kind=int_kind), dimension(icepack_max_algae) :: & - nt_bgc_N, nlt_bgc_N, & ! algae + nt_bgc_N, nlt_bgc_N, & ! algae nt_bgc_C, nlt_bgc_C, & ! nt_bgc_chl, nlt_bgc_chl ! @@ -2009,8 +2009,8 @@ subroutine accum_hist_bgc (iblk) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - - this_block = get_block(blocks_ice(iblk),iblk) + + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -2026,15 +2026,15 @@ subroutine accum_hist_bgc (iblk) if (tr_iso .or. tr_aero .or. tr_brine .or. solve_zsal .or. skl_bgc) then ! 
zsalinity - if (f_fzsal (1:1) /= 'x') & + if (f_fzsal (1:1) /= 'x') & call accum_hist_field(n_fzsal, iblk, fzsal(:,:,iblk), a2D) - if (f_fzsal_ai(1:1)/= 'x') & + if (f_fzsal_ai(1:1)/= 'x') & call accum_hist_field(n_fzsal_ai, iblk, fzsal_ai(:,:,iblk), a2D) - if (f_fzsal_g (1:1) /= 'x') & + if (f_fzsal_g (1:1) /= 'x') & call accum_hist_field(n_fzsal_g, iblk, fzsal_g(:,:,iblk), a2D) - if (f_fzsal_g_ai(1:1)/= 'x') & + if (f_fzsal_g_ai(1:1)/= 'x') & call accum_hist_field(n_fzsal_g_ai,iblk, fzsal_g_ai(:,:,iblk), a2D) - if (f_zsal (1:1) /= 'x') & + if (f_zsal (1:1) /= 'x') & call accum_hist_field(n_zsal, iblk, zsal_tot(:,:,iblk), a2D) ! isotopes @@ -2120,13 +2120,13 @@ subroutine accum_hist_bgc (iblk) enddo endif if (f_bgc_Fed (1:1)/= 'x') then - do n=1,n_fed + do n=1,n_fed call accum_hist_field(n_bgc_Fed (n,:), iblk, & sk_l*trcr(:,:,nt_bgc_Fed (n), iblk), a2D) enddo endif if (f_bgc_Fep (1:1)/= 'x') then - do n=1,n_fep + do n=1,n_fep call accum_hist_field(n_bgc_Fep (n,:), iblk, & sk_l*trcr(:,:,nt_bgc_Fep (n), iblk), a2D) enddo @@ -2139,32 +2139,32 @@ subroutine accum_hist_bgc (iblk) endif if (f_bgc_Nit(1:1)/= 'x') & call accum_hist_field(n_bgc_Nit, iblk, & - sk_l*trcr(:,:,nt_bgc_Nit, iblk), a2D) + sk_l*trcr(:,:,nt_bgc_Nit, iblk), a2D) if (f_bgc_Am(1:1)/= 'x') & call accum_hist_field(n_bgc_Am, iblk, & - sk_l*trcr(:,:,nt_bgc_Am, iblk), a2D) + sk_l*trcr(:,:,nt_bgc_Am, iblk), a2D) if (f_bgc_Sil(1:1)/= 'x') & call accum_hist_field(n_bgc_Sil, iblk, & - sk_l*trcr(:,:,nt_bgc_Sil, iblk), a2D) + sk_l*trcr(:,:,nt_bgc_Sil, iblk), a2D) if (f_bgc_hum(1:1)/= 'x') & call accum_hist_field(n_bgc_hum, iblk, & - sk_l*trcr(:,:,nt_bgc_hum, iblk), a2D) + sk_l*trcr(:,:,nt_bgc_hum, iblk), a2D) if (f_bgc_PON(1:1)/= 'x') & call accum_hist_field(n_bgc_PON, iblk, & - sk_l*trcr(:,:,nt_bgc_PON, iblk), a2D) + sk_l*trcr(:,:,nt_bgc_PON, iblk), a2D) if (f_bgc_DMSPp(1:1)/= 'x') & call accum_hist_field(n_bgc_DMSPp,iblk, & - sk_l*trcr(:,:,nt_bgc_DMSPp,iblk), a2D) + sk_l*trcr(:,:,nt_bgc_DMSPp,iblk), a2D) if (f_bgc_DMSPd(1:1)/= 'x') & call accum_hist_field(n_bgc_DMSPd,iblk, & - sk_l*trcr(:,:,nt_bgc_DMSPd,iblk), a2D) + sk_l*trcr(:,:,nt_bgc_DMSPd,iblk), a2D) if (f_bgc_DMS(1:1)/= 'x') & call accum_hist_field(n_bgc_DMS, iblk, & - sk_l*trcr(:,:,nt_bgc_DMS, iblk), a2D) + sk_l*trcr(:,:,nt_bgc_DMS, iblk), a2D) - endif !skl_bgc + endif !skl_bgc - ! skeletal layer and vertical bgc + ! 
skeletal layer and vertical bgc if (f_bgc_DOC_ml(1:1)/= 'x') then do n=1,n_doc @@ -2185,13 +2185,13 @@ subroutine accum_hist_bgc (iblk) enddo endif if (f_bgc_Fed_ml (1:1)/= 'x') then - do n=1,n_fed + do n=1,n_fed call accum_hist_field(n_bgc_Fed_ml (n,:), iblk, & ocean_bio(:,:,nlt_bgc_Fed (n), iblk), a2D) enddo endif if (f_bgc_Fep_ml (1:1)/= 'x') then - do n=1,n_fep + do n=1,n_fep call accum_hist_field(n_bgc_Fep_ml (n,:), iblk, & ocean_bio(:,:,nlt_bgc_Fep (n), iblk), a2D) enddo @@ -2204,22 +2204,22 @@ subroutine accum_hist_bgc (iblk) endif if (f_bgc_Nit_ml(1:1)/= 'x') & call accum_hist_field(n_bgc_Nit_ml, iblk, & - ocean_bio(:,:,nlt_bgc_Nit, iblk), a2D) + ocean_bio(:,:,nlt_bgc_Nit, iblk), a2D) if (f_bgc_Am_ml(1:1)/= 'x') & call accum_hist_field(n_bgc_Am_ml, iblk, & - ocean_bio(:,:,nlt_bgc_Am, iblk), a2D) + ocean_bio(:,:,nlt_bgc_Am, iblk), a2D) if (f_bgc_Sil_ml(1:1)/= 'x') & call accum_hist_field(n_bgc_Sil_ml, iblk, & - ocean_bio(:,:,nlt_bgc_Sil, iblk), a2D) + ocean_bio(:,:,nlt_bgc_Sil, iblk), a2D) if (f_bgc_hum_ml(1:1)/= 'x') & call accum_hist_field(n_bgc_hum_ml, iblk, & - ocean_bio(:,:,nlt_bgc_hum, iblk), a2D) + ocean_bio(:,:,nlt_bgc_hum, iblk), a2D) if (f_bgc_DMSP_ml(1:1)/= 'x') & call accum_hist_field(n_bgc_DMSP_ml, iblk, & - ocean_bio(:,:,nlt_bgc_DMSPd, iblk), a2D) + ocean_bio(:,:,nlt_bgc_DMSPd, iblk), a2D) if (f_bgc_DMS_ml(1:1)/= 'x') & call accum_hist_field(n_bgc_DMS_ml, iblk, & - ocean_bio(:,:,nlt_bgc_DMS, iblk), a2D) + ocean_bio(:,:,nlt_bgc_DMS, iblk), a2D) if (f_fNit (1:1) /= 'x') & call accum_hist_field(n_fNit, iblk, & @@ -2283,25 +2283,25 @@ subroutine accum_hist_bgc (iblk) enddo endif if (f_fFed (1:1)/= 'x') then - do n=1,n_fed + do n=1,n_fed call accum_hist_field(n_fFed (n,:), iblk, & flux_bio(:,:,nlt_bgc_Fed (n),iblk), a2D) enddo endif if (f_fFed_ai (1:1)/= 'x') then - do n=1,n_fed + do n=1,n_fed call accum_hist_field(n_fFed_ai (n,:), iblk, & flux_bio_ai(:,:,nlt_bgc_Fed (n),iblk), a2D) enddo endif if (f_fFep (1:1)/= 'x') then - do n=1,n_fep + do n=1,n_fep call accum_hist_field(n_fFep (n,:), iblk, & flux_bio(:,:,nlt_bgc_Fep (n),iblk), a2D) enddo endif if (f_fFep_ai (1:1)/= 'x') then - do n=1,n_fep + do n=1,n_fep call accum_hist_field(n_fFep_ai (n,:), iblk, & flux_bio_ai(:,:,nlt_bgc_Fep (n),iblk), a2D) enddo @@ -2347,7 +2347,7 @@ subroutine accum_hist_bgc (iblk) PP_net(:,:,iblk), a2D) if (f_grownet (1:1) /= 'x') & call accum_hist_field(n_grownet, iblk, & - grow_net(:,:,iblk), a2D) + grow_net(:,:,iblk), a2D) if (f_upNO (1:1) /= 'x') & call accum_hist_field(n_upNO, iblk, & upNO(:,:,iblk), a2D) @@ -2355,7 +2355,7 @@ subroutine accum_hist_bgc (iblk) call accum_hist_field(n_upNH, iblk, & upNH(:,:,iblk), a2D) - ! vertical biogeochemistry + ! vertical biogeochemistry if (z_tracers) then @@ -2396,7 +2396,7 @@ subroutine accum_hist_bgc (iblk) enddo ! n endif !f_algalpeak - ! + ! ! ice_bio_net ! 
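      ! A minimal sketch (not part of this patch) of the accumulation pattern
      ! the ice_bio_net hunks below repeat for each tracer family: a history
      ! flag f_Xnet guards a loop over that family's tracer count, and the
      ! vertically integrated content ice_bio_net(:,:,nlt,...) is accumulated
      ! into the 2D history buffer a2D. Names containing "foo" are hypothetical
      ! placeholders; the real calls substitute doc, dic, don, fed, fep, etc.
      !
      !   if (f_foonet (1:1) /= 'x') then
      !      do n = 1, n_foo
      !         call accum_hist_field(n_foonet(n,:), iblk, &
      !              ice_bio_net(:,:,nlt_bgc_foo(n), iblk), a2D)
      !      enddo
      !   endif !f_foonet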
if (f_zaeronet (1:1) /= 'x') then @@ -2424,35 +2424,35 @@ subroutine accum_hist_bgc (iblk) enddo endif !f_Cnet if (f_DOCnet (1:1) /= 'x') then - do n=1,n_doc + do n=1,n_doc call accum_hist_field(n_DOCnet(n,:), iblk, & ice_bio_net(:,:,nlt_bgc_DOC(n), iblk), a2D) enddo endif !f_DOCnet if (f_DICnet (1:1) /= 'x') then - do n=1,n_dic + do n=1,n_dic call accum_hist_field(n_DICnet(n,:), iblk, & ice_bio_net(:,:,nlt_bgc_DIC(n), iblk), a2D) enddo endif !f_DICnet if (f_DONnet (1:1) /= 'x') then - do n=1,n_don + do n=1,n_don call accum_hist_field(n_DONnet(n,:), iblk, & ice_bio_net(:,:,nlt_bgc_DON(n), iblk), a2D) enddo endif !f_DONnet if (f_Fednet (1:1) /= 'x') then - do n=1,n_fed + do n=1,n_fed call accum_hist_field(n_Fednet (n,:), iblk, & ice_bio_net(:,:,nlt_bgc_Fed (n), iblk), a2D) enddo - endif !f_Fednet + endif !f_Fednet if (f_Fepnet (1:1) /= 'x') then - do n=1,n_fep + do n=1,n_fep call accum_hist_field(n_Fepnet (n,:), iblk, & ice_bio_net(:,:,nlt_bgc_Fep (n), iblk), a2D) enddo - endif !f_Fepnet + endif !f_Fepnet if (f_Nitnet (1:1) /= 'x') & call accum_hist_field(n_Nitnet, iblk, & @@ -2480,7 +2480,7 @@ subroutine accum_hist_bgc (iblk) ice_bio_net(:,:,nlt_bgc_PON, iblk), a2D) ! ! snow_bio_net - ! + ! if (f_zaerosnow (1:1) /= 'x') then do n=1,n_zaero call accum_hist_field(n_zaerosnow(n,:), iblk, & @@ -2506,35 +2506,35 @@ subroutine accum_hist_bgc (iblk) enddo endif !f_Csnow if (f_DOCsnow (1:1) /= 'x') then - do n=1,n_doc + do n=1,n_doc call accum_hist_field(n_DOCsnow(n,:), iblk, & snow_bio_net(:,:,nlt_bgc_DOC(n), iblk), a2D) enddo endif !f_DOCsnow if (f_DICsnow (1:1) /= 'x') then - do n=1,n_dic + do n=1,n_dic call accum_hist_field(n_DICsnow(n,:), iblk, & snow_bio_net(:,:,nlt_bgc_DIC(n), iblk), a2D) enddo endif !f_DICsnow if (f_DONsnow (1:1) /= 'x') then - do n=1,n_don + do n=1,n_don call accum_hist_field(n_DONsnow(n,:), iblk, & snow_bio_net(:,:,nlt_bgc_DON(n), iblk), a2D) enddo endif !f_DONsnow if (f_Fedsnow (1:1) /= 'x') then - do n=1,n_fed + do n=1,n_fed call accum_hist_field(n_Fedsnow (n,:), iblk, & snow_bio_net(:,:,nlt_bgc_Fed (n), iblk), a2D) enddo - endif !f_Fedsnow + endif !f_Fedsnow if (f_Fepsnow (1:1) /= 'x') then - do n=1,n_fep + do n=1,n_fep call accum_hist_field(n_Fepsnow (n,:), iblk, & snow_bio_net(:,:,nlt_bgc_Fep (n), iblk), a2D) enddo - endif !f_Fepsnow + endif !f_Fepsnow if (f_Nitsnow (1:1) /= 'x') & call accum_hist_field(n_Nitsnow, iblk, & @@ -2562,7 +2562,7 @@ subroutine accum_hist_bgc (iblk) snow_bio_net(:,:,nlt_bgc_PON, iblk), a2D) ! ! mobile frac - ! + ! 
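      ! The mobile-fraction hunks below all use the same index arithmetic:
      ! the zbgc_frac tracers appear to occupy one contiguous block of trcr
      ! starting at nt_zbgc_frac, ordered by the bio-tracer indices nlt_bgc_*,
      ! so slot nt_zbgc_frac - 1 + nlt_bgc_foo(n) holds the mobile fraction of
      ! tracer foo. A hedged sketch with a hypothetical tracer family "foo"
      ! (the real code substitutes DOC, DIC, DON, Fed, Fep, ...):
      !
      !   if (f_foofrac (1:1) /= 'x') then
      !      do n = 1, n_foo
      !         call accum_hist_field(n_foofrac(n,:), iblk, &
      !              trcr(:,:,nt_zbgc_frac - 1 + nlt_bgc_foo(n), iblk), a2D)
      !      enddo
      !   endif !f_foofrac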
if (f_zaerofrac (1:1) /= 'x') then do n=1,n_zaero call accum_hist_field(n_zaerofrac(n,:), iblk, & @@ -2582,35 +2582,35 @@ subroutine accum_hist_bgc (iblk) enddo endif !f_Nfrac if (f_DOCfrac (1:1) /= 'x') then - do n=1,n_doc + do n=1,n_doc call accum_hist_field(n_DOCfrac(n,:), iblk, & trcr(:,:,nt_zbgc_frac - 1 + nlt_bgc_DOC(n), iblk), a2D) enddo endif !f_DOCfrac if (f_DICfrac (1:1) /= 'x') then - do n=1,n_dic + do n=1,n_dic call accum_hist_field(n_DICfrac(n,:), iblk, & trcr(:,:,nt_zbgc_frac - 1 + nlt_bgc_DIC(n), iblk), a2D) enddo endif !f_DICfrac if (f_DONfrac (1:1) /= 'x') then - do n=1,n_don + do n=1,n_don call accum_hist_field(n_DONfrac(n,:), iblk, & trcr(:,:,nt_zbgc_frac - 1 + nlt_bgc_DON(n), iblk), a2D) enddo endif !f_DONfrac if (f_Fedfrac (1:1) /= 'x') then - do n=1,n_fed + do n=1,n_fed call accum_hist_field(n_Fedfrac (n,:), iblk, & trcr(:,:,nt_zbgc_frac - 1 + nlt_bgc_Fed (n), iblk), a2D) enddo - endif !f_Fedfrac + endif !f_Fedfrac if (f_Fepfrac (1:1) /= 'x') then - do n=1,n_fep + do n=1,n_fep call accum_hist_field(n_Fepfrac (n,:), iblk, & trcr(:,:,nt_zbgc_frac - 1 + nlt_bgc_Fep (n), iblk), a2D) enddo - endif !f_Fepfrac + endif !f_Fepfrac if (f_Nitfrac (1:1) /= 'x') & call accum_hist_field(n_Nitfrac, iblk, & @@ -2623,7 +2623,7 @@ subroutine accum_hist_bgc (iblk) trcr(:,:,nt_zbgc_frac - 1 + nlt_bgc_Sil, iblk), a2D) if (f_humfrac (1:1) /= 'x') & call accum_hist_field(n_humfrac, iblk, & - trcr(:,:,nt_zbgc_frac - 1 + nlt_bgc_hum, iblk), a2D) + trcr(:,:,nt_zbgc_frac - 1 + nlt_bgc_hum, iblk), a2D) if (f_DMSPpfrac (1:1) /= 'x') & call accum_hist_field(n_DMSPpfrac, iblk, & trcr(:,:,nt_zbgc_frac - 1 + nlt_bgc_DMSPp, iblk), a2D) @@ -2736,11 +2736,11 @@ subroutine accum_hist_bgc (iblk) do i = ilo, ihi if (aicen(i,j,n,iblk) > c0) then workz(i,j,k) = workz(i,j,k) + iDi(i,j,k,n,iblk)*vicen(i,j,n,iblk)**2/aicen(i,j,n,iblk) - workz(i,j,nzblyr) = workz(i,j,nzblyr-1) + workz(i,j,nzblyr) = workz(i,j,nzblyr-1) endif enddo ! i enddo ! j - enddo ! k + enddo ! k enddo ! n call accum_hist_field(n_iDi-n3Dzcum, iblk, nzblyr, & workz(:,:,1:nzblyr), a3Db) @@ -2760,7 +2760,7 @@ subroutine accum_hist_bgc (iblk) endif enddo ! i enddo ! j - enddo ! k + enddo ! k enddo ! n call accum_hist_field(n_iki-n3Dzcum, iblk, nzblyr, & workz(:,:,1:nzblyr), a3Db) @@ -2778,7 +2778,7 @@ subroutine accum_hist_bgc (iblk) workz(:,:,:) = c0 do j = jlo, jhi do i = ilo, ihi - if (aice(i,j,iblk) > puny) then + if (aice(i,j,iblk) > puny) then workz(i,j,1:2) = & !snow trcr(i,j,nt_zaero(k)+nblyr+1:nt_zaero(k)+nblyr+2,iblk)/rhos workz(i,j,3:nblyr+3) = & !ice @@ -2786,7 +2786,7 @@ subroutine accum_hist_bgc (iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_zaero(k),iblk)/rhow !ocean endif enddo ! i - enddo ! j + enddo ! j call accum_hist_field(n_zaeros(k,:)-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) enddo !k @@ -2797,14 +2797,14 @@ subroutine accum_hist_bgc (iblk) workz2(:,:,:) = c0 do j = jlo, jhi do i = ilo, ihi - if (aice(i,j,iblk) > puny) then + if (aice(i,j,iblk) > puny) then workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_N(k)+nblyr+1:nt_bgc_N(k)+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice trcr(i,j,nt_bgc_N(k):nt_bgc_N(k)+nblyr,iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_N(k),iblk) !ocean endif - if (aicen(i,j,1,iblk) > puny) then + if (aicen(i,j,1,iblk) > puny) then workz2(i,j,1:2) = & !snow trcrn(i,j,nt_bgc_N(k)+nblyr+1:nt_bgc_N(k)+nblyr+2,1,iblk) workz2(i,j,3:nblyr+3) = & !ice @@ -2812,7 +2812,7 @@ subroutine accum_hist_bgc (iblk) workz2(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_N(k),iblk) !ocean endif enddo ! i - enddo ! j + enddo ! 
j call accum_hist_field(n_bgc_N(k,:)-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) call accum_hist_field(n_bgc_N_cat1(k,:)-n3Dbcum, iblk, nzalyr, & @@ -2824,7 +2824,7 @@ subroutine accum_hist_bgc (iblk) workz(:,:,:) = c0 do j = jlo, jhi do i = ilo, ihi - if (aice(i,j,iblk) > puny) then + if (aice(i,j,iblk) > puny) then workz(i,j,1:2) = & !snow R_C2N(k)*trcr(i,j,nt_bgc_N(k)+nblyr+1:nt_bgc_N(k)+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice @@ -2832,25 +2832,25 @@ subroutine accum_hist_bgc (iblk) workz(i,j,nblyr+4) = R_C2N(k)*ocean_bio(i,j,nlt_bgc_N(k),iblk) !ocean endif enddo ! i - enddo ! j + enddo ! j call accum_hist_field(n_bgc_C(k,:)-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) enddo !k endif !f_bgc_C if (f_bgc_DOC (1:1) /= 'x') then - do k = 1,n_doc + do k = 1,n_doc workz(:,:,:) = c0 workz2(:,:,:) = c0 do j = jlo, jhi do i = ilo, ihi - if (aice(i,j,iblk) > puny) then + if (aice(i,j,iblk) > puny) then workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_DOC(k)+nblyr+1:nt_bgc_DOC(k)+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice trcr(i,j,nt_bgc_DOC(k):nt_bgc_DOC(k)+nblyr,iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_DOC(k),iblk) !ocean endif - if (aicen(i,j,1,iblk) > puny) then + if (aicen(i,j,1,iblk) > puny) then workz2(i,j,1:2) = & !snow trcrn(i,j,nt_bgc_DOC(k)+nblyr+1:nt_bgc_DOC(k)+nblyr+2,1,iblk) workz2(i,j,3:nblyr+3) = & !ice @@ -2858,7 +2858,7 @@ subroutine accum_hist_bgc (iblk) workz2(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_DOC(k),iblk) !ocean endif enddo ! i - enddo ! j + enddo ! j call accum_hist_field(n_bgc_DOC(k,:)-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) call accum_hist_field(n_bgc_DOC_cat1(k,:)-n3Dbcum, iblk, nzalyr, & @@ -2866,19 +2866,19 @@ subroutine accum_hist_bgc (iblk) enddo !k endif !f_bgc_DOC if (f_bgc_DIC (1:1) /= 'x') then - do k = 1,n_dic + do k = 1,n_dic workz(:,:,:) = c0 workz2(:,:,:) = c0 do j = jlo, jhi do i = ilo, ihi - if (aice(i,j,iblk) > puny) then + if (aice(i,j,iblk) > puny) then workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_DIC(k)+nblyr+1:nt_bgc_DIC(k)+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice trcr(i,j,nt_bgc_DIC(k):nt_bgc_DIC(k)+nblyr,iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_DIC(k),iblk) !ocean endif - if (aicen(i,j,1,iblk) > puny) then + if (aicen(i,j,1,iblk) > puny) then workz2(i,j,1:2) = & !snow trcrn(i,j,nt_bgc_DIC(k)+nblyr+1:nt_bgc_DIC(k)+nblyr+2,1,iblk) workz2(i,j,3:nblyr+3) = & !ice @@ -2886,7 +2886,7 @@ subroutine accum_hist_bgc (iblk) workz2(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_DIC(k),iblk) !ocean endif enddo ! i - enddo ! j + enddo ! j call accum_hist_field(n_bgc_DIC(k,:)-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) call accum_hist_field(n_bgc_DIC_cat1(k,:)-n3Dbcum, iblk, nzalyr, & @@ -2894,19 +2894,19 @@ subroutine accum_hist_bgc (iblk) enddo !k endif !f_bgc_DIC if (f_bgc_DON (1:1) /= 'x') then - do k = 1,n_don + do k = 1,n_don workz(:,:,:) = c0 workz2(:,:,:) = c0 do j = jlo, jhi do i = ilo, ihi - if (aice(i,j,iblk) > puny) then + if (aice(i,j,iblk) > puny) then workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_DON(k)+nblyr+1:nt_bgc_DON(k)+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice trcr(i,j,nt_bgc_DON(k):nt_bgc_DON(k)+nblyr,iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_DON(k),iblk) !ocean endif - if (aicen(i,j,1,iblk) > puny) then + if (aicen(i,j,1,iblk) > puny) then workz2(i,j,1:2) = & !snow trcrn(i,j,nt_bgc_DON(k)+nblyr+1:nt_bgc_DON(k)+nblyr+2,1,iblk) workz2(i,j,3:nblyr+3) = & !ice @@ -2914,7 +2914,7 @@ subroutine accum_hist_bgc (iblk) workz2(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_DON(k),iblk) !ocean endif enddo ! i - enddo ! 
j + enddo ! j call accum_hist_field(n_bgc_DON(k,:)-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) call accum_hist_field(n_bgc_DON_cat1(k,:)-n3Dbcum, iblk, nzalyr, & @@ -2922,19 +2922,19 @@ subroutine accum_hist_bgc (iblk) enddo !k endif !f_bgc_DON if (f_bgc_Fed (1:1) /= 'x') then - do k = 1,n_fed + do k = 1,n_fed workz(:,:,:) = c0 workz2(:,:,:) = c0 do j = jlo, jhi do i = ilo, ihi - if (aice(i,j,iblk) > puny) then + if (aice(i,j,iblk) > puny) then workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_Fed (k)+nblyr+1:nt_bgc_Fed (k)+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice trcr(i,j,nt_bgc_Fed (k):nt_bgc_Fed (k)+nblyr,iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_Fed (k),iblk) !ocean endif - if (aicen(i,j,1,iblk) > puny) then + if (aicen(i,j,1,iblk) > puny) then workz2(i,j,1:2) = & !snow trcrn(i,j,nt_bgc_Fed (k)+nblyr+1:nt_bgc_Fed (k)+nblyr+2,1,iblk) workz2(i,j,3:nblyr+3) = & !ice @@ -2942,27 +2942,27 @@ subroutine accum_hist_bgc (iblk) workz2(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_Fed (k),iblk) !ocean endif enddo ! i - enddo ! j + enddo ! j call accum_hist_field(n_bgc_Fed (k,:)-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) call accum_hist_field(n_bgc_Fed_cat1 (k,:)-n3Dbcum, iblk, nzalyr, & workz2(:,:,1:nzalyr), a3Da) enddo !k - endif !f_bgc_Fed + endif !f_bgc_Fed if (f_bgc_Fep (1:1) /= 'x') then - do k = 1,n_fep + do k = 1,n_fep workz(:,:,:) = c0 workz2(:,:,:) = c0 do j = jlo, jhi do i = ilo, ihi - if (aice(i,j,iblk) > puny) then + if (aice(i,j,iblk) > puny) then workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_Fep (k)+nblyr+1:nt_bgc_Fep (k)+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice trcr(i,j,nt_bgc_Fep (k):nt_bgc_Fep (k)+nblyr,iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_Fep (k),iblk) !ocean endif - if (aicen(i,j,1,iblk) > puny) then + if (aicen(i,j,1,iblk) > puny) then workz2(i,j,1:2) = & !snow trcrn(i,j,nt_bgc_Fep (k)+nblyr+1:nt_bgc_Fep (k)+nblyr+2,1,iblk) workz2(i,j,3:nblyr+3) = & !ice @@ -2970,19 +2970,19 @@ subroutine accum_hist_bgc (iblk) workz2(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_Fep (k),iblk) !ocean endif enddo ! i - enddo ! j + enddo ! j call accum_hist_field(n_bgc_Fep (k,:)-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) call accum_hist_field(n_bgc_Fep_cat1 (k,:)-n3Dbcum, iblk, nzalyr, & workz2(:,:,1:nzalyr), a3Da) enddo !k - endif !f_bgc_Fep + endif !f_bgc_Fep if (f_bgc_chl (1:1) /= 'x') then do k = 1,n_algae workz(:,:,:) = c0 do j = jlo, jhi do i = ilo, ihi - if (aice(i,j,iblk) > puny) then + if (aice(i,j,iblk) > puny) then workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_chl(k)+nblyr+1:nt_bgc_chl(k)+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice @@ -2990,12 +2990,12 @@ subroutine accum_hist_bgc (iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_chl(k),iblk) !ocean endif enddo ! i - enddo ! j + enddo ! 
j call accum_hist_field(n_bgc_chl(k,:)-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) enddo !k endif !f_bgc_chl - + if (f_bgc_Nit (1:1) /= 'x') then workz(:,:,:) = c0 workz2(:,:,:) = c0 @@ -3005,18 +3005,18 @@ subroutine accum_hist_bgc (iblk) workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_Nit+nblyr+2:nt_bgc_Nit+nblyr+3,iblk) workz(i,j,3:nblyr+3) = & !ice - trcr(i,j,nt_bgc_Nit:nt_bgc_Nit+nblyr,iblk) + trcr(i,j,nt_bgc_Nit:nt_bgc_Nit+nblyr,iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_Nit,iblk) !ocean endif if (aicen(i,j,1,iblk) > puny) then workz2(i,j,1:2) = & !snow trcrn(i,j,nt_bgc_Nit+nblyr+2:nt_bgc_Nit+nblyr+3,1,iblk) workz2(i,j,3:nblyr+3) = & !ice - trcrn(i,j,nt_bgc_Nit:nt_bgc_Nit+nblyr,1,iblk) + trcrn(i,j,nt_bgc_Nit:nt_bgc_Nit+nblyr,1,iblk) workz2(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_Nit,iblk) !ocean endif enddo ! i - enddo ! j + enddo ! j call accum_hist_field(n_bgc_Nit-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) call accum_hist_field(n_bgc_Nit_cat1-n3Dbcum, iblk, nzalyr, & @@ -3032,18 +3032,18 @@ subroutine accum_hist_bgc (iblk) workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_Am+nblyr+1:nt_bgc_Am+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice - trcr(i,j,nt_bgc_Am:nt_bgc_Am+nblyr,iblk) + trcr(i,j,nt_bgc_Am:nt_bgc_Am+nblyr,iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_Am,iblk) !ocean endif if (aicen(i,j,1,iblk) > puny) then workz2(i,j,1:2) = & !snow trcrn(i,j,nt_bgc_Am+nblyr+1:nt_bgc_Am+nblyr+2,1,iblk) workz2(i,j,3:nblyr+3) = & !ice - trcrn(i,j,nt_bgc_Am:nt_bgc_Am+nblyr,1,iblk) + trcrn(i,j,nt_bgc_Am:nt_bgc_Am+nblyr,1,iblk) workz2(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_Am,iblk) !ocean endif enddo ! i - enddo ! j + enddo ! j call accum_hist_field(n_bgc_Am-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) call accum_hist_field(n_bgc_Am_cat1-n3Dbcum, iblk, nzalyr, & @@ -3059,24 +3059,24 @@ subroutine accum_hist_bgc (iblk) workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_Sil+nblyr+1:nt_bgc_Sil+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice - trcr(i,j,nt_bgc_Sil:nt_bgc_Sil+nblyr,iblk) + trcr(i,j,nt_bgc_Sil:nt_bgc_Sil+nblyr,iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_Sil,iblk) !ocean endif if (aicen(i,j,1,iblk) > puny) then workz2(i,j,1:2) = & !snow trcrn(i,j,nt_bgc_Sil+nblyr+1:nt_bgc_Sil+nblyr+2,1,iblk) workz2(i,j,3:nblyr+3) = & !ice - trcrn(i,j,nt_bgc_Sil:nt_bgc_Sil+nblyr,1,iblk) + trcrn(i,j,nt_bgc_Sil:nt_bgc_Sil+nblyr,1,iblk) workz2(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_Sil,iblk) !ocean endif enddo ! i - enddo ! j + enddo ! j call accum_hist_field(n_bgc_Sil-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) call accum_hist_field(n_bgc_Sil_cat1-n3Dbcum, iblk, nzalyr, & workz2(:,:,1:nzalyr), a3Da) endif - + if (f_bgc_hum (1:1) /= 'x') then workz(:,:,:) = c0 @@ -3087,24 +3087,24 @@ subroutine accum_hist_bgc (iblk) workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_hum+nblyr+1:nt_bgc_hum+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice - trcr(i,j,nt_bgc_hum:nt_bgc_hum+nblyr,iblk) + trcr(i,j,nt_bgc_hum:nt_bgc_hum+nblyr,iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_hum,iblk) !ocean endif if (aicen(i,j,1,iblk) > puny) then workz2(i,j,1:2) = & !snow trcrn(i,j,nt_bgc_hum+nblyr+1:nt_bgc_hum+nblyr+2,1,iblk) workz2(i,j,3:nblyr+3) = & !ice - trcrn(i,j,nt_bgc_hum:nt_bgc_hum+nblyr,1,iblk) + trcrn(i,j,nt_bgc_hum:nt_bgc_hum+nblyr,1,iblk) workz2(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_hum,iblk) !ocean endif enddo ! i - enddo ! j + enddo ! 
j call accum_hist_field(n_bgc_hum-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) call accum_hist_field(n_bgc_hum_cat1-n3Dbcum, iblk, nzalyr, & workz2(:,:,1:nzalyr), a3Da) endif - + if (f_bgc_DMSPd (1:1) /= 'x') then workz(:,:,:) = c0 workz2(:,:,:) = c0 @@ -3114,23 +3114,23 @@ subroutine accum_hist_bgc (iblk) workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_DMSPd+nblyr+1:nt_bgc_DMSPd+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice - trcr(i,j,nt_bgc_DMSPd:nt_bgc_DMSPd+nblyr,iblk) + trcr(i,j,nt_bgc_DMSPd:nt_bgc_DMSPd+nblyr,iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_DMSPd,iblk) !ocean endif if (aicen(i,j,1,iblk) > puny) then workz2(i,j,1:2) = & !snow trcrn(i,j,nt_bgc_DMSPd+nblyr+1:nt_bgc_DMSPd+nblyr+2,1,iblk) workz2(i,j,3:nblyr+3) = & !ice - trcrn(i,j,nt_bgc_DMSPd:nt_bgc_DMSPd+nblyr,1,iblk) + trcrn(i,j,nt_bgc_DMSPd:nt_bgc_DMSPd+nblyr,1,iblk) workz2(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_DMSPd,iblk) !ocean endif enddo ! i - enddo ! j + enddo ! j call accum_hist_field(n_bgc_DMSPd-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) call accum_hist_field(n_bgc_DMSPd_cat1-n3Dbcum, iblk, nzalyr, & workz2(:,:,1:nzalyr), a3Da) - endif + endif if (f_bgc_DMSPp (1:1) /= 'x') then workz(:,:,:) = c0 @@ -3140,11 +3140,11 @@ subroutine accum_hist_bgc (iblk) workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_DMSPp+nblyr+1:nt_bgc_DMSPp+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice - trcr(i,j,nt_bgc_DMSPp:nt_bgc_DMSPp+nblyr,iblk) + trcr(i,j,nt_bgc_DMSPp:nt_bgc_DMSPp+nblyr,iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_DMSPp,iblk) !ocean endif enddo ! i - enddo ! j + enddo ! j call accum_hist_field(n_bgc_DMSPp-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) endif @@ -3158,18 +3158,18 @@ subroutine accum_hist_bgc (iblk) workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_DMS+nblyr+1:nt_bgc_DMS+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice - trcr(i,j,nt_bgc_DMS:nt_bgc_DMS+nblyr,iblk) + trcr(i,j,nt_bgc_DMS:nt_bgc_DMS+nblyr,iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_DMS,iblk) !ocean endif if (aicen(i,j,1,iblk) > puny) then workz2(i,j,1:2) = & !snow trcrn(i,j,nt_bgc_DMS+nblyr+1:nt_bgc_DMS+nblyr+2,1,iblk) workz2(i,j,3:nblyr+3) = & !ice - trcrn(i,j,nt_bgc_DMS:nt_bgc_DMS+nblyr,1,iblk) + trcrn(i,j,nt_bgc_DMS:nt_bgc_DMS+nblyr,1,iblk) workz2(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_DMS,iblk) !ocean endif enddo ! i - enddo ! j + enddo ! j call accum_hist_field(n_bgc_DMS-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) call accum_hist_field(n_bgc_DMS_cat1-n3Dbcum, iblk, nzalyr, & @@ -3185,18 +3185,18 @@ subroutine accum_hist_bgc (iblk) workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_PON+nblyr+1:nt_bgc_PON+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice - trcr(i,j,nt_bgc_PON:nt_bgc_PON+nblyr,iblk) + trcr(i,j,nt_bgc_PON:nt_bgc_PON+nblyr,iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_PON,iblk) !ocean endif if (aicen(i,j,1,iblk) > puny) then workz2(i,j,1:2) = & !snow trcrn(i,j,nt_bgc_PON+nblyr+1:nt_bgc_PON+nblyr+2,1,iblk) workz2(i,j,3:nblyr+3) = & !ice - trcrn(i,j,nt_bgc_PON:nt_bgc_PON+nblyr,1,iblk) + trcrn(i,j,nt_bgc_PON:nt_bgc_PON+nblyr,1,iblk) workz2(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_PON,iblk) !ocean endif enddo ! i - enddo ! j + enddo ! j call accum_hist_field(n_bgc_PON-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) call accum_hist_field(n_bgc_PON_cat1-n3Dbcum, iblk, nzalyr, & @@ -3220,19 +3220,19 @@ subroutine init_hist_bgc_3Da character (len=3) :: nchar character (len=16):: vname_in ! 
variable name character(len=*), parameter :: subname = '(init_hist_bgc_3Da)' - + call icepack_query_parameters(z_tracers_out=z_tracers) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) ! snow+bio grid - + if (z_tracers) then do ns = 1, nstreams if (histfreq(ns) /= 'x') then - + !---------------------------------------------------------------------------- ! snow+bio grid ==> ! 1:2 snow (surface layer +interior), 3:nblyr+2 ice (bio grid), nblyr+3 ocean @@ -3247,12 +3247,12 @@ subroutine init_hist_bgc_3Da ns, f_zaero) enddo endif - - if (f_bgc_Nit(1:1) /= 'x') & + + if (f_bgc_Nit(1:1) /= 'x') & call define_hist_field(n_bgc_Nit,"bgc_Nit","mmol/m^3",tstr3Da, tcstr, & "bulk nitrate ", "snow+bio grid", c1, c0, & ns, f_bgc_Nit) - + if (f_bgc_Am(1:1) /= 'x') & call define_hist_field(n_bgc_Am,"bgc_Am","mmol/m^3",tstr3Da, tcstr, & "bulk ammonia/um ", "snow+bio grid", c1, c0, & @@ -3313,7 +3313,7 @@ subroutine init_hist_bgc_3Da enddo endif if (f_bgc_Fed (1:1) /= 'x') then - do n=1,n_fed + do n=1,n_fed write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'bgc_Fed', trim(nchar) call define_hist_field(n_bgc_Fed (n,:),vname_in,"umol/m^3",tstr3Da, tcstr, & @@ -3322,7 +3322,7 @@ subroutine init_hist_bgc_3Da enddo endif if (f_bgc_Fep (1:1) /= 'x') then - do n=1,n_fep + do n=1,n_fep write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'bgc_Fep', trim(nchar) call define_hist_field(n_bgc_Fep (n,:),vname_in,"umol/m^3",tstr3Da, tcstr, & @@ -3330,32 +3330,32 @@ subroutine init_hist_bgc_3Da ns, f_bgc_Fep ) enddo endif - + if (f_bgc_Sil(1:1) /= 'x') & call define_hist_field(n_bgc_Sil,"bgc_Sil","mmol/m^3",tstr3Da, tcstr, & "bulk silicate ", "snow+bio grid", c1, c0, & ns, f_bgc_Sil) - + if (f_bgc_hum(1:1) /= 'x') & call define_hist_field(n_bgc_hum,"bgc_hum","mmol/m^3",tstr3Da, tcstr, & "bulk humic (carbon) material ", "snow+bio grid", c1, c0, & ns, f_bgc_hum) - + if (f_bgc_DMSPp(1:1) /= 'x') & call define_hist_field(n_bgc_DMSPp,"bgc_DMSPp","mmol/m^3",tstr3Da, tcstr, & "bulk algal DMSP ", "snow+bio grid", c1, c0,& ns, f_bgc_DMSPp) - + if (f_bgc_DMSPd(1:1) /= 'x') & call define_hist_field(n_bgc_DMSPd,"bgc_DMSPd","mmol/m^3",tstr3Da, tcstr, & "bulk dissolved DMSP ", "snow+bio grid", c1, c0, & ns, f_bgc_DMSPd) - + if (f_bgc_DMS(1:1) /= 'x') & call define_hist_field(n_bgc_DMS,"bgc_DMS","mmol/m^3",tstr3Da, tcstr, & "bulk DMS gas ", "snow+bio grid", c1, c0, & ns, f_bgc_DMS) - + if (f_bgc_PON(1:1) /= 'x') & call define_hist_field(n_bgc_PON,"bgc_PON","mmol/m^3",tstr3Da, tcstr, & "other bulk nitrogen pool ", "snow+bio grid", c1, c0, & @@ -3365,11 +3365,11 @@ subroutine init_hist_bgc_3Da ! 
Category 1 BGC !---------------------------------------------- - if (f_bgc_Nit_cat1(1:1) /= 'x') & + if (f_bgc_Nit_cat1(1:1) /= 'x') & call define_hist_field(n_bgc_Nit_cat1,"bgc_Nit_cat1","mmol/m^3",tstr3Da, tcstr, & "bulk nitrate in cat 1 ", "snow+bio grid", c1, c0, & ns, f_bgc_Nit_cat1) - + if (f_bgc_Am_cat1(1:1) /= 'x') & call define_hist_field(n_bgc_Am_cat1,"bgc_Am_cat1","mmol/m^3",tstr3Da, tcstr, & "bulk ammonia/um in cat 1", "snow+bio grid", c1, c0, & @@ -3412,7 +3412,7 @@ subroutine init_hist_bgc_3Da enddo endif if (f_bgc_Fed_cat1 (1:1) /= 'x') then - do n=1,n_fed + do n=1,n_fed write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'bgc_Fed_cat1', trim(nchar) call define_hist_field(n_bgc_Fed_cat1 (n,:),vname_in,"umol/m^3",tstr3Da, tcstr, & @@ -3421,7 +3421,7 @@ subroutine init_hist_bgc_3Da enddo endif if (f_bgc_Fep_cat1 (1:1) /= 'x') then - do n=1,n_fep + do n=1,n_fep write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'bgc_Fep_cat1', trim(nchar) call define_hist_field(n_bgc_Fep_cat1 (n,:),vname_in,"umol/m^3",tstr3Da, tcstr, & @@ -3429,32 +3429,32 @@ subroutine init_hist_bgc_3Da ns, f_bgc_Fep_cat1 ) enddo endif - + if (f_bgc_Sil_cat1(1:1) /= 'x') & call define_hist_field(n_bgc_Sil_cat1,"bgc_Sil_cat1","mmol/m^3",tstr3Da, tcstr, & "bulk silicate in cat 1", "snow+bio grid", c1, c0, & ns, f_bgc_Sil_cat1) - + if (f_bgc_hum_cat1(1:1) /= 'x') & call define_hist_field(n_bgc_hum,"bgc_hum_cat1","mmol/m^3",tstr3Da, tcstr, & "bulk humic (carbon) material in cat 1", "snow+bio grid", c1, c0, & ns, f_bgc_hum_cat1) - + if (f_bgc_DMSPd_cat1(1:1) /= 'x') & call define_hist_field(n_bgc_DMSPd_cat1,"bgc_DMSPd_cat1","mmol/m^3",tstr3Da, tcstr, & "bulk dissolved DMSP in cat 1", "snow+bio grid", c1, c0, & ns, f_bgc_DMSPd_cat1) - + if (f_bgc_DMS_cat1(1:1) /= 'x') & call define_hist_field(n_bgc_DMS_cat1,"bgc_DMS_cat1","mmol/m^3",tstr3Da, tcstr, & "bulk DMS gas in cat 1", "snow+bio grid", c1, c0, & ns, f_bgc_DMS_cat1) - + if (f_bgc_PON_cat1(1:1) /= 'x') & call define_hist_field(n_bgc_PON_cat1,"bgc_PON_cat1","mmol/m^3",tstr3Da, tcstr, & "other bulk nitrogen pool in cat 1", "snow+bio grid", c1, c0, & ns, f_bgc_PON_cat1) - + endif ! histfreq(ns) /= 'x' enddo !ns @@ -3473,7 +3473,7 @@ subroutine init_history_bgc use ice_arrays_column, only: PP_net, grow_net, hbri, & ice_bio_net, snow_bio_net, fbio_snoice, fbio_atmice, & - fzsal, fzsal_g, zfswin + fzsal, fzsal_g, zfswin use ice_flux_bgc, only: flux_bio, flux_bio_ai, fnit, fsil, & famm, fdmsp, fdms, fhum, fdust, falgalN, fdoc, fdic, & fdon, ffep, ffed diff --git a/cicecore/cicedynB/analysis/ice_history_drag.F90 b/cicecore/cicedynB/analysis/ice_history_drag.F90 index c0a1f99bd..fba19b364 100644 --- a/cicecore/cicedynB/analysis/ice_history_drag.F90 +++ b/cicecore/cicedynB/analysis/ice_history_drag.F90 @@ -1,7 +1,7 @@ !======================================================================= ! 2013 module for form drag parameters -! authors Michel Tsamados, David Schroeder, CPOM +! authors Michel Tsamados, David Schroeder, CPOM module ice_history_drag @@ -17,7 +17,7 @@ module ice_history_drag implicit none private public :: accum_hist_drag, init_hist_drag_2D - + !--------------------------------------------------------------- ! 
flags: write to output file if true or histfreq value !--------------------------------------------------------------- @@ -31,7 +31,7 @@ module ice_history_drag !--------------------------------------------------------------- namelist / icefields_drag_nml / & - f_Cdn_atm, f_Cdn_ocn , & + f_Cdn_atm, f_Cdn_ocn , & f_drag !--------------------------------------------------------------- @@ -47,7 +47,7 @@ module ice_history_drag n_Cdn_atm_skin , n_Cdn_atm_floe, & n_Cdn_atm_pond , n_Cdn_atm_rdg, & n_Cdn_ocn_skin , n_Cdn_ocn_floe, & - n_Cdn_ocn_keel , n_Cdn_atm_ratio + n_Cdn_ocn_keel , n_Cdn_atm_ratio !======================================================================= @@ -124,43 +124,43 @@ subroutine init_hist_drag_2D "hdraft: draught", & "none", c1, c0, & ns, f_drag) - + if (f_drag(1:1) /= 'x') & call define_hist_field(n_hridge,"hridge","m",tstr2D, tcstr, & "hridge: ridge height", & "none", c1, c0, & ns, f_drag) - + if (f_drag(1:1) /= 'x') & call define_hist_field(n_distrdg,"distrdg","m",tstr2D, tcstr, & "distrdg: distance between ridges", & "none", c1, c0, & - ns, f_drag) + ns, f_drag) if (f_drag(1:1) /= 'x') & call define_hist_field(n_hkeel,"hkeel","m",tstr2D, tcstr, & "hkeel: keel depth", & "none", c1, c0, & ns, f_drag) - + if (f_drag(1:1) /= 'x') & call define_hist_field(n_dkeel,"dkeel","m",tstr2D, tcstr, & "dkeel: distance between keels", & "none", c1, c0, & - ns, f_drag) + ns, f_drag) if (f_drag(1:1) /= 'x') & call define_hist_field(n_lfloe,"lfloe","m",tstr2D, tcstr, & "lfloe: floe length", & "none", c1, c0, & ns, f_drag) - + if (f_drag(1:1) /= 'x') & call define_hist_field(n_dfloe,"dfloe","m",tstr2D, tcstr, & "dfloe: distance between floes", & "none", c1, c0, & - ns, f_drag) - + ns, f_drag) + if (f_Cdn_atm(1:1) /= 'x') & call define_hist_field(n_Cdn_atm,"Cdn_atm","none",tstr2D, tcstr, & "Ca: total ice-atm drag coefficient", & @@ -172,49 +172,49 @@ subroutine init_hist_drag_2D "Cdn_ocn: total ice-ocn drag coefficient", & "none", c1, c0, & ns, f_Cdn_ocn) - + if (f_drag(1:1) /= 'x') & call define_hist_field(n_Cdn_atm_skin,"Cdn_atm_skin","none", & tstr2D, tcstr, & "Cdn_atm_skin: neutral skin ice-atm drag coefficient", & "none", c1, c0, & ns, f_drag) - + if (f_drag(1:1) /= 'x') & call define_hist_field(n_Cdn_atm_floe,"Cdn_atm_floe","none", & tstr2D, tcstr, & "Cdn_atm_floe: neutral floe edge ice-atm drag coefficient", & "none", c1, c0, & - ns, f_drag) - + ns, f_drag) + if (f_drag(1:1) /= 'x') & call define_hist_field(n_Cdn_atm_pond,"Cdn_atm_pond","none", & tstr2D, tcstr, & "Cdn_atm_pond: neutral pond edge ice-atm drag coefficient", & "none", c1, c0, & ns, f_drag) - + if (f_drag(1:1) /= 'x') & call define_hist_field(n_Cdn_atm_rdg,"Cdn_atm_rdg","none", & tstr2D, tcstr, & "Cdn_atm_rdg: neutral ridge ice-atm drag coefficient", & "none", c1, c0, & ns, f_drag) - + if (f_drag(1:1) /= 'x') & call define_hist_field(n_Cdn_ocn_skin,"Cdn_ocn_skin","none", & tstr2D, tcstr, & "Cdn_ocn_skin: neutral skin ice-ocn drag coefficient", & "none", c1, c0, & ns, f_drag) - + if (f_drag(1:1) /= 'x') & call define_hist_field(n_Cdn_ocn_floe,"Cdn_ocn_floe","none", & tstr2D, tcstr, & "Cdn_ocn_floe: neutral floe edge ice-ocn drag coefficient", & "none", c1, c0, & - ns, f_drag) - + ns, f_drag) + if (f_drag(1:1) /= 'x') & call define_hist_field(n_Cdn_ocn_keel,"Cdn_ocn_keel","none", & tstr2D, tcstr, & @@ -281,21 +281,21 @@ subroutine accum_hist_drag (iblk) call accum_hist_field(n_lfloe, iblk, lfloe(:,:,iblk), a2D) call accum_hist_field(n_dfloe, iblk, dfloe(:,:,iblk), a2D) call accum_hist_field(n_Cdn_atm_rdg, & - iblk, 
Cdn_atm_rdg(:,:,iblk), a2D) + iblk, Cdn_atm_rdg(:,:,iblk), a2D) call accum_hist_field(n_Cdn_atm_floe, & iblk, Cdn_atm_floe(:,:,iblk), a2D) call accum_hist_field(n_Cdn_atm_pond, & iblk, Cdn_atm_pond(:,:,iblk), a2D) call accum_hist_field(n_Cdn_atm_skin, & - iblk, Cdn_atm_skin(:,:,iblk), a2D) + iblk, Cdn_atm_skin(:,:,iblk), a2D) call accum_hist_field(n_Cdn_atm_ratio, & iblk, Cdn_atm_ratio(:,:,iblk), a2D) call accum_hist_field(n_Cdn_ocn_keel, & - iblk, Cdn_ocn_keel(:,:,iblk), a2D) + iblk, Cdn_ocn_keel(:,:,iblk), a2D) call accum_hist_field(n_Cdn_ocn_floe, & iblk, Cdn_ocn_floe(:,:,iblk), a2D) call accum_hist_field(n_Cdn_ocn_skin, & - iblk, Cdn_ocn_skin(:,:,iblk), a2D) + iblk, Cdn_ocn_skin(:,:,iblk), a2D) end if endif ! if(allocated(a2D)) diff --git a/cicecore/cicedynB/analysis/ice_history_fsd.F90 b/cicecore/cicedynB/analysis/ice_history_fsd.F90 index c64ecbefa..50fee99e7 100644 --- a/cicecore/cicedynB/analysis/ice_history_fsd.F90 +++ b/cicecore/cicedynB/analysis/ice_history_fsd.F90 @@ -21,7 +21,7 @@ module ice_history_fsd private public :: accum_hist_fsd, init_hist_fsd_2D, init_hist_fsd_3Df, & init_hist_fsd_4Df - + !--------------------------------------------------------------- ! flags: write to output file if true or histfreq value !--------------------------------------------------------------- @@ -79,7 +79,6 @@ subroutine init_hist_fsd_2D integer (kind=int_kind) :: ns integer (kind=int_kind) :: nml_error ! namelist i/o error flag - real (kind=dbl_kind) :: secday logical (kind=log_kind) :: tr_fsd, wave_spec character(len=*), parameter :: subname = '(init_hist_fsd_2D)' @@ -273,12 +272,12 @@ subroutine init_hist_fsd_4Df if (histfreq(ns) /= 'x') then if (f_afsdn(1:1) /= 'x') & - call define_hist_field(n_afsdn,"afsdn","1",tstr4Df, tcstr, & + call define_hist_field(n_afsdn,"afsdn","1",tstr4Df, tcstr, & "areal floe size and thickness distribution", & "per unit bin width", c1, c0, ns, f_afsdn) endif ! if (histfreq(ns) /= 'x') then - enddo ! ns + enddo ! ns endif ! tr_fsd @@ -398,7 +397,7 @@ subroutine accum_hist_fsd (iblk) if (f_fsdrad(1:1) /= 'x') then do j = 1, ny_block do i = 1, nx_block - worka(i,j) = c0 + worka(i,j) = c0 if (aice_init(i,j,iblk) > puny) then do k = 1, nfsd_hist do n = 1, ncat_hist @@ -450,7 +449,7 @@ subroutine accum_hist_fsd (iblk) end do call accum_hist_field(n_afsd-n3Dacum, iblk, nfsd_hist, worke, a3Df) endif - + if (f_dafsd_newi(1:1)/= 'x') & call accum_hist_field(n_dafsd_newi-n3Dacum, iblk, nfsd_hist, & d_afsd_newi(:,:,1:nfsd_hist,iblk), a3Df) @@ -473,7 +472,7 @@ subroutine accum_hist_fsd (iblk) if (f_afsdn(1:1) /= 'x') then do n = 1, ncat_hist - do k = 1, nfsd_hist + do k = 1, nfsd_hist do j = 1, ny_block do i = 1, nx_block workd(i,j,k,n) = trcrn(i,j,nt_fsd+k-1,n,iblk) & diff --git a/cicecore/cicedynB/analysis/ice_history_mechred.F90 b/cicecore/cicedynB/analysis/ice_history_mechred.F90 index 920a83b47..98c58bc39 100644 --- a/cicecore/cicedynB/analysis/ice_history_mechred.F90 +++ b/cicecore/cicedynB/analysis/ice_history_mechred.F90 @@ -20,7 +20,7 @@ module ice_history_mechred implicit none private public :: accum_hist_mechred, init_hist_mechred_2D, init_hist_mechred_3Dc - + !--------------------------------------------------------------- ! 
flags: write to output file if true or histfreq value !--------------------------------------------------------------- @@ -188,13 +188,13 @@ subroutine init_hist_mechred_2D "ice area ridging rate", & "none", secday*c100, c0, & ns, f_dardg1dt) - + if (f_dardg2dt(1:1) /= 'x') & call define_hist_field(n_dardg2dt,"dardg2dt","%/day",tstr2D, tcstr, & "ridge area formation rate", & "none", secday*c100, c0, & ns, f_dardg2dt) - + if (f_dvirdgdt(1:1) /= 'x') & call define_hist_field(n_dvirdgdt,"dvirdgdt","cm/day",tstr2D, tcstr, & "ice volume ridging rate", & diff --git a/cicecore/cicedynB/analysis/ice_history_pond.F90 b/cicecore/cicedynB/analysis/ice_history_pond.F90 index 365bd4410..f6e4b8737 100644 --- a/cicecore/cicedynB/analysis/ice_history_pond.F90 +++ b/cicecore/cicedynB/analysis/ice_history_pond.F90 @@ -20,7 +20,7 @@ module ice_history_pond implicit none private public :: accum_hist_pond, init_hist_pond_2D, init_hist_pond_3Dc - + !--------------------------------------------------------------- ! flags: write to output file if true or histfreq value !--------------------------------------------------------------- @@ -40,9 +40,9 @@ module ice_history_pond namelist / icefields_pond_nml / & f_apondn, f_apeffn , & f_hpondn, & - f_apond, f_apond_ai , & - f_hpond, f_hpond_ai , & - f_ipond, f_ipond_ai , & + f_apond, f_apond_ai , & + f_hpond, f_hpond_ai , & + f_ipond, f_ipond_ai , & f_apeff, f_apeff_ai !--------------------------------------------------------------- @@ -50,7 +50,7 @@ module ice_history_pond !--------------------------------------------------------------- integer (kind=int_kind), dimension(max_nstrm) :: & - n_apondn , n_apeffn , & + n_apondn , n_apeffn , & n_hpondn , & n_apond , n_apond_ai, & n_hpond , n_hpond_ai, & @@ -147,7 +147,7 @@ subroutine init_hist_pond_2D ns, f_apond) if (f_apond_ai(1:1) /= 'x') & - call define_hist_field(n_apond_ai,"apond_ai","1",tstr2D, tcstr, & + call define_hist_field(n_apond_ai,"apond_ai","1",tstr2D, tcstr, & "melt pond fraction of grid cell", & "weighted by ice area", c1, c0, & ns, f_apond_ai) @@ -159,7 +159,7 @@ subroutine init_hist_pond_2D ns, f_hpond) if (f_hpond_ai(1:1) /= 'x') & - call define_hist_field(n_hpond_ai,"hpond_ai","m",tstr2D, tcstr, & + call define_hist_field(n_hpond_ai,"hpond_ai","m",tstr2D, tcstr, & "mean melt pond depth over grid cell", & "weighted by ice area", c1, c0, & ns, f_hpond) @@ -171,7 +171,7 @@ subroutine init_hist_pond_2D ns, f_ipond) if (f_ipond_ai(1:1) /= 'x') & - call define_hist_field(n_ipond_ai,"ipond_ai","m",tstr2D, tcstr, & + call define_hist_field(n_ipond_ai,"ipond_ai","m",tstr2D, tcstr, & "mean pond ice thickness over grid cell", & "weighted by ice area", c1, c0, & ns, f_ipond_ai) @@ -192,7 +192,7 @@ subroutine init_hist_pond_2D enddo ! nstreams endif ! tr_pond - + end subroutine init_hist_pond_2D !======================================================================= @@ -212,14 +212,14 @@ subroutine init_hist_pond_3Dc file=__FILE__, line=__LINE__) if (tr_pond) then - + ! 
3D (category) variables must be looped separately do ns = 1, nstreams if (histfreq(ns) /= 'x') then if (f_apondn(1:1) /= 'x') & call define_hist_field(n_apondn,"apondn","1",tstr3Dc, tcstr, & - "melt pond fraction, category","none", c1, c0, & + "melt pond fraction, category","none", c1, c0, & ns, f_apondn) if (f_hpondn(1:1) /= 'x') & @@ -268,9 +268,13 @@ subroutine accum_hist_pond (iblk) integer (kind=int_kind) :: & nt_apnd, nt_hpnd, nt_alvl, nt_ipnd - +#ifdef UNDEPRECATE_CESMPONDS logical (kind=log_kind) :: & tr_pond_cesm, tr_pond_lvl, tr_pond_topo +#else + logical (kind=log_kind) :: & + tr_pond_lvl, tr_pond_topo +#endif real (kind=dbl_kind) :: & puny @@ -285,8 +289,13 @@ subroutine accum_hist_pond (iblk) !--------------------------------------------------------------- call icepack_query_parameters(puny_out=puny) +#ifdef UNDEPRECATE_CESMPONDS call icepack_query_tracer_flags(tr_pond_cesm_out=tr_pond_cesm, & tr_pond_lvl_out=tr_pond_lvl, tr_pond_topo_out=tr_pond_topo) +#else + call icepack_query_tracer_flags(tr_pond_lvl_out=tr_pond_lvl, & + tr_pond_topo_out=tr_pond_topo) +#endif call icepack_query_tracer_indices(nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, & nt_alvl_out=nt_alvl, nt_ipnd_out=nt_ipnd) call icepack_warnings_flush(nu_diag) @@ -294,6 +303,7 @@ subroutine accum_hist_pond (iblk) file=__FILE__, line=__LINE__) if (allocated(a2D)) then +#ifdef UNDEPRECATE_CESMPONDS if (tr_pond_cesm) then if (f_apond(1:1)/= 'x') & call accum_hist_field(n_apond, iblk, & @@ -311,6 +321,9 @@ subroutine accum_hist_pond (iblk) * trcr(:,:,nt_hpnd,iblk), a2D) elseif (tr_pond_lvl) then +#else + if (tr_pond_lvl) then +#endif if (f_apond(1:1)/= 'x') & call accum_hist_field(n_apond, iblk, & trcr(:,:,nt_alvl,iblk) * trcr(:,:,nt_apnd,iblk), a2D) @@ -363,7 +376,7 @@ subroutine accum_hist_pond (iblk) * trcr(:,:,nt_ipnd,iblk), a2D) endif ! ponds - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo diff --git a/cicecore/cicedynB/analysis/ice_history_shared.F90 b/cicecore/cicedynB/analysis/ice_history_shared.F90 index 66c4401c7..ee48a9996 100644 --- a/cicecore/cicedynB/analysis/ice_history_shared.F90 +++ b/cicecore/cicedynB/analysis/ice_history_shared.F90 @@ -2,17 +2,17 @@ ! ! Output files: netCDF or binary data, Fortran unformatted dumps ! -! The following variables are currently hard-wired as snapshots +! The following variables are currently hard-wired as snapshots ! (instantaneous rather than time-averages): ! divu, shear, sig1, sig2, sigP, trsig, mlt_onset, frz_onset, hisnap, aisnap ! ! Options for histfreq: '1','h','d','m','y','x', where x means that -! output stream will not be used (recommended for efficiency). -! histfreq_n can be any nonnegative integer, where 0 means that the +! output stream will not be used (recommended for efficiency). +! histfreq_n can be any nonnegative integer, where 0 means that the ! corresponding histfreq frequency will not be used. ! The flags (f_) can be set to '1','h','d','m','y' or 'x', where ! n means the field will not be written. To output the same field at -! more than one frequency, for instance monthy and daily, set +! more than one frequency, for instance monthy and daily, set ! f_ = 'md'. ! ! 
authors Tony Craig and Bruce Briegleb, NCAR @@ -34,7 +34,7 @@ module ice_history_shared private public :: define_hist_field, accum_hist_field, icefields_nml, construct_filename - + integer (kind=int_kind), public :: history_precision logical (kind=log_kind), public :: & @@ -142,7 +142,7 @@ module ice_history_shared a4Di(:,:,:,:,:,:), & ! field accumulations/averages, 4D categories,vertical, ice a4Ds(:,:,:,:,:,:), & ! field accumulations/averages, 4D categories,vertical, snow a4Df(:,:,:,:,:,:) ! field accumulations/averages, 4D floe size, thickness categories - + real (kind=dbl_kind), allocatable, public :: & Tinz4d (:,:,:,:) , & ! array for Tin Tsnz4d (:,:,:,:) , & ! array for Tsn @@ -199,7 +199,7 @@ module ice_history_shared nstr4Df = 'NLON NLAT NFSD NCAT', & ! vcoord for N cell, 4D, fsd estr4Df = 'ELON ELAT NFSD NCAT' ! vcoord for E cell, 4D, fsd !ferret -! tstr4Di = 'TLON TLAT VGRDi NCAT time', & ! ferret can not handle time +! tstr4Di = 'TLON TLAT VGRDi NCAT time', & ! ferret can not handle time ! ustr4Di = 'ULON ULAT VGRDi NCAT time', & ! index on 4D variables. ! tstr4Ds = 'TLON TLAT VGRDs NCAT time', & ! Use 'ferret' lines instead ! ustr4Ds = 'ULON ULAT VGRDs NCAT time', & ! (below also) @@ -367,10 +367,10 @@ module ice_history_shared f_keffn_top = 'x', & f_Tinz = 'x', f_Sinz = 'x', & f_Tsnz = 'x', & - f_a11 = 'x', f_a12 = 'x', & - f_e11 = 'x', f_e12 = 'x', & + f_a11 = 'x', f_a12 = 'x', & + f_e11 = 'x', f_e12 = 'x', & f_e22 = 'x', & - f_s11 = 'x', f_s12 = 'x', & + f_s11 = 'x', f_s12 = 'x', & f_s22 = 'x', & f_yieldstress11 = 'x', & f_yieldstress12 = 'x', & @@ -411,7 +411,7 @@ module ice_history_shared f_atmspd, f_atmdir , & f_fswup, & f_fswdn, f_flwdn , & - f_snow, f_snow_ai , & + f_snow, f_snow_ai , & f_rain, f_rain_ai , & f_sst, f_sss , & f_uocn, f_vocn , & @@ -436,8 +436,8 @@ module ice_history_shared f_snoice, f_dsnow , & f_meltt, f_melts , & f_meltb, f_meltl , & - f_fresh, f_fresh_ai , & - f_fsalt, f_fsalt_ai , & + f_fresh, f_fresh_ai , & + f_fsalt, f_fsalt_ai , & f_fbot, & f_fhocn, f_fhocn_ai , & f_fswthru, f_fswthru_ai,& @@ -715,7 +715,7 @@ module ice_history_shared n_trsig , n_icepresent , & n_iage , n_FY , & n_fsurf_ai , & - n_fcondtop_ai, n_fmeltt_ai , & + n_fcondtop_ai, n_fmeltt_ai , & n_aicen , n_vicen , & n_fsurfn_ai , & n_fcondtopn_ai, & @@ -765,7 +765,7 @@ subroutine construct_filename(ncfile,suffix,ns) iyear = myear imonth = mmonth iday = mday - isec = msec - dt + isec = int(msec - dt,int_kind) ! construct filename if (write_ic) then @@ -863,7 +863,7 @@ subroutine define_hist_field(id, vname, vunit, vcoord, vcellmeas, & character (len=*), intent(in) :: & vhistfreq ! history frequency - + integer (kind=int_kind), intent(in) :: & ns ! history file stream index @@ -970,7 +970,7 @@ subroutine accum_hist_field_2D(id, iblk, field_accum, field) integer (int_kind), dimension(:), intent(in) :: & ! max_nstrm id ! location in avail_fields array for use in ! later routines - + integer (kind=int_kind), intent(in) :: iblk real (kind=dbl_kind), intent(in) :: & @@ -1030,7 +1030,7 @@ subroutine accum_hist_field_3D(id, iblk, ndim, field_accum, field) integer (int_kind), dimension(:), intent(in) :: & ! max_nstrm id ! location in avail_fields array for use in ! later routines - + integer (kind=int_kind), intent(in) :: iblk integer (kind=int_kind), intent(in) :: & @@ -1095,7 +1095,7 @@ subroutine accum_hist_field_4D(id, iblk, ndim3, ndim4, field_accum, field) integer (int_kind), dimension(:), intent(in) :: & ! max_nstrm id ! location in avail_fields array for use in ! 
later routines - + integer (kind=int_kind), intent(in) :: iblk integer (kind=int_kind), intent(in) :: & diff --git a/cicecore/cicedynB/analysis/ice_history_snow.F90 b/cicecore/cicedynB/analysis/ice_history_snow.F90 index 090759759..0ec4144bf 100644 --- a/cicecore/cicedynB/analysis/ice_history_snow.F90 +++ b/cicecore/cicedynB/analysis/ice_history_snow.F90 @@ -18,7 +18,7 @@ module ice_history_snow implicit none private public :: accum_hist_snow, init_hist_snow_2D, init_hist_snow_3Dc - + !--------------------------------------------------------------- ! flags: write to output file if true or histfreq value !--------------------------------------------------------------- @@ -193,7 +193,7 @@ subroutine init_hist_snow_2D (dt) endif ! histfreq(ns) /= 'x' enddo ! nstreams endif ! tr_snow - + end subroutine init_hist_snow_2D !======================================================================= @@ -206,7 +206,7 @@ subroutine init_hist_snow_3Dc integer (kind=int_kind) :: ns logical (kind=log_kind) :: tr_snow character(len=*), parameter :: subname = '(init_hist_pond_3Dc)' - + call icepack_query_tracer_flags(tr_snow_out=tr_snow) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -263,7 +263,6 @@ subroutine accum_hist_snow (iblk) use ice_arrays_column, only: meltsliq use ice_blocks, only: block, nx_block, ny_block - use ice_domain, only: blocks_ice use ice_flux, only: fsloss use ice_history_shared, only: n2D, a2D, a3Dc, ncat_hist, & accum_hist_field, nzslyr @@ -275,7 +274,7 @@ subroutine accum_hist_snow (iblk) ! local variables integer (kind=int_kind) :: & - i, j, k, n + k, n integer (kind=int_kind) :: & nt_smice, nt_smliq, nt_rhos, nt_rsnw @@ -356,7 +355,7 @@ subroutine accum_hist_snow (iblk) if (f_fsloss(1:1)/= 'x') & call accum_hist_field(n_fsloss, iblk, & fsloss(:,:,iblk), a2D) - + endif ! allocated(a2D) ! 3D category fields @@ -422,7 +421,7 @@ subroutine accum_hist_snow (iblk) endif ! allocated(a3Dc) endif ! tr_snow - + end subroutine accum_hist_snow !======================================================================= diff --git a/cicecore/cicedynB/dynamics/ice_dyn_eap.F90 b/cicecore/cicedynB/dynamics/ice_dyn_eap.F90 index 317a6ba0d..f71d959da 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_eap.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_eap.F90 @@ -136,15 +136,15 @@ subroutine eap (dt) seabed_stress_method, seabed_stress, & stack_fields, unstack_fields use ice_flux, only: rdg_conv, strairxT, strairyT, & - strairx, strairy, uocn, vocn, ss_tltx, ss_tlty, iceumask, fm, & - strtltx, strtlty, strocnx, strocny, strintx, strinty, taubx, tauby, & + strairxU, strairyU, uocn, vocn, ss_tltx, ss_tlty, fmU, & + strtltxU, strtltyU, strocnxU, strocnyU, strintxU, strintyU, taubxU, taubyU, & strocnxT, strocnyT, strax, stray, & - Tbu, hwater, & + TbU, hwater, & stressp_1, stressp_2, stressp_3, stressp_4, & stressm_1, stressm_2, stressm_3, stressm_4, & stress12_1, stress12_2, stress12_3, stress12_4 use ice_grid, only: tmask, umask, dxT, dyT, dxhy, dyhx, cxp, cyp, cxm, cym, & - tarear, uarear, grid_average_X2Y, & + tarear, uarear, grid_average_X2Y, iceumask, & grid_atm_dynu, grid_atm_dynv, grid_ocn_dynu, grid_ocn_dynv use ice_state, only: aice, vice, vsno, uvel, vvel, divu, shear, & aice_init, aice0, aicen, vicen, strength @@ -178,11 +178,11 @@ subroutine eap (dt) ss_tltxU , & ! sea surface slope, x-direction (m/m) ss_tltyU , & ! sea surface slope, y-direction (m/m) tmass , & ! total mass of ice and snow (kg/m^2) - waterx , & ! 
for ocean stress calculation, x (m/s) - watery , & ! for ocean stress calculation, y (m/s) - forcex , & ! work array: combined atm stress and ocn tilt, x - forcey , & ! work array: combined atm stress and ocn tilt, y - aiu , & ! ice fraction on u-grid + waterxU , & ! for ocean stress calculation, x (m/s) + wateryU , & ! for ocean stress calculation, y (m/s) + forcexU , & ! work array: combined atm stress and ocn tilt, x + forceyU , & ! work array: combined atm stress and ocn tilt, y + aiU , & ! ice fraction on u-grid umass , & ! total mass of ice and snow (u grid) umassdti ! mass of U-cell/dte (kg/m^2 s) @@ -271,7 +271,7 @@ subroutine eap (dt) !----------------------------------------------------------------- call grid_average_X2Y('F', tmass , 'T' , umass, 'U') - call grid_average_X2Y('F', aice_init, 'T' , aiu , 'U') + call grid_average_X2Y('F', aice_init, 'T' , aiU , 'U') call grid_average_X2Y('S', uocn , grid_ocn_dynu, uocnU , 'U') call grid_average_X2Y('S', vocn , grid_ocn_dynv, vocnU , 'U') call grid_average_X2Y('S', ss_tltx , grid_ocn_dynu, ss_tltxU, 'U') @@ -288,15 +288,15 @@ subroutine eap (dt) file=__FILE__, line=__LINE__) if (.not. calc_strair) then - call grid_average_X2Y('F', strax, grid_atm_dynu, strairx, 'U') - call grid_average_X2Y('F', stray, grid_atm_dynv, strairy, 'U') + call grid_average_X2Y('F', strax, grid_atm_dynu, strairxU, 'U') + call grid_average_X2Y('F', stray, grid_atm_dynv, strairyU, 'U') else call ice_HaloUpdate (strairxT, halo_info, & field_loc_center, field_type_vector) call ice_HaloUpdate (strairyT, halo_info, & field_loc_center, field_type_vector) - call grid_average_X2Y('F', strairxT, 'T', strairx, 'U') - call grid_average_X2Y('F', strairyT, 'T', strairy, 'U') + call grid_average_X2Y('F', strairxT, 'T', strairxU, 'U') + call grid_average_X2Y('F', strairyT, 'T', strairyU, 'U') endif !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block,ij,i,j) SCHEDULE(runtime) @@ -317,20 +317,20 @@ subroutine eap (dt) icellt (iblk), icellu (iblk), & indxti (:,iblk), indxtj (:,iblk), & indxui (:,iblk), indxuj (:,iblk), & - aiu (:,:,iblk), umass (:,:,iblk), & + aiU (:,:,iblk), umass (:,:,iblk), & umassdti (:,:,iblk), fcor_blk (:,:,iblk), & umask (:,:,iblk), & uocnU (:,:,iblk), vocnU (:,:,iblk), & - strairx (:,:,iblk), strairy (:,:,iblk), & + strairxU (:,:,iblk), strairyU (:,:,iblk), & ss_tltxU (:,:,iblk), ss_tltyU (:,:,iblk), & icetmask (:,:,iblk), iceumask (:,:,iblk), & - fm (:,:,iblk), dt, & - strtltx (:,:,iblk), strtlty (:,:,iblk), & - strocnx (:,:,iblk), strocny (:,:,iblk), & - strintx (:,:,iblk), strinty (:,:,iblk), & - taubx (:,:,iblk), tauby (:,:,iblk), & - waterx (:,:,iblk), watery (:,:,iblk), & - forcex (:,:,iblk), forcey (:,:,iblk), & + fmU (:,:,iblk), dt, & + strtltxU (:,:,iblk), strtltyU (:,:,iblk), & + strocnxU (:,:,iblk), strocnyU (:,:,iblk), & + strintxU (:,:,iblk), strintyU (:,:,iblk), & + taubxU (:,:,iblk), taubyU (:,:,iblk), & + waterxU (:,:,iblk), wateryU (:,:,iblk), & + forcexU (:,:,iblk), forceyU (:,:,iblk), & stressp_1 (:,:,iblk), stressp_2 (:,:,iblk), & stressp_3 (:,:,iblk), stressp_4 (:,:,iblk), & stressm_1 (:,:,iblk), stressm_2 (:,:,iblk), & @@ -339,7 +339,7 @@ subroutine eap (dt) stress12_3(:,:,iblk), stress12_4(:,:,iblk), & uvel_init (:,:,iblk), vvel_init (:,:,iblk), & uvel (:,:,iblk), vvel (:,:,iblk), & - Tbu (:,:,iblk)) + TbU (:,:,iblk)) !----------------------------------------------------------------- ! 
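
The grid_average_X2Y('F', ...) calls near the top of this routine map T-cell fields (total mass, ice fraction) to U points before dyn_prep2 uses them. A self-contained sketch of a four-point, area-weighted T-to-U average of that general kind follows; the array names, extents, and uniform test values are hypothetical, and this is not the CICE implementation:

  program t2u_average_sketch
    implicit none
    integer, parameter :: dbl = kind(1.0d0)
    integer, parameter :: nx = 4, ny = 4
    real (kind=dbl) :: fieldT(nx,ny), tarea(nx,ny), fieldU(nx,ny)
    integer :: i, j
    fieldT = 1.0_dbl     ! uniform field: the average must return 1.0
    tarea  = 2.5_dbl     ! uniform T-cell areas (m^2), hypothetical
    fieldU = 0.0_dbl
    ! U(i,j) sits at the northeast corner of T(i,j); weight the four
    ! surrounding T values by T-cell area so the area integral is respected
    do j = 1, ny-1
       do i = 1, nx-1
          fieldU(i,j) = (fieldT(i  ,j  )*tarea(i  ,j  ) &
                       + fieldT(i+1,j  )*tarea(i+1,j  ) &
                       + fieldT(i  ,j+1)*tarea(i  ,j+1) &
                       + fieldT(i+1,j+1)*tarea(i+1,j+1)) &
                      / (tarea(i,j) + tarea(i+1,j) + tarea(i,j+1) + tarea(i+1,j+1))
       enddo
    enddo
    print *, 'fieldU(2,2) =', fieldU(2,2)   ! expect 1.0
  end program t2u_average_sketch

With a uniform input field the weighted average returns the same value, which is a quick sanity check on the weights.
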
Initialize structure tensor @@ -413,7 +413,7 @@ subroutine eap (dt) endif !----------------------------------------------------------------- - ! seabed stress factor Tbu (Tbu is part of Cb coefficient) + ! seabed stress factor TbU (TbU is part of Cb coefficient) !----------------------------------------------------------------- if (seabed_stress) then @@ -424,7 +424,7 @@ subroutine eap (dt) icellu (iblk), & indxui (:,iblk), indxuj(:,iblk), & vice (:,:,iblk), aice(:,:,iblk), & - hwater(:,:,iblk), Tbu (:,:,iblk)) + hwater(:,:,iblk), TbU (:,:,iblk)) enddo !$OMP END PARALLEL DO @@ -435,7 +435,7 @@ subroutine eap (dt) icellt(iblk), indxti(:,iblk), indxtj(:,iblk), & icellu(iblk), indxui(:,iblk), indxuj(:,iblk), & aicen(:,:,:,iblk), vicen(:,:,:,iblk), & - hwater (:,:,iblk), Tbu (:,:,iblk)) + hwater (:,:,iblk), TbU (:,:,iblk)) enddo !$OMP END PARALLEL DO endif @@ -493,17 +493,17 @@ subroutine eap (dt) call stepu (nx_block, ny_block, & icellu (iblk), Cdn_ocn (:,:,iblk), & indxui (:,iblk), indxuj (:,iblk), & - aiu (:,:,iblk), strtmp (:,:,:), & + aiU (:,:,iblk), strtmp (:,:,:), & uocnU (:,:,iblk), vocnU (:,:,iblk), & - waterx (:,:,iblk), watery (:,:,iblk), & - forcex (:,:,iblk), forcey (:,:,iblk), & - umassdti (:,:,iblk), fm (:,:,iblk), & + waterxU (:,:,iblk), wateryU (:,:,iblk), & + forcexU (:,:,iblk), forceyU (:,:,iblk), & + umassdti (:,:,iblk), fmU (:,:,iblk), & uarear (:,:,iblk), & - strintx (:,:,iblk), strinty (:,:,iblk), & - taubx (:,:,iblk), tauby (:,:,iblk), & + strintxU (:,:,iblk), strintyU (:,:,iblk), & + taubxU (:,:,iblk), taubyU (:,:,iblk), & uvel_init(:,:,iblk), vvel_init(:,:,iblk), & uvel (:,:,iblk), vvel (:,:,iblk), & - Tbu (:,:,iblk)) + TbU (:,:,iblk)) ! call ice_timer_stop(timer_tmp2,iblk) !----------------------------------------------------------------- @@ -561,15 +561,15 @@ subroutine eap (dt) indxui (:,iblk), indxuj (:,iblk), & uvel (:,:,iblk), vvel (:,:,iblk), & uocnU (:,:,iblk), vocnU (:,:,iblk), & - aiu (:,:,iblk), fm (:,:,iblk), & - strocnx (:,:,iblk), strocny (:,:,iblk)) + aiU (:,:,iblk), fmU (:,:,iblk), & + strocnxU(:,:,iblk), strocnyU(:,:,iblk)) enddo !$OMP END PARALLEL DO - ! strocn computed on U, N, E as needed. Map strocn U divided by aiu to T + ! strocn computed on U, N, E as needed. Map strocn U divided by aiU to T ! TODO: This should be done elsewhere as part of generalization? - ! conservation requires aiu be divided before averaging + ! 
conservation requires aiU be divided before averaging work1 = c0 work2 = c0 !$OMP PARALLEL DO PRIVATE(iblk,ij,i,j) @@ -577,8 +577,8 @@ subroutine eap (dt) do ij = 1, icellu(iblk) i = indxui(ij,iblk) j = indxuj(ij,iblk) - work1(i,j,iblk) = strocnx(i,j,iblk)/aiu(i,j,iblk) - work2(i,j,iblk) = strocny(i,j,iblk)/aiu(i,j,iblk) + work1(i,j,iblk) = strocnxU(i,j,iblk)/aiU(i,j,iblk) + work2(i,j,iblk) = strocnyU(i,j,iblk)/aiU(i,j,iblk) enddo enddo call ice_HaloUpdate (work1, halo_info, & @@ -1271,7 +1271,7 @@ subroutine stress_eap (nx_block, ny_block, & csigmne, csigmnw, csigmse, csigmsw , & csig12ne, csig12nw, csig12se, csig12sw , & str12ew, str12we, str12ns, str12sn , & - strp_tmp, strm_tmp + strp_tmp, strm_tmp real (kind=dbl_kind) :: & alpharne, alpharnw, alpharsw, alpharse, & diff --git a/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 b/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 index 866775132..c2060285a 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 @@ -83,14 +83,14 @@ subroutine evp (dt) use ice_domain, only: nblocks, blocks_ice, halo_info, maskhalo_dyn use ice_domain_size, only: max_blocks, ncat, nx_global, ny_global use ice_flux, only: rdg_conv, rdg_shear, strairxT, strairyT, & - strairx, strairy, uocn, vocn, ss_tltx, ss_tlty, iceumask, fm, & - strtltx, strtlty, strocnx, strocny, strintx, strinty, taubx, tauby, & + strairxU, strairyU, uocn, vocn, ss_tltx, ss_tlty, fmU, & + strtltxU, strtltyU, strocnxU, strocnyU, strintxU, strintyU, taubxU, taubyU, & strocnxT, strocnyT, strax, stray, & - Tbu, hwater, & - strairxN, strairyN, icenmask, fmN, & + TbU, hwater, & + strairxN, strairyN, fmN, & strtltxN, strtltyN, strocnxN, strocnyN, strintxN, strintyN, taubxN, taubyN, & TbN, & - strairxE, strairyE, iceemask, fmE, & + strairxE, strairyE, fmE, & strtltxE, strtltyE, strocnxE, strocnyE, strintxE, strintyE, taubxE, taubyE, & TbE, & stressp_1, stressp_2, stressp_3, stressp_4, & @@ -98,11 +98,12 @@ subroutine evp (dt) stress12_1, stress12_2, stress12_3, stress12_4, & stresspT, stressmT, stress12T, & stresspU, stressmU, stress12U - use ice_grid, only: hm, tmask, umask, umaskCD, nmask, emask, uvm, epm, npm, & + use ice_grid, only: tmask, umask, umaskCD, nmask, emask, uvm, epm, npm, & + iceumask, iceemask, icenmask, & dxE, dxN, dxT, dxU, dyE, dyN, dyT, dyU, & ratiodxN, ratiodxNr, ratiodyE, ratiodyEr, & dxhy, dyhx, cxp, cyp, cxm, cym, & - tarear, uarear, earear, narear, grid_average_X2Y, tarea, uarea, & + tarear, uarear, earear, narear, grid_average_X2Y, uarea, & grid_type, grid_ice, & grid_atm_dynu, grid_atm_dynv, grid_ocn_dynu, grid_ocn_dynv use ice_state, only: aice, vice, vsno, uvel, vvel, uvelN, vvelN, & @@ -150,11 +151,11 @@ subroutine evp (dt) ss_tltxU , & ! sea surface slope, x-direction (m/m) ss_tltyU , & ! sea surface slope, y-direction (m/m) tmass , & ! total mass of ice and snow (kg/m^2) - waterx , & ! for ocean stress calculation, x (m/s) - watery , & ! for ocean stress calculation, y (m/s) - forcex , & ! work array: combined atm stress and ocn tilt, x - forcey , & ! work array: combined atm stress and ocn tilt, y - aiu , & ! ice fraction on u-grid + waterxU , & ! for ocean stress calculation, x (m/s) + wateryU , & ! for ocean stress calculation, y (m/s) + forcexU , & ! work array: combined atm stress and ocn tilt, x + forceyU , & ! work array: combined atm stress and ocn tilt, y + aiU , & ! ice fraction on u-grid umass , & ! total mass of ice and snow (u grid) umassdti ! 
mass of U-cell/dte (kg/m^2 s) @@ -317,7 +318,7 @@ subroutine evp (dt) !----------------------------------------------------------------- call grid_average_X2Y('F', tmass , 'T' , umass , 'U') - call grid_average_X2Y('F', aice_init, 'T' , aiu , 'U') + call grid_average_X2Y('F', aice_init, 'T' , aiU , 'U') call grid_average_X2Y('S', uocn , grid_ocn_dynu, uocnU , 'U') call grid_average_X2Y('S', vocn , grid_ocn_dynv, vocnU , 'U') call grid_average_X2Y('S', ss_tltx , grid_ocn_dynu, ss_tltxU, 'U') @@ -348,15 +349,15 @@ subroutine evp (dt) file=__FILE__, line=__LINE__) if (.not. calc_strair) then - call grid_average_X2Y('F', strax, grid_atm_dynu, strairx, 'U') - call grid_average_X2Y('F', stray, grid_atm_dynv, strairy, 'U') + call grid_average_X2Y('F', strax, grid_atm_dynu, strairxU, 'U') + call grid_average_X2Y('F', stray, grid_atm_dynv, strairyU, 'U') else call ice_HaloUpdate (strairxT, halo_info, & field_loc_center, field_type_vector) call ice_HaloUpdate (strairyT, halo_info, & field_loc_center, field_type_vector) - call grid_average_X2Y('F', strairxT, 'T', strairx, 'U') - call grid_average_X2Y('F', strairyT, 'T', strairy, 'U') + call grid_average_X2Y('F', strairxT, 'T', strairxU, 'U') + call grid_average_X2Y('F', strairyT, 'T', strairyU, 'U') endif if (grid_ice == 'CD' .or. grid_ice == 'C') then @@ -392,20 +393,20 @@ subroutine evp (dt) icellt (iblk), icellu (iblk), & indxti (:,iblk), indxtj (:,iblk), & indxui (:,iblk), indxuj (:,iblk), & - aiu (:,:,iblk), umass (:,:,iblk), & + aiU (:,:,iblk), umass (:,:,iblk), & umassdti (:,:,iblk), fcor_blk (:,:,iblk), & umask (:,:,iblk), & uocnU (:,:,iblk), vocnU (:,:,iblk), & - strairx (:,:,iblk), strairy (:,:,iblk), & + strairxU (:,:,iblk), strairyU (:,:,iblk), & ss_tltxU (:,:,iblk), ss_tltyU (:,:,iblk), & icetmask (:,:,iblk), iceumask (:,:,iblk), & - fm (:,:,iblk), dt, & - strtltx (:,:,iblk), strtlty (:,:,iblk), & - strocnx (:,:,iblk), strocny (:,:,iblk), & - strintx (:,:,iblk), strinty (:,:,iblk), & - taubx (:,:,iblk), tauby (:,:,iblk), & - waterx (:,:,iblk), watery (:,:,iblk), & - forcex (:,:,iblk), forcey (:,:,iblk), & + fmU (:,:,iblk), dt, & + strtltxU (:,:,iblk), strtltyU (:,:,iblk), & + strocnxU (:,:,iblk), strocnyU (:,:,iblk), & + strintxU (:,:,iblk), strintyU (:,:,iblk), & + taubxU (:,:,iblk), taubyU (:,:,iblk), & + waterxU (:,:,iblk), wateryU (:,:,iblk), & + forcexU (:,:,iblk), forceyU (:,:,iblk), & stressp_1 (:,:,iblk), stressp_2 (:,:,iblk), & stressp_3 (:,:,iblk), stressp_4 (:,:,iblk), & stressm_1 (:,:,iblk), stressm_2 (:,:,iblk), & @@ -414,7 +415,7 @@ subroutine evp (dt) stress12_3(:,:,iblk), stress12_4(:,:,iblk), & uvel_init (:,:,iblk), vvel_init (:,:,iblk), & uvel (:,:,iblk), vvel (:,:,iblk), & - Tbu (:,:,iblk)) + TbU (:,:,iblk)) elseif (trim(grid_ice) == 'CD' .or. 
grid_ice == 'C') then call dyn_prep2 (nx_block, ny_block, & @@ -422,20 +423,20 @@ subroutine evp (dt) icellt (iblk), icellu (iblk), & indxti (:,iblk), indxtj (:,iblk), & indxui (:,iblk), indxuj (:,iblk), & - aiu (:,:,iblk), umass (:,:,iblk), & + aiU (:,:,iblk), umass (:,:,iblk), & umassdti (:,:,iblk), fcor_blk (:,:,iblk), & umaskCD (:,:,iblk), & uocnU (:,:,iblk), vocnU (:,:,iblk), & - strairx (:,:,iblk), strairy (:,:,iblk), & + strairxU (:,:,iblk), strairyU (:,:,iblk), & ss_tltxU (:,:,iblk), ss_tltyU (:,:,iblk), & icetmask (:,:,iblk), iceumask (:,:,iblk), & - fm (:,:,iblk), dt, & - strtltx (:,:,iblk), strtlty (:,:,iblk), & - strocnx (:,:,iblk), strocny (:,:,iblk), & - strintx (:,:,iblk), strinty (:,:,iblk), & - taubx (:,:,iblk), tauby (:,:,iblk), & - waterx (:,:,iblk), watery (:,:,iblk), & - forcex (:,:,iblk), forcey (:,:,iblk), & + fmU (:,:,iblk), dt, & + strtltxU (:,:,iblk), strtltyU (:,:,iblk), & + strocnxU (:,:,iblk), strocnyU (:,:,iblk), & + strintxU (:,:,iblk), strintyU (:,:,iblk), & + taubxU (:,:,iblk), taubyU (:,:,iblk), & + waterxU (:,:,iblk), wateryU (:,:,iblk), & + forcexU (:,:,iblk), forceyU (:,:,iblk), & stressp_1 (:,:,iblk), stressp_2 (:,:,iblk), & stressp_3 (:,:,iblk), stressp_4 (:,:,iblk), & stressm_1 (:,:,iblk), stressm_2 (:,:,iblk), & @@ -444,7 +445,7 @@ subroutine evp (dt) stress12_3(:,:,iblk), stress12_4(:,:,iblk), & uvel_init (:,:,iblk), vvel_init (:,:,iblk), & uvel (:,:,iblk), vvel (:,:,iblk), & - Tbu (:,:,iblk)) + TbU (:,:,iblk)) endif !----------------------------------------------------------------- @@ -642,7 +643,7 @@ subroutine evp (dt) endif !----------------------------------------------------------------- - ! seabed stress factor Tbu (Tbu is part of Cb coefficient) + ! seabed stress factor TbU (TbU is part of Cb coefficient) !----------------------------------------------------------------- if (seabed_stress) then @@ -656,7 +657,7 @@ subroutine evp (dt) icellu (iblk), & indxui (:,iblk), indxuj(:,iblk), & vice (:,:,iblk), aice(:,:,iblk), & - hwater(:,:,iblk), Tbu (:,:,iblk)) + hwater(:,:,iblk), TbU (:,:,iblk)) enddo !$OMP END PARALLEL DO @@ -667,7 +668,7 @@ subroutine evp (dt) icellt(iblk), indxti(:,iblk), indxtj(:,iblk), & icellu(iblk), indxui(:,iblk), indxuj(:,iblk), & aicen(:,:,:,iblk), vicen(:,:,:,iblk) , & - hwater (:,:,iblk), Tbu (:,:,iblk)) + hwater (:,:,iblk), TbU (:,:,iblk)) enddo !$OMP END PARALLEL DO endif @@ -697,7 +698,7 @@ subroutine evp (dt) icellt(iblk), indxti(:,iblk), indxtj(:,iblk), & icellu(iblk), indxui(:,iblk), indxuj(:,iblk), & aicen(:,:,:,iblk), vicen(:,:,:,iblk) , & - hwater (:,:,iblk), Tbu (:,:,iblk) , & + hwater (:,:,iblk), TbU (:,:,iblk) , & TbE (:,:,iblk), TbN (:,:,iblk) , & icelle(iblk), indxei(:,iblk), indxej(:,iblk), & icelln(iblk), indxni(:,iblk), indxnj(:,iblk) ) @@ -724,8 +725,8 @@ subroutine evp (dt) call ice_dyn_evp_1d_copyin( & nx_block,ny_block,nblocks,nx_global+2*nghost,ny_global+2*nghost, & icetmask, iceumask, & - cdn_ocn,aiu,uocnU,vocnU,forcex,forcey,Tbu, & - umassdti,fm,uarear,tarear,strintx,strinty,uvel_init,vvel_init,& + cdn_ocn,aiU,uocnU,vocnU,forcexU,forceyU,TbU, & + umassdti,fmU,uarear,tarear,strintxU,strintyU,uvel_init,vvel_init,& strength,uvel,vvel,dxT,dyT, & stressp_1 ,stressp_2, stressp_3, stressp_4, & stressm_1 ,stressm_2, stressm_3, stressm_4, & @@ -733,12 +734,12 @@ subroutine evp (dt) call ice_dyn_evp_1d_kernel() call ice_dyn_evp_1d_copyout( & nx_block,ny_block,nblocks,nx_global+2*nghost,ny_global+2*nghost, & -!strocn uvel,vvel, strocnx,strocny, strintx,strinty, & - uvel,vvel, strintx,strinty, & +!strocn 
uvel,vvel, strocnxU,strocnyU, strintxU,strintyU, & + uvel,vvel, strintxU,strintyU, & stressp_1, stressp_2, stressp_3, stressp_4, & stressm_1, stressm_2, stressm_3, stressm_4, & stress12_1,stress12_2,stress12_3,stress12_4, & - divu,rdg_conv,rdg_shear,shear,taubx,tauby ) + divu,rdg_conv,rdg_shear,shear,taubxU,taubyU ) call ice_timer_stop(timer_evp_1d) else ! evp_algorithm == standard_2d (Standard CICE) @@ -794,17 +795,17 @@ subroutine evp (dt) call stepu (nx_block , ny_block , & icellu (iblk), Cdn_ocn (:,:,iblk), & indxui (:,iblk), indxuj (:,iblk), & - aiu (:,:,iblk), strtmp (:,:,:), & + aiU (:,:,iblk), strtmp (:,:,:), & uocnU (:,:,iblk), vocnU (:,:,iblk), & - waterx (:,:,iblk), watery (:,:,iblk), & - forcex (:,:,iblk), forcey (:,:,iblk), & - umassdti (:,:,iblk), fm (:,:,iblk), & + waterxU (:,:,iblk), wateryU (:,:,iblk), & + forcexU (:,:,iblk), forceyU (:,:,iblk), & + umassdti (:,:,iblk), fmU (:,:,iblk), & uarear (:,:,iblk), & - strintx (:,:,iblk), strinty (:,:,iblk), & - taubx (:,:,iblk), tauby (:,:,iblk), & + strintxU (:,:,iblk), strintyU(:,:,iblk), & + taubxU (:,:,iblk), taubyU (:,:,iblk), & uvel_init(:,:,iblk), vvel_init(:,:,iblk),& uvel (:,:,iblk), vvel (:,:,iblk), & - Tbu (:,:,iblk)) + TbU (:,:,iblk)) enddo ! iblk !$OMP END PARALLEL DO @@ -870,7 +871,7 @@ subroutine evp (dt) shearU (:,:,iblk), & shear (:,:,iblk), divu (:,:,iblk), & rdg_conv(:,:,iblk), rdg_shear(:,:,iblk)) - + endif enddo !$OMP END PARALLEL DO @@ -1263,8 +1264,8 @@ subroutine evp (dt) indxui (:,iblk), indxuj (:,iblk), & uvel (:,:,iblk), vvel (:,:,iblk), & uocnU (:,:,iblk), vocnU (:,:,iblk), & - aiu (:,:,iblk), fm (:,:,iblk), & - strocnx (:,:,iblk), strocny (:,:,iblk)) + aiU (:,:,iblk), fmU (:,:,iblk), & + strocnxU(:,:,iblk), strocnyU(:,:,iblk)) enddo !$OMP END PARALLEL DO @@ -1296,10 +1297,10 @@ subroutine evp (dt) endif - ! strocn computed on U, N, E as needed. Map strocn U divided by aiu to T + ! strocn computed on U, N, E as needed. Map strocn U divided by aiU to T ! TODO: This should be done elsewhere as part of generalization? ! TODO: Rename strocn[x,y]T since it's different than strocn[x,y][U,N,E] - ! conservation requires aiu be divided before averaging + ! conservation requires aiU be divided before averaging work1 = c0 work2 = c0 !$OMP PARALLEL DO PRIVATE(iblk,ij,i,j) SCHEDULE(runtime) @@ -1307,8 +1308,8 @@ subroutine evp (dt) do ij = 1, icellu(iblk) i = indxui(ij,iblk) j = indxuj(ij,iblk) - work1(i,j,iblk) = strocnx(i,j,iblk)/aiu(i,j,iblk) - work2(i,j,iblk) = strocny(i,j,iblk)/aiu(i,j,iblk) + work1(i,j,iblk) = strocnxU(i,j,iblk)/aiU(i,j,iblk) + work2(i,j,iblk) = strocnyU(i,j,iblk)/aiU(i,j,iblk) enddo enddo !$OMP END PARALLEL DO @@ -1320,8 +1321,8 @@ subroutine evp (dt) call grid_average_X2Y('F', work2, 'U', strocnyT, 'T') if (grid_ice == 'CD' .or. grid_ice == 'C') then - call grid_average_X2Y('S', strintxE, 'E', strintx, 'U') ! diagnostic - call grid_average_X2Y('S', strintyN, 'N', strinty, 'U') ! diagnostic + call grid_average_X2Y('S', strintxE, 'E', strintxU, 'U') ! diagnostic + call grid_average_X2Y('S', strintyN, 'N', strintyU, 'U') ! diagnostic endif call ice_timer_stop(timer_dynamics) ! 
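
On the conservation note above: strocnxU carries an aiU weighting, so it must be converted to a stress per unit ice area before the U-to-T average, which is exactly why work1 = strocnxU/aiU is formed before grid_average_X2Y is called. A scalar sketch of why the order matters, using two U samples with equal weights and hypothetical values:

  program strocn_remap_sketch
    implicit none
    integer, parameter :: dbl = kind(1.0d0)
    real (kind=dbl) :: strocnU(2), aiU(2), strocnT
    strocnU = (/ 0.10_dbl, 0.30_dbl /)   ! ice-area-weighted stress (N/m^2)
    aiU     = (/ 0.50_dbl, 1.00_dbl /)   ! ice fraction at the U points
    ! divide by aiU first so each sample is a per-unit-ice-area stress;
    ! averaging the weighted values directly would fold the aiU weights
    ! into the mean and break conservation
    strocnT = 0.5_dbl*(strocnU(1)/aiU(1) + strocnU(2)/aiU(2))
    print *, 'strocnT =', strocnT   ! 0.25 N/m^2
  end program strocn_remap_sketch

Averaging first and dividing afterward would give a different number here, i.e. the aiU weighting would leak into the mean.
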
dynamics @@ -1407,7 +1408,7 @@ subroutine stress (nx_block, ny_block, & csigmne, csigmnw, csigmse, csigmsw , & csig12ne, csig12nw, csig12se, csig12sw , & str12ew, str12we, str12ns, str12sn , & - strp_tmp, strm_tmp, tmp + strp_tmp, strm_tmp character(len=*), parameter :: subname = '(stress)' diff --git a/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 b/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 index 2f5389d06..fe04a3d63 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 @@ -779,8 +779,7 @@ subroutine stepu_last(NA_len, rhow, lb, ub, Cw, aiu, uocn, vocn, & use ice_kinds_mod use ice_constants, only : c0, c1 - use ice_dyn_shared, only : brlx, revp, u0, cosw, sinw, & - seabed_stress + use ice_dyn_shared, only : brlx, revp, u0, cosw, sinw implicit none diff --git a/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 b/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 index a30cc1b1c..95d2eedb1 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 @@ -192,14 +192,14 @@ subroutine init_dyn (dt) use ice_blocks, only: nx_block, ny_block use ice_domain, only: nblocks, halo_dynbundle use ice_domain_size, only: max_blocks - use ice_flux, only: rdg_conv, rdg_shear, iceumask, iceemask, icenmask, & + use ice_flux, only: rdg_conv, rdg_shear, & stressp_1, stressp_2, stressp_3, stressp_4, & stressm_1, stressm_2, stressm_3, stressm_4, & stress12_1, stress12_2, stress12_3, stress12_4, & stresspT, stressmT, stress12T, & stresspU, stressmU, stress12U use ice_state, only: uvel, vvel, uvelE, vvelE, uvelN, vvelN, divu, shear - use ice_grid, only: ULAT, NLAT, ELAT, tarea + use ice_grid, only: ULAT, NLAT, ELAT, tarea, iceumask, iceemask, icenmask real (kind=dbl_kind), intent(in) :: & dt ! time step @@ -376,8 +376,8 @@ end subroutine set_evp_parameters subroutine dyn_prep1 (nx_block, ny_block, & ilo, ihi, jlo, jhi, & aice, vice, & - vsno, tmask, & - tmass, icetmask) + vsno, Tmask, & + Tmass, iceTmask) integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions @@ -389,13 +389,13 @@ subroutine dyn_prep1 (nx_block, ny_block, & vsno ! volume per unit area of snow (m) logical (kind=log_kind), dimension (nx_block,ny_block), intent(in) :: & - tmask ! land/boundary mask, thickness (T-cell) + Tmask ! land/boundary mask, thickness (T-cell) real (kind=dbl_kind), dimension (nx_block,ny_block), intent(out) :: & - tmass ! total mass of ice and snow (kg/m^2) + Tmass ! total mass of ice and snow (kg/m^2) integer (kind=int_kind), dimension (nx_block,ny_block), intent(out) :: & - icetmask ! ice extent mask (T-cell) + iceTmask ! ice extent mask (T-cell) ! local variables @@ -423,22 +423,22 @@ subroutine dyn_prep1 (nx_block, ny_block, & ! NOTE: vice and vsno must be up to date in all grid cells, ! including ghost cells !----------------------------------------------------------------- - if (tmask(i,j)) then - tmass(i,j) = (rhoi*vice(i,j) + rhos*vsno(i,j)) ! kg/m^2 + if (Tmask(i,j)) then + Tmass(i,j) = (rhoi*vice(i,j) + rhos*vsno(i,j)) ! kg/m^2 else - tmass(i,j) = c0 + Tmass(i,j) = c0 endif !----------------------------------------------------------------- ! ice extent mask (T-cells) !----------------------------------------------------------------- - tmphm(i,j) = tmask(i,j) .and. (aice (i,j) > a_min) & - .and. (tmass(i,j) > m_min) + tmphm(i,j) = Tmask(i,j) .and. (aice (i,j) > a_min) & + .and. (Tmass(i,j) > m_min) !----------------------------------------------------------------- ! 
augmented mask (land + open ocean) !----------------------------------------------------------------- - icetmask (i,j) = 0 + iceTmask (i,j) = 0 enddo enddo @@ -450,10 +450,10 @@ subroutine dyn_prep1 (nx_block, ny_block, & if (tmphm(i-1,j+1) .or. tmphm(i,j+1) .or. tmphm(i+1,j+1) .or. & tmphm(i-1,j) .or. tmphm(i,j) .or. tmphm(i+1,j) .or. & tmphm(i-1,j-1) .or. tmphm(i,j-1) .or. tmphm(i+1,j-1) ) then - icetmask(i,j) = 1 + iceTmask(i,j) = 1 endif - if (.not.tmask(i,j)) icetmask(i,j) = 0 + if (.not.Tmask(i,j)) iceTmask(i,j) = 0 enddo enddo @@ -472,16 +472,16 @@ end subroutine dyn_prep1 subroutine dyn_prep2 (nx_block, ny_block, & ilo, ihi, jlo, jhi, & - icellt, icellu, & - indxti, indxtj, & - indxui, indxuj, & - aiu, umass, & - umassdti, fcor, & - umask, & + icellT, icellX, & + indxTi, indxTj, & + indxXi, indxXj, & + aiX, Xmass, & + Xmassdti, fcor, & + Xmask, & uocn, vocn, & strairx, strairy, & ss_tltx, ss_tlty, & - icetmask, iceumask, & + iceTmask, iceXmask, & fm, dt, & strtltx, strtlty, & strocnx, strocny, & @@ -497,34 +497,34 @@ subroutine dyn_prep2 (nx_block, ny_block, & stress12_3, stress12_4, & uvel_init, vvel_init, & uvel, vvel, & - Tbu) + TbU) integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions ilo,ihi,jlo,jhi ! beginning and end of physical domain integer (kind=int_kind), intent(out) :: & - icellt , & ! no. of cells where icetmask = 1 - icellu ! no. of cells where iceumask = 1 + icellT , & ! no. of cells where iceTmask = 1 + icellX ! no. of cells where iceXmask = 1 integer (kind=int_kind), dimension (nx_block*ny_block), intent(out) :: & - indxti , & ! compressed index in i-direction - indxtj , & ! compressed index in j-direction - indxui , & ! compressed index in i-direction - indxuj ! compressed index in j-direction + indxTi , & ! compressed index in i-direction on T grid + indxTj , & ! compressed index in j-direction + indxXi , & ! compressed index in i-direction on X grid, grid depends on call + indxXj ! compressed index in j-direction logical (kind=log_kind), dimension (nx_block,ny_block), intent(in) :: & - umask ! land/boundary mask, thickness (U-cell) + Xmask ! land/boundary mask, thickness (X-grid-cell) integer (kind=int_kind), dimension (nx_block,ny_block), intent(in) :: & - icetmask ! ice extent mask (T-cell) + iceTmask ! ice extent mask (T-cell) logical (kind=log_kind), dimension (nx_block,ny_block), intent(inout) :: & - iceumask ! ice extent mask (U-cell) + iceXmask ! ice extent mask (X-grid-cell) real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & - aiu , & ! ice fraction on u-grid - umass , & ! total mass of ice and snow (u grid) + aiX , & ! ice fraction on u-grid (X grid) + Xmass , & ! total mass of ice and snow (X grid) fcor , & ! Coriolis parameter (1/s) strairx , & ! stress on ice by air, x-direction strairy , & ! stress on ice by air, y-direction @@ -537,10 +537,10 @@ subroutine dyn_prep2 (nx_block, ny_block, & dt ! time step real (kind=dbl_kind), dimension (nx_block,ny_block), intent(out) :: & - Tbu, & ! seabed stress factor (N/m^2) + TbU, & ! seabed stress factor (N/m^2) uvel_init,& ! x-component of velocity (m/s), beginning of time step vvel_init,& ! y-component of velocity (m/s), beginning of time step - umassdti, & ! mass of U-cell/dt (kg/m^2 s) + Xmassdti, & ! mass of X-grid-cell/dt (kg/m^2 s) waterx , & ! for ocean stress calculation, x (m/s) watery , & ! for ocean stress calculation, y (m/s) forcex , & ! 
work array: combined atm stress and ocn tilt, x @@ -571,7 +571,7 @@ subroutine dyn_prep2 (nx_block, ny_block, & gravit logical (kind=log_kind), dimension(nx_block,ny_block) :: & - iceumask_old ! old-time iceumask + iceXmask_old ! old-time iceXmask character(len=*), parameter :: subname = '(dyn_prep2)' @@ -585,12 +585,12 @@ subroutine dyn_prep2 (nx_block, ny_block, & watery (i,j) = c0 forcex (i,j) = c0 forcey (i,j) = c0 - umassdti (i,j) = c0 - Tbu (i,j) = c0 + Xmassdti (i,j) = c0 + TbU (i,j) = c0 taubx (i,j) = c0 tauby (i,j) = c0 - if (icetmask(i,j)==0) then + if (iceTmask(i,j)==0) then stressp_1 (i,j) = c0 stressp_2 (i,j) = c0 stressp_3 (i,j) = c0 @@ -608,44 +608,44 @@ subroutine dyn_prep2 (nx_block, ny_block, & enddo ! j !----------------------------------------------------------------- - ! Identify cells where icetmask = 1 - ! Note: The icellt mask includes north and east ghost cells + ! Identify cells where iceTmask = 1 + ! Note: The icellT mask includes north and east ghost cells ! where stresses are needed. !----------------------------------------------------------------- - icellt = 0 + icellT = 0 do j = jlo, jhi+1 do i = ilo, ihi+1 - if (icetmask(i,j) == 1) then - icellt = icellt + 1 - indxti(icellt) = i - indxtj(icellt) = j + if (iceTmask(i,j) == 1) then + icellT = icellT + 1 + indxTi(icellT) = i + indxTj(icellT) = j endif enddo enddo !----------------------------------------------------------------- - ! Define iceumask - ! Identify cells where iceumask is true + ! Define iceXmask + ! Identify cells where iceXmask is true ! Initialize velocity where needed !----------------------------------------------------------------- - icellu = 0 + icellX = 0 do j = jlo, jhi do i = ilo, ihi - iceumask_old(i,j) = iceumask(i,j) ! save + iceXmask_old(i,j) = iceXmask(i,j) ! save ! ice extent mask (U-cells) - iceumask(i,j) = (umask(i,j)) .and. (aiu (i,j) > a_min) & - .and. (umass(i,j) > m_min) + iceXmask(i,j) = (Xmask(i,j)) .and. (aiX (i,j) > a_min) & + .and. (Xmass(i,j) > m_min) - if (iceumask(i,j)) then - icellu = icellu + 1 - indxui(icellu) = i - indxuj(icellu) = j + if (iceXmask(i,j)) then + icellX = icellX + 1 + indxXi(icellX) = i + indxXj(icellX) = j ! initialize velocity for new ice points to ocean sfc current - if (.not. iceumask_old(i,j)) then + if (.not. iceXmask_old(i,j)) then uvel(i,j) = uocn(i,j) vvel(i,j) = vocn(i,j) endif @@ -675,13 +675,13 @@ subroutine dyn_prep2 (nx_block, ny_block, & file=__FILE__, line=__LINE__) endif - do ij = 1, icellu - i = indxui(ij) - j = indxuj(ij) + do ij = 1, icellX + i = indxXi(ij) + j = indxXj(ij) - umassdti(i,j) = umass(i,j)/dt ! kg/m^2 s + Xmassdti(i,j) = Xmass(i,j)/dt ! kg/m^2 s - fm(i,j) = fcor(i,j)*umass(i,j) ! Coriolis * mass + fm(i,j) = fcor(i,j)*Xmass(i,j) ! Coriolis * mass ! for ocean stress waterx(i,j) = uocn(i,j)*cosw - vocn(i,j)*sinw*sign(c1,fm(i,j)) @@ -693,8 +693,8 @@ subroutine dyn_prep2 (nx_block, ny_block, & strtltx(i,j) = -fm(i,j)*vocn(i,j) strtlty(i,j) = fm(i,j)*uocn(i,j) elseif (trim(ssh_stress) == 'coupled') then - strtltx(i,j) = -gravit*umass(i,j)*ss_tltx(i,j) - strtlty(i,j) = -gravit*umass(i,j)*ss_tlty(i,j) + strtltx(i,j) = -gravit*Xmass(i,j)*ss_tltx(i,j) + strtlty(i,j) = -gravit*Xmass(i,j)*ss_tlty(i,j) else call abort_ice(subname//' ERROR: unknown ssh_stress='//trim(ssh_stress), & file=__FILE__, line=__LINE__) @@ -713,38 +713,38 @@ end subroutine dyn_prep2 ! author: Elizabeth C. 
Hunke, LANL subroutine stepu (nx_block, ny_block, & - icellu, Cw, & - indxui, indxuj, & - aiu, str, & + icellU, Cw, & + indxUi, indxUj, & + aiX, str, & uocn, vocn, & waterx, watery, & forcex, forcey, & - umassdti, fm, & + Umassdti, fm, & uarear, & strintx, strinty, & taubx, tauby, & uvel_init, vvel_init,& uvel, vvel, & - Tbu) + TbU) integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellu ! total count when iceumask is true + icellU ! total count when iceumask is true integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & - indxui , & ! compressed index in i-direction - indxuj ! compressed index in j-direction + indxUi , & ! compressed index in i-direction + indxUj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & - Tbu, & ! seabed stress factor (N/m^2) + TbU, & ! seabed stress factor (N/m^2) uvel_init,& ! x-component of velocity (m/s), beginning of timestep vvel_init,& ! y-component of velocity (m/s), beginning of timestep - aiu , & ! ice fraction on u-grid + aiX , & ! ice fraction on X-grid waterx , & ! for ocean stress calculation, x (m/s) watery , & ! for ocean stress calculation, y (m/s) forcex , & ! work array: combined atm stress and ocn tilt, x forcey , & ! work array: combined atm stress and ocn tilt, y - umassdti, & ! mass of U-cell/dt (kg/m^2 s) + Umassdti, & ! mass of U-cell/dt (kg/m^2 s) uocn , & ! ocean current, x-direction (m/s) vocn , & ! ocean current, y-direction (m/s) fm , & ! Coriolis param. * mass in U-cell (kg/s) @@ -790,23 +790,23 @@ subroutine stepu (nx_block, ny_block, & if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - do ij =1, icellu - i = indxui(ij) - j = indxuj(ij) + do ij =1, icellU + i = indxUi(ij) + j = indxUj(ij) uold = uvel(i,j) vold = vvel(i,j) ! (magnitude of relative ocean current)*rhow*drag*aice - vrel = aiu(i,j)*rhow*Cw(i,j)*sqrt((uocn(i,j) - uold)**2 + & + vrel = aiX(i,j)*rhow*Cw(i,j)*sqrt((uocn(i,j) - uold)**2 + & (vocn(i,j) - vold)**2) ! m/s ! ice/ocean stress taux = vrel*waterx(i,j) ! NOTE this is not the entire tauy = vrel*watery(i,j) ! ocn stress term - Cb = Tbu(i,j) / (sqrt(uold**2 + vold**2) + u0) ! for seabed stress + Cb = TbU(i,j) / (sqrt(uold**2 + vold**2) + u0) ! for seabed stress ! revp = 0 for classic evp, 1 for revised evp - cca = (brlx + revp)*umassdti(i,j) + vrel * cosw + Cb ! kg/m^2 s + cca = (brlx + revp)*Umassdti(i,j) + vrel * cosw + Cb ! kg/m^2 s ccb = fm(i,j) + sign(c1,fm(i,j)) * vrel * sinw ! kg/m^2 s @@ -820,9 +820,9 @@ subroutine stepu (nx_block, ny_block, & ! finally, the velocity components cc1 = strintx(i,j) + forcex(i,j) + taux & - + umassdti(i,j)*(brlx*uold + revp*uvel_init(i,j)) + + Umassdti(i,j)*(brlx*uold + revp*uvel_init(i,j)) cc2 = strinty(i,j) + forcey(i,j) + tauy & - + umassdti(i,j)*(brlx*vold + revp*vvel_init(i,j)) + + Umassdti(i,j)*(brlx*vold + revp*vvel_init(i,j)) uvel(i,j) = (cca*cc1 + ccb*cc2) / ab2 ! m/s vvel(i,j) = (cca*cc2 - ccb*cc1) / ab2 @@ -839,18 +839,18 @@ end subroutine stepu ! 
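
In stepu above, each U point solves a 2x2 linear system, cca*u - ccb*v = cc1 and ccb*u + cca*v = cc2, in closed form via Cramer's rule with determinant ab2 = cca**2 + ccb**2. A minimal stand-alone check of that identity (the coefficient values are arbitrary):

  program stepu_2x2_sketch
    implicit none
    integer, parameter :: dbl = kind(1.0d0)
    real (kind=dbl) :: cca, ccb, cc1, cc2, ab2, u, v
    cca = 3.0_dbl ; ccb = 1.5_dbl        ! arbitrary coefficients
    cc1 = 2.0_dbl ; cc2 = -1.0_dbl       ! arbitrary right-hand sides
    ab2 = cca**2 + ccb**2                ! determinant of the 2x2 system
    u   = (cca*cc1 + ccb*cc2)/ab2        ! same closed form as in stepu
    v   = (cca*cc2 - ccb*cc1)/ab2
    ! both residuals should print as ~0
    print *, cca*u - ccb*v - cc1, ccb*u + cca*v - cc2
  end program stepu_2x2_sketch
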
Integration of the momentum equation to find velocity (u,v) at E and N locations subroutine stepuv_CD (nx_block, ny_block, & - icell, Cw, & - indxi, indxj, & - aiu, & - uocn, vocn, & - waterx, watery, & - forcex, forcey, & - massdti, fm, & - strintx, strinty, & - taubx, tauby, & - uvel_init, vvel_init,& - uvel, vvel, & - Tb) + icell, Cw, & + indxi, indxj, & + aiX, & + uocn, vocn, & + waterx, watery, & + forcex, forcey, & + massdti, fm, & + strintx, strinty, & + taubx, tauby, & + uvel_init, vvel_init,& + uvel, vvel, & + Tb) integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions @@ -864,7 +864,7 @@ subroutine stepuv_CD (nx_block, ny_block, & Tb, & ! seabed stress factor (N/m^2) uvel_init,& ! x-component of velocity (m/s), beginning of timestep vvel_init,& ! y-component of velocity (m/s), beginning of timestep - aiu , & ! ice fraction on [en]-grid + aiX , & ! ice fraction on X-grid waterx , & ! for ocean stress calculation, x (m/s) watery , & ! for ocean stress calculation, y (m/s) forcex , & ! work array: combined atm stress and ocn tilt, x @@ -920,7 +920,7 @@ subroutine stepuv_CD (nx_block, ny_block, & vold = vvel(i,j) ! (magnitude of relative ocean current)*rhow*drag*aice - vrel = aiu(i,j)*rhow*Cw(i,j)*sqrt((uocn(i,j) - uold)**2 + & + vrel = aiX(i,j)*rhow*Cw(i,j)*sqrt((uocn(i,j) - uold)**2 + & (vocn(i,j) - vold)**2) ! m/s ! ice/ocean stress taux = vrel*waterx(i,j) ! NOTE this is not the entire @@ -956,16 +956,16 @@ end subroutine stepuv_CD ! Integration of the momentum equation to find velocity u at E location on C grid subroutine stepu_C (nx_block, ny_block, & - icell, Cw, & - indxi, indxj, & - aiu, & - uocn, vocn, & - waterx, forcex, & - massdti, fm, & - strintx, taubx, & - uvel_init, & - uvel, vvel, & - Tb) + icell, Cw, & + indxi, indxj, & + aiX, & + uocn, vocn, & + waterx, forcex, & + massdti, fm, & + strintx, taubx, & + uvel_init, & + uvel, vvel, & + Tb) integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions @@ -978,7 +978,7 @@ subroutine stepu_C (nx_block, ny_block, & real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & Tb, & ! seabed stress factor (N/m^2) uvel_init,& ! x-component of velocity (m/s), beginning of timestep - aiu , & ! ice fraction on [en]-grid + aiX , & ! ice fraction on X-grid waterx , & ! for ocean stress calculation, x (m/s) forcex , & ! work array: combined atm stress and ocn tilt, x massdti , & ! mass of e-cell/dt (kg/m^2 s) @@ -1025,7 +1025,7 @@ subroutine stepu_C (nx_block, ny_block, & vold = vvel(i,j) ! (magnitude of relative ocean current)*rhow*drag*aice - vrel = aiu(i,j)*rhow*Cw(i,j)*sqrt((uocn(i,j) - uold)**2 + & + vrel = aiX(i,j)*rhow*Cw(i,j)*sqrt((uocn(i,j) - uold)**2 + & (vocn(i,j) - vold)**2) ! m/s ! ice/ocean stress taux = vrel*waterx(i,j) ! NOTE this is not the entire @@ -1055,16 +1055,16 @@ end subroutine stepu_C ! Integration of the momentum equation to find velocity v at N location on C grid subroutine stepv_C (nx_block, ny_block, & - icell, Cw, & - indxi, indxj, & - aiu, & - uocn, vocn, & - watery, forcey, & - massdti, fm, & - strinty, tauby, & - vvel_init, & - uvel, vvel, & - Tb) + icell, Cw, & + indxi, indxj, & + aiX, & + uocn, vocn, & + watery, forcey, & + massdti, fm, & + strinty, tauby, & + vvel_init, & + uvel, vvel, & + Tb) integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions @@ -1077,7 +1077,7 @@ subroutine stepv_C (nx_block, ny_block, & real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & Tb, & ! 
seabed stress factor (N/m^2) vvel_init,& ! y-component of velocity (m/s), beginning of timestep - aiu , & ! ice fraction on [en]-grid + aiX , & ! ice fraction on X-grid watery , & ! for ocean stress calculation, y (m/s) forcey , & ! work array: combined atm stress and ocn tilt, y massdti , & ! mass of n-cell/dt (kg/m^2 s) @@ -1124,7 +1124,7 @@ subroutine stepv_C (nx_block, ny_block, & vold = vvel(i,j) ! (magnitude of relative ocean current)*rhow*drag*aice - vrel = aiu(i,j)*rhow*Cw(i,j)*sqrt((uocn(i,j) - uold)**2 + & + vrel = aiX(i,j)*rhow*Cw(i,j)*sqrt((uocn(i,j) - uold)**2 + & (vocn(i,j) - vold)**2) ! m/s ! ice/ocean stress tauy = vrel*watery(i,j) ! NOTE this is not the entire ocn stress @@ -1157,27 +1157,27 @@ end subroutine stepv_C ! author: Elizabeth C. Hunke, LANL subroutine dyn_finish (nx_block, ny_block, & - icellu, Cw, & - indxui, indxuj, & + icellU, Cw, & + indxUi, indxUj, & uvel, vvel, & uocn, vocn, & - aiu, fm, & + aiX, fm, & strocnx, strocny) integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellu ! total count when iceumask is true + icellU ! total count when iceumask is true integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & - indxui , & ! compressed index in i-direction - indxuj ! compressed index in j-direction + indxUi , & ! compressed index in i-direction + indxUj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & uvel , & ! x-component of velocity (m/s) vvel , & ! y-component of velocity (m/s) uocn , & ! ocean current, x-direction (m/s) vocn , & ! ocean current, y-direction (m/s) - aiu , & ! ice fraction on u-grid + aiX , & ! ice fraction on X-grid fm ! Coriolis param. * mass in U-cell (kg/s) real (kind=dbl_kind), dimension (nx_block,ny_block), intent(inout) :: & @@ -1204,20 +1204,20 @@ subroutine dyn_finish (nx_block, ny_block, & file=__FILE__, line=__LINE__) ! ocean-ice stress for coupling - do ij =1, icellu - i = indxui(ij) - j = indxuj(ij) + do ij =1, icellU + i = indxUi(ij) + j = indxUj(ij) vrel = rhow*Cw(i,j)*sqrt((uocn(i,j) - uvel(i,j))**2 + & (vocn(i,j) - vvel(i,j))**2) ! m/s ! strocnx(i,j) = strocnx(i,j) & -! - vrel*(uvel(i,j)*cosw - vvel(i,j)*sinw) * aiu(i,j) +! - vrel*(uvel(i,j)*cosw - vvel(i,j)*sinw) * aiX(i,j) ! strocny(i,j) = strocny(i,j) & -! - vrel*(vvel(i,j)*cosw + uvel(i,j)*sinw) * aiu(i,j) +! - vrel*(vvel(i,j)*cosw + uvel(i,j)*sinw) * aiX(i,j) ! update strocnx to most recent iterate and complete the term - vrel = vrel * aiu(i,j) + vrel = vrel * aiX(i,j) strocnx(i,j) = vrel*((uocn(i,j) - uvel(i,j))*cosw & - (vocn(i,j) - vvel(i,j))*sinw*sign(c1,fm(i,j))) strocny(i,j) = vrel*((vocn(i,j) - vvel(i,j))*cosw & @@ -1233,7 +1233,7 @@ subroutine dyn_finish (nx_block, ny_block, & end subroutine dyn_finish !======================================================================= -! Computes seabed (basal) stress factor Tbu (landfast ice) based on mean +! Computes seabed (basal) stress factor TbU (landfast ice) based on mean ! thickness and bathymetry data. LKD refers to linear keel draft. This ! parameterization assumes that the largest keel draft varies linearly ! with the mean thickness. @@ -1248,25 +1248,25 @@ end subroutine dyn_finish ! ! author: JF Lemieux, Philippe Blain (ECCC) ! -! note1: Tbu is a part of the Cb as defined in Lemieux et al. 2015 and 2016. +! note1: TbU is a part of the Cb as defined in Lemieux et al. 2015 and 2016. ! note2: Seabed stress (better name) was called basal stress in Lemieux et al. 
2015 subroutine seabed_stress_factor_LKD (nx_block, ny_block, & - icellu, & - indxui, indxuj, & + icellU, & + indxUi, indxUj, & vice, aice, & - hwater, Tbu, & + hwater, TbU, & grid_location) use ice_grid, only: grid_neighbor_min, grid_neighbor_max integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellu ! no. of cells where ice[uen]mask = 1 + icellU ! no. of cells where ice[uen]mask = 1 integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & - indxui , & ! compressed index in i-direction - indxuj ! compressed index in j-direction + indxUi , & ! compressed index in i-direction + indxUj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & aice , & ! concentration of ice at tracer location @@ -1274,7 +1274,7 @@ subroutine seabed_stress_factor_LKD (nx_block, ny_block, & hwater ! water depth at tracer location (m) real (kind=dbl_kind), dimension (nx_block,ny_block), intent(inout) :: & - Tbu ! seabed stress factor at 'grid_location' (N/m^2) + TbU ! seabed stress factor at 'grid_location' (N/m^2) character(len=*), optional, intent(inout) :: & grid_location ! grid location (U, E, N), U assumed if not present @@ -1301,9 +1301,9 @@ subroutine seabed_stress_factor_LKD (nx_block, ny_block, & l_grid_location = grid_location endif - do ij = 1, icellu - i = indxui(ij) - j = indxuj(ij) + do ij = 1, icellU + i = indxUi(ij) + j = indxUj(ij) ! convert quantities to grid_location @@ -1319,14 +1319,14 @@ subroutine seabed_stress_factor_LKD (nx_block, ny_block, & hcu = au * hwu / k1 ! 2- calculate seabed stress factor - Tbu(i,j) = docalc_tbu*k2 * max(c0,(hu - hcu)) * exp(-alphab * (c1 - au)) + TbU(i,j) = docalc_tbu*k2 * max(c0,(hu - hcu)) * exp(-alphab * (c1 - au)) enddo ! ij end subroutine seabed_stress_factor_LKD !======================================================================= -! Computes seabed (basal) stress factor Tbu (landfast ice) based on +! Computes seabed (basal) stress factor TbU (landfast ice) based on ! probability of contact between the ITD and the seabed. The water depth ! could take into account variations of the SSH. In the simplest ! formulation, hwater is simply the value of the bathymetry. To calculate @@ -1340,13 +1340,13 @@ end subroutine seabed_stress_factor_LKD ! authors: D. Dumont, J.F. Lemieux, E. Dumas-Lefebvre, F. Dupont ! subroutine seabed_stress_factor_prob (nx_block, ny_block, & - icellt, indxti, indxtj, & - icellu, indxui, indxuj, & + icellT, indxTi, indxTj, & + icellU, indxUi, indxUj, & aicen, vicen, & - hwater, Tbu, & + hwater, TbU, & TbE, TbN, & - icelle, indxei, indxej, & - icelln, indxni, indxnj) + icellE, indxEi, indxEj, & + icellN, indxNi, indxNj) ! use modules use ice_arrays_column, only: hin_max @@ -1355,13 +1355,13 @@ subroutine seabed_stress_factor_prob (nx_block, ny_block, & integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellt, icellu ! no. of cells where ice[tu]mask = 1 + icellT, icellU ! no. of cells where ice[tu]mask = 1 integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & - indxti , & ! compressed index in i-direction - indxtj , & ! compressed index in j-direction - indxui , & ! compressed index in i-direction - indxuj ! compressed index in j-direction + indxTi , & ! compressed index in i-direction + indxTj , & ! compressed index in j-direction + indxUi , & ! compressed index in i-direction + indxUj ! 
compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & hwater ! water depth at tracer location (m) @@ -1371,20 +1371,20 @@ subroutine seabed_stress_factor_prob (nx_block, ny_block, & vicen ! partial volume for last thickness category in ITD (m) real (kind=dbl_kind), dimension (nx_block,ny_block), intent(inout) :: & - Tbu ! seabed stress factor at U location (N/m^2) + TbU ! seabed stress factor at U location (N/m^2) real (kind=dbl_kind), dimension (nx_block,ny_block), intent(inout), optional :: & TbE, & ! seabed stress factor at E location (N/m^2) TbN ! seabed stress factor at N location (N/m^2) integer (kind=int_kind), intent(in), optional :: & - icelle, icelln ! no. of cells where ice[en]mask = 1 + icellE, icellN ! no. of cells where ice[en]mask = 1 integer (kind=int_kind), dimension (nx_block*ny_block), intent(in), optional :: & - indxei , & ! compressed index in i-direction - indxej , & ! compressed index in j-direction - indxni , & ! compressed index in i-direction - indxnj ! compressed index in j-direction + indxEi , & ! compressed index in i-direction + indxEj , & ! compressed index in j-direction + indxNi , & ! compressed index in i-direction + indxNj ! compressed index in j-direction ! local variables @@ -1444,9 +1444,9 @@ subroutine seabed_stress_factor_prob (nx_block, ny_block, & Tbt=c0 - do ij = 1, icellt - i = indxti(ij) - j = indxtj(ij) + do ij = 1, icellT + i = indxTi(ij) + j = indxTj(ij) atot = sum(aicen(i,j,1:ncat)) @@ -1517,27 +1517,27 @@ subroutine seabed_stress_factor_prob (nx_block, ny_block, & enddo if (grid_ice == "B") then - do ij = 1, icellu - i = indxui(ij) - j = indxuj(ij) + do ij = 1, icellU + i = indxUi(ij) + j = indxUj(ij) ! convert quantities to U-location - Tbu(i,j) = grid_neighbor_max(Tbt, i, j, 'U') + TbU(i,j) = grid_neighbor_max(Tbt, i, j, 'U') enddo ! ij elseif (grid_ice == "C" .or. grid_ice == "CD") then if (present(Tbe) .and. present(TbN) .and. & - present(icelle) .and. present(icelln) .and. & - present(indxei) .and. present(indxej) .and. & - present(indxni) .and. present(indxnj)) then + present(icellE) .and. present(icellN) .and. & + present(indxEi) .and. present(indxEj) .and. & + present(indxNi) .and. present(indxNj)) then - do ij = 1, icelle - i = indxei(ij) - j = indxej(ij) + do ij = 1, icellE + i = indxEi(ij) + j = indxEj(ij) ! convert quantities to E-location TbE(i,j) = grid_neighbor_max(Tbt, i, j, 'E') enddo - do ij = 1, icelln - i = indxni(ij) - j = indxnj(ij) + do ij = 1, icellN + i = indxNi(ij) + j = indxNj(ij) ! convert quantities to N-location TbN(i,j) = grid_neighbor_max(Tbt, i, j, 'N') enddo @@ -1621,8 +1621,8 @@ end subroutine principal_stress ! 2019: subroutine created by Philippe Blain, ECCC subroutine deformations (nx_block, ny_block, & - icellt, & - indxti, indxtj, & + icellT, & + indxTi, indxTj, & uvel, vvel, & dxT, dyT, & cxp, cyp, & @@ -1635,11 +1635,11 @@ subroutine deformations (nx_block, ny_block, & integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellt ! no. of cells where icetmask = 1 + icellT ! no. of cells where iceTmask = 1 integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & - indxti , & ! compressed index in i-direction - indxtj ! compressed index in j-direction + indxTi , & ! compressed index in i-direction + indxTj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & uvel , & ! 
x-component of velocity (m/s) @@ -1672,9 +1672,9 @@ subroutine deformations (nx_block, ny_block, & character(len=*), parameter :: subname = '(deformations)' - do ij = 1, icellt - i = indxti(ij) - j = indxtj(ij) + do ij = 1, icellT + i = indxTi(ij) + j = indxTj(ij) !----------------------------------------------------------------- ! strain rates @@ -1719,8 +1719,8 @@ end subroutine deformations ! Nov 2021 subroutine deformationsCD_T (nx_block, ny_block, & - icellt, & - indxti, indxtj, & + icellT, & + indxTi, indxTj, & uvelE, vvelE, & uvelN, vvelN, & dxN, dyE, & @@ -1733,11 +1733,11 @@ subroutine deformationsCD_T (nx_block, ny_block, & integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellt ! no. of cells where icetmask = 1 + icellT ! no. of cells where iceTmask = 1 integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & - indxti , & ! compressed index in i-direction - indxtj ! compressed index in j-direction + indxTi , & ! compressed index in i-direction + indxTj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & uvelE , & ! x-component of velocity (m/s) at the E point @@ -1778,8 +1778,8 @@ subroutine deformationsCD_T (nx_block, ny_block, & !----------------------------------------------------------------- call strain_rates_T (nx_block , ny_block , & - icellt , & - indxti(:) , indxtj (:) , & + icellT , & + indxTi(:) , indxTj (:) , & uvelE (:,:), vvelE (:,:), & uvelN (:,:), vvelN (:,:), & dxN (:,:), dyE (:,:), & @@ -1787,9 +1787,9 @@ subroutine deformationsCD_T (nx_block, ny_block, & divT (:,:), tensionT(:,:), & shearT(:,:), DeltaT (:,:) ) - do ij = 1, icellt - i = indxti(ij) - j = indxtj(ij) + do ij = 1, icellT + i = indxTi(ij) + j = indxTj(ij) !----------------------------------------------------------------- ! deformations for mechanical redistribution @@ -1815,8 +1815,8 @@ end subroutine deformationsCD_T ! Nov 2021 subroutine deformationsC_T (nx_block, ny_block, & - icellt, & - indxti, indxtj, & + icellT, & + indxTi, indxTj, & uvelE, vvelE, & uvelN, vvelN, & dxN, dyE, & @@ -1830,11 +1830,11 @@ subroutine deformationsC_T (nx_block, ny_block, & integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellt ! no. of cells where icetmask = 1 + icellT ! no. of cells where iceTmask = 1 integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & - indxti , & ! compressed index in i-direction - indxtj ! compressed index in j-direction + indxTi , & ! compressed index in i-direction + indxTj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & uvelE , & ! x-component of velocity (m/s) at the E point @@ -1878,8 +1878,8 @@ subroutine deformationsC_T (nx_block, ny_block, & !----------------------------------------------------------------- call strain_rates_T (nx_block , ny_block , & - icellt , & - indxti(:) , indxtj (:) , & + icellT , & + indxTi(:) , indxTj (:) , & uvelE (:,:), vvelE (:,:), & uvelN (:,:), vvelN (:,:), & dxN (:,:), dyE (:,:), & @@ -1889,14 +1889,14 @@ subroutine deformationsC_T (nx_block, ny_block, & ! DeltaT is calc by strain_rates_T but replaced by calculation below. - do ij = 1, icellt - i = indxti(ij) - j = indxtj(ij) + do ij = 1, icellT + i = indxTi(ij) + j = indxTj(ij) !----------------------------------------------------------------- ! 
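
strain_rates_T, called above, returns the divergence, tension, shear, and Delta at T points. In the standard viscous-plastic rheology these combine as Delta = sqrt(div**2 + (tension**2 + shear**2)/e**2), with e the yield-ellipse aspect ratio. A one-point sketch of that formula, assuming e = 2 and hypothetical strain-rate magnitudes:

  program delta_sketch
    implicit none
    integer, parameter :: dbl = kind(1.0d0)
    real (kind=dbl), parameter :: e = 2.0_dbl   ! yield-ellipse aspect ratio, typical value
    real (kind=dbl) :: divT, tensionT, shearT, DeltaT
    divT     = 1.0e-7_dbl    ! strain rates (1/s), illustrative magnitudes
    tensionT = 5.0e-8_dbl
    shearT   = 5.0e-8_dbl
    DeltaT   = sqrt(divT**2 + (tensionT**2 + shearT**2)/e**2)
    print *, 'DeltaT =', DeltaT
  end program delta_sketch

(For the C grid, deformationsC_T replaces the shearT**2 contribution with an area-weighted average of shearU**2 from the four surrounding U points, as the shearTsqr expression below shows.)
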
deformations for mechanical redistribution !----------------------------------------------------------------- - + shearTsqr = (shearU(i ,j )**2 * uarea(i ,j ) & + shearU(i ,j-1)**2 * uarea(i ,j-1) & + shearU(i-1,j-1)**2 * uarea(i-1,j-1) & @@ -2014,22 +2014,22 @@ end subroutine strain_rates ! Nov 2021 subroutine strain_rates_Tdtsd (nx_block, ny_block, & - icellt, & - indxti, indxtj, & - uvelE, vvelE, & - uvelN, vvelN, & - dxN, dyE, & - dxT, dyT, & - divT, tensionT, & - shearT, DeltaT ) + icellT, & + indxTi, indxTj, & + uvelE, vvelE, & + uvelN, vvelN, & + dxN, dyE, & + dxT, dyT, & + divT, tensionT, & + shearT, DeltaT ) integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellt + icellT integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & - indxti , & ! compressed index in i-direction - indxtj ! compressed index in j-direction + indxTi , & ! compressed index in i-direction + indxTj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & uvelE , & ! x-component of velocity (m/s) at the E point @@ -2061,8 +2061,8 @@ subroutine strain_rates_Tdtsd (nx_block, ny_block, & ! compute divT, tensionT call strain_rates_Tdt (nx_block, ny_block, & - icellt, & - indxti, indxtj, & + icellT, & + indxTi, indxTj, & uvelE, vvelE, & uvelN, vvelN, & dxN, dyE, & @@ -2072,9 +2072,9 @@ subroutine strain_rates_Tdtsd (nx_block, ny_block, & shearT (:,:) = c0 deltaT (:,:) = c0 - do ij = 1, icellt - i = indxti(ij) - j = indxtj(ij) + do ij = 1, icellT + i = indxTi(ij) + j = indxTj(ij) ! shearing strain rate = 2*e_12 shearT(i,j) = (dxT(i,j)**2)*(uvelN(i,j)/dxN(i,j) - uvelN(i,j-1)/dxN(i,j-1)) & @@ -2094,21 +2094,21 @@ end subroutine strain_rates_Tdtsd ! Nov 2021 subroutine strain_rates_Tdt (nx_block, ny_block, & - icellt, & - indxti, indxtj, & - uvelE, vvelE, & - uvelN, vvelN, & - dxN, dyE, & - dxT, dyT, & - divT, tensionT ) + icellT, & + indxTi, indxTj, & + uvelE, vvelE, & + uvelN, vvelN, & + dxN, dyE, & + dxT, dyT, & + divT, tensionT ) integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellt + icellT integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & - indxti , & ! compressed index in i-direction - indxtj ! compressed index in j-direction + indxTi , & ! compressed index in i-direction + indxTj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & uvelE , & ! x-component of velocity (m/s) at the E point @@ -2139,9 +2139,9 @@ subroutine strain_rates_Tdt (nx_block, ny_block, & divT (:,:) = c0 tensionT(:,:) = c0 - do ij = 1, icellt - i = indxti(ij) - j = indxtj(ij) + do ij = 1, icellT + i = indxTi(ij) + j = indxTj(ij) ! divergence = e_11 + e_22 divT (i,j)= dyE(i,j)*uvelE(i ,j ) - dyE(i-1,j)*uvelE(i-1,j ) & @@ -2162,8 +2162,8 @@ end subroutine strain_rates_Tdt ! Nov 2021 subroutine strain_rates_U (nx_block, ny_block, & - icellu, & - indxui, indxuj, & + icellU, & + indxUi, indxUj, & uvelE, vvelE, & uvelN, vvelN, & uvelU, vvelU, & @@ -2177,11 +2177,11 @@ subroutine strain_rates_U (nx_block, ny_block, & integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellu + icellU integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & - indxui , & ! compressed index in i-direction - indxuj ! compressed index in j-direction + indxUi , & ! compressed index in i-direction + indxUj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & uvelE , & ! 
x-component of velocity (m/s) at the E point @@ -2227,9 +2227,9 @@ subroutine strain_rates_U (nx_block, ny_block, & shearU (:,:) = c0 deltaU (:,:) = c0 - do ij = 1, icellu - i = indxui(ij) - j = indxuj(ij) + do ij = 1, icellU + i = indxUi(ij) + j = indxUj(ij) uNip1j = uvelN(i+1,j) * npm(i+1,j) & +(npm(i,j)-npm(i+1,j)) * npm(i,j) * ratiodxN(i,j) * uvelN(i,j) @@ -2326,7 +2326,7 @@ end subroutine visc_replpress subroutine dyn_haloUpdate1(halo_info, halo_info_mask, field_loc, field_type, fld1) use ice_boundary, only: ice_halo, ice_HaloUpdate - use ice_domain, only: nblocks, maskhalo_dyn, halo_dynbundle + use ice_domain, only: maskhalo_dyn, halo_dynbundle use ice_timers, only: ice_timer_start, ice_timer_stop, timer_bound type (ice_halo), intent(in) :: & @@ -2342,12 +2342,6 @@ subroutine dyn_haloUpdate1(halo_info, halo_info_mask, field_loc, field_type, fld ! local variables - integer (kind=int_kind) :: & - iblk ! iblock - - real (kind=dbl_kind), dimension (nx_block,ny_block,1,max_blocks) :: & - fldbundle ! work array for boundary updates - character(len=*), parameter :: subname = '(dyn_haloUpdate1)' call ice_timer_start(timer_bound) @@ -2370,7 +2364,7 @@ end subroutine dyn_haloUpdate1 subroutine dyn_haloUpdate2(halo_info, halo_info_mask, field_loc, field_type, fld1, fld2) use ice_boundary, only: ice_halo, ice_HaloUpdate - use ice_domain, only: nblocks, maskhalo_dyn, halo_dynbundle + use ice_domain, only: maskhalo_dyn, halo_dynbundle use ice_timers, only: ice_timer_start, ice_timer_stop, timer_bound type (ice_halo), intent(in) :: & @@ -2387,9 +2381,6 @@ subroutine dyn_haloUpdate2(halo_info, halo_info_mask, field_loc, field_type, fld ! local variables - integer (kind=int_kind) :: & - iblk ! iblock - real (kind=dbl_kind), dimension (nx_block,ny_block,2,max_blocks) :: & fldbundle ! work array for boundary updates @@ -2434,7 +2425,7 @@ end subroutine dyn_haloUpdate2 subroutine dyn_haloUpdate3(halo_info, halo_info_mask, field_loc, field_type, fld1, fld2, fld3) use ice_boundary, only: ice_halo, ice_HaloUpdate - use ice_domain, only: nblocks, maskhalo_dyn, halo_dynbundle + use ice_domain, only: maskhalo_dyn, halo_dynbundle use ice_timers, only: ice_timer_start, ice_timer_stop, timer_bound type (ice_halo), intent(in) :: & @@ -2452,9 +2443,6 @@ subroutine dyn_haloUpdate3(halo_info, halo_info_mask, field_loc, field_type, fld ! local variables - integer (kind=int_kind) :: & - iblk ! iblock - real (kind=dbl_kind), dimension (nx_block,ny_block,3,max_blocks) :: & fldbundle ! work array for boundary updates @@ -2503,7 +2491,7 @@ end subroutine dyn_haloUpdate3 subroutine dyn_haloUpdate4(halo_info, halo_info_mask, field_loc, field_type, fld1, fld2, fld3, fld4) use ice_boundary, only: ice_halo, ice_HaloUpdate - use ice_domain, only: nblocks, maskhalo_dyn, halo_dynbundle + use ice_domain, only: maskhalo_dyn, halo_dynbundle use ice_timers, only: ice_timer_start, ice_timer_stop, timer_bound type (ice_halo), intent(in) :: & @@ -2522,9 +2510,6 @@ subroutine dyn_haloUpdate4(halo_info, halo_info_mask, field_loc, field_type, fld ! local variables - integer (kind=int_kind) :: & - iblk ! iblock - real (kind=dbl_kind), dimension (nx_block,ny_block,4,max_blocks) :: & fldbundle ! 
work array for boundary updates @@ -2577,7 +2562,7 @@ end subroutine dyn_haloUpdate4 subroutine dyn_haloUpdate5(halo_info, halo_info_mask, field_loc, field_type, fld1, fld2, fld3, fld4, fld5) use ice_boundary, only: ice_halo, ice_HaloUpdate - use ice_domain, only: nblocks, maskhalo_dyn, halo_dynbundle + use ice_domain, only: maskhalo_dyn, halo_dynbundle use ice_timers, only: ice_timer_start, ice_timer_stop, timer_bound type (ice_halo), intent(in) :: & @@ -2597,9 +2582,6 @@ subroutine dyn_haloUpdate5(halo_info, halo_info_mask, field_loc, field_type, fld ! local variables - integer (kind=int_kind) :: & - iblk ! iblock - real (kind=dbl_kind), dimension (nx_block,ny_block,5,max_blocks) :: & fldbundle ! work array for boundary updates diff --git a/cicecore/cicedynB/dynamics/ice_dyn_vp.F90 b/cicecore/cicedynB/dynamics/ice_dyn_vp.F90 index 24421a91f..17fd0b73f 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_vp.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_vp.F90 @@ -50,7 +50,7 @@ module ice_dyn_vp seabed_stress_factor_LKD, seabed_stress_factor_prob, seabed_stress_method, & seabed_stress, Ktens, stack_fields, unstack_fields use ice_fileunits, only: nu_diag - use ice_flux, only: fm + use ice_flux, only: fmU use ice_global_reductions, only: global_sum, global_allreduce_sum use ice_grid, only: dxT, dyT, dxhy, dyhx, cxp, cyp, cxm, cym, uarear use ice_exit, only: abort_ice @@ -69,7 +69,8 @@ module ice_dyn_vp dim_pgmres , & ! size of pgmres Krylov subspace maxits_fgmres , & ! max nb of iteration for fgmres maxits_pgmres , & ! max nb of iteration for pgmres - fpfunc_andacc , & ! fixed point function for Anderson acceleration: 1: g(x) = FMGRES(A(x),b(x)), 2: g(x) = x - A(x)x + b(x) + fpfunc_andacc , & ! fixed point function for Anderson acceleration: + ! 1: g(x) = FMGRES(A(x),b(x)), 2: g(x) = x - A(x)x + b(x) dim_andacc , & ! size of Anderson minimization matrix (number of saved previous residuals) start_andacc ! acceleration delay factor (acceleration starts at this iteration) @@ -87,7 +88,8 @@ module ice_dyn_vp reltol_andacc ! relative tolerance for Anderson acceleration character (len=char_len), public :: & - precond , & ! preconditioner for fgmres: 'ident' (identity), 'diag' (diagonal), 'pgmres' (Jacobi-preconditioned GMRES) + precond , & ! preconditioner for fgmres: 'ident' (identity), 'diag' (diagonal), + ! 'pgmres' (Jacobi-preconditioned GMRES) algo_nonlin , & ! nonlinear algorithm: 'picard' (Picard iteration), 'anderson' (Anderson acceleration) ortho_type ! 
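!-----------------------------------------------------------------
! The two fixed-point maps selected by fpfunc_andacc rewrite the
! nonlinear momentum system F(u) = A(u)u - b(u) = 0 as u = g(u):
!
!   fpfunc_andacc = 1 :  g(u) = FGMRES(A(u), b(u))   (solve A(u)x = b(u))
!   fpfunc_andacc = 2 :  g(u) = u - A(u)u + b(u)     (one fixed-point sweep)
!
! Option 1 is the classical Picard map, one full linear solve per
! nonlinear iteration; option 2 is much cheaper per iteration but
! generally needs many more of them.
!-----------------------------------------------------------------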
type of orthogonalization for FGMRES ('cgs' or 'mgs') @@ -167,15 +169,15 @@ subroutine implicit_solver (dt) use ice_domain_size, only: max_blocks, ncat use ice_dyn_shared, only: deformations use ice_flux, only: rdg_conv, rdg_shear, strairxT, strairyT, & - strairx, strairy, uocn, vocn, ss_tltx, ss_tlty, iceumask, fm, & - strtltx, strtlty, strocnx, strocny, strintx, strinty, taubx, tauby, & + strairxU, strairyU, uocn, vocn, ss_tltx, ss_tlty, fmU, & + strtltxU, strtltyU, strocnxU, strocnyU, strintxU, strintyU, taubxU, taubyU, & strocnxT, strocnyT, strax, stray, & - Tbu, hwater, & + TbU, hwater, & stressp_1, stressp_2, stressp_3, stressp_4, & stressm_1, stressm_2, stressm_3, stressm_4, & stress12_1, stress12_2, stress12_3, stress12_4 use ice_grid, only: tmask, umask, dxT, dyT, cxp, cyp, cxm, cym, & - tarear, grid_type, grid_average_X2Y, & + tarear, grid_type, grid_average_X2Y, iceumask, & grid_atm_dynu, grid_atm_dynv, grid_ocn_dynu, grid_ocn_dynv use ice_state, only: aice, vice, vsno, uvel, vvel, divu, shear, & aice_init, aice0, aicen, vicen, strength @@ -199,16 +201,16 @@ subroutine implicit_solver (dt) ss_tltxU , & ! sea surface slope, x-direction (m/m) ss_tltyU , & ! sea surface slope, y-direction (m/m) tmass , & ! total mass of ice and snow (kg/m^2) - waterx , & ! for ocean stress calculation, x (m/s) - watery , & ! for ocean stress calculation, y (m/s) - forcex , & ! work array: combined atm stress and ocn tilt, x - forcey , & ! work array: combined atm stress and ocn tilt, y + waterxU , & ! for ocean stress calculation, x (m/s) + wateryU , & ! for ocean stress calculation, y (m/s) + forcexU , & ! work array: combined atm stress and ocn tilt, x + forceyU , & ! work array: combined atm stress and ocn tilt, y bxfix , & ! part of bx that is constant during Picard byfix , & ! part of by that is constant during Picard Cb , & ! seabed stress coefficient fpresx , & ! fixed point residual vector, x components: fx = uvel - uprev_k fpresy , & ! fixed point residual vector, y components: fy = vvel - vprev_k - aiu , & ! ice fraction on u-grid + aiU , & ! ice fraction on u-grid umass , & ! total mass of ice and snow (u grid) umassdti ! mass of U-cell/dte (kg/m^2 s) @@ -303,7 +305,7 @@ subroutine implicit_solver (dt) !----------------------------------------------------------------- call grid_average_X2Y('F',tmass , 'T', umass, 'U') - call grid_average_X2Y('F',aice_init, 'T', aiu , 'U') + call grid_average_X2Y('F',aice_init, 'T', aiU , 'U') call grid_average_X2Y('S',uocn , grid_ocn_dynu, uocnU , 'U') call grid_average_X2Y('S',vocn , grid_ocn_dynv, vocnU , 'U') call grid_average_X2Y('S',ss_tltx, grid_ocn_dynu, ss_tltxU, 'U') @@ -319,15 +321,15 @@ subroutine implicit_solver (dt) file=__FILE__, line=__LINE__) if (.not. calc_strair) then - call grid_average_X2Y('F', strax, grid_atm_dynu, strairx, 'U') - call grid_average_X2Y('F', stray, grid_atm_dynv, strairy, 'U') + call grid_average_X2Y('F', strax, grid_atm_dynu, strairxU, 'U') + call grid_average_X2Y('F', stray, grid_atm_dynv, strairyU, 'U') else call ice_HaloUpdate (strairxT, halo_info, & field_loc_center, field_type_vector) call ice_HaloUpdate (strairyT, halo_info, & field_loc_center, field_type_vector) - call grid_average_X2Y('F',strairxT,'T',strairx,'U') - call grid_average_X2Y('F',strairyT,'T',strairy,'U') + call grid_average_X2Y('F',strairxT,'T',strairxU,'U') + call grid_average_X2Y('F',strairyT,'T',strairyU,'U') endif ! 
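!-----------------------------------------------------------------
! Note on the strairxT/strairyT halo updates just above: averaging
! from T to U points reads the i+1 and j+1 neighbours, so along
! block edges grid_average_X2Y would otherwise pick up stale halo
! values; the updates must precede the T-to-U mapping.
!-----------------------------------------------------------------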
tcraig, tcx, threading here leads to some non-reproducbile results and failures in icepack_ice_strength @@ -350,20 +352,20 @@ subroutine implicit_solver (dt) icellt(iblk), icellu(iblk), & indxti (:,iblk), indxtj (:,iblk), & indxui (:,iblk), indxuj (:,iblk), & - aiu (:,:,iblk), umass (:,:,iblk), & + aiU (:,:,iblk), umass (:,:,iblk), & umassdti (:,:,iblk), fcor_blk (:,:,iblk), & umask (:,:,iblk), & uocnU (:,:,iblk), vocnU (:,:,iblk), & - strairx (:,:,iblk), strairy (:,:,iblk), & + strairxU (:,:,iblk), strairyU (:,:,iblk), & ss_tltxU (:,:,iblk), ss_tltyU (:,:,iblk), & icetmask (:,:,iblk), iceumask (:,:,iblk), & - fm (:,:,iblk), dt, & - strtltx (:,:,iblk), strtlty (:,:,iblk), & - strocnx (:,:,iblk), strocny (:,:,iblk), & - strintx (:,:,iblk), strinty (:,:,iblk), & - taubx (:,:,iblk), tauby (:,:,iblk), & - waterx (:,:,iblk), watery (:,:,iblk), & - forcex (:,:,iblk), forcey (:,:,iblk), & + fmU (:,:,iblk), dt, & + strtltxU (:,:,iblk), strtltyU (:,:,iblk), & + strocnxU (:,:,iblk), strocnyU (:,:,iblk), & + strintxU (:,:,iblk), strintyU (:,:,iblk), & + taubxU (:,:,iblk), taubyU (:,:,iblk), & + waterxU (:,:,iblk), wateryU (:,:,iblk), & + forcexU (:,:,iblk), forceyU (:,:,iblk), & stressp_1 (:,:,iblk), stressp_2 (:,:,iblk), & stressp_3 (:,:,iblk), stressp_4 (:,:,iblk), & stressm_1 (:,:,iblk), stressm_2 (:,:,iblk), & @@ -372,13 +374,13 @@ subroutine implicit_solver (dt) stress12_3(:,:,iblk), stress12_4(:,:,iblk), & uvel_init (:,:,iblk), vvel_init (:,:,iblk), & uvel (:,:,iblk), vvel (:,:,iblk), & - Tbu (:,:,iblk)) + TbU (:,:,iblk)) call calc_bfix (nx_block , ny_block , & icellu(iblk) , & indxui (:,iblk), indxuj (:,iblk), & umassdti (:,:,iblk), & - forcex (:,:,iblk), forcey (:,:,iblk), & + forcexU (:,:,iblk), forceyU (:,:,iblk), & uvel_init (:,:,iblk), vvel_init (:,:,iblk), & bxfix (:,:,iblk), byfix (:,:,iblk)) @@ -427,7 +429,7 @@ subroutine implicit_solver (dt) endif !----------------------------------------------------------------- - ! seabed stress factor Tbu (Tbu is part of Cb coefficient) + ! seabed stress factor TbU (TbU is part of Cb coefficient) !----------------------------------------------------------------- if (seabed_stress) then if ( seabed_stress_method == 'LKD' ) then @@ -437,7 +439,7 @@ subroutine implicit_solver (dt) icellu (iblk), & indxui(:,iblk), indxuj(:,iblk), & vice(:,:,iblk), aice(:,:,iblk), & - hwater(:,:,iblk), Tbu(:,:,iblk)) + hwater(:,:,iblk), TbU(:,:,iblk)) enddo !$OMP END PARALLEL DO @@ -449,7 +451,7 @@ subroutine implicit_solver (dt) icellt(iblk), indxti(:,iblk), indxtj(:,iblk), & icellu(iblk), indxui(:,iblk), indxuj(:,iblk), & aicen(:,:,:,iblk), vicen(:,:,:,iblk), & - hwater(:,:,iblk), Tbu(:,:,iblk)) + hwater(:,:,iblk), TbU(:,:,iblk)) enddo !$OMP END PARALLEL DO @@ -472,17 +474,17 @@ subroutine implicit_solver (dt) !----------------------------------------------------------------- ! Start of nonlinear iteration !----------------------------------------------------------------- - call anderson_solver (icellt , icellu, & - indxti , indxtj, & - indxui , indxuj, & - aiu , ntot , & - uocnU , vocnU , & - waterx , watery, & - bxfix , byfix , & - umassdti, sol , & - fpresx , fpresy, & - zetax2 , etax2 , & - rep_prs , & + call anderson_solver (icellt , icellu , & + indxti , indxtj , & + indxui , indxuj , & + aiU , ntot , & + uocnU , vocnU , & + waterxU , wateryU, & + bxfix , byfix , & + umassdti, sol , & + fpresx , fpresy , & + zetax2 , etax2 , & + rep_prs , & Cb, halo_info_mask) !----------------------------------------------------------------- ! 
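!-----------------------------------------------------------------
! For reference, the LKD seabed stress factor computed by
! seabed_stress_factor_LKD above follows Lemieux et al. (2016):
! with mean thickness h, concentration a and water depth hwater at
! the U point,
!
!   h_c = a * hwater / k1                          (critical thickness)
!   TbU = k2 * max(c0, h - h_c) * exp(-alpha_b * (c1 - a))
!
! so TbU vanishes in deep water and switches on where ridged keels
! would ground; the probabilistic method instead integrates over
! the ice thickness distribution.
!-----------------------------------------------------------------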
End of nonlinear iteration @@ -544,7 +546,7 @@ subroutine implicit_solver (dt) indxui (:,iblk), indxuj (:,iblk), & uvel (:,:,iblk), vvel (:,:,iblk), & Cb (:,:,iblk), & - taubx (:,:,iblk), tauby (:,:,iblk)) + taubxU (:,:,iblk), taubyU (:,:,iblk)) enddo !$OMP END PARALLEL DO endif @@ -630,17 +632,17 @@ subroutine implicit_solver (dt) indxui (:,iblk), indxuj (:,iblk), & uvel (:,:,iblk), vvel (:,:,iblk), & uocnU (:,:,iblk), vocnU (:,:,iblk), & - aiu (:,:,iblk), fm (:,:,iblk), & -! strintx (:,:,iblk), strinty (:,:,iblk), & -! strairx (:,:,iblk), strairy (:,:,iblk), & - strocnx (:,:,iblk), strocny (:,:,iblk)) + aiU (:,:,iblk), fmU (:,:,iblk), & +! strintxU(:,:,iblk), strintyU(:,:,iblk), & +! strairxU(:,:,iblk), strairyU(:,:,iblk), & + strocnxU(:,:,iblk), strocnyU(:,:,iblk)) enddo !$OMP END PARALLEL DO - ! strocn computed on U, N, E as needed. Map strocn U divided by aiu to T + ! strocn computed on U, N, E as needed. Map strocn U divided by aiU to T ! TODO: This should be done elsewhere as part of generalization? - ! conservation requires aiu be divided before averaging + ! conservation requires aiU be divided before averaging work1 = c0 work2 = c0 !$OMP PARALLEL DO PRIVATE(iblk,ij,i,j) @@ -648,8 +650,8 @@ subroutine implicit_solver (dt) do ij = 1, icellu(iblk) i = indxui(ij,iblk) j = indxuj(ij,iblk) - work1(i,j,iblk) = strocnx(i,j,iblk)/aiu(i,j,iblk) - work2(i,j,iblk) = strocny(i,j,iblk)/aiu(i,j,iblk) + work1(i,j,iblk) = strocnxU(i,j,iblk)/aiU(i,j,iblk) + work2(i,j,iblk) = strocnyU(i,j,iblk)/aiU(i,j,iblk) enddo enddo call ice_HaloUpdate (work1, halo_info, & @@ -683,17 +685,17 @@ end subroutine implicit_solver ! H. F. Walker, “Anderson Acceleration: Algorithms and Implementations” ! [Online]. Available: https://users.wpi.edu/~walker/Papers/anderson_accn_algs_imps.pdf - subroutine anderson_solver (icellt , icellu, & - indxti , indxtj, & - indxui , indxuj, & - aiu , ntot , & - uocn , vocn , & - waterx , watery, & - bxfix , byfix , & - umassdti, sol , & - fpresx , fpresy, & - zetax2 , etax2 , & - rep_prs , & + subroutine anderson_solver (icellt , icellu , & + indxti , indxtj , & + indxui , indxuj , & + aiU , ntot , & + uocn , vocn , & + waterxU , wateryU, & + bxfix , byfix , & + umassdti, sol , & + fpresx , fpresy , & + zetax2 , etax2 , & + rep_prs , & Cb, halo_info_mask) use ice_arrays_column, only: Cdn_ocn @@ -702,7 +704,7 @@ subroutine anderson_solver (icellt , icellu, & use ice_constants, only: c1 use ice_domain, only: maskhalo_dyn, halo_info use ice_domain_size, only: max_blocks - use ice_flux, only: fm, Tbu + use ice_flux, only: fmU, TbU use ice_grid, only: dxT, dyT, dxhy, dyhx, cxp, cyp, cxm, cym, & uarear use ice_dyn_shared, only: DminTarea @@ -723,11 +725,11 @@ subroutine anderson_solver (icellt , icellu, & indxuj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks), intent(in) :: & - aiu , & ! ice fraction on u-grid + aiU , & ! ice fraction on u-grid uocn , & ! i ocean current (m/s) vocn , & ! j ocean current (m/s) - waterx , & ! for ocean stress calculation, x (m/s) - watery , & ! for ocean stress calculation, y (m/s) + waterxU , & ! for ocean stress calculation, x (m/s) + wateryU , & ! for ocean stress calculation, y (m/s) bxfix , & ! part of bx that is constant during Picard byfix , & ! part of by that is constant during Picard umassdti ! 
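!-----------------------------------------------------------------
! Anderson acceleration in outline (cf. the Walker reference
! above): keep the last dim_andacc residuals r_k = g(u_k) - u_k,
! solve a small least-squares problem for mixing weights gamma,
! then combine previous iterates instead of taking g(u_k) directly:
!
!   minimize || r_k - R_k * gamma ||_2     (R_k: residual differences)
!   u_{k+1} = g(u_k) - G_k * gamma         (G_k: g-value differences)
!
! With zero history this reduces to plain Picard iteration
! u_{k+1} = g(u_k); the least-squares solve is why LAPACK is
! required when algo_nonlin = 'anderson'.
!-----------------------------------------------------------------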
mass of U-cell/dte (kg/m^2 s) @@ -862,7 +864,7 @@ subroutine anderson_solver (icellt , icellu, & call calc_vrel_Cb (nx_block , ny_block , & icellu (iblk), Cdn_ocn (:,:,iblk), & indxui (:,iblk), indxuj (:,iblk), & - aiu (:,:,iblk), Tbu (:,:,iblk), & + aiU (:,:,iblk), TbU (:,:,iblk), & uocn (:,:,iblk), vocn (:,:,iblk), & ulin (:,:,iblk), vlin (:,:,iblk), & vrel (:,:,iblk), Cb (:,:,iblk)) @@ -872,7 +874,7 @@ subroutine anderson_solver (icellt , icellu, & icellu (iblk), & indxui (:,iblk), indxuj (:,iblk), & stress_Pr (:,:,:), uarear (:,:,iblk), & - waterx (:,:,iblk), watery (:,:,iblk), & + waterxU (:,:,iblk), wateryU (:,:,iblk), & bxfix (:,:,iblk), byfix (:,:,iblk), & bx (:,:,iblk), by (:,:,iblk), & vrel (:,:,iblk)) @@ -889,7 +891,7 @@ subroutine anderson_solver (icellt , icellu, & uprev_k (:,:,iblk) , vprev_k (:,:,iblk), & vrel (:,:,iblk) , Cb (:,:,iblk), & zetax2 (:,:,iblk,:), etax2 (:,:,iblk,:), & - umassdti (:,:,iblk) , fm (:,:,iblk), & + umassdti (:,:,iblk) , fmU (:,:,iblk), & uarear (:,:,iblk) , & Au (:,:,iblk) , Av (:,:,iblk)) call residual_vec (nx_block , ny_block , & @@ -1095,7 +1097,8 @@ subroutine anderson_solver (icellt , icellu, & endif #else ! Anderson solver is not usable without LAPACK; abort - call abort_ice(error_message=subname // " CICE was not compiled with LAPACK, and Anderson solver was chosen (algo_nonlin = 'anderson')" , & + call abort_ice(error_message=subname // " CICE was not compiled with LAPACK, "// & + "and Anderson solver was chosen (algo_nonlin = 'anderson')" , & file=__FILE__, line=__LINE__) #endif endif @@ -1457,7 +1460,7 @@ end subroutine stress_vp subroutine calc_vrel_Cb (nx_block, ny_block, & icellu , Cw , & indxui , indxuj , & - aiu , Tbu , & + aiU , TbU , & uocn , vocn , & uvel , vvel , & vrel , Cb) @@ -1473,8 +1476,8 @@ subroutine calc_vrel_Cb (nx_block, ny_block, & indxuj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & - Tbu, & ! seabed stress factor (N/m^2) - aiu , & ! ice fraction on u-grid + TbU, & ! seabed stress factor (N/m^2) + aiU , & ! ice fraction on u-grid uocn , & ! ocean current, x-direction (m/s) vocn , & ! ocean current, y-direction (m/s) Cw ! ocean-ice neutral drag coefficient @@ -1507,10 +1510,10 @@ subroutine calc_vrel_Cb (nx_block, ny_block, & j = indxuj(ij) ! (magnitude of relative ocean current)*rhow*drag*aice - vrel(i,j) = aiu(i,j)*rhow*Cw(i,j)*sqrt((uocn(i,j) - uvel(i,j))**2 + & + vrel(i,j) = aiU(i,j)*rhow*Cw(i,j)*sqrt((uocn(i,j) - uvel(i,j))**2 + & (vocn(i,j) - vvel(i,j))**2) ! m/s - Cb(i,j) = Tbu(i,j) / (sqrt(uvel(i,j)**2 + vvel(i,j)**2) + u0) ! for seabed stress + Cb(i,j) = TbU(i,j) / (sqrt(uvel(i,j)**2 + vvel(i,j)**2) + u0) ! for seabed stress enddo ! ij end subroutine calc_vrel_Cb @@ -1524,7 +1527,7 @@ subroutine calc_seabed_stress (nx_block, ny_block, & indxui , indxuj , & uvel , vvel , & Cb , & - taubx , tauby) + taubxU , taubyU) integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions @@ -1540,8 +1543,8 @@ subroutine calc_seabed_stress (nx_block, ny_block, & Cb ! seabed stress coefficient real (kind=dbl_kind), dimension (nx_block,ny_block), intent(out) :: & - taubx , & ! seabed stress, x-direction (N/m^2) - tauby ! seabed stress, y-direction (N/m^2) + taubxU , & ! seabed stress, x-direction (N/m^2) + taubyU ! seabed stress, y-direction (N/m^2) ! 
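!-----------------------------------------------------------------
! With Cb = TbU / (sqrt(uvel**2 + vvel**2) + u0) from calc_vrel_Cb
! above, the seabed stress computed below is taubxU = -Cb*uvel and
! taubyU = -Cb*vvel: it opposes the ice motion, and its magnitude
! TbU * |u| / (|u| + u0) is bounded by TbU, while u0 keeps Cb
! finite as the ice comes to rest.
!-----------------------------------------------------------------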
local variables @@ -1554,8 +1557,8 @@ subroutine calc_seabed_stress (nx_block, ny_block, & i = indxui(ij) j = indxuj(ij) - taubx(i,j) = -uvel(i,j)*Cb(i,j) - tauby(i,j) = -vvel(i,j)*Cb(i,j) + taubxU(i,j) = -uvel(i,j)*Cb(i,j) + taubyU(i,j) = -vvel(i,j)*Cb(i,j) enddo ! ij end subroutine calc_seabed_stress @@ -1577,7 +1580,7 @@ subroutine matvec (nx_block, ny_block, & uvel , vvel , & vrel , Cb , & zetax2 , etax2 , & - umassdti, fm , & + umassdti, fmU , & uarear , & Au , Av) @@ -1610,7 +1613,7 @@ subroutine matvec (nx_block, ny_block, & vrel , & ! coefficient for tauw Cb , & ! coefficient for seabed stress umassdti, & ! mass of U-cell/dt (kg/m^2 s) - fm , & ! Coriolis param. * mass in U-cell (kg/s) + fmU , & ! Coriolis param. * mass in U-cell (kg/s) uarear ! 1/uarea real (kind=dbl_kind), dimension(nx_block,ny_block,4), intent(in) :: & @@ -1816,7 +1819,7 @@ subroutine matvec (nx_block, ny_block, & ccaimp = umassdti(i,j) + vrel(i,j) * cosw + Cb(i,j) ! kg/m^2 s - ccb = fm(i,j) + sign(c1,fm(i,j)) * vrel(i,j) * sinw ! kg/m^2 s + ccb = fmU(i,j) + sign(c1,fmU(i,j)) * vrel(i,j) * sinw ! kg/m^2 s ! divergence of the internal stress tensor strintx = uarear(i,j)* & @@ -1839,7 +1842,7 @@ subroutine calc_bfix (nx_block , ny_block , & icellu , & indxui , indxuj , & umassdti , & - forcex , forcey , & + forcexU , forceyU , & uvel_init, vvel_init, & bxfix , byfix) @@ -1855,8 +1858,8 @@ subroutine calc_bfix (nx_block , ny_block , & uvel_init,& ! x-component of velocity (m/s), beginning of time step vvel_init,& ! y-component of velocity (m/s), beginning of time step umassdti, & ! mass of U-cell/dt (kg/m^2 s) - forcex , & ! work array: combined atm stress and ocn tilt, x - forcey ! work array: combined atm stress and ocn tilt, y + forcexU , & ! work array: combined atm stress and ocn tilt, x + forceyU ! work array: combined atm stress and ocn tilt, y real (kind=dbl_kind), dimension (nx_block,ny_block), intent(out) :: & bxfix , & ! bx = taux + bxfix @@ -1873,8 +1876,8 @@ subroutine calc_bfix (nx_block , ny_block , & i = indxui(ij) j = indxuj(ij) - bxfix(i,j) = umassdti(i,j)*uvel_init(i,j) + forcex(i,j) - byfix(i,j) = umassdti(i,j)*vvel_init(i,j) + forcey(i,j) + bxfix(i,j) = umassdti(i,j)*uvel_init(i,j) + forcexU(i,j) + byfix(i,j) = umassdti(i,j)*vvel_init(i,j) + forceyU(i,j) enddo end subroutine calc_bfix @@ -1889,7 +1892,7 @@ subroutine calc_bvec (nx_block, ny_block, & icellu , & indxui , indxuj , & stPr , uarear , & - waterx , watery , & + waterxU , wateryU , & bxfix , byfix , & bx , by , & vrel) @@ -1904,8 +1907,8 @@ subroutine calc_bvec (nx_block, ny_block, & real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & uarear , & ! 1/uarea - waterx , & ! for ocean stress calculation, x (m/s) - watery , & ! for ocean stress calculation, y (m/s) + waterxU , & ! for ocean stress calculation, x (m/s) + wateryU , & ! for ocean stress calculation, y (m/s) bxfix , & ! bx = taux + bxfix byfix , & ! by = tauy + byfix vrel ! relative ice-ocean velocity @@ -1943,8 +1946,8 @@ subroutine calc_bvec (nx_block, ny_block, & j = indxuj(ij) ! ice/ocean stress - taux = vrel(i,j)*waterx(i,j) ! NOTE this is not the entire - tauy = vrel(i,j)*watery(i,j) ! ocn stress term + taux = vrel(i,j)*waterxU(i,j) ! NOTE this is not the entire + tauy = vrel(i,j)*wateryU(i,j) ! ocn stress term ! divergence of the internal stress tensor (only Pr part, i.e. 
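!-----------------------------------------------------------------
! How the right-hand side is assembled: calc_bfix precomputes the
! Picard-constant part, bxfix = umassdti*uvel_init + forcexU, and
! calc_bvec adds the velocity-dependent pieces each nonlinear
! iteration, schematically
!
!   bx = bxfix + vrel*waterxU + (replacement-pressure gradient from stPr)
!
! so only the vrel-weighted water stress and the Pr terms are
! refreshed as the iterates evolve.
!-----------------------------------------------------------------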
dPr/dx, dPr/dy) strintx = uarear(i,j)* & @@ -2831,7 +2834,7 @@ subroutine fgmres (zetax2 , etax2 , & solx (:,:,iblk) , soly (:,:,iblk), & vrel (:,:,iblk) , Cb (:,:,iblk), & zetax2 (:,:,iblk,:), etax2 (:,:,iblk,:), & - umassdti (:,:,iblk) , fm (:,:,iblk), & + umassdti (:,:,iblk) , fmU (:,:,iblk), & uarear (:,:,iblk) , & workspace_x(:,:,iblk) , workspace_y(:,:,iblk)) call residual_vec (nx_block , ny_block , & @@ -2938,7 +2941,7 @@ subroutine fgmres (zetax2 , etax2 , & workspace_x(:,:,iblk) , workspace_y(:,:,iblk), & vrel (:,:,iblk) , Cb (:,:,iblk), & zetax2 (:,:,iblk,:), etax2 (:,:,iblk,:), & - umassdti (:,:,iblk) , fm (:,:,iblk), & + umassdti (:,:,iblk) , fmU (:,:,iblk), & uarear (:,:,iblk) , & arnoldi_basis_x(:,:,iblk,nextit), & arnoldi_basis_y(:,:,iblk,nextit)) @@ -3224,7 +3227,7 @@ subroutine pgmres (zetax2 , etax2 , & solx (:,:,iblk) , soly (:,:,iblk), & vrel (:,:,iblk) , Cb (:,:,iblk), & zetax2 (:,:,iblk,:), etax2 (:,:,iblk,:), & - umassdti (:,:,iblk) , fm (:,:,iblk), & + umassdti (:,:,iblk) , fmU (:,:,iblk), & uarear (:,:,iblk) , & workspace_x(:,:,iblk) , workspace_y(:,:,iblk)) call residual_vec (nx_block , ny_block , & @@ -3320,7 +3323,7 @@ subroutine pgmres (zetax2 , etax2 , & workspace_x(:,:,iblk) , workspace_y(:,:,iblk), & vrel (:,:,iblk) , Cb (:,:,iblk), & zetax2 (:,:,iblk,:), etax2 (:,:,iblk,:), & - umassdti (:,:,iblk) , fm (:,:,iblk), & + umassdti (:,:,iblk) , fmU (:,:,iblk), & uarear (:,:,iblk) , & arnoldi_basis_x(:,:,iblk,nextit), & arnoldi_basis_y(:,:,iblk,nextit)) diff --git a/cicecore/cicedynB/dynamics/ice_transport_driver.F90 b/cicecore/cicedynB/dynamics/ice_transport_driver.F90 index c3bf4cd15..43fe5af13 100644 --- a/cicecore/cicedynB/dynamics/ice_transport_driver.F90 +++ b/cicecore/cicedynB/dynamics/ice_transport_driver.F90 @@ -19,6 +19,7 @@ module ice_transport_driver field_type_scalar, field_type_vector, & field_loc_NEcorner, & field_loc_Nface, field_loc_Eface + use ice_diagnostics, only: diagnostic_abort use ice_fileunits, only: nu_diag use ice_exit, only: abort_ice use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted @@ -690,7 +691,7 @@ subroutine transport_remap (dt) if (ckflag) then write (nu_diag,*) 'istep1, my_task, iblk, cat =', & istep1, my_task, iblk, n - call abort_ice(subname//'ERROR: monotonicity error') + call diagnostic_abort(istop,jstop,iblk,' monotonicity error') endif enddo ! n @@ -1533,8 +1534,13 @@ subroutine state_to_work (nx_block, ny_block, & integer (kind=int_kind) :: & nt_alvl, nt_apnd, nt_fbri +#ifdef UNDEPRECATE_CESMPONDS logical (kind=log_kind) :: & tr_pond_cesm, tr_pond_lvl, tr_pond_topo +#else + logical (kind=log_kind) :: & + tr_pond_lvl, tr_pond_topo +#endif integer (kind=int_kind) :: & i, j, n, it, & ! counting indices @@ -1542,8 +1548,13 @@ subroutine state_to_work (nx_block, ny_block, & character(len=*), parameter :: subname = '(state_to_work)' +#ifdef UNDEPRECATE_CESMPONDS call icepack_query_tracer_flags(tr_pond_cesm_out=tr_pond_cesm, & tr_pond_lvl_out=tr_pond_lvl, tr_pond_topo_out=tr_pond_topo) +#else + call icepack_query_tracer_flags(tr_pond_lvl_out=tr_pond_lvl, & + tr_pond_topo_out=tr_pond_topo) +#endif call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_apnd_out=nt_apnd, & nt_fbri_out=nt_fbri) call icepack_warnings_flush(nu_diag) @@ -1602,8 +1613,13 @@ subroutine state_to_work (nx_block, ny_block, & * trcrn(i,j,it ,n) enddo enddo +#ifdef UNDEPRECATE_CESMPONDS elseif (trcr_depend(it) == 2+nt_apnd .and. & tr_pond_cesm .or. tr_pond_topo) then +#else + elseif (trcr_depend(it) == 2+nt_apnd .and. 
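!-----------------------------------------------------------------
! Reading the trcr_depend tests in state_to_work: 0, 1 and 2 mark
! tracers carried on ice area, ice volume and snow volume
! respectively, while 2+nt_X marks a child tracer riding on parent
! tracer nt_X (here 2+nt_apnd for pond tracers). The works() array
! therefore stores tracer*parent products so the remap conserves
! them, and the CESM pond branch only compiles when
! UNDEPRECATE_CESMPONDS is defined.
!-----------------------------------------------------------------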
& + tr_pond_topo) then +#endif do j = 1, ny_block do i = 1, nx_block works(i,j,narrays+it) = aicen(i,j ,n) & diff --git a/cicecore/cicedynB/dynamics/ice_transport_remap.F90 b/cicecore/cicedynB/dynamics/ice_transport_remap.F90 index 330816529..6fd037b7b 100644 --- a/cicecore/cicedynB/dynamics/ice_transport_remap.F90 +++ b/cicecore/cicedynB/dynamics/ice_transport_remap.F90 @@ -30,11 +30,13 @@ module ice_transport_remap use ice_kinds_mod use ice_blocks, only: nx_block, ny_block + use ice_calendar, only: istep1 use ice_communicate, only: my_task use ice_constants, only: c0, c1, c2, c12, p333, p4, p5, p6, & eps13, eps16, & field_loc_center, field_type_scalar, & field_loc_NEcorner, field_type_vector + use ice_diagnostics, only: diagnostic_abort use ice_domain_size, only: max_blocks, ncat use ice_fileunits, only: nu_diag use ice_exit, only: abort_ice @@ -329,7 +331,6 @@ subroutine horizontal_remap (dt, ntrace, & tarear, hm, & xav, yav, xxav, yyav ! xyav, xxxav, xxyav, xyyav, yyyav - use ice_calendar, only: istep1 use ice_timers, only: ice_timer_start, ice_timer_stop, timer_bound real (kind=dbl_kind), intent(in) :: & @@ -556,14 +557,7 @@ subroutine horizontal_remap (dt, ntrace, & istop, jstop) if (l_stop) then - write(nu_diag,*) 'istep1, my_task, iblk =', & - istep1, my_task, iblk - write (nu_diag,*) 'Global block:', this_block%block_id - if (istop > 0 .and. jstop > 0) & - write(nu_diag,*) 'Global i and j:', & - this_block%i_glob(istop), & - this_block%j_glob(jstop) - call abort_ice(subname//'ERROR: bad departure points') + call diagnostic_abort(istop,jstop,iblk,'bad departure points') endif enddo ! iblk @@ -832,15 +826,7 @@ subroutine horizontal_remap (dt, ntrace, & mm (:,:,0,iblk)) if (l_stop) then - this_block = get_block(blocks_ice(iblk),iblk) - write (nu_diag,*) 'istep1, my_task, iblk, cat =', & - istep1, my_task, iblk, '0' - write (nu_diag,*) 'Global block:', this_block%block_id - if (istop > 0 .and. jstop > 0) & - write(nu_diag,*) 'Global i and j:', & - this_block%i_glob(istop), & - this_block%j_glob(jstop) - call abort_ice (subname//'ERROR: negative area (open water)') + call diagnostic_abort(istop,jstop,iblk,'negative area (open water)') endif ! ice categories @@ -860,12 +846,7 @@ subroutine horizontal_remap (dt, ntrace, & if (l_stop) then write (nu_diag,*) 'istep1, my_task, iblk, cat =', & istep1, my_task, iblk, n - write (nu_diag,*) 'Global block:', this_block%block_id - if (istop > 0 .and. jstop > 0) & - write(nu_diag,*) 'Global i and j:', & - this_block%i_glob(istop), & - this_block%j_glob(jstop) - call abort_ice (subname//'ERROR: negative area (ice)') + call diagnostic_abort(istop,jstop,iblk,'negative area (ice)') endif enddo ! n diff --git a/cicecore/cicedynB/general/ice_flux.F90 b/cicecore/cicedynB/general/ice_flux.F90 index 18727b63e..a7e5aa584 100644 --- a/cicecore/cicedynB/general/ice_flux.F90 +++ b/cicecore/cicedynB/general/ice_flux.F90 @@ -65,16 +65,16 @@ module ice_flux sig1 , & ! normalized principal stress component sig2 , & ! normalized principal stress component sigP , & ! internal ice pressure (N/m) - taubx , & ! seabed stress (x) (N/m^2) - tauby , & ! seabed stress (y) (N/m^2) - strairx , & ! stress on ice by air, x-direction at U points - strairy , & ! stress on ice by air, y-direction at U points - strocnx , & ! ice-ocean stress, x-direction at U points, computed in dyn_finish - strocny , & ! ice-ocean stress, y-direction at U points, computed in dyn_finish - strtltx , & ! stress due to sea surface slope, x-direction - strtlty , & ! 
stress due to sea surface slope, y-direction - strintx , & ! divergence of internal ice stress, x (N/m^2) - strinty , & ! divergence of internal ice stress, y (N/m^2) + taubxU , & ! seabed stress (x) (N/m^2) + taubyU , & ! seabed stress (y) (N/m^2) + strairxU, & ! stress on ice by air, x-direction at U points + strairyU, & ! stress on ice by air, y-direction at U points + strocnxU, & ! ice-ocean stress, x-direction at U points, computed in dyn_finish + strocnyU, & ! ice-ocean stress, y-direction at U points, computed in dyn_finish + strtltxU, & ! stress due to sea surface slope, x-direction + strtltyU, & ! stress due to sea surface slope, y-direction + strintxU, & ! divergence of internal ice stress, x (N/m^2) + strintyU, & ! divergence of internal ice stress, y (N/m^2) taubxN , & ! seabed stress (x) at N points (N/m^2) taubyN , & ! seabed stress (y) at N points (N/m^2) strairxN, & ! stress on ice by air, x-direction at N points @@ -129,23 +129,11 @@ module ice_flux stresspT, stressmT, stress12T, & ! sigma11+sigma22, sigma11-sigma22, sigma12 stresspU, stressmU, stress12U ! " - logical (kind=log_kind), & - dimension (:,:,:), allocatable, public :: & - iceumask ! ice extent mask (U-cell) - - logical (kind=log_kind), & - dimension (:,:,:), allocatable, public :: & - icenmask ! ice extent mask (N-cell) - - logical (kind=log_kind), & - dimension (:,:,:), allocatable, public :: & - iceemask ! ice extent mask (E-cell) - ! internal real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & - fm , & ! Coriolis param. * mass in U-cell (kg/s) - Tbu , & ! factor for seabed stress (N/m^2) + fmU , & ! Coriolis param. * mass in U-cell (kg/s) + TbU , & ! factor for seabed stress (N/m^2) fmE , & ! Coriolis param. * mass in E-cell (kg/s) TbE , & ! factor for seabed stress (N/m^2) fmN , & ! Coriolis param. * mass in N-cell (kg/s) @@ -323,7 +311,7 @@ module ice_flux mlt_onset, &! day of year that sfc melting begins frz_onset, &! day of year that freezing begins (congel or frazil) frazil_diag ! frazil ice growth diagnostic (m/step-->cm/day) - + real (kind=dbl_kind), & dimension (:,:,:,:), allocatable, public :: & fsurfn, & ! category fsurf @@ -342,7 +330,7 @@ module ice_flux ! ice diagnostics and history files as these are more accurate. ! (The others suffer from problem of incorrect values at grid boxes ! that change from an ice free state to an icy state.) - + real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & fresh_ai, & ! fresh water flux to ocean (kg/m^2/s) fsalt_ai, & ! salt flux to ocean (kg/m^2/s) @@ -356,7 +344,7 @@ module ice_flux real (kind=dbl_kind), dimension (:,:,:,:), allocatable, public :: & fswthrun_ai ! per-category fswthru * ai (W/m^2) - + logical (kind=log_kind), public :: send_i2x_per_cat = .false. !----------------------------------------------------------------- @@ -372,7 +360,7 @@ module ice_flux coszen , & ! cosine solar zenith angle, < 0 for sun below horizon rdg_conv, & ! convergence term for ridging (1/s) rdg_shear ! shear term for ridging (1/s) - + real (kind=dbl_kind), dimension(:,:,:,:), allocatable, public :: & salinz ,& ! initial salinity profile (ppt) Tmltz ! initial melting temperature (^oC) @@ -406,16 +394,16 @@ subroutine alloc_flux sig1 (nx_block,ny_block,max_blocks), & ! normalized principal stress component sig2 (nx_block,ny_block,max_blocks), & ! normalized principal stress component sigP (nx_block,ny_block,max_blocks), & ! internal ice pressure (N/m) - taubx (nx_block,ny_block,max_blocks), & ! 
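!-----------------------------------------------------------------
! The iceumask/icenmask/iceemask declarations removed above are
! relocated, not deleted: the implicit_solver hunk earlier in this
! patch now imports iceumask from ice_grid (and presumably likewise
! for the N and E masks), grouping the ice-extent masks with the
! other grid masks rather than with the fluxes.
!-----------------------------------------------------------------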
seabed stress (x) (N/m^2) - tauby (nx_block,ny_block,max_blocks), & ! seabed stress (y) (N/m^2) - strairx (nx_block,ny_block,max_blocks), & ! stress on ice by air, x-direction - strairy (nx_block,ny_block,max_blocks), & ! stress on ice by air, y-direction - strocnx (nx_block,ny_block,max_blocks), & ! ice-ocean stress, x-direction - strocny (nx_block,ny_block,max_blocks), & ! ice-ocean stress, y-direction - strtltx (nx_block,ny_block,max_blocks), & ! stress due to sea surface slope, x-direction - strtlty (nx_block,ny_block,max_blocks), & ! stress due to sea surface slope, y-direction - strintx (nx_block,ny_block,max_blocks), & ! divergence of internal ice stress, x (N/m^2) - strinty (nx_block,ny_block,max_blocks), & ! divergence of internal ice stress, y (N/m^2) + taubxU (nx_block,ny_block,max_blocks), & ! seabed stress (x) (N/m^2) + taubyU (nx_block,ny_block,max_blocks), & ! seabed stress (y) (N/m^2) + strairxU (nx_block,ny_block,max_blocks), & ! stress on ice by air, x-direction + strairyU (nx_block,ny_block,max_blocks), & ! stress on ice by air, y-direction + strocnxU (nx_block,ny_block,max_blocks), & ! ice-ocean stress, x-direction + strocnyU (nx_block,ny_block,max_blocks), & ! ice-ocean stress, y-direction + strtltxU (nx_block,ny_block,max_blocks), & ! stress due to sea surface slope, x-direction + strtltyU (nx_block,ny_block,max_blocks), & ! stress due to sea surface slope, y-direction + strintxU (nx_block,ny_block,max_blocks), & ! divergence of internal ice stress, x (N/m^2) + strintyU (nx_block,ny_block,max_blocks), & ! divergence of internal ice stress, y (N/m^2) daidtd (nx_block,ny_block,max_blocks), & ! ice area tendency due to transport (1/s) dvidtd (nx_block,ny_block,max_blocks), & ! ice volume tendency due to transport (m/s) dagedtd (nx_block,ny_block,max_blocks), & ! ice age tendency due to transport (s/s) @@ -435,9 +423,8 @@ subroutine alloc_flux stress12_2 (nx_block,ny_block,max_blocks), & ! sigma12 stress12_3 (nx_block,ny_block,max_blocks), & ! sigma12 stress12_4 (nx_block,ny_block,max_blocks), & ! sigma12 - iceumask (nx_block,ny_block,max_blocks), & ! ice extent mask (U-cell) - fm (nx_block,ny_block,max_blocks), & ! Coriolis param. * mass in U-cell (kg/s) - Tbu (nx_block,ny_block,max_blocks), & ! factor for seabed stress (landfast ice) + fmU (nx_block,ny_block,max_blocks), & ! Coriolis param. * mass in U-cell (kg/s) + TbU (nx_block,ny_block,max_blocks), & ! factor for seabed stress (landfast ice) zlvl (nx_block,ny_block,max_blocks), & ! atm level height (momentum) (m) zlvs (nx_block,ny_block,max_blocks), & ! atm level height (scalar quantities) (m) uatm (nx_block,ny_block,max_blocks), & ! wind velocity components (m/s) @@ -461,7 +448,8 @@ subroutine alloc_flux Tf (nx_block,ny_block,max_blocks), & ! freezing temperature (C) qdp (nx_block,ny_block,max_blocks), & ! deep ocean heat flux (W/m^2), negative upward hmix (nx_block,ny_block,max_blocks), & ! mixed layer depth (m) - daice_da (nx_block,ny_block,max_blocks), & ! data assimilation concentration increment rate (concentration s-1)(only used in hadgem drivers) + daice_da (nx_block,ny_block,max_blocks), & ! data assimilation concentration increment rate (concentration s-1) + ! (only used in hadgem drivers) fsens (nx_block,ny_block,max_blocks), & ! sensible heat flux (W/m^2) flat (nx_block,ny_block,max_blocks), & ! latent heat flux (W/m^2) fswabs (nx_block,ny_block,max_blocks), & ! shortwave flux absorbed in ice and ocean (W/m^2) @@ -592,7 +580,6 @@ subroutine alloc_flux strtltyN (nx_block,ny_block,max_blocks), & ! 
stress due to sea surface slope, y-direction at N points strintxN (nx_block,ny_block,max_blocks), & ! divergence of internal ice stress, x at N points (N/m^2) strintyN (nx_block,ny_block,max_blocks), & ! divergence of internal ice stress, y at N points (N/m^2) - icenmask (nx_block,ny_block,max_blocks), & ! ice extent mask (N-cell) fmN (nx_block,ny_block,max_blocks), & ! Coriolis param. * mass in N-cell (kg/s) TbN (nx_block,ny_block,max_blocks), & ! factor for seabed stress (landfast ice) taubxE (nx_block,ny_block,max_blocks), & ! seabed stress (x) at E points (N/m^2) @@ -605,7 +592,6 @@ subroutine alloc_flux strtltyE (nx_block,ny_block,max_blocks), & ! stress due to sea surface slope, y-direction at E points strintxE (nx_block,ny_block,max_blocks), & ! divergence of internal ice stress, x at E points (N/m^2) strintyE (nx_block,ny_block,max_blocks), & ! divergence of internal ice stress, y at E points (N/m^2) - iceemask (nx_block,ny_block,max_blocks), & ! ice extent mask (E-cell) fmE (nx_block,ny_block,max_blocks), & ! Coriolis param. * mass in E-cell (kg/s) TbE (nx_block,ny_block,max_blocks), & ! factor for seabed stress (landfast ice) stresspT (nx_block,ny_block,max_blocks), & ! sigma11+sigma22 @@ -806,7 +792,7 @@ subroutine init_coupler_flux fdon (:,:,:,:)= c0 ffep (:,:,:,:)= c0 ffed (:,:,:,:)= c0 - + allocate(fswthrun_ai(nx_block,ny_block,ncat,max_blocks)) fswthrun_ai(:,:,:,:) = c0 @@ -1041,17 +1027,17 @@ subroutine init_history_dyn sig1 (:,:,:) = c0 sig2 (:,:,:) = c0 - taubx (:,:,:) = c0 - tauby (:,:,:) = c0 + taubxU (:,:,:) = c0 + taubyU (:,:,:) = c0 strength (:,:,:) = c0 - strocnx (:,:,:) = c0 - strocny (:,:,:) = c0 - strairx (:,:,:) = c0 - strairy (:,:,:) = c0 - strtltx (:,:,:) = c0 - strtlty (:,:,:) = c0 - strintx (:,:,:) = c0 - strinty (:,:,:) = c0 + strocnxU(:,:,:) = c0 + strocnyU(:,:,:) = c0 + strairxU(:,:,:) = c0 + strairyU(:,:,:) = c0 + strtltxU(:,:,:) = c0 + strtltyU(:,:,:) = c0 + strintxU(:,:,:) = c0 + strintyU(:,:,:) = c0 dardg1dt(:,:,:) = c0 dardg2dt(:,:,:) = c0 dvirdgdt(:,:,:) = c0 @@ -1060,7 +1046,7 @@ subroutine init_history_dyn dvidtd (:,:,:) = vice(:,:,:) ! temporary initial volume if (tr_iage) & dagedtd (:,:,:) = trcr(:,:,nt_iage,:) ! temporary initial age - fm (:,:,:) = c0 + fmU (:,:,:) = c0 ardgn (:,:,:,:) = c0 vrdgn (:,:,:,:) = c0 krdgn (:,:,:,:) = c1 @@ -1293,7 +1279,7 @@ subroutine scale_fluxes (nx_block, ny_block, & ! Scale fluxes for history output if (present(fsurf) .and. present(fcondtop) ) then - + do j = 1, ny_block do i = 1, nx_block if (tmask(i,j) .and. aice(i,j) > c0) then @@ -1306,9 +1292,9 @@ subroutine scale_fluxes (nx_block, ny_block, & endif ! tmask and aice > 0 enddo ! i enddo ! j - + endif ! present(fsurf & fcondtop) - + end subroutine scale_fluxes !======================================================================= diff --git a/cicecore/cicedynB/general/ice_flux_bgc.F90 b/cicecore/cicedynB/general/ice_flux_bgc.F90 index 56e644431..0d9184fb7 100644 --- a/cicecore/cicedynB/general/ice_flux_bgc.F90 +++ b/cicecore/cicedynB/general/ice_flux_bgc.F90 @@ -26,13 +26,13 @@ module ice_flux_bgc real (kind=dbl_kind), & ! coupling variable for both tr_aero and tr_zaero dimension (:,:,:,:), allocatable, public :: & fiso_atm, & ! isotope deposition rate (kg/m^2 s) - faero_atm ! aerosol deposition rate (kg/m^2 s) + faero_atm ! aerosol deposition rate (kg/m^2 s) real (kind=dbl_kind), & dimension (:,:,:,:), allocatable, public :: & flux_bio_atm ! all bio fluxes to ice from atmosphere - ! out to ocean + ! 
out to ocean real (kind=dbl_kind), & dimension (:,:,:,:), allocatable, public :: & @@ -45,8 +45,8 @@ module ice_flux_bgc flux_bio_ai ! all bio fluxes to ocean, averaged over grid cell real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & - fzsal_ai, & ! salt flux to ocean from zsalinity (kg/m^2/s) - fzsal_g_ai ! gravity drainage salt flux to ocean (kg/m^2/s) + fzsal_ai, & ! salt flux to ocean from zsalinity (kg/m^2/s) + fzsal_g_ai ! gravity drainage salt flux to ocean (kg/m^2/s) ! internal @@ -58,7 +58,7 @@ module ice_flux_bgc dsnown ! change in snow thickness in category n (m) real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & - nit , & ! ocean nitrate (mmol/m^3) + nit , & ! ocean nitrate (mmol/m^3) amm , & ! ammonia/um (mmol/m^3) sil , & ! silicate (mmol/m^3) dmsp , & ! dmsp (mmol/m^3) @@ -85,15 +85,15 @@ module ice_flux_bgc fdon ! ice-ocean don flux (mmol/m^2/s) (proteins and amino acids) real (kind=dbl_kind), dimension (:,:,:,:), allocatable, public :: & - dic , & ! ocean dic (mmol/m^3) - fdic ! ice-ocean dic flux (mmol/m^2/s) + dic , & ! ocean dic (mmol/m^3) + fdic ! ice-ocean dic flux (mmol/m^2/s) real (kind=dbl_kind), dimension (:,:,:,:), allocatable, public :: & - fed, fep , & ! ocean dissolved and particulate fe (nM) - ffed, ffep ! ice-ocean dissolved and particulate fe flux (umol/m^2/s) + fed, fep , & ! ocean dissolved and particulate fe (nM) + ffed, ffep ! ice-ocean dissolved and particulate fe flux (umol/m^2/s) real (kind=dbl_kind), dimension (:,:,:,:), allocatable, public :: & - zaeros ! ocean aerosols (mmol/m^3) + zaeros ! ocean aerosols (mmol/m^3) ! isotopes real (kind=dbl_kind), & ! coupling variable for tr_iso @@ -114,16 +114,16 @@ module ice_flux_bgc !======================================================================= ! -! Allocate space for all variables +! Allocate space for all variables ! subroutine alloc_flux_bgc integer (int_kind) :: ierr allocate( & - fzsal_ai (nx_block,ny_block,max_blocks), & ! salt flux to ocean from zsalinity (kg/m^2/s) - fzsal_g_ai (nx_block,ny_block,max_blocks), & ! gravity drainage salt flux to ocean (kg/m^2/s) - nit (nx_block,ny_block,max_blocks), & ! ocean nitrate (mmol/m^3) + fzsal_ai (nx_block,ny_block,max_blocks), & ! salt flux to ocean from zsalinity (kg/m^2/s) + fzsal_g_ai (nx_block,ny_block,max_blocks), & ! gravity drainage salt flux to ocean (kg/m^2/s) + nit (nx_block,ny_block,max_blocks), & ! ocean nitrate (mmol/m^3) amm (nx_block,ny_block,max_blocks), & ! ammonia/um (mmol/m^3) sil (nx_block,ny_block,max_blocks), & ! silicate (mmol/m^3) dmsp (nx_block,ny_block,max_blocks), & ! dmsp (mmol/m^3) @@ -138,32 +138,32 @@ subroutine alloc_flux_bgc fdust (nx_block,ny_block,max_blocks), & ! ice-ocean dust flux (kg/m^2/s), positive to ocean hin_old (nx_block,ny_block,ncat,max_blocks), & ! old ice thickness dsnown (nx_block,ny_block,ncat,max_blocks), & ! change in snow thickness in category n (m) - HDO_ocn (nx_block,ny_block,max_blocks), & ! seawater concentration of HDO (kg/kg) - H2_16O_ocn (nx_block,ny_block,max_blocks), & ! seawater concentration of H2_16O (kg/kg) - H2_18O_ocn (nx_block,ny_block,max_blocks), & ! seawater concentration of H2_18O (kg/kg) - Qa_iso (nx_block,ny_block,icepack_max_iso,max_blocks), & ! isotope specific humidity (kg/kg) - Qref_iso (nx_block,ny_block,icepack_max_iso,max_blocks), & ! 2m atm reference isotope spec humidity (kg/kg) - fiso_atm (nx_block,ny_block,icepack_max_iso,max_blocks), & ! 
isotope deposition rate (kg/m^2 s) - fiso_evap (nx_block,ny_block,icepack_max_iso,max_blocks), & ! isotope evaporation rate (kg/m^2 s) - fiso_ocn (nx_block,ny_block,icepack_max_iso,max_blocks), & ! isotope flux to ocean (kg/m^2/s) - faero_atm (nx_block,ny_block,icepack_max_aero,max_blocks), & ! aerosol deposition rate (kg/m^2 s) + HDO_ocn (nx_block,ny_block,max_blocks), & ! seawater concentration of HDO (kg/kg) + H2_16O_ocn (nx_block,ny_block,max_blocks), & ! seawater concentration of H2_16O (kg/kg) + H2_18O_ocn (nx_block,ny_block,max_blocks), & ! seawater concentration of H2_18O (kg/kg) + Qa_iso (nx_block,ny_block,icepack_max_iso,max_blocks), & ! isotope specific humidity (kg/kg) + Qref_iso (nx_block,ny_block,icepack_max_iso,max_blocks), & ! 2m atm reference isotope spec humidity (kg/kg) + fiso_atm (nx_block,ny_block,icepack_max_iso,max_blocks), & ! isotope deposition rate (kg/m^2 s) + fiso_evap (nx_block,ny_block,icepack_max_iso,max_blocks), & ! isotope evaporation rate (kg/m^2 s) + fiso_ocn (nx_block,ny_block,icepack_max_iso,max_blocks), & ! isotope flux to ocean (kg/m^2/s) + faero_atm (nx_block,ny_block,icepack_max_aero,max_blocks), & ! aerosol deposition rate (kg/m^2 s) faero_ocn (nx_block,ny_block,icepack_max_aero,max_blocks), & ! aerosol flux to ocean (kg/m^2/s) - zaeros (nx_block,ny_block,icepack_max_aero,max_blocks), & ! ocean aerosols (mmol/m^3) + zaeros (nx_block,ny_block,icepack_max_aero,max_blocks), & ! ocean aerosols (mmol/m^3) flux_bio_atm(nx_block,ny_block,icepack_max_nbtrcr,max_blocks), & ! all bio fluxes to ice from atmosphere flux_bio (nx_block,ny_block,icepack_max_nbtrcr,max_blocks), & ! all bio fluxes to ocean flux_bio_ai (nx_block,ny_block,icepack_max_nbtrcr,max_blocks), & ! all bio fluxes to ocean, averaged over grid cell algalN (nx_block,ny_block,icepack_max_algae,max_blocks), & ! ocean algal nitrogen (mmol/m^3) (diatoms, pico, phaeo) - falgalN (nx_block,ny_block,icepack_max_algae,max_blocks), & ! ice-ocean algal nitrogen flux (mmol/m^2/s) (diatoms, pico, phaeo) + falgalN (nx_block,ny_block,icepack_max_algae,max_blocks), & ! ice-ocn algalN flux (mmol/m^2/s) (diatoms, pico, phaeo) doc (nx_block,ny_block,icepack_max_doc,max_blocks), & ! ocean doc (mmol/m^3) (saccharids, lipids, tbd ) fdoc (nx_block,ny_block,icepack_max_doc,max_blocks), & ! ice-ocean doc flux (mmol/m^2/s) (saccharids, lipids, tbd) don (nx_block,ny_block,icepack_max_don,max_blocks), & ! ocean don (mmol/m^3) (proteins and amino acids) fdon (nx_block,ny_block,icepack_max_don,max_blocks), & ! ice-ocean don flux (mmol/m^2/s) (proteins and amino acids) - dic (nx_block,ny_block,icepack_max_dic,max_blocks), & ! ocean dic (mmol/m^3) - fdic (nx_block,ny_block,icepack_max_dic,max_blocks), & ! ice-ocean dic flux (mmol/m^2/s) - fed (nx_block,ny_block,icepack_max_fe, max_blocks), & ! ocean dissolved fe (nM) - fep (nx_block,ny_block,icepack_max_fe, max_blocks), & ! ocean particulate fe (nM) - ffed (nx_block,ny_block,icepack_max_fe, max_blocks), & ! ice-ocean dissolved fe flux (umol/m^2/s) - ffep (nx_block,ny_block,icepack_max_fe, max_blocks), & ! ice-ocean particulate fe flux (umol/m^2/s) + dic (nx_block,ny_block,icepack_max_dic,max_blocks), & ! ocean dic (mmol/m^3) + fdic (nx_block,ny_block,icepack_max_dic,max_blocks), & ! ice-ocean dic flux (mmol/m^2/s) + fed (nx_block,ny_block,icepack_max_fe, max_blocks), & ! ocean dissolved fe (nM) + fep (nx_block,ny_block,icepack_max_fe, max_blocks), & ! ocean particulate fe (nM) + ffed (nx_block,ny_block,icepack_max_fe, max_blocks), & ! 
ice-ocean dissolved fe flux (umol/m^2/s) + ffep (nx_block,ny_block,icepack_max_fe, max_blocks), & ! ice-ocean particulate fe flux (umol/m^2/s) stat=ierr) if (ierr/=0) call abort_ice('(alloc_flux_bgc): Out of memory') @@ -214,10 +214,10 @@ subroutine bgcflux_ice_to_ocn(nx_block, & ! local variables - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & i,j , & ! horizontal indices k ! tracer index - + logical (kind=log_kind) :: & skl_bgc, solve_zbgc, & tr_bgc_Nit, tr_bgc_N, & @@ -226,14 +226,14 @@ subroutine bgcflux_ice_to_ocn(nx_block, & integer (kind=int_kind) :: & nlt_bgc_Nit, nlt_bgc_Am, & - nlt_bgc_Sil, nlt_bgc_DMSPd, nlt_bgc_DMS, nlt_bgc_hum + nlt_bgc_Sil, nlt_bgc_DMSPd, nlt_bgc_DMS, nlt_bgc_hum integer (kind=int_kind), dimension(icepack_max_algae) :: & nlt_bgc_N, nlt_bgc_C ! algae integer (kind=int_kind), dimension(icepack_max_doc) :: & nlt_bgc_DOC ! disolved organic carbon integer (kind=int_kind), dimension(icepack_max_don) :: & - nlt_bgc_DON ! + nlt_bgc_DON ! integer (kind=int_kind), dimension(icepack_max_dic) :: & nlt_bgc_DIC ! disolved inorganic carbon integer (kind=int_kind), dimension(icepack_max_fe) :: & diff --git a/cicecore/cicedynB/general/ice_forcing.F90 b/cicecore/cicedynB/general/ice_forcing.F90 index 36dbfe88c..edff03b9f 100755 --- a/cicecore/cicedynB/general/ice_forcing.F90 +++ b/cicecore/cicedynB/general/ice_forcing.F90 @@ -76,7 +76,7 @@ module ice_forcing sst_file, & sss_file, & sublim_file, & - snow_file + snow_file character (char_len_long), dimension(:), allocatable, public :: & ! input data file names topmelt_file, & @@ -106,7 +106,7 @@ module ice_forcing rhoa_data, & flw_data, & sst_data, & - sss_data, & + sss_data, & uocn_data, & vocn_data, & sublim_data, & @@ -116,7 +116,7 @@ module ice_forcing topmelt_data, & botmelt_data - character(char_len), public :: & + character(char_len), public :: & atm_data_format, & ! 'bin'=binary or 'nc'=netcdf ocn_data_format, & ! 'bin'=binary or 'nc'=netcdf atm_data_type, & ! 'default', 'monthly', 'ncar', 'box2001' @@ -132,15 +132,15 @@ module ice_forcing logical (kind=log_kind), public :: & rotate_wind ! rotate wind/stress to computational grid from true north directed - - character(char_len_long), public :: & + + character(char_len_long), public :: & atm_data_dir , & ! top directory for atmospheric data ocn_data_dir , & ! top directory for ocean data wave_spec_dir, & ! dir name for wave spectrum wave_spec_file,& ! file name for wave spectrum oceanmixed_file ! file name for ocean forcing data - integer (kind=int_kind), parameter :: & + integer (kind=int_kind), parameter :: & nfld = 8 ! number of fields to search for in forcing file ! as in the dummy atm (latm) @@ -159,7 +159,7 @@ module ice_forcing integer (kind=int_kind), public :: & trestore ! restoring time scale (days) - real (kind=dbl_kind), public :: & + real (kind=dbl_kind), public :: & trest ! restoring time scale (sec) logical (kind=log_kind), public :: & @@ -196,7 +196,7 @@ module ice_forcing !======================================================================= ! -! Allocate space for all variables +! Allocate space for all variables ! subroutine alloc_forcing integer (int_kind) :: ierr @@ -288,7 +288,7 @@ subroutine init_forcing_atmo endif !------------------------------------------------------------------- - ! Get filenames for input forcing data + ! Get filenames for input forcing data !------------------------------------------------------------------- ! 
default forcing values from init_flux_atm @@ -310,7 +310,7 @@ subroutine init_forcing_atmo call monthly_files(fyear) elseif (trim(atm_data_type) == 'oned') then call oned_files - elseif (trim(atm_data_type) == 'ISPOL') then + elseif (trim(atm_data_type) == 'ISPOL') then call ISPOL_files elseif (trim(atm_data_type) == 'box2001') then call box2001_data_atm @@ -331,7 +331,8 @@ subroutine init_forcing_atmo elseif (trim(atm_data_type) == 'default') then ! don't need to do anything more else - call abort_ice (error_message=subname//' ERROR atm_data_type unknown = '//trim(atm_data_type), file=__FILE__, line=__LINE__) + call abort_ice (error_message=subname//' ERROR atm_data_type unknown = '// & + trim(atm_data_type), file=__FILE__, line=__LINE__) endif end subroutine init_forcing_atmo @@ -340,13 +341,13 @@ end subroutine init_forcing_atmo subroutine init_forcing_ocn(dt) -! Set sea surface salinity and freezing temperature to annual mean value +! Set sea surface salinity and freezing temperature to annual mean value ! using a 12-month climatology. ! Read sst data for current month, and adjust sst based on freezing ! temperature. No interpolation in time. -! Note: SST is subsequently prognosed if CICE is run -! with a mixed layer ocean (oceanmixed_ice = T), and can be +! Note: SST is subsequently prognosed if CICE is run +! with a mixed layer ocean (oceanmixed_ice = T), and can be ! restored to data (restore_ocn = T). use ice_blocks, only: nx_block, ny_block @@ -362,14 +363,14 @@ subroutine init_forcing_ocn(dt) integer (kind=int_kind) :: & i, j, iblk , & ! horizontal indices k , & ! month index - fid , & ! file id for netCDF file + fid , & ! file id for netCDF file nbits logical (kind=log_kind) :: diag real (kind=dbl_kind) :: secday - character (char_len) :: & + character (char_len) :: & fieldname ! field name in netcdf file real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & @@ -488,7 +489,7 @@ subroutine init_forcing_ocn(dt) elseif (trim(ocn_data_type) == 'hadgem_sst' .or. & trim(ocn_data_type) == 'hadgem_sst_uvocn') then - diag = .true. ! write diagnostic information + diag = .true. ! write diagnostic information sst_file = trim (ocn_data_dir)//'/MONTHLY/sst.1997.nc' @@ -500,11 +501,11 @@ subroutine init_forcing_ocn(dt) call ice_open_nc(sst_file,fid) endif - + fieldname='sst' call ice_read_nc(fid,mmonth,fieldname,sst,diag) - if (my_task == master_task) call ice_close_nc(fid) + if (my_task == master_task) call ice_close_nc(fid) ! Make sure sst is not less than freezing temperature Tf !$OMP PARALLEL DO PRIVATE(iblk,i,j) @@ -539,7 +540,8 @@ subroutine init_forcing_ocn(dt) elseif (trim(ocn_data_type) == 'default') then ! don't need to do anything more else - call abort_ice (error_message=subname//' ERROR ocn_data_type unknown = '//trim(ocn_data_type), file=__FILE__, line=__LINE__) + call abort_ice (error_message=subname//' ERROR ocn_data_type unknown = '// & + trim(ocn_data_type), file=__FILE__, line=__LINE__) endif end subroutine init_forcing_ocn @@ -694,7 +696,7 @@ subroutine get_forcing_atmo ilo, ihi, jlo, jhi, & hm (:,:,iblk), & Tair (:,:,iblk), & - fsw (:,:,iblk), & + fsw (:,:,iblk), & cldf (:,:,iblk), & flw (:,:,iblk), & frain (:,:,iblk), & @@ -761,10 +763,10 @@ subroutine get_forcing_ocn (dt) call ocn_data_clim(dt) elseif (trim(ocn_data_type) == 'ncar' .or. & trim(ocn_data_type) == 'ISPOL') then - call ocn_data_ncar(dt) + call ocn_data_ncar(dt) elseif (trim(ocn_data_type) == 'hadgem_sst' .or. 
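!-----------------------------------------------------------------
! On the restoring comment above: with restore_ocn = .true. the
! prognosed mixed-layer SST is nudged toward the data with the
! timescale trest (the trestore namelist value in days, converted
! to seconds). The standard nudging update has the shape
!
!   sst(i,j) = sst(i,j) + (sst_data(i,j) - sst(i,j)) * dt / trest
!
! so small trest pins SST to the climatology and large trest leaves
! it nearly free; the exact statement lives elsewhere in this module.
!-----------------------------------------------------------------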
& trim(ocn_data_type) == 'hadgem_sst_uvocn') then - call ocn_data_hadgem(dt) + call ocn_data_hadgem(dt) elseif (trim(ocn_data_type) == 'oned') then call ocn_data_oned elseif (trim(ocn_data_type) == 'hycom') then @@ -1039,7 +1041,7 @@ subroutine read_data_nc (flag, recd, yr, ixm, ixx, ixp, & arg = 1 nrec = recd + n2 - call ice_read_nc & + call ice_read_nc & (fid, nrec, fieldname, field_data(:,:,arg,:), debug_forcing, & field_loc, field_type) @@ -1053,7 +1055,7 @@ subroutine read_data_nc (flag, recd, yr, ixm, ixx, ixp, & arg = arg + 1 nrec = recd + ixx - call ice_read_nc & + call ice_read_nc & (fid, nrec, fieldname, field_data(:,:,arg,:), debug_forcing, & field_loc, field_type) @@ -1079,7 +1081,7 @@ subroutine read_data_nc (flag, recd, yr, ixm, ixx, ixp, & arg = arg + 1 nrec = recd + n4 - call ice_read_nc & + call ice_read_nc & (fid, nrec, fieldname, field_data(:,:,arg,:), debug_forcing, & field_loc, field_type) endif ! ixp /= -99 @@ -1311,21 +1313,21 @@ subroutine read_clim_data_nc (readflag, recd, ixm, ixx, ixp, & if (ixm /= -99) then arg = 1 nrec = recd + ixm - call ice_read_nc & + call ice_read_nc & (fid, nrec, fieldname, field_data(:,:,arg,:), & debug_forcing, field_loc, field_type) endif arg = arg + 1 nrec = recd + ixx - call ice_read_nc & + call ice_read_nc & (fid, nrec, fieldname, field_data(:,:,arg,:), & debug_forcing, field_loc, field_type) if (ixp /= -99) then arg = arg + 1 nrec = recd + ixp - call ice_read_nc & + call ice_read_nc & (fid, nrec, fieldname, field_data(:,:,arg,:), & debug_forcing, field_loc, field_type) endif @@ -1449,7 +1451,7 @@ subroutine interp_coeff (recnum, recslot, secint, dataloc) else ! recslot = 1 if (dataloc==1) then ! data located at middle of interval t1 = (rcnum-p5)*secint - else + else t1 = rcnum*secint ! data located at end of interval endif t2 = t1 + secint ! + 1 interval @@ -1574,7 +1576,7 @@ end subroutine file_year subroutine prepare_forcing (nx_block, ny_block, & ilo, ihi, jlo, jhi, & hm, & - Tair, fsw, & + Tair, fsw, & cldf, flw, & frain, fsnow, & Qa, rhoa, & @@ -1597,7 +1599,7 @@ subroutine prepare_forcing (nx_block, ny_block, & sst , & ! sea surface temperature aice , & ! ice area fraction hm ! land mask - + real (kind=dbl_kind), dimension(nx_block,ny_block), intent(inout) :: & fsw , & ! incoming shortwave radiation (W/m^2) cldf , & ! cloud fraction @@ -1654,7 +1656,7 @@ subroutine prepare_forcing (nx_block, ny_block, & rhoa (i,j) = max(rhoa(i,j),c0) Qa (i,j) = max(Qa(i,j),c0) -! if (rhoa(i,j) .lt. puny) rhoa(i,j) = 1.3_dbl_kind +! if (rhoa(i,j) .lt. puny) rhoa(i,j) = 1.3_dbl_kind ! if (Tair(i,j) .lt. puny) Tair(i,j) = Tffresh ! if (Qa(i,j) .lt. puny) Qa(i,j) = 0.0035_dbl_kind enddo ! i @@ -1699,12 +1701,12 @@ subroutine prepare_forcing (nx_block, ny_block, & enddo enddo #endif - elseif (trim(atm_data_type) == 'oned') then ! rectangular grid + elseif (trim(atm_data_type) == 'oned') then ! rectangular grid ! precip is in kg/m^2/s zlvl0 = c10 - + do j = jlo, jhi do i = ilo, ihi @@ -1736,7 +1738,7 @@ subroutine prepare_forcing (nx_block, ny_block, & elseif (trim(precip_units) == 'mm_per_day') then precip_factor = c1/secday elseif (trim(precip_units) == 'mm_per_sec' .or. & - trim(precip_units) == 'mks') then + trim(precip_units) == 'mks') then precip_factor = c1 ! mm/sec = kg/m^2 s elseif (trim(precip_units) == 'm_per_sec') then precip_factor = c1000 @@ -1753,20 +1755,20 @@ subroutine prepare_forcing (nx_block, ny_block, & swvdf(i,j) = fsw(i,j)*frcvdf ! visible diffuse swidr(i,j) = fsw(i,j)*frcidr ! near IR direct swidf(i,j) = fsw(i,j)*frcidf ! 
near IR diffuse - + ! convert precipitation units to kg/m^2 s fsnow(i,j) = fsnow(i,j) * precip_factor enddo ! i enddo ! j ! determine whether precip is rain or snow - ! HadGEM forcing provides separate snowfall and rainfall rather + ! HadGEM forcing provides separate snowfall and rainfall rather ! than total precipitation if (trim(atm_data_type) /= 'hadgem') then do j = jlo, jhi do i = ilo, ihi - frain(i,j) = c0 + frain(i,j) = c0 if (Tair(i,j) >= Tffresh) then frain(i,j) = fsnow(i,j) fsnow(i,j) = c0 @@ -1789,8 +1791,8 @@ subroutine prepare_forcing (nx_block, ny_block, & ! then interpolate to the U-cell centers (otherwise we ! interpolate across the pole). ! Use ANGLET which is on the T grid ! - ! Atmo variables are needed in T cell centers in subroutine - ! atmo_boundary_layer, and are interpolated to the U grid later as + ! Atmo variables are needed in T cell centers in subroutine + ! atmo_boundary_layer, and are interpolated to the U grid later as ! necessary. !----------------------------------------------------------------- workx = uatm(i,j) ! wind velocity, m/s @@ -1838,12 +1840,12 @@ subroutine longwave_parkinson_washington(Tair, cldf, flw) ! (for now) ! Parkinson, C. L. and W. M. Washington (1979), ! Large-scale numerical-model of sea ice, - ! JGR, 84, 311-337, doi:10.1029/JC084iC01p00311 + ! JGR, 84, 311-337, doi:10.1029/JC084iC01p00311 real(kind=dbl_kind), intent(in) :: & Tair , & ! air temperature (K) cldf ! cloud fraction - + real(kind=dbl_kind), intent(out) :: & flw ! incoming longwave radiation (W/m^2) @@ -1859,12 +1861,12 @@ subroutine longwave_parkinson_washington(Tair, cldf, flw) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - + flw = stefan_boltzmann*Tair**4 & * (c1 - 0.261_dbl_kind & * exp(-7.77e-4_dbl_kind*(Tffresh - Tair)**2)) & * (c1 + 0.275_dbl_kind*cldf) - + end subroutine longwave_parkinson_washington !======================================================================= @@ -1874,11 +1876,11 @@ subroutine longwave_rosati_miyakoda(cldf, Tsfc, & Qa, Tair, & hm, flw) - ! based on - ! Rosati, A. and K. Miyakoda (1988), - ! A general-circulation model for upper ocean simulation, - ! J. Physical Oceanography, 18, 1601-1626, - ! doi:10.1175/1520-0485(1988)018<1601:AGCMFU>2.0.CO;2 + ! based on + ! Rosati, A. and K. Miyakoda (1988), + ! A general-circulation model for upper ocean simulation, + ! J. Physical Oceanography, 18, 1601-1626, + ! doi:10.1175/1520-0485(1988)018<1601:AGCMFU>2.0.CO;2 real(kind=dbl_kind), intent(in) :: & cldf , & ! cloud fraction @@ -1897,7 +1899,7 @@ subroutine longwave_rosati_miyakoda(cldf, Tsfc, & sstk , & ! ice/ocean surface temperature (K) rtea , & ! square root of the vapour pressure ptem , & ! potential air temperature (K) - qlwm + qlwm real(kind=dbl_kind) :: & Tffresh, stefan_boltzmann, emissivity @@ -1924,7 +1926,7 @@ subroutine longwave_rosati_miyakoda(cldf, Tsfc, & + c4*(sstk-ptem) ) flw = emissivity*stefan_boltzmann * ( sstk**4 - qlwm ) flw = flw * hm ! land mask - + end subroutine longwave_rosati_miyakoda !======================================================================= @@ -2068,7 +2070,7 @@ subroutine ncar_data else call abort_ice (error_message=subname//'nonbinary atm_data_format unavailable', & file=__FILE__, line=__LINE__) -! The routine exists, for example: +! The routine exists, for example: ! call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & ! maxrec, fsw_file, 'fsw', fsw_data, & ! 
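!-----------------------------------------------------------------
! Worked example for the Parkinson and Washington formula above,
! with Tair = 263 K and cldf = 0.5 (values rounded):
!
!   stefan_boltzmann * Tair**4                    ~ 271 W/m^2
!   1 - 0.261*exp(-7.77e-4*(273.15-263)**2)       ~ 0.76
!   1 + 0.275*0.5                                 ~ 1.14
!   flw ~ 271 * 0.76 * 1.14                       ~ 234 W/m^2
!
! a plausible cloudy wintertime downwelling longwave over sea ice.
!-----------------------------------------------------------------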
field_loc_center, field_type_scalar) @@ -2197,7 +2199,7 @@ subroutine LY_files (yr) if (my_task == master_task) then write (nu_diag,*) ' ' - write (nu_diag,*) 'Forcing data year = ', fyear + write (nu_diag,*) 'Forcing data year = ', fyear write (nu_diag,*) 'Atmospheric data files:' write (nu_diag,*) trim(flw_file) write (nu_diag,*) trim(rain_file) @@ -2287,7 +2289,7 @@ subroutine LY_data use ice_grid, only: hm, tlon, tlat, tmask, umask use ice_state, only: aice - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & i, j , & ixm,ixx,ixp , & ! record numbers for neighboring months recnum , & ! record number @@ -2321,9 +2323,9 @@ subroutine LY_data file=__FILE__, line=__LINE__) !------------------------------------------------------------------- - ! monthly data + ! monthly data ! - ! Assume that monthly data values are located in the middle of the + ! Assume that monthly data values are located in the middle of the ! month. !------------------------------------------------------------------- @@ -2348,7 +2350,7 @@ subroutine LY_data ! Find interpolation coefficients call interp_coeff_monthly (recslot) - ! Read 2 monthly values + ! Read 2 monthly values readm = .false. if (istep==1 .or. (mday==midmonth .and. msec==0)) readm = .true. @@ -2362,7 +2364,7 @@ subroutine LY_data !------------------------------------------------------------------- ! 6-hourly data - ! + ! ! Assume that the 6-hourly value is located at the end of the ! 6-hour period. This is the convention for NCEP reanalysis data. ! E.g. record 1 gives conditions at 6 am GMT on 1 January. @@ -2464,29 +2466,29 @@ subroutine LY_data if (debug_forcing) then if (my_task == master_task) write (nu_diag,*) 'LY_bulk_data' vmin = global_minval(fsw,distrb_info,tmask) - + vmax = global_maxval(fsw,distrb_info,tmask) if (my_task.eq.master_task) & - write (nu_diag,*) 'fsw',vmin,vmax + write (nu_diag,*) 'fsw',vmin,vmax vmin = global_minval(cldf,distrb_info,tmask) vmax = global_maxval(cldf,distrb_info,tmask) - if (my_task.eq.master_task) & + if (my_task.eq.master_task) & write (nu_diag,*) 'cldf',vmin,vmax vmin =global_minval(fsnow,distrb_info,tmask) vmax =global_maxval(fsnow,distrb_info,tmask) - if (my_task.eq.master_task) & + if (my_task.eq.master_task) & write (nu_diag,*) 'fsnow',vmin,vmax vmin = global_minval(Tair,distrb_info,tmask) vmax = global_maxval(Tair,distrb_info,tmask) - if (my_task.eq.master_task) & + if (my_task.eq.master_task) & write (nu_diag,*) 'Tair',vmin,vmax vmin = global_minval(uatm,distrb_info,umask) vmax = global_maxval(uatm,distrb_info,umask) - if (my_task.eq.master_task) & + if (my_task.eq.master_task) & write (nu_diag,*) 'uatm',vmin,vmax vmin = global_minval(vatm,distrb_info,umask) vmax = global_maxval(vatm,distrb_info,umask) - if (my_task.eq.master_task) & + if (my_task.eq.master_task) & write (nu_diag,*) 'vatm',vmin,vmax vmin = global_minval(Qa,distrb_info,tmask) vmax = global_maxval(Qa,distrb_info,tmask) @@ -2503,9 +2505,9 @@ subroutine JRA55_data use ice_blocks, only: block, get_block use ice_global_reductions, only: global_minval, global_maxval - use ice_domain, only: nblocks, distrb_info, blocks_ice + use ice_domain, only: nblocks, distrb_info use ice_flux, only: fsnow, Tair, uatm, vatm, Qa, fsw, flw - use ice_grid, only: hm, tlon, tlat, tmask, umask + use ice_grid, only: hm, tmask, umask use ice_state, only: aice use ice_calendar, only: days_per_year @@ -2782,7 +2784,7 @@ subroutine compute_shortwave(nx_block, ny_block, & secday , & pi , & lontmp , & - deg2rad + deg2rad integer (kind=int_kind) :: & i, j @@ 
-2823,7 +2825,7 @@ subroutine compute_shortwave(nx_block, ny_block, & sw0 = max(sw0,c0) ! total downward shortwave for cice - Fsw(i,j) = sw0*(c1-p6*cldf(i,j)**3) + Fsw(i,j) = sw0*(c1-p6*cldf(i,j)**3) Fsw(i,j) = Fsw(i,j)*hm(i,j) enddo enddo @@ -2865,7 +2867,7 @@ subroutine Qa_fixLY(nx_block, ny_block, Tair, Qa) /(c1 + 0.00412_dbl_kind*worka) & ! 2+ converts ea mb -> Pa + 0.00422_dbl_kind*worka ! for ice ! vapor pressure - worka = (c10**worka) ! saturated + worka = (c10**worka) ! saturated worka = max(worka,puny) ! puny over land to prevent division by zero ! specific humidity worka = 0.622_dbl_kind*worka/(1.e5_dbl_kind-0.378_dbl_kind*worka) @@ -2981,13 +2983,13 @@ subroutine hadgem_files (yr) endif ! calc_strair ! -------------------------------------------------------------- - ! Atmosphere properties. Even if these fields are not + ! Atmosphere properties. Even if these fields are not ! being used to force the ice (i.e. calc_Tsfc=.false.), they ! are still needed to generate forcing for mixed layer model or ! to calculate wind stress ! -------------------------------------------------------------- - if (calc_Tsfc .or. oceanmixed_ice .or. calc_strair) then + if (calc_Tsfc .or. oceanmixed_ice .or. calc_strair) then fsw_file = & trim(atm_data_dir)//'/MONTHLY/SW_incoming.1996.nc' @@ -3032,14 +3034,14 @@ subroutine hadgem_files (yr) trim(atm_data_dir)//'/MONTHLY/topmeltn',n,'.1996.nc' call file_year(topmelt_file(n),yr) - ! 'botmelt' = fcondtop. + ! 'botmelt' = fcondtop. write(botmelt_file(n), '(a,i1,a)') & trim(atm_data_dir)//'/MONTHLY/botmeltn',n,'.1996.nc' call file_year(botmelt_file(n),yr) enddo - ! 'sublim' = - flat / Lsub. + ! 'sublim' = - flat / Lsub. sublim_file = & trim(atm_data_dir)//'/MONTHLY/sublim.1996.nc' call file_year(sublim_file,yr) @@ -3085,7 +3087,7 @@ subroutine hadgem_data botmelt, & sublim - character (char_len) :: & + character (char_len) :: & fieldname ! field name in netcdf file real (kind=dbl_kind) :: & @@ -3212,15 +3214,15 @@ subroutine hadgem_data endif ! calc_strair ! ----------------------------------------------------------- - ! SW incoming, LW incoming, air temperature, density and - ! humidity at 10m. + ! SW incoming, LW incoming, air temperature, density and + ! humidity at 10m. ! - ! Even if these fields are not being used to force the ice - ! (i.e. calc_Tsfc=.false.), they are still needed to generate + ! Even if these fields are not being used to force the ice + ! (i.e. calc_Tsfc=.false.), they are still needed to generate ! forcing for mixed layer model or to calculate wind stress ! ----------------------------------------------------------- - if (calc_Tsfc .or. oceanmixed_ice .or. calc_strair) then + if (calc_Tsfc .or. oceanmixed_ice .or. calc_strair) then fieldname='SW_incoming' call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & @@ -3287,7 +3289,7 @@ subroutine hadgem_data ! botmelt = fcondtop (as zero layer) ! ! Convert UM sublimation data into CICE LH flux - ! (sublim = - flatn / Lsub) and have same value for all + ! (sublim = - flatn / Lsub) and have same value for all ! categories !-------------------------------------------------------- @@ -3296,7 +3298,7 @@ subroutine hadgem_data do j = 1, ny_block do i = 1, nx_block fcondtopn_f(i,j,n,iblk) = botmelt(i,j,iblk) - fsurfn_f(i,j,n,iblk) = topmelt(i,j,iblk) & + fsurfn_f(i,j,n,iblk) = topmelt(i,j,iblk) & + botmelt(i,j,iblk) flatn_f(i,j,n,iblk) = - sublim(i,j,iblk)*Lsub enddo @@ -3306,12 +3308,12 @@ subroutine hadgem_data enddo ! ncat - endif ! .not. calc_Tsfc + endif ! .not. 
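! A scalar sketch of the UM-to-CICE flux conversion just above
! (fcondtopn_f = botmelt, fsurfn_f = topmelt + botmelt,
! flatn_f = -sublim*Lsub), assuming Lsub = 2.835e6 J/kg, the usual
! CICE value; that constant is not quoted in this diff.
      program hadgem_flux_demo
      implicit none
      real(8), parameter :: Lsub = 2.835d6
      real(8) :: topmelt, botmelt, sublim
      topmelt = 5.0d0      ! 'topmelt' forcing (W/m^2, test value)
      botmelt = 2.0d0      ! 'botmelt' forcing (W/m^2, test value)
      sublim  = 1.0d-8     ! sublimation rate (kg/m^2/s, test value)
      print *, 'fcondtopn_f =', botmelt
      print *, 'fsurfn_f    =', topmelt + botmelt
      print *, 'flatn_f     =', -sublim*Lsub
      end program hadgem_flux_demo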
calc_Tsfc end subroutine hadgem_data !======================================================================= -! monthly forcing +! monthly forcing !======================================================================= subroutine monthly_files (yr) @@ -3359,7 +3361,7 @@ subroutine monthly_files (yr) if (my_task == master_task) then write (nu_diag,*) ' ' - write (nu_diag,*) 'Forcing data year = ', fyear + write (nu_diag,*) 'Forcing data year = ', fyear write (nu_diag,*) 'Atmospheric data files:' write (nu_diag,*) trim(flw_file) write (nu_diag,*) trim(rain_file) @@ -3382,7 +3384,7 @@ subroutine monthly_data use ice_flux, only: fsnow, Tair, Qa, wind, strax, stray, fsw use ice_grid, only: hm, tlon, tlat, tmask, umask - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & i, j , & ixm,ixp , & ! record numbers for neighboring months maxrec , & ! maximum record number @@ -3398,15 +3400,15 @@ subroutine monthly_data type (block) :: & this_block ! block information for current block - + character(len=*), parameter :: subname = '(monthly_data)' if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' !------------------------------------------------------------------- - ! monthly data + ! monthly data ! - ! Assume that monthly data values are located in the middle of the + ! Assume that monthly data values are located in the middle of the ! month. !------------------------------------------------------------------- @@ -3431,7 +3433,7 @@ subroutine monthly_data ! Find interpolation coefficients call interp_coeff_monthly (recslot) - ! Read 2 monthly values + ! Read 2 monthly values readm = .false. if (istep==1 .or. (mday==midmonth .and. msec==0)) readm = .true. @@ -3505,30 +3507,30 @@ subroutine monthly_data vmin = global_minval(fsw,distrb_info,tmask) vmax = global_maxval(fsw,distrb_info,tmask) if (my_task.eq.master_task) & - write (nu_diag,*) 'fsw',vmin,vmax + write (nu_diag,*) 'fsw',vmin,vmax vmin = global_minval(cldf,distrb_info,tmask) vmax = global_maxval(cldf,distrb_info,tmask) - if (my_task.eq.master_task) & + if (my_task.eq.master_task) & write (nu_diag,*) 'cldf',vmin,vmax vmin =global_minval(fsnow,distrb_info,tmask) vmax =global_maxval(fsnow,distrb_info,tmask) - if (my_task.eq.master_task) & + if (my_task.eq.master_task) & write (nu_diag,*) 'fsnow',vmin,vmax vmin = global_minval(Tair,distrb_info,tmask) vmax = global_maxval(Tair,distrb_info,tmask) - if (my_task.eq.master_task) & + if (my_task.eq.master_task) & write (nu_diag,*) 'Tair',vmin,vmax vmin = global_minval(wind,distrb_info,umask) vmax = global_maxval(wind,distrb_info,umask) - if (my_task.eq.master_task) & + if (my_task.eq.master_task) & write (nu_diag,*) 'wind',vmin,vmax vmin = global_minval(strax,distrb_info,umask) vmax = global_maxval(strax,distrb_info,umask) - if (my_task.eq.master_task) & + if (my_task.eq.master_task) & write (nu_diag,*) 'strax',vmin,vmax vmin = global_minval(stray,distrb_info,umask) vmax = global_maxval(stray,distrb_info,umask) - if (my_task.eq.master_task) & + if (my_task.eq.master_task) & write (nu_diag,*) 'stray',vmin,vmax vmin = global_minval(Qa,distrb_info,tmask) vmax = global_maxval(Qa,distrb_info,tmask) @@ -3549,7 +3551,7 @@ subroutine oned_data ! local parameters - character (char_len_long) :: & + character (char_len_long) :: & met_file, & ! netcdf filename fieldname ! field name in netcdf file @@ -3570,79 +3572,79 @@ subroutine oned_data Psat , & ! saturation vapour pressure (hPa) ws ! saturation mixing ratio - real (kind=dbl_kind), parameter :: & ! 
coefficients for Hyland-Wexler Qa - ps1 = 0.58002206e4_dbl_kind, & ! (K) + real (kind=dbl_kind), parameter :: & ! coefficients for Hyland-Wexler Qa + ps1 = 0.58002206e4_dbl_kind, & ! (K) ps2 = 1.3914993_dbl_kind, & ! - ps3 = 0.48640239e-1_dbl_kind, & ! (K^-1) + ps3 = 0.48640239e-1_dbl_kind, & ! (K^-1) ps4 = 0.41764768e-4_dbl_kind, & ! (K^-2) ps5 = 0.14452093e-7_dbl_kind, & ! (K^-3) ps6 = 6.5459673_dbl_kind, & ! - ws1 = 621.97_dbl_kind, & ! for saturation mixing ratio - Pair = 1020._dbl_kind ! Sea level pressure (hPa) - + ws1 = 621.97_dbl_kind, & ! for saturation mixing ratio + Pair = 1020._dbl_kind ! Sea level pressure (hPa) + character(len=*), parameter :: subname = '(oned_data)' if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - diag = .false. ! write diagnostic information - + diag = .false. ! write diagnostic information + if (trim(atm_data_format) == 'nc') then ! read nc file - ! hourly data beginning Jan 1, 1989, 01:00 + ! hourly data beginning Jan 1, 1989, 01:00 ! HARDWIRED for dt = 1 hour! met_file = uwind_file call ice_open_nc(met_file,fid) - fieldname='Uatm' - call ice_read_nc(fid,istep1,fieldname,work,diag) + fieldname='Uatm' + call ice_read_nc(fid,istep1,fieldname,work,diag) uatm(:,:,:) = work - fieldname='Vatm' - call ice_read_nc(fid,istep1,fieldname,work,diag) + fieldname='Vatm' + call ice_read_nc(fid,istep1,fieldname,work,diag) vatm(:,:,:) = work - fieldname='Tair' - call ice_read_nc(fid,istep1,fieldname,work,diag) + fieldname='Tair' + call ice_read_nc(fid,istep1,fieldname,work,diag) Temp = work - Tair(:,:,:) = Temp + Tair(:,:,:) = Temp call ice_close_nc(fid) - ! hourly solar data beginning Jan 1, 1989, 01:00 + ! hourly solar data beginning Jan 1, 1989, 01:00 met_file = fsw_file call ice_open_nc(met_file,fid) - fieldname='fsw' - call ice_read_nc(fid,istep1,fieldname,work,diag) + fieldname='fsw' + call ice_read_nc(fid,istep1,fieldname,work,diag) fsw(:,:,:) = work call ice_close_nc(fid) - ! hourly interpolated monthly data beginning Jan 1, 1989, 01:00 + ! hourly interpolated monthly data beginning Jan 1, 1989, 01:00 met_file = humid_file call ice_open_nc(met_file,fid) - fieldname='rh' - call ice_read_nc(fid,istep1,fieldname,work,diag) + fieldname='rh' + call ice_read_nc(fid,istep1,fieldname,work,diag) rh = work - - fieldname='fsnow' - call ice_read_nc(fid,istep1,fieldname,work,diag) + + fieldname='fsnow' + call ice_read_nc(fid,istep1,fieldname,work,diag) fsnow(:,:,:) = work call ice_close_nc(fid) !------------------------------------------------------------------- ! Find specific humidity using Hyland-Wexler formulation - ! Hyland, R.W. and A. Wexler, Formulations for the Thermodynamic - ! Properties of the saturated phases of H20 from 173.15K to 473.15K, + ! Hyland, R.W. and A. Wexler, Formulations for the Thermodynamic + ! Properties of the saturated phases of H20 from 173.15K to 473.15K, ! ASHRAE Trans, 89(2A), 500-519, 1983 !------------------------------------------------------------------- - - Psat = exp(-ps1/Temp + ps2 - ps3*Temp + ps4*Temp**2 - ps5 * Temp**3 & + + Psat = exp(-ps1/Temp + ps2 - ps3*Temp + ps4*Temp**2 - ps5 * Temp**3 & + ps6 * log(Temp))*p01 ! saturation vapour pressure ws = ws1 * Psat/(Pair - Psat) ! saturation mixing ratio - Qa(:,:,:) = rh * ws * p01/(c1 + rh * ws * p01) * p001 + Qa(:,:,:) = rh * ws * p01/(c1 + rh * ws * p01) * p001 ! specific humidity (kg/kg) endif ! atm_data_format @@ -3650,7 +3652,7 @@ subroutine oned_data rhoa (:,:,:) = 1.3_dbl_kind ! air density (kg/m^3) cldf (:,:,:) = p25 ! 
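! A single-point version of the Hyland-Wexler specific-humidity
! calculation above, with the p01/p001 constants written out as
! 0.01 and 0.001; rh is assumed to be supplied in percent, as the
! p01 factor in the Qa expression suggests.
      program hw83_demo
      implicit none
      real(8), parameter :: ps1=0.58002206d4, ps2=1.3914993d0,    &
                            ps3=0.48640239d-1, ps4=0.41764768d-4, &
                            ps5=0.14452093d-7, ps6=6.5459673d0,   &
                            ws1=621.97d0, Pair=1020.0d0
      real(8) :: T, rh, Psat, ws, Qa
      T  = 268.15d0     ! air temperature (K, test value)
      rh = 90.0d0       ! relative humidity (%, test value)
      Psat = exp(-ps1/T + ps2 - ps3*T + ps4*T**2 - ps5*T**3 &
             + ps6*log(T))*0.01d0      ! saturation vapour pressure (hPa)
      ws = ws1*Psat/(Pair - Psat)      ! saturation mixing ratio
      Qa = rh*ws*0.01d0/(1.0d0 + rh*ws*0.01d0)*0.001d0
      print *, 'specific humidity (kg/kg) =', Qa
      end program hw83_demo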
cloud fraction frain(:,:,:) = c0 ! this is available in hourlymet_rh file - + end subroutine oned_data !======================================================================= @@ -3831,19 +3833,19 @@ end subroutine ocn_data_clim subroutine ocn_data_ncar_init ! Reads NCAR pop ocean forcing data set 'pop_frc_gx1v3_010815.nc' -! +! ! List of ocean forcing fields: Note that order is important! ! (order is determined by field list in vname). -! -! For ocean mixed layer-----------------------------units -! -! 1 sst------temperature---------------------------(C) -! 2 sss------salinity------------------------------(ppt) -! 3 hbl------depth---------------------------------(m) -! 4 u--------surface u current---------------------(m/s) -! 5 v--------surface v current---------------------(m/s) -! 6 dhdx-----surface tilt x direction--------------(m/m) -! 7 dhdy-----surface tilt y direction--------------(m/m) +! +! For ocean mixed layer-----------------------------units +! +! 1 sst------temperature---------------------------(C) +! 2 sss------salinity------------------------------(ppt) +! 3 hbl------depth---------------------------------(m) +! 4 u--------surface u current---------------------(m/s) +! 5 v--------surface v current---------------------(m/s) +! 6 dhdx-----surface tilt x direction--------------(m/m) +! 7 dhdy-----surface tilt y direction--------------(m/m) ! 8 qdp------ocean sub-mixed layer heat flux-------(W/m2) ! ! Fields 4, 5, 6, 7 are on the U-grid; 1, 2, 3, and 8 are @@ -3858,7 +3860,7 @@ subroutine ocn_data_ncar_init use netcdf #endif - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & n , & ! field index m , & ! month index nrec, & ! record number for direct access @@ -3870,12 +3872,10 @@ subroutine ocn_data_ncar_init 'T', 'S', 'hblt', 'U', 'V', & 'dhdx', 'dhdy', 'qdp' / - integer (kind=int_kind) :: & - fid , & ! file id - dimid ! dimension id - integer (kind=int_kind) :: & status , & ! status flag + fid , & ! file id + dimid , & ! dimension id nlat , & ! number of longitudes of data nlon ! number of latitudes of data @@ -3894,7 +3894,7 @@ subroutine ocn_data_ncar_init write (nu_diag,*) 'WARNING: Alter ice_dyn_evp.F90 if desired.' if (restore_ocn) write (nu_diag,*) & - 'SST restoring timescale = ',trestore,' days' + 'SST restoring timescale = ',trestore,' days' sst_file = trim(ocn_data_dir)//'/'//trim(oceanmixed_file) ! not just sst @@ -3914,7 +3914,7 @@ subroutine ocn_data_ncar_init ! status = nf90_inq_dimid(fid,'nlon',dimid) status = nf90_inq_dimid(fid,'ni',dimid) status = nf90_inquire_dimension(fid,dimid,len=nlon) - + ! status = nf90_inq_dimid(fid,'nlat',dimid) status = nf90_inq_dimid(fid,'nj',dimid) status = nf90_inquire_dimension(fid,dimid,len=nlat) @@ -3933,7 +3933,7 @@ subroutine ocn_data_ncar_init ! Read in ocean forcing data for all 12 months do n=1,nfld do m=1,12 - + ! Note: netCDF does single to double conversion if necessary ! if (n >= 4 .and. n <= 7) then ! call ice_read_nc(fid, m, vname(n), work1, debug_forcing, & @@ -3989,19 +3989,19 @@ end subroutine ocn_data_ncar_init subroutine ocn_data_ncar_init_3D ! Reads NCAR pop ocean forcing data set 'oceanmixed_ice_depth.nc' -! +! ! List of ocean forcing fields: Note that order is important! ! (order is determined by field list in vname). -! -! For ocean mixed layer-----------------------------units -! -! 1 sst------temperature---------------------------(C) -! 2 sss------salinity------------------------------(ppt) -! 3 hbl------depth---------------------------------(m) -! 
4 u--------surface u current---------------------(m/s) -! 5 v--------surface v current---------------------(m/s) -! 6 dhdx-----surface tilt x direction--------------(m/m) -! 7 dhdy-----surface tilt y direction--------------(m/m) +! +! For ocean mixed layer-----------------------------units +! +! 1 sst------temperature---------------------------(C) +! 2 sss------salinity------------------------------(ppt) +! 3 hbl------depth---------------------------------(m) +! 4 u--------surface u current---------------------(m/s) +! 5 v--------surface v current---------------------(m/s) +! 6 dhdx-----surface tilt x direction--------------(m/m) +! 7 dhdy-----surface tilt y direction--------------(m/m) ! 8 qdp------ocean sub-mixed layer heat flux-------(W/m2) ! ! All fields are on the T-grid. @@ -4018,7 +4018,7 @@ subroutine ocn_data_ncar_init_3D #endif #ifdef USE_NETCDF - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & n , & ! field index m , & ! month index nzlev ! z level of currents @@ -4030,8 +4030,8 @@ subroutine ocn_data_ncar_init_3D 'dhdx', 'dhdy', 'qdp' / integer (kind=int_kind) :: & - fid , & ! file id - dimid ! dimension id + fid , & ! file id + dimid ! dimension id integer (kind=int_kind) :: & status , & ! status flag @@ -4054,7 +4054,7 @@ subroutine ocn_data_ncar_init_3D write (nu_diag,*) 'WARNING: Alter ice_dyn_evp.F if desired.' if (restore_ocn) write (nu_diag,*) & - 'SST restoring timescale = ',trestore,' days' + 'SST restoring timescale = ',trestore,' days' sst_file = trim(ocn_data_dir)//'/'//trim(oceanmixed_file) ! not just sst @@ -4075,7 +4075,7 @@ subroutine ocn_data_ncar_init_3D ! status = nf90_inq_dimid(fid,'nlon',dimid) status = nf90_inq_dimid(fid,'ni',dimid) status = nf90_inquire_dimension(fid,dimid,len=nlon) - + ! status = nf90_inq_dimid(fid,'nlat',dimid) status = nf90_inq_dimid(fid,'nj',dimid) status = nf90_inquire_dimension(fid,dimid,len=nlat) @@ -4094,7 +4094,7 @@ subroutine ocn_data_ncar_init_3D ! Read in ocean forcing data for all 12 months do n=1,nfld do m=1,12 - + ! Note: netCDF does single to double conversion if necessary if (n == 4 .or. n == 5) then ! 3D currents nzlev = 1 ! surface currents @@ -4105,7 +4105,7 @@ subroutine ocn_data_ncar_init_3D field_loc_center, field_type_scalar) endif - ! the land mask used in ocean_mixed_depth.nc does not + ! the land mask used in ocean_mixed_depth.nc does not ! match our gx1v3 mask (hm) where (work1(:,:,:) < -900.) work1(:,:,:) = c0 @@ -4168,7 +4168,7 @@ subroutine ocn_data_ncar(dt) real (kind=dbl_kind), intent(in) :: & dt ! time step - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & i, j, n, iblk , & ixm,ixp , & ! record numbers for neighboring months maxrec , & ! maximum record number @@ -4186,12 +4186,12 @@ subroutine ocn_data_ncar(dt) if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' !------------------------------------------------------------------- - ! monthly data + ! monthly data ! - ! Assume that monthly data values are located in the middle of the + ! Assume that monthly data values are located in the middle of the ! month. !------------------------------------------------------------------- - + midmonth = 15 ! data is given on 15th of every month ! midmonth = fix(p5 * real(daymo(mmonth),kind=dbl_kind)) ! exact middle @@ -4228,8 +4228,8 @@ subroutine ocn_data_ncar(dt) call interpolate_data (sst_data,work1) ! 
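! The interpolate_data calls in this routine blend the two bracketing
! monthly records with the linear weights c1intp and c2intp; a minimal
! sketch of that blend, using hypothetical record times and values:
      program interp_demo
      implicit none
      real(8) :: t1, t2, tnow, c1intp, c2intp, rec1, rec2
      t1 = 0.0d0                 ! first mid-month record time (s)
      t2 = 30.0d0*86400.0d0      ! second mid-month record time (s)
      tnow = 10.0d0*86400.0d0    ! current model time (s)
      c2intp = (tnow - t1)/(t2 - t1)
      c1intp = 1.0d0 - c2intp
      rec1 = 2.0d0               ! field value at t1 (hypothetical)
      rec2 = 5.0d0               ! field value at t2 (hypothetical)
      print *, 'interpolated value =', c1intp*rec1 + c2intp*rec2
      end program interp_demo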
masking by hm is necessary due to NaNs in the data file - do j = 1, ny_block - do i = 1, nx_block + do j = 1, ny_block + do i = 1, nx_block if (n == 2) sss (i,j,:) = c0 if (n == 3) hmix (i,j,:) = c0 if (n == 4) uocn (i,j,:) = c0 @@ -4252,21 +4252,21 @@ subroutine ocn_data_ncar(dt) enddo enddo - do j = 1, ny_block - do i = 1, nx_block - sss (i,j,:) = max (sss(i,j,:), c0) - hmix(i,j,:) = max(hmix(i,j,:), c0) - enddo - enddo + do j = 1, ny_block + do i = 1, nx_block + sss (i,j,:) = max (sss(i,j,:), c0) + hmix(i,j,:) = max(hmix(i,j,:), c0) + enddo + enddo call ocn_freezing_temperature if (restore_ocn) then - do j = 1, ny_block - do i = 1, nx_block - sst(i,j,:) = sst(i,j,:) + (work1(i,j,:)-sst(i,j,:))*dt/trest - enddo - enddo + do j = 1, ny_block + do i = 1, nx_block + sst(i,j,:) = sst(i,j,:) + (work1(i,j,:)-sst(i,j,:))*dt/trest + enddo + enddo ! else sst is only updated in ice_ocean.F endif @@ -4275,16 +4275,16 @@ subroutine ocn_data_ncar(dt) call interpolate_data (sst_data,sst) !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block + do j = 1, ny_block + do i = 1, nx_block if (hm(i,j,iblk) == c1) then - sst(i,j,iblk) = max (sst(i,j,iblk), Tf(i,j,iblk)) + sst(i,j,iblk) = max (sst(i,j,iblk), Tf(i,j,iblk)) else sst(i,j,iblk) = c0 endif - enddo - enddo - enddo + enddo + enddo + enddo !$OMP END PARALLEL DO endif @@ -4365,12 +4365,13 @@ subroutine ocn_data_hadgem(dt) ! Reads in HadGEM ocean forcing data as required from netCDF files ! Current options (selected by ocn_data_type) -! hadgem_sst: read in sst only +! hadgem_sst: read in sst only ! hadgem_sst_uvocn: read in sst plus uocn and vocn ! authors: Ann Keen, Met Office use ice_domain, only: nblocks + use ice_domain_size, only: max_blocks use ice_flux, only: sst, uocn, vocn use ice_grid, only: grid_average_X2Y, ANGLET @@ -4387,17 +4388,14 @@ subroutine ocn_data_hadgem(dt) real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks) :: & sstdat ! data value toward which SST is restored - real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & - work1 ! temporary array - real (kind=dbl_kind) :: workx, worky logical (kind=log_kind) :: readm - character (char_len) :: & + character (char_len) :: & fieldname ! field name in netcdf file - character (char_len_long) :: & + character (char_len_long) :: & filename ! name of netCDF file character(len=*), parameter :: subname = '(ocn_data_hadgem)' @@ -4458,7 +4456,7 @@ subroutine ocn_data_hadgem(dt) call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & maxrec, sst_file, fieldname, sst_data, & field_loc_center, field_type_scalar) - + ! Interpolate to current time step call interpolate_data (sst_data, sstdat) @@ -4474,14 +4472,14 @@ subroutine ocn_data_hadgem(dt) enddo enddo !$OMP END PARALLEL DO - endif + endif ! ----------------------------------------------------------- ! Ocean currents ! -------------- - ! Values read in are on T grid and oriented geographically, hence + ! Values read in are on T grid and oriented geographically, hence ! vectors need to be rotated to model grid and then interpolated - ! to U grid. + ! to U grid. ! Also need to be converted from cm s-1 (UM) to m s-1 (CICE) ! ----------------------------------------------------------- @@ -4492,7 +4490,7 @@ subroutine ocn_data_hadgem(dt) call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & maxrec, filename, fieldname, uocn_data, & field_loc_center, field_type_vector) - + ! 
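! The restoring update above, sst = sst + (data - sst)*dt/trest,
! decays the model-data misfit with e-folding time trest; integrating
! it for one restoring timescale leaves about exp(-1) of the initial
! misfit. A standalone check with test values:
      program restore_demo
      implicit none
      real(8) :: sst, sstdat, dt, trest
      integer :: n
      sst    = -1.0d0            ! model SST (C, test value)
      sstdat =  1.0d0            ! restoring target (C, test value)
      dt     = 3600.0d0          ! time step (s)
      trest  = 30.0d0*86400.0d0  ! 30-day restoring timescale (s)
      do n = 1, 720              ! 30 days of hourly steps
         sst = sst + (sstdat - sst)*dt/trest
      enddo
      print *, 'misfit after one e-folding time =', sstdat - sst
      end program restore_demo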
Interpolate to current time step call interpolate_data (uocn_data, uocn) @@ -4501,25 +4499,25 @@ subroutine ocn_data_hadgem(dt) call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & maxrec, filename, fieldname, vocn_data, & field_loc_center, field_type_vector) - + ! Interpolate to current time step call interpolate_data (vocn_data, vocn) - !----------------------------------------------------------------- - ! Rotate zonal/meridional vectors to local coordinates, + !----------------------------------------------------------------- + ! Rotate zonal/meridional vectors to local coordinates, ! and change units - !----------------------------------------------------------------- + !----------------------------------------------------------------- !$OMP PARALLEL DO PRIVATE(iblk,i,j,workx,worky) do iblk = 1, nblocks do j = 1, ny_block do i = 1, nx_block - workx = uocn(i,j,iblk) + workx = uocn(i,j,iblk) worky = vocn(i,j,iblk) - uocn(i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) & - + worky*sin(ANGLET(i,j,iblk)) - vocn(i,j,iblk) = worky*cos(ANGLET(i,j,iblk)) & + uocn(i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) & + + worky*sin(ANGLET(i,j,iblk)) + vocn(i,j,iblk) = worky*cos(ANGLET(i,j,iblk)) & - workx*sin(ANGLET(i,j,iblk)) uocn(i,j,iblk) = uocn(i,j,iblk) * cm_to_m @@ -4530,15 +4528,11 @@ subroutine ocn_data_hadgem(dt) enddo ! nblocks !$OMP END PARALLEL DO - !----------------------------------------------------------------- - ! Interpolate to U grid - !----------------------------------------------------------------- + !----------------------------------------------------------------- + ! Interpolate to U grid + !----------------------------------------------------------------- ! tcraig, this is now computed in dynamics for consistency - !work1 = uocn - !call grid_average_X2Y('F',work1,'T',uocn,'U') - !work1 = vocn - !call grid_average_X2Y('F',work1,'T',vocn,'U') endif ! ocn_data_type = hadgem_sst_uvocn @@ -4688,7 +4682,7 @@ subroutine hycom_atm_data call icepack_query_parameters(Tffresh_out=Tffresh) call icepack_query_parameters(secday_out=secday) - ! current time in HYCOM jday units (HYCOM ref year: 1900,12,31,000000) + ! current time in HYCOM jday units (HYCOM ref year: 1900,12,31,000000) hcdate = real(compute_days_between(1900,12,31,myear,mmonth,mday)) + msec/secday ! Init recnum try @@ -4897,13 +4891,13 @@ subroutine read_data_nc_point (flag, recd, yr, ixm, ixx, ixp, & ! write(nu_diag,*) 'ixm, ixx, ixp ', ixm, ixx, ixp ! write(nu_diag,*) 'maxrec ', maxrec ! write(nu_diag,*) 'fieldname ', fieldname - + call ice_open_nc (data_file, fid) arg = 1 nrec = recd + n2 - call ice_read_nc & + call ice_read_nc & (fid, nrec, fieldname, field_data(arg), debug_forcing, & field_loc, field_type) @@ -4918,7 +4912,7 @@ subroutine read_data_nc_point (flag, recd, yr, ixm, ixx, ixp, & arg = arg + 1 nrec = recd + ixx - call ice_read_nc & + call ice_read_nc & (fid, nrec, fieldname, field_data(arg), debug_forcing, & field_loc, field_type) @@ -4944,7 +4938,7 @@ subroutine read_data_nc_point (flag, recd, yr, ixm, ixx, ixp, & arg = arg + 1 nrec = recd + n4 - call ice_read_nc & + call ice_read_nc & (fid, nrec, fieldname, field_data(arg), debug_forcing, & field_loc, field_type) endif ! ixp /= -99 @@ -4966,7 +4960,7 @@ subroutine ISPOL_files if (local_debug .and. 
my_task == master_task) write(nu_diag,*) subname,'fdbg start' fsw_file = & - trim(atm_data_dir)//'/fsw_sfc_4Xdaily.nc' + trim(atm_data_dir)//'/fsw_sfc_4Xdaily.nc' flw_file = & trim(atm_data_dir)//'/flw_sfc_4Xdaily.nc' @@ -4978,10 +4972,10 @@ subroutine ISPOL_files trim(atm_data_dir)//'/uatm_10m_daily.nc' vwind_file = & - trim(atm_data_dir)//'/vatm_10m_daily.nc' + trim(atm_data_dir)//'/vatm_10m_daily.nc' tair_file = & - trim(atm_data_dir)//'/Tair_2m_daily.nc' + trim(atm_data_dir)//'/Tair_2m_daily.nc' humid_file = & trim(atm_data_dir)//'/Qa_2m_daily.nc' @@ -5004,7 +4998,7 @@ end subroutine ISPOL_files subroutine ISPOL_data -! Defines atmospheric data fields for Antarctic Weddell sea location +! Defines atmospheric data fields for Antarctic Weddell sea location ! authors: Nicole Jeffery, LANL ! @@ -5013,7 +5007,7 @@ subroutine ISPOL_data !local parameters - character (char_len_long) :: & + character (char_len_long) :: & met_file, & ! netcdf filename fieldname ! field name in netcdf file @@ -5022,19 +5016,19 @@ subroutine ISPOL_data Qa_data_p, fsnow_data_p, & fsw_data_p, flw_data_p, & uatm_data_p, vatm_data_p - - real (kind=dbl_kind), parameter :: & ! coefficients for Hyland-Wexler Qa - ps1 = 0.58002206e4_dbl_kind, & ! (K) + + real (kind=dbl_kind), parameter :: & ! coefficients for Hyland-Wexler Qa + ps1 = 0.58002206e4_dbl_kind, & ! (K) ps2 = 1.3914993_dbl_kind, & ! - ps3 = 0.48640239e-1_dbl_kind, & ! (K^-1) + ps3 = 0.48640239e-1_dbl_kind, & ! (K^-1) ps4 = 0.41764768e-4_dbl_kind, & ! (K^-2) ps5 = 0.14452093e-7_dbl_kind, & ! (K^-3) ps6 = 6.5459673_dbl_kind, & ! - ws1 = 621.97_dbl_kind, & ! for saturation mixing ratio - Pair = 1020._dbl_kind, & ! Sea level pressure (hPa) + ws1 = 621.97_dbl_kind, & ! for saturation mixing ratio + Pair = 1020._dbl_kind, & ! Sea level pressure (hPa) lapse_rate = 0.0065_dbl_kind ! (K/m) lapse rate over sea level - - ! for interpolation of hourly data + + ! for interpolation of hourly data integer (kind=int_kind) :: & ixm,ixx,ixp , & ! record numbers for neighboring months maxrec , & ! maximum record number @@ -5043,7 +5037,7 @@ subroutine ISPOL_data ! = 2 for date located at end of time interval real (kind=dbl_kind) :: & secday , & - Qa_pnt + Qa_pnt real (kind=dbl_kind) :: & sec1hr ! number of seconds in 1 hour @@ -5062,20 +5056,20 @@ subroutine ISPOL_data call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - + if (trim(atm_data_format) == 'nc') then ! read nc file - + !------------------------------------------------------------------- ! data from NCEP_DOE Reanalysis 2 and Bareiss et al 2008 - ! daily data located at the end of the 24-hour period. + ! daily data located at the end of the 24-hour period. !------------------------------------------------------------------- dataloc = 2 ! data located at end of interval sec1hr = secday ! seconds in day - maxrec = 366 ! + maxrec = 366 ! ! current record number - recnum = int(yday) + recnum = int(yday) ! Compute record numbers for surrounding data (2 on each side) ixm = mod(recnum+maxrec-2,maxrec) + 1 @@ -5092,11 +5086,11 @@ subroutine ISPOL_data read1 = .false. if (istep==1 .or. oldrecnum .ne. recnum) read1 = .true. - + ! 
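! The daily-record bookkeeping above wraps around the year ends via
! mod arithmetic; for example, on day 1 the preceding record is the
! last record of the 366-record file. A standalone check:
      program recwrap_demo
      implicit none
      integer :: recnum, maxrec, ixm, ixx
      maxrec = 366
      recnum = 1                             ! int(yday) on 1 January
      ixm = mod(recnum+maxrec-2, maxrec) + 1
      ixx = mod(recnum-1,        maxrec) + 1
      print *, 'bracketing records:', ixm, ixx   ! 366 and 1
      end program recwrap_demo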
Daily 2m Air temperature 1991 - - met_file = tair_file - fieldname='Tair' + + met_file = tair_file + fieldname='Tair' call read_data_nc_point(read1, 0, fyear, ixm, ixx, ixp, & maxrec, met_file, fieldname, Tair_data_p, & @@ -5106,7 +5100,7 @@ subroutine ISPOL_data + c2intp * Tair_data_p(2) & - lapse_rate*8.0_dbl_kind - met_file = humid_file + met_file = humid_file fieldname='Qa' call read_data_nc_point(read1, 0, fyear, ixm, ixx, ixp, & @@ -5114,7 +5108,7 @@ subroutine ISPOL_data field_loc_center, field_type_scalar) Qa_pnt= c1intp * Qa_data_p(1) & - + c2intp * Qa_data_p(2) + + c2intp * Qa_data_p(2) Qa(:,:,:) = Qa_pnt met_file = uwind_file @@ -5125,19 +5119,19 @@ subroutine ISPOL_data field_loc_center, field_type_scalar) uatm(:,:,:) = c1intp * uatm_data_p(1) & - + c2intp * uatm_data_p(2) + + c2intp * uatm_data_p(2) met_file = vwind_file fieldname='vatm' - + call read_data_nc_point(read1, 0, fyear, ixm, ixx, ixp, & maxrec, met_file, fieldname, vatm_data_p, & field_loc_center, field_type_scalar) vatm(:,:,:) = c1intp * vatm_data_p(1) & + c2intp * vatm_data_p(2) - - met_file = rain_file + + met_file = rain_file fieldname='fsnow' call read_data_nc_point(read1, 0, fyear, ixm, ixx, ixp, & @@ -5145,7 +5139,7 @@ subroutine ISPOL_data field_loc_center, field_type_scalar) fsnow(:,:,:) = (c1intp * fsnow_data_p(1) + & - c2intp * fsnow_data_p(2)) + c2intp * fsnow_data_p(2)) !----------------------------- !fsw and flw are every 6 hours @@ -5155,7 +5149,7 @@ subroutine ISPOL_data maxrec = 1460 ! 366*4 ! current record number - recnum4X = 4*int(yday) - 3 + int(real(msec,kind=dbl_kind)/sec1hr) + recnum4X = 4*int(yday) - 3 + int(real(msec,kind=dbl_kind)/sec1hr) ! Compute record numbers for surrounding data (2 on each side) ixm = mod(recnum4X+maxrec-2,maxrec) + 1 @@ -5183,14 +5177,14 @@ subroutine ISPOL_data + c2intp * fsw_data_p(2) met_file = flw_file - fieldname='flw' + fieldname='flw' call read_data_nc_point(read1, 0, fyear, ixm, ixx, ixp, & maxrec, met_file, fieldname, flw_data_p, & field_loc_center, field_type_scalar) flw(:,:,:) = c1intp * flw_data_p(1) & - + c2intp * flw_data_p(2) + + c2intp * flw_data_p(2) endif !nc !flw given cldf and Tair calculated in prepare_forcing @@ -5202,7 +5196,7 @@ subroutine ISPOL_data rhoa (:,:,:) = 1.3_dbl_kind ! air density (kg/m^3) cldf(:,:,:) = c1 !0.25_dbl_kind ! cloud fraction frain(:,:,:) = c0 ! this is available in hourlymet_rh file - + ! Save record number for next time step oldrecnum = recnum oldrecnum4X = recnum4X @@ -5211,20 +5205,20 @@ end subroutine ISPOL_data !======================================================================= - subroutine ocn_data_ispol_init + subroutine ocn_data_ispol_init ! Reads NCAR pop ocean forcing data set 'pop_frc_gx1v3_010815.nc' ! at the ISPOL location -67.4677N, 310.4375E ! -! For ocean mixed layer-----------------------------units -! -! 1 sst------temperature---------------------------(C) -! 2 sss------salinity------------------------------(ppt) -! 3 hbl------depth---------------------------------(m) -! 4 u--------surface u current---------------------(m/s) +! For ocean mixed layer-----------------------------units +! +! 1 sst------temperature---------------------------(C) +! 2 sss------salinity------------------------------(ppt) +! 3 hbl------depth---------------------------------(m) +! 4 u--------surface u current---------------------(m/s) ! 5 v--------surface v current---------------------(m/s) -! 6 dhdx-----surface tilt x direction--------------(m/m) -! 7 dhdy-----surface tilt y direction--------------(m/m) +! 
6 dhdx-----surface tilt x direction--------------(m/m) +! 7 dhdy-----surface tilt y direction--------------(m/m) ! 8 qdp------ocean sub-mixed layer heat flux-------(W/m2) ! ! Fields 4, 5, 6, 7 are on the U-grid; 1, 2, 3, and 8 are @@ -5235,7 +5229,7 @@ subroutine ocn_data_ispol_init use ice_gather_scatter use ice_read_write - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & n , & ! field index m ! month index @@ -5246,13 +5240,10 @@ subroutine ocn_data_ispol_init 'dhdx', 'dhdy', 'qdp' / real (kind=dbl_kind) :: & - work - - integer (kind=int_kind) :: & - fid ! file id + work integer (kind=int_kind) :: & - status ! status flag + fid ! file id character(len=*), parameter :: subname = '(ocn_data_ispol_init)' @@ -5261,7 +5252,7 @@ subroutine ocn_data_ispol_init if (my_task == master_task) then if (restore_ocn) write (nu_diag,*) & - 'SST restoring timescale = ',trestore,' days' + 'SST restoring timescale = ',trestore,' days' sst_file = trim(ocn_data_dir)//'/'//trim(oceanmixed_file) ! not just sst @@ -5280,14 +5271,14 @@ subroutine ocn_data_ispol_init ! Read in ocean forcing data for all 12 months do n=1,nfld - do m=1,12 + do m=1,12 ! Note: netCDF does single to double conversion if necessary if (n >= 4 .and. n <= 7) then call ice_read_nc(fid, m, vname(n), work, debug_forcing, & field_loc_NEcorner, field_type_vector) else call ice_read_nc(fid, m, vname(n), work, debug_forcing, & - field_loc_center, field_type_scalar) + field_loc_center, field_type_scalar) endif ocn_frc_m(:,:,:,n,m) = work enddo ! month loop @@ -5316,7 +5307,6 @@ subroutine box2001_data_atm ! authors: Elizabeth Hunke, LANL use ice_domain, only: nblocks, blocks_ice - use ice_domain_size, only: max_blocks use ice_calendar, only: timesecs use ice_blocks, only: block, get_block, nx_block, ny_block, nghost use ice_flux, only: uatm, vatm, wind, rhoa, strax, stray @@ -5347,8 +5337,8 @@ subroutine box2001_data_atm period = c4*secday do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block + do j = 1, ny_block + do i = 1, nx_block this_block = get_block(blocks_ice(iblk),iblk) iglob = this_block%i_glob @@ -5396,8 +5386,8 @@ subroutine box2001_data_atm ! / real(ny_global,kind=dbl_kind) ! initialization test - enddo - enddo + enddo + enddo enddo ! nblocks end subroutine box2001_data_atm @@ -5411,8 +5401,6 @@ subroutine box2001_data_ocn ! authors: Elizabeth Hunke, LANL use ice_domain, only: nblocks, blocks_ice - use ice_domain_size, only: max_blocks - use ice_calendar, only: timesecs use ice_blocks, only: block, get_block, nx_block, ny_block, nghost use ice_flux, only: uocn, vocn use ice_grid, only: uvm @@ -5429,16 +5417,13 @@ subroutine box2001_data_ocn type (block) :: & this_block ! block information for current block - real (kind=dbl_kind) :: & - secday, pi , puny, period, pi2, tau - character(len=*), parameter :: subname = '(box2001_data_ocn)' if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block + do j = 1, ny_block + do i = 1, nx_block this_block = get_block(blocks_ice(iblk),iblk) iglob = this_block%i_glob @@ -5454,8 +5439,8 @@ subroutine box2001_data_ocn uocn(i,j,iblk) = uocn(i,j,iblk) * uvm(i,j,iblk) vocn(i,j,iblk) = vocn(i,j,iblk) * uvm(i,j,iblk) - enddo - enddo + enddo + enddo enddo ! nblocks end subroutine box2001_data_ocn @@ -5466,7 +5451,6 @@ subroutine uniform_data_atm(dir,spd) ! 
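! The 6-hourly record number used earlier (recnum4X) maps a
! (day-of-year, seconds-of-day) pair onto four records per day; a
! standalone check, assuming sec1hr is set to secday/4 in the
! 6-hourly branch (that assignment is not shown in this diff):
      program rec6h_demo
      implicit none
      real(8), parameter :: secday = 86400.0d0
      real(8) :: yday, sec6hr
      integer :: msec, recnum4X
      yday = 2.0d0                  ! second day of the year
      msec = 45000                  ! 12:30 UTC
      sec6hr = secday/4.0d0
      recnum4X = 4*int(yday) - 3 + int(real(msec,8)/sec6hr)
      print *, 'recnum4X =', recnum4X   ! record 7 of 1460
      end program rec6h_demo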
uniform wind fields in some direction use ice_domain, only: nblocks - use ice_domain_size, only: max_blocks use ice_blocks, only: nx_block, ny_block, nghost use ice_flux, only: uatm, vatm, wind, rhoa, strax, stray use ice_state, only: aice @@ -5516,17 +5500,17 @@ subroutine uniform_data_atm(dir,spd) endif do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block + do j = 1, ny_block + do i = 1, nx_block ! wind stress wind(i,j,iblk) = sqrt(uatm(i,j,iblk)**2 + vatm(i,j,iblk)**2) tau = rhoa(i,j,iblk) * 0.0012_dbl_kind * wind(i,j,iblk) strax(i,j,iblk) = aice(i,j,iblk) * tau * uatm(i,j,iblk) stray(i,j,iblk) = aice(i,j,iblk) * tau * vatm(i,j,iblk) - + + enddo enddo - enddo enddo ! nblocks end subroutine uniform_data_atm @@ -5537,25 +5521,19 @@ subroutine uniform_data_ocn(dir,spd) ! uniform current fields in some direction - use ice_domain, only: nblocks - use ice_domain_size, only: max_blocks - use ice_blocks, only: nx_block, ny_block, nghost use ice_flux, only: uocn, vocn character(len=*), intent(in) :: dir - real(kind=dbl_kind), intent(in), optional :: spd ! velocity + real(kind=dbl_kind), intent(in), optional :: spd ! velocity ! local parameters - integer (kind=int_kind) :: & - iblk, i,j ! loop indices - real(kind=dbl_kind) :: & ocn_val ! value to use for ocean currents character(len=*), parameter :: subname = '(uniform_data_ocn)' - + if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' if (present(spd)) then @@ -5583,9 +5561,9 @@ end subroutine uniform_data_ocn !======================================================================= subroutine get_wave_spec - + use ice_read_write, only: ice_read_nc_xyf - use ice_arrays_column, only: wave_spectrum, wave_sig_ht, & + use ice_arrays_column, only: wave_spectrum, & dwavefreq, wavefreq use ice_constants, only: c0 use ice_domain_size, only: nfreq @@ -5593,8 +5571,7 @@ subroutine get_wave_spec ! local variables integer (kind=int_kind) :: & - fid, & ! file id for netCDF routines - k + fid ! file id for netCDF routines real(kind=dbl_kind), dimension(nfreq) :: & wave_spectrum_profile ! wave spectrum @@ -5686,9 +5663,6 @@ subroutine init_snowtable snw_aging_table, & ! aging table setting fieldname ! field name in netcdf file - integer (kind=int_kind) :: & - j, k ! 
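! The bulk wind stress in uniform_data_atm above uses a constant drag
! coefficient of 1.2e-3 and scales the stress by ice area; a one-point
! evaluation with test values:
      program drag_demo
      implicit none
      real(8) :: rhoa, uatm, vatm, aice, wind, tau
      rhoa = 1.3d0      ! air density (kg/m^3)
      uatm = 5.0d0      ! zonal wind (m/s, test value)
      vatm = 0.0d0      ! meridional wind (m/s, test value)
      aice = 0.9d0      ! ice area fraction (test value)
      wind = sqrt(uatm**2 + vatm**2)
      tau  = rhoa * 0.0012d0 * wind
      print *, 'strax, stray (N/m^2) =', aice*tau*uatm, aice*tau*vatm
      end program drag_demo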
indices - character(len=*), parameter :: subname = '(init_snowtable)' !----------------------------------------------------------------- @@ -5816,7 +5790,8 @@ subroutine init_snowtable write(nu_diag,*) subname,' snoage_tau (1,1,1) = ',snowage_tau (1,1,1) write(nu_diag,*) subname,' snoage_kappa (1,1,1) = ',snowage_kappa(1,1,1) write(nu_diag,*) subname,' snoage_drdt0 (1,1,1) = ',snowage_drdt0(1,1,1) - write(nu_diag,*) subname,' Data at rhos, Tgrd, T = ',snowage_rhos(idx_rhos_max),snowage_Tgrd(idx_Tgrd_max),snowage_T(idx_T_max) + write(nu_diag,*) subname,' Data at rhos, Tgrd, T = ', & + snowage_rhos(idx_rhos_max),snowage_Tgrd(idx_Tgrd_max),snowage_T(idx_T_max) write(nu_diag,*) subname,' snoage_tau (max,max,max) = ',snowage_tau (idx_rhos_max, idx_Tgrd_max, idx_T_max) write(nu_diag,*) subname,' snoage_kappa (max,max,max) = ',snowage_kappa(idx_rhos_max, idx_Tgrd_max, idx_T_max) write(nu_diag,*) subname,' snoage_drdt0 (max,max,max) = ',snowage_drdt0(idx_rhos_max, idx_Tgrd_max, idx_T_max) diff --git a/cicecore/cicedynB/general/ice_forcing_bgc.F90 b/cicecore/cicedynB/general/ice_forcing_bgc.F90 index 383d388de..fc440834c 100644 --- a/cicecore/cicedynB/general/ice_forcing_bgc.F90 +++ b/cicecore/cicedynB/general/ice_forcing_bgc.F90 @@ -192,7 +192,7 @@ subroutine get_forcing_bgc ! Read two monthly silicate values and interpolate. ! Restore toward interpolated value. !------------------------------------------------------------------- - + if (trim(bgc_data_type)=='clim' .AND. tr_bgc_Sil) then ! call read_clim_data (readm, 0, ixm, mmonth, ixp, & ! sil_file, sil_data, & @@ -202,7 +202,7 @@ subroutine get_forcing_bgc sil_file, fieldname, sil_data, & field_loc_center, field_type_scalar) call interpolate_data (sil_data, sildat) - + if (istep == 1 .or. .NOT. restore_bgc) then !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block) @@ -275,7 +275,7 @@ subroutine get_forcing_bgc ! Restore toward interpolated value. !------------------------------------------------------------------- - if (trim(bgc_data_type)=='clim' .AND. tr_bgc_Nit) then + if (trim(bgc_data_type)=='clim' .AND. tr_bgc_Nit) then ! call read_clim_data (readm, 0, ixm, mmonth, ixp, & ! nit_file, nit_data, & ! field_loc_center, field_type_scalar) @@ -321,7 +321,7 @@ subroutine get_forcing_bgc do i = ilo, ihi nit(i,j,iblk) = nit(i,j,iblk) & - + (nitdat(i,j,iblk)-nit(i,j,iblk))*dt/trest + + (nitdat(i,j,iblk)-nit(i,j,iblk))*dt/trest ks = icepack_max_algae + 1 ocean_bio_all(i,j,ks,iblk) = nit(i,j,iblk) !nit ks = 2*icepack_max_algae + icepack_max_doc + 7 + icepack_max_dic @@ -332,7 +332,7 @@ subroutine get_forcing_bgc !$OMP END PARALLEL DO endif !restore_bgc -! elseif (trim(nit_data_type) == 'sss' .AND. tr_bgc_Nit) then +! elseif (trim(nit_data_type) == 'sss' .AND. tr_bgc_Nit) then ! !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block) ! do iblk = 1, nblocks @@ -345,11 +345,11 @@ subroutine get_forcing_bgc ! do j = jlo, jhi ! do i = ilo, ihi -! nit(i,j,iblk) = sss(i,j,iblk) +! nit(i,j,iblk) = sss(i,j,iblk) ! ks = icepack_max_algae + 1 -! ocean_bio_all(i,j,ks,iblk) = nit(i,j,iblk) !nit +! ocean_bio_all(i,j,ks,iblk) = nit(i,j,iblk) !nit ! ks = 2*icepack_max_algae + icepack_max_doc + 7 + icepack_max_dic -! ocean_bio_all(i,j,ks,iblk) = nit(i,j,iblk) !PON +! ocean_bio_all(i,j,ks,iblk) = nit(i,j,iblk) !PON ! enddo ! enddo ! 
enddo @@ -367,12 +367,12 @@ subroutine get_forcing_bgc do j = jlo, jhi do i = ilo, ihi - + nit(i,j,iblk) = 12.0_dbl_kind ks = icepack_max_algae + 1 - ocean_bio_all(i,j,ks,iblk) = nit(i,j,iblk) !nit + ocean_bio_all(i,j,ks,iblk) = nit(i,j,iblk) !nit ks = 2*icepack_max_algae + icepack_max_doc + 7 + icepack_max_dic - ocean_bio_all(i,j,ks,iblk) = nit(i,j,iblk) !PON + ocean_bio_all(i,j,ks,iblk) = nit(i,j,iblk) !PON enddo enddo enddo @@ -381,15 +381,15 @@ subroutine get_forcing_bgc endif !tr_bgc_Nit !------------------------------------------------------------------- - ! Data from Papdimitrious et al., 2007, Limnol. Oceanogr. - ! and WOA at 68oS, 304.5oE : - ! daily data located at the end of the 24-hour period. + ! Data from Papdimitrious et al., 2007, Limnol. Oceanogr. + ! and WOA at 68oS, 304.5oE : + ! daily data located at the end of the 24-hour period. !------------------------------------------------------------------- if (trim(bgc_data_type) == 'ISPOL') then nit_file = trim(bgc_data_dir)//'nutrients_daily_ISPOL_WOA_field3.nc' - sil_file = trim(bgc_data_dir)//'nutrients_daily_ISPOL_WOA_field3.nc' + sil_file = trim(bgc_data_dir)//'nutrients_daily_ISPOL_WOA_field3.nc' if (my_task == master_task .and. istep == 1) then if (tr_bgc_Sil) then @@ -408,45 +408,45 @@ subroutine get_forcing_bgc dataloc = 2 ! data located at end of interval sec1hr = secday ! seconds in day - maxrec = 365 ! + maxrec = 365 ! ! current record number - recnum = int(yday) + recnum = int(yday) ! Compute record numbers for surrounding data (2 on each side) ixm = mod(recnum+maxrec-2,maxrec) + 1 ixx = mod(recnum-1, maxrec) + 1 - + recslot = 2 ixp = -99 call interp_coeff (recnum, recslot, sec1hr, dataloc) read1 = .false. if (istep==1 .or. bgcrecnum .ne. recnum) read1 = .true. - - + + if (tr_bgc_Sil) then met_file = sil_file - fieldname= 'silicate' + fieldname= 'silicate' call read_data_nc_point(read1, 0, fyear, ixm, ixx, ixp, & maxrec, met_file, fieldname, sil_data_p, & field_loc_center, field_type_scalar) - + sil(:,:,:) = c1intp * sil_data_p(1) & + c2intp * sil_data_p(2) endif if (tr_bgc_Nit) then met_file = nit_file - fieldname= 'nitrate' + fieldname= 'nitrate' call read_data_nc_point(read1, 0, fyear, ixm, ixx, ixp, & maxrec, met_file, fieldname, nit_data_p, & field_loc_center, field_type_scalar) - + nit(:,:,:) = c1intp * nit_data_p(1) & + c2intp * nit_data_p(2) endif - + !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block) do iblk = 1, nblocks @@ -458,13 +458,13 @@ subroutine get_forcing_bgc do j = jlo, jhi do i = ilo, ihi - + ks = 2*icepack_max_algae + icepack_max_doc + 3 + icepack_max_dic - ocean_bio_all(i,j,ks,iblk) = sil(i,j,iblk) !Sil + ocean_bio_all(i,j,ks,iblk) = sil(i,j,iblk) !Sil ks = icepack_max_algae + 1 ocean_bio_all(i,j,ks,iblk) = nit(i,j,iblk) !nit ks = 2*icepack_max_algae + icepack_max_doc + 7 + icepack_max_dic - ocean_bio_all(i,j,ks,iblk) = nit(i,j,iblk) !PON + ocean_bio_all(i,j,ks,iblk) = nit(i,j,iblk) !PON enddo enddo enddo @@ -480,11 +480,11 @@ end subroutine get_forcing_bgc ! ! author: Nicole Jeffery, LANL - subroutine get_atm_bgc + subroutine get_atm_bgc use ice_blocks, only: block, get_block use ice_domain, only: nblocks, blocks_ice - use ice_domain_size, only: n_zaero + use ice_domain_size, only: n_zaero use ice_flux_bgc, only: flux_bio_atm, faero_atm ! local variables @@ -492,7 +492,7 @@ subroutine get_atm_bgc integer (kind=int_kind) :: & i, j, nn , & ! horizontal indices ilo,ihi,jlo,jhi, & ! beginning and end of physical domain - iblk ! block index + iblk ! 
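! The ocean_bio_all slots in get_forcing_bgc above are computed from
! the icepack tracer maxima; with the usual icepack values
! (max_algae=3, max_doc=3, max_dic=1 -- assumed here, not shown in
! this diff) the nit, Sil and PON slots work out as follows:
      program bio_index_demo
      implicit none
      integer, parameter :: max_algae = 3, max_doc = 3, max_dic = 1
      print *, 'nit slot =', max_algae + 1
      print *, 'Sil slot =', 2*max_algae + max_doc + 3 + max_dic
      print *, 'PON slot =', 2*max_algae + max_doc + 7 + max_dic
      end program bio_index_demo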
block index logical (kind=log_kind) :: & tr_zaero @@ -520,15 +520,15 @@ subroutine get_atm_bgc !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,nn) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi - - do nn = 1, n_zaero + + do nn = 1, n_zaero do j = jlo, jhi - do i = ilo, ihi + do i = ilo, ihi flux_bio_atm(i,j,nlt_zaero(nn),iblk) = faero_atm(i,j,nn,iblk) enddo enddo @@ -569,10 +569,10 @@ subroutine faero_default faero_atm(:,:,1,:) = 1.e-12_dbl_kind ! kg/m^2 s faero_atm(:,:,2,:) = 1.e-13_dbl_kind - faero_atm(:,:,3,:) = 1.e-14_dbl_kind - faero_atm(:,:,4,:) = 1.e-14_dbl_kind - faero_atm(:,:,5,:) = 1.e-14_dbl_kind - faero_atm(:,:,6,:) = 1.e-14_dbl_kind + faero_atm(:,:,3,:) = 1.e-14_dbl_kind + faero_atm(:,:,4,:) = 1.e-14_dbl_kind + faero_atm(:,:,5,:) = 1.e-14_dbl_kind + faero_atm(:,:,6,:) = 1.e-14_dbl_kind end subroutine faero_default @@ -598,11 +598,11 @@ subroutine faero_data aero2_data , & ! field values at 2 temporal data points aero3_data ! field values at 2 temporal data points - character (char_len_long) :: & + character (char_len_long) :: & aero_file, & ! netcdf filename fieldname ! field name in netcdf file - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & ixm,ixp , & ! record numbers for neighboring months maxrec , & ! maximum record number recslot , & ! spline slot for current record @@ -618,9 +618,9 @@ subroutine faero_data !------------------------------------------------------------------- - ! monthly data + ! monthly data ! - ! Assume that monthly data values are located in the middle of the + ! Assume that monthly data values are located in the middle of the ! month. !------------------------------------------------------------------- @@ -645,12 +645,12 @@ subroutine faero_data ! Find interpolation coefficients call interp_coeff_monthly (recslot) - ! Read 2 monthly values + ! Read 2 monthly values readm = .false. if (istep==1 .or. (mday==midmonth .and. msec==0)) readm = .true. -! aero_file = trim(atm_data_dir)//'faero.nc' - aero_file = '/usr/projects/climate/eclare/DATA/gx1v3/faero.nc' +! aero_file = trim(atm_data_dir)//'faero.nc' + aero_file = '/usr/projects/climate/eclare/DATA/gx1v3/faero.nc' fieldname='faero_atm001' call read_clim_data_nc (readm, 0, ixm, mmonth, ixp, & @@ -695,11 +695,11 @@ subroutine fzaero_data save :: & aero_data ! field values at 2 temporal data points - character (char_len_long) :: & + character (char_len_long) :: & aero_file, & ! netcdf filename fieldname ! field name in netcdf file - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & ixm,ixp , & ! record numbers for neighboring months maxrec , & ! maximum record number recslot , & ! spline slot for current record @@ -720,9 +720,9 @@ subroutine fzaero_data allocate( aero_data(nx_block,ny_block,2,max_blocks) ) !------------------------------------------------------------------- - ! monthly data + ! monthly data ! - ! Assume that monthly data values are located in the middle of the + ! Assume that monthly data values are located in the middle of the ! month. !------------------------------------------------------------------- @@ -747,13 +747,13 @@ subroutine fzaero_data ! Find interpolation coefficients call interp_coeff_monthly (recslot) - ! Read 2 monthly values + ! Read 2 monthly values readm = .false. if (istep==1 .or. (mday==midmonth .and. msec==0)) readm = .true. -! aero_file = trim(atm_data_dir)//'faero.nc' +! 
aero_file = trim(atm_data_dir)//'faero.nc' ! Cam5 monthly total black carbon deposition on the gx1 grid" - aero_file = '/usr/projects/climate/njeffery/DATA/CAM/Hailong_Wang/Cam5_bc_monthly_popgrid.nc' + aero_file = '/usr/projects/climate/njeffery/DATA/CAM/Hailong_Wang/Cam5_bc_monthly_popgrid.nc' fieldname='bcd' call read_clim_data_nc (readm, 0, ixm, mmonth, ixp, & @@ -786,11 +786,11 @@ subroutine init_bgc_data (fed1,fep1) ! local parameters integer (kind=int_kind) :: & - fid ! file id for netCDF file + fid ! file id for netCDF file logical (kind=log_kind) :: diag - character (char_len_long) :: & + character (char_len_long) :: & iron_file, & ! netcdf filename fieldname ! field name in netcdf file @@ -802,7 +802,7 @@ subroutine init_bgc_data (fed1,fep1) !------------------------------------------------------------------- if (trim(fe_data_type) == 'clim') then - diag = .true. ! write diagnostic information + diag = .true. ! write diagnostic information iron_file = trim(bgc_data_dir)//'dFe_50m_annual_Tagliabue_gx1.nc' if (my_task == master_task) then @@ -814,12 +814,12 @@ subroutine init_bgc_data (fed1,fep1) fieldname='dFe' ! Currently only first fed value is read - call ice_read_nc(fid,1,fieldname,fed1,diag) - where ( fed1(:,:,:) > 1.e20) fed1(:,:,:) = p1 + call ice_read_nc(fid,1,fieldname,fed1,diag) + where ( fed1(:,:,:) > 1.e20) fed1(:,:,:) = p1 - if (my_task == master_task) call ice_close_nc(fid) + if (my_task == master_task) call ice_close_nc(fid) - diag = .true. ! write diagnostic information + diag = .true. ! write diagnostic information iron_file = trim(bgc_data_dir)//'pFe_bathy_gx1.nc' if (my_task == master_task) then @@ -831,13 +831,13 @@ subroutine init_bgc_data (fed1,fep1) fieldname='pFe' ! Currently only first fep value is read - call ice_read_nc(fid,1,fieldname,fep1,diag) - where ( fep1(:,:,:) > 1.e20) fep1(:,:,:) = p1 + call ice_read_nc(fid,1,fieldname,fep1,diag) + where ( fep1(:,:,:) > 1.e20) fep1(:,:,:) = p1 + + if (my_task == master_task) call ice_close_nc(fid) - if (my_task == master_task) call ice_close_nc(fid) - endif - + end subroutine init_bgc_data !======================================================================= @@ -871,7 +871,7 @@ subroutine faero_optics logical (kind=log_kind) :: modal_aero - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & varid , & ! variable id status , & ! status output from netcdf routines n, k ! index @@ -880,9 +880,9 @@ subroutine faero_optics amin, amax, asum ! min, max values and sum of input array integer (kind=int_kind) :: & - fid ! file id for netCDF file + fid ! file id for netCDF file - character (char_len_long) :: & + character (char_len_long) :: & fieldname ! 
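! init_bgc_data above replaces netCDF fill values (> 1e20) with the
! fallback p1 = 0.1 via a where mask; a minimal one-dimensional
! sketch of that fill handling:
      program fill_demo
      implicit none
      real(8) :: fed1(4)
      fed1 = (/ 0.3d0, 9.9d36, 0.5d0, 9.9d36 /)  ! 9.9e36 ~ netCDF fill
      where (fed1 > 1.0d20) fed1 = 0.1d0
      print *, fed1
      end program fill_demo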
field name in netcdf file character(len=*), parameter :: subname = '(faero_optics)' @@ -972,12 +972,12 @@ subroutine faero_optics fieldname=optics_file_fieldname status = nf90_inq_varid(fid, trim(fieldname), varid) - + if (status /= nf90_noerr) then call abort_ice (subname//'ERROR: Cannot find variable '//trim(fieldname)) endif status = nf90_get_var( fid, varid, bcenh, & - start=(/1,1,1,1/), & + start=(/1,1,1,1/), & count=(/3,10,8,1/) ) do n=1,10 amin = minval(bcenh(:,n,:)) @@ -985,13 +985,13 @@ subroutine faero_optics asum = sum (bcenh(:,n,:)) write(nu_diag,*) ' min, max, sum =', amin, amax, asum enddo - call ice_close_nc(fid) + call ice_close_nc(fid) endif !master_task do n=1,3 do k=1,8 call broadcast_array(bcenh(n,:,k), master_task) enddo - enddo + enddo #else call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & file=__FILE__, line=__LINE__) diff --git a/cicecore/cicedynB/general/ice_init.F90 b/cicecore/cicedynB/general/ice_init.F90 index 9b6bf673c..c2cc986f8 100644 --- a/cicecore/cicedynB/general/ice_init.F90 +++ b/cicecore/cicedynB/general/ice_init.F90 @@ -5,7 +5,7 @@ ! authors Elizabeth C. Hunke and William H. Lipscomb, LANL ! C. M. Bitz, UW ! -! 2004 WHL: Block structure added +! 2004 WHL: Block structure added ! 2006 ECH: Added namelist variables, warnings. ! Replaced old default initial ice conditions with 3.14 version. ! Converted to free source form (F90). @@ -14,7 +14,8 @@ module ice_init use ice_kinds_mod use ice_communicate, only: my_task, master_task, ice_barrier - use ice_constants, only: c0, c1, c2, c3, c5, p2, p3, p5, p75, p166 + use ice_constants, only: c0, c1, c2, c3, c5, c12, p2, p3, p5, p75, p166, & + cm_to_m use ice_exit, only: abort_ice use ice_fileunits, only: nu_nml, nu_diag, nu_diag_set, nml_filename, diag_type, & ice_stdout, get_fileunit, release_fileunit, bfbflag, flush_fileunit, & @@ -72,9 +73,15 @@ subroutine input_data npt, dt, ndtd, days_per_year, use_leap_years, & write_ic, dump_last, npt_unit use ice_arrays_column, only: oceanmixed_ice +#ifdef UNDEPRECATE_CESMPONDS use ice_restart_column, only: restart_age, restart_FY, restart_lvl, & restart_pond_cesm, restart_pond_lvl, restart_pond_topo, restart_aero, & restart_fsd, restart_iso, restart_snow +#else + use ice_restart_column, only: restart_age, restart_FY, restart_lvl, & + restart_pond_lvl, restart_pond_topo, restart_aero, & + restart_fsd, restart_iso, restart_snow +#endif use ice_restart_shared, only: & restart, restart_ext, restart_coszen, restart_dir, restart_file, pointer_file, & runid, runtype, use_restart_time, restart_format, lcdf64 @@ -90,7 +97,7 @@ subroutine input_data atm_data_format, ocn_data_format, & bgc_data_type, & ocn_data_type, ocn_data_dir, wave_spec_file, & - oceanmixed_file, restore_ocn, trestore, & + oceanmixed_file, restore_ocn, trestore, & ice_data_type, ice_data_conc, ice_data_dist, & snw_filename, & snw_tau_fname, snw_kappa_fname, snw_drdt0_fname, & @@ -101,7 +108,7 @@ subroutine input_data bathymetry_format, kmt_type, & grid_type, grid_format, & grid_ice, grid_ice_thrm, grid_ice_dynu, grid_ice_dynv, & - grid_ocn, grid_ocn_thrm, grid_ocn_dynu, grid_ocn_dynv, & + grid_ocn, grid_ocn_thrm, grid_ocn_dynu, grid_ocn_dynv, & grid_atm, grid_atm_thrm, grid_atm_dynu, grid_atm_dynv, & dxrect, dyrect, & pgl_global_ext @@ -133,7 +140,9 @@ subroutine input_data nml_error, & ! namelist i/o error flag n ! 
loop index +#ifdef CESMCOUPLED logical :: exists +#endif real (kind=dbl_kind) :: ustar_min, albicev, albicei, albsnowv, albsnowi, & ahmax, R_ice, R_pnd, R_snw, dT_mlt, rsnw_mlt, emissivity, & @@ -156,13 +165,23 @@ subroutine input_data logical (kind=log_kind) :: tr_iage, tr_FY, tr_lvl, tr_pond logical (kind=log_kind) :: tr_iso, tr_aero, tr_fsd, tr_snow +#ifdef UNDEPRECATE_CESMPONDS logical (kind=log_kind) :: tr_pond_cesm, tr_pond_lvl, tr_pond_topo +#else + logical (kind=log_kind) :: tr_pond_lvl, tr_pond_topo +#endif integer (kind=int_kind) :: numin, numax ! unit number limits - integer (kind=int_kind) :: rpcesm, rplvl, rptopo +#ifdef UNDEPRECATE_CESMPONDS + integer (kind=int_kind) :: rpcesm, rplvl, rptopo +#else + integer (kind=int_kind) :: rplvl, rptopo +#endif real (kind=dbl_kind) :: Cf, ksno, puny character (len=char_len) :: abort_list +#ifdef CESMCOUPLED character (len=64) :: tmpstr +#endif character (len=128) :: tmpstr2 character(len=*), parameter :: subname='(input_data)' @@ -200,7 +219,9 @@ subroutine input_data tr_iage, restart_age, & tr_FY, restart_FY, & tr_lvl, restart_lvl, & +#ifdef UNDEPRECATE_CESMPONDS tr_pond_cesm, restart_pond_cesm, & +#endif tr_pond_lvl, restart_pond_lvl, & tr_pond_topo, restart_pond_topo, & tr_snow, restart_snow, & @@ -232,7 +253,7 @@ subroutine input_data k1, k2, alphab, threshold_hw, & deltaminEVP, deltaminVP, capping_method, & Cf, Pstar, Cstar, Ktens - + namelist /shortwave_nml/ & shortwave, albedo_type, & albicev, albicei, albsnowv, albsnowi, & @@ -287,11 +308,11 @@ subroutine input_data istep0 = 0 ! no. of steps taken in previous integrations, ! real (dumped) or imagined (to set calendar) #ifndef CESMCOUPLED - dt = 3600.0_dbl_kind ! time step, s + dt = 3600.0_dbl_kind ! time step, s #endif numin = 11 ! min allowed unit number numax = 99 ! max allowed unit number - npt = 99999 ! total number of time steps (dt) + npt = 99999 ! total number of time steps (dt) npt_unit = '1' ! units of npt 'y', 'm', 'd', 's', '1' diagfreq = 24 ! how often diag output is written debug_model = .false. ! debug output @@ -312,7 +333,7 @@ subroutine input_data histfreq(3) = 'd' ! output frequency option for different streams histfreq(4) = 'm' ! output frequency option for different streams histfreq(5) = 'y' ! output frequency option for different streams - histfreq_n(:) = 1 ! output frequency + histfreq_n(:) = 1 ! output frequency histfreq_base = 'zero' ! output frequency reference date hist_avg = .true. ! if true, write time-averages (not snapshots) history_format = 'default' ! history file format @@ -360,20 +381,20 @@ subroutine input_data kdyn = 1 ! type of dynamics (-1, 0 = off, 1 = evp, 2 = eap, 3 = vp) ndtd = 1 ! dynamic time steps per thermodynamic time step ndte = 120 ! subcycles per dynamics timestep: ndte=dt_dyn/dte - evp_algorithm = 'standard_2d' ! EVP kernel (=standard_2d: standard cice evp; =shared_mem_1d: 1d shared memory and no mpi. if more mpi processors then executed on master + evp_algorithm = 'standard_2d' ! EVP kernel (standard_2d=standard cice evp; shared_mem_1d=1d shared memory and no mpi elasticDamp = 0.36_dbl_kind ! coefficient for calculating the parameter E pgl_global_ext = .false. ! if true, init primary grid lengths (global ext.) brlx = 300.0_dbl_kind ! revised_evp values. Otherwise overwritten in ice_dyn_shared arlx = 300.0_dbl_kind ! revised_evp values. Otherwise overwritten in ice_dyn_shared revised_evp = .false. ! if true, use revised procedure for evp dynamics - yield_curve = 'ellipse' ! yield curve + yield_curve = 'ellipse' ! 
yield curve kstrength = 1 ! 1 = Rothrock 75 strength, 0 = Hibler 79 Pstar = 2.75e4_dbl_kind ! constant in Hibler strength formula (kstrength = 0) Cstar = 20._dbl_kind ! constant in Hibler strength formula (kstrength = 0) krdg_partic = 1 ! 1 = new participation, 0 = Thorndike et al 75 krdg_redist = 1 ! 1 = new redistribution, 0 = Hibler 80 mu_rdg = 3 ! e-folding scale of ridged ice, krdg_partic=1 (m^0.5) - Cf = 17.0_dbl_kind ! ratio of ridging work to PE change in ridging + Cf = 17.0_dbl_kind ! ratio of ridging work to PE change in ridging ksno = 0.3_dbl_kind ! snow thermal conductivity dxrect = 0.0_dbl_kind ! user defined grid spacing in cm in x direction dyrect = 0.0_dbl_kind ! user defined grid spacing in cm in y direction @@ -385,14 +406,15 @@ subroutine input_data alphab = 20.0_dbl_kind ! alphab=Cb factor in Lemieux et al 2015 threshold_hw = 30.0_dbl_kind ! max water depth for grounding Ktens = 0.0_dbl_kind ! T=Ktens*P (tensile strength: see Konig and Holland, 2010) - e_yieldcurve = 2.0_dbl_kind ! VP aspect ratio of elliptical yield curve + e_yieldcurve = 2.0_dbl_kind ! VP aspect ratio of elliptical yield curve e_plasticpot = 2.0_dbl_kind ! VP aspect ratio of elliptical plastic potential - visc_method = 'avg_strength' ! calc viscosities at U point: avg_strength, avg_zeta + visc_method = 'avg_zeta' ! calc viscosities at U point: avg_strength, avg_zeta deltaminEVP = 1e-11_dbl_kind ! minimum delta for viscosities (EVP, Hunke 2001) deltaminVP = 2e-9_dbl_kind ! minimum delta for viscosities (VP, Hibler 1979) capping_method = 'max' ! method for capping of viscosities (max=Hibler 1979,sum=Kreyscher2000) maxits_nonlin = 4 ! max nb of iteration for nonlinear solver - precond = 'pgmres' ! preconditioner for fgmres: 'ident' (identity), 'diag' (diagonal), 'pgmres' (Jacobi-preconditioned GMRES) + precond = 'pgmres' ! preconditioner for fgmres: 'ident' (identity), 'diag' (diagonal), + ! 'pgmres' (Jacobi-preconditioned GMRES) dim_fgmres = 50 ! size of fgmres Krylov subspace dim_pgmres = 5 ! size of pgmres Krylov subspace maxits_fgmres = 50 ! max nb of iteration for fgmres @@ -405,7 +427,8 @@ subroutine input_data reltol_fgmres = 1e-2_dbl_kind ! fgmres stopping criterion: reltol_fgmres*res(k) reltol_pgmres = 1e-6_dbl_kind ! pgmres stopping criterion: reltol_pgmres*res(k) algo_nonlin = 'picard' ! nonlinear algorithm: 'picard' (Picard iteration), 'anderson' (Anderson acceleration) - fpfunc_andacc = 1 ! fixed point function for Anderson acceleration: 1: g(x) = FMGRES(A(x),b(x)), 2: g(x) = x - A(x)x + b(x) + fpfunc_andacc = 1 ! fixed point function for Anderson acceleration: + ! 1: g(x) = FMGRES(A(x),b(x)), 2: g(x) = x - A(x)x + b(x) dim_andacc = 5 ! size of Anderson minimization matrix (number of saved previous residuals) reltol_andacc = 1e-6_dbl_kind ! relative tolerance for Anderson acceleration damping_andacc = 0 ! damping factor for Anderson acceleration @@ -415,7 +438,11 @@ subroutine input_data conserv_check = .false. ! tracer conservation check shortwave = 'ccsm3' ! 'ccsm3' or 'dEdd' (delta-Eddington) albedo_type = 'ccsm3' ! 'ccsm3' or 'constant' +#ifdef UNDEPRECATE_0LAYER ktherm = 1 ! -1 = OFF, 0 = 0-layer, 1 = BL99, 2 = mushy thermo +#else + ktherm = 1 ! -1 = OFF, 1 = BL99, 2 = mushy thermo +#endif conduct = 'bubbly' ! 'MU71' or 'bubbly' (Pringle et al 2007) coriolis = 'latitude' ! latitude dependent, or 'constant' ssh_stress = 'geostrophic' ! 'geostrophic' or 'coupled' @@ -442,7 +469,7 @@ subroutine input_data hp1 = 0.01_dbl_kind ! 
critical pond lid thickness for topo ponds hs0 = 0.03_dbl_kind ! snow depth for transition to bare sea ice (m) hs1 = 0.03_dbl_kind ! snow depth for transition to bare pond ice (m) - dpscale = c1 ! alter e-folding time scale for flushing + dpscale = c1 ! alter e-folding time scale for flushing frzpnd = 'cesm' ! melt pond refreezing parameterization rfracmin = 0.15_dbl_kind ! minimum retained fraction of meltwater rfracmax = 0.85_dbl_kind ! maximum retained fraction of meltwater @@ -523,10 +550,12 @@ subroutine input_data restart_age = .false. ! ice age restart tr_FY = .false. ! ice age restart_FY = .false. ! ice age restart - tr_lvl = .false. ! level ice + tr_lvl = .false. ! level ice restart_lvl = .false. ! level ice restart +#ifdef UNDEPRECATE_CESMPONDS tr_pond_cesm = .false. ! CESM melt ponds restart_pond_cesm = .false. ! melt ponds restart +#endif tr_pond_lvl = .false. ! level-ice melt ponds restart_pond_lvl = .false. ! melt ponds restart tr_pond_topo = .false. ! explicit melt ponds (topographic) @@ -795,7 +824,7 @@ subroutine input_data call broadcast_scalar(diag_file, master_task) do n = 1, max_nstrm call broadcast_scalar(histfreq(n), master_task) - enddo + enddo call broadcast_array(histfreq_n, master_task) call broadcast_scalar(histfreq_base, master_task) call broadcast_scalar(hist_avg, master_task) @@ -866,7 +895,7 @@ subroutine input_data call broadcast_scalar(Ktens, master_task) call broadcast_scalar(e_yieldcurve, master_task) call broadcast_scalar(e_plasticpot, master_task) - call broadcast_scalar(visc_method, master_task) + call broadcast_scalar(visc_method, master_task) call broadcast_scalar(deltaminEVP, master_task) call broadcast_scalar(deltaminVP, master_task) call broadcast_scalar(capping_method, master_task) @@ -992,8 +1021,10 @@ subroutine input_data call broadcast_scalar(restart_FY, master_task) call broadcast_scalar(tr_lvl, master_task) call broadcast_scalar(restart_lvl, master_task) +#ifdef UNDEPRECATE_CESMPONDS call broadcast_scalar(tr_pond_cesm, master_task) call broadcast_scalar(restart_pond_cesm, master_task) +#endif call broadcast_scalar(tr_pond_lvl, master_task) call broadcast_scalar(restart_pond_lvl, master_task) call broadcast_scalar(tr_pond_topo, master_task) @@ -1086,7 +1117,9 @@ subroutine input_data restart_age = .false. restart_fy = .false. restart_lvl = .false. +#ifdef UNDEPRECATE_CESMPONDS restart_pond_cesm = .false. +#endif restart_pond_lvl = .false. restart_pond_topo = .false. restart_snow = .false. @@ -1202,18 +1235,30 @@ subroutine input_data abort_list = trim(abort_list)//":45" endif endif - + +#ifdef UNDEPRECATE_CESMPONDS rpcesm = 0 +#endif rplvl = 0 rptopo = 0 +#ifdef UNDEPRECATE_CESMPONDS if (tr_pond_cesm) rpcesm = 1 +#endif if (tr_pond_lvl ) rplvl = 1 if (tr_pond_topo) rptopo = 1 tr_pond = .false. ! explicit melt ponds +#ifdef UNDEPRECATE_CESMPONDS if (rpcesm + rplvl + rptopo > 0) tr_pond = .true. +#else + if (rplvl + rptopo > 0) tr_pond = .true. +#endif +#ifdef UNDEPRECATE_CESMPONDS if (rpcesm + rplvl + rptopo > 1) then +#else + if (rplvl + rptopo > 1) then +#endif if (my_task == master_task) then write(nu_diag,*) subname//' ERROR: Must use only one melt pond scheme' endif @@ -1437,10 +1482,12 @@ subroutine input_data abort_list = trim(abort_list)//":16" endif +#ifdef UNDEPRECATE_CESMPONDS if (tr_pond_cesm) then if (my_task == master_task) write(nu_diag,*) subname//' ERROR: formdrag=T and frzpnd=cesm' abort_list = trim(abort_list)//":17" endif +#endif if (.not. 
tr_lvl) then if (my_task == master_task) write(nu_diag,*) subname//' ERROR: formdrag=T and tr_lvl=F' @@ -1452,7 +1499,7 @@ subroutine input_data if (my_task == master_task) write(nu_diag,*) subname//' ERROR: formdrag=F and fbot_xfer_type=Cdn_ocn' abort_list = trim(abort_list)//":19" endif - + if(history_precision .ne. 4 .and. history_precision .ne. 8) then write (nu_diag,*) subname//' ERROR: bad value for history_precision, allowed values: 4, 8' abort_list = trim(abort_list)//":22" @@ -1489,12 +1536,12 @@ subroutine input_data endif abort_list = trim(abort_list)//":60" endif - + if (trim(algo_nonlin) == 'picard') then ! Picard solver is implemented in the Anderson solver; reset number of saved residuals to zero dim_andacc = 0 endif - + if (.not. (trim(precond) == 'ident' .or. trim(precond) == 'diag' .or. trim(precond) == 'pgmres')) then if (my_task == master_task) then write(nu_diag,*) subname//' ERROR: unknown precond: '//precond @@ -1502,7 +1549,7 @@ subroutine input_data endif abort_list = trim(abort_list)//":61" endif - + if (.not. (trim(ortho_type) == 'cgs' .or. trim(ortho_type) == 'mgs')) then if (my_task == master_task) then write(nu_diag,*) subname//' ERROR: unknown ortho_type: '//ortho_type @@ -1697,7 +1744,7 @@ subroutine input_data tmpstr2 = ' : revised EVP formulation not used' endif write(nu_diag,1010) ' revised_evp = ', revised_evp,trim(tmpstr2) - + if (evp_algorithm == 'standard_2d') then tmpstr2 = ' : standard 2d EVP solver' elseif (evp_algorithm == 'shared_mem_1d') then @@ -1766,7 +1813,7 @@ subroutine input_data tmpstr2 = ' : no seabed stress parameterization' endif write(nu_diag,1010) ' seabed_stress = ', seabed_stress,trim(tmpstr2) - if (seabed_stress) then + if (seabed_stress) then write(nu_diag,1030) ' seabed method = ',trim(seabed_stress_method) if (seabed_stress_method == 'LKD') then write(nu_diag,1002) ' k1 = ', k1, ' : free parameter for landfast ice' @@ -1780,7 +1827,7 @@ subroutine input_data if (grid_ice == 'C' .or. 
grid_ice == 'CD') then write(nu_diag,1030) ' visc_method= ', trim(visc_method),' : viscosities method (U point)' endif - + write(nu_diag,1002) ' Ktens = ', Ktens, ' : tensile strength factor' if (kdyn == 3) then @@ -1860,8 +1907,10 @@ subroutine input_data tmpstr2 = ' : Bitz and Lipscomb 1999 thermo' elseif (ktherm == 2) then tmpstr2 = ' : mushy-layer thermo' +#ifdef UNDEPRECATE_0LAYER elseif (ktherm == 0) then tmpstr2 = ' : zero-layer thermo' +#endif elseif (ktherm < 0) then tmpstr2 = ' : Thermodynamics disabled' else @@ -2042,10 +2091,14 @@ subroutine input_data write(nu_diag,*) ' ' write(nu_diag,*) ' Melt ponds' write(nu_diag,*) '--------------------------------' +#ifdef UNDEPRECATE_CESMPONDS if (tr_pond_cesm) then write(nu_diag,1010) ' tr_pond_cesm = ', tr_pond_cesm,' : CESM pond formulation' write(nu_diag,1002) ' pndaspect = ', pndaspect,' : ratio of pond depth to area fraction' elseif (tr_pond_lvl) then +#else + if (tr_pond_lvl) then +#endif write(nu_diag,1010) ' tr_pond_lvl = ', tr_pond_lvl,' : level-ice pond formulation' write(nu_diag,1002) ' pndaspect = ', pndaspect,' : ratio of pond depth to area fraction' write(nu_diag,1000) ' dpscale = ', dpscale,' : time scale for flushing in permeable ice' @@ -2158,7 +2211,9 @@ subroutine input_data if (tr_lvl) write(nu_diag,1010) ' tr_lvl = ', tr_lvl,' : ridging related tracers' if (tr_pond_lvl) write(nu_diag,1010) ' tr_pond_lvl = ', tr_pond_lvl,' : level-ice pond formulation' if (tr_pond_topo) write(nu_diag,1010) ' tr_pond_topo = ', tr_pond_topo,' : topo pond formulation' +#ifdef UNDEPRECATE_CESMPONDS if (tr_pond_cesm) write(nu_diag,1010) ' tr_pond_cesm = ', tr_pond_cesm,' : CESM pond formulation' +#endif if (tr_snow) write(nu_diag,1010) ' tr_snow = ', tr_snow,' : advanced snow physics' if (tr_iage) write(nu_diag,1010) ' tr_iage = ', tr_iage,' : chronological ice age' if (tr_FY) write(nu_diag,1010) ' tr_FY = ', tr_FY,' : first-year ice area' @@ -2267,11 +2322,11 @@ subroutine input_data trim(ocn_data_type) /= 'default') then write(nu_diag,1031) ' ocn_data_dir = ', trim(ocn_data_dir) write(nu_diag,1011) ' restore_ocn = ', restore_ocn - endif + endif write(nu_diag,1011) ' restore_ice = ', restore_ice if (restore_ice .or. restore_ocn) & write(nu_diag,1021) ' trestore = ', trestore - + write(nu_diag,*) ' ' write(nu_diag,'(a31,2f8.2)') 'Diagnostic point 1: lat, lon =', & latpnt(1), lonpnt(1) @@ -2283,7 +2338,9 @@ subroutine input_data write(nu_diag,1011) ' restart_age = ', restart_age write(nu_diag,1011) ' restart_FY = ', restart_FY write(nu_diag,1011) ' restart_lvl = ', restart_lvl +#ifdef UNDEPRECATE_CESMPONDS write(nu_diag,1011) ' restart_pond_cesm= ', restart_pond_cesm +#endif write(nu_diag,1011) ' restart_pond_lvl = ', restart_pond_lvl write(nu_diag,1011) ' restart_pond_topo= ', restart_pond_topo write(nu_diag,1011) ' restart_snow = ', restart_snow @@ -2341,9 +2398,9 @@ subroutine input_data if (kdyn == 1 .and. & evp_algorithm /= 'standard_2d' .and. 
& - evp_algorithm /= 'shared_mem_1d') then + evp_algorithm /= 'shared_mem_1d') then if (my_task == master_task) write(nu_diag,*) subname//' ERROR: unknown evp_algorithm=',trim(evp_algorithm) - abort_list = trim(abort_list)//":21" + abort_list = trim(abort_list)//":21" endif if (abort_list /= "") then @@ -2382,7 +2439,11 @@ subroutine input_data call icepack_init_tracer_flags(tr_iage_in=tr_iage, tr_FY_in=tr_FY, & tr_lvl_in=tr_lvl, tr_iso_in=tr_iso, tr_aero_in=tr_aero, & tr_fsd_in=tr_fsd, tr_snow_in=tr_snow, tr_pond_in=tr_pond, & +#ifdef UNDEPRECATE_CESMPONDS tr_pond_cesm_in=tr_pond_cesm, tr_pond_lvl_in=tr_pond_lvl, tr_pond_topo_in=tr_pond_topo) +#else + tr_pond_lvl_in=tr_pond_lvl, tr_pond_topo_in=tr_pond_topo) +#endif call icepack_init_tracer_sizes(ncat_in=ncat, nilyr_in=nilyr, nslyr_in=nslyr, nblyr_in=nblyr, & nfsd_in=nfsd, n_algae_in=n_algae, n_iso_in=n_iso, n_aero_in=n_aero, & n_DOC_in=n_DOC, n_DON_in=n_DON, & @@ -2439,12 +2500,18 @@ subroutine init_state it , & ! tracer index iblk ! block index +#ifdef UNDEPRECATE_0LAYER logical (kind=log_kind) :: & heat_capacity ! from icepack +#endif integer (kind=int_kind) :: ntrcr logical (kind=log_kind) :: tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero +#ifdef UNDEPRECATE_CESMPONDS logical (kind=log_kind) :: tr_pond_cesm, tr_pond_lvl, tr_pond_topo +#else + logical (kind=log_kind) :: tr_pond_lvl, tr_pond_topo +#endif logical (kind=log_kind) :: tr_snow, tr_fsd integer (kind=int_kind) :: nt_Tsfc, nt_sice, nt_qice, nt_qsno, nt_iage, nt_FY integer (kind=int_kind) :: nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd @@ -2458,11 +2525,17 @@ subroutine init_state !----------------------------------------------------------------- +#ifdef UNDEPRECATE_0LAYER call icepack_query_parameters(heat_capacity_out=heat_capacity) +#endif call icepack_query_tracer_sizes(ntrcr_out=ntrcr) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & tr_lvl_out=tr_lvl, tr_iso_out=tr_iso, tr_aero_out=tr_aero, & +#ifdef UNDEPRECATE_CESMPONDS tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, tr_pond_topo_out=tr_pond_topo, & +#else + tr_pond_lvl_out=tr_pond_lvl, tr_pond_topo_out=tr_pond_topo, & +#endif tr_snow_out=tr_snow, tr_fsd_out=tr_fsd) call icepack_query_tracer_indices(nt_Tsfc_out=nt_Tsfc, nt_sice_out=nt_sice, & nt_qice_out=nt_qice, nt_qsno_out=nt_qsno, nt_iage_out=nt_iage, nt_fy_out=nt_fy, & @@ -2481,7 +2554,7 @@ subroutine init_state !----------------------------------------------------------------- if (my_task == master_task) then - + if (nilyr < 1) then write(nu_diag,*) subname//' ERROR: Must have at least one ice layer' write(nu_diag,*) subname//' ERROR: nilyr =', nilyr @@ -2496,6 +2569,7 @@ subroutine init_state file=__FILE__, line=__LINE__) endif +#ifdef UNDEPRECATE_0LAYER if (.not.heat_capacity) then if (nilyr > 1) then @@ -2513,7 +2587,7 @@ subroutine init_state endif endif ! heat_capacity = F - +#endif endif ! my_task !----------------------------------------------------------------- @@ -2532,10 +2606,12 @@ subroutine init_state if (tr_FY) trcr_depend(nt_FY) = 0 ! area-weighted first-year ice area if (tr_lvl) trcr_depend(nt_alvl) = 0 ! level ice area if (tr_lvl) trcr_depend(nt_vlvl) = 1 ! level ice volume +#ifdef UNDEPRECATE_CESMPONDS if (tr_pond_cesm) then trcr_depend(nt_apnd) = 0 ! melt pond area trcr_depend(nt_hpnd) = 2+nt_apnd ! melt pond depth endif +#endif if (tr_pond_lvl) then trcr_depend(nt_apnd) = 2+nt_alvl ! melt pond area trcr_depend(nt_hpnd) = 2+nt_apnd ! melt pond depth @@ -2579,11 +2655,11 @@ subroutine init_state do it = 1, ntrcr ! 
mask for base quantity on which tracers are carried if (trcr_depend(it) == 0) then ! area - trcr_base(it,1) = c1 + trcr_base(it,1) = c1 elseif (trcr_depend(it) == 1) then ! ice volume - trcr_base(it,2) = c1 + trcr_base(it,2) = c1 elseif (trcr_depend(it) == 2) then ! snow volume - trcr_base(it,3) = c1 + trcr_base(it,3) = c1 else trcr_base(it,1) = c1 ! default: ice area trcr_base(it,2) = c0 @@ -2597,10 +2673,12 @@ subroutine init_state nt_strata (it,2) = 0 enddo +#ifdef UNDEPRECATE_CESMPONDS if (tr_pond_cesm) then n_trcr_strata(nt_hpnd) = 1 ! melt pond depth nt_strata (nt_hpnd,1) = nt_apnd ! on melt pond area endif +#endif if (tr_pond_lvl) then n_trcr_strata(nt_apnd) = 1 ! melt pond area nt_strata (nt_apnd,1) = nt_alvl ! on level ice area @@ -2626,7 +2704,7 @@ subroutine init_state !$OMP iglob,jglob) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -2664,13 +2742,13 @@ subroutine init_state call grid_average_X2Y('S',vvel,'U',vvelN,'N') call grid_average_X2Y('S',uvel,'U',uvelE,'E') call grid_average_X2Y('S',vvel,'U',vvelE,'E') - + ! Halo update on North, East faces call ice_HaloUpdate(uvelN, halo_info, & field_loc_Nface, field_type_scalar) call ice_HaloUpdate(vvelN, halo_info, & field_loc_Nface, field_type_scalar) - + call ice_HaloUpdate(uvelE, halo_info, & field_loc_Eface, field_type_scalar) call ice_HaloUpdate(vvelE, halo_info, & @@ -2749,7 +2827,7 @@ subroutine set_state_var (nx_block, ny_block, & use ice_arrays_column, only: hin_max use ice_domain_size, only: nilyr, nslyr, nx_global, ny_global, ncat - use ice_grid, only: grid_type, dxrect, dyrect + use ice_grid, only: dxrect, dyrect use ice_forcing, only: ice_data_type, ice_data_conc, ice_data_dist integer (kind=int_kind), intent(in) :: & @@ -2759,7 +2837,7 @@ subroutine set_state_var (nx_block, ny_block, & iglob(nx_block) , & ! global indices jglob(ny_block) ! - character(len=char_len_long), intent(in) :: & + character(len=char_len_long), intent(in) :: & ice_ic ! method of ice cover initialization logical (kind=log_kind), dimension (nx_block,ny_block), intent(in) :: & @@ -2771,8 +2849,8 @@ subroutine set_state_var (nx_block, ny_block, & real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & Tair , & ! air temperature (K) - Tf , & ! freezing temperature (C) - sst ! sea surface temperature (C) + Tf , & ! freezing temperature (C) + sst ! sea surface temperature (C) real (kind=dbl_kind), dimension (nx_block,ny_block,nilyr), intent(in) :: & salinz , & ! initial salinity profile @@ -2789,7 +2867,7 @@ subroutine set_state_var (nx_block, ny_block, & real (kind=dbl_kind), dimension (nx_block,ny_block), intent(out) :: & uvel , & ! ice velocity B grid - vvel ! + vvel ! ! local variables integer (kind=int_kind) :: & @@ -2830,9 +2908,16 @@ subroutine set_state_var (nx_block, ny_block, & real (kind=dbl_kind), parameter :: & hsno_init = 0.20_dbl_kind , & ! initial snow thickness (m) - edge_init_nh = 70._dbl_kind, & ! initial ice edge, N.Hem. (deg) + edge_init_nh = 70._dbl_kind, & ! initial ice edge, N.Hem. (deg) edge_init_sh = -60._dbl_kind ! initial ice edge, S.Hem. (deg) + real (kind=dbl_kind) :: & ! boxslotcyl + pi , & ! pi + secday , & ! seconds per day + max_vel , & ! max velocity + domain_length , & ! physical domain length + period ! 
rotational period + logical (kind=log_kind) :: tr_brine, tr_lvl, tr_snow integer (kind=int_kind) :: ntrcr integer (kind=int_kind) :: nt_Tsfc, nt_qice, nt_qsno, nt_sice @@ -2853,6 +2938,7 @@ subroutine set_state_var (nx_block, ny_block, & nt_rhos_out=nt_rhos, nt_rsnw_out=nt_rsnw) call icepack_query_parameters(rhos_out=rhos, Lfresh_out=Lfresh, puny_out=puny, & rad_to_deg_out=rad_to_deg, rsnw_fall_out=rsnw_fall) + call icepack_query_parameters(secday_out=secday, pi_out=pi) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -2870,7 +2956,7 @@ subroutine set_state_var (nx_block, ny_block, & vicen(i,j,n) = c0 vsnon(i,j,n) = c0 if (tmask(i,j)) then - trcrn(i,j,nt_Tsfc,n) = Tf(i,j) ! surface temperature + trcrn(i,j,nt_Tsfc,n) = Tf(i,j) ! surface temperature else trcrn(i,j,nt_Tsfc,n) = c0 ! at land grid cells (for clean history/restart files) endif @@ -2942,9 +3028,9 @@ subroutine set_state_var (nx_block, ny_block, & ! initial category areas in cells with ice hbar = c3 ! initial ice thickness with greatest area - ! Note: the resulting average ice thickness + ! Note: the resulting average ice thickness ! tends to be less than hbar due to the - ! nonlinear distribution of ice thicknesses + ! nonlinear distribution of ice thicknesses sum = c0 do n = 1, ncat if (n < ncat) then @@ -3003,7 +3089,7 @@ subroutine set_state_var (nx_block, ny_block, & if (tmask(i,j)) then ! check if grid point is inside slotted cylinder in_slot = (dxrect*real(iglob(i)-1, kind=dbl_kind) >= center_x - width/c2) .and. & - (dxrect*real(iglob(i)-1, kind=dbl_kind) <= center_x + width/c2) .and. & + (dxrect*real(iglob(i)-1, kind=dbl_kind) <= center_x + width/c2) .and. & (dyrect*real(jglob(j)-1, kind=dbl_kind) >= center_y - radius) .and. & (dyrect*real(jglob(j)-1, kind=dbl_kind) <= center_y + (length - radius)) @@ -3174,7 +3260,7 @@ subroutine set_state_var (nx_block, ny_block, & ! surface temperature trcrn(i,j,nt_Tsfc,n) = Tsfc ! deg C - ! ice enthalpy, salinity + ! ice enthalpy, salinity do k = 1, nilyr trcrn(i,j,nt_qice+k-1,n) = qin(k) trcrn(i,j,nt_sice+k-1,n) = salinz(i,j,k) @@ -3188,19 +3274,23 @@ subroutine set_state_var (nx_block, ny_block, & enddo ! ij enddo ! ncat - + !--------------------------------------------------------- ! ice velocity ! these velocities are defined on B-grid !--------------------------------------------------------- if (trim(ice_data_type) == 'boxslotcyl') then + domain_length = dxrect*cm_to_m*nx_global + period = c12*secday ! 12 days rotational period + max_vel = pi*domain_length/period do j = 1, ny_block do i = 1, nx_block - call boxslotcyl_data_vel(i, j, & - nx_block, ny_block, & - iglob, jglob, & - uvel, vvel) + + uvel(i,j) = c2*max_vel*(real(jglob(j), kind=dbl_kind) - p5) & + / real(ny_global - 1, kind=dbl_kind) - max_vel + vvel(i,j) = -c2*max_vel*(real(iglob(i), kind=dbl_kind) - p5) & + / real(nx_global - 1, kind=dbl_kind) + max_vel enddo ! i enddo ! j else @@ -3216,56 +3306,6 @@ subroutine set_state_var (nx_block, ny_block, & end subroutine set_state_var -!======================================================================= - -! Set ice velocity for slotted cylinder advection test -! -! author: Philippe Blain (ECCC) - - subroutine boxslotcyl_data_vel(i, j, & - nx_block, ny_block, & - iglob, jglob, & - uvel, vvel) - - use ice_constants, only: c2, c12, p5, cm_to_m - use ice_domain_size, only: nx_global, ny_global - use ice_grid, only: dxrect - - integer (kind=int_kind), intent(in) :: & - i, j, & !
local indices - nx_block, ny_block, & ! block dimensions - iglob(nx_block), & ! global indices - jglob(ny_block) - - real (kind=dbl_kind), dimension (nx_block,ny_block), intent(out) :: & - uvel, vvel ! ice velocity - - ! local variables - real (kind=dbl_kind) :: & - pi , & ! pi - secday , & ! seconds per day - max_vel , & ! max velocity - domain_length , & ! physical domain length - period ! rotational period - - character(len=*), parameter :: subname = '(boxslotcyl_data_vel)' - - call icepack_query_parameters(secday_out=secday, pi_out=pi) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - - domain_length = dxrect*cm_to_m*nx_global - period = c12*secday ! 12 days rotational period - max_vel = pi*domain_length/period - - uvel(i,j) = c2*max_vel*(real(jglob(j), kind=dbl_kind) - p5) & - / real(ny_global - 1, kind=dbl_kind) - max_vel - vvel(i,j) = -c2*max_vel*(real(iglob(i), kind=dbl_kind) - p5) & - / real(nx_global - 1, kind=dbl_kind) + max_vel - - end subroutine boxslotcyl_data_vel - !======================================================================= end module ice_init diff --git a/cicecore/cicedynB/general/ice_state.F90 b/cicecore/cicedynB/general/ice_state.F90 index e07eca209..4a3f83ce2 100644 --- a/cicecore/cicedynB/general/ice_state.F90 +++ b/cicecore/cicedynB/general/ice_state.F90 @@ -10,7 +10,7 @@ ! aicen(i,j,n) aice(i,j) --- ! vicen(i,j,n) vice(i,j) m ! vsnon(i,j,n) vsno(i,j) m -! trcrn(i,j,it,n) trcr(i,j,it) +! trcrn(i,j,it,n) trcr(i,j,it) ! ! Area is dimensionless because aice is the fractional area ! (normalized so that the sum over all categories, including open @@ -118,7 +118,7 @@ module ice_state strength ! ice strength (N/m) !----------------------------------------------------------------- - ! ice state at start of time step, saved for later in the step + ! ice state at start of time step, saved for later in the step !----------------------------------------------------------------- real (kind=dbl_kind), dimension(:,:,:), allocatable, & @@ -129,7 +129,7 @@ module ice_state dimension(:,:,:,:), allocatable, public :: & aicen_init , & ! initial ice concentration, for linear ITD vicen_init , & ! initial ice volume (m), for linear ITD - vsnon_init ! initial snow volume (m), for aerosol + vsnon_init ! initial snow volume (m), for aerosol !======================================================================= @@ -137,7 +137,7 @@ module ice_state !======================================================================= ! -! Allocate space for all state variables +! Allocate space for all state variables ! subroutine alloc_state integer (int_kind) :: ntrcr, ierr @@ -168,7 +168,7 @@ subroutine alloc_state vsnon (nx_block,ny_block,ncat,max_blocks) , & ! volume per unit area of snow (m) aicen_init(nx_block,ny_block,ncat,max_blocks) , & ! initial ice concentration, for linear ITD vicen_init(nx_block,ny_block,ncat,max_blocks) , & ! initial ice volume (m), for linear ITD - vsnon_init(nx_block,ny_block,ncat,max_blocks) , & ! initial snow volume (m), for aerosol + vsnon_init(nx_block,ny_block,ncat,max_blocks) , & ! initial snow volume (m), for aerosol trcr (nx_block,ny_block,ntrcr,max_blocks) , & ! ice tracers: 1: surface temperature of ice/snow (C) trcrn (nx_block,ny_block,ntrcr,ncat,max_blocks) , & ! tracers: 1: surface temperature of ice/snow (C) stat=ierr) @@ -181,11 +181,13 @@ subroutine alloc_state trcr_base(ntrcr,3) , & ! 
= 0 or 1 depending on tracer dependency, (1) aice, (2) vice, (3) vsno stat=ierr) if (ierr/=0) call abort_ice('(alloc_state): Out of memory2') - + trcr_depend = 0 n_trcr_strata = 0 nt_strata = 0 trcr_base = c0 + aicen = c0 + aicen_init = c0 end subroutine alloc_state diff --git a/cicecore/cicedynB/general/ice_step_mod.F90 b/cicecore/cicedynB/general/ice_step_mod.F90 index 3b0201cbf..b6f8741c0 100644 --- a/cicecore/cicedynB/general/ice_step_mod.F90 +++ b/cicecore/cicedynB/general/ice_step_mod.F90 @@ -128,7 +128,7 @@ subroutine prep_radiation (iblk) alidr_init(:,:,iblk) = c0 alidf_init(:,:,iblk) = c0 - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -188,7 +188,10 @@ subroutine step_therm1 (dt, iblk) hfreebd, hdraft, hridge, distrdg, hkeel, dkeel, lfloe, dfloe, & fswsfcn, fswintn, Sswabsn, Iswabsn, meltsliqn, meltsliq, & fswthrun, fswthrun_vdr, fswthrun_vdf, fswthrun_idr, fswthrun_idf - use ice_blocks, only: block, get_block, nx_block, ny_block + use ice_blocks, only: block, get_block +#ifdef CICE_IN_NEMO + use ice_blocks, only: nx_block, ny_block +#endif use ice_calendar, only: yday use ice_domain, only: blocks_ice use ice_domain_size, only: ncat, nilyr, nslyr, n_iso, n_aero @@ -205,13 +208,16 @@ subroutine step_therm1 (dt, iblk) use ice_flux_bgc, only: dsnown, faero_atm, faero_ocn, fiso_atm, fiso_ocn, & Qa_iso, Qref_iso, fiso_evap, HDO_ocn, H2_16O_ocn, H2_18O_ocn use ice_grid, only: lmask_n, lmask_s, tmask - use ice_state, only: aice, aicen, aice_init, aicen_init, vicen_init, & + use ice_state, only: aice, aicen, aicen_init, vicen_init, & vice, vicen, vsno, vsnon, trcrn, uvel, vvel, vsnon_init +#ifdef CICE_IN_NEMO + use ice_state, only: aice_init +#endif #ifdef CESMCOUPLED use ice_prescribed_mod, only: prescribed_ice #else - logical (kind=log_kind) :: & + logical (kind=log_kind) :: & prescribed_ice ! if .true., use prescribed ice instead of computed #endif real (kind=dbl_kind), intent(in) :: & @@ -222,7 +228,7 @@ subroutine step_therm1 (dt, iblk) ! local variables #ifdef CICE_IN_NEMO - real (kind=dbl_kind) :: & + real (kind=dbl_kind) :: & raice ! reciprocal of ice concentration #endif integer (kind=int_kind) :: & @@ -237,7 +243,11 @@ subroutine step_therm1 (dt, iblk) nt_isosno, nt_isoice, nt_rsnw, nt_smice, nt_smliq logical (kind=log_kind) :: & +#ifdef UNDEPRECATE_CESMPONDS tr_iage, tr_FY, tr_iso, tr_aero, tr_pond, tr_pond_cesm, & +#else + tr_iage, tr_FY, tr_iso, tr_aero, tr_pond, & +#endif tr_pond_lvl, tr_pond_topo, calc_Tsfc, highfreq, tr_snow real (kind=dbl_kind) :: & @@ -265,7 +275,11 @@ subroutine step_therm1 (dt, iblk) call icepack_query_tracer_sizes(ntrcr_out=ntrcr) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_iso_out=tr_iso, & +#ifdef UNDEPRECATE_CESMPONDS tr_aero_out=tr_aero, tr_pond_out=tr_pond, tr_pond_cesm_out=tr_pond_cesm, & +#else + tr_aero_out=tr_aero, tr_pond_out=tr_pond, & +#endif tr_pond_lvl_out=tr_pond_lvl, tr_pond_topo_out=tr_pond_topo, & tr_snow_out=tr_snow) call icepack_query_tracer_indices( & @@ -313,12 +327,12 @@ subroutine step_therm1 (dt, iblk) enddo ! 
j #endif - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi - + do j = jlo, jhi do i = ilo, ihi @@ -380,16 +394,16 @@ subroutine step_therm1 (dt, iblk) uvel = uvel_center , & vvel = vvel_center , & Tsfc = trcrn (i,j,nt_Tsfc,:,iblk), & - zqsn = trcrn (i,j,nt_qsno:nt_qsno+nslyr-1,:,iblk), & - zqin = trcrn (i,j,nt_qice:nt_qice+nilyr-1,:,iblk), & - zSin = trcrn (i,j,nt_sice:nt_sice+nilyr-1,:,iblk), & - alvl = trcrn (i,j,nt_alvl,:,iblk), & - vlvl = trcrn (i,j,nt_vlvl,:,iblk), & - apnd = trcrn (i,j,nt_apnd,:,iblk), & - hpnd = trcrn (i,j,nt_hpnd,:,iblk), & - ipnd = trcrn (i,j,nt_ipnd,:,iblk), & + zqsn = trcrn (i,j,nt_qsno:nt_qsno+nslyr-1,:,iblk), & + zqin = trcrn (i,j,nt_qice:nt_qice+nilyr-1,:,iblk), & + zSin = trcrn (i,j,nt_sice:nt_sice+nilyr-1,:,iblk), & + alvl = trcrn (i,j,nt_alvl,:,iblk), & + vlvl = trcrn (i,j,nt_vlvl,:,iblk), & + apnd = trcrn (i,j,nt_apnd,:,iblk), & + hpnd = trcrn (i,j,nt_hpnd,:,iblk), & + ipnd = trcrn (i,j,nt_ipnd,:,iblk), & iage = trcrn (i,j,nt_iage,:,iblk), & - FY = trcrn (i,j,nt_FY ,:,iblk), & + FY = trcrn (i,j,nt_FY ,:,iblk), & rsnwn = rsnwn (:,:), & smicen = smicen (:,:), & smliqn = smliqn (:,:), & @@ -593,7 +607,7 @@ subroutine step_therm2 (dt, iblk) use ice_blocks, only: block, get_block use ice_calendar, only: yday use ice_domain, only: blocks_ice - use ice_domain_size, only: ncat, nilyr, nslyr, n_aero, nblyr, nfsd + use ice_domain_size, only: ncat, nilyr, nslyr, nblyr, nfsd use ice_flux, only: fresh, frain, fpond, frzmlt, frazil, frz_onset, & update_ocn_f, fsalt, Tf, sss, salinz, fhocn, rside, fside, & meltl, frazil_diag @@ -643,7 +657,7 @@ subroutine step_therm2 (dt, iblk) nltrcr = 0 endif - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -660,7 +674,7 @@ subroutine step_therm2 (dt, iblk) call icepack_step_therm2(dt=dt, ncat=ncat, & nltrcr=nltrcr, nilyr=nilyr, nslyr=nslyr, nblyr=nblyr, & - hin_max = hin_max (:), & + hin_max = hin_max (:), & aicen = aicen (i,j,:,iblk), & vicen = vicen (i,j,:,iblk), & vsnon = vsnon (i,j,:,iblk), & @@ -752,8 +766,8 @@ subroutine update_state (dt, daidt, dvidt, dagedt, offset) real (kind=dbl_kind), intent(in), optional :: & offset ! d(age)/dt time offset = dt for thermo, 0 for dyn - integer (kind=int_kind) :: & - iblk, & ! block index + integer (kind=int_kind) :: & + iblk, & ! block index i,j, & ! horizontal indices ntrcr, & ! nt_iage ! @@ -787,9 +801,9 @@ subroutine update_state (dt, daidt, dvidt, dagedt, offset) do i = 1, nx_block !----------------------------------------------------------------- - ! Aggregate the updated state variables (includes ghost cells). - !----------------------------------------------------------------- - + ! Aggregate the updated state variables (includes ghost cells). + !----------------------------------------------------------------- + ! if (tmask(i,j,iblk)) & call icepack_aggregate(ncat = ncat, & aicen = aicen(i,j,:,iblk), & @@ -848,7 +862,7 @@ end subroutine update_state subroutine step_dyn_wave (dt) - use ice_arrays_column, only: wave_spectrum, wave_sig_ht, & + use ice_arrays_column, only: wave_spectrum, & d_afsd_wave, floe_rad_l, floe_rad_c, wavefreq, dwavefreq use ice_blocks, only: block, get_block use ice_domain, only: blocks_ice, nblocks @@ -868,9 +882,7 @@ subroutine step_dyn_wave (dt) integer (kind=int_kind) :: & ilo,ihi,jlo,jhi, & ! 
beginning and end of physical domain iblk, & ! block index - i, j, & ! horizontal indices - ntrcr, & ! - nbtrcr ! + i, j ! horizontal indices character (len=char_len) :: wave_spec_type @@ -992,14 +1004,14 @@ subroutine step_dyn_ridge (dt, ndtd, iblk) integer (kind=int_kind), intent(in) :: & ndtd, & ! number of dynamics subcycles - iblk ! block index + iblk ! block index ! local variables type (block) :: & this_block ! block information for current block - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & ilo,ihi,jlo,jhi, & ! beginning and end of physical domain i, j, & ! horizontal indices ntrcr, & ! @@ -1119,9 +1131,7 @@ subroutine step_snow (dt, iblk) integer (kind=int_kind) :: & ilo,ihi,jlo,jhi, & ! beginning and end of physical domain i, j, & ! horizontal indices - n, & ! category index - ns, & ! history streams index - ipoint ! index for print diagnostic + ns ! history streams index real (kind=dbl_kind) :: & puny @@ -1134,7 +1144,7 @@ subroutine step_snow (dt, iblk) type (block) :: & this_block ! block information for current block - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -1174,7 +1184,7 @@ subroutine step_snow (dt, iblk) trcrn(i,j,nt_qsno:nt_qsno+nslyr-1,:,iblk), & trcrn(i,j,nt_alvl,:,iblk), & trcrn(i,j,nt_vlvl,:,iblk), & - trcrn(i,j,nt_smice:nt_smice+nslyr-1,:,iblk), & + trcrn(i,j,nt_smice:nt_smice+nslyr-1,:,iblk), & trcrn(i,j,nt_smliq:nt_smliq+nslyr-1,:,iblk), & trcrn(i,j,nt_rsnw:nt_rsnw+nslyr-1,:,iblk), & trcrn(i,j,nt_rhos:nt_rhos+nslyr-1,:,iblk), & @@ -1289,7 +1299,7 @@ subroutine step_radiation (dt, iblk) allocate(ztrcr_sw(nbtrcr_sw,ncat)) allocate(rsnow(nslyr,ncat)) - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -1369,7 +1379,7 @@ subroutine step_radiation (dt, iblk) dhsn =dhsn (i,j,: ,iblk), ffracn =ffracn(i,j,:,iblk), & rsnow =rsnow (:,:), l_print_point=l_print_point) endif - + if (dEdd_algae .and. (tr_zaero .or. tr_bgc_N)) then do n = 1, ncat do k = 1, nbtrcr_sw @@ -1487,24 +1497,24 @@ subroutine ocean_mixed_layer (dt, iblk) j = indxj(ij) call icepack_atm_boundary(sfctype = 'ocn', & - Tsf = sst (i,j,iblk), & + Tsf = sst (i,j,iblk), & potT = potT (i,j,iblk), & uatm = uatmT (i,j,iblk), & vatm = vatmT (i,j,iblk), & - wind = wind (i,j,iblk), & - zlvl = zlvl (i,j,iblk), & - Qa = Qa (i,j,iblk), & + wind = wind (i,j,iblk), & + zlvl = zlvl (i,j,iblk), & + Qa = Qa (i,j,iblk), & rhoa = rhoa (i,j,iblk), & - strx = strairx_ocn(i,j,iblk), & - stry = strairy_ocn(i,j,iblk), & - Tref = Tref_ocn (i,j,iblk), & - Qref = Qref_ocn (i,j,iblk), & - delt = delt (i,j), & + strx = strairx_ocn(i,j,iblk), & + stry = strairy_ocn(i,j,iblk), & + Tref = Tref_ocn (i,j,iblk), & + Qref = Qref_ocn (i,j,iblk), & + delt = delt (i,j), & delq = delq (i,j), & lhcoef = lhcoef (i,j), & shcoef = shcoef (i,j), & - Cdn_atm = Cdn_atm (i,j,iblk), & - Cdn_atm_ratio_n = Cdn_atm_ratio(i,j,iblk)) + Cdn_atm = Cdn_atm (i,j,iblk), & + Cdn_atm_ratio_n = Cdn_atm_ratio(i,j,iblk)) enddo ! 
ij call icepack_warnings_flush(nu_diag) @@ -1568,10 +1578,10 @@ subroutine biogeochemistry (dt, iblk) n_doc, n_dic, n_don, n_fed, n_fep use ice_flux, only: meltbn, melttn, congeln, snoicen, & sst, sss, fsnow, meltsn - use ice_flux_bgc, only: hin_old, flux_bio, flux_bio_atm, faero_atm, & + use ice_flux_bgc, only: hin_old, flux_bio, flux_bio_atm, faero_atm, & nit, amm, sil, dmsp, dms, algalN, doc, don, dic, fed, fep, zaeros, hum use ice_state, only: aicen_init, vicen_init, aicen, vicen, vsnon, & - trcrn, vsnon_init, aice0 + trcrn, vsnon_init, aice0 use ice_timers, only: timer_bgc, ice_timer_start, ice_timer_stop real (kind=dbl_kind), intent(in) :: & @@ -1618,7 +1628,7 @@ subroutine biogeochemistry (dt, iblk) call ice_timer_start(timer_bgc,iblk) ! biogeochemistry - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -1626,7 +1636,7 @@ subroutine biogeochemistry (dt, iblk) ! Define ocean concentrations for tracers used in simulation do j = jlo, jhi - do i = ilo, ihi + do i = ilo, ihi call icepack_load_ocean_bio_array(max_nbtrcr = icepack_max_nbtrcr, & max_algae = icepack_max_algae, max_don = icepack_max_don, & @@ -1642,8 +1652,8 @@ subroutine biogeochemistry (dt, iblk) ocean_bio_all = ocean_bio_all(i,j,:,iblk)) do mm = 1,nbtrcr - ocean_bio(i,j,mm,iblk) = ocean_bio_all(i,j,bio_index_o(mm),iblk) - enddo ! mm + ocean_bio(i,j,mm,iblk) = ocean_bio_all(i,j,bio_index_o(mm),iblk) + enddo ! mm if (tr_zaero) then do mm = 1, n_zaero ! update aerosols flux_bio_atm(i,j,nlt_zaero(mm),iblk) = faero_atm(i,j,mm,iblk) @@ -1678,13 +1688,13 @@ subroutine biogeochemistry (dt, iblk) snow_bio_net = snow_bio_net(i,j,1:nbtrcr, iblk), & fswthrun = fswthrun (i,j,:, iblk), & sice_rho = sice_rho (i,j,:, iblk), & - fzsal = fzsal (i,j, iblk), & + fzsal = fzsal (i,j, iblk), & fzsal_g = fzsal_g (i,j, iblk), & meltbn = meltbn (i,j,:, iblk), & melttn = melttn (i,j,:, iblk), & congeln = congeln (i,j,:, iblk), & - snoicen = snoicen (i,j,:, iblk), & - sst = sst (i,j, iblk), & + snoicen = snoicen (i,j,:, iblk), & + sst = sst (i,j, iblk), & sss = sss (i,j, iblk), & fsnow = fsnow (i,j, iblk), & meltsn = meltsn (i,j,:, iblk), & diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90 b/cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90 index 76a7659a6..2b64f8932 100644 --- a/cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90 +++ b/cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90 @@ -7,8 +7,8 @@ module ice_boundary ! ! 2007-07-19: Phil Jones, Yoshi Yoshida, John Dennis ! new naming conventions, optimizations during -! initialization, true multi-dimensional updates -! (rather than serial call to two-dimensional updates), +! initialization, true multi-dimensional updates +! (rather than serial call to two-dimensional updates), ! fixes for non-existent blocks ! 2008-01-28: Elizabeth Hunke replaced old routines with new POP ! infrastructure @@ -98,7 +98,7 @@ module ice_boundary !----------------------------------------------------------------------- ! ! to prevent frequent allocate-deallocate for 2d halo updates, create -! a static 2d buffer to be allocated once at creation. if future +! a static 2d buffer to be allocated once at creation. if future ! creation needs larger buffer, resize during the creation. ! !----------------------------------------------------------------------- @@ -177,9 +177,9 @@ function ice_HaloCreate(dist, nsBoundaryType, ewBoundaryType, & seBlock, swBlock, &! 
block id southeast, southwest nbrs srcProc, dstProc, &! source, dest processor locations srcLocalID, dstLocalID, &! local block index of src,dst blocks - maxTmp, &! temp for global maxval - blockSizeX, &! size of default physical domain in X - blockSizeY, &! size of default physical domain in Y + maxTmp, &! temp for global maxval + blockSizeX, &! size of default physical domain in X + blockSizeY, &! size of default physical domain in Y maxSizeSend, maxSizeRecv, &! max buffer sizes numMsgSend, numMsgRecv, &! number of messages for this halo eastMsgSize, westMsgSize, &! nominal sizes for e-w msgs @@ -305,7 +305,7 @@ function ice_HaloCreate(dist, nsBoundaryType, ewBoundaryType, & srcProc, dstProc, msgSize) !*** if a tripole boundary block, also create a local - !*** message into and out of tripole buffer + !*** message into and out of tripole buffer if (tripoleBlock) then !*** copy out of tripole buffer - includes halo @@ -316,7 +316,7 @@ function ice_HaloCreate(dist, nsBoundaryType, ewBoundaryType, & !*** copy in only required if dstProc not same as srcProc if (dstProc /= srcProc) then call ice_HaloIncrementMsgCount(sendCount, recvCount, & - srcProc, srcProc, & + srcProc, srcProc, & msgSize) endif endif @@ -393,7 +393,7 @@ function ice_HaloCreate(dist, nsBoundaryType, ewBoundaryType, & ewBoundaryType, nsBoundaryType) if (neBlock > 0) then - msgSize = cornerMsgSize ! normal corner message + msgSize = cornerMsgSize ! normal corner message call ice_distributionGetBlockLoc(dist, neBlock, dstProc, & dstLocalID) @@ -557,7 +557,7 @@ function ice_HaloCreate(dist, nsBoundaryType, ewBoundaryType, & !----------------------------------------------------------------------- ! -! if messages are received from the same processor, the message is +! if messages are received from the same processor, the message is ! actually a local copy - count them and reset to zero ! !----------------------------------------------------------------------- @@ -585,7 +585,7 @@ function ice_HaloCreate(dist, nsBoundaryType, ewBoundaryType, & ! check to see if they need to be re-sized ! !----------------------------------------------------------------------- - + maxTmp = maxval(sendCount) maxSizeSend = global_maxval(maxTmp, dist) maxTmp = maxval(recvCount) @@ -733,7 +733,7 @@ function ice_HaloCreate(dist, nsBoundaryType, ewBoundaryType, & 'north') !*** if a tripole boundary block, also create a local - !*** message into and out of tripole buffer + !*** message into and out of tripole buffer if (tripoleBlock) then !*** copy out of tripole buffer - includes halo @@ -1102,7 +1102,7 @@ subroutine ice_HaloMask(halo, basehalo, mask) elseif (mask(icel,jcel,abs(nblock)) /= 0) then tmpflag = .true. endif - + if (tmpflag) then scnt = scnt + 1 if (scnt == 1) then @@ -1117,7 +1117,7 @@ subroutine ice_HaloMask(halo, basehalo, mask) endif enddo enddo - halo%numMsgSend = numMsgSend + halo%numMsgSend = numMsgSend numMsgRecv = 0 do nmsg=1,basehalo%numMsgRecv @@ -1134,7 +1134,7 @@ subroutine ice_HaloMask(halo, basehalo, mask) elseif (mask(icel,jcel,abs(nblock)) /= 0) then tmpflag = .true. endif - + if (tmpflag) then scnt = scnt + 1 if (scnt == 1) then @@ -1149,7 +1149,7 @@ subroutine ice_HaloMask(halo, basehalo, mask) endif enddo enddo - halo%numMsgRecv = numMsgRecv + halo%numMsgRecv = numMsgRecv !----------------------------------------------------------------------- @@ -1312,7 +1312,7 @@ subroutine ice_HaloUpdate2DR8(array, halo, & !----------------------------------------------------------------------- ! ! 
while messages are being communicated, fill out halo region -! needed for masked halos to ensure halo values are filled for +! needed for masked halos to ensure halo values are filled for ! halo grid cells that are not updated ! !----------------------------------------------------------------------- @@ -1339,7 +1339,7 @@ subroutine ice_HaloUpdate2DR8(array, halo, & !----------------------------------------------------------------------- ! ! do local copies while waiting for messages to complete -! if srcBlock is zero, that denotes an eliminated land block or a +! if srcBlock is zero, that denotes an eliminated land block or a ! closed boundary where ghost cell values are undefined ! if srcBlock is less than zero, the message is a copy out of the ! tripole buffer and will be treated later @@ -1409,7 +1409,7 @@ subroutine ice_HaloUpdate2DR8(array, halo, & ! ! take care of northern boundary in tripole case ! bufTripole array contains the top nghost+1 rows (u-fold) or nghost+2 rows -! (T-fold) of physical domain for entire (global) top row +! (T-fold) of physical domain for entire (global) top row ! !----------------------------------------------------------------------- @@ -1430,13 +1430,13 @@ subroutine ice_HaloUpdate2DR8(array, halo, & select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = -1 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do i = 2,nxGlobal/2 iDst = nxGlobal - i + 2 x1 = bufTripoleR8(i ,halo%tripoleRows) @@ -1445,20 +1445,20 @@ subroutine ice_HaloUpdate2DR8(array, halo, & bufTripoleR8(i ,halo%tripoleRows) = xavg bufTripoleR8(iDst,halo%tripoleRows) = isign*xavg end do - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 0 joffset = 1 - + case (field_loc_Eface) ! cell center location - + ioffset = 0 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do i = 1,nxGlobal/2 iDst = nxGlobal + 1 - i x1 = bufTripoleR8(i ,halo%tripoleRows) @@ -1467,12 +1467,12 @@ subroutine ice_HaloUpdate2DR8(array, halo, & bufTripoleR8(i ,halo%tripoleRows) = xavg bufTripoleR8(iDst,halo%tripoleRows) = isign*xavg end do - + case (field_loc_Nface) ! cell corner (velocity) location - + ioffset = -1 joffset = 1 - + case default call abort_ice(subname//'ERROR: Unknown field location') end select @@ -1481,18 +1481,18 @@ subroutine ice_HaloUpdate2DR8(array, halo, & select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = 0 joffset = 0 - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 1 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do i = 1,nxGlobal/2 - 1 iDst = nxGlobal - i x1 = bufTripoleR8(i ,halo%tripoleRows) @@ -1501,20 +1501,20 @@ subroutine ice_HaloUpdate2DR8(array, halo, & bufTripoleR8(i ,halo%tripoleRows) = xavg bufTripoleR8(iDst,halo%tripoleRows) = isign*xavg end do - + case (field_loc_Eface) ! cell center location - + ioffset = 1 joffset = 0 - + case (field_loc_Nface) ! 
cell corner (velocity) location - + ioffset = 0 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do i = 1,nxGlobal/2 iDst = nxGlobal + 1 - i x1 = bufTripoleR8(i ,halo%tripoleRows) @@ -1523,7 +1523,7 @@ subroutine ice_HaloUpdate2DR8(array, halo, & bufTripoleR8(i ,halo%tripoleRows) = xavg bufTripoleR8(iDst,halo%tripoleRows) = isign*xavg end do - + case default call abort_ice(subname//'ERROR: Unknown field location') end select @@ -1730,7 +1730,7 @@ subroutine ice_HaloUpdate2DR4(array, halo, & !----------------------------------------------------------------------- ! ! while messages are being communicated, fill out halo region -! needed for masked halos to ensure halo values are filled for +! needed for masked halos to ensure halo values are filled for ! halo grid cells that are not updated ! !----------------------------------------------------------------------- @@ -1752,7 +1752,7 @@ subroutine ice_HaloUpdate2DR4(array, halo, & !----------------------------------------------------------------------- ! ! do local copies while waiting for messages to complete -! if srcBlock is zero, that denotes an eliminated land block or a +! if srcBlock is zero, that denotes an eliminated land block or a ! closed boundary where ghost cell values are undefined ! if srcBlock is less than zero, the message is a copy out of the ! tripole buffer and will be treated later @@ -1828,13 +1828,13 @@ subroutine ice_HaloUpdate2DR4(array, halo, & select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = -1 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do i = 2,nxGlobal/2 iDst = nxGlobal - i + 2 x1 = bufTripoleR4(i ,halo%tripoleRows) @@ -1843,20 +1843,20 @@ subroutine ice_HaloUpdate2DR4(array, halo, & bufTripoleR4(i ,halo%tripoleRows) = xavg bufTripoleR4(iDst,halo%tripoleRows) = isign*xavg end do - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 0 joffset = 1 - + case (field_loc_Eface) ! cell center location - + ioffset = 0 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do i = 1,nxGlobal/2 iDst = nxGlobal + 1 - i x1 = bufTripoleR4(i ,halo%tripoleRows) @@ -1865,32 +1865,32 @@ subroutine ice_HaloUpdate2DR4(array, halo, & bufTripoleR4(i ,halo%tripoleRows) = xavg bufTripoleR4(iDst,halo%tripoleRows) = isign*xavg end do - + case (field_loc_Nface) ! cell corner (velocity) location - + ioffset = -1 joffset = 1 - + case default call abort_ice(subname//'ERROR: Unknown field location') - end select + end select else ! tripole u-fold - + select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = 0 joffset = 0 - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 1 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do i = 1,nxGlobal/2 - 1 iDst = nxGlobal - i x1 = bufTripoleR4(i ,halo%tripoleRows) @@ -1899,20 +1899,20 @@ subroutine ice_HaloUpdate2DR4(array, halo, & bufTripoleR4(i ,halo%tripoleRows) = xavg bufTripoleR4(iDst,halo%tripoleRows) = isign*xavg end do - + case (field_loc_Eface) ! cell center location - + ioffset = 1 joffset = 0 - + case (field_loc_Nface) ! 
cell corner (velocity) location - + ioffset = 0 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do i = 1,nxGlobal/2 iDst = nxGlobal + 1 - i x1 = bufTripoleR4(i ,halo%tripoleRows) @@ -1921,7 +1921,7 @@ subroutine ice_HaloUpdate2DR4(array, halo, & bufTripoleR4(i ,halo%tripoleRows) = xavg bufTripoleR4(iDst,halo%tripoleRows) = isign*xavg end do - + case default call abort_ice(subname//'ERROR: Unknown field location') end select @@ -2128,7 +2128,7 @@ subroutine ice_HaloUpdate2DI4(array, halo, & !----------------------------------------------------------------------- ! ! while messages are being communicated, fill out halo region -! needed for masked halos to ensure halo values are filled for +! needed for masked halos to ensure halo values are filled for ! halo grid cells that are not updated ! !----------------------------------------------------------------------- @@ -2150,7 +2150,7 @@ subroutine ice_HaloUpdate2DI4(array, halo, & !----------------------------------------------------------------------- ! ! do local copies while waiting for messages to complete -! if srcBlock is zero, that denotes an eliminated land block or a +! if srcBlock is zero, that denotes an eliminated land block or a ! closed boundary where ghost cell values are undefined ! if srcBlock is less than zero, the message is a copy out of the ! tripole buffer and will be treated later @@ -2226,13 +2226,13 @@ subroutine ice_HaloUpdate2DI4(array, halo, & select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = -1 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do i = 2,nxGlobal/2 iDst = nxGlobal - i + 2 x1 = bufTripoleI4(i ,halo%tripoleRows) @@ -2241,20 +2241,20 @@ subroutine ice_HaloUpdate2DI4(array, halo, & bufTripoleI4(i ,halo%tripoleRows) = xavg bufTripoleI4(iDst,halo%tripoleRows) = isign*xavg end do - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 0 joffset = 1 - + case (field_loc_Eface) ! cell center location - + ioffset = 0 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do i = 1,nxGlobal/2 iDst = nxGlobal + 1 - i x1 = bufTripoleI4(i ,halo%tripoleRows) @@ -2263,32 +2263,32 @@ subroutine ice_HaloUpdate2DI4(array, halo, & bufTripoleI4(i ,halo%tripoleRows) = xavg bufTripoleI4(iDst,halo%tripoleRows) = isign*xavg end do - + case (field_loc_Nface) ! cell corner (velocity) location - + ioffset = -1 joffset = 1 - + case default call abort_ice(subname//'ERROR: Unknown field location') end select - else ! tripole u-fold - + else ! tripole u-fold + select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = 0 joffset = 0 - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 1 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do i = 1,nxGlobal/2 - 1 iDst = nxGlobal - i x1 = bufTripoleI4(i ,halo%tripoleRows) @@ -2297,20 +2297,20 @@ subroutine ice_HaloUpdate2DI4(array, halo, & bufTripoleI4(i ,halo%tripoleRows) = xavg bufTripoleI4(iDst,halo%tripoleRows) = isign*xavg end do - + case (field_loc_Eface) ! cell center location - + ioffset = 1 joffset = 0 - + case (field_loc_Nface) ! 
cell corner (velocity) location - + ioffset = 0 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do i = 1,nxGlobal/2 iDst = nxGlobal + 1 - i x1 = bufTripoleI4(i ,halo%tripoleRows) @@ -2319,7 +2319,7 @@ subroutine ice_HaloUpdate2DI4(array, halo, & bufTripoleI4(i ,halo%tripoleRows) = xavg bufTripoleI4(iDst,halo%tripoleRows) = isign*xavg end do - + case default call abort_ice(subname//'ERROR: Unknown field location') end select @@ -2451,7 +2451,7 @@ subroutine ice_HaloUpdate3DR8(array, halo, & real (dbl_kind), dimension(:,:,:), allocatable :: & bufTripole ! 3d tripole buffer - integer (int_kind) :: len ! length of message + integer (int_kind) :: len ! length of message character(len=*), parameter :: subname = '(ice_HaloUpdate3DR8)' @@ -2554,7 +2554,7 @@ subroutine ice_HaloUpdate3DR8(array, halo, & !----------------------------------------------------------------------- ! ! while messages are being communicated, fill out halo region -! needed for masked halos to ensure halo values are filled for +! needed for masked halos to ensure halo values are filled for ! halo grid cells that are not updated ! !----------------------------------------------------------------------- @@ -2576,7 +2576,7 @@ subroutine ice_HaloUpdate3DR8(array, halo, & !----------------------------------------------------------------------- ! ! do local copies while waiting for messages to complete -! if srcBlock is zero, that denotes an eliminated land block or a +! if srcBlock is zero, that denotes an eliminated land block or a ! closed boundary where ghost cell values are undefined ! if srcBlock is less than zero, the message is a copy out of the ! tripole buffer and will be treated later @@ -2665,10 +2665,10 @@ subroutine ice_HaloUpdate3DR8(array, halo, & select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = -1 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value @@ -2682,20 +2682,20 @@ subroutine ice_HaloUpdate3DR8(array, halo, & bufTripole(iDst,halo%tripoleRows,k) = isign*xavg end do end do - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 0 joffset = 1 - + case (field_loc_Eface) ! cell center location - + ioffset = 0 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do k=1,nz do i = 1,nxGlobal/2 iDst = nxGlobal + 1 - i @@ -2706,32 +2706,32 @@ subroutine ice_HaloUpdate3DR8(array, halo, & bufTripole(iDst,halo%tripoleRows,k) = isign*xavg end do end do - + case (field_loc_Nface) ! cell corner (velocity) location - + ioffset = -1 joffset = 1 - + case default call abort_ice(subname//'ERROR: Unknown field location') - end select - + end select + else ! tripole u-fold - + select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = 0 joffset = 0 - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 1 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do k=1,nz do i = 1,nxGlobal/2 - 1 iDst = nxGlobal - i @@ -2742,20 +2742,20 @@ subroutine ice_HaloUpdate3DR8(array, halo, & bufTripole(iDst,halo%tripoleRows,k) = isign*xavg end do end do - + case (field_loc_Eface) ! cell center location - + ioffset = 1 joffset = 0 - + case (field_loc_Nface) ! 
cell corner (velocity) location - + ioffset = 0 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do k=1,nz do i = 1,nxGlobal/2 iDst = nxGlobal + 1 - i @@ -2766,7 +2766,7 @@ subroutine ice_HaloUpdate3DR8(array, halo, & bufTripole(iDst,halo%tripoleRows,k) = isign*xavg end do end do - + case default call abort_ice(subname//'ERROR: Unknown field location') end select @@ -2908,7 +2908,7 @@ subroutine ice_HaloUpdate3DR4(array, halo, & real (real_kind), dimension(:,:,:), allocatable :: & bufTripole ! 3d tripole buffer - integer (int_kind) :: len ! length of message + integer (int_kind) :: len ! length of message character(len=*), parameter :: subname = '(ice_HaloUpdate3DR4)' @@ -3011,7 +3011,7 @@ subroutine ice_HaloUpdate3DR4(array, halo, & !----------------------------------------------------------------------- ! ! while messages are being communicated, fill out halo region -! needed for masked halos to ensure halo values are filled for +! needed for masked halos to ensure halo values are filled for ! halo grid cells that are not updated ! !----------------------------------------------------------------------- @@ -3033,7 +3033,7 @@ subroutine ice_HaloUpdate3DR4(array, halo, & !----------------------------------------------------------------------- ! ! do local copies while waiting for messages to complete -! if srcBlock is zero, that denotes an eliminated land block or a +! if srcBlock is zero, that denotes an eliminated land block or a ! closed boundary where ghost cell values are undefined ! if srcBlock is less than zero, the message is a copy out of the ! tripole buffer and will be treated later @@ -3122,10 +3122,10 @@ subroutine ice_HaloUpdate3DR4(array, halo, & select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = -1 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value @@ -3139,20 +3139,20 @@ subroutine ice_HaloUpdate3DR4(array, halo, & bufTripole(iDst,halo%tripoleRows,k) = isign*xavg end do end do - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 0 joffset = 1 - + case (field_loc_Eface) ! cell center location - + ioffset = 0 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do k=1,nz do i = 1,nxGlobal/2 iDst = nxGlobal + 1 - i @@ -3163,32 +3163,32 @@ subroutine ice_HaloUpdate3DR4(array, halo, & bufTripole(iDst,halo%tripoleRows,k) = isign*xavg end do end do - + case (field_loc_Nface) ! cell corner (velocity) location - + ioffset = -1 joffset = 1 - + case default call abort_ice(subname//'ERROR: Unknown field location') - end select - - else ! tripole u-fold - + end select + + else ! tripole u-fold + select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = 0 joffset = 0 - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 1 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do k=1,nz do i = 1,nxGlobal/2 - 1 iDst = nxGlobal - i @@ -3199,20 +3199,20 @@ subroutine ice_HaloUpdate3DR4(array, halo, & bufTripole(iDst,halo%tripoleRows,k) = isign*xavg end do end do - + case (field_loc_Eface) ! cell center location - + ioffset = 1 joffset = 0 - + case (field_loc_Nface) ! 
cell corner (velocity) location - + ioffset = 0 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do k=1,nz do i = 1,nxGlobal/2 iDst = nxGlobal + 1 - i @@ -3223,7 +3223,7 @@ subroutine ice_HaloUpdate3DR4(array, halo, & bufTripole(iDst,halo%tripoleRows,k) = isign*xavg end do end do - + case default call abort_ice(subname//'ERROR: Unknown field location') end select @@ -3468,7 +3468,7 @@ subroutine ice_HaloUpdate3DI4(array, halo, & !----------------------------------------------------------------------- ! ! while messages are being communicated, fill out halo region -! needed for masked halos to ensure halo values are filled for +! needed for masked halos to ensure halo values are filled for ! halo grid cells that are not updated ! !----------------------------------------------------------------------- @@ -3490,7 +3490,7 @@ subroutine ice_HaloUpdate3DI4(array, halo, & !----------------------------------------------------------------------- ! ! do local copies while waiting for messages to complete -! if srcBlock is zero, that denotes an eliminated land block or a +! if srcBlock is zero, that denotes an eliminated land block or a ! closed boundary where ghost cell values are undefined ! if srcBlock is less than zero, the message is a copy out of the ! tripole buffer and will be treated later @@ -3563,7 +3563,7 @@ subroutine ice_HaloUpdate3DI4(array, halo, & !----------------------------------------------------------------------- if (nxGlobal > 0) then - + select case (fieldKind) case (field_type_scalar) isign = 1 @@ -3579,10 +3579,10 @@ subroutine ice_HaloUpdate3DI4(array, halo, & select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = -1 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value @@ -3596,20 +3596,20 @@ subroutine ice_HaloUpdate3DI4(array, halo, & bufTripole(iDst,halo%tripoleRows,k) = isign*xavg end do end do - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 0 joffset = 1 - + case (field_loc_Eface) ! cell center location - + ioffset = 0 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do k=1,nz do i = 1,nxGlobal/2 iDst = nxGlobal + 1 - i @@ -3620,32 +3620,32 @@ subroutine ice_HaloUpdate3DI4(array, halo, & bufTripole(iDst,halo%tripoleRows,k) = isign*xavg end do end do - + case (field_loc_Nface) ! cell corner (velocity) location - + ioffset = -1 joffset = 1 - + case default call abort_ice(subname//'ERROR: Unknown field location') end select - - else ! tripole u-fold + + else ! tripole u-fold select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = 0 joffset = 0 - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 1 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do k=1,nz do i = 1,nxGlobal/2 - 1 iDst = nxGlobal - i @@ -3656,20 +3656,20 @@ subroutine ice_HaloUpdate3DI4(array, halo, & bufTripole(iDst,halo%tripoleRows,k) = isign*xavg end do end do - + case (field_loc_Eface) ! cell center location - + ioffset = 1 joffset = 0 - + case (field_loc_Nface) ! 
cell corner (velocity) location - + ioffset = 0 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do k=1,nz do i = 1,nxGlobal/2 iDst = nxGlobal + 1 - i @@ -3680,11 +3680,11 @@ subroutine ice_HaloUpdate3DI4(array, halo, & bufTripole(iDst,halo%tripoleRows,k) = isign*xavg end do end do - + case default call abort_ice(subname//'ERROR: Unknown field location') end select - + endif !*** copy out of global tripole buffer into local @@ -3929,7 +3929,7 @@ subroutine ice_HaloUpdate4DR8(array, halo, & !----------------------------------------------------------------------- ! ! while messages are being communicated, fill out halo region -! needed for masked halos to ensure halo values are filled for +! needed for masked halos to ensure halo values are filled for ! halo grid cells that are not updated ! !----------------------------------------------------------------------- @@ -3951,7 +3951,7 @@ subroutine ice_HaloUpdate4DR8(array, halo, & !----------------------------------------------------------------------- ! ! do local copies while waiting for messages to complete -! if srcBlock is zero, that denotes an eliminated land block or a +! if srcBlock is zero, that denotes an eliminated land block or a ! closed boundary where ghost cell values are undefined ! if srcBlock is less than zero, the message is a copy out of the ! tripole buffer and will be treated later @@ -4034,7 +4034,7 @@ subroutine ice_HaloUpdate4DR8(array, halo, & !----------------------------------------------------------------------- if (nxGlobal > 0) then - + select case (fieldKind) case (field_type_scalar) isign = 1 @@ -4050,10 +4050,10 @@ subroutine ice_HaloUpdate4DR8(array, halo, & select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = -1 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value @@ -4069,17 +4069,17 @@ subroutine ice_HaloUpdate4DR8(array, halo, & end do end do end do - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 0 joffset = 1 - + case (field_loc_Eface) ! cell center location - + ioffset = 0 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value @@ -4095,32 +4095,32 @@ subroutine ice_HaloUpdate4DR8(array, halo, & end do end do end do - + case (field_loc_Nface) ! cell corner (velocity) location - + ioffset = -1 joffset = 1 - + case default call abort_ice(subname//'ERROR: Unknown field location') - end select - - else ! tripole u-fold + end select + + else ! tripole u-fold select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = 0 joffset = 0 - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 1 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do l=1,nt do k=1,nz do i = 1,nxGlobal/2 - 1 @@ -4133,20 +4133,20 @@ subroutine ice_HaloUpdate4DR8(array, halo, & end do end do end do - + case (field_loc_Eface) ! cell center location - + ioffset = 1 joffset = 0 - + case (field_loc_Nface) ! 
cell corner (velocity) location - + ioffset = 0 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do l=1,nt do k=1,nz do i = 1,nxGlobal/2 @@ -4159,11 +4159,11 @@ subroutine ice_HaloUpdate4DR8(array, halo, & end do end do end do - + case default call abort_ice(subname//'ERROR: Unknown field location') end select - + endif !*** copy out of global tripole buffer into local @@ -4410,7 +4410,7 @@ subroutine ice_HaloUpdate4DR4(array, halo, & !----------------------------------------------------------------------- ! ! while messages are being communicated, fill out halo region -! needed for masked halos to ensure halo values are filled for +! needed for masked halos to ensure halo values are filled for ! halo grid cells that are not updated ! !----------------------------------------------------------------------- @@ -4432,7 +4432,7 @@ subroutine ice_HaloUpdate4DR4(array, halo, & !----------------------------------------------------------------------- ! ! do local copies while waiting for messages to complete -! if srcBlock is zero, that denotes an eliminated land block or a +! if srcBlock is zero, that denotes an eliminated land block or a ! closed boundary where ghost cell values are undefined ! if srcBlock is less than zero, the message is a copy out of the ! tripole buffer and will be treated later @@ -4515,7 +4515,7 @@ subroutine ice_HaloUpdate4DR4(array, halo, & !----------------------------------------------------------------------- if (nxGlobal > 0) then - + select case (fieldKind) case (field_type_scalar) isign = 1 @@ -4531,10 +4531,10 @@ subroutine ice_HaloUpdate4DR4(array, halo, & select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = -1 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value @@ -4550,17 +4550,17 @@ subroutine ice_HaloUpdate4DR4(array, halo, & end do end do end do - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 0 joffset = 1 - + case (field_loc_Eface) ! cell center location - + ioffset = 0 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value @@ -4576,32 +4576,32 @@ subroutine ice_HaloUpdate4DR4(array, halo, & end do end do end do - + case (field_loc_Nface) ! cell corner (velocity) location - + ioffset = -1 joffset = 1 - + case default call abort_ice(subname//'ERROR: Unknown field location') - end select - - else ! tripole u-fold + end select + + else ! tripole u-fold select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = 0 joffset = 0 - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 1 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do l=1,nt do k=1,nz do i = 1,nxGlobal/2 - 1 @@ -4614,20 +4614,20 @@ subroutine ice_HaloUpdate4DR4(array, halo, & end do end do end do - + case (field_loc_Eface) ! cell center location - + ioffset = 1 joffset = 0 - + case (field_loc_Nface) ! 
cell corner (velocity) location - + ioffset = 0 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do l=1,nt do k=1,nz do i = 1,nxGlobal/2 @@ -4640,11 +4640,11 @@ subroutine ice_HaloUpdate4DR4(array, halo, & end do end do end do - + case default call abort_ice(subname//'ERROR: Unknown field location') end select - + endif !*** copy out of global tripole buffer into local @@ -4891,7 +4891,7 @@ subroutine ice_HaloUpdate4DI4(array, halo, & !----------------------------------------------------------------------- ! ! while messages are being communicated, fill out halo region -! needed for masked halos to ensure halo values are filled for +! needed for masked halos to ensure halo values are filled for ! halo grid cells that are not updated ! !----------------------------------------------------------------------- @@ -4913,7 +4913,7 @@ subroutine ice_HaloUpdate4DI4(array, halo, & !----------------------------------------------------------------------- ! ! do local copies while waiting for messages to complete -! if srcBlock is zero, that denotes an eliminated land block or a +! if srcBlock is zero, that denotes an eliminated land block or a ! closed boundary where ghost cell values are undefined ! if srcBlock is less than zero, the message is a copy out of the ! tripole buffer and will be treated later @@ -5012,10 +5012,10 @@ subroutine ice_HaloUpdate4DI4(array, halo, & select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = -1 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value @@ -5031,17 +5031,17 @@ subroutine ice_HaloUpdate4DI4(array, halo, & end do end do end do - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 0 joffset = 1 - + case (field_loc_Eface) ! cell center location - + ioffset = 0 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value @@ -5057,32 +5057,32 @@ subroutine ice_HaloUpdate4DI4(array, halo, & end do end do end do - + case (field_loc_Nface) ! cell corner (velocity) location - + ioffset = -1 joffset = 1 - + case default call abort_ice(subname//'ERROR: Unknown field location') - end select - - else ! tripole u-fold + end select + + else ! tripole u-fold select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = 0 joffset = 0 - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 1 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do l=1,nt do k=1,nz do i = 1,nxGlobal/2 - 1 @@ -5095,20 +5095,20 @@ subroutine ice_HaloUpdate4DI4(array, halo, & end do end do end do - + case (field_loc_Eface) ! cell center location - + ioffset = 1 joffset = 0 - + case (field_loc_Nface) ! cell corner (velocity) location - + ioffset = 0 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do l=1,nt do k=1,nz do i = 1,nxGlobal/2 @@ -5121,11 +5121,11 @@ subroutine ice_HaloUpdate4DI4(array, halo, & end do end do end do - + case default call abort_ice(subname//'ERROR: Unknown field location') end select - + endif !*** copy out of global tripole buffer into local @@ -5354,7 +5354,7 @@ subroutine ice_HaloUpdate_stress(array1, array2, halo, & !----------------------------------------------------------------------- ! ! do local copies while waiting for messages to complete -! 
if srcBlock is zero, that denotes an eliminated land block or a +! if srcBlock is zero, that denotes an eliminated land block or a ! closed boundary where ghost cell values are undefined ! if srcBlock is less than zero, the message is a copy out of the ! tripole buffer and will be treated later @@ -5404,7 +5404,7 @@ subroutine ice_HaloUpdate_stress(array1, array2, halo, & ! ! take care of northern boundary in tripole case ! bufTripole array contains the top haloWidth+1 rows of physical -! domain for entire (global) top row +! domain for entire (global) top row ! !----------------------------------------------------------------------- @@ -5432,12 +5432,12 @@ subroutine ice_HaloUpdate_stress(array1, array2, halo, & ioffset = 1 joffset = 1 - case (field_loc_Eface) + case (field_loc_Eface) ioffset = 1 joffset = 0 - case (field_loc_Nface) + case (field_loc_Nface) ioffset = 0 joffset = 1 @@ -5468,7 +5468,7 @@ subroutine ice_HaloUpdate_stress(array1, array2, halo, & iSrc = iSrc - ioffset jSrc = jSrc - joffset if (iSrc == 0) iSrc = nxGlobal - + !*** for center and Eface, do not need to replace !*** top row of physical domain, so jSrc should be !*** out of range and skipped @@ -5537,7 +5537,7 @@ subroutine ice_HaloIncrementMsgCount(sndCounter, rcvCounter, & !----------------------------------------------------------------------- ! -! if destination all land or outside closed boundary (dstProc = 0), +! if destination all land or outside closed boundary (dstProc = 0), ! then no send is necessary, so do the rest only for dstProc /= 0 ! !----------------------------------------------------------------------- @@ -5546,7 +5546,7 @@ subroutine ice_HaloIncrementMsgCount(sndCounter, rcvCounter, & !----------------------------------------------------------------------- ! -! if the current processor is the source, must send data +! if the current processor is the source, must send data ! local copy if dstProc = srcProc ! !----------------------------------------------------------------------- @@ -5556,14 +5556,14 @@ subroutine ice_HaloIncrementMsgCount(sndCounter, rcvCounter, & !----------------------------------------------------------------------- ! -! if the current processor is the destination, must receive data +! if the current processor is the destination, must receive data ! local copy if dstProc = srcProc ! !----------------------------------------------------------------------- if (dstProc == my_task + 1) then - if (srcProc > 0) then + if (srcProc > 0) then !*** the source block has ocean points !*** count as a receive from srcProc @@ -5631,7 +5631,7 @@ subroutine ice_HaloMsgCreate(halo, srcBlock, srcProc, srcLocalID, & !----------------------------------------------------------------------- ! -! if destination all land or outside closed boundary (dstProc = 0), +! if destination all land or outside closed boundary (dstProc = 0), ! then no send is necessary, so do the rest only for dstProc /= 0 ! !----------------------------------------------------------------------- @@ -5673,7 +5673,7 @@ subroutine ice_HaloMsgCreate(halo, srcBlock, srcProc, srcLocalID, & !----------------------------------------------------------------------- if (srcProc == my_task+1 .and. 
& - dstProc == my_task+1) then + dstProc == my_task+1) then !*** compute addresses based on direction @@ -5764,7 +5764,7 @@ subroutine ice_HaloMsgCreate(halo, srcBlock, srcProc, srcLocalID, & if (jeSrc - jbSrc + 1 < halo%tripoleRows) then call abort_ice(subname//'ERROR: not enough points in block for tripole') return - endif + endif do j=1,halo%tripoleRows do i=1,ieSrc-ibSrc+1 @@ -5784,7 +5784,7 @@ subroutine ice_HaloMsgCreate(halo, srcBlock, srcProc, srcLocalID, & else if (srcBlock < 0 .and. dstBlock > 0) then - !*** tripole grid - set up for copying out of + !*** tripole grid - set up for copying out of !*** tripole buffer into ghost cell domains !*** include e-w ghost cells @@ -5950,12 +5950,12 @@ subroutine ice_HaloMsgCreate(halo, srcBlock, srcProc, srcLocalID, & !----------------------------------------------------------------------- ! -! if dest block is local and source block does not exist, create a +! if dest block is local and source block does not exist, create a ! local copy to fill halo with a fill value ! !----------------------------------------------------------------------- - else if (srcProc == 0 .and. dstProc == my_task+1) then + else if (srcProc == 0 .and. dstProc == my_task+1) then msgIndx = halo%numLocalCopies @@ -6184,7 +6184,7 @@ subroutine ice_HaloMsgCreate(halo, srcBlock, srcProc, srcLocalID, & bufSize = halo%sizeSend(n) exit srchSend endif - end do srchSend + end do srchSend if (msgIndx == 0) then msgIndx = halo%numMsgSend + 1 @@ -6255,7 +6255,7 @@ subroutine ice_HaloMsgCreate(halo, srcBlock, srcProc, srcLocalID, & halo%sizeSend(msgIndx) = bufSize - else + else !*** tripole block - send top halo%tripoleRows rows of phys domain @@ -6318,7 +6318,7 @@ subroutine ice_HaloMsgCreate(halo, srcBlock, srcProc, srcLocalID, & halo%sizeSend(msgIndx) = bufSize - else + else !*** tripole block - send top halo%tripoleRows rows of phys domain @@ -6361,7 +6361,7 @@ subroutine ice_HaloMsgCreate(halo, srcBlock, srcProc, srcLocalID, & halo%sizeSend(msgIndx) = bufSize - else + else !*** tripole block - send top halo%tripoleRows rows of phys domain @@ -6447,7 +6447,7 @@ subroutine ice_HaloMsgCreate(halo, srcBlock, srcProc, srcLocalID, & bufSize = halo%sizeRecv(n) exit srchRecv endif - end do srchRecv + end do srchRecv if (msgIndx == 0) then msgIndx = halo%numMsgRecv + 1 @@ -6705,14 +6705,14 @@ end subroutine ice_HaloMsgCreate subroutine ice_HaloExtrapolate2DR8(ARRAY,dist,ew_bndy_type,ns_bndy_type) -! This subroutine extrapolates ARRAY values into the first row or column -! of ghost cells, and is intended for grid variables whose ghost cells -! would otherwise be set using the default boundary conditions (Dirichlet +! This subroutine extrapolates ARRAY values into the first row or column +! of ghost cells, and is intended for grid variables whose ghost cells +! would otherwise be set using the default boundary conditions (Dirichlet ! or Neumann). ! Note: This routine will need to be modified for nghost > 1. ! We assume padding occurs only on east and north edges. ! -! This is the specific interface for double precision arrays +! This is the specific interface for double precision arrays ! 
corresponding to the generic interface ice_HaloExtrapolate use ice_blocks, only: block, nblocks_x, nblocks_y, get_block diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_communicate.F90 b/cicecore/cicedynB/infrastructure/comm/mpi/ice_communicate.F90 index 00f427144..fab0c9218 100644 --- a/cicecore/cicedynB/infrastructure/comm/mpi/ice_communicate.F90 +++ b/cicecore/cicedynB/infrastructure/comm/mpi/ice_communicate.F90 @@ -72,7 +72,7 @@ subroutine init_communicate(mpicom) if (present(mpicom)) then ice_comm = mpicom else - ice_comm = MPI_COMM_WORLD ! Global communicator + ice_comm = MPI_COMM_WORLD ! Global communicator endif call MPI_INITIALIZED(flag,ierr) diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_exit.F90 b/cicecore/cicedynB/infrastructure/comm/mpi/ice_exit.F90 index 061fd63c5..eafb3228f 100644 --- a/cicecore/cicedynB/infrastructure/comm/mpi/ice_exit.F90 +++ b/cicecore/cicedynB/infrastructure/comm/mpi/ice_exit.F90 @@ -1,6 +1,6 @@ !======================================================================= ! -! Exit the model. +! Exit the model. ! authors William H. Lipscomb (LANL) ! Elizabeth C. Hunke (LANL) ! 2006 ECH: separated serial and mpi functionality diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_gather_scatter.F90 b/cicecore/cicedynB/infrastructure/comm/mpi/ice_gather_scatter.F90 index 0a58769db..030deabca 100644 --- a/cicecore/cicedynB/infrastructure/comm/mpi/ice_gather_scatter.F90 +++ b/cicecore/cicedynB/infrastructure/comm/mpi/ice_gather_scatter.F90 @@ -55,13 +55,13 @@ module ice_gather_scatter module procedure gather_global_dbl, & gather_global_real, & gather_global_int - end interface + end interface interface scatter_global module procedure scatter_global_dbl, & scatter_global_real, & scatter_global_int - end interface + end interface !----------------------------------------------------------------------- ! @@ -80,7 +80,7 @@ subroutine gather_global_dbl(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) ! This subroutine gathers a distributed array to a global-sized ! array on the processor dst_task. ! -! This is the specific inteface for double precision arrays +! This is the specific inteface for double precision arrays ! corresponding to the generic interface gather_global. It is shown ! to provide information on the generic interface (the generic ! interface is identical, but chooses a specific inteface based @@ -141,7 +141,7 @@ subroutine gather_global_dbl(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) !----------------------------------------------------------------------- ! -! if this task is the dst_task, copy local blocks into the global +! if this task is the dst_task, copy local blocks into the global ! array and post receives for non-local blocks. ! !----------------------------------------------------------------------- @@ -308,7 +308,7 @@ subroutine gather_global_real(ARRAY_G, ARRAY, dst_task, src_dist) !----------------------------------------------------------------------- ! -! if this task is the dst_task, copy local blocks into the global +! if this task is the dst_task, copy local blocks into the global ! array and post receives for non-local blocks. ! !----------------------------------------------------------------------- @@ -475,7 +475,7 @@ subroutine gather_global_int(ARRAY_G, ARRAY, dst_task, src_dist) !----------------------------------------------------------------------- ! -! if this task is the dst_task, copy local blocks into the global +! if this task is the dst_task, copy local blocks into the global ! 
array and post receives for non-local blocks. ! !----------------------------------------------------------------------- @@ -597,7 +597,7 @@ subroutine gather_global_ext_dbl(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) real (dbl_kind), intent(in), optional :: & spc_val - + !----------------------------------------------------------------------- ! ! local variables @@ -643,7 +643,7 @@ subroutine gather_global_ext_dbl(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) !----------------------------------------------------------------------- ! -! if this task is the dst_task, copy local blocks into the global +! if this task is the dst_task, copy local blocks into the global ! array and post receives for non-local blocks. ! !----------------------------------------------------------------------- @@ -907,7 +907,7 @@ subroutine gather_global_ext_int(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) integer (int_kind), intent(in), optional :: & spc_val - + !----------------------------------------------------------------------- ! ! local variables @@ -953,7 +953,7 @@ subroutine gather_global_ext_int(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) !----------------------------------------------------------------------- ! -! if this task is the dst_task, copy local blocks into the global +! if this task is the dst_task, copy local blocks into the global ! array and post receives for non-local blocks. ! !----------------------------------------------------------------------- @@ -1217,7 +1217,7 @@ subroutine gather_global_ext_log(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) logical (log_kind), intent(in), optional :: & spc_val - + !----------------------------------------------------------------------- ! ! local variables @@ -1263,7 +1263,7 @@ subroutine gather_global_ext_log(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) !----------------------------------------------------------------------- ! -! if this task is the dst_task, copy local blocks into the global +! if this task is the dst_task, copy local blocks into the global ! array and post receives for non-local blocks. ! !----------------------------------------------------------------------- @@ -1513,7 +1513,7 @@ subroutine scatter_global_dbl(ARRAY, ARRAY_G, src_task, dst_dist, & ! This subroutine scatters a global-sized array to a distributed array. ! -! This is the specific interface for double precision arrays +! This is the specific interface for double precision arrays ! corresponding to the generic interface scatter_global. integer (int_kind), intent(in) :: & @@ -1552,9 +1552,6 @@ subroutine scatter_global_dbl(ARRAY, ARRAY_G, src_task, dst_dist, & type (block) :: & this_block ! block info for current block - integer (int_kind), dimension(MPI_STATUS_SIZE) :: & - status - integer (int_kind), dimension(:), allocatable :: & rcv_request ! request array for receives @@ -1628,7 +1625,7 @@ subroutine scatter_global_dbl(ARRAY, ARRAY_G, src_task, dst_dist, & !----------------------------------------------------------------------- ! -! if this task is the src_task, copy blocks of global array into +! if this task is the src_task, copy blocks of global array into ! message buffer and send to other processors. also copy local blocks ! !----------------------------------------------------------------------- @@ -1941,9 +1938,6 @@ subroutine scatter_global_real(ARRAY, ARRAY_G, src_task, dst_dist, & type (block) :: & this_block ! 
block info for current block - integer (int_kind), dimension(MPI_STATUS_SIZE) :: & - status - integer (int_kind), dimension(:), allocatable :: & rcv_request ! request array for receives @@ -2017,7 +2011,7 @@ subroutine scatter_global_real(ARRAY, ARRAY_G, src_task, dst_dist, & !----------------------------------------------------------------------- ! -! if this task is the src_task, copy blocks of global array into +! if this task is the src_task, copy blocks of global array into ! message buffer and send to other processors. also copy local blocks ! !----------------------------------------------------------------------- @@ -2330,9 +2324,6 @@ subroutine scatter_global_int(ARRAY, ARRAY_G, src_task, dst_dist, & type (block) :: & this_block ! block info for current block - integer (int_kind), dimension(MPI_STATUS_SIZE) :: & - status - integer (int_kind), dimension(:), allocatable :: & rcv_request ! request array for receives @@ -2406,7 +2397,7 @@ subroutine scatter_global_int(ARRAY, ARRAY_G, src_task, dst_dist, & !----------------------------------------------------------------------- ! -! if this task is the src_task, copy blocks of global array into +! if this task is the src_task, copy blocks of global array into ! message buffer and send to other processors. also copy local blocks ! !----------------------------------------------------------------------- @@ -2666,7 +2657,7 @@ subroutine scatter_global_ext(ARRAY, ARRAY_G, src_task, dst_dist) ! This subroutine scatters a global-sized array to a distributed array. ! -! This is the specific interface for double precision arrays +! This is the specific interface for double precision arrays ! corresponding to the generic interface scatter_global. integer (int_kind), intent(in) :: & @@ -2698,9 +2689,6 @@ subroutine scatter_global_ext(ARRAY, ARRAY_G, src_task, dst_dist) type (block) :: & this_block ! block info for current block - integer (int_kind), dimension(MPI_STATUS_SIZE) :: & - status - integer (int_kind), dimension(:), allocatable :: & rcv_request ! request array for receives @@ -2722,7 +2710,7 @@ subroutine scatter_global_ext(ARRAY, ARRAY_G, src_task, dst_dist) !----------------------------------------------------------------------- ! -! if this task is the src_task, copy blocks of global array into +! if this task is the src_task, copy blocks of global array into ! message buffer and send to other processors. also copy local blocks ! !----------------------------------------------------------------------- @@ -3034,9 +3022,6 @@ subroutine scatter_global_stress(ARRAY, ARRAY_G1, ARRAY_G2, & type (block) :: & this_block ! block info for current block - integer (int_kind), dimension(MPI_STATUS_SIZE) :: & - status - integer (int_kind), dimension(:), allocatable :: & rcv_request ! request array for receives @@ -3058,17 +3043,17 @@ subroutine scatter_global_stress(ARRAY, ARRAY_G1, ARRAY_G2, & this_block = get_block(1,1) ! for the tripoleTflag - all blocks have it if (this_block%tripoleTFlag) then - xoffset = 2 ! treat stresses as cell-centered scalars (they are not + xoffset = 2 ! treat stresses as cell-centered scalars (they are not yoffset = 0 ! shared with neighboring grid cells) else - xoffset = 1 ! treat stresses as cell-centered scalars (they are not + xoffset = 1 ! treat stresses as cell-centered scalars (they are not yoffset = 1 ! shared with neighboring grid cells) endif isign = 1 !----------------------------------------------------------------------- ! -! if this task is the src_task, copy blocks of global array into +! 
if this task is the src_task, copy blocks of global array into ! message buffer and send to other processors. also copy local blocks ! !----------------------------------------------------------------------- diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_global_reductions.F90 b/cicecore/cicedynB/infrastructure/comm/mpi/ice_global_reductions.F90 index 0728ac105..a5fed760b 100644 --- a/cicecore/cicedynB/infrastructure/comm/mpi/ice_global_reductions.F90 +++ b/cicecore/cicedynB/infrastructure/comm/mpi/ice_global_reductions.F90 @@ -533,7 +533,6 @@ function global_sum_scalar_dbl(scalar, dist) & !----------------------------------------------------------------------- integer (int_kind) :: & - ierr, &! mpi error flag numProcs, &! number of processor participating numBlocks, &! number of local blocks communicator ! communicator for this distribution @@ -603,7 +602,6 @@ function global_sum_scalar_real(scalar, dist) & !----------------------------------------------------------------------- integer (int_kind) :: & - ierr, &! mpi error flag numProcs, &! number of processor participating numBlocks, &! number of local blocks communicator ! communicator for this distribution @@ -714,7 +712,7 @@ end function global_sum_scalar_int function global_allreduce_sum_vector_dbl(vector, dist) & result(globalSums) -! Computes the global sums of sets of scalars (elements of 'vector') +! Computes the global sums of sets of scalars (elements of 'vector') ! distributed across a parallel machine. ! ! This is actually the specific interface for the generic global_allreduce_sum @@ -737,7 +735,6 @@ function global_allreduce_sum_vector_dbl(vector, dist) & !----------------------------------------------------------------------- integer (int_kind) :: & - ierr, &! mpi error flag numProcs, &! number of processor participating numBlocks, &! number of local blocks communicator, &! communicator for this distribution @@ -781,9 +778,9 @@ function global_sum_prod_dbl (array1, array2, dist, field_loc, & ! Computes the global sum of the physical domain of a product of ! two 2-d arrays. ! -! This is actually the specific interface for the generic +! This is actually the specific interface for the generic ! global_sum_prod function corresponding to double precision arrays. -! The generic interface is identical but will handle real and integer +! The generic interface is identical but will handle real and integer ! 2-d slabs. real (dbl_kind), dimension(:,:,:), intent(in) :: & @@ -919,9 +916,9 @@ function global_sum_prod_real (array1, array2, dist, field_loc, & ! Computes the global sum of the physical domain of a product of ! two 2-d arrays. ! -! This is actually the specific interface for the generic +! This is actually the specific interface for the generic ! global_sum_prod function corresponding to single precision arrays. -! The generic interface is identical but will handle real and integer +! The generic interface is identical but will handle real and integer ! 2-d slabs. real (real_kind), dimension(:,:,:), intent(in) :: & @@ -1057,9 +1054,9 @@ function global_sum_prod_int (array1, array2, dist, field_loc, & ! Computes the global sum of the physical domain of a product of ! two 2-d arrays. ! -! This is actually the specific interface for the generic +! This is actually the specific interface for the generic ! global_sum_prod function corresponding to integer arrays. -! The generic interface is identical but will handle real and integer +! The generic interface is identical but will handle real and integer ! 2-d slabs. 
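The comment block above describes global_sum_prod as a generic interface with
type-specific implementations for double precision, single precision, and
integer 2-d slabs. As a minimal standalone sketch of how such a generic
interface dispatches on argument kind (the demo_* names are hypothetical and
not taken from CICE):

      module demo_sum_prod_mod
      ! Minimal sketch: one generic name, two kind-specific procedures.
      implicit none
      integer, parameter :: dbl_kind = selected_real_kind(13)

      interface demo_sum_prod
         module procedure demo_sum_prod_dbl, demo_sum_prod_int
      end interface

      contains

      function demo_sum_prod_dbl(a, b) result(s)
         real (dbl_kind), dimension(:,:), intent(in) :: a, b
         real (dbl_kind) :: s
         s = sum(a*b)   ! local product sum; the real code also reduces across tasks
      end function demo_sum_prod_dbl

      function demo_sum_prod_int(a, b) result(s)
         integer, dimension(:,:), intent(in) :: a, b
         integer :: s
         s = sum(a*b)
      end function demo_sum_prod_int

      end module demo_sum_prod_mod

The compiler resolves demo_sum_prod to the matching specific function from the
argument kinds, which is why the documentation above can be written once for
the generic interface and shared by the dbl/real/int variants.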
integer (int_kind), dimension(:,:,:), intent(in) :: & @@ -1198,7 +1195,7 @@ function global_maxval_dbl (array, dist, lMask) & ! Computes the global maximum value of the physical domain of a 2-d field ! ! This is actually the specific interface for the generic global_maxval -! function corresponding to double precision arrays. +! function corresponding to double precision arrays. real (dbl_kind), dimension(:,:,:), intent(in) :: & array ! array for which max value needed @@ -1305,7 +1302,7 @@ function global_maxval_real (array, dist, lMask) & ! Computes the global maximum value of the physical domain of a 2-d field ! ! This is actually the specific interface for the generic global_maxval -! function corresponding to single precision arrays. +! function corresponding to single precision arrays. real (real_kind), dimension(:,:,:), intent(in) :: & array ! array for which max value needed @@ -1412,7 +1409,7 @@ function global_maxval_int (array, dist, lMask) & ! Computes the global maximum value of the physical domain of a 2-d field ! ! This is actually the specific interface for the generic global_maxval -! function corresponding to integer arrays. +! function corresponding to integer arrays. integer (int_kind), dimension(:,:,:), intent(in) :: & array ! array for which max value needed @@ -1520,7 +1517,7 @@ function global_maxval_scalar_dbl (scalar, dist) & ! a distributed machine. ! ! This is actually the specific interface for the generic global_maxval -! function corresponding to double precision scalars. +! function corresponding to double precision scalars. real (dbl_kind), intent(in) :: & scalar ! scalar for which max value needed @@ -1578,7 +1575,7 @@ function global_maxval_scalar_real (scalar, dist) & ! a distributed machine. ! ! This is actually the specific interface for the generic global_maxval -! function corresponding to single precision scalars. +! function corresponding to single precision scalars. real (real_kind), intent(in) :: & scalar ! scalar for which max value needed @@ -1636,7 +1633,7 @@ function global_maxval_scalar_int (scalar, dist) & ! a distributed machine. ! ! This is actually the specific interface for the generic global_maxval -! function corresponding to single precision scalars. +! function corresponding to single precision scalars. integer (int_kind), intent(in) :: & scalar ! scalar for which max value needed @@ -1694,7 +1691,7 @@ function global_maxval_scalar_int_nodist (scalar, communicator) & ! a communicator. This method supports testing. ! ! This is actually the specific interface for the generic global_maxval -! function corresponding to single precision scalars. +! function corresponding to single precision scalars. integer (int_kind), intent(in) :: & scalar ! scalar for which max value needed @@ -1743,7 +1740,7 @@ function global_minval_dbl (array, dist, lMask) & ! Computes the global minimum value of the physical domain of a 2-d field ! ! This is actually the specific interface for the generic global_minval -! function corresponding to double precision arrays. +! function corresponding to double precision arrays. real (dbl_kind), dimension(:,:,:), intent(in) :: & array ! array for which min value needed @@ -1850,7 +1847,7 @@ function global_minval_real (array, dist, lMask) & ! Computes the global minimum value of the physical domain of a 2-d field ! ! This is actually the specific interface for the generic global_minval -! function corresponding to single precision arrays. +! function corresponding to single precision arrays. 
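The global_maxval and global_minval families declared here follow the same
specific-interface pattern, with an optional lMask argument restricting the
reduction to the physical domain. A plausible standalone sketch of the
underlying pattern -- a masked local MAXVAL followed by MPI_ALLREDUCE with
MPI_MAX -- is shown below; it is illustrative only, and demo_global_maxval is
a hypothetical name, not the routine in this file:

      subroutine demo_global_maxval(field, mask, comm, gmax)
      ! Sketch: local masked max, then a collective max across all tasks.
         use mpi
         implicit none
         real (kind(1.0d0)), dimension(:,:), intent(in) :: field
         logical, dimension(:,:), intent(in) :: mask
         integer, intent(in) :: comm
         real (kind(1.0d0)), intent(out) :: gmax
         real (kind(1.0d0)) :: lmax
         integer :: ierr
         lmax = maxval(field, mask=mask)   ! -huge(...) if mask is all .false.
         call MPI_ALLREDUCE(lmax, gmax, 1, MPI_DOUBLE_PRECISION, &
                            MPI_MAX, comm, ierr)
      end subroutine demo_global_maxval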
real (real_kind), dimension(:,:,:), intent(in) :: & array ! array for which min value needed @@ -1957,7 +1954,7 @@ function global_minval_int (array, dist, lMask) & ! Computes the global minimum value of the physical domain of a 2-d field ! ! This is actually the specific interface for the generic global_minval -! function corresponding to integer arrays. +! function corresponding to integer arrays. integer (int_kind), dimension(:,:,:), intent(in) :: & array ! array for which min value needed @@ -2065,7 +2062,7 @@ function global_minval_scalar_dbl (scalar, dist) & ! a distributed machine. ! ! This is actually the specific interface for the generic global_minval -! function corresponding to double precision scalars. +! function corresponding to double precision scalars. real (dbl_kind), intent(in) :: & scalar ! scalar for which min value needed @@ -2123,7 +2120,7 @@ function global_minval_scalar_real (scalar, dist) & ! a distributed machine. ! ! This is actually the specific interface for the generic global_minval -! function corresponding to single precision scalars. +! function corresponding to single precision scalars. real (real_kind), intent(in) :: & scalar ! scalar for which min value needed @@ -2181,7 +2178,7 @@ function global_minval_scalar_int (scalar, dist) & ! a distributed machine. ! ! This is actually the specific interface for the generic global_minval -! function corresponding to single precision scalars. +! function corresponding to single precision scalars. integer (int_kind), intent(in) :: & scalar ! scalar for which min value needed @@ -2239,7 +2236,7 @@ function global_minval_scalar_int_nodist (scalar, communicator) & ! a communicator. This method supports testing. ! ! This is actually the specific interface for the generic global_minval -! function corresponding to single precision scalars. +! function corresponding to single precision scalars. integer (int_kind), intent(in) :: & scalar ! scalar for which min value needed @@ -2299,7 +2296,7 @@ subroutine compute_sums_dbl(array2,sums8,mpicomm,numprocs) ! reprosum = fixed point method based on ordered double integer sums. ! that requires two scalar reductions per global sum. ! This is extremely likely to be bfb. -! (See Mirin and Worley, 2012, IJHPCA, 26, 1730, +! (See Mirin and Worley, 2012, IJHPCA, 26, 1730, ! https://journals.sagepub.com/doi/10.1177/1094342011412630) ! ddpdd = parallel double-double algorithm using single scalar reduction. ! This is very likely to be bfb. diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_reprosum.F90 b/cicecore/cicedynB/infrastructure/comm/mpi/ice_reprosum.F90 index 27f66f712..8c6f90363 100644 --- a/cicecore/cicedynB/infrastructure/comm/mpi/ice_reprosum.F90 +++ b/cicecore/cicedynB/infrastructure/comm/mpi/ice_reprosum.F90 @@ -3,34 +3,34 @@ MODULE ice_reprosum -!----------------------------------------------------------------------- -! -! Purpose: -!> Compute reproducible global sums of a set of arrays across an MPI +!----------------------------------------------------------------------- +! +! Purpose: +!> Compute reproducible global sums of a set of arrays across an MPI !> subcommunicator ! -! Methods: +! 
Methods: !> Compute using either or both a scalable, reproducible algorithm and a !> scalable, nonreproducible algorithm: -!> * Reproducible (scalable): +!> * Reproducible (scalable): !> Convert to fixed point (integer vector representation) to enable !> reproducibility when using MPI_Allreduce !> * Alternative usually reproducible (scalable): -!> Use parallel double-double algorithm due to Helen He and +!> Use parallel double-double algorithm due to Helen He and !> Chris Ding, based on David Bailey's/Don Knuth's DDPDD algorithm -!> * Nonreproducible (scalable): +!> * Nonreproducible (scalable): !> Floating point and MPI_Allreduce based. -!> If computing both reproducible and nonreproducible sums, compare +!> If computing both reproducible and nonreproducible sums, compare !> these and report relative difference (if absolute difference !> less than sum) or absolute difference back to calling routine. ! -! Author: P. Worley (based on suggestions from J. White for fixed -! point algorithm and on He/Ding paper for ddpdd +! Author: P. Worley (based on suggestions from J. White for fixed +! point algorithm and on He/Ding paper for ddpdd ! algorithm) ! ! Modified by T.Craig for CICE, March 2019 based on the public version in ! Oasis3-MCT_4.0. -! +! !----------------------------------------------------------------------- !----------------------------------------------------------------------- @@ -65,7 +65,7 @@ MODULE ice_reprosum public :: & ice_reprosum_setopts, &! set runtime options ice_reprosum_calc, &! calculate distributed sum - ice_reprosum_tolExceeded ! utility function to check relative + ice_reprosum_tolExceeded ! utility function to check relative ! differences against the tolerance !----------------------------------------------------------------------- @@ -87,14 +87,14 @@ MODULE ice_reprosum !----------------------------------------------------------------------- logical :: repro_sum_use_ddpdd = .false. - logical :: detailed_timing = .false. +! logical :: detailed_timing = .false. character(len=char_len_long) :: tmpstr CONTAINS !======================================================================== -!----------------------------------------------------------------------- -! Purpose: +!----------------------------------------------------------------------- +! Purpose: !> Set runtime options ! Author: P. Worley !----------------------------------------------------------------------- @@ -109,11 +109,11 @@ subroutine ice_reprosum_setopts(repro_sum_use_ddpdd_in, & logical, intent(in), optional :: repro_sum_use_ddpdd_in !< Use DDPDD algorithm instead of fixed precision algorithm real(r8), intent(in), optional :: repro_sum_rel_diff_max_in - !< maximum permissible difference between reproducible and + !< maximum permissible difference between reproducible and !< nonreproducible sums logical, intent(in), optional :: repro_sum_recompute_in - !< recompute using different algorithm when difference between - !< reproducible and nonreproducible sums is too great + !< recompute using different algorithm when difference between + !< reproducible and nonreproducible sums is too great logical, intent(in), optional :: repro_sum_master !< flag indicating whether this process should output !< log messages @@ -187,10 +187,10 @@ end subroutine ice_reprosum_setopts !======================================================================== -! -! Purpose: -!> Compute the global sum of each field in "arr" using the indicated -!> communicator with a reproducible yet scalable implementation based +! +! 
Purpose: +!> Compute the global sum of each field in "arr" using the indicated +!> communicator with a reproducible yet scalable implementation based !> on a fixed point algorithm. An alternative is to use an "almost !> always reproducible" floating point algorithm. ! @@ -198,65 +198,65 @@ end subroutine ice_reprosum_setopts ! number of "levels" of integer expansion. The algorithm will calculate ! the number of levels that is required for the sum to be essentially ! exact. The optional parameter arr_max_levels can be used to override -! the calculated value. The optional parameter arr_max_levels_out can be +! the calculated value. The optional parameter arr_max_levels_out can be ! used to return the values used. ! -! The algorithm also requires an upper bound on -! the maximum summand (in absolute value) for each field, and will +! The algorithm also requires an upper bound on +! the maximum summand (in absolute value) for each field, and will ! calculate this internally. However, if the optional parameters ! arr_max_levels and arr_gbl_max are both set, then the algorithm will ! use the values in arr_gbl_max for the upper bounds instead. If these ! are not upper bounds, or if the upper bounds are not tight enough ! to achieve the requisite accuracy, and if the optional parameter ! repro_sum_validate is NOT set to .false., the algorithm will repeat the -! computation with appropriate upper bounds. If only arr_gbl_max is present, +! computation with appropriate upper bounds. If only arr_gbl_max is present, ! then the maxima are computed internally (and the specified values are -! ignored). The optional parameter arr_gbl_max_out can be +! ignored). The optional parameter arr_gbl_max_out can be ! used to return the values used. ! ! Finally, the algorithm requires an upper bound on the number of -! local summands across all processes. This will be calculated internally, -! using an MPI collective, but the value in the optional argument +! local summands across all processes. This will be calculated internally, +! using an MPI collective, but the value in the optional argument ! gbl_max_nsummands will be used instead if (1) it is present, (2) -! it is > 0, and (3) the maximum value and required number of levels -! are also specified. (If the maximum value is calculated, the same -! MPI collective is used to determine the maximum number of local -! summands.) The accuracy of the user-specified value is not checked. -! However, if set to < 1, the value will instead be calculated. If the -! optional parameter gbl_max_nsummands_out is present, then the value -! used (gbl_max_nsummands if >= 1; calculated otherwise) will be +! it is > 0, and (3) the maximum value and required number of levels +! are also specified. (If the maximum value is calculated, the same +! MPI collective is used to determine the maximum number of local +! summands.) The accuracy of the user-specified value is not checked. +! However, if set to < 1, the value will instead be calculated. If the +! optional parameter gbl_max_nsummands_out is present, then the value +! used (gbl_max_nsummands if >= 1; calculated otherwise) will be ! returned. ! ! If requested (by setting ice_reprosum_reldiffmax >= 0.0 and passing in -! the optional rel_diff parameter), results are compared with a -! nonreproducible floating point algorithm. +! the optional rel_diff parameter), results are compared with a +! nonreproducible floating point algorithm. ! -! Note that the cost of the algorithm is not strongly correlated with +! 
Note that the cost of the algorithm is not strongly correlated with ! the number of levels, which primarily shows up as a (modest) increase -! in cost of the MPI_Allreduce as a function of vector length. Rather the -! cost is more a function of (a) the number of integers required to +! in cost of the MPI_Allreduce as a function of vector length. Rather the +! cost is more a function of (a) the number of integers required to ! represent an individual summand and (b) the number of MPI_Allreduce -! calls. The number of integers required to represent an individual -! summand is 1 or 2 when using 8-byte integers for 8-byte real summands -! when the number of local summands is not too large. As the number of -! local summands increases, the number of integers required increases. +! calls. The number of integers required to represent an individual +! summand is 1 or 2 when using 8-byte integers for 8-byte real summands +! when the number of local summands is not too large. As the number of +! local summands increases, the number of integers required increases. ! The number of MPI_Allreduce calls is either 2 (specifying nothing) or -! 1 (specifying gbl_max_nsummands, arr_max_levels, and arr_gbl_max -! correctly). When specifying arr_max_levels and arr_gbl_max +! 1 (specifying gbl_max_nsummands, arr_max_levels, and arr_gbl_max +! correctly). When specifying arr_max_levels and arr_gbl_max ! incorrectly, 3 or 4 MPI_Allreduce calls will be required. ! ! The alternative algorithm is a minor modification of a parallel ! implementation of David Bailey's routine DDPDD by Helen He ! and Chris Ding. Bailey uses the Knuth trick to implement quadruple -! precision summation of double precision values with 10 double -! precision operations. The advantage of this algorithm is that +! precision summation of double precision values with 10 double +! precision operations. The advantage of this algorithm is that ! it requires a single MPI_Allreduce and is less expensive per summand -! than is the fixed precision algorithm. The disadvantage is that it -! is not guaranteed to be reproducible (though it is reproducible -! much more often than is the standard algorithm). This alternative -! is used when the optional parameter ddpdd_sum is set to .true. It is -! also used if the fixed precision algorithm radix assumption does not -! hold. +! than is the fixed precision algorithm. The disadvantage is that it +! is not guaranteed to be reproducible (though it is reproducible +! much more often than is the standard algorithm). This alternative +! is used when the optional parameter ddpdd_sum is set to .true. It is +! also used if the fixed precision algorithm radix assumption does not +! hold. 
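For readers unfamiliar with the DDPDD scheme referenced above: each partial
sum is carried as a high/low double-double pair, and pairs are combined with
Knuth's exact two-sum so that the rounding error of the high-order addition is
captured rather than lost. The sketch below is illustrative only (in the model
the equivalent operation is applied inside MPI_Allreduce through a
user-defined reduction operator, and the exact packing may differ):

      subroutine demo_ddpdd_add(a, b)
      ! Combine two double-double values packed as complex (hi, lo) pairs.
         implicit none
         integer, parameter :: r8 = selected_real_kind(13)
         complex(r8), intent(in)    :: a   ! summand (hi, lo)
         complex(r8), intent(inout) :: b   ! running sum (hi, lo), updated
         real(r8) :: t1, e, t2, hi
         t1 = real(a) + real(b)                        ! high-order sum
         e  = t1 - real(a)                             ! Knuth two-sum: recover the
         t2 = ((real(b) - e) + (real(a) - (t1 - e))) & ! exact rounding error, then
              + aimag(a) + aimag(b)                    ! fold in the low-order parts
         hi = t1 + t2
         b  = cmplx(hi, t2 - (hi - t1), r8)            ! renormalize to (hi, lo)
      end subroutine demo_ddpdd_add

Because the error term is carried explicitly, the result is far less sensitive
to summation order than a plain floating point reduction, which is why the
commentary above describes DDPDD as "almost always reproducible" rather than
guaranteed.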
!---------------------------------------------------------------------- subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & @@ -273,10 +273,10 @@ subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & integer, intent(in) :: nsummands !< number of local summands integer, intent(in) :: dsummands !< declared first dimension integer, intent(in) :: nflds !< number of fields - real(r8), intent(in) :: arr(dsummands,nflds) + real(r8), intent(in) :: arr(dsummands,nflds) !< input array - real(r8), intent(out):: arr_gsum(nflds) + real(r8), intent(out):: arr_gsum(nflds) !< global means logical, intent(in), optional :: ddpdd_sum @@ -287,7 +287,7 @@ subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & !< upper bound on max(abs(arr)) real(r8), intent(out), optional :: arr_gbl_max_out(nflds) - !< calculated upper bound on + !< calculated upper bound on !< max(abs(arr)) integer, intent(in), optional :: arr_max_levels(nflds) @@ -306,13 +306,13 @@ subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & !< calculated maximum nsummands !< over all processes - integer, intent(in), optional :: gbl_count + integer, intent(in), optional :: gbl_count !< was total number of summands; - !< now is ignored; use + !< now is ignored; use !< gbl_max_nsummands instead logical, intent(in), optional :: repro_sum_validate - !< flag enabling/disabling testing that gmax and max_levels are + !< flag enabling/disabling testing that gmax and max_levels are !< accurate/sufficient. Default is enabled. integer, intent(inout), optional :: repro_sum_stats(5) @@ -325,21 +325,21 @@ subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & real(r8), intent(out), optional :: rel_diff(2,nflds) !< relative and absolute - !< differences between fixed + !< differences between fixed !< and floating point sums - integer, intent(in), optional :: commid + integer, intent(in), optional :: commid !< MPI communicator ! Local workspace logical :: use_ddpdd_sum ! flag indicating whether to ! use ice_reprosum_ddpdd or not - logical :: recompute ! flag indicating need to - ! determine gmax/gmin before + logical :: recompute ! flag indicating need to + ! determine gmax/gmin before ! computing sum - logical :: validate ! flag indicating need to - ! verify gmax and max_levels + logical :: validate ! flag indicating need to + ! verify gmax and max_levels ! are accurate/sufficient integer :: omp_nthreads ! number of OpenMP threads integer :: mpi_comm ! MPI subcommunicator @@ -364,12 +364,12 @@ subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & integer :: arr_lextremes(0:nflds,2)! local exponent extrema integer :: arr_gextremes(0:nflds,2)! global exponent extrema - integer :: arr_gmax_exp(nflds) ! global exponents maxima + integer :: arr_gmax_exp(nflds) ! global exponents maxima integer :: arr_gmin_exp(nflds) ! global exponents minima - integer :: arr_max_shift ! maximum safe exponent for + integer :: arr_max_shift ! maximum safe exponent for ! value < 1 (so that sum does ! not overflow) - integer :: max_levels(nflds) ! maximum number of levels of + integer :: max_levels(nflds) ! maximum number of levels of ! integer expansion to use integer :: max_level ! maximum value in max_levels integer :: gbl_max_red ! global max local sum reduction? (0/1) @@ -380,16 +380,16 @@ subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & real(r8) :: xmax_nsummands ! dble of max_nsummands real(r8) :: arr_lsum(nflds) ! local sums - real(r8) :: arr_gsum_fast(nflds) ! 
global sum calculated using - ! fast, nonreproducible, + real(r8) :: arr_gsum_fast(nflds) ! global sum calculated using + ! fast, nonreproducible, ! floating point alg. - real(r8) :: abs_diff ! absolute difference between - ! fixed and floating point + real(r8) :: abs_diff ! absolute difference between + ! fixed and floating point ! sums #ifdef _OPENMP integer omp_get_max_threads external omp_get_max_threads -#endif +#endif character(len=*),parameter :: subname = '(ice_reprosum_calc)' !----------------------------------------------------------------------- @@ -483,7 +483,7 @@ subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & if (.not. recompute) then -! determine maximum number of summands in local phases of the +! determine maximum number of summands in local phases of the ! algorithm ! if (detailed_timing) call xicex_timer_start("repro_sum_allr_max") if ( present(gbl_max_nsummands) ) then @@ -509,7 +509,7 @@ subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & endif ! if (detailed_timing) call xicex_timer_stop("repro_sum_allr_max") -! determine maximum shift. Shift needs to be small enough that summation +! determine maximum shift. Shift needs to be small enough that summation ! does not exceed maximum number of digits in i8. ! if requested, return max_nsummands before it is redefined @@ -544,7 +544,7 @@ subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & repro_sum_fast = 1 if (recompute) then repro_sum_both = 1 - else + else ! if requested, return specified levels and upper bounds on maxima if ( present(arr_max_levels_out) ) then do ifld=1,nflds @@ -560,7 +560,7 @@ subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & endif endif -! do not have sufficient information; calculate global max/min and +! do not have sufficient information; calculate global max/min and ! use to compute required number of levels if (recompute) then @@ -622,13 +622,13 @@ subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & arr_gmin_exp(:) = arr_gextremes(1:nflds,2) ! if a field is identically zero, arr_gmin_exp still equals MAXEXPONENT -! and arr_gmax_exp still equals MINEXPONENT. In this case, set +! and arr_gmax_exp still equals MINEXPONENT. In this case, set ! arr_gmin_exp = arr_gmax_exp = MINEXPONENT do ifld=1,nflds arr_gmin_exp(ifld) = min(arr_gmax_exp(ifld),arr_gmin_exp(ifld)) enddo -! if requested, return upper bounds on observed maxima +! if requested, return upper bounds on observed maxima if ( present(arr_gbl_max_out) ) then do ifld=1,nflds arr_gbl_max_out(ifld) = scale(1.0_r8,arr_gmax_exp(ifld)) @@ -641,7 +641,7 @@ subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & endif ! determine maximum shift (same as in previous branch, but with calculated -! max_nsummands). Shift needs to be small enough that summation does not +! max_nsummands). Shift needs to be small enough that summation does not ! exceed maximum number of digits in i8. ! summing within each thread first @@ -763,14 +763,14 @@ subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & end subroutine ice_reprosum_calc !======================================================================== -! -! Purpose: -!> Compute the global sum of each field in "arr" using the indicated -!> communicator with a reproducible yet scalable implementation based +! +! Purpose: +!> Compute the global sum of each field in "arr" using the indicated +!> communicator with a reproducible yet scalable implementation based !> on a fixed point algorithm. 
The accuracy of the fixed point algorithm !> is controlled by the number of "levels" of integer expansion, the -!> maximum value of which is specified by max_level. -! +!> maximum value of which is specified by max_level. +! !---------------------------------------------------------------------- subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & @@ -785,29 +785,29 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & integer, intent(in) :: nsummands !< number of local summands integer, intent(in) :: dsummands !< declared first dimension integer, intent(in) :: nflds !< number of fields - integer, intent(in) :: arr_max_shift !< maximum safe exponent for - !< value < 1 (so that sum + integer, intent(in) :: arr_max_shift !< maximum safe exponent for + !< value < 1 (so that sum !< does not overflow) - integer, intent(in) :: arr_gmax_exp(nflds) + integer, intent(in) :: arr_gmax_exp(nflds) !< exponents of global maxima - integer, intent(in) :: max_levels(nflds) - !< maximum number of levels + integer, intent(in) :: max_levels(nflds) + !< maximum number of levels !< of integer expansion - integer, intent(in) :: max_level !< maximum value in + integer, intent(in) :: max_level !< maximum value in !< max_levels integer, intent(in) :: omp_nthreads !< number of OpenMP threads integer, intent(in) :: mpi_comm !< MPI subcommunicator - real(r8), intent(in) :: arr(dsummands,nflds) + real(r8), intent(in) :: arr(dsummands,nflds) !< input array logical, intent(in):: validate !< flag indicating that accuracy of solution generated from !< arr_gmax_exp and max_levels should be tested - logical, intent(out):: recompute + logical, intent(out):: recompute !< flag indicating that either the upper bounds are inaccurate, - !< or max_levels and arr_gmax_exp do not generate accurate + !< or max_levels and arr_gmax_exp do not generate accurate !< enough sums real(r8), intent(out):: arr_gsum(nflds) !< global means @@ -817,27 +817,27 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & integer, parameter :: max_jlevel = & 1 + (digits(0_i8)/digits(0.0_r8)) - integer(i8) :: i8_arr_tlsum_level(0:max_level,nflds,omp_nthreads) - ! integer vector representing local + integer(i8) :: i8_arr_tlsum_level(0:max_level,nflds,omp_nthreads) + ! integer vector representing local ! sum (per thread, per field) - integer(i8) :: i8_arr_lsum_level((max_level+3)*nflds) - ! integer vector representing local + integer(i8) :: i8_arr_lsum_level((max_level+3)*nflds) + ! integer vector representing local ! sum integer(i8) :: i8_arr_level ! integer part of summand for current ! expansion level - integer(i8) :: i8_arr_gsum_level((max_level+3)*nflds) + integer(i8) :: i8_arr_gsum_level((max_level+3)*nflds) ! integer vector representing global ! sum - integer(i8) :: IX_8 ! integer representation of current - ! jlevels of X_8 ('part' of + integer(i8) :: IX_8 ! integer representation of current + ! jlevels of X_8 ('part' of ! i8_arr_gsum_level) integer(i8) :: i8_sign ! sign global sum integer(i8) :: i8_radix ! radix for i8 variables - integer :: max_error(nflds,omp_nthreads) + integer :: max_error(nflds,omp_nthreads) ! accurate upper bound on data? - integer :: not_exact(nflds,omp_nthreads) - ! max_levels sufficient to + integer :: not_exact(nflds,omp_nthreads) + ! max_levels sufficient to ! capture all digits? integer :: isum_beg(omp_nthreads), isum_end(omp_nthreads) ! 
range of summand indices for each @@ -845,16 +845,16 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & integer :: ifld, isum, ithread ! loop variables integer :: arr_exp ! exponent of summand - integer :: arr_shift ! exponent used to generate integer + integer :: arr_shift ! exponent used to generate integer ! for current expansion level integer :: ilevel ! current integer expansion level - integer :: offset(nflds) ! beginning location in - ! i8_arr_{g,l}sum_level for integer + integer :: offset(nflds) ! beginning location in + ! i8_arr_{g,l}sum_level for integer ! expansion of current ifld - integer :: voffset ! modification to offset used to - ! include validation metrics + integer :: voffset ! modification to offset used to + ! include validation metrics integer :: ioffset ! offset(ifld) - integer :: jlevel ! number of floating point 'pieces' + integer :: jlevel ! number of floating point 'pieces' ! extracted from a given i8 integer integer :: ierr ! MPI error return integer :: LX(max_jlevel) ! exponent of X_8 (see below) @@ -867,13 +867,13 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & ! reconstruction from integer vector real(r8) :: arr_frac ! fraction of summand - real(r8) :: arr_remainder ! part of summand remaining after + real(r8) :: arr_remainder ! part of summand remaining after ! current level of integer expansion - real(r8) :: X_8(max_jlevel) ! r8 vector representation of current + real(r8) :: X_8(max_jlevel) ! r8 vector representation of current ! i8_arr_gsum_level - real(r8) :: RX_8 ! r8 representation of difference + real(r8) :: RX_8 ! r8 representation of difference ! between current i8_arr_gsum_level - ! and current jlevels of X_8 + ! and current jlevels of X_8 ! (== IX_8). Also used in final ! scaling step @@ -940,7 +940,7 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & ! calculate first shift arr_shift = arr_max_shift - (arr_gmax_exp(ifld)-arr_exp) -! determine first (probably) nonzero level (assuming initial fraction is +! determine first (probably) nonzero level (assuming initial fraction is ! 'normal' - algorithm still works if this is not true) ! NOTE: this is critical; scale will set to zero if min exponent is too small. if (arr_shift < 1) then @@ -956,7 +956,7 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & endif if (ilevel .le. max_levels(ifld)) then -! apply first shift/truncate, add it to the relevant running +! apply first shift/truncate, add it to the relevant running ! sum, and calculate the remainder. arr_remainder = scale(arr_frac,arr_shift) i8_arr_level = int(arr_remainder,i8) @@ -964,7 +964,7 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & i8_arr_tlsum_level(ilevel,ifld,ithread) + i8_arr_level arr_remainder = arr_remainder - i8_arr_level -! while the remainder is non-zero, continue to shift, truncate, +! while the remainder is non-zero, continue to shift, truncate, ! sum, and calculate new remainder do while ((arr_remainder .ne. 0.0_r8) & .and. (ilevel < max_levels(ifld))) @@ -986,9 +986,9 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & enddo ! postprocess integer vector to eliminate potential for overlap in the following -! sums over threads and processes: if value larger than or equal to -! (radix(IX_8)**arr_max_shift), add this 'overlap' to next larger integer in -! vector, resulting in nonoverlapping ranges for each component. Note that +! 
sums over threads and processes: if value larger than or equal to +! (radix(IX_8)**arr_max_shift), add this 'overlap' to next larger integer in +! vector, resulting in nonoverlapping ranges for each component. Note that ! "ilevel-1==0" corresponds to an extra level used to guarantee that the sums ! over threads and processes do not overflow for ilevel==1. do ilevel=max_levels(ifld),1,-1 @@ -1035,12 +1035,12 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & #if defined (NO_I8) ! Workaround for when i8 is not supported. ! if (detailed_timing) call xicex_timer_start("repro_sum_allr_i4") - call mpi_allreduce (i8_arr_lsum_level, i8_arr_gsum_level, & + call mpi_allreduce (i8_arr_lsum_level, i8_arr_gsum_level, & veclth, MPI_INTEGER, MPI_SUM, mpi_comm, ierr) ! if (detailed_timing) call xicex_timer_stop("repro_sum_allr_i4") #else ! if (detailed_timing) call xicex_timer_start("repro_sum_allr_i8") - call mpi_allreduce (i8_arr_lsum_level, i8_arr_gsum_level, & + call mpi_allreduce (i8_arr_lsum_level, i8_arr_gsum_level, & veclth, MPI_INTEGER8, MPI_SUM, mpi_comm, ierr) ! if (detailed_timing) call xicex_timer_stop("repro_sum_allr_i8") #endif @@ -1055,10 +1055,10 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & ! alternate. To avoid this, do some arithmetic with integer vectors so that all ! components have the same sign. This should keep relative difference between ! using different integer sizes (e.g. i8 and i4) to machine epsilon -! 3) assignment to X_8 will usually lose accuracy since maximum integer -! size is greater than the max number of 'digits' in r8 value (if xmax_nsummands -! correction not very large). Calculate remainder and add in first (since -! smaller). One correction is sufficient for r8 (53 digits) and i8 (63 digits). +! 3) assignment to X_8 will usually lose accuracy since maximum integer +! size is greater than the max number of 'digits' in r8 value (if xmax_nsummands +! correction not very large). Calculate remainder and add in first (since +! smaller). One correction is sufficient for r8 (53 digits) and i8 (63 digits). ! For r4 (24 digits) may need to correct twice. Code is written in a general ! fashion, to work no matter how many corrections are necessary (assuming ! max_jlevel parameter calculation is correct). @@ -1079,7 +1079,7 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & if (.not. recompute) then ! preprocess integer vector: -! a) if value larger than or equal to (radix(IX_8)**arr_max_shift), add this 'overlap' +! a) if value larger than or equal to (radix(IX_8)**arr_max_shift), add this 'overlap' ! to next larger integer in vector, resulting in nonoverlapping ranges for each ! component. Note that have "ilevel-1=0" level here as described above. do ilevel=max_levels(ifld),1,-1 @@ -1093,9 +1093,9 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & - IX_8 endif enddo -! b) subtract +/- 1 from larger and add +/- 1 to smaller when necessary -! so that all vector components have the same sign (eliminating loss -! of accuracy arising from difference of large values when +! b) subtract +/- 1 from larger and add +/- 1 to smaller when necessary +! so that all vector components have the same sign (eliminating loss +! of accuracy arising from difference of large values when ! reconstructing r8 sum from integer vector) ilevel = 0 do while ((i8_arr_gsum_level(ioffset+ilevel) .eq. 
0_i8) & @@ -1117,7 +1117,7 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & i8_arr_gsum_level(ioffset+jlevel+1) = i8_arr_gsum_level(ioffset+jlevel+1) & + i8_sign*(i8_radix**arr_max_shift) endif - enddo + enddo endif ! start with maximum shift, and work up to larger values @@ -1130,7 +1130,7 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & if (i8_arr_gsum_level(ioffset+ilevel) .ne. 0_i8) then jlevel = 1 -! r8 representation of higher order bits in integer +! r8 representation of higher order bits in integer X_8(jlevel) = i8_arr_gsum_level(ioffset+ilevel) LX(jlevel) = exponent(X_8(jlevel)) @@ -1148,7 +1148,7 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & enddo ! add in contributions, smaller to larger, rescaling for each -! addition to guarantee that exponent of working summand is always +! addition to guarantee that exponent of working summand is always ! larger than minexponent do while (jlevel > 0) if (first) then @@ -1172,7 +1172,7 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & ! apply final exponent correction, scaling first if exponent is too small ! to apply directly corr_exp = curr_exp + exponent(arr_gsum(ifld)) - if (corr_exp .ge. MINEXPONENT(1._r8)) then + if (corr_exp .ge. MINEXPONENT(1._r8)) then arr_gsum(ifld) = set_exponent(arr_gsum(ifld),corr_exp) else RX_8 = set_exponent(arr_gsum(ifld), & @@ -1180,9 +1180,9 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & arr_gsum(ifld) = scale(RX_8,MINEXPONENT(1._r8)) endif -! if validate is .true. and some precision lost, test whether 'too much' +! if validate is .true. and some precision lost, test whether 'too much' ! was lost, due to too loose an upper bound, too stringent a limit on number -! of levels of expansion, cancellation, .... Calculated by comparing lower +! of levels of expansion, cancellation, .... Calculated by comparing lower ! bound on number of sigificant digits with number of digits in 1.0_r8 . if (validate) then if (i8_arr_gsum_level(ioffset-voffset+2) .ne. 0_i8) then @@ -1216,11 +1216,11 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & end subroutine ice_reprosum_int !======================================================================== -! -! Purpose: +! +! Purpose: !> Test whether distributed sum exceeds tolerance and print out a !> warning message. -! +! !---------------------------------------------------------------------- logical function ice_reprosum_tolExceeded (name, nflds, master, & @@ -1233,11 +1233,11 @@ logical function ice_reprosum_tolExceeded (name, nflds, master, & integer, intent(in) :: nflds !< number of fields logical, intent(in) :: master !< process that will write !< warning messages? - integer, optional, intent(in) :: logunit!< unit warning messages + integer, optional, intent(in) :: logunit!< unit warning messages !< written to real(r8), intent(in) :: rel_diff(2,nflds) !< relative and absolute - !< differences between fixed + !< differences between fixed !< and floating point sums ! Local workspace @@ -1301,12 +1301,12 @@ logical function ice_reprosum_tolExceeded (name, nflds, master, & end function ice_reprosum_tolExceeded !======================================================================== -! -! Purpose: -!> Compute the global sum of each field in "arr" using the indicated -!> communicator with a reproducible yet scalable implementation based +! +! 
Purpose: +!> Compute the global sum of each field in "arr" using the indicated +!> communicator with a reproducible yet scalable implementation based !> on He and Ding's implementation of the double-double algorithm. -! +! !---------------------------------------------------------------------- subroutine ice_reprosum_ddpdd (arr, arr_gsum, nsummands, dsummands, & @@ -1318,11 +1318,11 @@ subroutine ice_reprosum_ddpdd (arr, arr_gsum, nsummands, dsummands, & integer, intent(in) :: nsummands !< number of local summands integer, intent(in) :: dsummands !< declared first dimension integer, intent(in) :: nflds !< number of fields - real(r8), intent(in) :: arr(dsummands,nflds) + real(r8), intent(in) :: arr(dsummands,nflds) !< input array integer, intent(in) :: mpi_comm !< MPI subcommunicator - real(r8), intent(out):: arr_gsum(nflds) + real(r8), intent(out):: arr_gsum(nflds) !< global sums @@ -1360,8 +1360,8 @@ subroutine ice_reprosum_ddpdd (arr, arr_gsum, nsummands, dsummands, & arr_lsum_dd(ifld) = (0.0_r8,0.0_r8) do isum=1,nsummands - - ! Compute arr(isum,ifld) + arr_lsum_dd(ifld) using Knuth''s + + ! Compute arr(isum,ifld) + arr_lsum_dd(ifld) using Knuth''s ! trick. t1 = arr(isum,ifld) + real(arr_lsum_dd(ifld)) e = t1 - arr(isum,ifld) @@ -1393,11 +1393,11 @@ end subroutine ice_reprosum_ddpdd subroutine DDPDD (dda, ddb, len, itype) !---------------------------------------------------------------------- -! -! Purpose: -! Modification of original codes written by David H. Bailey +! +! Purpose: +! Modification of original codes written by David H. Bailey ! This subroutine computes ddb(i) = dda(i)+ddb(i) -! +! !---------------------------------------------------------------------- ! Arguments @@ -1433,10 +1433,10 @@ end subroutine DDPDD subroutine split_indices(total,num_pieces,ibeg,iend) !---------------------------------------------------------------------- -! -! Purpose: +! +! Purpose: ! Split range into 'num_pieces' -! +! !---------------------------------------------------------------------- ! Arguments diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_timers.F90 b/cicecore/cicedynB/infrastructure/comm/mpi/ice_timers.F90 index abec3758f..baab6f49b 100644 --- a/cicecore/cicedynB/infrastructure/comm/mpi/ice_timers.F90 +++ b/cicecore/cicedynB/infrastructure/comm/mpi/ice_timers.F90 @@ -223,7 +223,7 @@ end subroutine init_ice_timers subroutine get_ice_timer(timer_id, name_choice, num_blocks, num_nodes) -! This routine initializes a timer with a given name and returns a +! This routine initializes a timer with a given name and returns a ! timer id. character (*), intent(in) :: & @@ -236,7 +236,7 @@ subroutine get_ice_timer(timer_id, name_choice, num_blocks, num_nodes) ! threaded region) integer (int_kind), intent(out) :: & - timer_id ! timer number assigned to this timer + timer_id ! timer number assigned to this timer !----------------------------------------------------------------------- ! @@ -267,7 +267,7 @@ subroutine get_ice_timer(timer_id, name_choice, num_blocks, num_nodes) all_timers(n)%name = name_choice all_timers(n)%in_use = .true. 
all_timers(n)%num_blocks = num_blocks - all_timers(n)%num_nodes = num_nodes + all_timers(n)%num_nodes = num_nodes allocate(all_timers(n)%block_started (num_blocks), & all_timers(n)%block_cycles1 (num_blocks), & @@ -285,7 +285,7 @@ subroutine get_ice_timer(timer_id, name_choice, num_blocks, num_nodes) if (srch_error /= 0) & call abort_ice(subname//'ERROR: Exceeded maximum number of timers') - + !----------------------------------------------------------------------- @@ -326,7 +326,7 @@ subroutine ice_timer_clear(timer_id) all_timers(timer_id)%block_accum_time(:) = c0 else call abort_ice(subname//'ERROR: attempt to reset undefined timer') - + endif !----------------------------------------------------------------------- @@ -386,7 +386,7 @@ subroutine ice_timer_start(timer_id, block_id) !*** another thread. if already started, keep track !*** of number of start requests in order to match !*** start and stop requests - + !$OMP CRITICAL if (.not. all_timers(timer_id)%node_started) then @@ -419,18 +419,18 @@ subroutine ice_timer_start(timer_id, block_id) endif else call abort_ice(subname//'ERROR: attempt to start undefined timer') - + endif !----------------------------------------------------------------------- end subroutine ice_timer_start - + !*********************************************************************** subroutine ice_timer_stop(timer_id, block_id) -! This routine stops a given node timer if appropriate. If block +! This routine stops a given node timer if appropriate. If block ! information is available the appropriate block timer is also stopped. integer (int_kind), intent(in) :: & @@ -491,7 +491,7 @@ subroutine ice_timer_stop(timer_id, block_id) !*** stop node timer if number of requested stops !*** matches the number of starts (to avoid stopping !*** a node timer started by multiple threads) - + cycles1 = all_timers(timer_id)%node_cycles1 !$OMP CRITICAL @@ -530,13 +530,13 @@ subroutine ice_timer_stop(timer_id, block_id) endif else call abort_ice(subname//'ERROR: attempt to stop undefined timer') - + endif !----------------------------------------------------------------------- end subroutine ice_timer_stop - + !*********************************************************************** subroutine ice_timer_print(timer_id,stats) @@ -560,7 +560,7 @@ subroutine ice_timer_print(timer_id,stats) integer (int_kind) :: & n,icount, & ! dummy loop index and counter - nBlocks + nBlocks logical (log_kind) :: & lrestart_timer ! flag to restart timer if timer is running @@ -613,7 +613,7 @@ subroutine ice_timer_print(timer_id,stats) local_time = c0 endif max_time = global_maxval(local_time,distrb_info) - + if (my_task == master_task) then write (nu_diag,timer_format) timer_id, & trim(all_timers(timer_id)%name),max_time @@ -735,7 +735,7 @@ end subroutine ice_timer_print_all subroutine ice_timer_check(timer_id,block_id) ! This routine checks a given timer by stopping and restarting the -! timer. This is primarily used to periodically accumulate time in +! timer. This is primarily used to periodically accumulate time in ! the timer to prevent timer cycles from wrapping around max_cycles. integer (int_kind), intent(in) :: & diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 b/cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 index eb8f5d948..cafe4dc05 100644 --- a/cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 +++ b/cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 @@ -3,12 +3,12 @@ module ice_boundary ! 
This module contains data types and routines for updating halo -! regions (ghost cells) +! regions (ghost cells) ! ! 2007-07-19: Phil Jones, Yoshi Yoshida, John Dennis ! new naming conventions, optimizations during -! initialization, true multi-dimensional updates -! (rather than serial call to two-dimensional updates), +! initialization, true multi-dimensional updates +! (rather than serial call to two-dimensional updates), ! fixes for non-existent blocks ! 2008-01-28: Elizabeth Hunke replaced old routines with new POP ! infrastructure @@ -140,8 +140,8 @@ function ice_HaloCreate(dist, nsBoundaryType, ewBoundaryType, & seBlock, swBlock, &! block id southeast, southwest nbrs srcProc, dstProc, &! source, dest processor locations srcLocalID, dstLocalID, &! local block index of src,dst blocks - blockSizeX, &! size of default physical domain in X - blockSizeY, &! size of default physical domain in Y + blockSizeX, &! size of default physical domain in X + blockSizeY, &! size of default physical domain in Y eastMsgSize, westMsgSize, &! nominal sizes for e-w msgs northMsgSize, southMsgSize, &! nominal sizes for n-s msgs tripoleRows, &! number of rows in tripole buffer @@ -258,7 +258,7 @@ function ice_HaloCreate(dist, nsBoundaryType, ewBoundaryType, & srcProc, dstProc, northMsgSize) !*** if a tripole boundary block, also create a local - !*** message into and out of tripole buffer + !*** message into and out of tripole buffer if (tripoleBlock) then !*** copy in @@ -268,7 +268,7 @@ function ice_HaloCreate(dist, nsBoundaryType, ewBoundaryType, & !*** copy out of tripole buffer - includes halo call ice_HaloIncrementMsgCount(sendCount, recvCount, & - srcProc, srcProc, & + srcProc, srcProc, & (nghost+1)*nx_block) endif @@ -346,7 +346,7 @@ function ice_HaloCreate(dist, nsBoundaryType, ewBoundaryType, & ewBoundaryType, nsBoundaryType) if (neBlock > 0) then - msgSize = cornerMsgSize ! normal corner message + msgSize = cornerMsgSize ! normal corner message call ice_distributionGetBlockLoc(dist, neBlock, dstProc, & dstLocalID) @@ -425,7 +425,7 @@ function ice_HaloCreate(dist, nsBoundaryType, ewBoundaryType, & !----------------------------------------------------------------------- ! -! if messages are received from the same processor, the message is +! if messages are received from the same processor, the message is ! actually a local copy - count them and reset to zero ! !----------------------------------------------------------------------- @@ -526,7 +526,7 @@ function ice_HaloCreate(dist, nsBoundaryType, ewBoundaryType, & !*** for tripole grids, send a north tripole message to !*** the west block to make sure enough information is !*** available for tripole manipulations - + if (tripoleBlock) then call ice_HaloMsgCreate(halo, dist, iblock, -westBlock, 'north') endif @@ -752,7 +752,7 @@ subroutine ice_HaloUpdate2DR8(array, halo, & !----------------------------------------------------------------------- ! ! do local copies while waiting for messages to complete -! if srcBlock is zero, that denotes an eliminated land block or a +! if srcBlock is zero, that denotes an eliminated land block or a ! closed boundary where ghost cell values are undefined ! if srcBlock is less than zero, the message is a copy out of the ! tripole buffer and will be treated later @@ -793,7 +793,7 @@ subroutine ice_HaloUpdate2DR8(array, halo, & ! ! take care of northern boundary in tripole case ! bufTripole array contains the top nghost+1 rows (u-fold) or nghost+2 rows -! (T-fold) of physical domain for entire (global) top row +! 
(T-fold) of physical domain for entire (global) top row ! !----------------------------------------------------------------------- @@ -814,13 +814,13 @@ subroutine ice_HaloUpdate2DR8(array, halo, & select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = -1 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do i = 2,nxGlobal/2 iDst = nxGlobal - i + 2 x1 = bufTripoleR8(i ,halo%tripoleRows) @@ -829,20 +829,20 @@ subroutine ice_HaloUpdate2DR8(array, halo, & bufTripoleR8(i ,halo%tripoleRows) = xavg bufTripoleR8(iDst,halo%tripoleRows) = isign*xavg end do - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 0 joffset = 1 - + case (field_loc_Eface) ! cell center location - + ioffset = 0 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do i = 1,nxGlobal/2 iDst = nxGlobal + 1 - i x1 = bufTripoleR8(i ,halo%tripoleRows) @@ -851,12 +851,12 @@ subroutine ice_HaloUpdate2DR8(array, halo, & bufTripoleR8(i ,halo%tripoleRows) = xavg bufTripoleR8(iDst,halo%tripoleRows) = isign*xavg end do - + case (field_loc_Nface) ! cell corner (velocity) location - + ioffset = -1 joffset = 1 - + case default call abort_ice(subname//'ERROR: Unknown field location') end select @@ -865,18 +865,18 @@ subroutine ice_HaloUpdate2DR8(array, halo, & select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = 0 joffset = 0 - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 1 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do i = 1,nxGlobal/2 - 1 iDst = nxGlobal - i x1 = bufTripoleR8(i ,halo%tripoleRows) @@ -885,20 +885,20 @@ subroutine ice_HaloUpdate2DR8(array, halo, & bufTripoleR8(i ,halo%tripoleRows) = xavg bufTripoleR8(iDst,halo%tripoleRows) = isign*xavg end do - + case (field_loc_Eface) ! cell center location - + ioffset = 1 joffset = 0 - + case (field_loc_Nface) ! cell corner (velocity) location - + ioffset = 0 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do i = 1,nxGlobal/2 iDst = nxGlobal + 1 - i x1 = bufTripoleR8(i ,halo%tripoleRows) @@ -907,7 +907,7 @@ subroutine ice_HaloUpdate2DR8(array, halo, & bufTripoleR8(i ,halo%tripoleRows) = xavg bufTripoleR8(iDst,halo%tripoleRows) = isign*xavg end do - + case default call abort_ice(subname//'ERROR: Unknown field location') end select @@ -1053,7 +1053,7 @@ subroutine ice_HaloUpdate2DR4(array, halo, & !----------------------------------------------------------------------- ! ! do local copies while waiting for messages to complete -! if srcBlock is zero, that denotes an eliminated land block or a +! if srcBlock is zero, that denotes an eliminated land block or a ! closed boundary where ghost cell values are undefined ! if srcBlock is less than zero, the message is a copy out of the ! tripole buffer and will be treated later @@ -1106,13 +1106,13 @@ subroutine ice_HaloUpdate2DR4(array, halo, & select case (fieldLoc) case (field_loc_center) ! 
cell center location - + ioffset = -1 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do i = 2,nxGlobal/2 iDst = nxGlobal - i + 2 x1 = bufTripoleR4(i ,halo%tripoleRows) @@ -1121,20 +1121,20 @@ subroutine ice_HaloUpdate2DR4(array, halo, & bufTripoleR4(i ,halo%tripoleRows) = xavg bufTripoleR4(iDst,halo%tripoleRows) = isign*xavg end do - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 0 joffset = 1 - + case (field_loc_Eface) ! cell center location - + ioffset = 0 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do i = 1,nxGlobal/2 iDst = nxGlobal + 1 - i x1 = bufTripoleR4(i ,halo%tripoleRows) @@ -1143,32 +1143,32 @@ subroutine ice_HaloUpdate2DR4(array, halo, & bufTripoleR4(i ,halo%tripoleRows) = xavg bufTripoleR4(iDst,halo%tripoleRows) = isign*xavg end do - + case (field_loc_Nface) ! cell corner (velocity) location - + ioffset = -1 joffset = 1 - + case default call abort_ice(subname//'ERROR: Unknown field location') - end select + end select else ! tripole u-fold - + select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = 0 joffset = 0 - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 1 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do i = 1,nxGlobal/2 - 1 iDst = nxGlobal - i x1 = bufTripoleR4(i ,halo%tripoleRows) @@ -1177,20 +1177,20 @@ subroutine ice_HaloUpdate2DR4(array, halo, & bufTripoleR4(i ,halo%tripoleRows) = xavg bufTripoleR4(iDst,halo%tripoleRows) = isign*xavg end do - + case (field_loc_Eface) ! cell center location - + ioffset = 1 joffset = 0 - + case (field_loc_Nface) ! cell corner (velocity) location - + ioffset = 0 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do i = 1,nxGlobal/2 iDst = nxGlobal + 1 - i x1 = bufTripoleR4(i ,halo%tripoleRows) @@ -1199,7 +1199,7 @@ subroutine ice_HaloUpdate2DR4(array, halo, & bufTripoleR4(i ,halo%tripoleRows) = xavg bufTripoleR4(iDst,halo%tripoleRows) = isign*xavg end do - + case default call abort_ice(subname//'ERROR: Unknown field location') end select @@ -1345,7 +1345,7 @@ subroutine ice_HaloUpdate2DI4(array, halo, & !----------------------------------------------------------------------- ! ! do local copies while waiting for messages to complete -! if srcBlock is zero, that denotes an eliminated land block or a +! if srcBlock is zero, that denotes an eliminated land block or a ! closed boundary where ghost cell values are undefined ! if srcBlock is less than zero, the message is a copy out of the ! tripole buffer and will be treated later @@ -1398,13 +1398,13 @@ subroutine ice_HaloUpdate2DI4(array, halo, & select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = -1 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do i = 2,nxGlobal/2 iDst = nxGlobal - i + 2 x1 = bufTripoleI4(i ,halo%tripoleRows) @@ -1413,20 +1413,20 @@ subroutine ice_HaloUpdate2DI4(array, halo, & bufTripoleI4(i ,halo%tripoleRows) = xavg bufTripoleI4(iDst,halo%tripoleRows) = isign*xavg end do - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 0 joffset = 1 - + case (field_loc_Eface) ! 
cell center location - + ioffset = 0 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do i = 1,nxGlobal/2 iDst = nxGlobal + 1 - i x1 = bufTripoleI4(i ,halo%tripoleRows) @@ -1435,32 +1435,32 @@ subroutine ice_HaloUpdate2DI4(array, halo, & bufTripoleI4(i ,halo%tripoleRows) = xavg bufTripoleI4(iDst,halo%tripoleRows) = isign*xavg end do - + case (field_loc_Nface) ! cell corner (velocity) location - + ioffset = -1 joffset = 1 - + case default call abort_ice(subname//'ERROR: Unknown field location') end select - else ! tripole u-fold - + else ! tripole u-fold + select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = 0 joffset = 0 - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 1 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do i = 1,nxGlobal/2 - 1 iDst = nxGlobal - i x1 = bufTripoleI4(i ,halo%tripoleRows) @@ -1469,20 +1469,20 @@ subroutine ice_HaloUpdate2DI4(array, halo, & bufTripoleI4(i ,halo%tripoleRows) = xavg bufTripoleI4(iDst,halo%tripoleRows) = isign*xavg end do - + case (field_loc_Eface) ! cell center location - + ioffset = 1 joffset = 0 - + case (field_loc_Nface) ! cell corner (velocity) location - + ioffset = 0 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do i = 1,nxGlobal/2 iDst = nxGlobal + 1 - i x1 = bufTripoleI4(i ,halo%tripoleRows) @@ -1491,7 +1491,7 @@ subroutine ice_HaloUpdate2DI4(array, halo, & bufTripoleI4(i ,halo%tripoleRows) = xavg bufTripoleI4(iDst,halo%tripoleRows) = isign*xavg end do - + case default call abort_ice(subname//'ERROR: Unknown field location') end select @@ -1617,7 +1617,7 @@ subroutine ice_HaloUpdate3DR8(array, halo, & nxGlobal = size(bufTripoleR8,dim=1) allocate(bufTripole(nxGlobal,halo%tripoleRows,nz)) bufTripole = fill - endif + endif !----------------------------------------------------------------------- ! @@ -1644,7 +1644,7 @@ subroutine ice_HaloUpdate3DR8(array, halo, & !----------------------------------------------------------------------- ! ! do local copies -! if srcBlock is zero, that denotes an eliminated land block or a +! if srcBlock is zero, that denotes an eliminated land block or a ! closed boundary where ghost cell values are undefined ! if srcBlock is less than zero, the message is a copy out of the ! tripole buffer and will be treated later @@ -1703,10 +1703,10 @@ subroutine ice_HaloUpdate3DR8(array, halo, & select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = -1 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value @@ -1720,20 +1720,20 @@ subroutine ice_HaloUpdate3DR8(array, halo, & bufTripole(iDst,halo%tripoleRows,k) = isign*xavg end do end do - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 0 joffset = 1 - + case (field_loc_Eface) ! cell center location - + ioffset = 0 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do k=1,nz do i = 1,nxGlobal/2 iDst = nxGlobal + 1 - i @@ -1744,32 +1744,32 @@ subroutine ice_HaloUpdate3DR8(array, halo, & bufTripole(iDst,halo%tripoleRows,k) = isign*xavg end do end do - + case (field_loc_Nface) ! 
cell corner (velocity) location - + ioffset = -1 joffset = 1 - + case default call abort_ice(subname//'ERROR: Unknown field location') - end select - + end select + else ! tripole u-fold - + select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = 0 joffset = 0 - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 1 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do k=1,nz do i = 1,nxGlobal/2 - 1 iDst = nxGlobal - i @@ -1780,20 +1780,20 @@ subroutine ice_HaloUpdate3DR8(array, halo, & bufTripole(iDst,halo%tripoleRows,k) = isign*xavg end do end do - + case (field_loc_Eface) ! cell center location - + ioffset = 1 joffset = 0 - + case (field_loc_Nface) ! cell corner (velocity) location - + ioffset = 0 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do k=1,nz do i = 1,nxGlobal/2 iDst = nxGlobal + 1 - i @@ -1804,7 +1804,7 @@ subroutine ice_HaloUpdate3DR8(array, halo, & bufTripole(iDst,halo%tripoleRows,k) = isign*xavg end do end do - + case default call abort_ice(subname//'ERROR: Unknown field location') end select @@ -1962,7 +1962,7 @@ subroutine ice_HaloUpdate3DR4(array, halo, & !----------------------------------------------------------------------- ! ! do local copies -! if srcBlock is zero, that denotes an eliminated land block or a +! if srcBlock is zero, that denotes an eliminated land block or a ! closed boundary where ghost cell values are undefined ! if srcBlock is less than zero, the message is a copy out of the ! tripole buffer and will be treated later @@ -2021,10 +2021,10 @@ subroutine ice_HaloUpdate3DR4(array, halo, & select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = -1 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value @@ -2038,20 +2038,20 @@ subroutine ice_HaloUpdate3DR4(array, halo, & bufTripole(iDst,halo%tripoleRows,k) = isign*xavg end do end do - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 0 joffset = 1 - + case (field_loc_Eface) ! cell center location - + ioffset = 0 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do k=1,nz do i = 1,nxGlobal/2 iDst = nxGlobal + 1 - i @@ -2062,32 +2062,32 @@ subroutine ice_HaloUpdate3DR4(array, halo, & bufTripole(iDst,halo%tripoleRows,k) = isign*xavg end do end do - + case (field_loc_Nface) ! cell corner (velocity) location - + ioffset = -1 joffset = 1 - + case default call abort_ice(subname//'ERROR: Unknown field location') - end select - - else ! tripole u-fold - + end select + + else ! tripole u-fold + select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = 0 joffset = 0 - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 1 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do k=1,nz do i = 1,nxGlobal/2 - 1 iDst = nxGlobal - i @@ -2098,20 +2098,20 @@ subroutine ice_HaloUpdate3DR4(array, halo, & bufTripole(iDst,halo%tripoleRows,k) = isign*xavg end do end do - + case (field_loc_Eface) ! cell center location - + ioffset = 1 joffset = 0 - + case (field_loc_Nface) ! 
cell corner (velocity) location - + ioffset = 0 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do k=1,nz do i = 1,nxGlobal/2 iDst = nxGlobal + 1 - i @@ -2122,7 +2122,7 @@ subroutine ice_HaloUpdate3DR4(array, halo, & bufTripole(iDst,halo%tripoleRows,k) = isign*xavg end do end do - + case default call abort_ice(subname//'ERROR: Unknown field location') end select @@ -2280,7 +2280,7 @@ subroutine ice_HaloUpdate3DI4(array, halo, & !----------------------------------------------------------------------- ! ! do local copies -! if srcBlock is zero, that denotes an eliminated land block or a +! if srcBlock is zero, that denotes an eliminated land block or a ! closed boundary where ghost cell values are undefined ! if srcBlock is less than zero, the message is a copy out of the ! tripole buffer and will be treated later @@ -2323,7 +2323,7 @@ subroutine ice_HaloUpdate3DI4(array, halo, & !----------------------------------------------------------------------- if (nxGlobal > 0) then - + select case (fieldKind) case (field_type_scalar) isign = 1 @@ -2339,10 +2339,10 @@ subroutine ice_HaloUpdate3DI4(array, halo, & select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = -1 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value @@ -2356,20 +2356,20 @@ subroutine ice_HaloUpdate3DI4(array, halo, & bufTripole(iDst,halo%tripoleRows,k) = isign*xavg end do end do - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 0 joffset = 1 - + case (field_loc_Eface) ! cell center location - + ioffset = 0 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do k=1,nz do i = 1,nxGlobal/2 iDst = nxGlobal + 1 - i @@ -2380,32 +2380,32 @@ subroutine ice_HaloUpdate3DI4(array, halo, & bufTripole(iDst,halo%tripoleRows,k) = isign*xavg end do end do - + case (field_loc_Nface) ! cell corner (velocity) location - + ioffset = -1 joffset = 1 - + case default call abort_ice(subname//'ERROR: Unknown field location') end select - - else ! tripole u-fold + + else ! tripole u-fold select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = 0 joffset = 0 - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 1 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do k=1,nz do i = 1,nxGlobal/2 - 1 iDst = nxGlobal - i @@ -2416,20 +2416,20 @@ subroutine ice_HaloUpdate3DI4(array, halo, & bufTripole(iDst,halo%tripoleRows,k) = isign*xavg end do end do - + case (field_loc_Eface) ! cell center location - + ioffset = 1 joffset = 0 - + case (field_loc_Nface) ! cell corner (velocity) location - + ioffset = 0 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do k=1,nz do i = 1,nxGlobal/2 iDst = nxGlobal + 1 - i @@ -2440,11 +2440,11 @@ subroutine ice_HaloUpdate3DI4(array, halo, & bufTripole(iDst,halo%tripoleRows,k) = isign*xavg end do end do - + case default call abort_ice(subname//'ERROR: Unknown field location') end select - + endif !*** copy out of global tripole buffer into local @@ -2599,7 +2599,7 @@ subroutine ice_HaloUpdate4DR8(array, halo, & !----------------------------------------------------------------------- ! ! do local copies -! if srcBlock is zero, that denotes an eliminated land block or a +! 
if srcBlock is zero, that denotes an eliminated land block or a ! closed boundary where ghost cell values are undefined ! if srcBlock is less than zero, the message is a copy out of the ! tripole buffer and will be treated later @@ -2648,7 +2648,7 @@ subroutine ice_HaloUpdate4DR8(array, halo, & !----------------------------------------------------------------------- if (nxGlobal > 0) then - + select case (fieldKind) case (field_type_scalar) isign = 1 @@ -2664,10 +2664,10 @@ subroutine ice_HaloUpdate4DR8(array, halo, & select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = -1 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value @@ -2683,17 +2683,17 @@ subroutine ice_HaloUpdate4DR8(array, halo, & end do end do end do - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 0 joffset = 1 - + case (field_loc_Eface) ! cell center location - + ioffset = 0 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value @@ -2709,32 +2709,32 @@ subroutine ice_HaloUpdate4DR8(array, halo, & end do end do end do - + case (field_loc_Nface) ! cell corner (velocity) location - + ioffset = -1 joffset = 1 - + case default call abort_ice(subname//'ERROR: Unknown field location') - end select - - else ! tripole u-fold + end select + + else ! tripole u-fold select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = 0 joffset = 0 - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 1 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do l=1,nt do k=1,nz do i = 1,nxGlobal/2 - 1 @@ -2747,20 +2747,20 @@ subroutine ice_HaloUpdate4DR8(array, halo, & end do end do end do - + case (field_loc_Eface) ! cell center location - + ioffset = 1 joffset = 0 - + case (field_loc_Nface) ! cell corner (velocity) location - + ioffset = 0 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do l=1,nt do k=1,nz do i = 1,nxGlobal/2 @@ -2773,11 +2773,11 @@ subroutine ice_HaloUpdate4DR8(array, halo, & end do end do end do - + case default call abort_ice(subname//'ERROR: Unknown field location') end select - + endif !*** copy out of global tripole buffer into local @@ -2934,7 +2934,7 @@ subroutine ice_HaloUpdate4DR4(array, halo, & !----------------------------------------------------------------------- ! ! do local copies -! if srcBlock is zero, that denotes an eliminated land block or a +! if srcBlock is zero, that denotes an eliminated land block or a ! closed boundary where ghost cell values are undefined ! if srcBlock is less than zero, the message is a copy out of the ! tripole buffer and will be treated later @@ -2983,7 +2983,7 @@ subroutine ice_HaloUpdate4DR4(array, halo, & !----------------------------------------------------------------------- if (nxGlobal > 0) then - + select case (fieldKind) case (field_type_scalar) isign = 1 @@ -2999,10 +2999,10 @@ subroutine ice_HaloUpdate4DR4(array, halo, & select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = -1 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value @@ -3018,17 +3018,17 @@ subroutine ice_HaloUpdate4DR4(array, halo, & end do end do end do - + case (field_loc_NEcorner) ! 
cell corner location - + ioffset = 0 joffset = 1 - + case (field_loc_Eface) ! cell center location - + ioffset = 0 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value @@ -3044,32 +3044,32 @@ subroutine ice_HaloUpdate4DR4(array, halo, & end do end do end do - + case (field_loc_Nface) ! cell corner (velocity) location - + ioffset = -1 joffset = 1 - + case default call abort_ice(subname//'ERROR: Unknown field location') - end select - - else ! tripole u-fold + end select + + else ! tripole u-fold select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = 0 joffset = 0 - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 1 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do l=1,nt do k=1,nz do i = 1,nxGlobal/2 - 1 @@ -3082,20 +3082,20 @@ subroutine ice_HaloUpdate4DR4(array, halo, & end do end do end do - + case (field_loc_Eface) ! cell center location - + ioffset = 1 joffset = 0 - + case (field_loc_Nface) ! cell corner (velocity) location - + ioffset = 0 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do l=1,nt do k=1,nz do i = 1,nxGlobal/2 @@ -3108,11 +3108,11 @@ subroutine ice_HaloUpdate4DR4(array, halo, & end do end do end do - + case default call abort_ice(subname//'ERROR: Unknown field location') end select - + endif !*** copy out of global tripole buffer into local @@ -3269,7 +3269,7 @@ subroutine ice_HaloUpdate4DI4(array, halo, & !----------------------------------------------------------------------- ! ! do local copies -! if srcBlock is zero, that denotes an eliminated land block or a +! if srcBlock is zero, that denotes an eliminated land block or a ! closed boundary where ghost cell values are undefined ! if srcBlock is less than zero, the message is a copy out of the ! tripole buffer and will be treated later @@ -3334,10 +3334,10 @@ subroutine ice_HaloUpdate4DI4(array, halo, & select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = -1 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value @@ -3353,17 +3353,17 @@ subroutine ice_HaloUpdate4DI4(array, halo, & end do end do end do - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 0 joffset = 1 - + case (field_loc_Eface) ! cell center location - + ioffset = 0 joffset = 0 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value @@ -3379,32 +3379,32 @@ subroutine ice_HaloUpdate4DI4(array, halo, & end do end do end do - + case (field_loc_Nface) ! cell corner (velocity) location - + ioffset = -1 joffset = 1 - + case default call abort_ice(subname//'ERROR: Unknown field location') - end select - - else ! tripole u-fold + end select + + else ! tripole u-fold select case (fieldLoc) case (field_loc_center) ! cell center location - + ioffset = 0 joffset = 0 - + case (field_loc_NEcorner) ! cell corner location - + ioffset = 1 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do l=1,nt do k=1,nz do i = 1,nxGlobal/2 - 1 @@ -3417,20 +3417,20 @@ subroutine ice_HaloUpdate4DI4(array, halo, & end do end do end do - + case (field_loc_Eface) ! cell center location - + ioffset = 1 joffset = 0 - + case (field_loc_Nface) ! 
cell corner (velocity) location - + ioffset = 0 joffset = 1 - + !*** top row is degenerate, so must enforce symmetry !*** use average of two degenerate points for value - + do l=1,nt do k=1,nz do i = 1,nxGlobal/2 @@ -3443,11 +3443,11 @@ subroutine ice_HaloUpdate4DI4(array, halo, & end do end do end do - + case default call abort_ice(subname//'ERROR: Unknown field location') end select - + endif !*** copy out of global tripole buffer into local @@ -3587,7 +3587,7 @@ subroutine ice_HaloUpdate_stress(array1, array2, halo, & !----------------------------------------------------------------------- ! ! do local copies -! if srcBlock is zero, that denotes an eliminated land block or a +! if srcBlock is zero, that denotes an eliminated land block or a ! closed boundary where ghost cell values are undefined ! if srcBlock is less than zero, the message is a copy out of the ! tripole buffer and will be treated later @@ -3616,7 +3616,7 @@ subroutine ice_HaloUpdate_stress(array1, array2, halo, & ! ! take care of northern boundary in tripole case ! bufTripole array contains the top haloWidth+1 rows of physical -! domain for entire (global) top row +! domain for entire (global) top row ! !----------------------------------------------------------------------- @@ -3644,12 +3644,12 @@ subroutine ice_HaloUpdate_stress(array1, array2, halo, & ioffset = 1 joffset = 1 - case (field_loc_Eface) + case (field_loc_Eface) ioffset = 1 joffset = 0 - case (field_loc_Nface) + case (field_loc_Nface) ioffset = 0 joffset = 1 @@ -3680,7 +3680,7 @@ subroutine ice_HaloUpdate_stress(array1, array2, halo, & iSrc = iSrc - ioffset jSrc = jSrc - joffset if (iSrc == 0) iSrc = nxGlobal - + !*** for center and Eface, do not need to replace !*** top row of physical domain, so jSrc should be !*** out of range and skipped @@ -3735,7 +3735,7 @@ subroutine ice_HaloIncrementMsgCount(sndCounter, rcvCounter, & !----------------------------------------------------------------------- ! -! if destination all land or outside closed boundary (dstProc = 0), +! if destination all land or outside closed boundary (dstProc = 0), ! then no send is necessary, so do the rest only for dstProc /= 0 ! !----------------------------------------------------------------------- @@ -3744,7 +3744,7 @@ subroutine ice_HaloIncrementMsgCount(sndCounter, rcvCounter, & !----------------------------------------------------------------------- ! -! if the current processor is the source, must send data +! if the current processor is the source, must send data ! local copy if dstProc = srcProc ! !----------------------------------------------------------------------- @@ -3754,14 +3754,14 @@ subroutine ice_HaloIncrementMsgCount(sndCounter, rcvCounter, & !----------------------------------------------------------------------- ! -! if the current processor is the destination, must receive data +! if the current processor is the destination, must receive data ! local copy if dstProc = srcProc ! !----------------------------------------------------------------------- if (dstProc == my_task + 1) then - if (srcProc > 0) then + if (srcProc > 0) then !*** the source block has ocean points !*** count as a receive from srcProc @@ -3852,7 +3852,7 @@ subroutine ice_HaloMsgCreate(halo, dist, srcBlock, dstBlock, direction) !----------------------------------------------------------------------- ! -! if destination all land or outside closed boundary (dstProc = 0), +! if destination all land or outside closed boundary (dstProc = 0), ! 
then no send is necessary, so do the rest only for dstProc /= 0 ! !----------------------------------------------------------------------- @@ -3894,7 +3894,7 @@ subroutine ice_HaloMsgCreate(halo, dist, srcBlock, dstBlock, direction) !----------------------------------------------------------------------- if (srcProc == my_task+1 .and. & - dstProc == my_task+1) then + dstProc == my_task+1) then !*** compute addresses based on direction @@ -3989,7 +3989,7 @@ subroutine ice_HaloMsgCreate(halo, dist, srcBlock, dstBlock, direction) if (jeSrc - jbSrc + 1 < halo%tripoleRows) then call abort_ice(subname//'ERROR: not enough points in block for tripole') return - endif + endif msgIndx = halo%numLocalCopies @@ -4013,7 +4013,7 @@ subroutine ice_HaloMsgCreate(halo, dist, srcBlock, dstBlock, direction) else if (srcBlock < 0 .and. dstBlock > 0) then - !*** tripole grid - set up for copying out of + !*** tripole grid - set up for copying out of !*** tripole buffer into ghost cell domains !*** include e-w ghost cells @@ -4097,7 +4097,7 @@ subroutine ice_HaloMsgCreate(halo, dist, srcBlock, dstBlock, direction) else - !*** tripole grid - copy entire top halo+1 + !*** tripole grid - copy entire top halo+1 !*** rows into global buffer at src location msgIndx = halo%numLocalCopies @@ -4151,7 +4151,7 @@ subroutine ice_HaloMsgCreate(halo, dist, srcBlock, dstBlock, direction) else - !*** tripole grid - copy entire top halo+1 + !*** tripole grid - copy entire top halo+1 !*** rows into global buffer at src location msgIndx = halo%numLocalCopies @@ -4235,12 +4235,12 @@ subroutine ice_HaloMsgCreate(halo, dist, srcBlock, dstBlock, direction) !----------------------------------------------------------------------- ! -! if dest block is local and source block does not exist, create a +! if dest block is local and source block does not exist, create a ! local copy to fill halo with a fill value ! !----------------------------------------------------------------------- - else if (srcProc == 0 .and. dstProc == my_task+1) then + else if (srcProc == 0 .and. dstProc == my_task+1) then !*** compute addresses based on direction @@ -4481,14 +4481,14 @@ end subroutine ice_HaloMsgCreate subroutine ice_HaloExtrapolate2DR8(ARRAY,dist,ew_bndy_type,ns_bndy_type) -! This subroutine extrapolates ARRAY values into the first row or column -! of ghost cells, and is intended for grid variables whose ghost cells -! would otherwise be set using the default boundary conditions (Dirichlet +! This subroutine extrapolates ARRAY values into the first row or column +! of ghost cells, and is intended for grid variables whose ghost cells +! would otherwise be set using the default boundary conditions (Dirichlet ! or Neumann). ! Note: This routine will need to be modified for nghost > 1. ! We assume padding occurs only on east and north edges. ! -! This is the specific interface for double precision arrays +! This is the specific interface for double precision arrays ! 
corresponding to the generic interface ice_HaloExtrapolate use ice_blocks, only: block, nblocks_x, nblocks_y, get_block diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_gather_scatter.F90 b/cicecore/cicedynB/infrastructure/comm/serial/ice_gather_scatter.F90 index 4b0bb1f9e..34cca2d03 100644 --- a/cicecore/cicedynB/infrastructure/comm/serial/ice_gather_scatter.F90 +++ b/cicecore/cicedynB/infrastructure/comm/serial/ice_gather_scatter.F90 @@ -346,7 +346,7 @@ subroutine gather_global_ext_dbl(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) real (dbl_kind), optional :: & spc_val - + real (dbl_kind), dimension(:,:), intent(inout) :: & ARRAY_G ! array containing global horizontal field on dst_task @@ -504,7 +504,7 @@ subroutine gather_global_ext_int(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) integer (int_kind), optional :: & spc_val - + integer (int_kind), dimension(:,:), intent(inout) :: & ARRAY_G ! array containing global horizontal field on dst_task @@ -662,7 +662,7 @@ subroutine gather_global_ext_log(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) logical (log_kind), optional :: & spc_val - + logical (log_kind), dimension(:,:), intent(inout) :: & ARRAY_G ! array containing global horizontal field on dst_task @@ -1581,7 +1581,7 @@ subroutine scatter_global_ext(ARRAY, ARRAY_G, src_task, dst_dist) !----------------------------------------------------------------------- ! -! initialize return array to zero +! initialize return array to zero ! !----------------------------------------------------------------------- @@ -1754,10 +1754,10 @@ subroutine scatter_global_stress(ARRAY, ARRAY_G1, ARRAY_G2, & this_block = get_block(1,1) ! for the tripoleTflag - all blocks have it if (this_block%tripoleTFlag) then - xoffset = 2 ! treat stresses as cell-centered scalars (they are not + xoffset = 2 ! treat stresses as cell-centered scalars (they are not yoffset = 0 ! shared with neighboring grid cells) else - xoffset = 1 ! treat stresses as cell-centered scalars (they are not + xoffset = 1 ! treat stresses as cell-centered scalars (they are not yoffset = 1 ! shared with neighboring grid cells) endif isign = 1 diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_global_reductions.F90 b/cicecore/cicedynB/infrastructure/comm/serial/ice_global_reductions.F90 index a024698d5..e859ea2bd 100644 --- a/cicecore/cicedynB/infrastructure/comm/serial/ice_global_reductions.F90 +++ b/cicecore/cicedynB/infrastructure/comm/serial/ice_global_reductions.F90 @@ -534,7 +534,6 @@ function global_sum_scalar_dbl(scalar, dist) & !----------------------------------------------------------------------- integer (int_kind) :: & - ierr, &! mpi error flag numProcs, &! number of processor participating numBlocks, &! number of local blocks communicator ! communicator for this distribution @@ -604,7 +603,6 @@ function global_sum_scalar_real(scalar, dist) & !----------------------------------------------------------------------- integer (int_kind) :: & - ierr, &! mpi error flag numProcs, &! number of processor participating numBlocks, &! number of local blocks communicator ! communicator for this distribution @@ -715,7 +713,7 @@ end function global_sum_scalar_int function global_allreduce_sum_vector_dbl(vector, dist) & result(globalSums) -! Computes the global sums of sets of scalars (elements of 'vector') +! Computes the global sums of sets of scalars (elements of 'vector') ! distributed across a parallel machine. ! ! 
This is actually the specific interface for the generic global_allreduce_sum @@ -738,7 +736,6 @@ function global_allreduce_sum_vector_dbl(vector, dist) & !----------------------------------------------------------------------- integer (int_kind) :: & - ierr, &! mpi error flag numProcs, &! number of processor participating numBlocks, &! number of local blocks communicator, &! communicator for this distribution @@ -782,9 +779,9 @@ function global_sum_prod_dbl (array1, array2, dist, field_loc, & ! Computes the global sum of the physical domain of a product of ! two 2-d arrays. ! -! This is actually the specific interface for the generic +! This is actually the specific interface for the generic ! global_sum_prod function corresponding to double precision arrays. -! The generic interface is identical but will handle real and integer +! The generic interface is identical but will handle real and integer ! 2-d slabs. real (dbl_kind), dimension(:,:,:), intent(in) :: & @@ -920,9 +917,9 @@ function global_sum_prod_real (array1, array2, dist, field_loc, & ! Computes the global sum of the physical domain of a product of ! two 2-d arrays. ! -! This is actually the specific interface for the generic +! This is actually the specific interface for the generic ! global_sum_prod function corresponding to single precision arrays. -! The generic interface is identical but will handle real and integer +! The generic interface is identical but will handle real and integer ! 2-d slabs. real (real_kind), dimension(:,:,:), intent(in) :: & @@ -1058,9 +1055,9 @@ function global_sum_prod_int (array1, array2, dist, field_loc, & ! Computes the global sum of the physical domain of a product of ! two 2-d arrays. ! -! This is actually the specific interface for the generic +! This is actually the specific interface for the generic ! global_sum_prod function corresponding to integer arrays. -! The generic interface is identical but will handle real and integer +! The generic interface is identical but will handle real and integer ! 2-d slabs. integer (int_kind), dimension(:,:,:), intent(in) :: & @@ -1199,7 +1196,7 @@ function global_maxval_dbl (array, dist, lMask) & ! Computes the global maximum value of the physical domain of a 2-d field ! ! This is actually the specific interface for the generic global_maxval -! function corresponding to double precision arrays. +! function corresponding to double precision arrays. real (dbl_kind), dimension(:,:,:), intent(in) :: & array ! array for which max value needed @@ -1306,7 +1303,7 @@ function global_maxval_real (array, dist, lMask) & ! Computes the global maximum value of the physical domain of a 2-d field ! ! This is actually the specific interface for the generic global_maxval -! function corresponding to single precision arrays. +! function corresponding to single precision arrays. real (real_kind), dimension(:,:,:), intent(in) :: & array ! array for which max value needed @@ -1413,7 +1410,7 @@ function global_maxval_int (array, dist, lMask) & ! Computes the global maximum value of the physical domain of a 2-d field ! ! This is actually the specific interface for the generic global_maxval -! function corresponding to integer arrays. +! function corresponding to integer arrays. integer (int_kind), dimension(:,:,:), intent(in) :: & array ! array for which max value needed @@ -1521,7 +1518,7 @@ function global_maxval_scalar_dbl (scalar, dist) & ! a distributed machine. ! ! This is actually the specific interface for the generic global_maxval -! 
function corresponding to double precision scalars.
+! function corresponding to double precision scalars.
 
    real (dbl_kind), intent(in) :: &
       scalar               ! scalar for which max value needed
 
@@ -1579,7 +1576,7 @@ function global_maxval_scalar_real (scalar, dist) &
 ! a distributed machine.
 !
 ! This is actually the specific interface for the generic global_maxval
-! function corresponding to single precision scalars.
+! function corresponding to single precision scalars.
 
    real (real_kind), intent(in) :: &
       scalar               ! scalar for which max value needed
 
@@ -1637,7 +1634,7 @@ function global_maxval_scalar_int (scalar, dist) &
 ! a distributed machine.
 !
 ! This is actually the specific interface for the generic global_maxval
-! function corresponding to single precision scalars.
+! function corresponding to integer scalars.
 
    integer (int_kind), intent(in) :: &
       scalar               ! scalar for which max value needed
 
@@ -1695,7 +1692,7 @@ function global_maxval_scalar_int_nodist (scalar, communicator) &
 ! a communicator.  This method supports testing.
 !
 ! This is actually the specific interface for the generic global_maxval
-! function corresponding to single precision scalars.
+! function corresponding to integer scalars.
 
    integer (int_kind), intent(in) :: &
       scalar               ! scalar for which max value needed
 
@@ -1744,7 +1741,7 @@ function global_minval_dbl (array, dist, lMask) &
 ! Computes the global minimum value of the physical domain of a 2-d field
 !
 ! This is actually the specific interface for the generic global_minval
-! function corresponding to double precision arrays.
+! function corresponding to double precision arrays.
 
    real (dbl_kind), dimension(:,:,:), intent(in) :: &
       array                ! array for which min value needed
 
@@ -1851,7 +1848,7 @@ function global_minval_real (array, dist, lMask) &
 ! Computes the global minimum value of the physical domain of a 2-d field
 !
 ! This is actually the specific interface for the generic global_minval
-! function corresponding to single precision arrays.
+! function corresponding to single precision arrays.
 
    real (real_kind), dimension(:,:,:), intent(in) :: &
       array                ! array for which min value needed
 
@@ -1958,7 +1955,7 @@ function global_minval_int (array, dist, lMask) &
 ! Computes the global minimum value of the physical domain of a 2-d field
 !
 ! This is actually the specific interface for the generic global_minval
-! function corresponding to integer arrays.
+! function corresponding to integer arrays.
 
    integer (int_kind), dimension(:,:,:), intent(in) :: &
       array                ! array for which min value needed
 
@@ -2066,7 +2063,7 @@ function global_minval_scalar_dbl (scalar, dist) &
 ! a distributed machine.
 !
 ! This is actually the specific interface for the generic global_minval
-! function corresponding to double precision scalars.
+! function corresponding to double precision scalars.
 
    real (dbl_kind), intent(in) :: &
       scalar               ! scalar for which min value needed
 
@@ -2124,7 +2121,7 @@ function global_minval_scalar_real (scalar, dist) &
 ! a distributed machine.
 !
 ! This is actually the specific interface for the generic global_minval
-! function corresponding to single precision scalars.
+! function corresponding to single precision scalars.
 
    real (real_kind), intent(in) :: &
       scalar               ! scalar for which min value needed
 
@@ -2182,7 +2179,7 @@ function global_minval_scalar_int (scalar, dist) &
 ! a distributed machine.
 !
 ! This is actually the specific interface for the generic global_minval
-! function corresponding to single precision scalars.
+! function corresponding to integer scalars.
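+!
+! A minimal caller-side sketch of the generic interface (hedged: the
+! local variables below are illustrative and not part of this patch;
+! only the generic name global_minval and an initialized distribution
+! 'distrb_info' are assumed from the surrounding code):
+!
+!    integer (int_kind) :: localVal, globalMin
+!    localVal  = 42                    ! hypothetical per-task scalar
+!    globalMin = global_minval(localVal, distrb_info)
+!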
   integer (int_kind), intent(in) :: &
      scalar               ! scalar for which min value needed
 
@@ -2240,7 +2237,7 @@ function global_minval_scalar_int_nodist (scalar, communicator) &
 ! a communicator.  This method supports testing.
 !
 ! This is actually the specific interface for the generic global_minval
-! function corresponding to single precision scalars.
+! function corresponding to integer scalars.
 
    integer (int_kind), intent(in) :: &
       scalar               ! scalar for which min value needed
 
@@ -2300,7 +2297,7 @@ subroutine compute_sums_dbl(array2,sums8,mpicomm,numprocs)
 !  reprosum = fixed point method based on ordered double integer sums.
 !             that requires two scalar reductions per global sum.
 !             This is extremely likely to be bfb.
-!             (See Mirin and Worley, 2012, IJHPCA, 26, 1730,
+!             (See Mirin and Worley, 2012, IJHPCA, 26, 1730,
 !             https://journals.sagepub.com/doi/10.1177/1094342011412630)
 !  ddpdd    = parallel double-double algorithm using single scalar reduction.
 !             This is very likely to be bfb.
diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_reprosum.F90 b/cicecore/cicedynB/infrastructure/comm/serial/ice_reprosum.F90
index 13ff6fcb8..2c584bd94 100644
--- a/cicecore/cicedynB/infrastructure/comm/serial/ice_reprosum.F90
+++ b/cicecore/cicedynB/infrastructure/comm/serial/ice_reprosum.F90
@@ -4,34 +4,34 @@
 MODULE ice_reprosum
 
-!-----------------------------------------------------------------------
-!
-! Purpose:
-!> Compute reproducible global sums of a set of arrays across an MPI
+!-----------------------------------------------------------------------
+!
+! Purpose:
+!> Compute reproducible global sums of a set of arrays across an MPI
 !> subcommunicator
 !
-! Methods:
+! Methods:
 !> Compute using either or both a scalable, reproducible algorithm and a
 !> scalable, nonreproducible algorithm:
-!> * Reproducible (scalable):
+!> * Reproducible (scalable):
 !>   Convert to fixed point (integer vector representation) to enable
 !>   reproducibility when using MPI_Allreduce
 !> * Alternative usually reproducible (scalable):
-!>   Use parallel double-double algorithm due to Helen He and
+!>   Use parallel double-double algorithm due to Helen He and
 !>   Chris Ding, based on David Bailey's/Don Knuth's DDPDD algorithm
-!> * Nonreproducible (scalable):
+!> * Nonreproducible (scalable):
 !>   Floating point and MPI_Allreduce based.
-!> If computing both reproducible and nonreproducible sums, compare
+!> If computing both reproducible and nonreproducible sums, compare
 !> these and report relative difference (if absolute difference
 !> less than sum) or absolute difference back to calling routine.
 !
-! Author: P. Worley (based on suggestions from J. White for fixed
-!         point algorithm and on He/Ding paper for ddpdd
+! Author: P. Worley (based on suggestions from J. White for fixed
+!         point algorithm and on He/Ding paper for ddpdd
 !         algorithm)
 !
 ! Modified by T.Craig for CICE, March 2019 based on the public version in
 ! Oasis3-MCT_4.0.
-!
+!
 !-----------------------------------------------------------------------
 
 !-----------------------------------------------------------------------
 
@@ -66,7 +66,7 @@ MODULE ice_reprosum
    public :: &
       ice_reprosum_setopts,      &! set runtime options
      ice_reprosum_calc,         &! calculate distributed sum
-      ice_reprosum_tolExceeded    ! utility function to check relative
+      ice_reprosum_tolExceeded    ! utility function to check relative
!
differences against the tolerance !----------------------------------------------------------------------- @@ -94,8 +94,8 @@ MODULE ice_reprosum CONTAINS !======================================================================== -!----------------------------------------------------------------------- -! Purpose: +!----------------------------------------------------------------------- +! Purpose: !> Set runtime options ! Author: P. Worley !----------------------------------------------------------------------- @@ -110,11 +110,11 @@ subroutine ice_reprosum_setopts(repro_sum_use_ddpdd_in, & logical, intent(in), optional :: repro_sum_use_ddpdd_in !< Use DDPDD algorithm instead of fixed precision algorithm real(r8), intent(in), optional :: repro_sum_rel_diff_max_in - !< maximum permissible difference between reproducible and + !< maximum permissible difference between reproducible and !< nonreproducible sums logical, intent(in), optional :: repro_sum_recompute_in - !< recompute using different algorithm when difference between - !< reproducible and nonreproducible sums is too great + !< recompute using different algorithm when difference between + !< reproducible and nonreproducible sums is too great logical, intent(in), optional :: repro_sum_master !< flag indicating whether this process should output !< log messages @@ -188,10 +188,10 @@ end subroutine ice_reprosum_setopts !======================================================================== -! -! Purpose: -!> Compute the global sum of each field in "arr" using the indicated -!> communicator with a reproducible yet scalable implementation based +! +! Purpose: +!> Compute the global sum of each field in "arr" using the indicated +!> communicator with a reproducible yet scalable implementation based !> on a fixed point algorithm. An alternative is to use an "almost !> always reproducible" floating point algorithm. ! @@ -199,65 +199,65 @@ end subroutine ice_reprosum_setopts ! number of "levels" of integer expansion. The algorithm will calculate ! the number of levels that is required for the sum to be essentially ! exact. The optional parameter arr_max_levels can be used to override -! the calculated value. The optional parameter arr_max_levels_out can be +! the calculated value. The optional parameter arr_max_levels_out can be ! used to return the values used. ! -! The algorithm also requires an upper bound on -! the maximum summand (in absolute value) for each field, and will +! The algorithm also requires an upper bound on +! the maximum summand (in absolute value) for each field, and will ! calculate this internally. However, if the optional parameters ! arr_max_levels and arr_gbl_max are both set, then the algorithm will ! use the values in arr_gbl_max for the upper bounds instead. If these ! are not upper bounds, or if the upper bounds are not tight enough ! to achieve the requisite accuracy, and if the optional parameter ! repro_sum_validate is NOT set to .false., the algorithm will repeat the -! computation with appropriate upper bounds. If only arr_gbl_max is present, +! computation with appropriate upper bounds. If only arr_gbl_max is present, ! then the maxima are computed internally (and the specified values are -! ignored). The optional parameter arr_gbl_max_out can be +! ignored). The optional parameter arr_gbl_max_out can be ! used to return the values used. ! ! Finally, the algorithm requires an upper bound on the number of -! local summands across all processes. This will be calculated internally, -! 
using an MPI collective, but the value in the optional argument +! local summands across all processes. This will be calculated internally, +! using an MPI collective, but the value in the optional argument ! gbl_max_nsummands will be used instead if (1) it is present, (2) -! it is > 0, and (3) the maximum value and required number of levels -! are also specified. (If the maximum value is calculated, the same -! MPI collective is used to determine the maximum number of local -! summands.) The accuracy of the user-specified value is not checked. -! However, if set to < 1, the value will instead be calculated. If the -! optional parameter gbl_max_nsummands_out is present, then the value -! used (gbl_max_nsummands if >= 1; calculated otherwise) will be +! it is > 0, and (3) the maximum value and required number of levels +! are also specified. (If the maximum value is calculated, the same +! MPI collective is used to determine the maximum number of local +! summands.) The accuracy of the user-specified value is not checked. +! However, if set to < 1, the value will instead be calculated. If the +! optional parameter gbl_max_nsummands_out is present, then the value +! used (gbl_max_nsummands if >= 1; calculated otherwise) will be ! returned. ! ! If requested (by setting ice_reprosum_reldiffmax >= 0.0 and passing in -! the optional rel_diff parameter), results are compared with a -! nonreproducible floating point algorithm. +! the optional rel_diff parameter), results are compared with a +! nonreproducible floating point algorithm. ! -! Note that the cost of the algorithm is not strongly correlated with +! Note that the cost of the algorithm is not strongly correlated with ! the number of levels, which primarily shows up as a (modest) increase -! in cost of the MPI_Allreduce as a function of vector length. Rather the -! cost is more a function of (a) the number of integers required to +! in cost of the MPI_Allreduce as a function of vector length. Rather the +! cost is more a function of (a) the number of integers required to ! represent an individual summand and (b) the number of MPI_Allreduce -! calls. The number of integers required to represent an individual -! summand is 1 or 2 when using 8-byte integers for 8-byte real summands -! when the number of local summands is not too large. As the number of -! local summands increases, the number of integers required increases. +! calls. The number of integers required to represent an individual +! summand is 1 or 2 when using 8-byte integers for 8-byte real summands +! when the number of local summands is not too large. As the number of +! local summands increases, the number of integers required increases. ! The number of MPI_Allreduce calls is either 2 (specifying nothing) or -! 1 (specifying gbl_max_nsummands, arr_max_levels, and arr_gbl_max -! correctly). When specifying arr_max_levels and arr_gbl_max +! 1 (specifying gbl_max_nsummands, arr_max_levels, and arr_gbl_max +! correctly). When specifying arr_max_levels and arr_gbl_max ! incorrectly, 3 or 4 MPI_Allreduce calls will be required. ! ! The alternative algorithm is a minor modification of a parallel ! implementation of David Bailey's routine DDPDD by Helen He ! and Chris Ding. Bailey uses the Knuth trick to implement quadruple -! precision summation of double precision values with 10 double -! precision operations. The advantage of this algorithm is that +! precision summation of double precision values with 10 double +! precision operations. The advantage of this algorithm is that ! 
it requires a single MPI_Allreduce and is less expensive per summand -! than is the fixed precision algorithm. The disadvantage is that it -! is not guaranteed to be reproducible (though it is reproducible -! much more often than is the standard algorithm). This alternative -! is used when the optional parameter ddpdd_sum is set to .true. It is -! also used if the fixed precision algorithm radix assumption does not -! hold. +! than is the fixed precision algorithm. The disadvantage is that it +! is not guaranteed to be reproducible (though it is reproducible +! much more often than is the standard algorithm). This alternative +! is used when the optional parameter ddpdd_sum is set to .true. It is +! also used if the fixed precision algorithm radix assumption does not +! hold. !---------------------------------------------------------------------- subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & @@ -274,10 +274,10 @@ subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & integer, intent(in) :: nsummands !< number of local summands integer, intent(in) :: dsummands !< declared first dimension integer, intent(in) :: nflds !< number of fields - real(r8), intent(in) :: arr(dsummands,nflds) + real(r8), intent(in) :: arr(dsummands,nflds) !< input array - real(r8), intent(out):: arr_gsum(nflds) + real(r8), intent(out):: arr_gsum(nflds) !< global means logical, intent(in), optional :: ddpdd_sum @@ -288,7 +288,7 @@ subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & !< upper bound on max(abs(arr)) real(r8), intent(out), optional :: arr_gbl_max_out(nflds) - !< calculated upper bound on + !< calculated upper bound on !< max(abs(arr)) integer, intent(in), optional :: arr_max_levels(nflds) @@ -307,13 +307,13 @@ subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & !< calculated maximum nsummands !< over all processes - integer, intent(in), optional :: gbl_count + integer, intent(in), optional :: gbl_count !< was total number of summands; - !< now is ignored; use + !< now is ignored; use !< gbl_max_nsummands instead logical, intent(in), optional :: repro_sum_validate - !< flag enabling/disabling testing that gmax and max_levels are + !< flag enabling/disabling testing that gmax and max_levels are !< accurate/sufficient. Default is enabled. integer, intent(inout), optional :: repro_sum_stats(5) @@ -326,21 +326,21 @@ subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & real(r8), intent(out), optional :: rel_diff(2,nflds) !< relative and absolute - !< differences between fixed + !< differences between fixed !< and floating point sums - integer, intent(in), optional :: commid + integer, intent(in), optional :: commid !< MPI communicator ! Local workspace logical :: use_ddpdd_sum ! flag indicating whether to ! use ice_reprosum_ddpdd or not - logical :: recompute ! flag indicating need to - ! determine gmax/gmin before + logical :: recompute ! flag indicating need to + ! determine gmax/gmin before ! computing sum - logical :: validate ! flag indicating need to - ! verify gmax and max_levels + logical :: validate ! flag indicating need to + ! verify gmax and max_levels ! are accurate/sufficient integer :: omp_nthreads ! number of OpenMP threads integer :: mpi_comm ! MPI subcommunicator @@ -365,12 +365,12 @@ subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & integer :: arr_lextremes(0:nflds,2)! local exponent extrema integer :: arr_gextremes(0:nflds,2)! global exponent extrema - integer :: arr_gmax_exp(nflds) ! 
global exponents maxima + integer :: arr_gmax_exp(nflds) ! global exponents maxima integer :: arr_gmin_exp(nflds) ! global exponents minima - integer :: arr_max_shift ! maximum safe exponent for + integer :: arr_max_shift ! maximum safe exponent for ! value < 1 (so that sum does ! not overflow) - integer :: max_levels(nflds) ! maximum number of levels of + integer :: max_levels(nflds) ! maximum number of levels of ! integer expansion to use integer :: max_level ! maximum value in max_levels integer :: gbl_max_red ! global max local sum reduction? (0/1) @@ -381,16 +381,16 @@ subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & real(r8) :: xmax_nsummands ! dble of max_nsummands real(r8) :: arr_lsum(nflds) ! local sums - real(r8) :: arr_gsum_fast(nflds) ! global sum calculated using - ! fast, nonreproducible, + real(r8) :: arr_gsum_fast(nflds) ! global sum calculated using + ! fast, nonreproducible, ! floating point alg. - real(r8) :: abs_diff ! absolute difference between - ! fixed and floating point + real(r8) :: abs_diff ! absolute difference between + ! fixed and floating point ! sums #ifdef _OPENMP integer omp_get_max_threads external omp_get_max_threads -#endif +#endif character(len=*),parameter :: subname = '(ice_reprosum_calc)' !----------------------------------------------------------------------- @@ -484,7 +484,7 @@ subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & if (.not. recompute) then -! determine maximum number of summands in local phases of the +! determine maximum number of summands in local phases of the ! algorithm ! if (detailed_timing) call xicex_timer_start("repro_sum_allr_max") if ( present(gbl_max_nsummands) ) then @@ -510,7 +510,7 @@ subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & endif ! if (detailed_timing) call xicex_timer_stop("repro_sum_allr_max") -! determine maximum shift. Shift needs to be small enough that summation +! determine maximum shift. Shift needs to be small enough that summation ! does not exceed maximum number of digits in i8. ! if requested, return max_nsummands before it is redefined @@ -545,7 +545,7 @@ subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & repro_sum_fast = 1 if (recompute) then repro_sum_both = 1 - else + else ! if requested, return specified levels and upper bounds on maxima if ( present(arr_max_levels_out) ) then do ifld=1,nflds @@ -561,7 +561,7 @@ subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & endif endif -! do not have sufficient information; calculate global max/min and +! do not have sufficient information; calculate global max/min and ! use to compute required number of levels if (recompute) then @@ -623,13 +623,13 @@ subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & arr_gmin_exp(:) = arr_gextremes(1:nflds,2) ! if a field is identically zero, arr_gmin_exp still equals MAXEXPONENT -! and arr_gmax_exp still equals MINEXPONENT. In this case, set +! and arr_gmax_exp still equals MINEXPONENT. In this case, set ! arr_gmin_exp = arr_gmax_exp = MINEXPONENT do ifld=1,nflds arr_gmin_exp(ifld) = min(arr_gmax_exp(ifld),arr_gmin_exp(ifld)) enddo -! if requested, return upper bounds on observed maxima +! if requested, return upper bounds on observed maxima if ( present(arr_gbl_max_out) ) then do ifld=1,nflds arr_gbl_max_out(ifld) = scale(1.0_r8,arr_gmax_exp(ifld)) @@ -642,7 +642,7 @@ subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & endif ! 
determine maximum shift (same as in previous branch, but with calculated -! max_nsummands). Shift needs to be small enough that summation does not +! max_nsummands). Shift needs to be small enough that summation does not ! exceed maximum number of digits in i8. ! summing within each thread first @@ -764,14 +764,14 @@ subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & end subroutine ice_reprosum_calc !======================================================================== -! -! Purpose: -!> Compute the global sum of each field in "arr" using the indicated -!> communicator with a reproducible yet scalable implementation based +! +! Purpose: +!> Compute the global sum of each field in "arr" using the indicated +!> communicator with a reproducible yet scalable implementation based !> on a fixed point algorithm. The accuracy of the fixed point algorithm !> is controlled by the number of "levels" of integer expansion, the -!> maximum value of which is specified by max_level. -! +!> maximum value of which is specified by max_level. +! !---------------------------------------------------------------------- subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & @@ -786,29 +786,29 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & integer, intent(in) :: nsummands !< number of local summands integer, intent(in) :: dsummands !< declared first dimension integer, intent(in) :: nflds !< number of fields - integer, intent(in) :: arr_max_shift !< maximum safe exponent for - !< value < 1 (so that sum + integer, intent(in) :: arr_max_shift !< maximum safe exponent for + !< value < 1 (so that sum !< does not overflow) - integer, intent(in) :: arr_gmax_exp(nflds) + integer, intent(in) :: arr_gmax_exp(nflds) !< exponents of global maxima - integer, intent(in) :: max_levels(nflds) - !< maximum number of levels + integer, intent(in) :: max_levels(nflds) + !< maximum number of levels !< of integer expansion - integer, intent(in) :: max_level !< maximum value in + integer, intent(in) :: max_level !< maximum value in !< max_levels integer, intent(in) :: omp_nthreads !< number of OpenMP threads integer, intent(in) :: mpi_comm !< MPI subcommunicator - real(r8), intent(in) :: arr(dsummands,nflds) + real(r8), intent(in) :: arr(dsummands,nflds) !< input array logical, intent(in):: validate !< flag indicating that accuracy of solution generated from !< arr_gmax_exp and max_levels should be tested - logical, intent(out):: recompute + logical, intent(out):: recompute !< flag indicating that either the upper bounds are inaccurate, - !< or max_levels and arr_gmax_exp do not generate accurate + !< or max_levels and arr_gmax_exp do not generate accurate !< enough sums real(r8), intent(out):: arr_gsum(nflds) !< global means @@ -818,27 +818,27 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & integer, parameter :: max_jlevel = & 1 + (digits(0_i8)/digits(0.0_r8)) - integer(i8) :: i8_arr_tlsum_level(0:max_level,nflds,omp_nthreads) - ! integer vector representing local + integer(i8) :: i8_arr_tlsum_level(0:max_level,nflds,omp_nthreads) + ! integer vector representing local ! sum (per thread, per field) - integer(i8) :: i8_arr_lsum_level((max_level+3)*nflds) - ! integer vector representing local + integer(i8) :: i8_arr_lsum_level((max_level+3)*nflds) + ! integer vector representing local ! sum integer(i8) :: i8_arr_level ! integer part of summand for current ! 
expansion level - integer(i8) :: i8_arr_gsum_level((max_level+3)*nflds) + integer(i8) :: i8_arr_gsum_level((max_level+3)*nflds) ! integer vector representing global ! sum - integer(i8) :: IX_8 ! integer representation of current - ! jlevels of X_8 ('part' of + integer(i8) :: IX_8 ! integer representation of current + ! jlevels of X_8 ('part' of ! i8_arr_gsum_level) integer(i8) :: i8_sign ! sign global sum integer(i8) :: i8_radix ! radix for i8 variables - integer :: max_error(nflds,omp_nthreads) + integer :: max_error(nflds,omp_nthreads) ! accurate upper bound on data? - integer :: not_exact(nflds,omp_nthreads) - ! max_levels sufficient to + integer :: not_exact(nflds,omp_nthreads) + ! max_levels sufficient to ! capture all digits? integer :: isum_beg(omp_nthreads), isum_end(omp_nthreads) ! range of summand indices for each @@ -846,16 +846,16 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & integer :: ifld, isum, ithread ! loop variables integer :: arr_exp ! exponent of summand - integer :: arr_shift ! exponent used to generate integer + integer :: arr_shift ! exponent used to generate integer ! for current expansion level integer :: ilevel ! current integer expansion level - integer :: offset(nflds) ! beginning location in - ! i8_arr_{g,l}sum_level for integer + integer :: offset(nflds) ! beginning location in + ! i8_arr_{g,l}sum_level for integer ! expansion of current ifld - integer :: voffset ! modification to offset used to - ! include validation metrics + integer :: voffset ! modification to offset used to + ! include validation metrics integer :: ioffset ! offset(ifld) - integer :: jlevel ! number of floating point 'pieces' + integer :: jlevel ! number of floating point 'pieces' ! extracted from a given i8 integer integer :: ierr ! MPI error return integer :: LX(max_jlevel) ! exponent of X_8 (see below) @@ -868,13 +868,13 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & ! reconstruction from integer vector real(r8) :: arr_frac ! fraction of summand - real(r8) :: arr_remainder ! part of summand remaining after + real(r8) :: arr_remainder ! part of summand remaining after ! current level of integer expansion - real(r8) :: X_8(max_jlevel) ! r8 vector representation of current + real(r8) :: X_8(max_jlevel) ! r8 vector representation of current ! i8_arr_gsum_level - real(r8) :: RX_8 ! r8 representation of difference + real(r8) :: RX_8 ! r8 representation of difference ! between current i8_arr_gsum_level - ! and current jlevels of X_8 + ! and current jlevels of X_8 ! (== IX_8). Also used in final ! scaling step @@ -941,7 +941,7 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & ! calculate first shift arr_shift = arr_max_shift - (arr_gmax_exp(ifld)-arr_exp) -! determine first (probably) nonzero level (assuming initial fraction is +! determine first (probably) nonzero level (assuming initial fraction is ! 'normal' - algorithm still works if this is not true) ! NOTE: this is critical; scale will set to zero if min exponent is too small. if (arr_shift < 1) then @@ -957,7 +957,7 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & endif if (ilevel .le. max_levels(ifld)) then -! apply first shift/truncate, add it to the relevant running +! apply first shift/truncate, add it to the relevant running ! sum, and calculate the remainder. 
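+!
+! Worked example of this expansion (illustrative only; assumes radix-2
+! reals and, for brevity, arr_max_shift = 4 -- the code derives a much
+! larger safe shift from the number of digits in i8).  For a summand at
+! the global maximum exponent, the first arr_shift equals arr_max_shift:
+!   arr_frac = 0.828125 (= 53/64)
+!     level 1: scale(0.828125,4) = 13.25 -> integer part 13, remainder 0.25
+!     level 2: scale(0.25,4)     =  4.0  -> integer part  4, remainder 0.0
+!   so 53/64 is carried exactly by the integer vector (13, 4).
+!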
arr_remainder = scale(arr_frac,arr_shift) i8_arr_level = int(arr_remainder,i8) @@ -965,7 +965,7 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & i8_arr_tlsum_level(ilevel,ifld,ithread) + i8_arr_level arr_remainder = arr_remainder - i8_arr_level -! while the remainder is non-zero, continue to shift, truncate, +! while the remainder is non-zero, continue to shift, truncate, ! sum, and calculate new remainder do while ((arr_remainder .ne. 0.0_r8) & .and. (ilevel < max_levels(ifld))) @@ -987,9 +987,9 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & enddo ! postprocess integer vector to eliminate potential for overlap in the following -! sums over threads and processes: if value larger than or equal to -! (radix(IX_8)**arr_max_shift), add this 'overlap' to next larger integer in -! vector, resulting in nonoverlapping ranges for each component. Note that +! sums over threads and processes: if value larger than or equal to +! (radix(IX_8)**arr_max_shift), add this 'overlap' to next larger integer in +! vector, resulting in nonoverlapping ranges for each component. Note that ! "ilevel-1==0" corresponds to an extra level used to guarantee that the sums ! over threads and processes do not overflow for ilevel==1. do ilevel=max_levels(ifld),1,-1 @@ -1036,12 +1036,12 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & #if defined (NO_I8) ! Workaround for when i8 is not supported. ! if (detailed_timing) call xicex_timer_start("repro_sum_allr_i4") - call mpi_allreduce (i8_arr_lsum_level, i8_arr_gsum_level, & + call mpi_allreduce (i8_arr_lsum_level, i8_arr_gsum_level, & veclth, MPI_INTEGER, MPI_SUM, mpi_comm, ierr) ! if (detailed_timing) call xicex_timer_stop("repro_sum_allr_i4") #else ! if (detailed_timing) call xicex_timer_start("repro_sum_allr_i8") - call mpi_allreduce (i8_arr_lsum_level, i8_arr_gsum_level, & + call mpi_allreduce (i8_arr_lsum_level, i8_arr_gsum_level, & veclth, MPI_INTEGER8, MPI_SUM, mpi_comm, ierr) ! if (detailed_timing) call xicex_timer_stop("repro_sum_allr_i8") #endif @@ -1056,10 +1056,10 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & ! alternate. To avoid this, do some arithmetic with integer vectors so that all ! components have the same sign. This should keep relative difference between ! using different integer sizes (e.g. i8 and i4) to machine epsilon -! 3) assignment to X_8 will usually lose accuracy since maximum integer -! size is greater than the max number of 'digits' in r8 value (if xmax_nsummands -! correction not very large). Calculate remainder and add in first (since -! smaller). One correction is sufficient for r8 (53 digits) and i8 (63 digits). +! 3) assignment to X_8 will usually lose accuracy since maximum integer +! size is greater than the max number of 'digits' in r8 value (if xmax_nsummands +! correction not very large). Calculate remainder and add in first (since +! smaller). One correction is sufficient for r8 (53 digits) and i8 (63 digits). ! For r4 (24 digits) may need to correct twice. Code is written in a general ! fashion, to work no matter how many corrections are necessary (assuming ! max_jlevel parameter calculation is correct). @@ -1080,7 +1080,7 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & if (.not. recompute) then ! preprocess integer vector: -! a) if value larger than or equal to (radix(IX_8)**arr_max_shift), add this 'overlap' +! 
a) if value larger than or equal to (radix(IX_8)**arr_max_shift), add this 'overlap' ! to next larger integer in vector, resulting in nonoverlapping ranges for each ! component. Note that have "ilevel-1=0" level here as described above. do ilevel=max_levels(ifld),1,-1 @@ -1094,9 +1094,9 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & - IX_8 endif enddo -! b) subtract +/- 1 from larger and add +/- 1 to smaller when necessary -! so that all vector components have the same sign (eliminating loss -! of accuracy arising from difference of large values when +! b) subtract +/- 1 from larger and add +/- 1 to smaller when necessary +! so that all vector components have the same sign (eliminating loss +! of accuracy arising from difference of large values when ! reconstructing r8 sum from integer vector) ilevel = 0 do while ((i8_arr_gsum_level(ioffset+ilevel) .eq. 0_i8) & @@ -1118,7 +1118,7 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & i8_arr_gsum_level(ioffset+jlevel+1) = i8_arr_gsum_level(ioffset+jlevel+1) & + i8_sign*(i8_radix**arr_max_shift) endif - enddo + enddo endif ! start with maximum shift, and work up to larger values @@ -1131,7 +1131,7 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & if (i8_arr_gsum_level(ioffset+ilevel) .ne. 0_i8) then jlevel = 1 -! r8 representation of higher order bits in integer +! r8 representation of higher order bits in integer X_8(jlevel) = i8_arr_gsum_level(ioffset+ilevel) LX(jlevel) = exponent(X_8(jlevel)) @@ -1149,7 +1149,7 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & enddo ! add in contributions, smaller to larger, rescaling for each -! addition to guarantee that exponent of working summand is always +! addition to guarantee that exponent of working summand is always ! larger than minexponent do while (jlevel > 0) if (first) then @@ -1173,7 +1173,7 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & ! apply final exponent correction, scaling first if exponent is too small ! to apply directly corr_exp = curr_exp + exponent(arr_gsum(ifld)) - if (corr_exp .ge. MINEXPONENT(1._r8)) then + if (corr_exp .ge. MINEXPONENT(1._r8)) then arr_gsum(ifld) = set_exponent(arr_gsum(ifld),corr_exp) else RX_8 = set_exponent(arr_gsum(ifld), & @@ -1181,9 +1181,9 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & arr_gsum(ifld) = scale(RX_8,MINEXPONENT(1._r8)) endif -! if validate is .true. and some precision lost, test whether 'too much' +! if validate is .true. and some precision lost, test whether 'too much' ! was lost, due to too loose an upper bound, too stringent a limit on number -! of levels of expansion, cancellation, .... Calculated by comparing lower +! of levels of expansion, cancellation, .... Calculated by comparing lower ! bound on number of sigificant digits with number of digits in 1.0_r8 . if (validate) then if (i8_arr_gsum_level(ioffset-voffset+2) .ne. 0_i8) then @@ -1217,11 +1217,11 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & end subroutine ice_reprosum_int !======================================================================== -! -! Purpose: +! +! Purpose: !> Test whether distributed sum exceeds tolerance and print out a !> warning message. -! +! 
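+!
+! A hedged caller-side sketch (names other than the documented argument
+! lists are illustrative; rel_diff is only filled when
+! ice_reprosum_reldiffmax >= 0.0 and rel_diff is passed to
+! ice_reprosum_calc, as described above):
+!
+!    call ice_reprosum_calc(arr, arr_gsum, nsummands, dsummands, &
+!                           nflds, rel_diff=rel_diff)
+!    if (ice_reprosum_tolExceeded('my sum', nflds, &
+!        my_task == master_task, nu_diag, rel_diff)) then
+!       ! sums differed by more than the tolerance; recompute or log here
+!    endif
+!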
!---------------------------------------------------------------------- logical function ice_reprosum_tolExceeded (name, nflds, master, & @@ -1234,11 +1234,11 @@ logical function ice_reprosum_tolExceeded (name, nflds, master, & integer, intent(in) :: nflds !< number of fields logical, intent(in) :: master !< process that will write !< warning messages? - integer, optional, intent(in) :: logunit!< unit warning messages + integer, optional, intent(in) :: logunit!< unit warning messages !< written to real(r8), intent(in) :: rel_diff(2,nflds) !< relative and absolute - !< differences between fixed + !< differences between fixed !< and floating point sums ! Local workspace @@ -1302,12 +1302,12 @@ logical function ice_reprosum_tolExceeded (name, nflds, master, & end function ice_reprosum_tolExceeded !======================================================================== -! -! Purpose: -!> Compute the global sum of each field in "arr" using the indicated -!> communicator with a reproducible yet scalable implementation based +! +! Purpose: +!> Compute the global sum of each field in "arr" using the indicated +!> communicator with a reproducible yet scalable implementation based !> on He and Ding's implementation of the double-double algorithm. -! +! !---------------------------------------------------------------------- subroutine ice_reprosum_ddpdd (arr, arr_gsum, nsummands, dsummands, & @@ -1319,11 +1319,11 @@ subroutine ice_reprosum_ddpdd (arr, arr_gsum, nsummands, dsummands, & integer, intent(in) :: nsummands !< number of local summands integer, intent(in) :: dsummands !< declared first dimension integer, intent(in) :: nflds !< number of fields - real(r8), intent(in) :: arr(dsummands,nflds) + real(r8), intent(in) :: arr(dsummands,nflds) !< input array integer, intent(in) :: mpi_comm !< MPI subcommunicator - real(r8), intent(out):: arr_gsum(nflds) + real(r8), intent(out):: arr_gsum(nflds) !< global sums @@ -1361,8 +1361,8 @@ subroutine ice_reprosum_ddpdd (arr, arr_gsum, nsummands, dsummands, & arr_lsum_dd(ifld) = (0.0_r8,0.0_r8) do isum=1,nsummands - - ! Compute arr(isum,ifld) + arr_lsum_dd(ifld) using Knuth''s + + ! Compute arr(isum,ifld) + arr_lsum_dd(ifld) using Knuth''s ! trick. t1 = arr(isum,ifld) + real(arr_lsum_dd(ifld)) e = t1 - arr(isum,ifld) @@ -1394,11 +1394,11 @@ end subroutine ice_reprosum_ddpdd subroutine DDPDD (dda, ddb, len, itype) !---------------------------------------------------------------------- -! -! Purpose: -! Modification of original codes written by David H. Bailey +! +! Purpose: +! Modification of original codes written by David H. Bailey ! This subroutine computes ddb(i) = dda(i)+ddb(i) -! +! !---------------------------------------------------------------------- ! Arguments @@ -1434,10 +1434,10 @@ end subroutine DDPDD subroutine split_indices(total,num_pieces,ibeg,iend) !---------------------------------------------------------------------- -! -! Purpose: +! +! Purpose: ! Split range into 'num_pieces' -! +! !---------------------------------------------------------------------- ! Arguments diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_timers.F90 b/cicecore/cicedynB/infrastructure/comm/serial/ice_timers.F90 index be6e12253..bbe2fd4d1 100644 --- a/cicecore/cicedynB/infrastructure/comm/serial/ice_timers.F90 +++ b/cicecore/cicedynB/infrastructure/comm/serial/ice_timers.F90 @@ -116,7 +116,7 @@ module ice_timers type (timer_data), dimension(max_timers) :: & all_timers ! 
timer data for all timers - integer (int_kind) :: & + integer (int_kind) :: & cycles_max ! max clock cycles allowed by system real (dbl_kind) :: & @@ -148,8 +148,8 @@ subroutine init_ice_timers !----------------------------------------------------------------------- ! ! Call F90 intrinsic system_clock to determine clock rate -! and maximum cycles for single-processor runs. If no clock -! available, print message. +! and maximum cycles for single-processor runs. If no clock +! available, print message. ! !----------------------------------------------------------------------- @@ -231,7 +231,7 @@ end subroutine init_ice_timers subroutine get_ice_timer(timer_id, name_choice, num_blocks, num_nodes) -! This routine initializes a timer with a given name and returns a +! This routine initializes a timer with a given name and returns a ! timer id. character (*), intent(in) :: & @@ -244,7 +244,7 @@ subroutine get_ice_timer(timer_id, name_choice, num_blocks, num_nodes) ! threaded region) integer (int_kind), intent(out) :: & - timer_id ! timer number assigned to this timer + timer_id ! timer number assigned to this timer !----------------------------------------------------------------------- ! @@ -275,7 +275,7 @@ subroutine get_ice_timer(timer_id, name_choice, num_blocks, num_nodes) all_timers(n)%name = name_choice all_timers(n)%in_use = .true. all_timers(n)%num_blocks = num_blocks - all_timers(n)%num_nodes = num_nodes + all_timers(n)%num_nodes = num_nodes allocate(all_timers(n)%block_started (num_blocks), & all_timers(n)%block_cycles1 (num_blocks), & @@ -293,7 +293,7 @@ subroutine get_ice_timer(timer_id, name_choice, num_blocks, num_nodes) if (srch_error /= 0) & call abort_ice(subname//'ERROR: Exceeded maximum number of timers') - + !----------------------------------------------------------------------- @@ -334,7 +334,7 @@ subroutine ice_timer_clear(timer_id) all_timers(timer_id)%block_accum_time(:) = c0 else call abort_ice(subname//'ERROR: attempt to reset undefined timer') - + endif !----------------------------------------------------------------------- @@ -396,7 +396,7 @@ subroutine ice_timer_start(timer_id, block_id) !*** another thread. if already started, keep track !*** of number of start requests in order to match !*** start and stop requests - + !$OMP CRITICAL if (.not. all_timers(timer_id)%node_started) then @@ -431,18 +431,18 @@ subroutine ice_timer_start(timer_id, block_id) endif else call abort_ice(subname//'ERROR: attempt to start undefined timer') - + endif !----------------------------------------------------------------------- end subroutine ice_timer_start - + !*********************************************************************** subroutine ice_timer_stop(timer_id, block_id) -! This routine stops a given node timer if appropriate. If block +! This routine stops a given node timer if appropriate. If block ! information is available the appropriate block timer is also stopped. 
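+!
+! Typical start/stop pairing (a sketch; timer_mywork is an illustrative
+! id returned by get_ice_timer above, not one of the predefined CICE
+! timers):
+!
+!    call get_ice_timer(timer_mywork, 'MyWork', nblocks, distrb_info%nprocs)
+!    call ice_timer_start(timer_mywork)
+!    ! ... timed work ...
+!    call ice_timer_stop(timer_mywork)
+!    call ice_timer_print(timer_mywork)
+!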
integer (int_kind), intent(in) :: & @@ -513,7 +513,7 @@ subroutine ice_timer_stop(timer_id, block_id) !*** stop node timer if number of requested stops !*** matches the number of starts (to avoid stopping !*** a node timer started by multiple threads) - + cycles1 = all_timers(timer_id)%node_cycles1 !$OMP CRITICAL @@ -566,13 +566,13 @@ subroutine ice_timer_stop(timer_id, block_id) endif else call abort_ice(subname//'ERROR: attempt to stop undefined timer') - + endif !----------------------------------------------------------------------- end subroutine ice_timer_stop - + !*********************************************************************** subroutine ice_timer_print(timer_id,stats) @@ -648,7 +648,7 @@ subroutine ice_timer_print(timer_id,stats) local_time = c0 endif max_time = global_maxval(local_time,distrb_info) - + if (my_task == master_task) then write (nu_diag,timer_format) timer_id, & trim(all_timers(timer_id)%name),max_time @@ -713,7 +713,7 @@ subroutine ice_timer_print(timer_id,stats) if (lrestart_timer) call ice_timer_start(timer_id) else call abort_ice(subname//'ERROR: attempt to print undefined timer') - + endif !----------------------------------------------------------------------- @@ -771,7 +771,7 @@ end subroutine ice_timer_print_all subroutine ice_timer_check(timer_id,block_id) ! This routine checks a given timer by stopping and restarting the -! timer. This is primarily used to periodically accumulate time in +! timer. This is primarily used to periodically accumulate time in ! the timer to prevent timer cycles from wrapping around max_cycles. integer (int_kind), intent(in) :: & diff --git a/cicecore/cicedynB/infrastructure/ice_blocks.F90 b/cicecore/cicedynB/infrastructure/ice_blocks.F90 index 74aba9cb5..fb7483914 100644 --- a/cicecore/cicedynB/infrastructure/ice_blocks.F90 +++ b/cicecore/cicedynB/infrastructure/ice_blocks.F90 @@ -3,8 +3,8 @@ module ice_blocks ! This module contains data types and tools for decomposing a global -! horizontal domain into a set of blocks. It contains a data type -! for describing each block and contains routines for creating and +! horizontal domain into a set of blocks. It contains a data type +! for describing each block and contains routines for creating and ! querying the block decomposition for a global domain. ! ! author: Phil Jones, LANL @@ -46,7 +46,7 @@ module ice_blocks nx_block, ny_block ! x,y dir including ghost ! predefined directions for neighbor id routine - ! Note: the directions that are commented out are implemented in + ! Note: the directions that are commented out are implemented in ! POP but not in CICE. If the tripole cut were in the south ! instead of the north, these would need to be used (and also ! implemented in ice_boundary.F90). @@ -314,11 +314,12 @@ subroutine create_blocks(nx_global, ny_global, ew_boundary_type, & if (debug_blocks) then if (my_task == master_task) then - write(nu_diag,*) 'block i,j locations' + write(nu_diag,*) ' ' + write(nu_diag,'(2a)') subname,' block ID, iblock, jblock Locations:' do n = 1, nblocks_tot - write(nu_diag,*) 'block id, iblock, jblock, tripole:', & + write(nu_diag,'(2a,3i8,l4)') subname,' global block ID, iblock, jblock, tripole:', & all_blocks(n)%block_id, & - all_blocks(n)%iblock, & + all_blocks(n)%iblock, & all_blocks(n)%jblock, & all_blocks(n)%tripole enddo @@ -380,7 +381,7 @@ function ice_blocksGetNbrID(blockID, direction, iBoundary, jBoundary) & ! local variables ! 
!---------------------------------------------------------------------- - + integer (int_kind) :: & iBlock, jBlock, &! i,j block location of current block inbr, jnbr ! i,j block location of neighboring block @@ -394,6 +395,7 @@ function ice_blocksGetNbrID(blockID, direction, iBoundary, jBoundary) & !---------------------------------------------------------------------- call get_block_parameter(blockID, iblock=iBlock, jblock=jBlock) + nbrID = 0 ! initial default !---------------------------------------------------------------------- ! @@ -422,7 +424,7 @@ function ice_blocksGetNbrID(blockID, direction, iBoundary, jBoundary) & !*** other points if there has been padding or !*** if the block size does not divide the domain !*** evenly - inbr = nblocks_x - iBlock + 1 + inbr = nblocks_x - iBlock + 1 jnbr = -jBlock case default call abort_ice(subname//'ERROR: unknown north boundary') @@ -515,7 +517,7 @@ function ice_blocksGetNbrID(blockID, direction, iBoundary, jBoundary) & !*** other points if there has been padding or !*** if the block size does not divide the domain !*** evenly - inbr = nblocks_x - iBlock + inbr = nblocks_x - iBlock if (inbr == 0) inbr = nblocks_x jnbr = -jBlock case default @@ -554,7 +556,7 @@ function ice_blocksGetNbrID(blockID, direction, iBoundary, jBoundary) & !*** other points if there has been padding or !*** if the block size does not divide the domain !*** evenly - inbr = nblocks_x - iBlock + 2 + inbr = nblocks_x - iBlock + 2 if (inbr > nblocks_x) inbr = 1 jnbr = -jBlock case default @@ -691,7 +693,7 @@ function ice_blocksGetNbrID(blockID, direction, iBoundary, jBoundary) & !*** other points if there has been padding or !*** if the block size does not divide the domain !*** evenly - inbr = nblocks_x - iBlock - 1 + inbr = nblocks_x - iBlock - 1 if (inbr <= 0) inbr = inbr + nblocks_x jnbr = -jBlock case default @@ -799,7 +801,7 @@ end function get_block !********************************************************************** - subroutine get_block_parameter(block_id, local_id, & + subroutine get_block_parameter(block_id, local_id, & ilo, ihi, jlo, jhi, & iblock, jblock, tripole, & i_glob, j_glob) diff --git a/cicecore/cicedynB/infrastructure/ice_domain.F90 b/cicecore/cicedynB/infrastructure/ice_domain.F90 index 79f5bcb9a..ac56356e5 100644 --- a/cicecore/cicedynB/infrastructure/ice_domain.F90 +++ b/cicecore/cicedynB/infrastructure/ice_domain.F90 @@ -18,7 +18,7 @@ module ice_domain use ice_kinds_mod use ice_constants, only: shlat, nhlat use ice_communicate, only: my_task, master_task, get_num_procs, & - add_mpi_barriers + add_mpi_barriers, ice_barrier use ice_broadcast, only: broadcast_scalar, broadcast_array use ice_blocks, only: block, get_block, create_blocks, nghost, & nblocks_x, nblocks_y, nblocks_tot, nx_block, ny_block, debug_blocks @@ -26,7 +26,7 @@ module ice_domain use ice_boundary, only: ice_halo use ice_exit, only: abort_ice use ice_fileunits, only: nu_nml, nml_filename, nu_diag, & - get_fileunit, release_fileunit + get_fileunit, release_fileunit, flush_fileunit use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc, only: icepack_query_parameters @@ -79,7 +79,7 @@ module ice_domain distribution_type, &! method to use for distributing blocks ! 'cartesian', 'roundrobin', 'sectrobin', 'sectcart' ! 'rake', 'spacecurve', etc - distribution_wght ! method for weighting work per block + distribution_wght ! method for weighting work per block ! 'block' = POP default configuration ! 'blockall' = no land block elimination ! 
'latitude' = no. ocean points * |lat| @@ -326,6 +326,7 @@ subroutine init_domain_distribution(KMTG,ULATG,grid_ice) ig,jg ,&! global indices igm1,igp1,jgm1,jgp1,&! global indices ninfo ,&! ice_distributionGet check + np, nlb, m ,&! debug blocks temporaries work_unit ,&! size of quantized work unit #ifdef USE_NETCDF fid ,&! file id @@ -357,7 +358,7 @@ subroutine init_domain_distribution(KMTG,ULATG,grid_ice) ! ! check that there are at least nghost+1 rows or columns of land cells ! for closed boundary conditions (otherwise grid lengths are zero in -! cells neighboring ocean points). +! cells neighboring ocean points). ! !---------------------------------------------------------------------- @@ -526,12 +527,12 @@ subroutine init_domain_distribution(KMTG,ULATG,grid_ice) KMTG(igm1,jg ) > puny .or. KMTG(igp1,jg ) > puny .or. & KMTG(ig ,jgp1) > puny .or. KMTG(ig ,jgm1) > puny) .and. & (ULATG(ig,jg) < shlat/rad_to_deg .or. & - ULATG(ig,jg) > nhlat/rad_to_deg) ) & + ULATG(ig,jg) > nhlat/rad_to_deg) ) & nocn(n) = nocn(n) + flat(ig,jg) else if (KMTG(ig,jg) > puny .and. & (ULATG(ig,jg) < shlat/rad_to_deg .or. & - ULATG(ig,jg) > nhlat/rad_to_deg) ) & + ULATG(ig,jg) > nhlat/rad_to_deg) ) & nocn(n) = nocn(n) + flat(ig,jg) endif endif @@ -543,7 +544,7 @@ subroutine init_domain_distribution(KMTG,ULATG,grid_ice) !*** points, so where the block is not completely land, !*** reset nocn to be the full size of the block - ! use processor_shape = 'square-pop' and distribution_wght = 'block' + ! use processor_shape = 'square-pop' and distribution_wght = 'block' ! to make CICE and POP decompositions/distributions identical. #ifdef CICE_IN_NEMO @@ -596,8 +597,41 @@ subroutine init_domain_distribution(KMTG,ULATG,grid_ice) call create_local_block_ids(blocks_ice, distrb_info) + ! write out block distribution ! 
internal check of icedistributionGet as part of verification process if (debug_blocks) then + + call flush_fileunit(nu_diag) + call ice_barrier() + if (my_task == master_task) then + write(nu_diag,*) ' ' + write(nu_diag,'(2a)') subname, ' Blocks by Proc:' + endif + call ice_distributionGet(distrb_info, nprocs=np, numLocalBlocks=nlb) + do m = 1, np + if (m == my_task+1) then + do n=1,nlb + write(nu_diag,'(2a,3i8)') & + subname,' my_task, local block ID, global block ID: ', & + my_task, n, distrb_info%blockGlobalID(n) + enddo + call flush_fileunit(nu_diag) + endif + call ice_barrier() + enddo + + if (my_task == master_task) then + write(nu_diag,*) ' ' + write(nu_diag,'(2a)') subname, ' Blocks by Global Block ID:' + do m = 1, nblocks_tot + write(nu_diag,'(2a,3i8)') & + subname,' global block id, proc, local block ID: ', & + m, distrb_info%blockLocation(m), distrb_info%blockLocalID(m) + enddo + call flush_fileunit(nu_diag) + endif + call ice_barrier() + call ice_distributionGet(distrb_info, nprocs=ninfo) if (ninfo /= distrb_info%nprocs) & call abort_ice(subname//' ice_distributionGet nprocs ERROR', file=__FILE__, line=__LINE__) @@ -635,8 +669,11 @@ subroutine init_domain_distribution(KMTG,ULATG,grid_ice) deallocate(blkinfo) - if (my_task == master_task) & - write(nu_diag,*) subname,' ice_distributionGet checks pass' + if (my_task == master_task) then + write(nu_diag,*) ' ' + write(nu_diag,'(2a)') subname,' ice_distributionGet checks pass' + write(nu_diag,*) ' ' + endif endif if (associated(blocks_ice)) then diff --git a/cicecore/cicedynB/infrastructure/ice_grid.F90 b/cicecore/cicedynB/infrastructure/ice_grid.F90 index f7b854b4f..523c7ea2c 100644 --- a/cicecore/cicedynB/infrastructure/ice_grid.F90 +++ b/cicecore/cicedynB/infrastructure/ice_grid.F90 @@ -14,7 +14,7 @@ ! 2006: Converted to free source form (F90) by Elizabeth Hunke ! 2007: Option to read from netcdf files (A. Keen, Met Office) ! Grid reading routines reworked by E. Hunke for boundary values -! 2021: Add N (center of north face) and E (center of east face) grids +! 2021: Add N (center of north face) and E (center of east face) grids ! to support C and CD solvers. Defining T at center of cells, U at ! NE corner, N at center of top face, E at center of right face. ! All cells are quadrilaterals with NE, E, and N associated with @@ -55,7 +55,7 @@ module ice_grid kmt_type , & ! options are file, default, boxislands bathymetry_file, & ! input bathymetry for seabed stress bathymetry_format, & ! bathymetry file format (default or pop) - grid_spacing , & ! default of 30.e3m or set by user in namelist + grid_spacing , & ! default of 30.e3m or set by user in namelist grid_ice , & ! Underlying model grid structure (A, B, C, CD) grid_ice_thrm, & ! ocean forcing grid for thermo fields (T, U, N, E) grid_ice_dynu, & ! ocean forcing grid for dyn U fields (T, U, N, E) @@ -111,12 +111,12 @@ module ice_grid G_HTN ! length of northern edge of T-cell (global ext.) real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & - cyp , & ! 1.5*HTE(i,j)-0.5*HTW(i,j) = 1.5*HTE(i,j)-0.5*HTE(i-1,j) + cyp , & ! 1.5*HTE(i,j)-0.5*HTW(i,j) = 1.5*HTE(i,j)-0.5*HTE(i-1,j) cxp , & ! 1.5*HTN(i,j)-0.5*HTS(i,j) = 1.5*HTN(i,j)-0.5*HTN(i,j-1) - cym , & ! 0.5*HTE(i,j)-1.5*HTW(i,j) = 0.5*HTE(i,j)-1.5*HTE(i-1,j) - cxm , & ! 0.5*HTN(i,j)-1.5*HTS(i,j) = 0.5*HTN(i,j)-1.5*HTN(i,j-1) + cym , & ! 0.5*HTE(i,j)-1.5*HTW(i,j) = 0.5*HTE(i,j)-1.5*HTE(i-1,j) + cxm , & ! 0.5*HTN(i,j)-1.5*HTS(i,j) = 0.5*HTN(i,j)-1.5*HTN(i,j-1) dxhy , & ! 
0.5*(HTE(i,j) - HTW(i,j)) = 0.5*(HTE(i,j) - HTE(i-1,j)) - dyhx ! 0.5*(HTN(i,j) - HTS(i,j)) = 0.5*(HTN(i,j) - HTN(i,j-1)) + dyhx ! 0.5*(HTN(i,j) - HTS(i,j)) = 0.5*(HTN(i,j) - HTN(i,j-1)) real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & ratiodxN , & ! - dxN(i+1,j) / dxN(i,j) @@ -157,14 +157,14 @@ module ice_grid dimension (:,:,:,:,:), allocatable, public :: & mne, & ! matrices used for coordinate transformations in remapping mnw, & ! ne = northeast corner, nw = northwest, etc. - mse, & + mse, & msw ! masks real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & hm , & ! land/boundary mask, thickness (T-cell) bm , & ! task/block id - uvm , & ! land/boundary mask (U-cell) + uvm , & ! land/boundary mask (U-cell) npm , & ! land/boundary mask (N-cell) epm , & ! land/boundary mask (E-cell) kmt ! ocean topography mask for bathymetry (T-cell) @@ -173,8 +173,7 @@ module ice_grid use_bathymetry, & ! flag for reading in bathymetry_file pgl_global_ext ! flag for init primary grid lengths (global ext.) - logical (kind=log_kind), & - dimension (:,:,:), allocatable, public :: & + logical (kind=log_kind), dimension (:,:,:), allocatable, public :: & tmask , & ! land/boundary mask, thickness (T-cell) umask , & ! land/boundary mask (U-cell) (1 if all surrounding T cells are ocean) umaskCD, & ! land/boundary mask (U-cell) (1 if at least two surrounding T cells are ocean) @@ -183,6 +182,11 @@ module ice_grid lmask_n, & ! northern hemisphere mask lmask_s ! southern hemisphere mask + logical (kind=log_kind), dimension (:,:,:), allocatable, public :: & + iceumask, & ! ice extent mask (U-cell) + icenmask, & ! ice extent mask (N-cell) + iceemask ! ice extent mask (E-cell) + real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & rndex_global ! global index for local subdomain (dbl) @@ -201,7 +205,7 @@ module ice_grid !======================================================================= ! -! Allocate space for all variables +! Allocate space for all variables ! subroutine alloc_grid @@ -263,6 +267,7 @@ subroutine alloc_grid umaskCD (nx_block,ny_block,max_blocks), & ! land/boundary mask, velocity (U-cell) nmask (nx_block,ny_block,max_blocks), & ! land/boundary mask (N-cell) emask (nx_block,ny_block,max_blocks), & ! land/boundary mask (E-cell) + iceumask (nx_block,ny_block,max_blocks), & ! u mask for dynamics lmask_n (nx_block,ny_block,max_blocks), & ! northern hemisphere mask lmask_s (nx_block,ny_block,max_blocks), & ! southern hemisphere mask rndex_global(nx_block,ny_block,max_blocks), & ! global index for local subdomain (dbl) @@ -283,6 +288,8 @@ subroutine alloc_grid if (grid_ice == 'CD' .or. grid_ice == 'C') then allocate( & + iceemask (nx_block,ny_block,max_blocks), & ! e mask for dynamics + icenmask (nx_block,ny_block,max_blocks), & ! n mask for dynamics ratiodxN (nx_block,ny_block,max_blocks), & ratiodyE (nx_block,ny_block,max_blocks), & ratiodxNr(nx_block,ny_block,max_blocks), & @@ -304,11 +311,11 @@ end subroutine alloc_grid !======================================================================= ! Distribute blocks across processors. The distribution is optimized -! based on latitude and topography, contained in the ULAT and KMT arrays. +! based on latitude and topography, contained in the ULAT and KMT arrays. ! ! authors: William Lipscomb and Phil Jones, LANL - subroutine init_grid1 + subroutine init_grid1 use ice_blocks, only: nx_block, ny_block use ice_broadcast, only: broadcast_array @@ -480,7 +487,7 @@ subroutine init_grid2 call popgrid_nc ! 
read POP grid lengths from nc file else call popgrid ! read POP grid lengths directly - endif + endif #ifdef CESMCOUPLED elseif (trim(grid_type) == 'latlon') then call latlongrid ! lat lon grid for sequential CESM (CAM mode) @@ -500,7 +507,7 @@ subroutine init_grid2 !$OMP PARALLEL DO ORDERED PRIVATE(iblk) SCHEDULE(runtime) do iblk = 1, nblocks if (my_task == master_task) then - !$OMP ORDERED + !$OMP ORDERED if (iblk == 1) then call omp_get_schedule(ompsk,ompcs) write(nu_diag,*) '' @@ -509,7 +516,7 @@ subroutine init_grid2 endif write(nu_diag,*) subname,' block, thread = ',iblk,OMP_GET_THREAD_NUM() call flush_fileunit(nu_diag) - !$OMP END ORDERED + !$OMP END ORDERED endif enddo !$OMP END PARALLEL DO @@ -574,8 +581,8 @@ subroutine init_grid2 cyp(i,j,iblk) = (c1p5*HTE(i,j,iblk) - p5*HTE(i-1,j,iblk)) cxp(i,j,iblk) = (c1p5*HTN(i,j,iblk) - p5*HTN(i,j-1,iblk)) ! match order of operations in cyp, cxp for tripole grids - cym(i,j,iblk) = -(c1p5*HTE(i-1,j,iblk) - p5*HTE(i,j,iblk)) - cxm(i,j,iblk) = -(c1p5*HTN(i,j-1,iblk) - p5*HTN(i,j,iblk)) + cym(i,j,iblk) = -(c1p5*HTE(i-1,j,iblk) - p5*HTE(i,j,iblk)) + cxm(i,j,iblk) = -(c1p5*HTN(i,j-1,iblk) - p5*HTN(i,j,iblk)) enddo enddo @@ -697,7 +704,7 @@ subroutine init_grid2 enddo !$OMP END PARALLEL DO endif ! regional - + call ice_timer_start(timer_bound) call ice_HaloUpdate (ANGLET, halo_info, & field_loc_center, field_type_angle, & @@ -753,7 +760,7 @@ end subroutine init_grid2 !======================================================================= -! POP displaced pole grid and land mask (or tripole). +! POP displaced pole grid and land mask (or tripole). ! Grid record number, field and units are: \\ ! (1) ULAT (radians) \\ ! (2) ULON (radians) \\ @@ -761,7 +768,7 @@ end subroutine init_grid2 ! (4) HTE (cm) \\ ! (5) HUS (cm) \\ ! (6) HUW (cm) \\ -! (7) ANGLE (radians) +! (7) ANGLE (radians) ! ! Land mask record number and field is (1) KMT. ! @@ -802,7 +809,7 @@ subroutine popgrid !----------------------------------------------------------------- call ice_read(nu_kmt,1,work1,'ida4',diag, & - field_loc=field_loc_center, & + field_loc=field_loc_center, & field_type=field_type_scalar) hm (:,:,:) = c0 @@ -831,14 +838,14 @@ subroutine popgrid allocate(work_g1(nx_global,ny_global)) call ice_read_global(nu_grid,1,work_g1,'rda8',.true.) ! ULAT - call gridbox_verts(work_g1,latt_bounds) + call gridbox_verts(work_g1,latt_bounds) call scatter_global(ULAT, work_g1, master_task, distrb_info, & field_loc_NEcorner, field_type_scalar) call ice_HaloExtrapolate(ULAT, distrb_info, & ew_boundary_type, ns_boundary_type) call ice_read_global(nu_grid,2,work_g1,'rda8',.true.) ! ULON - call gridbox_verts(work_g1,lont_bounds) + call gridbox_verts(work_g1,lont_bounds) call scatter_global(ULON, work_g1, master_task, distrb_info, & field_loc_NEcorner, field_type_scalar) call ice_HaloExtrapolate(ULON, distrb_info, & @@ -850,7 +857,7 @@ subroutine popgrid !----------------------------------------------------------------- ! cell dimensions - ! calculate derived quantities from global arrays to preserve + ! calculate derived quantities from global arrays to preserve ! information on boundaries !----------------------------------------------------------------- @@ -919,7 +926,7 @@ subroutine popgrid_nc type (block) :: & this_block ! 
block information for current block - + integer(kind=int_kind) :: & varid integer (kind=int_kind) :: & @@ -945,7 +952,7 @@ subroutine popgrid_nc fieldname='kmt' call ice_read_nc(fid_kmt,1,fieldname,work1,diag, & - field_loc=field_loc_center, & + field_loc=field_loc_center, & field_type=field_type_scalar) hm (:,:,:) = c0 @@ -975,7 +982,7 @@ subroutine popgrid_nc fieldname='ulat' call ice_read_global_nc(fid_grid,1,fieldname,work_g1,diag) ! ULAT - call gridbox_verts(work_g1,latt_bounds) + call gridbox_verts(work_g1,latt_bounds) call scatter_global(ULAT, work_g1, master_task, distrb_info, & field_loc_NEcorner, field_type_scalar) call ice_HaloExtrapolate(ULAT, distrb_info, & @@ -983,7 +990,7 @@ subroutine popgrid_nc fieldname='ulon' call ice_read_global_nc(fid_grid,1,fieldname,work_g1,diag) ! ULON - call gridbox_verts(work_g1,lont_bounds) + call gridbox_verts(work_g1,lont_bounds) call scatter_global(ULON, work_g1, master_task, distrb_info, & field_loc_NEcorner, field_type_scalar) call ice_HaloExtrapolate(ULON, distrb_info, & @@ -1010,7 +1017,7 @@ subroutine popgrid_nc endif call broadcast_scalar(l_readCenter,master_task) if (l_readCenter) then - call ice_read_global_nc(fid_grid,1,fieldname,work_g1,diag) + call ice_read_global_nc(fid_grid,1,fieldname,work_g1,diag) call scatter_global(ANGLET, work_g1, master_task, distrb_info, & field_loc_center, field_type_angle) where (ANGLET > pi) ANGLET = pi @@ -1026,7 +1033,7 @@ subroutine popgrid_nc endif !----------------------------------------------------------------- ! cell dimensions - ! calculate derived quantities from global arrays to preserve + ! calculate derived quantities from global arrays to preserve ! information on boundaries !----------------------------------------------------------------- @@ -1053,7 +1060,7 @@ end subroutine popgrid_nc #ifdef CESMCOUPLED !======================================================================= -! Read in kmt file that matches CAM lat-lon grid and has single column +! Read in kmt file that matches CAM lat-lon grid and has single column ! functionality ! author: Mariana Vertenstein ! 2007: Elizabeth Hunke upgraded to netcdf90 and cice ncdf calls @@ -1070,8 +1077,8 @@ subroutine latlongrid #endif integer (kind=int_kind) :: & - i, j, iblk - + i, j, iblk + integer (kind=int_kind) :: & ni, nj, ncid, dimid, varid, ier @@ -1099,7 +1106,7 @@ subroutine latlongrid status ! status flag real (kind=dbl_kind), allocatable :: & - lats(:),lons(:),pos_lons(:), glob_grid(:,:) ! temporaries + lats(:),lons(:),pos_lons(:), glob_grid(:,:) ! temporaries real (kind=dbl_kind) :: & pos_scmlon,& ! temporary @@ -1168,12 +1175,12 @@ subroutine latlongrid status = nf90_get_var(ncid, varid, glob_grid, start3, count3) if (status /= nf90_noerr) call abort_ice (subname//' get_var yc') do j = 1,nj - lats(j) = glob_grid(1,j) + lats(j) = glob_grid(1,j) end do - + ! convert lons array and scmlon to 0,360 and find index of value closest to 0 ! and obtain single-column longitude/latitude indices to retrieve - + pos_lons(:)= mod(lons(:) + 360._dbl_kind,360._dbl_kind) pos_scmlon = mod(scmlon + 360._dbl_kind,360._dbl_kind) start(1) = (MINLOC(abs(pos_lons-pos_scmlon),dim=1)) @@ -1260,7 +1267,7 @@ subroutine latlongrid ! Calculate various geometric 2d arrays ! The U grid (velocity) is not used when run with sequential CAM ! because we only use thermodynamic sea ice. However, ULAT is used - ! in the default initialization of CICE so we calculate it here as + ! in the default initialization of CICE so we calculate it here as ! 
a "dummy" so that CICE will initialize with ice. If a no ice ! initialization is OK (or desired) this can be commented out and ! ULAT will remain 0 as specified above. ULAT is located at the @@ -1291,12 +1298,12 @@ subroutine latlongrid uarear(i,j,iblk) = c1/uarea(i,j,iblk) if (single_column) then - ULAT (i,j,iblk) = TLAT(i,j,iblk)+(pi/nj) + ULAT (i,j,iblk) = TLAT(i,j,iblk)+(pi/nj) else if (ny_global == 1) then ULAT (i,j,iblk) = TLAT(i,j,iblk) else - ULAT (i,j,iblk) = TLAT(i,j,iblk)+(pi/ny_global) + ULAT (i,j,iblk) = TLAT(i,j,iblk)+(pi/ny_global) endif endif ULON (i,j,iblk) = c0 @@ -1304,9 +1311,9 @@ subroutine latlongrid NLAT (i,j,iblk) = c0 ELON (i,j,iblk) = c0 ELAT (i,j,iblk) = c0 - ANGLE (i,j,iblk) = c0 + ANGLE (i,j,iblk) = c0 - ANGLET(i,j,iblk) = c0 + ANGLET(i,j,iblk) = c0 HTN (i,j,iblk) = 1.e36_dbl_kind HTE (i,j,iblk) = 1.e36_dbl_kind dxT (i,j,iblk) = 1.e36_dbl_kind @@ -1344,13 +1351,12 @@ end subroutine latlongrid subroutine rectgrid - use ice_blocks, only: nx_block, ny_block use ice_constants, only: c0, c1, c2, radius, cm_to_m, & field_loc_center, field_loc_NEcorner, field_type_scalar use ice_domain, only: close_boundaries integer (kind=int_kind) :: & - i, j, iblk, & + i, j, & imid, jmid real (kind=dbl_kind) :: & @@ -1397,6 +1403,7 @@ subroutine rectgrid enddo work_g1(:,:) = work_g1(:,:) / rad_to_deg endif + call gridbox_verts(work_g1,lont_bounds) call scatter_global(ULON, work_g1, master_task, distrb_info, & field_loc_NEcorner, field_type_scalar) call ice_HaloExtrapolate(ULON, distrb_info, & @@ -1416,6 +1423,7 @@ subroutine rectgrid enddo work_g1(:,:) = work_g1(:,:) / rad_to_deg endif + call gridbox_verts(work_g1,latt_bounds) call scatter_global(ULAT, work_g1, master_task, distrb_info, & field_loc_NEcorner, field_type_scalar) call ice_HaloExtrapolate(ULAT, distrb_info, & @@ -1543,8 +1551,8 @@ subroutine grid_boxislands_kmt (work) if (nxb < 1 .or. nyb < 1) & call abort_ice(subname//'ERROR: requires larger grid size') - - ! initialize work area as all ocean (c1). + + ! initialize work area as all ocean (c1). work(:,:) = c1 ! now add land points (c0) @@ -1947,7 +1955,7 @@ subroutine primary_grid_lengths_HTE(work_g) do i = 1, nx_global ! assume cyclic; noncyclic will be handled during scatter im1 = i-1 - if (i == 1) im1 = nx_global + if (i == 1) im1 = nx_global work_g2(i,j) = p25*(work_g(i,j) + work_g(im1,j) + work_g(i,j+1) + work_g(im1,j+1)) ! dyN enddo enddo @@ -1956,7 +1964,7 @@ subroutine primary_grid_lengths_HTE(work_g) do i = 1, nx_global ! assume cyclic; noncyclic will be handled during scatter im1 = i-1 - if (i == 1) im1 = nx_global + if (i == 1) im1 = nx_global work_g2(i,ny_global) = p5*(c2*work_g(i ,ny_global-1) - work_g(i ,ny_global-2) + & c2*work_g(im1,ny_global-1) - work_g(im1,ny_global-2)) ! dyN enddo @@ -2223,7 +2231,7 @@ subroutine Tlatlon ! the prior atan2 call ??? not sure what's going on. #if (1 == 1) enddo ! i - enddo ! j + enddo ! j enddo ! iblk !$OMP END PARALLEL DO @@ -2295,9 +2303,9 @@ subroutine Tlatlon ! ELAT in radians North ELAT(i,j,iblk) = asin(tz) - + enddo ! i - enddo ! j + enddo ! j enddo ! 
iblk !$OMP END PARALLEL DO @@ -2818,12 +2826,12 @@ subroutine grid_average_X2YS(dir,work1,wght1,mask1,work2) do i = ilo, ihi wtmp = (mask1(i ,j ,iblk)*wght1(i ,j ,iblk) & + mask1(i-1,j ,iblk)*wght1(i-1,j ,iblk) & - + mask1(i ,j-1,iblk)*wght1(i ,j-1,iblk) & + + mask1(i ,j-1,iblk)*wght1(i ,j-1,iblk) & + mask1(i-1,j-1,iblk)*wght1(i-1,j-1,iblk)) if (wtmp /= c0) & work2(i,j,iblk) = (mask1(i ,j ,iblk)*work1(i ,j ,iblk)*wght1(i ,j ,iblk) & + mask1(i-1,j ,iblk)*work1(i-1,j ,iblk)*wght1(i-1,j ,iblk) & - + mask1(i ,j-1,iblk)*work1(i ,j-1,iblk)*wght1(i ,j-1,iblk) & + + mask1(i ,j-1,iblk)*work1(i ,j-1,iblk)*wght1(i ,j-1,iblk) & + mask1(i-1,j-1,iblk)*work1(i-1,j-1,iblk)*wght1(i-1,j-1,iblk)) & / wtmp enddo @@ -2867,12 +2875,12 @@ subroutine grid_average_X2YS(dir,work1,wght1,mask1,work2) do j = jlo, jhi do i = ilo, ihi wtmp = (mask1(i ,j-1,iblk)*wght1(i ,j-1,iblk) & - + mask1(i+1,j-1,iblk)*wght1(i+1,j-1,iblk) & + + mask1(i+1,j-1,iblk)*wght1(i+1,j-1,iblk) & + mask1(i ,j ,iblk)*wght1(i ,j ,iblk) & + mask1(i+1,j ,iblk)*wght1(i+1,j ,iblk)) if (wtmp /= c0) & work2(i,j,iblk) = (mask1(i ,j-1,iblk)*work1(i ,j-1,iblk)*wght1(i ,j-1,iblk) & - + mask1(i+1,j-1,iblk)*work1(i+1,j-1,iblk)*wght1(i+1,j-1,iblk) & + + mask1(i+1,j-1,iblk)*work1(i+1,j-1,iblk)*wght1(i+1,j-1,iblk) & + mask1(i ,j ,iblk)*work1(i ,j ,iblk)*wght1(i ,j ,iblk) & + mask1(i+1,j ,iblk)*work1(i+1,j ,iblk)*wght1(i+1,j ,iblk)) & / wtmp @@ -3048,12 +3056,12 @@ subroutine grid_average_X2YA(dir,work1,wght1,work2) do i = ilo, ihi wtmp = (wght1(i ,j ,iblk) & + wght1(i-1,j ,iblk) & - + wght1(i ,j-1,iblk) & + + wght1(i ,j-1,iblk) & + wght1(i-1,j-1,iblk)) if (wtmp /= c0) & work2(i,j,iblk) = (work1(i ,j ,iblk)*wght1(i ,j ,iblk) & + work1(i-1,j ,iblk)*wght1(i-1,j ,iblk) & - + work1(i ,j-1,iblk)*wght1(i ,j-1,iblk) & + + work1(i ,j-1,iblk)*wght1(i ,j-1,iblk) & + work1(i-1,j-1,iblk)*wght1(i-1,j-1,iblk)) & / wtmp enddo @@ -3097,12 +3105,12 @@ subroutine grid_average_X2YA(dir,work1,wght1,work2) do j = jlo, jhi do i = ilo, ihi wtmp = (wght1(i ,j-1,iblk) & - + wght1(i+1,j-1,iblk) & + + wght1(i+1,j-1,iblk) & + wght1(i ,j ,iblk) & + wght1(i+1,j ,iblk)) if (wtmp /= c0) & work2(i,j,iblk) = (work1(i ,j-1,iblk)*wght1(i ,j-1,iblk) & - + work1(i+1,j-1,iblk)*wght1(i+1,j-1,iblk) & + + work1(i+1,j-1,iblk)*wght1(i+1,j-1,iblk) & + work1(i ,j ,iblk)*wght1(i ,j ,iblk) & + work1(i+1,j ,iblk)*wght1(i+1,j ,iblk)) & / wtmp @@ -3273,7 +3281,7 @@ subroutine grid_average_X2YF(dir,work1,wght1,work2,wght2) work2(i,j,iblk) = p25 * & (work1(i ,j ,iblk)*wght1(i ,j ,iblk) & + work1(i-1,j ,iblk)*wght1(i-1,j ,iblk) & - + work1(i ,j-1,iblk)*wght1(i ,j-1,iblk) & + + work1(i ,j-1,iblk)*wght1(i ,j-1,iblk) & + work1(i-1,j-1,iblk)*wght1(i-1,j-1,iblk)) & / wght2(i ,j ,iblk) enddo @@ -3314,7 +3322,7 @@ subroutine grid_average_X2YF(dir,work1,wght1,work2,wght2) do i = ilo, ihi work2(i,j,iblk) = p25 * & (work1(i ,j-1,iblk)*wght1(i ,j-1,iblk) & - + work1(i+1,j-1,iblk)*wght1(i+1,j-1,iblk) & + + work1(i+1,j-1,iblk)*wght1(i+1,j-1,iblk) & + work1(i ,j ,iblk)*wght1(i ,j ,iblk) & + work1(i+1,j ,iblk)*wght1(i+1,j ,iblk)) & / wght2(i ,j ,iblk) @@ -4168,7 +4176,7 @@ subroutine gridbox_verts(work_g,vbounds) if (my_task == master_task) then do j = 1, ny_global do i = 2, nx_global - work_g2(i,j) = work_g(i-1,j ) * rad_to_deg + work_g2(i,j) = work_g(i-1,j ) * rad_to_deg enddo enddo ! extrapolate @@ -4374,13 +4382,13 @@ end subroutine get_bathymetry_popfile !======================================================================= -! Read bathymetry data for seabed stress calculation (grounding scheme for -! 
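! The grid_average_X2YS 'T2U'-style mappings earlier in this file apply
! a masked, weighted 4-point average: each destination value is the
! mask- and weight-normalized mean of the four surrounding source
! points, left unchanged where the summed weight vanishes.
! Schematically, for destination point (i,j):
!
!    wtmp  = sum over 4 neighbors of  mask1*wght1
!    if (wtmp /= c0) work2 = (sum over 4 neighbors of
!                             mask1*work1*wght1) / wtmp
!
! The X2YA variants drop the mask, and the X2YF variants replace the
! normalization with a fixed p25 weight divided by a precomputed
! destination weight wght2.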
landfast ice) in CICE stand-alone mode. When CICE is in coupled mode -! (e.g. CICE-NEMO), hwater should be uptated at each time level so that +! Read bathymetry data for seabed stress calculation (grounding scheme for +! landfast ice) in CICE stand-alone mode. When CICE is in coupled mode +! (e.g. CICE-NEMO), hwater should be uptated at each time level so that ! it varies with ocean dynamics. ! ! author: Fred Dupont, CMC - + subroutine read_seabedstress_bathy ! use module @@ -4390,7 +4398,7 @@ subroutine read_seabedstress_bathy ! local variables integer (kind=int_kind) :: & fid_init ! file id for netCDF init file - + character (char_len_long) :: & ! input data file names fieldname @@ -4424,7 +4432,7 @@ subroutine read_seabedstress_bathy endif end subroutine read_seabedstress_bathy - + !======================================================================= end module ice_grid diff --git a/cicecore/cicedynB/infrastructure/ice_memusage.F90 b/cicecore/cicedynB/infrastructure/ice_memusage.F90 index 19e7dfb15..8dca4e621 100644 --- a/cicecore/cicedynB/infrastructure/ice_memusage.F90 +++ b/cicecore/cicedynB/infrastructure/ice_memusage.F90 @@ -11,11 +11,11 @@ MODULE ice_memusage implicit none private - + ! PUBLIC: Public interfaces public :: ice_memusage_getusage, & - ice_memusage_init, & + ice_memusage_init, & ice_memusage_print logical(log_kind), public :: memory_stats @@ -39,22 +39,20 @@ subroutine ice_memusage_init(iunit) !----- arguments ----- integer, optional :: iunit !< output unit number for optional writes - + !----- local ----- - ! --- Memory stats --- + ! --- Memory stats --- integer :: msize ! memory size (high water) - integer :: mrss ! resident size (current memory use) - integer :: msize0,msize1 ! temporary size integer :: mrss0,mrss1,mrss2 ! temporary rss integer :: mshare,mtext,mdatastack integer :: ierr - + integer :: ice_memusage_gptl real(dbl_kind),allocatable :: mem_tmp(:) character(*),parameter :: subname = '(ice_memusage_init)' - + !--------------------------------------------------- ! return if memory_stats are off @@ -121,7 +119,7 @@ subroutine ice_memusage_print(iunit,string) integer, intent(in) :: iunit !< unit number to write to character(len=*),optional, intent(in) :: string !< optional string - !----- local --- + !----- local --- real(dbl_kind) :: msize,mrss character(len=128) :: lstring character(*),parameter :: subname = '(ice_memusage_print)' diff --git a/cicecore/cicedynB/infrastructure/ice_memusage_gptl.c b/cicecore/cicedynB/infrastructure/ice_memusage_gptl.c index ec9c2c1d8..309c8824b 100644 --- a/cicecore/cicedynB/infrastructure/ice_memusage_gptl.c +++ b/cicecore/cicedynB/infrastructure/ice_memusage_gptl.c @@ -28,7 +28,7 @@ ** Author: Jim Rosinski ** Credit to Chuck Bardeen for MACOS section (__APPLE__ ifdef) ** -** get_memusage: +** get_memusage: ** ** Designed to be called from Fortran, returns information about memory ** usage in each of 5 input int* args. On Linux read from the /proc @@ -133,7 +133,7 @@ int ice_memusage_gptl (int *size, int *rss, int *share, int *text, int *datastac #endif long long total; int node_config; - + /* memory available */ #if defined(BGP) Kernel_GetPersonality(&pers, sizeof(pers)); @@ -195,7 +195,7 @@ int ice_memusage_gptl (int *size, int *rss, int *share, int *text, int *datastac ** arguments, close the file and return. 
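** Note on the Fortran wrapper in ice_memusage.F90 above: once the
** memory_stats flag is enabled, instrumenting a code region is two
** calls, for example
**
**    use ice_memusage,  only: ice_memusage_init, ice_memusage_print
**    use ice_fileunits, only: nu_diag
**    ...
**    call ice_memusage_init(nu_diag)               ! calibrate once
**    call ice_memusage_print(nu_diag, 'post-init') ! labeled snapshot
**
** (illustrative placement only; the string label is the optional
** argument declared in the interface above).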
*/ - ret = fscanf (fd, "%d %d %d %d %d %d %d", + ret = fscanf (fd, "%d %d %d %d %d %d %d", size, rss, share, text, datastack, &dum, &dum); ret = fclose (fd); return 0; @@ -203,9 +203,9 @@ int ice_memusage_gptl (int *size, int *rss, int *share, int *text, int *datastac #elif (defined __APPLE__) FILE *fd; - char cmd[60]; + char cmd[60]; int pid = (int) getpid (); - + sprintf (cmd, "ps -o vsz -o rss -o tsiz -p %d | grep -v RSS", pid); fd = popen (cmd, "r"); @@ -224,7 +224,7 @@ int ice_memusage_gptl (int *size, int *rss, int *share, int *text, int *datastac if (getrusage (RUSAGE_SELF, &usage) < 0) return -1; - + *size = -1; *rss = usage.ru_maxrss; *share = -1; diff --git a/cicecore/cicedynB/infrastructure/ice_read_write.F90 b/cicecore/cicedynB/infrastructure/ice_read_write.F90 index d5cbe1768..b9074d8f6 100644 --- a/cicecore/cicedynB/infrastructure/ice_read_write.F90 +++ b/cicecore/cicedynB/infrastructure/ice_read_write.F90 @@ -25,7 +25,7 @@ module ice_read_write use ice_fileunits, only: nu_diag #ifdef USE_NETCDF - use netcdf + use netcdf #endif implicit none @@ -33,7 +33,7 @@ module ice_read_write private integer (kind=int_kind), parameter, private :: & - bits_per_byte = 8 ! number of bits per byte. + bits_per_byte = 8 ! number of bits per byte. ! used to determine RecSize in ice_open public :: ice_open, & @@ -148,7 +148,7 @@ subroutine ice_open_ext(nu, filename, nbits) integer (kind=int_kind), intent(in) :: & nu , & ! unit number nbits ! no. of bits per variable (0 for sequential access) - + integer (kind=int_kind) :: RecSize, nbytes character (*) :: filename @@ -468,9 +468,9 @@ subroutine ice_read_xyzt(nu, nrec, work, atype, diag, & if (present(field_loc)) then call scatter_global(work(:,:,k,:), work_g4(:,:,k), master_task, distrb_info, & field_loc, field_type) - + else - + call scatter_global(work(:,:,k,:), work_g4(:,:,k), master_task, distrb_info, & field_loc_noupdate, field_type_noupdate) endif @@ -791,11 +791,11 @@ subroutine ice_write_xyt(nu, nrec, work, atype, diag) elseif (atype == 'ida8') then allocate(work_gi8(nx_global,ny_global)) work_gi8 = nint(work_g1) - write(nu,rec=nrec) work_gi8 + write(nu,rec=nrec) work_gi8 deallocate(work_gi8) elseif (atype == 'rda4') then allocate(work_gr(nx_global,ny_global)) - work_gr = work_g1 + work_gr = real(work_g1,real_kind) write(nu,rec=nrec) work_gr deallocate(work_gr) elseif (atype == 'rda8') then @@ -824,7 +824,7 @@ end subroutine ice_write_xyt !======================================================================= -! Writes an unformatted file +! Writes an unformatted file ! 
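! The ice_open/ice_open_ext routines earlier in this module size their
! direct-access records from the per-value bit count (nbits) via the
! bits_per_byte parameter, which is how 64-bit ('rda8') and 32-bit
! ('rda4') records coexist.  A minimal sketch of that record sizing;
! names are local to the sketch, and recl is assumed to be in bytes as
! on most compilers:

      subroutine open_direct_sketch(nu, filename, nbits, nx, ny)
         integer,          intent(in) :: nu, nbits, nx, ny
         character(len=*), intent(in) :: filename
         integer, parameter :: bits_per_byte = 8
         integer :: RecSize
         RecSize = nx*ny*(nbits/bits_per_byte)    ! bytes per 2-d record
         open(nu, file=filename, form='unformatted', &
              access='direct', recl=RecSize)
      end subroutine open_direct_sketch

! The rda4 hunks above likewise make the double-to-single conversion
! explicit via real(work_g1,real_kind) rather than a plain assignment,
! so the precision loss is intentional and visible to the compiler.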
work is a real array, atype indicates the format of the data subroutine ice_write_xyzt(nu, nrec, work, atype, diag) @@ -895,11 +895,11 @@ subroutine ice_write_xyzt(nu, nrec, work, atype, diag) elseif (atype == 'ida8') then allocate(work_gi9(nx_global,ny_global,nblyr+2)) work_gi9 = nint(work_g4) - write(nu,rec=nrec) work_gi9 + write(nu,rec=nrec) work_gi9 deallocate(work_gi9) elseif (atype == 'rda4') then allocate(work_gr3(nx_global,ny_global,nblyr+2)) - work_gr3 = work_g4 + work_gr3 = real(work_g4,real_kind) write(nu,rec=nrec) work_gr3 deallocate(work_gr3) elseif (atype == 'rda8') then @@ -1002,11 +1002,11 @@ subroutine ice_write_ext(nu, nrec, work, atype, diag) elseif (atype == 'ida8') then allocate(work_gi8(nx,ny)) work_gi8 = nint(work_g1) - write(nu,rec=nrec) work_gi8 + write(nu,rec=nrec) work_gi8 deallocate(work_gi8) elseif (atype == 'rda4') then allocate(work_gr(nx,ny)) - work_gr = work_g1 + work_gr = real(work_g1,real_kind) write(nu,rec=nrec) work_gr deallocate(work_gr) elseif (atype == 'rda8') then @@ -1040,7 +1040,7 @@ end subroutine ice_write_ext subroutine ice_open_nc(filename, fid) - character (char_len_long), intent(in) :: & + character (char_len_long), intent(in) :: & filename ! netCDF filename integer (kind=int_kind), intent(out) :: & @@ -1052,7 +1052,7 @@ subroutine ice_open_nc(filename, fid) #ifdef USE_NETCDF integer (kind=int_kind) :: & - status ! status variable from netCDF routine + status ! status variable from netCDF routine if (my_task == master_task) then @@ -1089,12 +1089,12 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & integer (kind=int_kind), intent(in) :: & fid , & ! file id - nrec ! record number + nrec ! record number logical (kind=log_kind), intent(in) :: & diag ! if true, write diagnostic output - character (len=*), intent(in) :: & + character (len=*), intent(in) :: & varname ! field name in netcdf file real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks), intent(out) :: & @@ -1113,13 +1113,13 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & #ifdef USE_NETCDF ! netCDF file diagnostics: - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & varid , & ! variable id status , & ! status output from netcdf routines ndims , & ! number of dimensions dimlen ! dimension size - integer (kind=int_kind), dimension(10) :: & + integer (kind=int_kind), dimension(10) :: & dimids ! generic size dimids real (kind=dbl_kind) :: & @@ -1200,12 +1200,12 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & endif !-------------------------------------------------------------- - ! Read global array + ! Read global array !-------------------------------------------------------------- if (orca_halogrid .and. .not. present(restart_ext)) then status = nf90_get_var( fid, varid, work_g2, & - start=(/1,1,lnrec/), & + start=(/1,1,lnrec/), & count=(/nx_global+2,ny_global+1,1/)) if (status /= nf90_noerr) then call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & @@ -1214,7 +1214,7 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & work_g1 = work_g2(2:nx_global+1,1:ny_global) else status = nf90_get_var( fid, varid, work_g1, & - start=(/1,1,lnrec/), & + start=(/1,1,lnrec/), & count=(/nx,ny,1/)) if (status /= nf90_noerr) then call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & @@ -1230,8 +1230,8 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & !------------------------------------------------------------------- if (my_task==master_task .and. 
diag) then - write(nu_diag,'(2a,i8,a,i8,2a)') & - subname,' fid= ',fid, ', lnrec = ',lnrec, & + write(nu_diag,'(2a,i8,a,i8,2a)') & + subname,' fid= ',fid, ', lnrec = ',lnrec, & ', varname = ',trim(varname) ! status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) ! write(nu_diag,*) subname,' ndim= ',ndim,', nvar= ',nvar @@ -1294,9 +1294,9 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & integer (kind=int_kind), intent(in) :: & fid , & ! file id - nrec ! record number + nrec ! record number - character (len=*), intent(in) :: & + character (len=*), intent(in) :: & varname ! field name in netcdf file logical (kind=log_kind), intent(in) :: & @@ -1318,14 +1318,14 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & #ifdef USE_NETCDF ! netCDF file diagnostics: - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & n, & ! ncat index varid , & ! variable id status , & ! status output from netcdf routines ndims , & ! number of dimensions dimlen ! dimension size - integer (kind=int_kind), dimension(10) :: & + integer (kind=int_kind), dimension(10) :: & dimids ! generic size dimids real (kind=dbl_kind) :: & @@ -1407,12 +1407,12 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & endif !-------------------------------------------------------------- - ! Read global array + ! Read global array !-------------------------------------------------------------- if (orca_halogrid .and. .not. present(restart_ext)) then status = nf90_get_var( fid, varid, work_g2, & - start=(/1,1,1,lnrec/), & + start=(/1,1,1,lnrec/), & count=(/nx_global+2,ny_global+1,ncat,1/)) if (status /= nf90_noerr) then call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & @@ -1421,7 +1421,7 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & work_g1 = work_g2(2:nx_global+1,1:ny_global,:) else status = nf90_get_var( fid, varid, work_g1, & - start=(/1,1,1,lnrec/), & + start=(/1,1,1,lnrec/), & count=(/nx,ny,ncat,1/)) if (status /= nf90_noerr) then call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & @@ -1437,8 +1437,8 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & !------------------------------------------------------------------- if (my_task==master_task .and. diag) then - write(nu_diag,'(2a,i8,a,i8,2a)') & - subname,' fid= ',fid, ', lnrec = ',lnrec, & + write(nu_diag,'(2a,i8,a,i8,2a)') & + subname,' fid= ',fid, ', lnrec = ',lnrec, & ', varname = ',trim(varname) ! status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) ! write(nu_diag,*) subname,' ndim= ',ndim,', nvar= ',nvar @@ -1506,14 +1506,14 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & field_loc, field_type, restart_ext) use ice_fileunits, only: nu_diag - use ice_domain_size, only: nfsd, nfreq + use ice_domain_size, only: nfreq use ice_gather_scatter, only: scatter_global, scatter_global_ext integer (kind=int_kind), intent(in) :: & fid , & ! file id - nrec ! record number + nrec ! record number - character (len=*), intent(in) :: & + character (len=*), intent(in) :: & varname ! field name in netcdf file logical (kind=log_kind), intent(in) :: & @@ -1533,7 +1533,7 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & ! local variables ! netCDF file diagnostics: - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & varid, & ! variable id status, & ! status output from netcdf routines ndim, nvar, & ! sizes of netcdf file @@ -1542,7 +1542,7 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & ndims, & ! 
number of dimensions dimlen ! dimension size - integer (kind=int_kind), dimension(10) :: & + integer (kind=int_kind), dimension(10) :: & dimids ! generic size dimids real (kind=dbl_kind) :: & @@ -1627,12 +1627,12 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & endif !-------------------------------------------------------------- - ! Read global array + ! Read global array !-------------------------------------------------------------- if (orca_halogrid .and. .not. present(restart_ext)) then status = nf90_get_var( fid, varid, work_g2, & - start=(/1,1,1,lnrec/), & + start=(/1,1,1,lnrec/), & count=(/nx_global+2,ny_global+1,nfreq,1/)) if (status /= nf90_noerr) then call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & @@ -1641,7 +1641,7 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & work_g1 = work_g2(2:nx_global+1,1:ny_global,:) else status = nf90_get_var( fid, varid, work_g1, & - start=(/1,1,1,lnrec/), & + start=(/1,1,1,lnrec/), & count=(/nx,ny,nfreq,1/)) if (status /= nf90_noerr) then call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & @@ -1725,12 +1725,12 @@ subroutine ice_read_nc_point(fid, nrec, varname, work, diag, & integer (kind=int_kind), intent(in) :: & fid , & ! file id - nrec ! record number + nrec ! record number logical (kind=log_kind), intent(in) :: & diag ! if true, write diagnostic output - character (char_len), intent(in) :: & + character (char_len), intent(in) :: & varname ! field name in netcdf file integer (kind=int_kind), optional, intent(in) :: & @@ -1746,7 +1746,7 @@ subroutine ice_read_nc_point(fid, nrec, varname, work, diag, & #ifdef USE_NETCDF ! netCDF file diagnostics: - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & varid, & ! netcdf id for field status, & ! status output from netcdf routines ndim, nvar, & ! sizes of netcdf file @@ -1754,7 +1754,7 @@ subroutine ice_read_nc_point(fid, nrec, varname, work, diag, & ndims, & ! number of dimensions dimlen ! dimension size - integer (kind=int_kind), dimension(10) :: & + integer (kind=int_kind), dimension(10) :: & dimids ! generic size dimids real (kind=dbl_kind), dimension(1) :: & @@ -1805,8 +1805,8 @@ subroutine ice_read_nc_point(fid, nrec, varname, work, diag, & ! Read point variable !-------------------------------------------------------------- - status = nf90_get_var(fid, varid, workg, & - start= (/ lnrec /), & + status = nf90_get_var(fid, varid, workg, & + start= (/ lnrec /), & count=(/ 1 /)) if (status /= nf90_noerr) then call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & @@ -1819,8 +1819,8 @@ subroutine ice_read_nc_point(fid, nrec, varname, work, diag, & !------------------------------------------------------------------- if (my_task==master_task .and. diag) then - write(nu_diag,'(2a,i8,a,i8,2a)') & - subname,' fid= ',fid, ', lnrec = ',lnrec, & + write(nu_diag,'(2a,i8,a,i8,2a)') & + subname,' fid= ',fid, ', lnrec = ',lnrec, & ', varname = ',trim(varname) status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) write(nu_diag,*) subname,' ndim= ',ndim,', nvar= ',nvar @@ -1830,7 +1830,7 @@ subroutine ice_read_nc_point(fid, nrec, varname, work, diag, & enddo endif - work = workg(1) + work = workg(1) #else call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & @@ -1870,11 +1870,7 @@ subroutine ice_read_nc_1D(fid, varname, work, diag, & integer (kind=int_kind) :: & varid, & ! netcdf id for field status, & ! status output from netcdf routines - ndim, nvar, & ! sizes of netcdf file - dimlen ! 
size of dimension - - character (char_len) :: & - dimname ! dimension name + ndim, nvar ! sizes of netcdf file real (kind=dbl_kind), dimension(xdim) :: & workg ! output array (real, 8-byte) @@ -1958,11 +1954,7 @@ subroutine ice_read_nc_2D(fid, varname, work, diag, & integer (kind=int_kind) :: & varid, & ! netcdf id for field status, & ! status output from netcdf routines - ndim, nvar, & ! sizes of netcdf file - dimlen ! size of dimension - - character (char_len) :: & - dimname ! dimension name + ndim, nvar ! sizes of netcdf file real (kind=dbl_kind), dimension(xdim,ydim) :: & workg ! output array (real, 8-byte) @@ -2049,11 +2041,7 @@ subroutine ice_read_nc_3D(fid, varname, work, diag, & integer (kind=int_kind) :: & varid, & ! netcdf id for field status, & ! status output from netcdf routines - ndim, nvar, & ! sizes of netcdf file - dimlen ! size of dimension - - character (char_len) :: & - dimname ! dimension name + ndim, nvar ! sizes of netcdf file real (kind=dbl_kind), dimension(xdim,ydim,zdim) :: & workg ! output array (real, 8-byte) @@ -2121,12 +2109,12 @@ subroutine ice_read_nc_z(fid, nrec, varname, work, diag, & integer (kind=int_kind), intent(in) :: & fid , & ! file id - nrec ! record number + nrec ! record number logical (kind=log_kind), intent(in) :: & diag ! if true, write diagnostic output - character (char_len), intent(in) :: & + character (char_len), intent(in) :: & varname ! field name in netcdf file integer (kind=int_kind), optional, intent(in) :: & @@ -2143,7 +2131,7 @@ subroutine ice_read_nc_z(fid, nrec, varname, work, diag, & work_z ! netCDF file diagnostics: - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & varid, & ! netcdf id for field status, & ! status output from netcdf routines ndim, nvar, & ! sizes of netcdf file @@ -2151,11 +2139,11 @@ subroutine ice_read_nc_z(fid, nrec, varname, work, diag, & ndims, & ! number of dimensions dimlen ! dimension size - integer (kind=int_kind), dimension(10) :: & + integer (kind=int_kind), dimension(10) :: & dimids ! generic size dimids character (char_len) :: & - dimname ! dimension name + dimname ! dimension name integer (kind=int_kind) :: lnrec ! local value of nrec @@ -2204,11 +2192,11 @@ subroutine ice_read_nc_z(fid, nrec, varname, work, diag, & endif !-------------------------------------------------------------- - ! Read global array + ! Read global array !-------------------------------------------------------------- status = nf90_get_var( fid, varid, work_z, & - start=(/1,lnrec/), & + start=(/1,lnrec/), & count=(/nilyr,1/)) if (status /= nf90_noerr) then call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & @@ -2221,8 +2209,8 @@ subroutine ice_read_nc_z(fid, nrec, varname, work, diag, & !------------------------------------------------------------------- if (my_task==master_task .and. diag) then - write(nu_diag,'(2a,i8,a,i8,2a)') & - subname,' fid= ',fid, ', lnrec = ',lnrec, & + write(nu_diag,'(2a,i8,a,i8,2a)') & + subname,' fid= ',fid, ', lnrec = ',lnrec, & ', varname = ',trim(varname) status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) write(nu_diag,*) subname,' ndim= ',ndim,', nvar= ',nvar @@ -2256,7 +2244,7 @@ subroutine ice_write_nc_xy(fid, nrec, varid, work, diag, & integer (kind=int_kind), intent(in) :: & fid , & ! file id varid , & ! variable id - nrec ! record number + nrec ! record number logical (kind=log_kind), intent(in) :: & diag ! if true, write diagnostic output @@ -2276,7 +2264,7 @@ subroutine ice_write_nc_xy(fid, nrec, varid, work, diag, & #ifdef USE_NETCDF ! 
netCDF file diagnostics: - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & status ! status output from netcdf routines ! ndim, nvar, & ! sizes of netcdf file ! id, & ! dimension index @@ -2327,11 +2315,11 @@ subroutine ice_write_nc_xy(fid, nrec, varid, work, diag, & if (my_task == master_task) then !-------------------------------------------------------------- - ! Write global array + ! Write global array !-------------------------------------------------------------- status = nf90_put_var( fid, varid, work_g1, & - start=(/1,1,nrec/), & + start=(/1,1,nrec/), & count=(/nx,ny,1/)) endif ! my_task = master_task @@ -2341,8 +2329,8 @@ subroutine ice_write_nc_xy(fid, nrec, varid, work, diag, & !------------------------------------------------------------------- if (my_task==master_task .and. diag) then -! write(nu_diag,*) & -! subname,' fid= ',fid, ', nrec = ',nrec, & +! write(nu_diag,*) & +! subname,' fid= ',fid, ', nrec = ',nrec, & ! ', varid = ',varid ! status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) ! write(nu_diag,*) subname,' ndim= ',ndim,', nvar= ',nvar @@ -2357,7 +2345,7 @@ subroutine ice_write_nc_xy(fid, nrec, varid, work, diag, & endif deallocate(work_g1) - + #else call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & file=__FILE__, line=__LINE__) @@ -2379,7 +2367,7 @@ subroutine ice_write_nc_xyz(fid, nrec, varid, work, diag, & integer (kind=int_kind), intent(in) :: & fid , & ! file id varid , & ! variable id - nrec ! record number + nrec ! record number logical (kind=log_kind), intent(in) :: & diag ! if true, write diagnostic output @@ -2399,7 +2387,7 @@ subroutine ice_write_nc_xyz(fid, nrec, varid, work, diag, & #ifdef USE_NETCDF ! netCDF file diagnostics: - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & n, & ! ncat index status ! status output from netcdf routines ! ndim, nvar, & ! sizes of netcdf file @@ -2457,11 +2445,11 @@ subroutine ice_write_nc_xyz(fid, nrec, varid, work, diag, & if (my_task == master_task) then !-------------------------------------------------------------- - ! Read global array + ! Read global array !-------------------------------------------------------------- status = nf90_put_var( fid, varid, work_g1, & - start=(/1,1,1,nrec/), & + start=(/1,1,1,nrec/), & count=(/nx,ny,ncat,1/)) endif ! my_task = master_task @@ -2471,8 +2459,8 @@ subroutine ice_write_nc_xyz(fid, nrec, varid, work, diag, & !------------------------------------------------------------------- if (my_task==master_task .and. diag) then -! write(nu_diag,*) & -! subname,' fid= ',fid, ', nrec = ',nrec, & +! write(nu_diag,*) & +! subname,' fid= ',fid, ', nrec = ',nrec, & ! ', varid = ',varid ! status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) ! write(nu_diag,*) subname,' ndim= ',ndim,', nvar= ',nvar @@ -2491,14 +2479,14 @@ subroutine ice_write_nc_xyz(fid, nrec, varid, work, diag, & endif deallocate(work_g1) - + #else call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & file=__FILE__, line=__LINE__) #endif end subroutine ice_write_nc_xyz - + !======================================================================= ! Read a netcdf file. @@ -2506,15 +2494,15 @@ end subroutine ice_write_nc_xyz ! work_g is a real array ! ! Adapted by William Lipscomb, LANL, from ice_read -! Adapted by Ann Keen, Met Office, to read from a netcdf file +! Adapted by Ann Keen, Met Office, to read from a netcdf file subroutine ice_read_global_nc (fid, nrec, varname, work_g, diag) integer (kind=int_kind), intent(in) :: & fid , & ! file id - nrec ! 
record number + nrec ! record number - character (char_len), intent(in) :: & + character (char_len), intent(in) :: & varname ! field name in netcdf file real (kind=dbl_kind), dimension(nx_global,ny_global), intent(out) :: & @@ -2529,12 +2517,12 @@ subroutine ice_read_global_nc (fid, nrec, varname, work_g, diag) #ifdef USE_NETCDF ! netCDF file diagnostics: - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & varid, & ! netcdf id for field status ! status output from netcdf routines ! ndim, nvar, & ! sizes of netcdf file ! id, & ! dimension index -! dimlen ! size of dimension +! dimlen ! size of dimension real (kind=dbl_kind) :: & amin, amax, asum ! min, max values and sum of input array @@ -2551,7 +2539,7 @@ subroutine ice_read_global_nc (fid, nrec, varname, work_g, diag) else allocate(work_g3(1,1)) ! to save memory endif - work_g3(:,:) = c0 + work_g3(:,:) = c0 endif work_g(:,:) = c0 @@ -2569,9 +2557,9 @@ subroutine ice_read_global_nc (fid, nrec, varname, work_g, diag) endif !-------------------------------------------------------------- - ! Read global array + ! Read global array !-------------------------------------------------------------- - + if (orca_halogrid) then status = nf90_get_var( fid, varid, work_g3, & start=(/1,1,nrec/), & @@ -2583,7 +2571,7 @@ subroutine ice_read_global_nc (fid, nrec, varname, work_g, diag) work_g=work_g3(2:nx_global+1,1:ny_global) else status = nf90_get_var( fid, varid, work_g, & - start=(/1,1,nrec/), & + start=(/1,1,nrec/), & count=(/nx_global,ny_global,1/)) if (status /= nf90_noerr) then call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & @@ -2597,8 +2585,8 @@ subroutine ice_read_global_nc (fid, nrec, varname, work_g, diag) !------------------------------------------------------------------- if (my_task == master_task .and. diag) then -! write(nu_diag,*) & -! subname,' fid= ',fid, ', nrec = ',nrec, & +! write(nu_diag,*) & +! subname,' fid= ',fid, ', nrec = ',nrec, & ! ', varname = ',trim(varname) ! status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) ! write(nu_diag,*) subname,' ndim= ',ndim,', nvar= ',nvar @@ -2638,7 +2626,7 @@ subroutine ice_close_nc(fid) #ifdef USE_NETCDF integer (kind=int_kind) :: & - status ! status variable from netCDF routine + status ! status variable from netCDF routine if (my_task == master_task) then status = nf90_close(fid) @@ -2667,13 +2655,13 @@ subroutine ice_read_nc_uv(fid, nrec, nzlev, varname, work, diag, & integer (kind=int_kind), intent(in) :: & fid , & ! file id - nrec , & ! record number + nrec , & ! record number nzlev ! z level logical (kind=log_kind), intent(in) :: & diag ! if true, write diagnostic output - character (len=*), intent(in) :: & + character (len=*), intent(in) :: & varname ! field name in netcdf file real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks), intent(out) :: & @@ -2692,7 +2680,7 @@ subroutine ice_read_nc_uv(fid, nrec, nzlev, varname, work, diag, & #ifdef USE_NETCDF ! netCDF file diagnostics: - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & varid , & ! variable id status ! status output from netcdf routines ! ndim, nvar, & ! sizes of netcdf file @@ -2739,11 +2727,11 @@ subroutine ice_read_nc_uv(fid, nrec, nzlev, varname, work, diag, & endif !-------------------------------------------------------------- - ! Read global array + ! 
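! The read pattern used throughout this module: only the master task
! touches the netCDF file, and the global field is then handed to the
! block decomposition.  In outline:
!
!    if (my_task == master_task) then
!       status = nf90_get_var(fid, varid, work_g, &
!                start=(/1,1,nrec/), count=(/nx_global,ny_global,1/))
!    endif
!    call scatter_global(work, work_g, master_task, distrb_info, &
!                        field_loc, field_type)
!
! ice_read_global_nc above stops after the get_var (no scatter) because
! its callers want the full global array on the master task.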
Read global array !-------------------------------------------------------------- status = nf90_get_var( fid, varid, work_g1, & - start=(/1,1,nzlev,nrec/), & + start=(/1,1,nzlev,nrec/), & count=(/nx,ny,1,1/)) if (status /= nf90_noerr) then call abort_ice(subname//' ERROR: Cannot get variable '//trim(varname), & diff --git a/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 b/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 index 2e236b62a..64b8d2101 100644 --- a/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 +++ b/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 @@ -9,7 +9,7 @@ ! 2004-05: Block structure added by William Lipscomb ! Restart module separated from history module ! 2006 ECH: Accepted some CESM code into mainstream CICE -! Converted to free source form (F90) +! Converted to free source form (F90) ! 2008 ECH: Rearranged order in which internal stresses are written and read ! 2010 ECH: Changed eice, esno to qice, qsno ! 2012 ECH: Added routines for reading/writing extended grid @@ -56,14 +56,14 @@ subroutine dumpfile(filename_spec) use ice_domain, only: nblocks use ice_domain_size, only: nilyr, nslyr, ncat, max_blocks use ice_flux, only: scale_factor, swvdr, swvdf, swidr, swidf, & - strocnxT, strocnyT, sst, frzmlt, iceumask, iceemask, icenmask, & + strocnxT, strocnyT, sst, frzmlt, & stressp_1, stressp_2, stressp_3, stressp_4, & stressm_1, stressm_2, stressm_3, stressm_4, & stress12_1, stress12_2, stress12_3, stress12_4, & stresspT, stressmT, stress12T, & - stresspU, stressmU, stress12U + stresspU, stressmU, stress12U use ice_flux, only: coszen - use ice_grid, only: grid_ice, tmask + use ice_grid, only: grid_ice, tmask, iceumask, iceemask, icenmask use ice_state, only: aicen, vicen, vsnon, trcrn, uvel, vvel, & uvelE, vvelE, uvelN, vvelN @@ -85,7 +85,7 @@ subroutine dumpfile(filename_spec) character(len=*), parameter :: subname = '(dumpfile)' call icepack_query_tracer_indices(nt_Tsfc_out=nt_Tsfc, nt_sice_out=nt_sice, & - nt_qice_out=nt_qice, nt_qsno_out=nt_qsno) + nt_qice_out=nt_qice, nt_qsno_out=nt_qsno) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -162,7 +162,7 @@ subroutine dumpfile(filename_spec) !----------------------------------------------------------------- ! radiation fields !----------------------------------------------------------------- - + if (restart_coszen) call write_restart_field(nu_dump,0,coszen,'ruf8','coszen',1,diag) call write_restart_field(nu_dump,0,scale_factor,'ruf8','scale_factor',1,diag) @@ -214,7 +214,7 @@ subroutine dumpfile(filename_spec) !----------------------------------------------------------------- ! 
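! The mask dump just below uses the standard encoding for logical
! restart fields: the ice-extent masks cannot be passed to the
! real-array restart writers directly, so they are written as 0/1
! reals, equivalent to
!
!    work1(:,:,:) = c0
!    where (iceumask) work1 = c1         ! logical -> 0/1 real
!    call write_restart_field(nu_dump,0,work1,'ruf8','iceumask',1,diag)
!
! and decoded on read with a threshold test such as work1 > p5.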
ice mask for dynamics !----------------------------------------------------------------- - + !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks do j = 1, ny_block @@ -277,14 +277,15 @@ subroutine restartfile (ice_ic) use ice_domain_size, only: nilyr, nslyr, ncat, & max_blocks use ice_flux, only: scale_factor, swvdr, swvdf, swidr, swidf, & - strocnxT, strocnyT, sst, frzmlt, iceumask, iceemask, icenmask, & + strocnxT, strocnyT, sst, frzmlt, & stressp_1, stressp_2, stressp_3, stressp_4, & stressm_1, stressm_2, stressm_3, stressm_4, & stress12_1, stress12_2, stress12_3, stress12_4, & stresspT, stressmT, stress12T, & stresspU, stressmU, stress12U use ice_flux, only: coszen - use ice_grid, only: tmask, grid_type, grid_ice + use ice_grid, only: tmask, grid_type, grid_ice, & + iceumask, iceemask, icenmask use ice_state, only: trcr_depend, aice, vice, vsno, trcr, & aice0, aicen, vicen, vsnon, trcrn, aice_init, uvel, vvel, & uvelE, vvelE, uvelN, vvelN, & @@ -315,7 +316,7 @@ subroutine restartfile (ice_ic) file=__FILE__, line=__LINE__) call icepack_query_tracer_indices(nt_Tsfc_out=nt_Tsfc, nt_sice_out=nt_sice, & - nt_qice_out=nt_qice, nt_qsno_out=nt_qsno) + nt_qice_out=nt_qice, nt_qsno_out=nt_qsno) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -439,7 +440,7 @@ subroutine restartfile (ice_ic) !----------------------------------------------------------------- if (my_task == master_task) write(nu_diag,*) & 'internal stress components' - + call read_restart_field(nu_restart,0,stressp_1,'ruf8', & 'stressp_1',1,diag,field_loc_center,field_type_scalar) ! stressp_1 call read_restart_field(nu_restart,0,stressp_3,'ruf8', & @@ -707,12 +708,12 @@ subroutine restartfile_v4 (ice_ic) use ice_domain_size, only: nilyr, nslyr, ncat, nx_global, ny_global, & max_blocks use ice_flux, only: scale_factor, swvdr, swvdf, swidr, swidf, & - strocnxT, strocnyT, sst, frzmlt, iceumask, & + strocnxT, strocnyT, sst, frzmlt, & stressp_1, stressp_2, stressp_3, stressp_4, & stressm_1, stressm_2, stressm_3, stressm_4, & stress12_1, stress12_2, stress12_3, stress12_4 use ice_gather_scatter, only: scatter_global_stress - use ice_grid, only: tmask + use ice_grid, only: tmask, iceumask use ice_read_write, only: ice_open, ice_read, ice_read_global use ice_state, only: trcr_depend, aice, vice, vsno, trcr, & aice0, aicen, vicen, vsnon, trcrn, aice_init, uvel, vvel, & @@ -754,7 +755,7 @@ subroutine restartfile_v4 (ice_ic) file=__FILE__, line=__LINE__) call icepack_query_tracer_indices(nt_Tsfc_out=nt_Tsfc, nt_sice_out=nt_sice, & - nt_qice_out=nt_qice, nt_qsno_out=nt_qsno) + nt_qice_out=nt_qice, nt_qsno_out=nt_qsno) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -884,7 +885,7 @@ subroutine restartfile_v4 (ice_ic) !----------------------------------------------------------------- if (my_task == master_task) write(nu_diag,*) & 'internal stress components' - + allocate (work_g1(nx_global,ny_global), & work_g2(nx_global,ny_global)) @@ -1054,7 +1055,7 @@ subroutine restartfile_v4 (ice_ic) ! creates new file filename = trim(restart_dir) // '/iced.converted' - call dumpfile(filename) + call dumpfile(filename) call final_restart ! 
stop diff --git a/cicecore/cicedynB/infrastructure/ice_restoring.F90 b/cicecore/cicedynB/infrastructure/ice_restoring.F90 index f21e50513..221d066df 100644 --- a/cicecore/cicedynB/infrastructure/ice_restoring.F90 +++ b/cicecore/cicedynB/infrastructure/ice_restoring.F90 @@ -46,7 +46,7 @@ module ice_restoring !======================================================================= -! Allocates and initializes arrays needed for restoring the ice state +! Allocates and initializes arrays needed for restoring the ice state ! in cells surrounding the grid. @@ -115,7 +115,7 @@ subroutine ice_HaloRestore_init !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block, & !$OMP iglob,jglob,iblock,jblock) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -154,7 +154,7 @@ subroutine ice_HaloRestore_init !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block, & !$OMP i,j,n,nt,ibc,npad) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -280,7 +280,7 @@ subroutine ice_HaloRestore_init enddo if (my_task == master_task) & - write (nu_diag,*) 'ice restoring timescale = ',trestore,' days' + write (nu_diag,*) 'ice restoring timescale = ',trestore,' days' end subroutine ice_HaloRestore_init @@ -318,7 +318,7 @@ subroutine set_restore_var (nx_block, ny_block, & real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & Tair , & ! air temperature (K) - Tf ! freezing temperature (C) + Tf ! freezing temperature (C) real (kind=dbl_kind), dimension (nx_block,ny_block,nilyr), intent(in) :: & salinz , & ! initial salinity profile @@ -395,7 +395,7 @@ subroutine set_restore_var (nx_block, ny_block, & vicen(i,j,n) = c0 vsnon(i,j,n) = c0 if (tmask(i,j)) then - trcrn(i,j,nt_Tsfc,n) = Tf(i,j) ! surface temperature + trcrn(i,j,nt_Tsfc,n) = Tf(i,j) ! surface temperature else trcrn(i,j,nt_Tsfc,n) = c0 ! on land gridcells endif @@ -526,7 +526,7 @@ subroutine set_restore_var (nx_block, ny_block, & ! surface temperature trcrn(i,j,nt_Tsfc,n) = Tsfc ! deg C - ! ice enthalpy, salinity + ! ice enthalpy, salinity do k = 1, nilyr trcrn(i,j,nt_qice+k-1,n) = qin(k) trcrn(i,j,nt_sice+k-1,n) = salinz(i,j,k) @@ -569,7 +569,7 @@ subroutine ice_HaloRestore i,j,iblk,nt,n, &! dummy loop indices ilo,ihi,jlo,jhi, &! beginning and end of physical domain ibc, &! ghost cell column or row - ntrcr, &! + ntrcr, &! npad ! padding column/row counter type (block) :: & @@ -611,7 +611,7 @@ subroutine ice_HaloRestore !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block, & !$OMP i,j,n,nt,ibc,npad) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo diff --git a/cicecore/cicedynB/infrastructure/io/io_binary/ice_history_write.F90 b/cicecore/cicedynB/infrastructure/io/io_binary/ice_history_write.F90 index b98e09814..2a3f042c3 100644 --- a/cicecore/cicedynB/infrastructure/io/io_binary/ice_history_write.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_binary/ice_history_write.F90 @@ -6,7 +6,7 @@ ! Elizabeth C. Hunke and William H. Lipscomb, LANL ! C. M. Bitz, UW ! -! 2004 WHL: Block structure added +! 2004 WHL: Block structure added ! 2006 ECH: Accepted some CESM code into mainstream CICE ! Added ice_present, aicen, vicen; removed aice1...10, vice1...1. ! 
Added histfreq_n and histfreq='h' options, removed histfreq='w' @@ -25,7 +25,7 @@ module ice_history_write implicit none private public :: ice_write_hist - + !======================================================================= contains diff --git a/cicecore/cicedynB/infrastructure/io/io_binary/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_binary/ice_restart.F90 index 5dd35fdf4..b2b438ebe 100644 --- a/cicecore/cicedynB/infrastructure/io/io_binary/ice_restart.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_binary/ice_restart.F90 @@ -17,7 +17,7 @@ module ice_restart use ice_fileunits, only: nu_dump_lvl, nu_dump_pond, nu_dump_hbrine use ice_fileunits, only: nu_dump_iso, nu_dump_snow use ice_fileunits, only: nu_dump_bgc, nu_dump_aero, nu_dump_fsd - use ice_fileunits, only: nu_restart, nu_restart_eap, nu_restart_FY, nu_restart_age + use ice_fileunits, only: nu_restart, nu_restart_eap, nu_restart_FY, nu_restart_age use ice_fileunits, only: nu_restart_lvl, nu_restart_pond, nu_restart_hbrine use ice_fileunits, only: nu_restart_bgc, nu_restart_aero, nu_restart_fsd use ice_fileunits, only: nu_restart_iso, nu_restart_snow @@ -58,7 +58,11 @@ subroutine init_restart_read(ice_ic) logical (kind=log_kind) :: & solve_zsal, tr_fsd, & +#ifdef UNDEPRECATE_CESMPONDS tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_pond_cesm, & +#else + tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, & +#endif tr_pond_topo, tr_pond_lvl, tr_brine, tr_snow character(len=char_len_long) :: & @@ -83,14 +87,18 @@ subroutine init_restart_read(ice_ic) nbtrcr_out=nbtrcr) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_lvl_out=tr_lvl, tr_fsd_out=tr_fsd, & +#ifdef UNDEPRECATE_CESMPONDS tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & +#else + tr_iso_out=tr_iso, tr_aero_out=tr_aero, & +#endif tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, & tr_snow_out=tr_snow, tr_brine_out=tr_brine) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - if (present(ice_ic)) then + if (present(ice_ic)) then filename = trim(ice_ic) else if (my_task == master_task) then @@ -123,7 +131,7 @@ subroutine init_restart_read(ice_ic) call broadcast_scalar(time_forc,master_task) call broadcast_scalar(myear,master_task) call set_date_from_timesecs(timesecs) - + istep1 = istep0 ! 
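! Note on the UNDEPRECATE_CESMPONDS guards introduced in this file:
! the CESM melt-pond scheme is deprecated unless that macro is defined,
! so each tr_pond_cesm query and branch now carries a paired #else
! form without the flag, e.g.
!
!    #ifdef UNDEPRECATE_CESMPONDS
!       if (tr_pond_cesm) close(nu_dump_pond)
!    #endif
!
! leaving tr_pond_lvl and tr_pond_topo as the supported pond options.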
Supplemental restart files @@ -228,6 +236,7 @@ subroutine init_restart_read(ice_ic) endif endif +#ifdef UNDEPRECATE_CESMPONDS if (tr_pond_cesm) then if (my_task == master_task) then n = index(filename0,trim(restart_file)) @@ -247,6 +256,7 @@ subroutine init_restart_read(ice_ic) write(nu_diag,*) 'Reading ',filename(1:lenstr(filename)) endif endif +#endif if (tr_pond_lvl) then if (my_task == master_task) then @@ -414,7 +424,11 @@ subroutine init_restart_write(filename_spec) logical (kind=log_kind) :: & solve_zsal, tr_fsd, & +#ifdef UNDEPRECATE_CESMPONDS tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_pond_cesm, & +#else + tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, & +#endif tr_pond_topo, tr_pond_lvl, tr_brine, tr_snow integer (kind=int_kind) :: & @@ -430,7 +444,11 @@ subroutine init_restart_write(filename_spec) nbtrcr_out=nbtrcr) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_lvl_out=tr_lvl, tr_fsd_out=tr_fsd, & +#ifdef UNDEPRECATE_CESMPONDS tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & +#else + tr_iso_out=tr_iso, tr_aero_out=tr_aero, & +#endif tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, & tr_snow_out=tr_snow, tr_brine_out=tr_brine) call icepack_warnings_flush(nu_diag) @@ -446,7 +464,7 @@ subroutine init_restart_write(filename_spec) restart_file(1:lenstr(restart_file)),'.', & myear,'-',mmonth,'-',mday,'-',msec end if - + ! write pointer (path/file) if (my_task == master_task) then open(nu_rst_pointer,file=pointer_file) @@ -563,6 +581,7 @@ subroutine init_restart_write(filename_spec) endif +#ifdef UNDEPRECATE_CESMPONDS if (tr_pond_cesm) then write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & @@ -582,6 +601,7 @@ subroutine init_restart_write(filename_spec) endif endif +#endif if (tr_pond_lvl) then @@ -789,7 +809,7 @@ subroutine read_restart_field(nu,nrec,work,atype,vname,ndim3, & endif end subroutine read_restart_field - + !======================================================================= ! Writes a single restart field. @@ -851,7 +871,11 @@ subroutine final_restart() logical (kind=log_kind) :: & solve_zsal, & +#ifdef UNDEPRECATE_CESMPONDS tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_pond_cesm, & +#else + tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, & +#endif tr_pond_topo, tr_pond_lvl, tr_brine, tr_snow integer (kind=int_kind) :: & @@ -865,7 +889,11 @@ subroutine final_restart() nbtrcr_out=nbtrcr) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_lvl_out=tr_lvl, & +#ifdef UNDEPRECATE_CESMPONDS tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & +#else + tr_iso_out=tr_iso, tr_aero_out=tr_aero, & +#endif tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, & tr_snow_out=tr_snow, tr_brine_out=tr_brine) call icepack_warnings_flush(nu_diag) @@ -880,7 +908,9 @@ subroutine final_restart() if (tr_iage) close(nu_dump_age) if (tr_FY) close(nu_dump_FY) if (tr_lvl) close(nu_dump_lvl) +#ifdef UNDEPRECATE_CESMPONDS if (tr_pond_cesm) close(nu_dump_pond) +#endif if (tr_pond_lvl) close(nu_dump_pond) if (tr_pond_topo) close(nu_dump_pond) if (tr_snow) close(nu_dump_snow) diff --git a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 index 97bb72dab..019ab8ce9 100644 --- a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 @@ -9,7 +9,7 @@ ! Elizabeth C. Hunke and William H. Lipscomb, LANL ! C. M. Bitz, UW ! -! 
2004 WHL: Block structure added +! 2004 WHL: Block structure added ! 2006 ECH: Accepted some CESM code into mainstream CICE ! Added ice_present, aicen, vicen; removed aice1...10, vice1...1. ! Added histfreq_n and histfreq='h' options, removed histfreq='w' @@ -30,7 +30,7 @@ module ice_history_write implicit none private public :: ice_write_hist - + !======================================================================= contains @@ -48,7 +48,7 @@ subroutine ice_write_hist (ns) use ice_blocks, only: nx_block, ny_block use ice_broadcast, only: broadcast_scalar use ice_calendar, only: msec, timesecs, idate, idate0, write_ic, & - histfreq, histfreq_n, days_per_year, use_leap_years, dayyr, & + histfreq, days_per_year, use_leap_years, dayyr, & hh_init, mm_init, ss_init use ice_communicate, only: my_task, master_task use ice_domain, only: distrb_info @@ -60,7 +60,10 @@ subroutine ice_write_hist (ns) lont_bounds, latt_bounds, lonu_bounds, latu_bounds, & lonn_bounds, latn_bounds, lone_bounds, late_bounds use ice_history_shared - use ice_restart_shared, only: runid, lcdf64 + use ice_restart_shared, only: lcdf64 +#ifdef CESMCOUPLED + use ice_restart_shared, only: runid +#endif #ifdef USE_NETCDF use netcdf #endif @@ -444,7 +447,7 @@ subroutine ice_write_hist (ns) dimidex(4)=kmtidb dimidex(5)=kmtida dimidex(6)=fmtid - + do i = 1, nvar_grdz if (igrdz(i)) then status = nf90_def_var(ncid, var_grdz(i)%short_name, & @@ -779,7 +782,7 @@ subroutine ice_write_hist (ns) work1 = ELAT*rad_to_deg call gather_global(work_g1,work1,master_task,distrb_info) END SELECT - + if (my_task == master_task) then status = nf90_inq_varid(ncid, var_coord(i)%short_name, varid) if (status /= nf90_noerr) call abort_ice(subname// & @@ -900,7 +903,7 @@ subroutine ice_write_hist (ns) call broadcast_scalar(var_nverts(i)%short_name,master_task) SELECT CASE (var_nverts(i)%short_name) CASE ('lont_bounds') - do ivertex = 1, nverts + do ivertex = 1, nverts work1(:,:,:) = lont_bounds(ivertex,:,:,:) call gather_global(work_g1, work1, master_task, distrb_info) if (my_task == master_task) work1_3(ivertex,:,:) = work_g1(:,:) diff --git a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 index f117384d9..f647bd96b 100644 --- a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 @@ -59,7 +59,7 @@ subroutine init_restart_read(ice_ic) character(len=*), parameter :: subname = '(init_restart_read)' #ifdef USE_NETCDF - if (present(ice_ic)) then + if (present(ice_ic)) then filename = trim(ice_ic) else if (my_task == master_task) then @@ -78,7 +78,7 @@ subroutine init_restart_read(ice_ic) status = nf90_open(trim(filename), nf90_nowrite, ncid) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: reading restart ncfile '//trim(filename)) - + if (use_restart_time) then status1 = nf90_noerr status = nf90_get_att(ncid, nf90_global, 'istep1', istep0) @@ -145,7 +145,11 @@ subroutine init_restart_write(filename_spec) logical (kind=log_kind) :: & solve_zsal, skl_bgc, z_tracers, tr_fsd, & +#ifdef UNDEPRECATE_CESMPONDS tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_pond_cesm, & +#else + tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, & +#endif tr_pond_topo, tr_pond_lvl, tr_brine, tr_snow, & tr_bgc_N, tr_bgc_C, tr_bgc_Nit, & tr_bgc_Sil, tr_bgc_DMS, & @@ -181,7 +185,11 @@ subroutine init_restart_write(filename_spec) nbtrcr_out=nbtrcr) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, 
tr_lvl_out=tr_lvl, tr_fsd_out=tr_fsd, & +#ifdef UNDEPRECATE_CESMPONDS tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & +#else + tr_iso_out=tr_iso, tr_aero_out=tr_aero, & +#endif tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, & tr_snow_out=tr_snow, tr_brine_out=tr_brine, & tr_bgc_N_out=tr_bgc_N, tr_bgc_C_out=tr_bgc_C, tr_bgc_Nit_out=tr_bgc_Nit, & @@ -254,12 +262,12 @@ subroutine init_restart_write(filename_spec) call define_rest_field(ncid,'uvelN',dims) call define_rest_field(ncid,'vvelN',dims) endif - + if (grid_ice == 'C') then call define_rest_field(ncid,'uvelE',dims) call define_rest_field(ncid,'vvelN',dims) endif - + if (restart_coszen) call define_rest_field(ncid,'coszen',dims) call define_rest_field(ncid,'scale_factor',dims) @@ -359,11 +367,11 @@ subroutine init_restart_write(filename_spec) enddo endif if (tr_bgc_Fe ) then - do k=1,n_fed + do k=1,n_fed write(nchar,'(i3.3)') k call define_rest_field(ncid,'fed'//trim(nchar),dims) enddo - do k=1,n_fep + do k=1,n_fep write(nchar,'(i3.3)') k call define_rest_field(ncid,'fep'//trim(nchar),dims) enddo @@ -408,10 +416,12 @@ subroutine init_restart_write(filename_spec) call define_rest_field(ncid,'vlvl',dims) end if +#ifdef UNDEPRECATE_CESMPONDS if (tr_pond_cesm) then call define_rest_field(ncid,'apnd',dims) call define_rest_field(ncid,'hpnd',dims) end if +#endif if (tr_pond_topo) then call define_rest_field(ncid,'apnd',dims) @@ -472,17 +482,17 @@ subroutine init_restart_write(filename_spec) if (tr_bgc_PON) & call define_rest_field(ncid,'bgc_PON' ,dims) if (tr_bgc_DON) then - do k = 1, n_don + do k = 1, n_don write(nchar,'(i3.3)') k call define_rest_field(ncid,'bgc_DON'//trim(nchar) ,dims) enddo endif if (tr_bgc_Fe ) then - do k = 1, n_fed + do k = 1, n_fed write(nchar,'(i3.3)') k call define_rest_field(ncid,'bgc_Fed'//trim(nchar) ,dims) enddo - do k = 1, n_fep + do k = 1, n_fep write(nchar,'(i3.3)') k call define_rest_field(ncid,'bgc_Fep'//trim(nchar) ,dims) enddo @@ -547,7 +557,7 @@ subroutine init_restart_write(filename_spec) call define_rest_field(ncid,'zSalinity'//trim(nchar),dims) enddo endif - + if (z_tracers) then if (tr_zaero) then do n = 1, n_zaero @@ -647,14 +657,14 @@ subroutine init_restart_write(filename_spec) enddo endif if (tr_bgc_Fe ) then - do n = 1, n_fed + do n = 1, n_fed write(ncharb,'(i3.3)') n do k = 1, nblyr+3 write(nchar,'(i3.3)') k call define_rest_field(ncid,'bgc_Fed'//trim(ncharb)//trim(nchar),dims) enddo enddo - do n = 1, n_fep + do n = 1, n_fep write(ncharb,'(i3.3)') n do k = 1, nblyr+3 write(nchar,'(i3.3)') k @@ -766,7 +776,7 @@ subroutine read_restart_field(nu,nrec,work,atype,vname,ndim3, & #endif end subroutine read_restart_field - + !======================================================================= ! Writes a single restart field. 
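[Editor's sketch] The init_restart_write hunks above assemble numbered restart field names (fed001, bgc_DON001, bgc_Fed001002, ...) by concatenating a fixed prefix with tracer and layer indices rendered through the zero-padded i3.3 edit descriptor. A minimal standalone sketch of that naming pattern, with hypothetical loop bounds standing in for n_fed and nblyr+3:

      ! Sketch only: builds CICE-style numbered restart field names.
      program restart_names
         implicit none
         integer :: n, k
         character(len=3)  :: ncharb, nchar
         character(len=64) :: vname
         do n = 1, 2                       ! hypothetical stand-in for n_fed
            write(ncharb,'(i3.3)') n       ! 1 -> '001' (zero-padded, width 3)
            do k = 1, 3                    ! hypothetical stand-in for nblyr+3
               write(nchar,'(i3.3)') k
               vname = 'bgc_Fed'//trim(ncharb)//trim(nchar)
               print *, trim(vname)        ! bgc_Fed001001, bgc_Fed001002, ...
            end do
         end do
      end program restart_names

Because i3.3 always emits exactly three digits, the generated names sort lexically in index order, so name-based lookups in the restart file stay unambiguous for up to 999 indices.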
@@ -808,7 +818,7 @@ subroutine write_restart_field(nu,nrec,work,atype,vname,ndim3,diag) #ifdef USE_NETCDF status = nf90_inq_varid(ncid,trim(vname),varid) - if (ndim3 == ncat) then + if (ndim3 == ncat) then if (restart_ext) then call ice_write_nc(ncid, 1, varid, work, diag, restart_ext, varname=trim(vname)) else @@ -882,7 +892,7 @@ subroutine define_rest_field(ncid, vname, dims) call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & file=__FILE__, line=__LINE__) #endif - + end subroutine define_rest_field !======================================================================= diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 index 92f7663a2..6407d8c76 100644 --- a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 @@ -6,7 +6,7 @@ ! Elizabeth C. Hunke and William H. Lipscomb, LANL ! C. M. Bitz, UW ! -! 2004 WHL: Block structure added +! 2004 WHL: Block structure added ! 2006 ECH: Accepted some CESM code into mainstream CICE ! Added ice_present, aicen, vicen; removed aice1...10, vice1...1. ! Added histfreq_n and histfreq='h' options, removed histfreq='w' @@ -27,7 +27,7 @@ module ice_history_write implicit none private public :: ice_write_hist - + !======================================================================= contains @@ -407,7 +407,7 @@ subroutine ice_write_hist (ns) endif if (f_bounds) then status = pio_put_att(File, varid, 'bounds', trim(coord_bounds(i))) - endif + endif enddo ! Extra dimensions (NCAT, NZILYR, NZSLYR, NZBLYR, NZALYR, NFSD) @@ -446,14 +446,14 @@ subroutine ice_write_hist (ns) if (f_bounds) then status = pio_def_var(File, trim(var_nverts(i)%short_name), & lprecision,dimid_nverts, varid) - status = & + status = & pio_put_att(File,varid, 'long_name', trim(var_nverts(i)%long_name)) status = & pio_put_att(File, varid, 'units', trim(var_nverts(i)%units)) call ice_write_hist_fill(File,varid,var_nverts(i)%short_name,history_precision) endif enddo - + !----------------------------------------------------------------- ! define attributes for time-variant variables !----------------------------------------------------------------- @@ -507,7 +507,7 @@ subroutine ice_write_hist (ns) call ice_write_hist_attrs(File,varid,avail_hist_fields(n),ns) endif enddo ! num_avail_hist_fields_3Dz - + !----------------------------------------------------------------- ! 
3D (biology ice layers) !----------------------------------------------------------------- @@ -690,7 +690,7 @@ subroutine ice_write_hist (ns) bnd_start = (/1,1/) bnd_length = (/2,1/) status = pio_put_var(File,varid,ival=time_bounds, & - start=bnd_start(:),count=bnd_length(:)) + start=bnd_start(:),count=bnd_length(:)) endif !----------------------------------------------------------------- @@ -738,7 +738,7 @@ subroutine ice_write_hist (ns) status = pio_inq_varid(File, var_grdz(i)%short_name, varid) SELECT CASE (var_grdz(i)%short_name) CASE ('NCAT') - status = pio_put_var(File, varid, hin_max(1:ncat_hist)) + status = pio_put_var(File, varid, hin_max(1:ncat_hist)) CASE ('NFSD') status = pio_put_var(File, varid, floe_rad_c(1:nfsd_hist)) CASE ('VGRDi') @@ -826,35 +826,35 @@ subroutine ice_write_hist (ns) do i = 1, nvar_verts SELECT CASE (var_nverts(i)%short_name) CASE ('lont_bounds') - do ivertex = 1, nverts + do ivertex = 1, nverts workd3v(ivertex,:,:,:) = lont_bounds(ivertex,:,:,1:nblocks) enddo CASE ('latt_bounds') - do ivertex = 1, nverts + do ivertex = 1, nverts workd3v(ivertex,:,:,:) = latt_bounds(ivertex,:,:,1:nblocks) enddo CASE ('lonu_bounds') - do ivertex = 1, nverts + do ivertex = 1, nverts workd3v(ivertex,:,:,:) = lonu_bounds(ivertex,:,:,1:nblocks) enddo CASE ('latu_bounds') - do ivertex = 1, nverts + do ivertex = 1, nverts workd3v(ivertex,:,:,:) = latu_bounds(ivertex,:,:,1:nblocks) enddo CASE ('lonn_bounds') - do ivertex = 1, nverts + do ivertex = 1, nverts workd3v(ivertex,:,:,:) = lonn_bounds(ivertex,:,:,1:nblocks) enddo CASE ('latn_bounds') - do ivertex = 1, nverts + do ivertex = 1, nverts workd3v(ivertex,:,:,:) = latn_bounds(ivertex,:,:,1:nblocks) enddo CASE ('lone_bounds') - do ivertex = 1, nverts + do ivertex = 1, nverts workd3v(ivertex,:,:,:) = lone_bounds(ivertex,:,:,1:nblocks) enddo CASE ('late_bounds') - do ivertex = 1, nverts + do ivertex = 1, nverts workd3v(ivertex,:,:,:) = late_bounds(ivertex,:,:,1:nblocks) enddo END SELECT diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_pio.F90 b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_pio.F90 index d4149f7bf..b242f542b 100644 --- a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_pio.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_pio.F90 @@ -10,7 +10,7 @@ module ice_pio use ice_communicate use ice_domain, only : nblocks, blocks_ice use ice_domain_size - use ice_fileunits + use ice_fileunits use ice_exit, only: abort_ice use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted use pio @@ -52,7 +52,7 @@ subroutine ice_pio_init(mode, filename, File, clobber, cdf64, iotype) use perf_mod, only : t_initf #endif #endif - + implicit none character(len=*) , intent(in), optional :: mode character(len=*) , intent(in), optional :: filename @@ -140,14 +140,14 @@ subroutine ice_pio_init(mode, filename, File, clobber, cdf64, iotype) #endif if (present(mode) .and. present(filename) .and. present(File)) then - + if (trim(mode) == 'write') then lclobber = .false. if (present(clobber)) lclobber=clobber - + lcdf64 = .false. if (present(cdf64)) lcdf64=cdf64 - + if (File%fh<0) then ! filename not open inquire(file=trim(filename),exist=exists) @@ -178,7 +178,7 @@ subroutine ice_pio_init(mode, filename, File, clobber, cdf64, iotype) ! 
filename is already open, just return endif end if - + if (trim(mode) == 'read') then inquire(file=trim(filename),exist=exists) if (exists) then @@ -205,7 +205,7 @@ subroutine ice_pio_initdecomp_2d(iodesc, precision) integer (kind=int_kind) :: & iblk,ilo,ihi,jlo,jhi,lon,lat,i,j,n,k - type(block) :: this_block + type(block) :: this_block integer(kind=int_kind), pointer :: dof2d(:) integer(kind=int_kind) :: lprecision @@ -218,12 +218,12 @@ subroutine ice_pio_initdecomp_2d(iodesc, precision) n=0 do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi - + do j=1,ny_block do i=1,nx_block n = n+1 @@ -249,7 +249,7 @@ subroutine ice_pio_initdecomp_2d(iodesc, precision) endif deallocate(dof2d) - + end subroutine ice_pio_initdecomp_2d !================================================================================ @@ -261,9 +261,9 @@ subroutine ice_pio_initdecomp_3d (ndim3, iodesc, remap, precision) logical, optional :: remap integer(kind=int_kind), optional, intent(in) :: precision integer (kind=int_kind) :: & - iblk,ilo,ihi,jlo,jhi,lon,lat,i,j,n,k + iblk,ilo,ihi,jlo,jhi,lon,lat,i,j,n,k - type(block) :: this_block + type(block) :: this_block logical :: lremap integer(kind=int_kind), pointer :: dof3d(:) integer(kind=int_kind) :: lprecision @@ -279,12 +279,12 @@ subroutine ice_pio_initdecomp_3d (ndim3, iodesc, remap, precision) ! Reorder the ndim3 and nblocks loops to avoid a temporary array in restart read/write n=0 do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi - do k=1,ndim3 + do k=1,ndim3 do j=1,ny_block do i=1,nx_block n = n+1 @@ -295,7 +295,7 @@ subroutine ice_pio_initdecomp_3d (ndim3, iodesc, remap, precision) else lon = this_block%i_glob(i) lat = this_block%j_glob(j) - dof3d(n) = ((lat-1)*nx_global + lon) + (k-1)*nx_global*ny_global + dof3d(n) = ((lat-1)*nx_global + lon) + (k-1)*nx_global*ny_global endif enddo !i enddo !j @@ -303,9 +303,9 @@ subroutine ice_pio_initdecomp_3d (ndim3, iodesc, remap, precision) enddo ! 
iblk else n=0 - do k=1,ndim3 + do k=1,ndim3 do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -320,7 +320,7 @@ subroutine ice_pio_initdecomp_3d (ndim3, iodesc, remap, precision) else lon = this_block%i_glob(i) lat = this_block%j_glob(j) - dof3d(n) = ((lat-1)*nx_global + lon) + (k-1)*nx_global*ny_global + dof3d(n) = ((lat-1)*nx_global + lon) + (k-1)*nx_global*ny_global endif enddo !i enddo !j @@ -350,9 +350,9 @@ subroutine ice_pio_initdecomp_3d_inner(ndim3, inner_dim, iodesc, precision) integer(kind=int_kind), optional, intent(in) :: precision integer (kind=int_kind) :: & - iblk,ilo,ihi,jlo,jhi,lon,lat,i,j,n,k + iblk,ilo,ihi,jlo,jhi,lon,lat,i,j,n,k - type(block) :: this_block + type(block) :: this_block integer(kind=int_kind), pointer :: dof3d(:) integer(kind=int_kind) :: lprecision @@ -365,12 +365,12 @@ subroutine ice_pio_initdecomp_3d_inner(ndim3, inner_dim, iodesc, precision) n=0 do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi - + do j=1,ny_block do i=1,nx_block do k=1,ndim3 @@ -410,9 +410,9 @@ subroutine ice_pio_initdecomp_4d (ndim3, ndim4, iodesc, precision) integer(kind=int_kind), optional, intent(in) :: precision integer (kind=int_kind) :: & - iblk,ilo,ihi,jlo,jhi,lon,lat,i,j,n,k,l + iblk,ilo,ihi,jlo,jhi,lon,lat,i,j,n,k,l - type(block) :: this_block + type(block) :: this_block integer(kind=int_kind), pointer :: dof4d(:) integer(kind=int_kind) :: lprecision @@ -427,12 +427,12 @@ subroutine ice_pio_initdecomp_4d (ndim3, ndim4, iodesc, precision) do l=1,ndim4 do k=1,ndim3 do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi - + do j=1,ny_block do i=1,nx_block n = n+1 @@ -444,8 +444,8 @@ subroutine ice_pio_initdecomp_4d (ndim3, ndim4, iodesc, precision) lon = this_block%i_glob(i) lat = this_block%j_glob(j) dof4d(n) = ((lat-1)*nx_global + lon) & - + (k-1)*nx_global*ny_global & - + (l-1)*nx_global*ny_global*ndim3 + + (k-1)*nx_global*ny_global & + + (l-1)*nx_global*ny_global*ndim3 endif enddo !i enddo !j @@ -464,7 +464,7 @@ subroutine ice_pio_initdecomp_4d (ndim3, ndim4, iodesc, precision) deallocate(dof4d) end subroutine ice_pio_initdecomp_4d - + !================================================================================ end module ice_pio diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 index 59682fe32..1124cc048 100644 --- a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 @@ -61,7 +61,7 @@ subroutine init_restart_read(ice_ic) character(len=*), parameter :: subname = '(init_restart_read)' - if (present(ice_ic)) then + if (present(ice_ic)) then filename = trim(ice_ic) else if (my_task == master_task) then @@ -83,7 +83,7 @@ subroutine init_restart_read(ice_ic) if (restart_format == 'pio_pnetcdf') iotype = PIO_IOTYPE_PNETCDF File%fh=-1 call ice_pio_init(mode='read', filename=trim(filename), File=File, iotype=iotype) - + call ice_pio_initdecomp(iodesc=iodesc2d, precision=8) call ice_pio_initdecomp(ndim3=ncat , iodesc=iodesc3d_ncat,remap=.true., precision=8) @@ -122,7 +122,7 @@ subroutine 
init_restart_read(ice_ic) ! call broadcast_scalar(time,master_task) ! call broadcast_scalar(time_forc,master_task) call broadcast_scalar(myear,master_task) - + istep1 = istep0 ! if runid is bering then need to correct npt for istep0 @@ -151,7 +151,11 @@ subroutine init_restart_write(filename_spec) solve_zsal, skl_bgc, z_tracers logical (kind=log_kind) :: & +#ifdef UNDEPRECATE_CESMPONDS tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_pond_cesm, & +#else + tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, & +#endif tr_pond_topo, tr_pond_lvl, tr_brine, tr_snow, & tr_bgc_N, tr_bgc_C, tr_bgc_Nit, & tr_bgc_Sil, tr_bgc_DMS, & @@ -187,7 +191,11 @@ subroutine init_restart_write(filename_spec) call icepack_query_tracer_sizes(nbtrcr_out=nbtrcr) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_lvl_out=tr_lvl, & +#ifdef UNDEPRECATE_CESMPONDS tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & +#else + tr_iso_out=tr_iso, tr_aero_out=tr_aero, & +#endif tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, & tr_snow_out=tr_snow, tr_brine_out=tr_brine, & tr_bgc_N_out=tr_bgc_N, tr_bgc_C_out=tr_bgc_C, tr_bgc_Nit_out=tr_bgc_Nit, & @@ -211,7 +219,7 @@ subroutine init_restart_write(filename_spec) restart_file(1:lenstr(restart_file)),'.', & myear,'-',mmonth,'-',mday,'-',msec end if - + if (restart_format(1:3) /= 'bin') filename = trim(filename) // '.nc' ! write pointer (path/file) @@ -222,7 +230,7 @@ subroutine init_restart_write(filename_spec) endif ! if (restart_format(1:3) == 'pio') then - + iotype = PIO_IOTYPE_NETCDF if (restart_format == 'pio_pnetcdf') iotype = PIO_IOTYPE_PNETCDF File%fh=-1 @@ -412,10 +420,12 @@ subroutine init_restart_write(filename_spec) call define_rest_field(File,'vlvl',dims) end if +#ifdef UNDEPRECATE_CESMPONDS if (tr_pond_cesm) then call define_rest_field(File,'apnd',dims) call define_rest_field(File,'hpnd',dims) end if +#endif if (tr_pond_topo) then call define_rest_field(File,'apnd',dims) @@ -797,14 +807,14 @@ subroutine read_restart_field(nu,nrec,work,atype,vname,ndim3,diag, & write(nu_diag,*) ' min, max, sum =', amin, amax, asum, trim(vname) endif endif - + endif ! else ! call abort_ice(subname//"ERROR: Invalid restart_format: "//trim(restart_format)) ! endif ! restart_format end subroutine read_restart_field - + !======================================================================= ! Writes a single restart field. @@ -852,10 +862,10 @@ subroutine write_restart_field(nu,nrec,work,atype,vname,ndim3,diag) write(nu_diag,*)'Parallel restart file write: ',vname status = pio_inq_varid(File,trim(vname),vardesc) - + status = pio_inq_varndims(File, vardesc, ndims) - if (ndims==3) then + if (ndims==3) then call pio_write_darray(File, vardesc, iodesc3d_ncat,work(:,:,:,1:nblocks), & status, fillval=c0) elseif (ndims == 2) then @@ -927,7 +937,7 @@ subroutine define_rest_field(File, vname, dims) character(len=*), parameter :: subname = '(define_rest_field)' status = pio_def_var(File,trim(vname),pio_double,dims,vardesc) - + end subroutine define_rest_field !======================================================================= diff --git a/cicecore/drivers/direct/hadgem3/CICE.F90 b/cicecore/drivers/direct/hadgem3/CICE.F90 index b0176e801..fe322a04d 100644 --- a/cicecore/drivers/direct/hadgem3/CICE.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE.F90 @@ -1,21 +1,21 @@ !======================================================================= ! Copyright (c) 2022, Triad National Security, LLC ! All rights reserved. -! +! ! Copyright 2022. 
Triad National Security, LLC. This software was -! produced under U.S. Government contract DE-AC52-06NA25396 for Los +! produced under U.S. Government contract DE-AC52-06NA25396 for Los ! Alamos National Laboratory (LANL), which is operated by Triad -! National Security, LLC for the U.S. Department of Energy. The U.S. -! Government has rights to use, reproduce, and distribute this software. -! NEITHER THE GOVERNMENT NOR TRIAD NATIONAL SECURITY, LLC MAKES ANY +! National Security, LLC for the U.S. Department of Energy. The U.S. +! Government has rights to use, reproduce, and distribute this software. +! NEITHER THE GOVERNMENT NOR TRIAD NATIONAL SECURITY, LLC MAKES ANY ! WARRANTY, EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF -! THIS SOFTWARE. If software is modified to produce derivative works, -! such modified software should be clearly marked, so as not to confuse +! THIS SOFTWARE. If software is modified to produce derivative works, +! such modified software should be clearly marked, so as not to confuse ! it with the version available from LANL. ! ! The full license and distribution policy are available from ! https://github.com/CICE-Consortium -! +! !======================================================================= ! ! Main driver routine for CICE. Initializes and steps through the model. @@ -48,7 +48,7 @@ program icemodel call CICE_Run !----------------------------------------------------------------- - ! Finalize CICE + ! Finalize CICE !----------------------------------------------------------------- call CICE_Finalize diff --git a/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 b/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 index b2a0e3cd1..87dc8d9a1 100644 --- a/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 @@ -176,11 +176,11 @@ subroutine cice_init if (icepack_warnings_aborted()) call abort_ice(subname, & file=__FILE__,line= __LINE__) - if (tr_aero .or. tr_zaero) call faero_optics !initialize aerosol optical + if (tr_aero .or. tr_zaero) call faero_optics !initialize aerosol optical !property tables - ! Initialize shortwave components using swdn from previous timestep - ! if restarting. These components will be scaled to current forcing + ! Initialize shortwave components using swdn from previous timestep + ! if restarting. These components will be scaled to current forcing ! in prep_radiation. if (trim(runtype) == 'continue' .or. restart) & call init_shortwave ! initialize radiative transfer @@ -215,7 +215,7 @@ subroutine cice_init call init_flux_atm ! initialize atmosphere fluxes sent to coupler call init_flux_ocn ! initialize ocean fluxes sent to coupler - if (write_ic) call accum_hist(dt) ! write initial conditions + if (write_ic) call accum_hist(dt) ! 
write initial conditions end subroutine cice_init @@ -235,11 +235,17 @@ subroutine init_restart use ice_grid, only: tmask use ice_init, only: ice_ic use ice_init_column, only: init_age, init_FY, init_lvl, & +#ifdef UNDEPRECATE_CESMPONDS init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & +#else + init_meltponds_lvl, init_meltponds_topo, & +#endif init_aerosol, init_hbrine, init_bgc, init_fsd use ice_restart_column, only: restart_age, read_restart_age, & restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & +#ifdef UNDEPRECATE_CESMPONDS restart_pond_cesm, read_restart_pond_cesm, & +#endif restart_pond_lvl, read_restart_pond_lvl, & restart_pond_topo, read_restart_pond_topo, & restart_fsd, read_restart_fsd, & @@ -254,7 +260,11 @@ subroutine init_restart i, j , & ! horizontal indices iblk ! block index logical(kind=log_kind) :: & +#ifdef UNDEPRECATE_CESMPONDS tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & +#else + tr_iage, tr_FY, tr_lvl, tr_pond_lvl, & +#endif tr_pond_topo, tr_fsd, tr_aero, tr_brine, & skl_bgc, z_tracers, solve_zsal integer(kind=int_kind) :: & @@ -273,7 +283,11 @@ subroutine init_restart call icepack_query_parameters(skl_bgc_out=skl_bgc, & z_tracers_out=z_tracers, solve_zsal_out=solve_zsal) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & +#ifdef UNDEPRECATE_CESMPONDS tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & +#else + tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & +#endif tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & tr_fsd_out=tr_fsd) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & @@ -283,7 +297,7 @@ subroutine init_restart if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - if (trim(runtype) == 'continue') then + if (trim(runtype) == 'continue') then ! start from core restart file call restartfile() ! given by pointer in ice_in call calendar() ! update time parameters @@ -294,17 +308,17 @@ subroutine init_restart ! call restartfile_v4 (ice_ic) ! CICE v4.1 binary restart file !!! uncomment if EAP restart data exists ! if (kdyn == 2) call read_restart_eap - endif + endif ! tracers - ! ice age tracer - if (tr_iage) then + ! ice age tracer + if (tr_iage) then if (trim(runtype) == 'continue') & restart_age = .true. if (restart_age) then call read_restart_age else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_age(trcrn(:,:,nt_iage,:,iblk)) enddo ! iblk endif @@ -315,7 +329,7 @@ subroutine init_restart if (restart_FY) then call read_restart_FY else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_FY(trcrn(:,:,nt_FY,:,iblk)) enddo ! iblk endif @@ -326,12 +340,13 @@ subroutine init_restart if (restart_lvl) then call read_restart_lvl else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_lvl(iblk,trcrn(:,:,nt_alvl,:,iblk), & trcrn(:,:,nt_vlvl,:,iblk)) enddo ! iblk endif endif +#ifdef UNDEPRECATE_CESMPONDS ! CESM melt ponds if (tr_pond_cesm) then if (trim(runtype) == 'continue') & @@ -339,12 +354,13 @@ subroutine init_restart if (restart_pond_cesm) then call read_restart_pond_cesm else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_cesm(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk)) enddo ! iblk endif endif +#endif ! 
level-ice melt ponds if (tr_pond_lvl) then if (trim(runtype) == 'continue') & @@ -352,7 +368,7 @@ subroutine init_restart if (restart_pond_lvl) then call read_restart_pond_lvl else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_lvl(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk), & trcrn(:,:,nt_ipnd,:,iblk), & @@ -367,7 +383,7 @@ subroutine init_restart if (restart_pond_topo) then call read_restart_pond_topo else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_topo(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk), & trcrn(:,:,nt_ipnd,:,iblk)) @@ -388,7 +404,7 @@ subroutine init_restart if (restart_aero) then call read_restart_aero else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_aerosol(trcrn(:,:,nt_aero:nt_aero+4*n_aero-1,:,iblk)) enddo ! iblk endif ! .not. restart_aero diff --git a/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 b/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 index 61f261bb2..91f7985bd 100644 --- a/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 @@ -7,7 +7,7 @@ ! William H. Lipscomb, LANL ! ! 2006 ECH: moved exit timeLoop to prevent execution of unnecessary timestep -! 2006 ECH: Streamlined for efficiency +! 2006 ECH: Streamlined for efficiency ! 2006 ECH: Converted to free source form (F90) ! 2007 BPB: Modified Delta-Eddington shortwave interface ! 2008 ECH: moved ESMF code to its own driver @@ -46,14 +46,14 @@ subroutine CICE_Run use ice_calendar, only: stop_now, advance_timestep use ice_forcing, only: get_forcing_atmo, get_forcing_ocn, & get_wave_spec - use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, fzaero_data, & + use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, fzaero_data, & faero_default use ice_flux, only: init_flux_atm, init_flux_ocn use ice_timers, only: ice_timer_start, ice_timer_stop, & timer_couple, timer_step logical (kind=log_kind) :: & tr_aero, tr_zaero, skl_bgc, z_tracers, wave_spec, tr_fsd - + character(len=*), parameter :: subname = '(CICE_Run)' !-------------------------------------------------------------------- @@ -118,7 +118,7 @@ subroutine CICE_Run ! end of timestep loop !-------------------------------------------------------------------- - call ice_timer_stop(timer_step) ! end timestepping loop timer + call ice_timer_stop(timer_step) ! end timestepping loop timer end subroutine CICE_Run @@ -145,7 +145,11 @@ subroutine ice_step use ice_history_bgc, only: init_history_bgc use ice_restart, only: final_restart use ice_restart_column, only: write_restart_age, write_restart_FY, & +#ifdef UNDEPRECATE_CESMPONDS write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & +#else + write_restart_lvl, write_restart_pond_lvl, & +#endif write_restart_pond_topo, write_restart_aero, write_restart_fsd, & write_restart_bgc, write_restart_hbrine use ice_restart_driver, only: dumpfile @@ -159,7 +163,7 @@ subroutine ice_step timer_hist, timer_readwrite integer (kind=int_kind) :: & - iblk , & ! block index + iblk , & ! block index k , & ! dynamics supercycling index ktherm ! 
thermodynamics is off when ktherm = -1 @@ -168,7 +172,11 @@ subroutine ice_step logical (kind=log_kind) :: & tr_iage, tr_FY, tr_lvl, tr_fsd, & +#ifdef UNDEPRECATE_CESMPONDS tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_aero, & +#else + tr_pond_lvl, tr_pond_topo, tr_brine, tr_aero, & +#endif calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec character(len=*), parameter :: subname = '(ice_step)' @@ -177,7 +185,11 @@ subroutine ice_step solve_zsal_out=solve_zsal, z_tracers_out=z_tracers, ktherm_out=ktherm, & wave_spec_out=wave_spec) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & +#ifdef UNDEPRECATE_CESMPONDS tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & +#else + tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & +#endif tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & tr_fsd_out=tr_fsd) call icepack_warnings_flush(nu_diag) @@ -226,7 +238,7 @@ subroutine ice_step !----------------------------------------------------------------- ! thermodynamics and biogeochemistry !----------------------------------------------------------------- - + call step_therm1 (dt, iblk) ! vertical thermodynamics call biogeochemistry (dt, iblk) ! biogeochemistry call step_therm2 (dt, iblk) ! ice thickness distribution thermo @@ -321,13 +333,15 @@ subroutine ice_step if (tr_iage) call write_restart_age if (tr_FY) call write_restart_FY if (tr_lvl) call write_restart_lvl +#ifdef UNDEPRECATE_CESMPONDS if (tr_pond_cesm) call write_restart_pond_cesm +#endif if (tr_pond_lvl) call write_restart_pond_lvl if (tr_pond_topo) call write_restart_pond_topo if (tr_fsd) call write_restart_fsd if (tr_aero) call write_restart_aero if (solve_zsal .or. skl_bgc .or. z_tracers) & - call write_restart_bgc + call write_restart_bgc if (tr_brine) call write_restart_hbrine if (kdyn == 2) call write_restart_eap call final_restart @@ -336,7 +350,7 @@ subroutine ice_step call ice_timer_stop(timer_readwrite) ! reading/writing end subroutine ice_step - + !======================================================================= ! ! Prepare for coupling @@ -366,12 +380,12 @@ subroutine coupling_prep (iblk) use ice_step_mod, only: ocean_mixed_layer use ice_timers, only: timer_couple, ice_timer_start, ice_timer_stop - integer (kind=int_kind), intent(in) :: & - iblk ! block index + integer (kind=int_kind), intent(in) :: & + iblk ! block index ! local variables - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & ilo,ihi,jlo,jhi, & ! beginning and end of physical domain n , & ! thickness category index i,j , & ! horizontal indices @@ -452,7 +466,7 @@ subroutine coupling_prep (iblk) do j = jlo, jhi do i = ilo, ihi if (aicen(i,j,n,iblk) > puny) then - + alvdf(i,j,iblk) = alvdf(i,j,iblk) & + alvdfn(i,j,n,iblk)*aicen(i,j,n,iblk) alidf(i,j,iblk) = alidf(i,j,iblk) & @@ -477,7 +491,7 @@ subroutine coupling_prep (iblk) + apeffn(i,j,n,iblk)*aicen(i,j,n,iblk) snowfrac(i,j,iblk) = snowfrac(i,j,iblk) & ! for history + snowfracn(i,j,n,iblk)*aicen(i,j,n,iblk) - + endif ! 
aicen > puny enddo enddo @@ -507,8 +521,8 @@ subroutine coupling_prep (iblk) fsalt_ai (i,j,iblk) = fsalt (i,j,iblk) fhocn_ai (i,j,iblk) = fhocn (i,j,iblk) fswthru_ai(i,j,iblk) = fswthru(i,j,iblk) - fzsal_ai (i,j,iblk) = fzsal (i,j,iblk) - fzsal_g_ai(i,j,iblk) = fzsal_g(i,j,iblk) + fzsal_ai (i,j,iblk) = fzsal (i,j,iblk) + fzsal_g_ai(i,j,iblk) = fzsal_g(i,j,iblk) if (nbtrcr > 0) then do k = 1, nbtrcr @@ -529,7 +543,7 @@ subroutine coupling_prep (iblk) enddo !----------------------------------------------------------------- - ! Divide fluxes by ice area + ! Divide fluxes by ice area ! - the CESM coupler assumes fluxes are per unit ice area ! - also needed for global budget in diagnostics !----------------------------------------------------------------- @@ -554,21 +568,21 @@ subroutine coupling_prep (iblk) alvdf (:,:,iblk), alidf (:,:,iblk), & fzsal (:,:,iblk), fzsal_g (:,:,iblk), & flux_bio(:,:,1:nbtrcr,iblk)) - + !echmod - comment this out for efficiency, if .not. calc_Tsfc if (.not. calc_Tsfc) then !--------------------------------------------------------------- - ! If surface fluxes were provided, conserve these fluxes at ice - ! free points by passing to ocean. + ! If surface fluxes were provided, conserve these fluxes at ice + ! free points by passing to ocean. !--------------------------------------------------------------- - call sfcflux_to_ocn & + call sfcflux_to_ocn & (nx_block, ny_block, & tmask (:,:,iblk), aice_init(:,:,iblk), & fsurfn_f (:,:,:,iblk), flatn_f(:,:,:,iblk), & fresh (:,:,iblk), fhocn (:,:,iblk)) - endif + endif !echmod call ice_timer_stop(timer_couple) ! atm/ocn coupling @@ -578,10 +592,10 @@ end subroutine coupling_prep !======================================================================= ! ! If surface heat fluxes are provided to CICE instead of CICE calculating -! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can +! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can ! be provided at points which do not have ice. (This is could be due to ! the heat fluxes being calculated on a lower resolution grid or the -! heat fluxes not recalculated at every CICE timestep.) At ice free points, +! heat fluxes not recalculated at every CICE timestep.) At ice free points, ! conserve energy and water by passing these fluxes to the ocean. ! ! author: A. McLaren, Met Office @@ -615,7 +629,7 @@ subroutine sfcflux_to_ocn(nx_block, ny_block, & ! local variables integer (kind=int_kind) :: & i, j, n ! horizontal indices - + real (kind=dbl_kind) :: & puny, & ! Lsub, & ! @@ -642,7 +656,7 @@ subroutine sfcflux_to_ocn(nx_block, ny_block, & enddo ! j enddo ! n -#endif +#endif end subroutine sfcflux_to_ocn diff --git a/cicecore/drivers/direct/nemo_concepts/CICE_InitMod.F90 b/cicecore/drivers/direct/nemo_concepts/CICE_InitMod.F90 index b2a0e3cd1..87dc8d9a1 100644 --- a/cicecore/drivers/direct/nemo_concepts/CICE_InitMod.F90 +++ b/cicecore/drivers/direct/nemo_concepts/CICE_InitMod.F90 @@ -176,11 +176,11 @@ subroutine cice_init if (icepack_warnings_aborted()) call abort_ice(subname, & file=__FILE__,line= __LINE__) - if (tr_aero .or. tr_zaero) call faero_optics !initialize aerosol optical + if (tr_aero .or. tr_zaero) call faero_optics !initialize aerosol optical !property tables - ! Initialize shortwave components using swdn from previous timestep - ! if restarting. These components will be scaled to current forcing + ! Initialize shortwave components using swdn from previous timestep + ! if restarting. These components will be scaled to current forcing ! 
in prep_radiation. if (trim(runtype) == 'continue' .or. restart) & call init_shortwave ! initialize radiative transfer @@ -215,7 +215,7 @@ subroutine cice_init call init_flux_atm ! initialize atmosphere fluxes sent to coupler call init_flux_ocn ! initialize ocean fluxes sent to coupler - if (write_ic) call accum_hist(dt) ! write initial conditions + if (write_ic) call accum_hist(dt) ! write initial conditions end subroutine cice_init @@ -235,11 +235,17 @@ subroutine init_restart use ice_grid, only: tmask use ice_init, only: ice_ic use ice_init_column, only: init_age, init_FY, init_lvl, & +#ifdef UNDEPRECATE_CESMPONDS init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & +#else + init_meltponds_lvl, init_meltponds_topo, & +#endif init_aerosol, init_hbrine, init_bgc, init_fsd use ice_restart_column, only: restart_age, read_restart_age, & restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & +#ifdef UNDEPRECATE_CESMPONDS restart_pond_cesm, read_restart_pond_cesm, & +#endif restart_pond_lvl, read_restart_pond_lvl, & restart_pond_topo, read_restart_pond_topo, & restart_fsd, read_restart_fsd, & @@ -254,7 +260,11 @@ subroutine init_restart i, j , & ! horizontal indices iblk ! block index logical(kind=log_kind) :: & +#ifdef UNDEPRECATE_CESMPONDS tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & +#else + tr_iage, tr_FY, tr_lvl, tr_pond_lvl, & +#endif tr_pond_topo, tr_fsd, tr_aero, tr_brine, & skl_bgc, z_tracers, solve_zsal integer(kind=int_kind) :: & @@ -273,7 +283,11 @@ subroutine init_restart call icepack_query_parameters(skl_bgc_out=skl_bgc, & z_tracers_out=z_tracers, solve_zsal_out=solve_zsal) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & +#ifdef UNDEPRECATE_CESMPONDS tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & +#else + tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & +#endif tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & tr_fsd_out=tr_fsd) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & @@ -283,7 +297,7 @@ subroutine init_restart if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - if (trim(runtype) == 'continue') then + if (trim(runtype) == 'continue') then ! start from core restart file call restartfile() ! given by pointer in ice_in call calendar() ! update time parameters @@ -294,17 +308,17 @@ subroutine init_restart ! call restartfile_v4 (ice_ic) ! CICE v4.1 binary restart file !!! uncomment if EAP restart data exists ! if (kdyn == 2) call read_restart_eap - endif + endif ! tracers - ! ice age tracer - if (tr_iage) then + ! ice age tracer + if (tr_iage) then if (trim(runtype) == 'continue') & restart_age = .true. if (restart_age) then call read_restart_age else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_age(trcrn(:,:,nt_iage,:,iblk)) enddo ! iblk endif @@ -315,7 +329,7 @@ subroutine init_restart if (restart_FY) then call read_restart_FY else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_FY(trcrn(:,:,nt_FY,:,iblk)) enddo ! iblk endif @@ -326,12 +340,13 @@ subroutine init_restart if (restart_lvl) then call read_restart_lvl else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_lvl(iblk,trcrn(:,:,nt_alvl,:,iblk), & trcrn(:,:,nt_vlvl,:,iblk)) enddo ! iblk endif endif +#ifdef UNDEPRECATE_CESMPONDS ! 
CESM melt ponds if (tr_pond_cesm) then if (trim(runtype) == 'continue') & @@ -339,12 +354,13 @@ subroutine init_restart if (restart_pond_cesm) then call read_restart_pond_cesm else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_cesm(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk)) enddo ! iblk endif endif +#endif ! level-ice melt ponds if (tr_pond_lvl) then if (trim(runtype) == 'continue') & @@ -352,7 +368,7 @@ subroutine init_restart if (restart_pond_lvl) then call read_restart_pond_lvl else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_lvl(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk), & trcrn(:,:,nt_ipnd,:,iblk), & @@ -367,7 +383,7 @@ subroutine init_restart if (restart_pond_topo) then call read_restart_pond_topo else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_topo(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk), & trcrn(:,:,nt_ipnd,:,iblk)) @@ -388,7 +404,7 @@ subroutine init_restart if (restart_aero) then call read_restart_aero else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_aerosol(trcrn(:,:,nt_aero:nt_aero+4*n_aero-1,:,iblk)) enddo ! iblk endif ! .not. restart_aero diff --git a/cicecore/drivers/direct/nemo_concepts/CICE_RunMod.F90 b/cicecore/drivers/direct/nemo_concepts/CICE_RunMod.F90 index eb2bdcbf1..ea6a65165 100644 --- a/cicecore/drivers/direct/nemo_concepts/CICE_RunMod.F90 +++ b/cicecore/drivers/direct/nemo_concepts/CICE_RunMod.F90 @@ -7,7 +7,7 @@ ! William H. Lipscomb, LANL ! ! 2006 ECH: moved exit timeLoop to prevent execution of unnecessary timestep -! 2006 ECH: Streamlined for efficiency +! 2006 ECH: Streamlined for efficiency ! 2006 ECH: Converted to free source form (F90) ! 2007 BPB: Modified Delta-Eddington shortwave interface ! 2008 ECH: moved ESMF code to its own driver @@ -46,14 +46,14 @@ subroutine CICE_Run use ice_calendar, only: stop_now, advance_timestep use ice_forcing, only: get_forcing_atmo, get_forcing_ocn, & get_wave_spec - use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, fzaero_data, & + use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, fzaero_data, & faero_default use ice_flux, only: init_flux_atm, init_flux_ocn use ice_timers, only: ice_timer_start, ice_timer_stop, & timer_couple, timer_step logical (kind=log_kind) :: & tr_aero, tr_zaero, skl_bgc, z_tracers, wave_spec, tr_fsd - + character(len=*), parameter :: subname = '(CICE_Run)' !-------------------------------------------------------------------- @@ -118,7 +118,7 @@ subroutine CICE_Run ! end of timestep loop !-------------------------------------------------------------------- - call ice_timer_stop(timer_step) ! end timestepping loop timer + call ice_timer_stop(timer_step) ! end timestepping loop timer end subroutine CICE_Run @@ -145,7 +145,11 @@ subroutine ice_step use ice_history_bgc, only: init_history_bgc use ice_restart, only: final_restart use ice_restart_column, only: write_restart_age, write_restart_FY, & +#ifdef UNDEPRECATE_CESMPONDS write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & +#else + write_restart_lvl, write_restart_pond_lvl, & +#endif write_restart_pond_topo, write_restart_aero, write_restart_fsd, & write_restart_bgc, write_restart_hbrine use ice_restart_driver, only: dumpfile @@ -159,7 +163,7 @@ subroutine ice_step timer_hist, timer_readwrite integer (kind=int_kind) :: & - iblk , & ! block index + iblk , & ! block index k , & ! dynamics supercycling index ktherm ! 
thermodynamics is off when ktherm = -1 @@ -168,7 +172,11 @@ subroutine ice_step logical (kind=log_kind) :: & tr_iage, tr_FY, tr_lvl, tr_fsd, & +#ifdef UNDEPRECATE_CESMPONDS tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_aero, & +#else + tr_pond_lvl, tr_pond_topo, tr_brine, tr_aero, & +#endif calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec character(len=*), parameter :: subname = '(ice_step)' @@ -177,7 +185,11 @@ subroutine ice_step solve_zsal_out=solve_zsal, z_tracers_out=z_tracers, ktherm_out=ktherm, & wave_spec_out=wave_spec) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & +#ifdef UNDEPRECATE_CESMPONDS tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & +#else + tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & +#endif tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & tr_fsd_out=tr_fsd) call icepack_warnings_flush(nu_diag) @@ -226,7 +238,7 @@ subroutine ice_step !----------------------------------------------------------------- ! thermodynamics and biogeochemistry !----------------------------------------------------------------- - + call step_therm1 (dt, iblk) ! vertical thermodynamics call biogeochemistry (dt, iblk) ! biogeochemistry call step_therm2 (dt, iblk) ! ice thickness distribution thermo @@ -321,13 +333,15 @@ subroutine ice_step if (tr_iage) call write_restart_age if (tr_FY) call write_restart_FY if (tr_lvl) call write_restart_lvl +#ifdef UNDEPRECATE_CESMPONDS if (tr_pond_cesm) call write_restart_pond_cesm +#endif if (tr_pond_lvl) call write_restart_pond_lvl if (tr_pond_topo) call write_restart_pond_topo if (tr_fsd) call write_restart_fsd if (tr_aero) call write_restart_aero if (solve_zsal .or. skl_bgc .or. z_tracers) & - call write_restart_bgc + call write_restart_bgc if (tr_brine) call write_restart_hbrine if (kdyn == 2) call write_restart_eap call final_restart @@ -336,7 +350,7 @@ subroutine ice_step call ice_timer_stop(timer_readwrite) ! reading/writing end subroutine ice_step - + !======================================================================= ! ! Prepare for coupling @@ -366,12 +380,12 @@ subroutine coupling_prep (iblk) use ice_step_mod, only: ocean_mixed_layer use ice_timers, only: timer_couple, ice_timer_start, ice_timer_stop - integer (kind=int_kind), intent(in) :: & - iblk ! block index + integer (kind=int_kind), intent(in) :: & + iblk ! block index ! local variables - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & ilo,ihi,jlo,jhi, & ! beginning and end of physical domain n , & ! thickness category index i,j , & ! horizontal indices @@ -452,7 +466,7 @@ subroutine coupling_prep (iblk) do j = jlo, jhi do i = ilo, ihi if (aicen(i,j,n,iblk) > puny) then - + alvdf(i,j,iblk) = alvdf(i,j,iblk) & + alvdfn(i,j,n,iblk)*aicen(i,j,n,iblk) alidf(i,j,iblk) = alidf(i,j,iblk) & @@ -477,7 +491,7 @@ subroutine coupling_prep (iblk) + apeffn(i,j,n,iblk)*aicen(i,j,n,iblk) snowfrac(i,j,iblk) = snowfrac(i,j,iblk) & ! for history + snowfracn(i,j,n,iblk)*aicen(i,j,n,iblk) - + endif ! 
aicen > puny enddo enddo @@ -507,8 +521,8 @@ subroutine coupling_prep (iblk) fsalt_ai (i,j,iblk) = fsalt (i,j,iblk) fhocn_ai (i,j,iblk) = fhocn (i,j,iblk) fswthru_ai(i,j,iblk) = fswthru(i,j,iblk) - fzsal_ai (i,j,iblk) = fzsal (i,j,iblk) - fzsal_g_ai(i,j,iblk) = fzsal_g(i,j,iblk) + fzsal_ai (i,j,iblk) = fzsal (i,j,iblk) + fzsal_g_ai(i,j,iblk) = fzsal_g(i,j,iblk) if (nbtrcr > 0) then do k = 1, nbtrcr @@ -529,7 +543,7 @@ subroutine coupling_prep (iblk) enddo !----------------------------------------------------------------- - ! Divide fluxes by ice area + ! Divide fluxes by ice area ! - the CESM coupler assumes fluxes are per unit ice area ! - also needed for global budget in diagnostics !----------------------------------------------------------------- @@ -556,21 +570,21 @@ subroutine coupling_prep (iblk) alvdf (:,:,iblk), alidf (:,:,iblk), & fzsal (:,:,iblk), fzsal_g (:,:,iblk), & flux_bio(:,:,1:nbtrcr,iblk)) - + !echmod - comment this out for efficiency, if .not. calc_Tsfc if (.not. calc_Tsfc) then !--------------------------------------------------------------- - ! If surface fluxes were provided, conserve these fluxes at ice - ! free points by passing to ocean. + ! If surface fluxes were provided, conserve these fluxes at ice + ! free points by passing to ocean. !--------------------------------------------------------------- - call sfcflux_to_ocn & + call sfcflux_to_ocn & (nx_block, ny_block, & tmask (:,:,iblk), aice_init(:,:,iblk), & fsurfn_f (:,:,:,iblk), flatn_f(:,:,:,iblk), & fresh (:,:,iblk), fhocn (:,:,iblk)) - endif + endif !echmod call ice_timer_stop(timer_couple) ! atm/ocn coupling @@ -580,10 +594,10 @@ end subroutine coupling_prep !======================================================================= ! ! If surface heat fluxes are provided to CICE instead of CICE calculating -! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can +! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can ! be provided at points which do not have ice. (This is could be due to ! the heat fluxes being calculated on a lower resolution grid or the -! heat fluxes not recalculated at every CICE timestep.) At ice free points, +! heat fluxes not recalculated at every CICE timestep.) At ice free points, ! conserve energy and water by passing these fluxes to the ocean. ! ! author: A. McLaren, Met Office @@ -617,7 +631,7 @@ subroutine sfcflux_to_ocn(nx_block, ny_block, & ! local variables integer (kind=int_kind) :: & i, j, n ! horizontal indices - + real (kind=dbl_kind) :: & puny, & ! Lsub, & ! @@ -644,7 +658,7 @@ subroutine sfcflux_to_ocn(nx_block, ny_block, & enddo ! j enddo ! n -#endif +#endif end subroutine sfcflux_to_ocn diff --git a/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 b/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 index 4725b1d41..cfc5bece9 100644 --- a/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 +++ b/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 @@ -193,11 +193,11 @@ subroutine cice_init(mpicom_ice) if (icepack_warnings_aborted()) call abort_ice(trim(subname), & file=__FILE__,line= __LINE__) - if (tr_aero .or. tr_zaero) call faero_optics !initialize aerosol optical + if (tr_aero .or. tr_zaero) call faero_optics !initialize aerosol optical !property tables - ! Initialize shortwave components using swdn from previous timestep - ! if restarting. These components will be scaled to current forcing + ! Initialize shortwave components using swdn from previous timestep + ! if restarting. These components will be scaled to current forcing ! in prep_radiation. 
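[Editor's sketch] The coupling_prep hunks in both direct drivers accumulate grid-cell albedos and fluxes as sums of per-category values weighted by the category area aicen, skipping categories below the puny threshold; per the comment in the hunk, the merged fluxes are then divided by ice area because the CESM coupler expects per-unit-ice-area values. A minimal sketch of the accumulation step, assuming ncat = 5 and purely illustrative numbers (not model data):

      ! Sketch only: per-category, area-weighted accumulation.
      program flux_aggregate
         implicit none
         integer, parameter :: ncat = 5               ! assumed category count
         real :: aicen(ncat), alvdfn(ncat), alvdf, puny
         integer :: n
         puny   = 1.0e-11
         aicen  = (/ 0.10, 0.20, 0.30, 0.05, 0.00 /)  ! illustrative category areas
         alvdfn = (/ 0.60, 0.55, 0.50, 0.45, 0.40 /)  ! illustrative category albedos
         alvdf  = 0.0
         do n = 1, ncat
            if (aicen(n) > puny) then                 ! skip near-empty categories
               alvdf = alvdf + alvdfn(n)*aicen(n)     ! weight by category area
            endif
         enddo
         print *, 'accumulated alvdf =', alvdf        ! divided by aice downstream
      end program flux_aggregate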
if (trim(runtype) == 'continue' .or. restart) & call init_shortwave ! initialize radiative transfer @@ -241,7 +241,7 @@ subroutine cice_init(mpicom_ice) call init_flux_atm ! initialize atmosphere fluxes sent to coupler call init_flux_ocn ! initialize ocean fluxes sent to coupler - if (write_ic) call accum_hist(dt) ! write initial conditions + if (write_ic) call accum_hist(dt) ! write initial conditions if (my_task == master_task) then call ice_memusage_print(nu_diag,subname//':end') @@ -264,11 +264,17 @@ subroutine init_restart use ice_grid, only: tmask use ice_init, only: ice_ic use ice_init_column, only: init_age, init_FY, init_lvl, init_snowtracers, & +#ifdef UNDEPRECATE_CESMPONDS init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & +#else + init_meltponds_lvl, init_meltponds_topo, & +#endif init_isotope, init_aerosol, init_hbrine, init_bgc, init_fsd use ice_restart_column, only: restart_age, read_restart_age, & restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & +#ifdef UNDEPRECATE_CESMPONDS restart_pond_cesm, read_restart_pond_cesm, & +#endif restart_pond_lvl, read_restart_pond_lvl, & restart_pond_topo, read_restart_pond_topo, & restart_snow, read_restart_snow, & @@ -285,7 +291,11 @@ subroutine init_restart i, j , & ! horizontal indices iblk ! block index logical(kind=log_kind) :: & +#ifdef UNDEPRECATE_CESMPONDS tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & +#else + tr_iage, tr_FY, tr_lvl, tr_pond_lvl, & +#endif tr_pond_topo, tr_snow, tr_fsd, tr_iso, tr_aero, tr_brine, & skl_bgc, z_tracers, solve_zsal integer(kind=int_kind) :: & @@ -305,7 +315,11 @@ subroutine init_restart call icepack_query_parameters(skl_bgc_out=skl_bgc, & z_tracers_out=z_tracers, solve_zsal_out=solve_zsal) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & +#ifdef UNDEPRECATE_CESMPONDS tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & +#else + tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & +#endif tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & tr_snow_out=tr_snow, tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & @@ -318,7 +332,7 @@ subroutine init_restart if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - if (trim(runtype) == 'continue') then + if (trim(runtype) == 'continue') then ! start from core restart file call restartfile() ! given by pointer in ice_in call calendar() ! update time parameters @@ -329,17 +343,17 @@ subroutine init_restart ! call restartfile_v4 (ice_ic) ! CICE v4.1 binary restart file !!! uncomment if EAP restart data exists ! if (kdyn == 2) call read_restart_eap - endif + endif ! tracers - ! ice age tracer - if (tr_iage) then + ! ice age tracer + if (tr_iage) then if (trim(runtype) == 'continue') & restart_age = .true. if (restart_age) then call read_restart_age else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_age(trcrn(:,:,nt_iage,:,iblk)) enddo ! iblk endif @@ -350,7 +364,7 @@ subroutine init_restart if (restart_FY) then call read_restart_FY else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_FY(trcrn(:,:,nt_FY,:,iblk)) enddo ! iblk endif @@ -361,12 +375,13 @@ subroutine init_restart if (restart_lvl) then call read_restart_lvl else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_lvl(iblk,trcrn(:,:,nt_alvl,:,iblk), & trcrn(:,:,nt_vlvl,:,iblk)) enddo ! iblk endif endif +#ifdef UNDEPRECATE_CESMPONDS ! 
CESM melt ponds if (tr_pond_cesm) then if (trim(runtype) == 'continue') & @@ -374,12 +389,13 @@ subroutine init_restart if (restart_pond_cesm) then call read_restart_pond_cesm else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_cesm(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk)) enddo ! iblk endif endif +#endif ! level-ice melt ponds if (tr_pond_lvl) then if (trim(runtype) == 'continue') & @@ -387,7 +403,7 @@ subroutine init_restart if (restart_pond_lvl) then call read_restart_pond_lvl else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_lvl(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk), & trcrn(:,:,nt_ipnd,:,iblk), & @@ -402,7 +418,7 @@ subroutine init_restart if (restart_pond_topo) then call read_restart_pond_topo else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_topo(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk), & trcrn(:,:,nt_ipnd,:,iblk)) @@ -453,7 +469,7 @@ subroutine init_restart if (restart_aero) then call read_restart_aero else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_aerosol(trcrn(:,:,nt_aero:nt_aero+4*n_aero-1,:,iblk)) enddo ! iblk endif ! .not. restart_aero diff --git a/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 b/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 index d2efaa8d4..b96086c6d 100644 --- a/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 +++ b/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 @@ -7,7 +7,7 @@ ! William H. Lipscomb, LANL ! ! 2006 ECH: moved exit timeLoop to prevent execution of unnecessary timestep -! 2006 ECH: Streamlined for efficiency +! 2006 ECH: Streamlined for efficiency ! 2006 ECH: Converted to free source form (F90) ! 2007 BPB: Modified Delta-Eddington shortwave interface ! 2008 ECH: moved ESMF code to its own driver @@ -110,7 +110,7 @@ subroutine CICE_Run ! end of timestep loop !-------------------------------------------------------------------- - call ice_timer_stop(timer_step) ! end timestepping loop timer + call ice_timer_stop(timer_step) ! end timestepping loop timer end subroutine CICE_Run @@ -137,7 +137,11 @@ subroutine ice_step use ice_history_bgc, only: init_history_bgc use ice_restart, only: final_restart use ice_restart_column, only: write_restart_age, write_restart_FY, & +#ifdef UNDEPRECATE_CESMPONDS write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & +#else + write_restart_lvl, write_restart_pond_lvl, & +#endif write_restart_pond_topo, write_restart_aero, write_restart_fsd, & write_restart_iso, write_restart_bgc, write_restart_hbrine, & write_restart_snow @@ -153,7 +157,7 @@ subroutine ice_step use ice_prescribed_mod integer (kind=int_kind) :: & - iblk , & ! block index + iblk , & ! block index k , & ! dynamics supercycling index ktherm ! 
thermodynamics is off when ktherm = -1 @@ -162,7 +166,11 @@ subroutine ice_step logical (kind=log_kind) :: & tr_iage, tr_FY, tr_lvl, tr_fsd, tr_snow, & +#ifdef UNDEPRECATE_CESMPONDS tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & +#else + tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & +#endif calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec character(len=*), parameter :: subname = '(ice_step)' @@ -180,7 +188,11 @@ subroutine ice_step solve_zsal_out=solve_zsal, z_tracers_out=z_tracers, ktherm_out=ktherm, & wave_spec_out=wave_spec) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & +#ifdef UNDEPRECATE_CESMPONDS tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & +#else + tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & +#endif tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & tr_iso_out=tr_iso, tr_fsd_out=tr_fsd, tr_snow_out=tr_snow) call icepack_warnings_flush(nu_diag) @@ -391,7 +403,9 @@ subroutine ice_step if (tr_iage) call write_restart_age if (tr_FY) call write_restart_FY if (tr_lvl) call write_restart_lvl +#ifdef UNDEPRECATE_CESMPONDS if (tr_pond_cesm) call write_restart_pond_cesm +#endif if (tr_pond_lvl) call write_restart_pond_lvl if (tr_pond_topo) call write_restart_pond_topo if (tr_snow) call write_restart_snow @@ -399,7 +413,7 @@ subroutine ice_step if (tr_iso) call write_restart_iso if (tr_aero) call write_restart_aero if (solve_zsal .or. skl_bgc .or. z_tracers) & - call write_restart_bgc + call write_restart_bgc if (tr_brine) call write_restart_hbrine if (kdyn == 2) call write_restart_eap call final_restart @@ -407,7 +421,7 @@ subroutine ice_step call ice_timer_stop(timer_readwrite) ! reading/writing end subroutine ice_step - + !======================================================================= ! ! Prepare for coupling @@ -442,12 +456,12 @@ subroutine coupling_prep (iblk) use ice_step_mod, only: ocean_mixed_layer use ice_timers, only: timer_couple, ice_timer_start, ice_timer_stop - integer (kind=int_kind), intent(in) :: & - iblk ! block index + integer (kind=int_kind), intent(in) :: & + iblk ! block index ! local variables - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & ilo,ihi,jlo,jhi, & ! beginning and end of physical domain n , & ! thickness category index i,j , & ! horizontal indices @@ -585,8 +599,8 @@ subroutine coupling_prep (iblk) fsalt_ai (i,j,iblk) = fsalt (i,j,iblk) fhocn_ai (i,j,iblk) = fhocn (i,j,iblk) fswthru_ai(i,j,iblk) = fswthru(i,j,iblk) - fzsal_ai (i,j,iblk) = fzsal (i,j,iblk) - fzsal_g_ai(i,j,iblk) = fzsal_g(i,j,iblk) + fzsal_ai (i,j,iblk) = fzsal (i,j,iblk) + fzsal_g_ai(i,j,iblk) = fzsal_g(i,j,iblk) if (nbtrcr > 0) then do k = 1, nbtrcr @@ -607,7 +621,7 @@ subroutine coupling_prep (iblk) enddo !----------------------------------------------------------------- - ! Divide fluxes by ice area + ! Divide fluxes by ice area ! - the CESM coupler assumes fluxes are per unit ice area ! - also needed for global budget in diagnostics !----------------------------------------------------------------- @@ -638,7 +652,7 @@ subroutine coupling_prep (iblk) fiso_evap=fiso_evap(:,:,:,iblk), & fiso_ocn =fiso_ocn (:,:,:,iblk), & Uref=Uref(:,:,iblk), wind=wind(:,:,iblk) ) - + if (nbtrcr > 0 .or. skl_bgc) then call bgcflux_ice_to_ocn (nx_block, ny_block, & flux_bio(:,:,1:nbtrcr,iblk), & @@ -655,16 +669,16 @@ subroutine coupling_prep (iblk) if (.not. calc_Tsfc) then !--------------------------------------------------------------- - ! 
If surface fluxes were provided, conserve these fluxes at ice - ! free points by passing to ocean. + ! If surface fluxes were provided, conserve these fluxes at ice + ! free points by passing to ocean. !--------------------------------------------------------------- - call sfcflux_to_ocn & + call sfcflux_to_ocn & (nx_block, ny_block, & tmask (:,:,iblk), aice_init(:,:,iblk), & fsurfn_f (:,:,:,iblk), flatn_f(:,:,:,iblk), & fresh (:,:,iblk), fhocn (:,:,iblk)) - endif + endif !echmod call ice_timer_stop(timer_couple,iblk) ! atm/ocn coupling @@ -674,10 +688,10 @@ end subroutine coupling_prep !======================================================================= ! ! If surface heat fluxes are provided to CICE instead of CICE calculating -! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can +! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can ! be provided at points which do not have ice. (This is could be due to ! the heat fluxes being calculated on a lower resolution grid or the -! heat fluxes not recalculated at every CICE timestep.) At ice free points, +! heat fluxes not recalculated at every CICE timestep.) At ice free points, ! conserve energy and water by passing these fluxes to the ocean. ! ! author: A. McLaren, Met Office @@ -710,7 +724,7 @@ subroutine sfcflux_to_ocn(nx_block, ny_block, & ! local variables integer (kind=int_kind) :: & i, j, n ! horizontal indices - + real (kind=dbl_kind) :: & puny, & ! Lsub, & ! diff --git a/cicecore/drivers/mct/cesm1/CICE_copyright.txt b/cicecore/drivers/mct/cesm1/CICE_copyright.txt index 6eb3c9cca..14d97c481 100644 --- a/cicecore/drivers/mct/cesm1/CICE_copyright.txt +++ b/cicecore/drivers/mct/cesm1/CICE_copyright.txt @@ -1,17 +1,17 @@ ! Copyright (c) 2022, Triad National Security, LLC ! All rights reserved. -! +! ! Copyright 2022. Triad National Security, LLC. This software was -! produced under U.S. Government contract DE-AC52-06NA25396 for Los +! produced under U.S. Government contract DE-AC52-06NA25396 for Los ! Alamos National Laboratory (LANL), which is operated by Triad -! National Security, LLC for the U.S. Department of Energy. The U.S. -! Government has rights to use, reproduce, and distribute this software. -! NEITHER THE GOVERNMENT NOR TRIAD NATIONAL SECURITY, LLC MAKES ANY +! National Security, LLC for the U.S. Department of Energy. The U.S. +! Government has rights to use, reproduce, and distribute this software. +! NEITHER THE GOVERNMENT NOR TRIAD NATIONAL SECURITY, LLC MAKES ANY ! WARRANTY, EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF -! THIS SOFTWARE. If software is modified to produce derivative works, -! such modified software should be clearly marked, so as not to confuse +! THIS SOFTWARE. If software is modified to produce derivative works, +! such modified software should be clearly marked, so as not to confuse ! it with the version available from LANL. ! ! The full license and distribution policy are available from ! https://github.com/CICE-Consortium -! +! 
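The hunks above show the pattern this patch applies in every driver: CESM melt-pond code is fenced behind an UNDEPRECATE_CESMPONDS preprocessor guard rather than deleted, so the deprecated scheme stays recoverable while default builds exclude it at compile time. A minimal, self-contained sketch of the idiom follows — not part of the patch; the program scaffolding and the retained tr_pond_cesm flag in the unguarded branch are illustrative only, since the real patch drops the flag entirely from unguarded builds:

    program cesmponds_guard_demo
      ! Sketch of the UNDEPRECATE_CESMPONDS guard added around every CESM
      ! melt-pond use, tracer query, and restart call in the hunks above.
      ! Keep the deprecated scheme with, e.g.:
      !   gfortran -cpp -DUNDEPRECATE_CESMPONDS guard_demo.F90
      implicit none
      logical :: tr_pond_cesm

    #ifdef UNDEPRECATE_CESMPONDS
      tr_pond_cesm = .true.     ! deprecated scheme still compiled in
    #else
      tr_pond_cesm = .false.    ! default build: scheme excluded at compile time
    #endif

      if (tr_pond_cesm) then
         print *, 'CESM melt ponds available (UNDEPRECATE_CESMPONDS defined)'
      else
         print *, 'CESM melt ponds deprecated out of this build'
      end if
    end program cesmponds_guard_demo

Guarding rather than deleting keeps default builds bit-for-bit while leaving the old scheme one macro away from restoration.
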
diff --git a/cicecore/drivers/mct/cesm1/ice_comp_esmf.F90 b/cicecore/drivers/mct/cesm1/ice_comp_esmf.F90 index b0a78bfcd..454895410 100644 --- a/cicecore/drivers/mct/cesm1/ice_comp_esmf.F90 +++ b/cicecore/drivers/mct/cesm1/ice_comp_esmf.F90 @@ -56,7 +56,7 @@ module ice_comp_esmf use ice_timers use ice_kinds_mod, only : int_kind, dbl_kind, char_len_long, log_kind - use ice_boundary, only : ice_HaloUpdate + use ice_boundary, only : ice_HaloUpdate use ice_scam, only : scmlat, scmlon, single_column use ice_fileunits, only : nu_diag, inst_index, inst_name, inst_suffix, & release_all_fileunits @@ -89,7 +89,7 @@ module ice_comp_esmf ! ! !PRIVATE VARIABLES - integer (kind=int_kind) :: ICEID + integer (kind=int_kind) :: ICEID type(mct_gGrid) :: dom_i type(mct_gsMap) :: gsMap_i @@ -140,7 +140,7 @@ subroutine ice_init_esmf(comp, import_state, export_state, EClock, rc) ! ! !DESCRIPTION: ! Initialize thermodynamic ice model and obtain relevant atmospheric model -! arrays back from driver +! arrays back from driver ! ! !USES: @@ -168,7 +168,7 @@ subroutine ice_init_esmf(comp, import_state, export_state, EClock, rc) integer :: xoff,yoff integer :: nxg,nyg integer :: k, iblk - + character(len=256) :: drvarchdir ! driver archive directory character(len=32) :: starttype ! infodata start type integer :: start_ymd ! Start date (YYYYMMDD) @@ -207,7 +207,7 @@ subroutine ice_init_esmf(comp, import_state, export_state, EClock, rc) call ice_cpl_indices_set() - ! duplicate the mpi communicator from the current VM + ! duplicate the mpi communicator from the current VM call ESMF_VMGetCurrent(vm, rc=rc) if(rc /= ESMF_SUCCESS) call ESMF_Finalize(rc=rc, endflag=ESMF_END_ABORT) @@ -218,7 +218,7 @@ subroutine ice_init_esmf(comp, import_state, export_state, EClock, rc) if(rc /= 0) call ESMF_Finalize(rc=rc, endflag=ESMF_END_ABORT) ! Initialize cice id - + call ESMF_AttributeGet(export_state, name="ID", value=ICEID, rc=rc) if (rc /= ESMF_SUCCESS) call ESMF_Finalize(rc=rc, endflag=ESMF_END_ABORT) @@ -324,14 +324,14 @@ subroutine ice_init_esmf(comp, import_state, export_state, EClock, rc) ! use EClock to reset calendar information on initial start !--------------------------------------------------------------------------- - ! - on restart run + ! - on restart run ! - istep0, time and time_forc are read from restart file ! - istep1 is set to istep0 ! - idate is determined from time via the call to calendar (see below) - ! - on initial run + ! - on initial run ! - iyear, month and mday obtained from sync clock ! - time determined from iyear, month and mday - ! - istep0 and istep1 are set to 0 + ! - istep0 and istep1 are set to 0 call seq_timemgr_EClockGetData(EClock, & start_ymd=start_ymd, start_tod=start_tod, & @@ -384,7 +384,7 @@ subroutine ice_init_esmf(comp, import_state, export_state, EClock, rc) call calendar(time) ! update calendar info if (write_ic) call accum_hist(dt) ! write initial conditions - + !--------------------------------------------------------------------------- ! Initialize MCT attribute vectors and indices !--------------------------------------------------------------------------- @@ -413,12 +413,12 @@ subroutine ice_init_esmf(comp, import_state, export_state, EClock, rc) !----------------------------------------- ! Set arrayspec for dom, l2x and x2l !----------------------------------------- - + call ESMF_ArraySpecSet(arrayspec, rank=2, typekind=ESMF_TYPEKIND_R8, rc=rc) if(rc /= ESMF_SUCCESS) call ESMF_Finalize(rc=rc, endflag=ESMF_END_ABORT) !----------------------------------------- - ! Create dom + ! 
Create dom !----------------------------------------- nfields = shr_string_listGetNum(trim(seq_flds_dom_fields)) @@ -430,11 +430,11 @@ subroutine ice_init_esmf(comp, import_state, export_state, EClock, rc) call ESMF_AttributeSet(dom, name="mct_names", value=trim(seq_flds_dom_fields), rc=rc) if(rc /= ESMF_SUCCESS) call ESMF_Finalize(rc=rc, endflag=ESMF_END_ABORT) - ! Set values of dom + ! Set values of dom call ice_domain_esmf(dom) - !----------------------------------------- - ! Create i2x + !----------------------------------------- + ! Create i2x !----------------------------------------- ! 1d undistributed index of fields, 2d is packed data @@ -447,9 +447,9 @@ subroutine ice_init_esmf(comp, import_state, export_state, EClock, rc) call ESMF_AttributeSet(i2x, name="mct_names", value=trim(seq_flds_i2x_fields), rc=rc) if(rc /= ESMF_SUCCESS) call ESMF_Finalize(rc=rc, endflag=ESMF_END_ABORT) - - !----------------------------------------- - ! Create x2i + + !----------------------------------------- + ! Create x2i !----------------------------------------- nfields = shr_string_listGetNum(trim(seq_flds_x2i_fields)) @@ -461,16 +461,16 @@ subroutine ice_init_esmf(comp, import_state, export_state, EClock, rc) call ESMF_AttributeSet(x2i, name="mct_names", value=trim(seq_flds_x2i_fields), rc=rc) if(rc /= ESMF_SUCCESS) call ESMF_Finalize(rc=rc, endflag=ESMF_END_ABORT) - !----------------------------------------- - ! Add esmf arrays to import and export state !----------------------------------------- - + ! Add esmf arrays to import and export state + !----------------------------------------- + call ESMF_StateAdd(export_state, (/dom/), rc=rc) if(rc /= ESMF_SUCCESS) call ESMF_Finalize(rc=rc, endflag=ESMF_END_ABORT) call ESMF_StateAdd(export_state, (/i2x/), rc=rc) if(rc /= ESMF_SUCCESS) call ESMF_Finalize(rc=rc, endflag=ESMF_END_ABORT) - + call ESMF_StateAdd(import_state, (/x2i/), rc=rc) if(rc /= ESMF_SUCCESS) call ESMF_Finalize(rc=rc, endflag=ESMF_END_ABORT) @@ -575,7 +575,7 @@ subroutine ice_init_esmf(comp, import_state, export_state, EClock, rc) ! Error check if ((tr_aero .and. .not. atm_aero) .or. (tr_zaero .and. .not. atm_aero)) then - write(nu_diag,*) 'ice_import ERROR: atm_aero must be set for tr_aero or tr_zaero' + write(nu_diag,*) 'ice_import ERROR: atm_aero must be set for tr_aero or tr_zaero' call shr_sys_abort() end if @@ -596,7 +596,7 @@ subroutine ice_init_esmf(comp, import_state, export_state, EClock, rc) ! write(shrlogunit,105) trim(subname)//' memory_write: model date = ',start_ymd,start_tod, & ! ' memory = ',msize0,' MB (highwater) ',mrss0,' MB (usage)' ! endif - + 105 format( A, 2i8, A, f10.2, A, f10.2, A) end subroutine ice_init_esmf @@ -668,7 +668,7 @@ subroutine ice_run_esmf(comp, import_state, export_state, EClock, rc) call shr_file_getLogUnit (shrlogunit) call shr_file_getLogLevel(shrloglev) call shr_file_setLogUnit (nu_diag) - + ! Determine time of next atmospheric shortwave calculation call ESMF_AttributeGet(export_state, name="nextsw_cday", value=nextsw_cday, rc=rc) @@ -706,7 +706,7 @@ subroutine ice_run_esmf(comp, import_state, export_state, EClock, rc) !------------------------------------------------------------------- ! 
get import state !------------------------------------------------------------------- - + call t_barrierf('cice_run_import_BARRIER',MPI_COMM_ICE) call t_startf ('cice_run_import') @@ -724,7 +724,7 @@ subroutine ice_run_esmf(comp, import_state, export_state, EClock, rc) endif call ice_timer_stop(timer_cplrecv) call t_stopf ('cice_run_import') - + !-------------------------------------------------------------------- ! timestep update !-------------------------------------------------------------------- @@ -732,9 +732,9 @@ subroutine ice_run_esmf(comp, import_state, export_state, EClock, rc) call CICE_Run() !----------------------------------------------------------------- - ! send export state to driver + ! send export state to driver !----------------------------------------------------------------- - + call t_barrierf('cice_run_export_BARRIER',MPI_COMM_ICE) call t_startf ('cice_run_export') call ice_timer_start(timer_cplsend) @@ -752,7 +752,7 @@ subroutine ice_run_esmf(comp, import_state, export_state, EClock, rc) endif call ice_timer_stop(timer_cplsend) call t_stopf ('cice_run_export') - + !-------------------------------------------------------------------- ! check that internal clock is in sync with master clock !-------------------------------------------------------------------- @@ -767,7 +767,7 @@ subroutine ice_run_esmf(comp, import_state, export_state, EClock, rc) call shr_sys_abort( SubName// & ":: Internal sea-ice clock not in sync with Sync Clock") end if - + ! reset shr logging to my original values call shr_file_setLogUnit (shrlogunit) @@ -776,9 +776,9 @@ subroutine ice_run_esmf(comp, import_state, export_state, EClock, rc) !------------------------------------------------------------------- ! stop timers and print timer info !------------------------------------------------------------------- - ! Need to have this logic here instead of in ice_final_esmf since + ! Need to have this logic here instead of in ice_final_esmf since ! the ice_final_esmf.F90 will still be called even in aqua-planet mode - ! Could put this logic in the driver - but it seems easier here + ! Could put this logic in the driver - but it seems easier here ! Need to stop this at the end of every run phase in a coupled run. call ice_timer_stop(timer_total) ! stop timing @@ -788,7 +788,7 @@ subroutine ice_run_esmf(comp, import_state, export_state, EClock, rc) call ice_timer_print_all(stats=.true.) ! print timing information call release_all_fileunits end if - + ! if(tod == 0) then ! call shr_get_memusage(msize,mrss) ! call shr_mpi_max(mrss, mrss0, MPI_COMM_ICE,trim(subname)//' mrss0') @@ -799,7 +799,7 @@ subroutine ice_run_esmf(comp, import_state, export_state, EClock, rc) ! endif ! 
endif call t_stopf ('cice_run_total') - + 105 format( A, 2i8, A, f10.2, A, f10.2, A) end subroutine ice_run_esmf @@ -881,12 +881,12 @@ function ice_distgrid_esmf(gsize) n=0 do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi - + do j = jlo, jhi do i = ilo, ihi n = n+1 @@ -902,12 +902,12 @@ function ice_distgrid_esmf(gsize) allocate(gindex(lsize)) n=0 do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi - + do j = jlo, jhi do i = ilo, ihi n = n+1 @@ -918,7 +918,7 @@ function ice_distgrid_esmf(gsize) enddo !i enddo !j enddo !iblk - + ice_distgrid_esmf = ESMF_DistGridCreate(arbSeqIndexList=gindex, rc=rc) if(rc /= ESMF_SUCCESS) call ESMF_Finalize(rc=rc, endflag=ESMF_END_ABORT) @@ -969,17 +969,17 @@ subroutine ice_domain_esmf( dom ) fptr(:,:) = -9999.0_R8 n=0 do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi - + do j = jlo, jhi do i = ilo, ihi n = n+1 - fptr(klon, n) = TLON(i,j,iblk)*rad_to_deg - fptr(klat, n) = TLAT(i,j,iblk)*rad_to_deg + fptr(klon, n) = TLON(i,j,iblk)*rad_to_deg + fptr(klat, n) = TLAT(i,j,iblk)*rad_to_deg fptr(karea, n) = tarea(i,j,iblk)/(radius*radius) fptr(kmask, n) = real(nint(hm(i,j,iblk)),kind=dbl_kind) if (trim(grid_type) == 'latlon') then diff --git a/cicecore/drivers/mct/cesm1/ice_comp_mct.F90 b/cicecore/drivers/mct/cesm1/ice_comp_mct.F90 index d663d0f97..a1d1a2ad1 100644 --- a/cicecore/drivers/mct/cesm1/ice_comp_mct.F90 +++ b/cicecore/drivers/mct/cesm1/ice_comp_mct.F90 @@ -53,7 +53,7 @@ module ice_comp_mct use ice_timers use ice_kinds_mod, only : int_kind, dbl_kind, char_len_long, log_kind - use ice_boundary, only : ice_HaloUpdate + use ice_boundary, only : ice_HaloUpdate use ice_scam, only : scmlat, scmlon, single_column use ice_fileunits, only : nu_diag, inst_index, inst_name, inst_suffix, & release_all_fileunits @@ -90,7 +90,7 @@ module ice_comp_mct ! ! !PRIVATE VARIABLES - integer (kind=int_kind) :: ICEID + integer (kind=int_kind) :: ICEID !--- for coupling on other grid from gridcpl_file --- type(mct_gsMap) :: gsMap_iloc ! local gsmaps @@ -115,7 +115,7 @@ subroutine ice_init_mct( EClock, cdata_i, x2i_i, i2x_i, NLFilename ) ! ! !DESCRIPTION: ! Initialize thermodynamic ice model and obtain relevant atmospheric model -! arrays back from driver +! arrays back from driver ! ! !USES: @@ -139,7 +139,7 @@ subroutine ice_init_mct( EClock, cdata_i, x2i_i, i2x_i, NLFilename ) integer :: xoff,yoff integer :: nxg,nyg integer :: k, iblk - + type(mct_gsMap) :: gsmap_extend ! local gsmaps character(len=256) :: drvarchdir ! driver archive directory @@ -240,7 +240,7 @@ subroutine ice_init_mct( EClock, cdata_i, x2i_i, i2x_i, NLFilename ) scmlat = -999. scmlon = -999. - call seq_infodata_GetData( infodata, case_name=runid , & + call seq_infodata_GetData( infodata, case_name=runid , & single_column=single_column ,scmlat=scmlat,scmlon=scmlon) call seq_infodata_GetData( infodata, start_type=starttype) @@ -296,13 +296,13 @@ subroutine ice_init_mct( EClock, cdata_i, x2i_i, i2x_i, NLFilename ) ! use EClock to reset calendar information on initial start !--------------------------------------------------------------------------- - ! 
- on restart run + ! - on restart run ! - istep0, time and time_forc are read from restart file ! - istep1 is set to istep0 ! - date information is determined from restart - ! - on initial run + ! - on initial run ! - myear, mmonth, mday, msec obtained from sync clock - ! - istep0 and istep1 are set to 0 + ! - istep0 and istep1 are set to 0 call seq_timemgr_EClockGetData(EClock, & start_ymd=start_ymd, start_tod=start_tod, & @@ -352,7 +352,7 @@ subroutine ice_init_mct( EClock, cdata_i, x2i_i, i2x_i, NLFilename ) call calendar ! update calendar info if (write_ic) call accum_hist(dt) ! write initial conditions - + !--------------------------------------------------------------------------- ! Initialize MCT attribute vectors and indices !--------------------------------------------------------------------------- @@ -362,22 +362,22 @@ subroutine ice_init_mct( EClock, cdata_i, x2i_i, i2x_i, NLFilename ) ! Initialize ice gsMap if (trim(gridcpl_file) == 'unknown_gridcpl_file') then - call ice_SetGSMap_mct( MPI_COMM_ICE, ICEID, GSMap_ice ) + call ice_SetGSMap_mct( MPI_COMM_ICE, ICEID, GSMap_ice ) lsize = mct_gsMap_lsize(gsMap_ice, MPI_COMM_ICE) call ice_domain_mct( lsize, gsMap_ice, dom_i ) other_cplgrid = .false. nxg = nx_global nyg = ny_global else - call ice_SetGSMap_mct( MPI_COMM_ICE, ICEID, GSMap_iloc ) + call ice_SetGSMap_mct( MPI_COMM_ICE, ICEID, GSMap_iloc ) lsize_loc = mct_gsMap_lsize(gsMap_iloc, MPI_COMM_ICE) call ice_domain_mct( lsize_loc, gsMap_iloc, dom_iloc ) - + call ice_setcoupling_mct(MPI_COMM_ICE, ICEID, gsmap_ice, dom_i) call ice_coffset_mct(xoff,yoff,gsmap_iloc,dom_iloc,gsmap_ice,dom_i,MPI_COMM_ICE) call mct_gsmap_clean(gsmap_ice) call mct_gGrid_clean(dom_i) - + call ice_SetGSMap_mct( MPI_COMM_ICE, ICEID, gsmap_extend, xoff, yoff, nxcpl, nycpl) if (lsize_loc /= mct_gsmap_lsize(gsmap_extend,MPI_COMM_ICE)) then write(nu_diag,*) subname,' :: gsmap_extend extended ',lsize_loc, & @@ -398,7 +398,7 @@ subroutine ice_init_mct( EClock, cdata_i, x2i_i, i2x_i, NLFilename ) call mct_aVect_init(i2x_iloc, rList=seq_flds_i2x_fields, lsize=lsize_loc) call mct_aVect_zero(i2x_iloc) call mct_gsmap_clean(gsmap_extend) - + other_cplgrid = .true. nxg = nxcpl nyg = nycpl @@ -409,7 +409,7 @@ subroutine ice_init_mct( EClock, cdata_i, x2i_i, i2x_i, NLFilename ) call mct_aVect_init(x2i_i, rList=seq_flds_x2i_fields, lsize=lsize) call mct_aVect_zero(x2i_i) - call mct_aVect_init(i2x_i, rList=seq_flds_i2x_fields, lsize=lsize) + call mct_aVect_init(i2x_i, rList=seq_flds_i2x_fields, lsize=lsize) call mct_aVect_zero(i2x_i) !----------------------------------------------------------------- @@ -448,7 +448,7 @@ subroutine ice_init_mct( EClock, cdata_i, x2i_i, i2x_i, NLFilename ) ! Error check if ((tr_aero .or. tr_zaero) .and. .not. atm_aero) then - write(nu_diag,*) 'ice_import ERROR: atm_aero must be set for tr_aero/tr_zaero' + write(nu_diag,*) 'ice_import ERROR: atm_aero must be set for tr_aero/tr_zaero' call shr_sys_abort() end if @@ -469,7 +469,7 @@ subroutine ice_init_mct( EClock, cdata_i, x2i_i, i2x_i, NLFilename ) ! write(shrlogunit,105) trim(subname)//': memory_write: model date = ',start_ymd,start_tod, & ! ' memory = ',msize0,' MB (highwater) ',mrss0,' MB (usage)' ! 
endif - + 105 format( A, 2i8, A, f10.2, A, f10.2, A) end subroutine ice_init_mct @@ -514,7 +514,7 @@ subroutine ice_run_mct( EClock, cdata_i, x2i_i, i2x_i ) integer :: lbnum integer :: n, myearp type(mct_gGrid) , pointer :: dom_i - type(seq_infodata_type), pointer :: infodata + type(seq_infodata_type), pointer :: infodata type(mct_gsMap) , pointer :: gsMap_i real(r8) :: eccen, obliqr, lambm0, mvelpp character(len=char_len_long) :: fname @@ -542,7 +542,7 @@ subroutine ice_run_mct( EClock, cdata_i, x2i_i, i2x_i ) call shr_file_getLogUnit (shrlogunit) call shr_file_getLogLevel(shrloglev) call shr_file_setLogUnit (nu_diag) - + call seq_cdata_setptrs(cdata_i, infodata=infodata, dom=dom_i, & gsMap=gsMap_i) @@ -577,7 +577,7 @@ subroutine ice_run_mct( EClock, cdata_i, x2i_i, i2x_i ) !------------------------------------------------------------------- ! get import state !------------------------------------------------------------------- - + call t_barrierf('cice_run_import_BARRIER',MPI_COMM_ICE) call t_startf ('cice_run_import') call ice_timer_start(timer_cplrecv) @@ -589,7 +589,7 @@ subroutine ice_run_mct( EClock, cdata_i, x2i_i, i2x_i ) endif call ice_timer_stop(timer_cplrecv) call t_stopf ('cice_run_import') - + !-------------------------------------------------------------------- ! timestep update !-------------------------------------------------------------------- @@ -597,9 +597,9 @@ subroutine ice_run_mct( EClock, cdata_i, x2i_i, i2x_i ) call CICE_Run() !----------------------------------------------------------------- - ! send export state to driver + ! send export state to driver !----------------------------------------------------------------- - + call t_barrierf('cice_run_export_BARRIER',MPI_COMM_ICE) call t_startf ('cice_run_export') call ice_timer_start(timer_cplsend) @@ -612,7 +612,7 @@ subroutine ice_run_mct( EClock, cdata_i, x2i_i, i2x_i ) endif call ice_timer_stop(timer_cplsend) call t_stopf ('cice_run_export') - + !-------------------------------------------------------------------- ! check that internal clock is in sync with master clock !-------------------------------------------------------------------- @@ -627,7 +627,7 @@ subroutine ice_run_mct( EClock, cdata_i, x2i_i, i2x_i ) call shr_sys_abort( SubName// & ":: Internal sea-ice clock not in sync with Sync Clock") end if - + ! reset shr logging to my original values call shr_file_setLogUnit (shrlogunit) @@ -636,9 +636,9 @@ subroutine ice_run_mct( EClock, cdata_i, x2i_i, i2x_i ) !------------------------------------------------------------------- ! stop timers and print timer info !------------------------------------------------------------------- - ! Need to have this logic here instead of in ice_final_mct since + ! Need to have this logic here instead of in ice_final_mct since ! the ice_final_mct.F90 will still be called even in aqua-planet mode - ! Could put this logic in the driver - but it seems easier here + ! Could put this logic in the driver - but it seems easier here ! Need to stop this at the end of every run phase in a coupled run. call ice_timer_stop(timer_total) ! stop timing @@ -648,7 +648,7 @@ subroutine ice_run_mct( EClock, cdata_i, x2i_i, i2x_i ) call ice_timer_print_all(stats=.true.) ! print timing information call release_all_fileunits end if - + ! if(tod == 0) then ! call shr_get_memusage(msize,mrss) ! call shr_mpi_max(mrss, mrss0, MPI_COMM_ICE,trim(subname)//' mrss0') @@ -659,7 +659,7 @@ subroutine ice_run_mct( EClock, cdata_i, x2i_i, i2x_i ) ! endif ! 
endif call t_stopf ('cice_run_total') - + 105 format( A, 2i8, A, f10.2, A, f10.2, A) end subroutine ice_run_mct @@ -754,12 +754,12 @@ subroutine ice_SetGSMap_mct( mpicom, ID, gsMap_ice, xoff, yoff, nxgin, nygin ) n=0 do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi - + do j = jlo, jhi do i = ilo, ihi n = n+1 @@ -771,12 +771,12 @@ subroutine ice_SetGSMap_mct( mpicom, ID, gsMap_ice, xoff, yoff, nxgin, nygin ) allocate(gindex(lsize),stat=ier) n=0 do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi - + do j = jlo, jhi do i = ilo, ihi n = n+1 @@ -787,7 +787,7 @@ subroutine ice_SetGSMap_mct( mpicom, ID, gsMap_ice, xoff, yoff, nxgin, nygin ) enddo !i enddo !j enddo !iblk - + call mct_gsMap_init( gsMap_ice, gindex, mpicom, ID, lsize, gsize ) deallocate(gindex) @@ -802,7 +802,7 @@ subroutine ice_domain_mct( lsize, gsMap_i, dom_i ) ! integer , intent(in) :: lsize type(mct_gsMap), intent(in) :: gsMap_i - type(mct_ggrid), intent(inout) :: dom_i + type(mct_ggrid), intent(inout) :: dom_i ! ! Local Variables ! @@ -824,7 +824,7 @@ subroutine ice_domain_mct( lsize, gsMap_i, dom_i ) call mct_gGrid_init( GGrid=dom_i, CoordChars=trim(seq_flds_dom_coord), & OtherChars=trim(seq_flds_dom_other), lsize=lsize ) call mct_aVect_zero(dom_i%data) - ! + ! allocate(data(lsize)) ! ! Determine global gridpoint number attribute, GlobGridNum, which is set automatically by MCT @@ -835,63 +835,63 @@ subroutine ice_domain_mct( lsize, gsMap_i, dom_i ) ! Determine domain (numbering scheme is: West to East and South to North to South pole) ! Initialize attribute vector with special value ! - data(:) = -9999.0_R8 - call mct_gGrid_importRAttr(dom_i,"lat" ,data,lsize) - call mct_gGrid_importRAttr(dom_i,"lon" ,data,lsize) - call mct_gGrid_importRAttr(dom_i,"area" ,data,lsize) - call mct_gGrid_importRAttr(dom_i,"aream",data,lsize) - data(:) = 0.0_R8 - call mct_gGrid_importRAttr(dom_i,"mask",data,lsize) - call mct_gGrid_importRAttr(dom_i,"frac",data,lsize) + data(:) = -9999.0_R8 + call mct_gGrid_importRAttr(dom_i,"lat" ,data,lsize) + call mct_gGrid_importRAttr(dom_i,"lon" ,data,lsize) + call mct_gGrid_importRAttr(dom_i,"area" ,data,lsize) + call mct_gGrid_importRAttr(dom_i,"aream",data,lsize) + data(:) = 0.0_R8 + call mct_gGrid_importRAttr(dom_i,"mask",data,lsize) + call mct_gGrid_importRAttr(dom_i,"frac",data,lsize) ! ! Fill in correct values for domain components ! 
- data(:) = -9999.0_R8 + data(:) = -9999.0_R8 n=0 do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi - + do j = jlo, jhi do i = ilo, ihi n = n+1 - data(n) = TLON(i,j,iblk)*rad_to_deg + data(n) = TLON(i,j,iblk)*rad_to_deg enddo !i enddo !j enddo !iblk - call mct_gGrid_importRattr(dom_i,"lon",data,lsize) + call mct_gGrid_importRattr(dom_i,"lon",data,lsize) - data(:) = -9999.0_R8 + data(:) = -9999.0_R8 n=0 do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi - + do j = jlo, jhi do i = ilo, ihi n = n+1 - data(n) = TLAT(i,j,iblk)*rad_to_deg + data(n) = TLAT(i,j,iblk)*rad_to_deg enddo !i enddo !j enddo !iblk - call mct_gGrid_importRattr(dom_i,"lat",data,lsize) + call mct_gGrid_importRattr(dom_i,"lat",data,lsize) - data(:) = -9999.0_R8 + data(:) = -9999.0_R8 n=0 do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi - + do j = jlo, jhi do i = ilo, ihi n = n+1 @@ -899,17 +899,17 @@ subroutine ice_domain_mct( lsize, gsMap_i, dom_i ) enddo !i enddo !j enddo !iblk - call mct_gGrid_importRattr(dom_i,"area",data,lsize) + call mct_gGrid_importRattr(dom_i,"area",data,lsize) data(:) = 0.0_R8 n=0 do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi - + do j = jlo, jhi do i = ilo, ihi n = n+1 @@ -917,17 +917,17 @@ subroutine ice_domain_mct( lsize, gsMap_i, dom_i ) enddo !i enddo !j enddo !iblk - call mct_gGrid_importRattr(dom_i,"mask",data,lsize) + call mct_gGrid_importRattr(dom_i,"mask",data,lsize) data(:) = 0.0_R8 n=0 do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi - + do j = jlo, jhi do i = ilo, ihi n = n+1 @@ -939,7 +939,7 @@ subroutine ice_domain_mct( lsize, gsMap_i, dom_i ) enddo !i enddo !j enddo !iblk - call mct_gGrid_importRattr(dom_i,"frac",data,lsize) + call mct_gGrid_importRattr(dom_i,"frac",data,lsize) deallocate(data) deallocate(idata) @@ -948,7 +948,7 @@ end subroutine ice_domain_mct !======================================================================= - subroutine ice_setdef_mct( i2x_i ) + subroutine ice_setdef_mct( i2x_i ) !----------------------------------------------------- type(mct_aVect) , intent(inout) :: i2x_i @@ -1196,7 +1196,7 @@ subroutine ice_setcoupling_mct(mpicom_i, ICEID, gsmap_i, dom_i, gsmap_base) start(1) = 1 pe_loc(1) = 0 - do n = 2,npes + do n = 2,npes pe_loc(n) = n-1 start(n) = start(n-1) + length(n-1) enddo @@ -1231,14 +1231,14 @@ subroutine ice_setcoupling_mct(mpicom_i, ICEID, gsmap_i, dom_i, gsmap_base) ! 
Initialize attribute vector with special value allocate(data(lsize)) - data(:) = -9999.0_R8 - call mct_gGrid_importRAttr(dom_i,"lat" ,data,lsize) - call mct_gGrid_importRAttr(dom_i,"lon" ,data,lsize) - call mct_gGrid_importRAttr(dom_i,"area" ,data,lsize) - call mct_gGrid_importRAttr(dom_i,"aream",data,lsize) - data(:) = 0.0_R8 - call mct_gGrid_importRAttr(dom_i,"mask",data,lsize) - call mct_gGrid_importRAttr(dom_i,"frac",data,lsize) + data(:) = -9999.0_R8 + call mct_gGrid_importRAttr(dom_i,"lat" ,data,lsize) + call mct_gGrid_importRAttr(dom_i,"lon" ,data,lsize) + call mct_gGrid_importRAttr(dom_i,"area" ,data,lsize) + call mct_gGrid_importRAttr(dom_i,"aream",data,lsize) + data(:) = 0.0_R8 + call mct_gGrid_importRAttr(dom_i,"mask",data,lsize) + call mct_gGrid_importRAttr(dom_i,"frac",data,lsize) deallocate(data) ! Read domain arrays diff --git a/cicecore/drivers/mct/cesm1/ice_cpl_indices.F90 b/cicecore/drivers/mct/cesm1/ice_cpl_indices.F90 index 9e549a5ca..6c04271d2 100644 --- a/cicecore/drivers/mct/cesm1/ice_cpl_indices.F90 +++ b/cicecore/drivers/mct/cesm1/ice_cpl_indices.F90 @@ -1,5 +1,5 @@ module ice_cpl_indices - + use seq_flds_mod use mct_mod @@ -7,25 +7,25 @@ module ice_cpl_indices public ! By default make data private - ! ice -> drv + ! ice -> drv integer :: index_i2x_Si_ifrac ! fractional ice coverage wrt ocean integer :: index_i2x_Si_snowh ! snow height (m) - integer :: index_i2x_Si_t ! temperature - integer :: index_i2x_Si_tref ! 2m reference temperature - integer :: index_i2x_Si_qref ! 2m reference specific humidity + integer :: index_i2x_Si_t ! temperature + integer :: index_i2x_Si_tref ! 2m reference temperature + integer :: index_i2x_Si_qref ! 2m reference specific humidity integer :: index_i2x_Si_logz0 ! surface roughness length (m) - integer :: index_i2x_Si_avsdr ! albedo: visible, direct - integer :: index_i2x_Si_avsdf ! albedo: near ir, direct - integer :: index_i2x_Si_anidr ! albedo: visible, diffuse - integer :: index_i2x_Si_anidf ! albedo: near ir, diffuse + integer :: index_i2x_Si_avsdr ! albedo: visible, direct + integer :: index_i2x_Si_avsdf ! albedo: near ir, direct + integer :: index_i2x_Si_anidr ! albedo: visible, diffuse + integer :: index_i2x_Si_anidf ! albedo: near ir, diffuse integer :: index_i2x_Si_u10 ! 10m wind - integer :: index_i2x_Faii_lwup ! upward longwave heat flux - integer :: index_i2x_Faii_lat ! latent heat flux - integer :: index_i2x_Faii_sen ! sensible heat flux - integer :: index_i2x_Faii_evap ! evaporation water flux - integer :: index_i2x_Faii_taux ! wind stress, zonal - integer :: index_i2x_Faii_tauy ! wind stress, meridional + integer :: index_i2x_Faii_lwup ! upward longwave heat flux + integer :: index_i2x_Faii_lat ! latent heat flux + integer :: index_i2x_Faii_sen ! sensible heat flux + integer :: index_i2x_Faii_evap ! evaporation water flux + integer :: index_i2x_Faii_taux ! wind stress, zonal + integer :: index_i2x_Faii_tauy ! wind stress, meridional integer :: index_i2x_Faii_swnet ! sw: net integer :: index_i2x_Fioi_swpen ! sw: net penetrating ice integer :: index_i2x_Fioi_melth ! heat flux from melting ice (<0) @@ -76,14 +76,14 @@ module ice_cpl_indices integer :: index_x2i_So_dhdx ! ocn surface slope, zonal integer :: index_x2i_So_dhdy ! ocn surface slope, meridional integer :: index_x2i_Faxa_lwdn ! downward lw heat flux - integer :: index_x2i_Faxa_rain ! prec: liquid - integer :: index_x2i_Faxa_snow ! prec: frozen + integer :: index_x2i_Faxa_rain ! prec: liquid + integer :: index_x2i_Faxa_snow ! 
prec: frozen integer :: index_x2i_Faxa_swndr ! sw: nir direct downward integer :: index_x2i_Faxa_swvdr ! sw: vis direct downward integer :: index_x2i_Faxa_swndf ! sw: nir diffuse downward integer :: index_x2i_Faxa_swvdf ! sw: vis diffuse downward integer :: index_x2i_Faxa_swnet ! sw: net - integer :: index_x2i_Fioo_q ! ocn freeze or melt heat + integer :: index_x2i_Fioo_q ! ocn freeze or melt heat integer :: index_x2i_Faxa_bcphidry ! flux: Black Carbon hydrophilic dry deposition integer :: index_x2i_Faxa_bcphodry ! flux: Black Carbon hydrophobic dry deposition integer :: index_x2i_Faxa_bcphiwet ! flux: Black Carbon hydrophilic wet deposition diff --git a/cicecore/drivers/mct/cesm1/ice_import_export.F90 b/cicecore/drivers/mct/cesm1/ice_import_export.F90 index 3acf9bdaa..7ac4f0bb7 100644 --- a/cicecore/drivers/mct/cesm1/ice_import_export.F90 +++ b/cicecore/drivers/mct/cesm1/ice_import_export.F90 @@ -30,7 +30,7 @@ module ice_import_export use ice_domain_size , only: nx_global, ny_global, block_size_x, block_size_y, max_blocks use ice_grid , only: tlon, tlat, tarea, tmask, anglet, hm use ice_grid , only: grid_type, grid_average_X2Y - use ice_boundary , only: ice_HaloUpdate + use ice_boundary , only: ice_HaloUpdate use ice_communicate , only: my_task, master_task, MPI_COMM_ICE, get_num_procs use ice_calendar , only: istep, istep1, diagfreq use ice_fileunits , only: nu_diag @@ -66,11 +66,11 @@ subroutine ice_import( x2i ) integer,parameter :: nflds=17,nfldv=6,nfldb=27 real (kind=dbl_kind),allocatable :: aflds(:,:,:,:) real (kind=dbl_kind) :: workx, worky - real (kind=dbl_kind) :: MIN_RAIN_TEMP, MAX_SNOW_TEMP + real (kind=dbl_kind) :: MIN_RAIN_TEMP, MAX_SNOW_TEMP character(len=char_len) :: tfrz_option logical (kind=log_kind) :: modal_aero, z_tracers, skl_bgc logical (kind=log_kind) :: tr_aero, tr_iage, tr_FY, tr_pond - logical (kind=log_kind) :: tr_lvl, tr_zaero, tr_bgc_Nit + logical (kind=log_kind) :: tr_lvl, tr_zaero, tr_bgc_Nit real (kind=dbl_kind) :: tffresh character(len=*), parameter :: subname = '(ice_import)' !----------------------------------------------------- @@ -102,7 +102,7 @@ subroutine ice_import( x2i ) n=0 do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -250,7 +250,7 @@ subroutine ice_import( x2i ) deallocate(aflds) !------------------------------------------------------- - ! Set aerosols from coupler + ! Set aerosols from coupler !------------------------------------------------------- allocate(aflds(nx_block,ny_block,nfldb,nblocks)) @@ -258,7 +258,7 @@ subroutine ice_import( x2i ) n=0 do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -310,7 +310,7 @@ subroutine ice_import( x2i ) aflds(i,j,7,iblk) = x2i(index_x2i_So_doc, n) * p5 ! 
split evenly for now aflds(i,j,8,iblk) = x2i(index_x2i_So_doc, n) * p5 !x2i(index_x2i_So_doc2, n) aflds(i,j,9,iblk) = c0 - aflds(i,j,10,iblk) = c0 !x2i(index_x2i_So_dic, n) + aflds(i,j,10,iblk) = c0 !x2i(index_x2i_So_dic, n) aflds(i,j,11,iblk) = x2i(index_x2i_So_don, n) aflds(i,j,12,iblk) = x2i(index_x2i_So_no3, n) aflds(i,j,13,iblk) = x2i(index_x2i_So_sio3, n) @@ -322,12 +322,12 @@ subroutine ice_import( x2i ) aflds(i,j,19,iblk) = c0 !x2i(index_x2i_So_fep2, n) aflds(i,j,20,iblk) = x2i(index_x2i_So_fed, n) aflds(i,j,21,iblk) = c0 !x2i(index_x2i_So_fed2, n) - aflds(i,j,22,iblk) = c0 !x2i(index_x2i_So_zaer1, n) - aflds(i,j,23,iblk) = c0 !x2i(index_x2i_So_zaer2, n) - aflds(i,j,24,iblk) = c0 !x2i(index_x2i_So_zaer3, n) - aflds(i,j,25,iblk) = c0 !x2i(index_x2i_So_zaer4, n) - aflds(i,j,26,iblk) = c0 !x2i(index_x2i_So_zaer5, n) - aflds(i,j,27,iblk) = c0 !x2i(index_x2i_So_zaer6, n) + aflds(i,j,22,iblk) = c0 !x2i(index_x2i_So_zaer1, n) + aflds(i,j,23,iblk) = c0 !x2i(index_x2i_So_zaer2, n) + aflds(i,j,24,iblk) = c0 !x2i(index_x2i_So_zaer3, n) + aflds(i,j,25,iblk) = c0 !x2i(index_x2i_So_zaer4, n) + aflds(i,j,26,iblk) = c0 !x2i(index_x2i_So_zaer5, n) + aflds(i,j,27,iblk) = c0 !x2i(index_x2i_So_zaer6, n) endif enddo enddo @@ -346,7 +346,7 @@ subroutine ice_import( x2i ) do i = 1,nx_block faero_atm(i,j,1,iblk) = aflds(i,j,1,iblk) faero_atm(i,j,2,iblk) = aflds(i,j,2,iblk) - faero_atm(i,j,3,iblk) = aflds(i,j,3,iblk) + faero_atm(i,j,3,iblk) = aflds(i,j,3,iblk) enddo !i enddo !j enddo !iblk @@ -357,7 +357,7 @@ subroutine ice_import( x2i ) do iblk = 1, nblocks do j = 1,ny_block do i = 1,nx_block - algalN(i,j,1,iblk) = aflds(i,j,4,iblk) + algalN(i,j,1,iblk) = aflds(i,j,4,iblk) algalN(i,j,2,iblk) = aflds(i,j,5,iblk) algalN(i,j,3,iblk) = aflds(i,j,6,iblk) doc(i,j,1,iblk) = aflds(i,j,7,iblk) @@ -409,16 +409,16 @@ subroutine ice_import( x2i ) do i = 1,nx_block ! ocean - workx = uocn (i,j,iblk) ! currents, m/s + workx = uocn (i,j,iblk) ! currents, m/s worky = vocn (i,j,iblk) - uocn(i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) & ! convert to POP grid + uocn(i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) & ! convert to POP grid + worky*sin(ANGLET(i,j,iblk)) vocn(i,j,iblk) = worky*cos(ANGLET(i,j,iblk)) & - workx*sin(ANGLET(i,j,iblk)) workx = ss_tltx (i,j,iblk) ! sea sfc tilt, m/m worky = ss_tlty (i,j,iblk) - ss_tltx(i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) & ! convert to POP grid + ss_tltx(i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) & ! convert to POP grid + worky*sin(ANGLET(i,j,iblk)) ss_tlty(i,j,iblk) = worky*cos(ANGLET(i,j,iblk)) & - workx*sin(ANGLET(i,j,iblk)) @@ -430,7 +430,7 @@ subroutine ice_import( x2i ) #endif if (tfrz_option == 'minus1p8') then - Tf (i,j,iblk) = -1.8_dbl_kind + Tf (i,j,iblk) = -1.8_dbl_kind elseif (tfrz_option == 'linear_salt') then Tf (i,j,iblk) = -0.0544_r8*sss(i,j,iblk) ! THIS IS THE ORIGINAL POP FORMULA elseif (tfrz_option == 'mushy') then @@ -463,7 +463,7 @@ subroutine ice_import( x2i ) !$OMP END PARALLEL DO call t_stopf ('cice_imp_ocn') - ! Interpolate ocean dynamics variables from T-cell centers to + ! Interpolate ocean dynamics variables from T-cell centers to ! U-cell centers. if (.not.prescribed_ice) then @@ -487,7 +487,7 @@ subroutine ice_import( x2i ) ! atmosphere workx = uatm(i,j,iblk) ! wind velocity, m/s - worky = vatm(i,j,iblk) + worky = vatm(i,j,iblk) uatm (i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) & ! convert to POP grid + worky*sin(ANGLET(i,j,iblk)) ! note uatm, vatm, wind vatm (i,j,iblk) = worky*cos(ANGLET(i,j,iblk)) & ! 
are on the T-grid here @@ -506,7 +506,7 @@ end subroutine ice_import !=============================================================================== - subroutine ice_export( i2x ) + subroutine ice_export( i2x ) !----------------------------------------------------- ! @@ -514,7 +514,7 @@ subroutine ice_export( i2x ) real(r8), intent(inout) :: i2x(:,:) ! ! Local Variables - integer :: i, j, iblk, n, ij + integer :: i, j, iblk, n, ij integer :: ilo, ihi, jlo, jhi !beginning and end of physical domain integer (kind=int_kind) :: icells ! number of ocean/ice cells integer (kind=int_kind), dimension (nx_block*ny_block) :: indxi ! compressed indices in i @@ -532,7 +532,7 @@ subroutine ice_export( i2x ) workx, worky ! tmps for converting grid real (kind=dbl_kind) :: & - vonkar, zref, iceruf, tffresh + vonkar, zref, iceruf, tffresh type(block) :: this_block ! block information for current block integer :: icnt,icnt1,iblk1,icnt1sum,icnt1max ! gridcell and block counters @@ -614,7 +614,7 @@ subroutine ice_export( i2x ) icnt1 = 0 iblk1 = 0 do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -630,12 +630,12 @@ subroutine ice_export( i2x ) if ( tmask(i,j,iblk)) i2x(:,n) = c0 - !-------states-------------------- - i2x(index_i2x_Si_ifrac ,n) = ailohi(i,j,iblk) + !-------states-------------------- + i2x(index_i2x_Si_ifrac ,n) = ailohi(i,j,iblk) if ( tmask(i,j,iblk) .and. ailohi(i,j,iblk) > c0 ) then icnt = icnt + 1 - !-------states-------------------- + !-------states-------------------- i2x(index_i2x_Si_t ,n) = Tsrf(i,j,iblk) i2x(index_i2x_Si_avsdr ,n) = alvdr(i,j,iblk) i2x(index_i2x_Si_anidr ,n) = alidr(i,j,iblk) @@ -659,17 +659,17 @@ subroutine ice_export( i2x ) endif !--- a/i fluxes computed by ice - i2x(index_i2x_Faii_taux ,n) = tauxa(i,j,iblk) - i2x(index_i2x_Faii_tauy ,n) = tauya(i,j,iblk) - i2x(index_i2x_Faii_lat ,n) = flat(i,j,iblk) - i2x(index_i2x_Faii_sen ,n) = fsens(i,j,iblk) - i2x(index_i2x_Faii_lwup ,n) = flwout(i,j,iblk) - i2x(index_i2x_Faii_evap ,n) = evap(i,j,iblk) + i2x(index_i2x_Faii_taux ,n) = tauxa(i,j,iblk) + i2x(index_i2x_Faii_tauy ,n) = tauya(i,j,iblk) + i2x(index_i2x_Faii_lat ,n) = flat(i,j,iblk) + i2x(index_i2x_Faii_sen ,n) = fsens(i,j,iblk) + i2x(index_i2x_Faii_lwup ,n) = flwout(i,j,iblk) + i2x(index_i2x_Faii_evap ,n) = evap(i,j,iblk) i2x(index_i2x_Faii_swnet,n) = fswabs(i,j,iblk) !--- i/o fluxes computed by ice i2x(index_i2x_Fioi_melth,n) = fhocn(i,j,iblk) - i2x(index_i2x_Fioi_swpen,n) = fswthru(i,j,iblk) ! hf from melting + i2x(index_i2x_Fioi_swpen,n) = fswthru(i,j,iblk) ! hf from melting i2x(index_i2x_Fioi_meltw,n) = fresh(i,j,iblk) ! h2o flux from melting ??? i2x(index_i2x_Fioi_salt ,n) = fsalt(i,j,iblk) ! salt flux from melting ??? i2x(index_i2x_Fioi_taux ,n) = tauxo(i,j,iblk) ! stress : i/o zonal ??? 
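The ice_import hunks above rotate each coupler vector pair (winds, ocean currents, sea-surface tilt) from geographic axes onto the POP grid through the local T-cell angle ANGLET. A minimal sketch of that rotation, with illustrative scalar values standing in for the model's block arrays:

    program rotate_to_pop_grid_demo
      ! Sketch (not part of the patch) of the two-line rotation applied to
      ! uocn/vocn, ss_tltx/ss_tlty and uatm/vatm in the import code above.
      implicit none
      integer, parameter :: dbl = selected_real_kind(13)
      real(dbl) :: workx, worky, anglet, u_pop, v_pop

      workx  = 10.0_dbl   ! eastward component (m/s), illustrative value
      worky  =  2.0_dbl   ! northward component (m/s), illustrative value
      anglet =  0.3_dbl   ! local grid rotation angle (radians), illustrative

      ! Same form as the hunks above: rotate (workx,worky) by anglet.
      u_pop = workx*cos(anglet) + worky*sin(anglet)
      v_pop = worky*cos(anglet) - workx*sin(anglet)

      print '(a,2f10.4)', 'components on the POP grid: ', u_pop, v_pop
    end program rotate_to_pop_grid_demo
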
@@ -680,18 +680,18 @@ subroutine ice_export( i2x ) if (index_i2x_Fioi_diat > 0) i2x(index_i2x_Fioi_diat ,n) = falgalN(i,j,1,iblk) * R_C2N(1) if (index_i2x_Fioi_sp > 0) i2x(index_i2x_Fioi_sp ,n) = falgalN(i,j,2,iblk) * R_C2N(2) if (index_i2x_Fioi_phaeo > 0) i2x(index_i2x_Fioi_phaeo ,n) = falgalN(i,j,3,iblk) * R_C2N(3) - if (index_i2x_Fioi_doc > 0) i2x(index_i2x_Fioi_doc ,n) = fdoc(i,j,1,iblk) + fdoc(i,j,2,iblk) - if (index_i2x_Fioi_doc2 > 0) i2x(index_i2x_Fioi_doc2 ,n) = c0 !fdoc(i,j,2,iblk) + if (index_i2x_Fioi_doc > 0) i2x(index_i2x_Fioi_doc ,n) = fdoc(i,j,1,iblk) + fdoc(i,j,2,iblk) + if (index_i2x_Fioi_doc2 > 0) i2x(index_i2x_Fioi_doc2 ,n) = c0 !fdoc(i,j,2,iblk) if (index_i2x_Fioi_doc3 > 0) i2x(index_i2x_Fioi_doc3 ,n) = c0 !fdoc(i,j,3,iblk) if (index_i2x_Fioi_dic > 0) i2x(index_i2x_Fioi_dic ,n) = c0 !fdic(i,j,1,iblk) - if (index_i2x_Fioi_don > 0) i2x(index_i2x_Fioi_don ,n) = fdon(i,j,1,iblk) - if (index_i2x_Fioi_no3 > 0) i2x(index_i2x_Fioi_no3 ,n) = fnit(i,j,iblk) - if (index_i2x_Fioi_sio3 > 0) i2x(index_i2x_Fioi_sio3 ,n) = fsil(i,j,iblk) - if (index_i2x_Fioi_nh4 > 0) i2x(index_i2x_Fioi_nh4 ,n) = famm(i,j,iblk) - if (index_i2x_Fioi_dms > 0) i2x(index_i2x_Fioi_dms ,n) = fdms(i,j,iblk) - if (index_i2x_Fioi_dmspp > 0) i2x(index_i2x_Fioi_dmspp ,n) = c0 - if (index_i2x_Fioi_dmsp > 0) i2x(index_i2x_Fioi_dmsp ,n) = fdmsp(i,j,iblk) - if (index_i2x_Fioi_donr > 0) i2x(index_i2x_Fioi_donr ,n) = fhum(i,j,iblk) + if (index_i2x_Fioi_don > 0) i2x(index_i2x_Fioi_don ,n) = fdon(i,j,1,iblk) + if (index_i2x_Fioi_no3 > 0) i2x(index_i2x_Fioi_no3 ,n) = fnit(i,j,iblk) + if (index_i2x_Fioi_sio3 > 0) i2x(index_i2x_Fioi_sio3 ,n) = fsil(i,j,iblk) + if (index_i2x_Fioi_nh4 > 0) i2x(index_i2x_Fioi_nh4 ,n) = famm(i,j,iblk) + if (index_i2x_Fioi_dms > 0) i2x(index_i2x_Fioi_dms ,n) = fdms(i,j,iblk) + if (index_i2x_Fioi_dmspp > 0) i2x(index_i2x_Fioi_dmspp ,n) = c0 + if (index_i2x_Fioi_dmsp > 0) i2x(index_i2x_Fioi_dmsp ,n) = fdmsp(i,j,iblk) + if (index_i2x_Fioi_donr > 0) i2x(index_i2x_Fioi_donr ,n) = fhum(i,j,iblk) ! convert from umol Fe/m^3 to mmol Fe/m^3 if (index_i2x_Fioi_fep1 > 0) i2x(index_i2x_Fioi_fep1 ,n) = c0 !ffep(i,j,1,iblk) / 1000.0_dbl_kind if (index_i2x_Fioi_fep2 > 0) i2x(index_i2x_Fioi_fep2 ,n) = c0 !ffep(i,j,2,iblk) / 1000.0_dbl_kind diff --git a/cicecore/drivers/mct/cesm1/ice_prescribed_mod.F90 b/cicecore/drivers/mct/cesm1/ice_prescribed_mod.F90 index 0868ef2fa..78b7d15c4 100644 --- a/cicecore/drivers/mct/cesm1/ice_prescribed_mod.F90 +++ b/cicecore/drivers/mct/cesm1/ice_prescribed_mod.F90 @@ -1,5 +1,5 @@ !=================================================================== -!BOP +!BOP ! ! !MODULE: ice_prescribed_mod - Prescribed Ice Model ! @@ -19,7 +19,7 @@ ! 2005-Apr-19 - B. Kauffman, J. Schramm, M. Vertenstein, NCAR - design ! ! !INTERFACE: ---------------------------------------------------------- - + module ice_prescribed_mod ! !USES: @@ -72,7 +72,7 @@ module ice_prescribed_mod integer(SHR_KIND_IN),parameter :: nFilesMaximum = 400 ! max number of files integer(kind=int_kind) :: stream_year_first ! first year in stream to use integer(kind=int_kind) :: stream_year_last ! last year in stream to use - integer(kind=int_kind) :: model_year_align ! align stream_year_first + integer(kind=int_kind) :: model_year_align ! align stream_year_first ! with this model year character(len=char_len_long) :: stream_fldVarName @@ -88,7 +88,7 @@ module ice_prescribed_mod type(shr_strdata_type) :: sdat ! prescribed data stream character(len=char_len_long) :: fldList ! 
list of fields in data stream - real(kind=dbl_kind), allocatable :: ice_cov(:,:,:) ! ice cover + real(kind=dbl_kind), allocatable :: ice_cov(:,:,:) ! ice cover ! real (kind=dbl_kind), parameter :: & ! cp_sno = 0.0_dbl_kind & ! specific heat of snow (J/kg/K) @@ -109,13 +109,13 @@ module ice_prescribed_mod ! ! !IROUTINE: ice_prescribed_init - prescribed ice initialization ! -! !INTERFACE: +! !INTERFACE: subroutine ice_prescribed_init(compid, gsmap, dom) use mpi ! MPI Fortran module use shr_pio_mod, only : shr_pio_getiotype, shr_pio_getiosys ! !DESCRIPTION: -! Prescribed ice initialization - needed to -! work with new shr_strdata module derived type +! Prescribed ice initialization - needed to +! work with new shr_strdata module derived type ! ! !REVISION HISTORY: ! 2009-Oct-12 - M. Vertenstein @@ -130,7 +130,7 @@ subroutine ice_prescribed_init(compid, gsmap, dom) !EOP !----- Local ------ integer(kind=int_kind) :: nml_error ! namelist i/o error flag - integer(kind=int_kind) :: n, nFile, ierr + integer(kind=int_kind) :: n, nFile, ierr character(len=8) :: fillalgo character(len=*), parameter :: subname = '(ice_prescribed_init)' character(*),parameter :: F00 = "(4a)" @@ -227,9 +227,9 @@ subroutine ice_prescribed_init(compid, gsmap, dom) if (my_task == master_task) then write(nu_diag,*) ' ' write(nu_diag,*) 'This is the prescribed ice coverage option.' - write(nu_diag,*) ' stream_year_first = ',stream_year_first - write(nu_diag,*) ' stream_year_last = ',stream_year_last - write(nu_diag,*) ' model_year_align = ',model_year_align + write(nu_diag,*) ' stream_year_first = ',stream_year_first + write(nu_diag,*) ' stream_year_last = ',stream_year_last + write(nu_diag,*) ' model_year_align = ',model_year_align write(nu_diag,*) ' stream_fldVarName = ',trim(stream_fldVarName) do n = 1,nFile write(nu_diag,*) ' stream_fldFileName = ',trim(stream_fldFileName(n)),n @@ -280,7 +280,7 @@ subroutine ice_prescribed_init(compid, gsmap, dom) hin_max(1) = 999._dbl_kind end if end subroutine ice_prescribed_init - + !======================================================================= !BOP =================================================================== ! @@ -316,7 +316,7 @@ subroutine ice_prescribed_run(mDateIn, secIn) logical, save :: first_time = .true. character(len=*), parameter :: subname = '(ice_prescribed_run)' character(*),parameter :: F00 = "(a,2g20.13)" - + !------------------------------------------------------------------------ ! Interpolate to new ice coverage !------------------------------------------------------------------------ @@ -327,16 +327,16 @@ subroutine ice_prescribed_run(mDateIn, secIn) allocate(ice_cov(nx_block,ny_block,max_blocks)) endif - ice_cov(:,:,:) = c0 ! This initializes ghost cells as well + ice_cov(:,:,:) = c0 ! This initializes ghost cells as well n=0 do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi - + do j = jlo, jhi do i = ilo, ihi n = n+1 @@ -384,11 +384,11 @@ end subroutine ice_prescribed_run ! 2001-May - B. P. Briegleb - Original version ! ! !INTERFACE: ------------------------------------------------------------------ - + subroutine ice_prescribed_phys ! !USES: - + use ice_flux use ice_state use ice_arrays_column, only : hin_max @@ -396,9 +396,9 @@ subroutine ice_prescribed_phys use ice_dyn_evp implicit none - + ! 
!INPUT/OUTPUT PARAMETERS: - + !EOP !----- Local ------ @@ -411,12 +411,12 @@ subroutine ice_prescribed_phys real(kind=dbl_kind) :: slope ! diff in underlying ocean tmp and ice surface tmp real(kind=dbl_kind) :: Ti ! ice level temperature real(kind=dbl_kind) :: Tmlt ! ice level melt temperature - real(kind=dbl_kind) :: qin_save(nilyr) + real(kind=dbl_kind) :: qin_save(nilyr) real(kind=dbl_kind) :: qsn_save(nslyr) real(kind=dbl_kind) :: hi ! ice prescribed (hemispheric) ice thickness real(kind=dbl_kind) :: hs ! snow thickness real(kind=dbl_kind) :: zn ! normalized ice thickness - real(kind=dbl_kind) :: salin(nilyr) ! salinity (ppt) + real(kind=dbl_kind) :: salin(nilyr) ! salinity (ppt) real(kind=dbl_kind) :: rad_to_deg, pi, puny real(kind=dbl_kind) :: rhoi, rhos, cp_ice, cp_ocn, lfresh, depressT @@ -444,11 +444,11 @@ subroutine ice_prescribed_phys ! aicen(:,:,:,:) = c0 ! vicen(:,:,:,:) = c0 ! eicen(:,:,:,:) = c0 - + ! do nc=1,ncat ! trcrn(:,:,nt_Tsfc,nc,:) = Tf(:,:,:) ! enddo - + !----------------------------------------------------------------- ! Set ice cover over land to zero, not sure if this should be ! be done earier, before time/spatial interp?????? @@ -502,8 +502,8 @@ subroutine ice_prescribed_phys endif aicen(i,j,nc,iblk) = ice_cov(i,j,iblk) - vicen(i,j,nc,iblk) = hi*aicen(i,j,nc,iblk) - vsnon(i,j,nc,iblk) = hs*aicen(i,j,nc,iblk) + vicen(i,j,nc,iblk) = hi*aicen(i,j,nc,iblk) + vsnon(i,j,nc,iblk) = hs*aicen(i,j,nc,iblk) !--------------------------------------------------------- ! make linear temp profile and compute enthalpy @@ -564,7 +564,7 @@ subroutine ice_prescribed_phys trcr_base = trcr_base(1:ntrcr,:), & n_trcr_strata = n_trcr_strata(1:ntrcr), & nt_strata = nt_strata(1:ntrcr,:)) - + enddo ! i enddo ! j enddo ! iblk diff --git a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 index 338b25050..3d5e5cc2a 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 @@ -78,7 +78,7 @@ subroutine cice_init2() use ice_calendar , only: dt, dt_dyn, istep, istep1, write_ic, init_calendar, calendar use ice_communicate , only: my_task, master_task use ice_diagnostics , only: init_diags - use ice_domain_size , only: ncat, nfsd + use ice_domain_size , only: ncat, nfsd, nfreq use ice_dyn_eap , only: init_eap, alloc_dyn_eap use ice_dyn_shared , only: kdyn, init_dyn use ice_dyn_vp , only: init_vp @@ -94,10 +94,12 @@ subroutine cice_init2() use ice_restoring , only: ice_HaloRestore_init use ice_timers , only: timer_total, init_ice_timers, ice_timer_start use ice_transport_driver , only: init_transport + use ice_arrays_column , only: wavefreq, dwavefreq logical(kind=log_kind) :: tr_aero, tr_zaero, skl_bgc, z_tracers logical(kind=log_kind) :: tr_iso, tr_fsd, wave_spec, tr_snow character(len=char_len) :: snw_aging_table + real(kind=dbl_kind), dimension(25) :: wave_spectrum_profile ! hardwire for now character(len=*), parameter :: subname = '(cice_init2)' !---------------------------------------------------- @@ -177,6 +179,11 @@ subroutine cice_init2() endif endif + if (wave_spec) then + call icepack_init_wave(nfreq=nfreq, & + wave_spectrum_profile=wave_spectrum_profile, wavefreq=wavefreq, dwavefreq=dwavefreq) + end if + ! Initialize shortwave components using swdn from previous timestep ! if restarting. These components will be scaled to current forcing ! in prep_radiation. 
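The cice_init2 hunk just above adds the wave-spectrum setup for the floe-size distribution: whenever wave_spec is on, a 25-bin profile (hardwired for now, per the inline comment) is obtained from icepack_init_wave together with the wavefreq and dwavefreq arrays. A sketch of the call pattern follows; init_wave_stub is a hypothetical stand-in for the Icepack routine, and its frequency spacing is illustrative only:

    program wave_init_demo
      ! Sketch (not part of the patch) of the guarded wave-spectrum setup
      ! added to cice_init2 above.
      implicit none
      integer, parameter :: dbl = selected_real_kind(13)
      integer, parameter :: nfreq = 25    ! matches dimension(25) in the hunk
      real(dbl) :: wave_spectrum_profile(nfreq), wavefreq(nfreq), dwavefreq(nfreq)
      logical :: wave_spec = .true.

      if (wave_spec) then   ! only when wave-ice coupling is requested
         call init_wave_stub(nfreq, wave_spectrum_profile, wavefreq, dwavefreq)
         print '(a,f8.4)', 'first frequency bin: ', wavefreq(1)
      end if

    contains

      subroutine init_wave_stub(n, profile, freq, dfreq)
        ! Hypothetical stand-in for icepack_init_wave: fills a flat profile
        ! and a simple stretched frequency axis so the sketch runs alone.
        integer,   intent(in)  :: n
        real(dbl), intent(out) :: profile(n), freq(n), dfreq(n)
        integer :: k
        do k = 1, n
           profile(k) = 1.0_dbl / real(n, dbl)
           freq(k)    = 0.04_dbl * 1.1_dbl**(k-1)   ! illustrative spacing only
        end do
        dfreq(1) = freq(1)
        do k = 2, n
           dfreq(k) = freq(k) - freq(k-1)
        end do
      end subroutine init_wave_stub
    end program wave_init_demo
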
@@ -215,11 +222,17 @@ subroutine init_restart() use ice_grid, only: tmask use ice_init, only: ice_ic use ice_init_column, only: init_age, init_FY, init_lvl, init_snowtracers, & +#ifdef UNDEPRECATE_CESMPONDS init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & +#else + init_meltponds_lvl, init_meltponds_topo, & +#endif init_isotope, init_aerosol, init_hbrine, init_bgc, init_fsd use ice_restart_column, only: restart_age, read_restart_age, & restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & +#ifdef UNDEPRECATE_CESMPONDS restart_pond_cesm, read_restart_pond_cesm, & +#endif restart_pond_lvl, read_restart_pond_lvl, & restart_pond_topo, read_restart_pond_topo, & restart_snow, read_restart_snow, & @@ -236,7 +249,11 @@ subroutine init_restart() i, j , & ! horizontal indices iblk ! block index logical(kind=log_kind) :: & +#ifdef UNDEPRECATE_CESMPONDS tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & +#else + tr_iage, tr_FY, tr_lvl, tr_pond_lvl, & +#endif tr_pond_topo, tr_fsd, tr_iso, tr_aero, tr_brine, tr_snow, & skl_bgc, z_tracers, solve_zsal integer(kind=int_kind) :: & @@ -257,7 +274,11 @@ subroutine init_restart() call icepack_query_parameters(skl_bgc_out=skl_bgc, & z_tracers_out=z_tracers, solve_zsal_out=solve_zsal) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & +#ifdef UNDEPRECATE_CESMPONDS tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & +#else + tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & +#endif tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & tr_snow_out=tr_snow, tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & @@ -319,6 +340,7 @@ subroutine init_restart() enddo ! iblk endif endif +#ifdef UNDEPRECATE_CESMPONDS ! CESM melt ponds if (tr_pond_cesm) then if (trim(runtype) == 'continue') & @@ -332,6 +354,7 @@ subroutine init_restart() enddo ! iblk endif endif +#endif ! 
level-ice melt ponds if (tr_pond_lvl) then if (trim(runtype) == 'continue') & diff --git a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 index 79066e82a..c68583648 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 @@ -121,7 +121,11 @@ subroutine ice_step use ice_history_bgc, only: init_history_bgc use ice_restart, only: final_restart use ice_restart_column, only: write_restart_age, write_restart_FY, & +#ifdef UNDEPRECATE_CESMPONDS write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & +#else + write_restart_lvl, write_restart_pond_lvl, & +#endif write_restart_pond_topo, write_restart_aero, write_restart_fsd, & write_restart_iso, write_restart_bgc, write_restart_hbrine, & write_restart_snow @@ -146,7 +150,11 @@ subroutine ice_step logical (kind=log_kind) :: & tr_iage, tr_FY, tr_lvl, tr_fsd, tr_snow, & +#ifdef UNDEPRECATE_CESMPONDS tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & +#else + tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & +#endif calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec character(len=*), parameter :: subname = '(ice_step)' @@ -164,7 +172,11 @@ subroutine ice_step solve_zsal_out=solve_zsal, z_tracers_out=z_tracers, ktherm_out=ktherm, & wave_spec_out=wave_spec) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & +#ifdef UNDEPRECATE_CESMPONDS tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & +#else + tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & +#endif tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & tr_iso_out=tr_iso, tr_fsd_out=tr_fsd, tr_snow_out=tr_snow) call icepack_warnings_flush(nu_diag) @@ -219,7 +231,7 @@ subroutine ice_step !----------------------------------------------------------------- ! thermodynamics and biogeochemistry !----------------------------------------------------------------- - + call step_therm1 (dt, iblk) ! vertical thermodynamics if (debug_model) then plabeld = 'post step_therm1' @@ -370,7 +382,9 @@ subroutine ice_step if (tr_iage) call write_restart_age if (tr_FY) call write_restart_FY if (tr_lvl) call write_restart_lvl +#ifdef UNDEPRECATE_CESMPONDS if (tr_pond_cesm) call write_restart_pond_cesm +#endif if (tr_pond_lvl) call write_restart_pond_lvl if (tr_pond_topo) call write_restart_pond_topo if (tr_snow) call write_restart_snow @@ -387,7 +401,7 @@ subroutine ice_step call ice_timer_stop(timer_readwrite) ! reading/writing end subroutine ice_step - + !======================================================================= ! ! Prepare for coupling @@ -620,7 +634,7 @@ subroutine coupling_prep (iblk) fiso_evap=fiso_evap(:,:,:,iblk), & fiso_ocn =fiso_ocn (:,:,:,iblk), & Uref=Uref(:,:,iblk), wind=wind(:,:,iblk) ) - + !----------------------------------------------------------------- ! Define ice-ocean bgc fluxes !----------------------------------------------------------------- @@ -696,7 +710,7 @@ subroutine sfcflux_to_ocn(nx_block, ny_block, & ! local variables integer (kind=int_kind) :: & i, j, n ! horizontal indices - + real (kind=dbl_kind) :: & puny, & ! Lsub, & ! diff --git a/cicecore/drivers/nuopc/cmeps/CICE_copyright.txt b/cicecore/drivers/nuopc/cmeps/CICE_copyright.txt index 6eb3c9cca..14d97c481 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_copyright.txt +++ b/cicecore/drivers/nuopc/cmeps/CICE_copyright.txt @@ -1,17 +1,17 @@ ! Copyright (c) 2022, Triad National Security, LLC ! All rights reserved. -! +! ! 
Copyright 2022. Triad National Security, LLC. This software was -! produced under U.S. Government contract DE-AC52-06NA25396 for Los +! produced under U.S. Government contract DE-AC52-06NA25396 for Los ! Alamos National Laboratory (LANL), which is operated by Triad -! National Security, LLC for the U.S. Department of Energy. The U.S. -! Government has rights to use, reproduce, and distribute this software. -! NEITHER THE GOVERNMENT NOR TRIAD NATIONAL SECURITY, LLC MAKES ANY +! National Security, LLC for the U.S. Department of Energy. The U.S. +! Government has rights to use, reproduce, and distribute this software. +! NEITHER THE GOVERNMENT NOR TRIAD NATIONAL SECURITY, LLC MAKES ANY ! WARRANTY, EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF -! THIS SOFTWARE. If software is modified to produce derivative works, -! such modified software should be clearly marked, so as not to confuse +! THIS SOFTWARE. If software is modified to produce derivative works, +! such modified software should be clearly marked, so as not to confuse ! it with the version available from LANL. ! ! The full license and distribution policy are available from ! https://github.com/CICE-Consortium -! +! diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index 8920ea386..182308973 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -25,6 +25,7 @@ module ice_comp_nuopc use ice_calendar , only : force_restart_now, write_ic use ice_calendar , only : idate, mday, mmonth, myear, year_init use ice_calendar , only : msec, dt, calendar, calendar_type, nextsw_cday, istep + use ice_calendar , only : ice_calendar_noleap, ice_calendar_gregorian use ice_kinds_mod , only : dbl_kind, int_kind, char_len, char_len_long use ice_fileunits , only : nu_diag, nu_diag_set, inst_index, inst_name use ice_fileunits , only : inst_suffix, release_all_fileunits, flush_fileunit @@ -80,9 +81,6 @@ module ice_comp_nuopc character(len=*) , parameter :: orb_variable_year = 'variable_year' character(len=*) , parameter :: orb_fixed_parameters = 'fixed_parameters' - character(len=*),parameter :: shr_cal_noleap = 'NO_LEAP' - character(len=*),parameter :: shr_cal_gregorian = 'GREGORIAN' - type(ESMF_Mesh) :: ice_mesh integer :: nthrds ! Number of threads to use in this component @@ -216,7 +214,6 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) type(ESMF_Time) :: stopTime ! Stop time type(ESMF_Time) :: refTime ! Ref time type(ESMF_TimeInterval) :: timeStep ! Model timestep - type(ESMF_Calendar) :: esmf_calendar ! esmf calendar type(ESMF_CalKind_Flag) :: esmf_caltype ! esmf calendar type integer :: start_ymd ! Start date (YYYYMMDD) integer :: start_tod ! start time of day (s) @@ -339,7 +336,8 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) call get_component_instance(gcomp, inst_suffix, inst_index, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - inst_name = "ICE"//trim(inst_suffix) +! inst_name = "ICE"//trim(inst_suffix) + inst_name = "ICE" !---------------------------------------------------------------------------- ! 
start cice timers @@ -470,9 +468,9 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) call ESMF_TimeGet( currTime, calkindflag=esmf_caltype, rc=rc ) if (ChkErr(rc,__LINE__,u_FILE_u)) return if (esmf_caltype == ESMF_CALKIND_NOLEAP) then - calendar_type = shr_cal_noleap + calendar_type = ice_calendar_noleap else if (esmf_caltype == ESMF_CALKIND_GREGORIAN) then - calendar_type = shr_cal_gregorian + calendar_type = ice_calendar_gregorian else call abort_ice( subname//'ERROR:: bad calendar for ESMF' ) end if @@ -581,9 +579,11 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) end if call icepack_query_parameters( tfrz_option_out=tfrz_option) if (tfrz_option_driver /= tfrz_option) then - write(errmsg,'(a)') trim(subname)//'error: tfrz_option from driver '//trim(tfrz_option_driver)//& - ' must be the same as tfrz_option from cice namelist '//trim(tfrz_option) - call abort_ice(trim(errmsg)) + write(errmsg,'(a)') trim(subname)//'WARNING: tfrz_option from driver '//trim(tfrz_option_driver)//& + ' is overwriting tfrz_option from cice namelist '//trim(tfrz_option) + write(nu_diag,*) trim(errmsg) + call icepack_warnings_flush(nu_diag) + call icepack_init_parameters(tfrz_option_in=tfrz_option_driver) endif ! Flux convergence tolerance - always use the driver attribute value @@ -594,7 +594,7 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) read(cvalue,*) atmiter_conv_driver call icepack_query_parameters( atmiter_conv_out=atmiter_conv) if (atmiter_conv_driver /= atmiter_conv) then - write(errmsg,'(a,d13.5,a,d13.5)') trim(subname)//'warning: atmiter_ from driver ',& + write(errmsg,'(a,d13.5,a,d13.5)') trim(subname)//'WARNING: atmiter_conv from driver ',& atmiter_conv_driver,' is overwriting atmiter_conv from cice namelist ',atmiter_conv write(nu_diag,*) trim(errmsg) call icepack_warnings_flush(nu_diag) diff --git a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 index 7bfc53f45..f5a00cdf8 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 @@ -3,12 +3,13 @@ module ice_import_export use ESMF use NUOPC use NUOPC_Model - use ice_kinds_mod , only : int_kind, dbl_kind, char_len, log_kind + use ice_kinds_mod , only : int_kind, dbl_kind, char_len, char_len_long, log_kind use ice_constants , only : c0, c1, spval_dbl, radius use ice_constants , only : field_loc_center, field_type_scalar, field_type_vector use ice_blocks , only : block, get_block, nx_block, ny_block use ice_domain , only : nblocks, blocks_ice, halo_info, distrb_info use ice_domain_size , only : nx_global, ny_global, block_size_x, block_size_y, max_blocks, ncat + use ice_domain_size , only : nfreq, nfsd use ice_exit , only : abort_ice use ice_flux , only : strairxT, strairyT, strocnxT, strocnyT use ice_flux , only : alvdr, alidr, alvdf, alidf, Tref, Qref, Uref @@ -23,9 +24,10 @@ module ice_import_export use ice_flux , only : fsnow, uocn, vocn, sst, ss_tltx, ss_tlty, frzmlt use ice_flux , only : send_i2x_per_cat use ice_flux , only : sss, Tf, wind, fsw - use ice_state , only : vice, vsno, aice, aicen_init, trcr + use ice_arrays_column , only : floe_rad_c, wave_spectrum + use ice_state , only : vice, vsno, aice, aicen_init, trcr, trcrn use ice_grid , only : tlon, tlat, tarea, tmask, anglet, hm - use ice_grid , only : grid_type, grid_average_X2Y + use ice_grid , only : grid_type use ice_mesh_mod , only : ocn_gridcell_frac use ice_boundary , only : ice_HaloUpdate use ice_fileunits , only : nu_diag, flush_fileunit @@ -34,8 +36,10 @@ module ice_import_export use ice_shr_methods , only : chkerr, state_reset use icepack_intfc , only : icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc , only : icepack_query_parameters, icepack_query_tracer_flags + use icepack_intfc , only : icepack_query_tracer_indices use icepack_intfc , only : icepack_liquidus_temperature use icepack_intfc , only : icepack_sea_freezing_temperature + use icepack_parameters , only : puny, c2 use cice_wrapper_mod , only : t_startf, t_stopf, t_barrierf #ifdef CESMCOUPLED use shr_frz_mod , only : shr_frz_freezetemp @@ -112,6 +116,7 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam character(char_len) :: stdname character(char_len) :: cvalue logical :: flds_wiso ! use case + logical :: flds_wave ! use case logical :: isPresent, isSet character(len=*), parameter :: subname='(ice_import_export:ice_advertise_fields)' !------------------------------------------------------------------------------- @@ -148,6 +153,17 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam write(nu_diag,*)'flds_wiso = ',flds_wiso end if + flds_wave = .false. + call NUOPC_CompAttributeGet(gcomp, name='wav_coupling_to_cice', value=cvalue, & isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) then + read(cvalue,*) flds_wave + end if + if (my_task == master_task) then + write(nu_diag,*)'flds_wave = ',flds_wave + end if + !----------------- ! advertise import fields !----------------- @@ -192,6 +208,14 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam ! from atm - dry dust deposition fluxes (4 sizes) call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_dstdry', ungridded_lbound=1, ungridded_ubound=4) + ! the following are advertised but might not be connected if they are not advertised in the + ! cmeps esmFldsExchange_xxx_mod.F90 that is model specific + ! from wave + if (flds_wave) then + call fldlist_add(fldsToIce_num, fldsToIce, 'Sw_elevation_spectrum', ungridded_lbound=1, & + ungridded_ubound=25) + end if + do n = 1,fldsToIce_num call NUOPC_Advertise(importState, standardName=fldsToIce(n)%stdname, & TransferOfferGeomObject='will provide', rc=rc) @@ -225,6 +249,10 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam call fldlist_add(fldsFrIce_num, fldsFrIce, 'ice_fraction_n', & ungridded_lbound=1, ungridded_ubound=ncat) end if + if (flds_wave) then + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_thick' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_floediam' ) + end if ! ice/atm fluxes computed by ice call fldlist_add(fldsFrIce_num, fldsFrIce, 'stress_on_air_ice_zonal' ) @@ -292,7 +320,7 @@ subroutine ice_realize_fields(gcomp, mesh, flds_scalar_name, flds_scalar_num, rc type(ESMF_State) :: exportState type(ESMF_Field) :: lfield integer :: numOwnedElements - integer :: i, j, iblk, n + integer :: i, j, iblk, n, k integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain type(block) :: this_block ! block information for current block real(dbl_kind), allocatable :: mesh_areas(:) @@ -403,11 +431,10 @@ subroutine ice_import( importState, rc ) ! local variables integer,parameter :: nflds=16 integer,parameter :: nfldv=6 - integer :: i, j, iblk, n + integer :: i, j, iblk, n, k integer :: ilo, ihi, jlo, jhi !beginning and end of physical domain type(block) :: this_block ! 
block information for current block real (kind=dbl_kind),allocatable :: aflds(:,:,:,:) - real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks) :: work real (kind=dbl_kind) :: workx, worky real (kind=dbl_kind) :: MIN_RAIN_TEMP, MAX_SNOW_TEMP real (kind=dbl_kind) :: Tffresh @@ -559,6 +586,29 @@ subroutine ice_import( importState, rc ) end do !$OMP END PARALLEL DO + ! import wave elevation spectrum from wave (frequencies 1-25, assume that nfreq is 25) + if (State_FldChk(importState, 'Sw_elevation_spectrum')) then + if (nfreq /= 25) then + call abort_ice(trim(subname)//": ERROR nfreq not equal to 25 ") + end if + call state_getfldptr(importState, 'Sw_elevation_spectrum', fldptr=dataPtr2d, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + do k = 1,nfreq + n = 0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo; ihi = this_block%ihi + jlo = this_block%jlo; jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + wave_spectrum(i,j,k,iblk) = dataPtr2d(k,n) + end do + end do + end do + end do + end if + if ( State_fldChk(importState, 'Sa_ptem') .and. State_fldchk(importState,'air_density_height_lowest')) then !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks @@ -845,7 +895,7 @@ subroutine ice_export( exportState, rc ) ! local variables type(block) :: this_block ! block information for current block - integer :: i, j, iblk, n ! incides + integer :: i, j, iblk, n, k ! indices integer :: n2 ! thickness category index integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain real (kind=dbl_kind) :: workx, worky ! tmps for converting grid @@ -859,7 +909,11 @@ subroutine ice_export( exportState, rc ) real (kind=dbl_kind) :: tauxo (nx_block,ny_block,max_blocks) ! ice/ocean stress real (kind=dbl_kind) :: tauyo (nx_block,ny_block,max_blocks) ! ice/ocean stress real (kind=dbl_kind) :: ailohi(nx_block,ny_block,max_blocks) ! fractional ice area + real (kind=dbl_kind) :: floediam(nx_block,ny_block,max_blocks) + real (kind=dbl_kind) :: floethick(nx_block,ny_block,max_blocks) ! ice thickness real (kind=dbl_kind) :: Tffresh + logical (kind=log_kind) :: tr_fsd + integer (kind=int_kind) :: nt_fsd real (kind=dbl_kind), allocatable :: tempfld(:,:,:) real (kind=dbl_kind), pointer :: dataptr_ifrac_n(:,:) real (kind=dbl_kind), pointer :: dataptr_swpen_n(:,:) @@ -877,6 +931,9 @@ subroutine ice_export( exportState, rc ) ! tr_FY_out=tr_FY, tr_pond_out=tr_pond, tr_lvl_out=tr_lvl, & ! tr_zaero_out=tr_zaero, tr_bgc_Nit_out=tr_bgc_Nit) + call icepack_query_tracer_indices(nt_fsd_out=nt_fsd) + call icepack_query_tracer_flags(tr_fsd_out=tr_fsd) + call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=u_FILE_u, line=__LINE__) @@ -890,8 +947,10 @@ subroutine ice_export( exportState, rc ) tauya(:,:,:) = c0 tauxo(:,:,:) = c0 tauyo(:,:,:) = c0 + floediam(:,:,:) = c0 + floethick(:,:,:) = c0 - !$OMP PARALLEL DO PRIVATE(iblk,i,j,workx,worky, this_block, ilo, ihi, jlo, jhi) + !$OMP PARALLEL DO PRIVATE(iblk,i,j,k,workx,worky, this_block, ilo, ihi, jlo, jhi) do iblk = 1, nblocks this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo @@ -904,6 +963,27 @@ subroutine ice_export( exportState, rc ) ! ice fraction ailohi(i,j,iblk) = min(aice(i,j,iblk), c1) + if (tr_fsd) then + ! floe thickness (m) + if (aice(i,j,iblk) > puny) then + floethick(i,j,iblk) = vice(i,j,iblk) / aice(i,j,iblk) + else + floethick(i,j,iblk) = c0 + end if + + ! 
floe diameter (m) + workx = c0 + worky = c0 + do n = 1, ncat + do k = 1, nfsd + workx = workx + floe_rad_c(k) * aicen_init(i,j,n,iblk) * trcrn(i,j,nt_fsd+k-1,n,iblk) + worky = worky + aicen_init(i,j,n,iblk) * trcrn(i,j,nt_fsd+k-1,n,iblk) + end do + end do + if (worky > c0) workx = c2*workx / worky + floediam(i,j,iblk) = MAX(c2*floe_rad_c(1),workx) + endif + ! surface temperature Tsrf(i,j,iblk) = Tffresh + trcr(i,j,1,iblk) !Kelvin (original ???) @@ -1054,6 +1134,22 @@ subroutine ice_export( exportState, rc ) call state_setexport(exportState, 'Si_snowh' , input=tempfld , lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return + ! ------ + ! optional floe diameter and ice thickness to wave + ! ------ + + ! Sea ice thickness (m) + if (State_FldChk(exportState, 'Si_thick')) then + call state_setexport(exportState, 'Si_thick' , input=floethick , lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + ! Sea ice floe diameter (m) + if (State_FldChk(exportState, 'Si_floediam')) then + call state_setexport(exportState, 'Si_floediam' , input=floediam , lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + ! ------ ! ice/atm fluxes computed by ice ! ------ diff --git a/cicecore/drivers/nuopc/cmeps/ice_mesh_mod.F90 b/cicecore/drivers/nuopc/cmeps/ice_mesh_mod.F90 index 56287feb1..0b1b9349c 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_mesh_mod.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_mesh_mod.F90 @@ -202,7 +202,7 @@ subroutine ice_mesh_set_distgrid(localpet, npes, distgrid, rc) deallocate(gindex) end subroutine ice_mesh_set_distgrid - + !======================================================================= subroutine ice_mesh_setmask_from_maskfile(ice_maskfile, ice_mesh, rc) @@ -429,7 +429,7 @@ subroutine ice_mesh_create_scolumn(scol_lon, scol_lat, ice_mesh, rc) ! Allocate module variable ocn_gridcell_frac allocate(ocn_gridcell_frac(nx_block,ny_block,max_blocks)) ocn_gridcell_frac(:,:,:) = scol_frac - + end subroutine ice_mesh_create_scolumn !=============================================================================== diff --git a/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 b/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 index dc40177d8..84973e9dd 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 @@ -175,7 +175,7 @@ subroutine ice_prescribed_init(clock, mesh, rc) end do write(nu_diag,*) ' ' endif - + ! initialize sdat call shr_strdata_init_from_inline(sdat, & my_task = my_task, & diff --git a/cicecore/drivers/nuopc/dmi/CICE.F90 b/cicecore/drivers/nuopc/dmi/CICE.F90 index d6a28c3ba..79dd06fca 100644 --- a/cicecore/drivers/nuopc/dmi/CICE.F90 +++ b/cicecore/drivers/nuopc/dmi/CICE.F90 @@ -1,21 +1,21 @@ !======================================================================= ! Copyright (c) 2022, Triad National Security, LLC ! All rights reserved. -! +! ! Copyright 2022. Triad National Security, LLC. This software was -! produced under U.S. Government contract DE-AC52-06NA25396 for Los +! produced under U.S. Government contract DE-AC52-06NA25396 for Los ! Alamos National Laboratory (LANL), which is operated by Triad -! National Security, LLC for the U.S. Department of Energy. The U.S. -! Government has rights to use, reproduce, and distribute this software. -! NEITHER THE GOVERNMENT NOR TRIAD NATIONAL SECURITY, LLC MAKES ANY +! National Security, LLC for the U.S. Department of Energy. The U.S. +! 
Government has rights to use, reproduce, and distribute this software. +! NEITHER THE GOVERNMENT NOR TRIAD NATIONAL SECURITY, LLC MAKES ANY ! WARRANTY, EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF -! THIS SOFTWARE. If software is modified to produce derivative works, -! such modified software should be clearly marked, so as not to confuse +! THIS SOFTWARE. If software is modified to produce derivative works, +! such modified software should be clearly marked, so as not to confuse ! it with the version available from LANL. ! ! The full license and distribution policy are available from ! https://github.com/CICE-Consortium -! +! !======================================================================= ! ! Main driver routine for CICE. Initializes and steps through the model. @@ -49,7 +49,7 @@ program icemodel call CICE_Run !----------------------------------------------------------------- - ! Finalize CICE + ! Finalize CICE !----------------------------------------------------------------- call CICE_Finalize diff --git a/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 b/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 index 82f0ff0e8..78d462d4c 100644 --- a/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 @@ -196,11 +196,11 @@ subroutine cice_init(mpi_comm) if (icepack_warnings_aborted()) call abort_ice(trim(subname), & file=__FILE__,line= __LINE__) - if (tr_aero .or. tr_zaero) call faero_optics !initialize aerosol optical + if (tr_aero .or. tr_zaero) call faero_optics !initialize aerosol optical !property tables - ! Initialize shortwave components using swdn from previous timestep - ! if restarting. These components will be scaled to current forcing + ! Initialize shortwave components using swdn from previous timestep + ! if restarting. These components will be scaled to current forcing ! in prep_radiation. if (trim(runtype) == 'continue' .or. restart) & call init_shortwave ! initialize radiative transfer @@ -252,7 +252,7 @@ subroutine cice_init(mpi_comm) call init_flux_atm ! initialize atmosphere fluxes sent to coupler call init_flux_ocn ! initialize ocean fluxes sent to coupler - if (write_ic) call accum_hist(dt) ! write initial conditions + if (write_ic) call accum_hist(dt) ! write initial conditions end subroutine cice_init @@ -271,11 +271,17 @@ subroutine init_restart use ice_grid, only: tmask use ice_init, only: ice_ic use ice_init_column, only: init_age, init_FY, init_lvl, init_snowtracers, & +#ifdef UNDEPRECATE_CESMPONDS init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & +#else + init_meltponds_lvl, init_meltponds_topo, & +#endif init_isotope, init_aerosol, init_hbrine, init_bgc, init_fsd use ice_restart_column, only: restart_age, read_restart_age, & restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & +#ifdef UNDEPRECATE_CESMPONDS restart_pond_cesm, read_restart_pond_cesm, & +#endif restart_pond_lvl, read_restart_pond_lvl, & restart_pond_topo, read_restart_pond_topo, & restart_snow, read_restart_snow, & @@ -292,7 +298,11 @@ subroutine init_restart i, j , & ! horizontal indices iblk ! 
block index logical(kind=log_kind) :: & +#ifdef UNDEPRECATE_CESMPONDS tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & +#else + tr_iage, tr_FY, tr_lvl, tr_pond_lvl, & +#endif tr_pond_topo, tr_snow, tr_fsd, tr_iso, tr_aero, tr_brine, & skl_bgc, z_tracers, solve_zsal integer(kind=int_kind) :: & @@ -312,7 +322,11 @@ subroutine init_restart call icepack_query_parameters(skl_bgc_out=skl_bgc, & z_tracers_out=z_tracers, solve_zsal_out=solve_zsal) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & +#ifdef UNDEPRECATE_CESMPONDS tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & +#else + tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & +#endif tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & tr_snow_out=tr_snow, tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & @@ -325,7 +339,7 @@ subroutine init_restart if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - if (trim(runtype) == 'continue') then + if (trim(runtype) == 'continue') then ! start from core restart file call restartfile() ! given by pointer in ice_in call calendar() ! update time parameters @@ -336,17 +350,17 @@ subroutine init_restart ! call restartfile_v4 (ice_ic) ! CICE v4.1 binary restart file !!! uncomment if EAP restart data exists ! if (kdyn == 2) call read_restart_eap - endif + endif ! tracers - ! ice age tracer - if (tr_iage) then + ! ice age tracer + if (tr_iage) then if (trim(runtype) == 'continue') & restart_age = .true. if (restart_age) then call read_restart_age else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_age(trcrn(:,:,nt_iage,:,iblk)) enddo ! iblk endif @@ -357,7 +371,7 @@ subroutine init_restart if (restart_FY) then call read_restart_FY else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_FY(trcrn(:,:,nt_FY,:,iblk)) enddo ! iblk endif @@ -368,12 +382,13 @@ subroutine init_restart if (restart_lvl) then call read_restart_lvl else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_lvl(iblk,trcrn(:,:,nt_alvl,:,iblk), & trcrn(:,:,nt_vlvl,:,iblk)) enddo ! iblk endif endif +#ifdef UNDEPRECATE_CESMPONDS ! CESM melt ponds if (tr_pond_cesm) then if (trim(runtype) == 'continue') & @@ -381,12 +396,13 @@ subroutine init_restart if (restart_pond_cesm) then call read_restart_pond_cesm else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_cesm(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk)) enddo ! iblk endif endif +#endif ! level-ice melt ponds if (tr_pond_lvl) then if (trim(runtype) == 'continue') & @@ -394,7 +410,7 @@ subroutine init_restart if (restart_pond_lvl) then call read_restart_pond_lvl else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_lvl(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk), & trcrn(:,:,nt_ipnd,:,iblk), & @@ -409,7 +425,7 @@ subroutine init_restart if (restart_pond_topo) then call read_restart_pond_topo else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_topo(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk), & trcrn(:,:,nt_ipnd,:,iblk)) @@ -460,7 +476,7 @@ subroutine init_restart if (restart_aero) then call read_restart_aero else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_aerosol(trcrn(:,:,nt_aero:nt_aero+4*n_aero-1,:,iblk)) enddo ! iblk endif ! .not. 
restart_aero diff --git a/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 b/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 index 1aaee77f4..6e799723e 100644 --- a/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 @@ -7,7 +7,7 @@ ! William H. Lipscomb, LANL ! ! 2006 ECH: moved exit timeLoop to prevent execution of unnecessary timestep -! 2006 ECH: Streamlined for efficiency +! 2006 ECH: Streamlined for efficiency ! 2006 ECH: Converted to free source form (F90) ! 2007 BPB: Modified Delta-Eddington shortwave interface ! 2008 ECH: moved ESMF code to its own driver @@ -131,7 +131,7 @@ subroutine CICE_Run(stop_now_cpl) ! end of timestep loop !-------------------------------------------------------------------- - call ice_timer_stop(timer_step) ! end timestepping loop timer + call ice_timer_stop(timer_step) ! end timestepping loop timer end subroutine CICE_Run @@ -157,7 +157,11 @@ subroutine ice_step use ice_history_bgc, only: init_history_bgc use ice_restart, only: final_restart use ice_restart_column, only: write_restart_age, write_restart_FY, & +#ifdef UNDEPRECATE_CESMPONDS write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & +#else + write_restart_lvl, write_restart_pond_lvl, & +#endif write_restart_pond_topo, write_restart_aero, write_restart_fsd, & write_restart_iso, write_restart_bgc, write_restart_hbrine, & write_restart_snow @@ -171,7 +175,7 @@ subroutine ice_step timer_hist, timer_readwrite integer (kind=int_kind) :: & - iblk , & ! block index + iblk , & ! block index k , & ! dynamics supercycling index ktherm ! thermodynamics is off when ktherm = -1 @@ -180,7 +184,11 @@ subroutine ice_step logical (kind=log_kind) :: & tr_iage, tr_FY, tr_lvl, tr_fsd, tr_snow, & +#ifdef UNDEPRECATE_CESMPONDS tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & +#else + tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & +#endif calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec character(len=*), parameter :: subname = '(ice_step)' @@ -198,7 +206,11 @@ subroutine ice_step solve_zsal_out=solve_zsal, z_tracers_out=z_tracers, ktherm_out=ktherm, & wave_spec_out=wave_spec) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & +#ifdef UNDEPRECATE_CESMPONDS tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & +#else + tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & +#endif tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & tr_iso_out=tr_iso, tr_fsd_out=tr_fsd, tr_snow_out=tr_snow) call icepack_warnings_flush(nu_diag) @@ -245,7 +257,7 @@ subroutine ice_step !----------------------------------------------------------------- ! thermodynamics and biogeochemistry !----------------------------------------------------------------- - + call step_therm1 (dt, iblk) ! vertical thermodynamics if (debug_model) then @@ -400,7 +412,9 @@ subroutine ice_step if (tr_iage) call write_restart_age if (tr_FY) call write_restart_FY if (tr_lvl) call write_restart_lvl +#ifdef UNDEPRECATE_CESMPONDS if (tr_pond_cesm) call write_restart_pond_cesm +#endif if (tr_pond_lvl) call write_restart_pond_lvl if (tr_pond_topo) call write_restart_pond_topo if (tr_snow) call write_restart_snow @@ -408,7 +422,7 @@ subroutine ice_step if (tr_iso) call write_restart_iso if (tr_aero) call write_restart_aero if (solve_zsal .or. skl_bgc .or. 
z_tracers) & - call write_restart_bgc + call write_restart_bgc if (tr_brine) call write_restart_hbrine if (kdyn == 2) call write_restart_eap call final_restart @@ -417,7 +431,7 @@ subroutine ice_step call ice_timer_stop(timer_readwrite) ! reading/writing end subroutine ice_step - + !======================================================================= ! ! Prepare for coupling @@ -452,12 +466,12 @@ subroutine coupling_prep (iblk) use ice_step_mod, only: ocean_mixed_layer use ice_timers, only: timer_couple, ice_timer_start, ice_timer_stop - integer (kind=int_kind), intent(in) :: & - iblk ! block index + integer (kind=int_kind), intent(in) :: & + iblk ! block index ! local variables - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & ilo,ihi,jlo,jhi, & ! beginning and end of physical domain n , & ! thickness category index i,j , & ! horizontal indices @@ -593,8 +607,8 @@ subroutine coupling_prep (iblk) fsalt_ai (i,j,iblk) = fsalt (i,j,iblk) fhocn_ai (i,j,iblk) = fhocn (i,j,iblk) fswthru_ai(i,j,iblk) = fswthru(i,j,iblk) - fzsal_ai (i,j,iblk) = fzsal (i,j,iblk) - fzsal_g_ai(i,j,iblk) = fzsal_g(i,j,iblk) + fzsal_ai (i,j,iblk) = fzsal (i,j,iblk) + fzsal_g_ai(i,j,iblk) = fzsal_g(i,j,iblk) if (nbtrcr > 0) then do k = 1, nbtrcr @@ -615,7 +629,7 @@ subroutine coupling_prep (iblk) enddo !----------------------------------------------------------------- - ! Divide fluxes by ice area + ! Divide fluxes by ice area ! - the CESM coupler assumes fluxes are per unit ice area ! - also needed for global budget in diagnostics !----------------------------------------------------------------- @@ -645,22 +659,22 @@ subroutine coupling_prep (iblk) Qref_iso =Qref_iso (:,:,:,iblk), & fiso_evap=fiso_evap(:,:,:,iblk), & fiso_ocn =fiso_ocn (:,:,:,iblk)) - + #ifdef CICE_IN_NEMO !echmod - comment this out for efficiency, if .not. calc_Tsfc if (.not. calc_Tsfc) then !--------------------------------------------------------------- - ! If surface fluxes were provided, conserve these fluxes at ice - ! free points by passing to ocean. + ! If surface fluxes were provided, conserve these fluxes at ice + ! free points by passing to ocean. !--------------------------------------------------------------- - call sfcflux_to_ocn & + call sfcflux_to_ocn & (nx_block, ny_block, & tmask (:,:,iblk), aice_init(:,:,iblk), & fsurfn_f (:,:,:,iblk), flatn_f(:,:,:,iblk), & fresh (:,:,iblk), fhocn (:,:,iblk)) - endif + endif !echmod #endif call ice_timer_stop(timer_couple,iblk) ! atm/ocn coupling @@ -672,10 +686,10 @@ end subroutine coupling_prep !======================================================================= ! ! If surface heat fluxes are provided to CICE instead of CICE calculating -! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can +! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can ! be provided at points which do not have ice. (This is could be due to ! the heat fluxes being calculated on a lower resolution grid or the -! heat fluxes not recalculated at every CICE timestep.) At ice free points, +! heat fluxes not recalculated at every CICE timestep.) At ice free points, ! conserve energy and water by passing these fluxes to the ocean. ! ! author: A. McLaren, Met Office @@ -708,7 +722,7 @@ subroutine sfcflux_to_ocn(nx_block, ny_block, & ! local variables integer (kind=int_kind) :: & i, j, n ! horizontal indices - + real (kind=dbl_kind) :: & puny, & ! Lsub, & ! 
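The UNDEPRECATE_CESMPONDS guards introduced above recur at four kinds of sites in every driver: the use-lists, the logical declarations, the icepack_query_tracer_flags calls, and the restart writes. A condensed, illustrative sketch of the pattern in isolation (not part of the patch; the wrapper subroutine name is invented, the guarded names are the ones the patch uses):

      subroutine write_pond_restarts()
         ! sketch: CESM pond symbols compile in only when UNDEPRECATE_CESMPONDS is defined
         use ice_kinds_mod, only: log_kind
         use icepack_intfc, only: icepack_query_tracer_flags
         use ice_restart_column, only: &
#ifdef UNDEPRECATE_CESMPONDS
              write_restart_pond_cesm, &
#endif
              write_restart_pond_lvl, write_restart_pond_topo
         implicit none
         logical (kind=log_kind) :: tr_pond_lvl, tr_pond_topo
#ifdef UNDEPRECATE_CESMPONDS
         logical (kind=log_kind) :: tr_pond_cesm
#endif
         ! the cpp guard sits between continuation lines, exactly as in the drivers
         call icepack_query_tracer_flags( &
#ifdef UNDEPRECATE_CESMPONDS
              tr_pond_cesm_out=tr_pond_cesm, &
#endif
              tr_pond_lvl_out=tr_pond_lvl, tr_pond_topo_out=tr_pond_topo)
#ifdef UNDEPRECATE_CESMPONDS
         if (tr_pond_cesm) call write_restart_pond_cesm
#endif
         if (tr_pond_lvl ) call write_restart_pond_lvl
         if (tr_pond_topo) call write_restart_pond_topo
      end subroutine write_pond_restarts
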
diff --git a/cicecore/drivers/nuopc/dmi/cice_cap.info b/cicecore/drivers/nuopc/dmi/cice_cap.info index 202207c38..0ec1dea5a 100644 --- a/cicecore/drivers/nuopc/dmi/cice_cap.info +++ b/cicecore/drivers/nuopc/dmi/cice_cap.info @@ -22,7 +22,7 @@ module cice_cap use ice_state use CICE_RunMod use CICE_InitMod - use CICE_FinalMod + use CICE_FinalMod !end cice specific use ESMF use NUOPC @@ -34,11 +34,11 @@ module cice_cap model_label_Finalize => label_Finalize implicit none - + private - + public SetServices - + ! type cice_internalstate_type ! end type @@ -167,7 +167,7 @@ module cice_cap call ESMF_LogWrite(info, ESMF_LOGMSG_INFO, rc=dbrc) end subroutine - + !----------------------------------------------------------------------------- subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) @@ -196,7 +196,7 @@ module cice_cap integer :: lbnd(2),ubnd(2) type(block) :: this_block type(ESMF_DELayout) :: delayout - real(ESMF_KIND_R8), pointer :: tarray(:,:) + real(ESMF_KIND_R8), pointer :: tarray(:,:) real(ESMF_KIND_R8), pointer :: coordXcenter(:,:) real(ESMF_KIND_R8), pointer :: coordYcenter(:,:) real(ESMF_KIND_R8), pointer :: coordXcorner(:,:) @@ -215,7 +215,7 @@ module cice_cap ! created can wrap on the data pointers in internal part of CICE write(tmpstr,'(a,2i8)') subname//' ice nx,ny = ',nx_global,ny_global call ESMF_LogWrite(trim(tmpstr), ESMF_LOGMSG_INFO, rc=dbrc) - + ! distgrid = ESMF_DistGridCreate(minIndex=(/1,1/), maxIndex=(/nx_global,ny_global/), & ! regDecomp=(/2,2/), rc=rc) @@ -307,9 +307,9 @@ module cice_cap rc = rc) if (ESMF_LogFoundError(rcToCheck=rc, msg=ESMF_LOGERR_PASSTHRU, line=__LINE__, file=__FILE__)) return - call ESMF_GridAddCoord(gridIn, staggerLoc=ESMF_STAGGERLOC_CENTER, rc=rc) + call ESMF_GridAddCoord(gridIn, staggerLoc=ESMF_STAGGERLOC_CENTER, rc=rc) if (ESMF_LogFoundError(rcToCheck=rc, msg=ESMF_LOGERR_PASSTHRU, line=__LINE__, file=__FILE__)) return - call ESMF_GridAddCoord(gridIn, staggerLoc=ESMF_STAGGERLOC_CORNER, rc=rc) + call ESMF_GridAddCoord(gridIn, staggerLoc=ESMF_STAGGERLOC_CORNER, rc=rc) if (ESMF_LogFoundError(rcToCheck=rc, msg=ESMF_LOGERR_PASSTHRU, line=__LINE__, file=__FILE__)) return call ESMF_GridAddItem(gridIn, itemFlag=ESMF_GRIDITEM_MASK, itemTypeKind=ESMF_TYPEKIND_I4, & staggerLoc=ESMF_STAGGERLOC_CENTER, rc=rc) @@ -341,7 +341,7 @@ module cice_cap if (lbnd(1) /= 1 .or. lbnd(2) /= 1 .or. ubnd(1) /= ihi-ilo+1 .or. ubnd(2) /= jhi-jlo+1) then write(tmpstr,'(a,5i8)') subname//' iblk bnds ERROR ' call ESMF_LogWrite(trim(tmpstr), ESMF_LOGMSG_INFO, line=__LINE__, file=__FILE__, rc=dbrc) - write(tmpstr,'(a,4i8)') subname//' iblk center bnds 2',ihi, ilo, jhi,jlo + write(tmpstr,'(a,4i8)') subname//' iblk center bnds 2',ihi, ilo, jhi,jlo call ESMF_LogWrite(trim(tmpstr), ESMF_LOGMSG_INFO, line=__LINE__, file=__FILE__, rc=dbrc) rc = ESMF_FAILURE return @@ -460,14 +460,14 @@ module cice_cap call ESMF_LogWrite(info, ESMF_LOGMSG_INFO, line=__LINE__, file=__FILE__, rc=dbrc) end subroutine - + !----------------------------------------------------------------------------- ! CICE model uses same clock as parent gridComp subroutine SetClock(gcomp, rc) type(ESMF_GridComp) :: gcomp integer, intent(out) :: rc - + ! local variables type(ESMF_Clock) :: clock type(ESMF_TimeInterval) :: stabilityTimeStep, timestep @@ -493,10 +493,10 @@ module cice_cap line=__LINE__, & file=__FILE__)) & return ! bail out - + ! initialize internal clock ! 
here: parent Clock and stability timeStep determine actual model timeStep - call ESMF_TimeIntervalSet(stabilityTimeStep, s=nint(dt), rc=rc) + call ESMF_TimeIntervalSet(stabilityTimeStep, s=nint(dt), rc=rc) if (ESMF_LogFoundError(rcToCheck=rc, msg=ESMF_LOGERR_PASSTHRU, & line=__LINE__, & file=__FILE__)) & @@ -506,7 +506,7 @@ module cice_cap line=__LINE__, & file=__FILE__)) & return ! bail out - + end subroutine !----------------------------------------------------------------------------- @@ -514,7 +514,7 @@ module cice_cap subroutine ModelAdvance(gcomp, rc) type(ESMF_GridComp) :: gcomp integer, intent(out) :: rc - + ! local variables type(ESMF_Clock) :: clock type(ESMF_State) :: importState, exportState @@ -539,7 +539,7 @@ module cice_cap call ESMF_LogWrite(info, ESMF_LOGMSG_INFO, rc=dbrc) import_slice = import_slice + 1 export_slice = export_slice + 1 - + ! query the Component for its clock, importState and exportState call NUOPC_ModelGet(gcomp, modelClock=clock, importState=importState, & exportState=exportState, rc=rc) @@ -549,27 +549,27 @@ module cice_cap return ! bail out ! HERE THE MODEL ADVANCES: currTime -> currTime + timeStep - + ! Because of the way that the internal Clock was set in SetClock(), ! its timeStep is likely smaller than the parent timeStep. As a consequence - ! the time interval covered by a single parent timeStep will result in + ! the time interval covered by a single parent timeStep will result in ! multiple calls to the ModelAdvance() routine. Every time the currTime ! will come in by one internal timeStep advanced. This goes until the ! stopTime of the internal Clock has been reached. - + call ESMF_ClockPrint(clock, options="currTime", & preString="------>Advancing CICE from: ", rc=rc) if (ESMF_LogFoundError(rcToCheck=rc, msg=ESMF_LOGERR_PASSTHRU, & line=__LINE__, & file=__FILE__)) & return ! bail out - + call ESMF_ClockGet(clock, currTime=currTime, timeStep=timeStep, rc=rc) if (ESMF_LogFoundError(rcToCheck=rc, msg=ESMF_LOGERR_PASSTHRU, & line=__LINE__, & file=__FILE__)) & return ! bail out - + call ESMF_TimePrint(currTime + timeStep, & preString="--------------------------------> to: ", rc=rc) if (ESMF_LogFoundError(rcToCheck=rc, msg=ESMF_LOGERR_PASSTHRU, & @@ -611,14 +611,14 @@ module cice_cap write(info,*) subname,' --- run phase 4 called --- ',rc call ESMF_LogWrite(info, ESMF_LOGMSG_INFO, rc=dbrc) if(profile_memory) call ESMF_VMLogMemInfo("Leaving CICE Model_ADVANCE: ") - end subroutine + end subroutine subroutine cice_model_finalize(gcomp, rc) ! input arguments type(ESMF_GridComp) :: gcomp integer, intent(out) :: rc - + ! local variables type(ESMF_Clock) :: clock type(ESMF_Time) :: currTime @@ -707,7 +707,7 @@ module cice_cap integer :: npet, nx, ny, pet, elb(2), eub(2), clb(2), cub(2), tlb(2), tub(2) type(ESMF_VM) :: vm character(len=*),parameter :: subname='(cice_cap:CICE_RealizeFields)' - + rc = ESMF_SUCCESS do i = 1, nfields @@ -734,7 +734,7 @@ module cice_cap file=__FILE__)) & return ! bail out endif - + if (NUOPC_IsConnected(state, fieldName=field_defs(i)%shortname)) then call NUOPC_Realize(state, field=field, rc=rc) if (ESMF_LogFoundError(rcToCheck=rc, msg=ESMF_LOGERR_PASSTHRU, & @@ -814,15 +814,15 @@ module cice_cap call fld_list_add(fldsToIce_num, fldsToIce, "freezing_melting_potential" ,"1" , "will provide") call fld_list_add(fldsToIce_num, fldsToIce, "mixed_layer_depth" ,"m" , "will provide") ! 
fields for export - call fld_list_add(fldsFrIce_num, fldsFrIce, "sea_ice_fraction" ,"1" , "will provide") - call fld_list_add(fldsFrIce_num, fldsFrIce, "stress_on_ocn_ice_zonal" ,"1" , "will provide") - call fld_list_add(fldsFrIce_num, fldsFrIce, "stress_on_ocn_ice_merid" ,"1" , "will provide") - call fld_list_add(fldsFrIce_num, fldsFrIce, "sea_ice_temperature" ,"1" , "will provide") + call fld_list_add(fldsFrIce_num, fldsFrIce, "sea_ice_fraction" ,"1" , "will provide") + call fld_list_add(fldsFrIce_num, fldsFrIce, "stress_on_ocn_ice_zonal" ,"1" , "will provide") + call fld_list_add(fldsFrIce_num, fldsFrIce, "stress_on_ocn_ice_merid" ,"1" , "will provide") + call fld_list_add(fldsFrIce_num, fldsFrIce, "sea_ice_temperature" ,"1" , "will provide") ! call fld_list_add(fldsFrIce_num, fldsFrIce, "ice_mask" ,"1" , "will provide") - call fld_list_add(fldsFrIce_num, fldsFrIce, "mean_sw_pen_to_ocn" ,"1" , "will provide") + call fld_list_add(fldsFrIce_num, fldsFrIce, "mean_sw_pen_to_ocn" ,"1" , "will provide") call fld_list_add(fldsFrIce_num, fldsFrIce, "mean_fresh_water_to_ocean_rate" ,"1" , "will provide") - call fld_list_add(fldsFrIce_num, fldsFrIce, "mean_salt_rate" ,"1" , "will provide") - call fld_list_add(fldsFrIce_num, fldsFrIce, "net_heat_flx_to_ocn" ,"1" , "will provide") + call fld_list_add(fldsFrIce_num, fldsFrIce, "mean_salt_rate" ,"1" , "will provide") + call fld_list_add(fldsFrIce_num, fldsFrIce, "net_heat_flx_to_ocn" ,"1" , "will provide") call fld_list_add(fldsFrIce_num, fldsFrIce, "mean_ice_volume" ,"1" , "will provide") call fld_list_add(fldsFrIce_num, fldsFrIce, "mean_snow_volume" ,"1" , "will provide") @@ -885,9 +885,9 @@ module cice_cap real(kind=ESMF_KIND_R8), pointer :: dataPtr_ocncm(:,:,:) real(kind=ESMF_KIND_R8), pointer :: dataPtr_fmpot(:,:,:) real(kind=ESMF_KIND_R8), pointer :: dataPtr_mld(:,:,:) - integer :: ilo,ihi,jlo,jhi + integer :: ilo,ihi,jlo,jhi integer :: i,j,iblk,n,i1,i2,j1,j2 - real(kind=ESMF_KIND_R8) :: ue, vn, AngT_s + real(kind=ESMF_KIND_R8) :: ue, vn, AngT_s type(block) :: this_block character(len=*),parameter :: subname='(cice_cap:CICE_Import)' @@ -921,7 +921,7 @@ module cice_cap j1 = j - jlo + 1 sss (i,j,iblk) = dataPtr_sss (i1,j1,iblk) ! sea surface salinity (maybe for mushy layer) sst (i,j,iblk) = dataPtr_sst (i1,j1,iblk) - Tffresh ! sea surface temp (may not be needed?) - + frzmlt (i,j,iblk) = dataPtr_fmpot (i1,j1,iblk) ue = dataPtr_ocncz (i1,j1,iblk) vn = dataPtr_ocncm (i1,j1,iblk) @@ -969,7 +969,7 @@ module cice_cap integer :: ilo,ihi,jlo,jhi integer :: i,j,iblk,n,i1,i2,j1,j2 real(kind=ESMF_KIND_R8) :: ui, vj, angT - + type(block) :: this_block character(len=*),parameter :: subname='(cice_cap:CICE_Export)' !TODO clean up fields @@ -1035,7 +1035,7 @@ module cice_cap ! call ESMF_LogWrite(trim(tmpstr), ESMF_LOGMSG_INFO, rc=dbrc) - + end subroutine end module cice_cap diff --git a/cicecore/drivers/standalone/cice/CICE.F90 b/cicecore/drivers/standalone/cice/CICE.F90 index d6a28c3ba..79dd06fca 100644 --- a/cicecore/drivers/standalone/cice/CICE.F90 +++ b/cicecore/drivers/standalone/cice/CICE.F90 @@ -1,21 +1,21 @@ !======================================================================= ! Copyright (c) 2022, Triad National Security, LLC ! All rights reserved. -! +! ! Copyright 2022. Triad National Security, LLC. This software was -! produced under U.S. Government contract DE-AC52-06NA25396 for Los +! produced under U.S. Government contract DE-AC52-06NA25396 for Los ! Alamos National Laboratory (LANL), which is operated by Triad -! 
National Security, LLC for the U.S. Department of Energy. The U.S. -! Government has rights to use, reproduce, and distribute this software. -! NEITHER THE GOVERNMENT NOR TRIAD NATIONAL SECURITY, LLC MAKES ANY +! National Security, LLC for the U.S. Department of Energy. The U.S. +! Government has rights to use, reproduce, and distribute this software. +! NEITHER THE GOVERNMENT NOR TRIAD NATIONAL SECURITY, LLC MAKES ANY ! WARRANTY, EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF -! THIS SOFTWARE. If software is modified to produce derivative works, -! such modified software should be clearly marked, so as not to confuse +! THIS SOFTWARE. If software is modified to produce derivative works, +! such modified software should be clearly marked, so as not to confuse ! it with the version available from LANL. ! ! The full license and distribution policy are available from ! https://github.com/CICE-Consortium -! +! !======================================================================= ! ! Main driver routine for CICE. Initializes and steps through the model. @@ -49,7 +49,7 @@ program icemodel call CICE_Run !----------------------------------------------------------------- - ! Finalize CICE + ! Finalize CICE !----------------------------------------------------------------- call CICE_Finalize diff --git a/cicecore/drivers/standalone/cice/CICE_FinalMod.F90 b/cicecore/drivers/standalone/cice/CICE_FinalMod.F90 index 28811c3cd..a8b074883 100644 --- a/cicecore/drivers/standalone/cice/CICE_FinalMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_FinalMod.F90 @@ -30,7 +30,6 @@ module CICE_FinalMod subroutine CICE_Finalize - use ice_restart_shared, only: runid use ice_timers, only: ice_timer_stop, ice_timer_print_all, timer_total, & timer_stats diff --git a/cicecore/drivers/standalone/cice/CICE_InitMod.F90 b/cicecore/drivers/standalone/cice/CICE_InitMod.F90 index 0130d2588..07a151a01 100644 --- a/cicecore/drivers/standalone/cice/CICE_InitMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_InitMod.F90 @@ -66,7 +66,7 @@ subroutine cice_init floe_binwidth, c_fsd_range use ice_state, only: alloc_state use ice_flux_bgc, only: alloc_flux_bgc - use ice_calendar, only: dt, dt_dyn, istep, istep1, write_ic, & + use ice_calendar, only: dt, dt_dyn, write_ic, & init_calendar, advance_timestep, calc_timesteps use ice_communicate, only: init_communicate, my_task, master_task use ice_diagnostics, only: init_diags @@ -190,11 +190,11 @@ subroutine cice_init if (icepack_warnings_aborted()) call abort_ice(trim(subname), & file=__FILE__,line= __LINE__) - if (tr_aero .or. tr_zaero) call faero_optics !initialize aerosol optical + if (tr_aero .or. tr_zaero) call faero_optics !initialize aerosol optical !property tables - ! Initialize shortwave components using swdn from previous timestep - ! if restarting. These components will be scaled to current forcing + ! Initialize shortwave components using swdn from previous timestep + ! if restarting. These components will be scaled to current forcing ! in prep_radiation. if (trim(runtype) == 'continue' .or. restart) & call init_shortwave ! initialize radiative transfer @@ -243,7 +243,7 @@ subroutine cice_init call init_flux_atm ! initialize atmosphere fluxes sent to coupler call init_flux_ocn ! initialize ocean fluxes sent to coupler - if (write_ic) call accum_hist(dt) ! write initial conditions + if (write_ic) call accum_hist(dt) ! 
write initial conditions if (my_task == master_task) then call ice_memusage_print(nu_diag,subname//':end') @@ -266,11 +266,17 @@ subroutine init_restart use ice_grid, only: tmask use ice_init, only: ice_ic use ice_init_column, only: init_age, init_FY, init_lvl, init_snowtracers, & +#ifdef UNDEPRECATE_CESMPONDS init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & +#else + init_meltponds_lvl, init_meltponds_topo, & +#endif init_isotope, init_aerosol, init_hbrine, init_bgc, init_fsd use ice_restart_column, only: restart_age, read_restart_age, & restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & +#ifdef UNDEPRECATE_CESMPONDS restart_pond_cesm, read_restart_pond_cesm, & +#endif restart_pond_lvl, read_restart_pond_lvl, & restart_pond_topo, read_restart_pond_topo, & restart_snow, read_restart_snow, & @@ -287,7 +293,11 @@ subroutine init_restart i, j , & ! horizontal indices iblk ! block index logical(kind=log_kind) :: & +#ifdef UNDEPRECATE_CESMPONDS tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & +#else + tr_iage, tr_FY, tr_lvl, tr_pond_lvl, & +#endif tr_pond_topo, tr_snow, tr_fsd, tr_iso, tr_aero, tr_brine, & skl_bgc, z_tracers, solve_zsal integer(kind=int_kind) :: & @@ -307,7 +317,11 @@ subroutine init_restart call icepack_query_parameters(skl_bgc_out=skl_bgc, & z_tracers_out=z_tracers, solve_zsal_out=solve_zsal) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & +#ifdef UNDEPRECATE_CESMPONDS tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & +#else + tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & +#endif tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & tr_snow_out=tr_snow, tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & @@ -320,7 +334,7 @@ subroutine init_restart if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - if (trim(runtype) == 'continue') then + if (trim(runtype) == 'continue') then ! start from core restart file call restartfile() ! given by pointer in ice_in call calendar() ! update time parameters @@ -331,17 +345,17 @@ subroutine init_restart ! call restartfile_v4 (ice_ic) ! CICE v4.1 binary restart file !!! uncomment if EAP restart data exists ! if (kdyn == 2) call read_restart_eap - endif + endif ! tracers - ! ice age tracer - if (tr_iage) then + ! ice age tracer + if (tr_iage) then if (trim(runtype) == 'continue') & restart_age = .true. if (restart_age) then call read_restart_age else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_age(trcrn(:,:,nt_iage,:,iblk)) enddo ! iblk endif @@ -352,7 +366,7 @@ subroutine init_restart if (restart_FY) then call read_restart_FY else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_FY(trcrn(:,:,nt_FY,:,iblk)) enddo ! iblk endif @@ -363,12 +377,13 @@ subroutine init_restart if (restart_lvl) then call read_restart_lvl else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_lvl(iblk,trcrn(:,:,nt_alvl,:,iblk), & trcrn(:,:,nt_vlvl,:,iblk)) enddo ! iblk endif endif +#ifdef UNDEPRECATE_CESMPONDS ! CESM melt ponds if (tr_pond_cesm) then if (trim(runtype) == 'continue') & @@ -376,12 +391,13 @@ subroutine init_restart if (restart_pond_cesm) then call read_restart_pond_cesm else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_cesm(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk)) enddo ! iblk endif endif +#endif ! 
level-ice melt ponds if (tr_pond_lvl) then if (trim(runtype) == 'continue') & @@ -389,7 +405,7 @@ subroutine init_restart if (restart_pond_lvl) then call read_restart_pond_lvl else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_lvl(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk), & trcrn(:,:,nt_ipnd,:,iblk), & @@ -404,7 +420,7 @@ subroutine init_restart if (restart_pond_topo) then call read_restart_pond_topo else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_topo(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk), & trcrn(:,:,nt_ipnd,:,iblk)) @@ -455,7 +471,7 @@ subroutine init_restart if (restart_aero) then call read_restart_aero else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_aerosol(trcrn(:,:,nt_aero:nt_aero+4*n_aero-1,:,iblk)) enddo ! iblk endif ! .not. restart_aero diff --git a/cicecore/drivers/standalone/cice/CICE_RunMod.F90 b/cicecore/drivers/standalone/cice/CICE_RunMod.F90 index 78e3b5259..00c7921d1 100644 --- a/cicecore/drivers/standalone/cice/CICE_RunMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_RunMod.F90 @@ -7,7 +7,7 @@ ! William H. Lipscomb, LANL ! ! 2006 ECH: moved exit timeLoop to prevent execution of unnecessary timestep -! 2006 ECH: Streamlined for efficiency +! 2006 ECH: Streamlined for efficiency ! 2006 ECH: Converted to free source form (F90) ! 2007 BPB: Modified Delta-Eddington shortwave interface ! 2008 ECH: moved ESMF code to its own driver @@ -45,7 +45,7 @@ module CICE_RunMod subroutine CICE_Run - use ice_calendar, only: istep, istep1, dt, stop_now, advance_timestep + use ice_calendar, only: dt, stop_now, advance_timestep use ice_forcing, only: get_forcing_atmo, get_forcing_ocn, & get_wave_spec use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & @@ -125,7 +125,7 @@ subroutine CICE_Run ! end of timestep loop !-------------------------------------------------------------------- - call ice_timer_stop(timer_step) ! end timestepping loop timer + call ice_timer_stop(timer_step) ! end timestepping loop timer end subroutine CICE_Run @@ -151,7 +151,11 @@ subroutine ice_step use ice_history_bgc, only: init_history_bgc use ice_restart, only: final_restart use ice_restart_column, only: write_restart_age, write_restart_FY, & +#ifdef UNDEPRECATE_CESMPONDS write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & +#else + write_restart_lvl, write_restart_pond_lvl, & +#endif write_restart_pond_topo, write_restart_aero, write_restart_fsd, & write_restart_iso, write_restart_bgc, write_restart_hbrine, & write_restart_snow @@ -165,7 +169,7 @@ subroutine ice_step timer_hist, timer_readwrite integer (kind=int_kind) :: & - iblk , & ! block index + iblk , & ! block index k , & ! dynamics supercycling index ktherm ! 
thermodynamics is off when ktherm = -1 @@ -174,7 +178,11 @@ subroutine ice_step logical (kind=log_kind) :: & tr_iage, tr_FY, tr_lvl, tr_fsd, tr_snow, & +#ifdef UNDEPRECATE_CESMPONDS tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & +#else + tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & +#endif calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec character(len=*), parameter :: subname = '(ice_step)' @@ -192,7 +200,11 @@ subroutine ice_step solve_zsal_out=solve_zsal, z_tracers_out=z_tracers, ktherm_out=ktherm, & wave_spec_out=wave_spec) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & +#ifdef UNDEPRECATE_CESMPONDS tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & +#else + tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & +#endif tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & tr_iso_out=tr_iso, tr_fsd_out=tr_fsd, tr_snow_out=tr_snow) call icepack_warnings_flush(nu_diag) @@ -396,7 +408,9 @@ subroutine ice_step if (tr_iage) call write_restart_age if (tr_FY) call write_restart_FY if (tr_lvl) call write_restart_lvl +#ifdef UNDEPRECATE_CESMPONDS if (tr_pond_cesm) call write_restart_pond_cesm +#endif if (tr_pond_lvl) call write_restart_pond_lvl if (tr_pond_topo) call write_restart_pond_topo if (tr_snow) call write_restart_snow @@ -404,7 +418,7 @@ subroutine ice_step if (tr_iso) call write_restart_iso if (tr_aero) call write_restart_aero if (solve_zsal .or. skl_bgc .or. z_tracers) & - call write_restart_bgc + call write_restart_bgc if (tr_brine) call write_restart_hbrine if (kdyn == 2) call write_restart_eap call final_restart @@ -412,7 +426,7 @@ subroutine ice_step call ice_timer_stop(timer_readwrite) ! reading/writing end subroutine ice_step - + !======================================================================= ! ! Prepare for coupling @@ -447,12 +461,12 @@ subroutine coupling_prep (iblk) use ice_step_mod, only: ocean_mixed_layer use ice_timers, only: timer_couple, ice_timer_start, ice_timer_stop - integer (kind=int_kind), intent(in) :: & - iblk ! block index + integer (kind=int_kind), intent(in) :: & + iblk ! block index ! local variables - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & ilo,ihi,jlo,jhi, & ! beginning and end of physical domain n , & ! thickness category index i,j , & ! horizontal indices @@ -588,8 +602,8 @@ subroutine coupling_prep (iblk) fsalt_ai (i,j,iblk) = fsalt (i,j,iblk) fhocn_ai (i,j,iblk) = fhocn (i,j,iblk) fswthru_ai(i,j,iblk) = fswthru(i,j,iblk) - fzsal_ai (i,j,iblk) = fzsal (i,j,iblk) - fzsal_g_ai(i,j,iblk) = fzsal_g(i,j,iblk) + fzsal_ai (i,j,iblk) = fzsal (i,j,iblk) + fzsal_g_ai(i,j,iblk) = fzsal_g(i,j,iblk) if (nbtrcr > 0) then do k = 1, nbtrcr @@ -610,7 +624,7 @@ subroutine coupling_prep (iblk) enddo !----------------------------------------------------------------- - ! Divide fluxes by ice area + ! Divide fluxes by ice area ! - the CESM coupler assumes fluxes are per unit ice area ! - also needed for global budget in diagnostics !----------------------------------------------------------------- @@ -640,22 +654,22 @@ subroutine coupling_prep (iblk) Qref_iso =Qref_iso (:,:,:,iblk), & fiso_evap=fiso_evap(:,:,:,iblk), & fiso_ocn =fiso_ocn (:,:,:,iblk)) - + #ifdef CICE_IN_NEMO !echmod - comment this out for efficiency, if .not. calc_Tsfc if (.not. calc_Tsfc) then !--------------------------------------------------------------- - ! If surface fluxes were provided, conserve these fluxes at ice - ! free points by passing to ocean. + ! 
If surface fluxes were provided, conserve these fluxes at ice
+ ! free points by passing to ocean.
!---------------------------------------------------------------
- call sfcflux_to_ocn &
+ call sfcflux_to_ocn &
(nx_block, ny_block, &
tmask (:,:,iblk), aice_init(:,:,iblk), &
fsurfn_f (:,:,:,iblk), flatn_f(:,:,:,iblk), &
fresh (:,:,iblk), fhocn (:,:,iblk))
- endif
+ endif
!echmod
#endif
call ice_timer_stop(timer_couple,iblk) ! atm/ocn coupling
@@ -667,10 +681,10 @@ end subroutine coupling_prep
!=======================================================================
!
! If surface heat fluxes are provided to CICE instead of CICE calculating
-! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can
+! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can
! be provided at points which do not have ice. (This could be due to
! the heat fluxes being calculated on a lower resolution grid or the
-! heat fluxes not recalculated at every CICE timestep.) At ice free points,
+! heat fluxes not being recalculated at every CICE timestep.) At ice free points,
! conserve energy and water by passing these fluxes to the ocean.
!
! author: A. McLaren, Met Office
@@ -703,7 +717,7 @@ subroutine sfcflux_to_ocn(nx_block, ny_block, &
! local variables
integer (kind=int_kind) :: &
i, j, n ! horizontal indices
-
+
real (kind=dbl_kind) :: &
puny, & !
Lsub, & !
diff --git a/cicecore/drivers/unittest/gridavgchk/CICE_InitMod.F90 b/cicecore/drivers/unittest/gridavgchk/CICE_InitMod.F90
index 60f71fa8a..ad355d783 100644
--- a/cicecore/drivers/unittest/gridavgchk/CICE_InitMod.F90
+++ b/cicecore/drivers/unittest/gridavgchk/CICE_InitMod.F90
@@ -181,11 +181,11 @@ subroutine cice_init
if (icepack_warnings_aborted()) call abort_ice(trim(subname), &
file=__FILE__,line= __LINE__)
- if (tr_aero .or. tr_zaero) call faero_optics !initialize aerosol optical
+ if (tr_aero .or. tr_zaero) call faero_optics !initialize aerosol optical
!property tables
- ! Initialize shortwave components using swdn from previous timestep
- ! if restarting. These components will be scaled to current forcing
+ ! Initialize shortwave components using swdn from previous timestep
+ ! if restarting. These components will be scaled to current forcing
! in prep_radiation.
if (trim(runtype) == 'continue' .or. restart) &
call init_shortwave ! initialize radiative transfer
@@ -222,7 +222,7 @@ subroutine cice_init
call init_flux_atm ! initialize atmosphere fluxes sent to coupler
call init_flux_ocn ! initialize ocean fluxes sent to coupler
- if (write_ic) call accum_hist(dt) ! write initial conditions
+ if (write_ic) call accum_hist(dt) ! write initial conditions
end subroutine cice_init
@@ -241,11 +241,17 @@ subroutine init_restart
use ice_grid, only: tmask
use ice_init, only: ice_ic
use ice_init_column, only: init_age, init_FY, init_lvl, &
+#ifdef UNDEPRECATE_CESMPONDS
init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, &
+#else
+ init_meltponds_lvl, init_meltponds_topo, &
+#endif
init_isotope, init_aerosol, init_hbrine, init_bgc, init_fsd
use ice_restart_column, only: restart_age, read_restart_age, &
restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, &
+#ifdef UNDEPRECATE_CESMPONDS
restart_pond_cesm, read_restart_pond_cesm, &
+#endif
restart_pond_lvl, read_restart_pond_lvl, &
restart_pond_topo, read_restart_pond_topo, &
restart_fsd, read_restart_fsd, &
@@ -261,7 +267,11 @@ subroutine init_restart
i, j , & ! horizontal indices
iblk !
block index logical(kind=log_kind) :: & +#ifdef UNDEPRECATE_CESMPONDS tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & +#else + tr_iage, tr_FY, tr_lvl, tr_pond_lvl, & +#endif tr_pond_topo, tr_fsd, tr_iso, tr_aero, tr_brine, & skl_bgc, z_tracers, solve_zsal integer(kind=int_kind) :: & @@ -280,7 +290,11 @@ subroutine init_restart call icepack_query_parameters(skl_bgc_out=skl_bgc, & z_tracers_out=z_tracers, solve_zsal_out=solve_zsal) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & +#ifdef UNDEPRECATE_CESMPONDS tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & +#else + tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & +#endif tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & @@ -291,7 +305,7 @@ subroutine init_restart if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - if (trim(runtype) == 'continue') then + if (trim(runtype) == 'continue') then ! start from core restart file call restartfile() ! given by pointer in ice_in call calendar() ! update time parameters @@ -302,17 +316,17 @@ subroutine init_restart ! call restartfile_v4 (ice_ic) ! CICE v4.1 binary restart file !!! uncomment if EAP restart data exists ! if (kdyn == 2) call read_restart_eap - endif + endif ! tracers - ! ice age tracer - if (tr_iage) then + ! ice age tracer + if (tr_iage) then if (trim(runtype) == 'continue') & restart_age = .true. if (restart_age) then call read_restart_age else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_age(trcrn(:,:,nt_iage,:,iblk)) enddo ! iblk endif @@ -323,7 +337,7 @@ subroutine init_restart if (restart_FY) then call read_restart_FY else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_FY(trcrn(:,:,nt_FY,:,iblk)) enddo ! iblk endif @@ -334,12 +348,13 @@ subroutine init_restart if (restart_lvl) then call read_restart_lvl else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_lvl(iblk,trcrn(:,:,nt_alvl,:,iblk), & trcrn(:,:,nt_vlvl,:,iblk)) enddo ! iblk endif endif +#ifdef UNDEPRECATE_CESMPONDS ! CESM melt ponds if (tr_pond_cesm) then if (trim(runtype) == 'continue') & @@ -347,12 +362,13 @@ subroutine init_restart if (restart_pond_cesm) then call read_restart_pond_cesm else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_cesm(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk)) enddo ! iblk endif endif +#endif ! level-ice melt ponds if (tr_pond_lvl) then if (trim(runtype) == 'continue') & @@ -360,7 +376,7 @@ subroutine init_restart if (restart_pond_lvl) then call read_restart_pond_lvl else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_lvl(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk), & trcrn(:,:,nt_ipnd,:,iblk), & @@ -375,7 +391,7 @@ subroutine init_restart if (restart_pond_topo) then call read_restart_pond_topo else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_topo(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk), & trcrn(:,:,nt_ipnd,:,iblk)) @@ -398,7 +414,7 @@ subroutine init_restart if (restart_iso) then call read_restart_iso else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_isotope(trcrn(:,:,nt_isosno:nt_isosno+n_iso-1,:,iblk), & trcrn(:,:,nt_isoice:nt_isoice+n_iso-1,:,iblk)) enddo ! 
iblk
@@ -410,7 +426,7 @@ subroutine init_restart
if (restart_aero) then
call read_restart_aero
else
- do iblk = 1, nblocks
+ do iblk = 1, nblocks
call init_aerosol(trcrn(:,:,nt_aero:nt_aero+4*n_aero-1,:,iblk))
enddo ! iblk
endif ! .not. restart_aero
diff --git a/cicecore/drivers/unittest/gridavgchk/gridavgchk.F90 b/cicecore/drivers/unittest/gridavgchk/gridavgchk.F90
index 5a4b3d54e..bd7ed3165 100644
--- a/cicecore/drivers/unittest/gridavgchk/gridavgchk.F90
+++ b/cicecore/drivers/unittest/gridavgchk/gridavgchk.F90
@@ -8,7 +8,7 @@ program gridavgchk
! There are lots of issues here
! areas (T, U, N, E) are not locally conservative, affect X2YF
! X2YF is unmasked which can create havoc in U2T type directions
- ! X2YS is masked but there can be no active cells to average (for instance,
+ ! X2YS is masked but there can be no active cells to average (for instance,
! single gridcell wide channels U2T where result is zero)
! land block elimination can lead to missing data on halo
! This test tries to deal with all these things....
@@ -36,7 +36,7 @@ program gridavgchk
integer(int_kind) :: i, j, n, ib, ie, jb, je, iblock
integer(int_kind) :: iglob, jglob
integer(int_kind) :: blockID, numBlocks
- type (block) :: this_block
+ type (block) :: this_block
real(dbl_kind) ,allocatable :: array1x(:,:,:), array1y(:,:,:) ! input
real(dbl_kind) ,allocatable :: arraysx(:,:,:), arraysy(:,:,:) ! extra input for NE2T, NE2U
diff --git a/cicecore/drivers/unittest/optargs/optargs.F90 b/cicecore/drivers/unittest/optargs/optargs.F90
new file mode 100644
index 000000000..4acf7ac9f
--- /dev/null
+++ b/cicecore/drivers/unittest/optargs/optargs.F90
@@ -0,0 +1,246 @@
+
+ program optargs
+
+ use optargs_subs, only: computeA, computeB, computeC, computeD
+ use optargs_subs, only: oa_error, oa_OK, oa_A, oa_B, oa_C, oa_D
+ use optargs_subs, only: oa_layer1, oa_count1
+
+ implicit none
+
+ real*8 :: Ai1, Ao
+ real*8 :: B
+ real*8 :: Ci1, Co
+ real*8 :: Di1, Di2, Do
+ integer :: ierr, ierrV
+
+ integer :: n
+ integer, parameter :: ntests = 100
+ integer :: iresult
+ real*8 :: result, resultV
+ real*8, parameter :: errtol = 1.0e-12
+
+ !----------------------
+
+ write(6,*) 'RunningUnitTest optargs'
+ write(6,*) ' '
+
+ iresult = 0
+ do n = 1,ntests
+
+ Ai1 = -99.; Ao = -99.
+ B = -99.
+ Ci1 = -99.; Co = -99.
+ Di1 = -99.; Di2 = -99.; Do = -99.
+
+ ierr = oa_error
+ result = -888.
+ resultV = -999.
+
+ computeA = .false.
+ computeB = .false.
+ computeC = .false.
+ computeD = .false.
+
+ select case (n)
+
+! fails to compile as it should
+! case(0)
+! ierrV = oa_OK
+! call oa_layer1()
+
+ ! test counts of present optional arguments at 2nd level
+ ! result should be number of arguments
+ case(1)
+ result = -777.; resultV = -777.
+ ierrV = 9
+ call oa_count1(Ai1=Ai1,Ao=Ao,B=B,Ci1=Ci1,Co=Co,Di1=Di1,Di2=Di2,Do=Do,ierr=ierr)
+ case(2)
+ result = -777.; resultV = -777.
+ ierrV = 9
+ call oa_count1(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,ierr)
+ case(3)
+ result = -777.; resultV = -777.
+ ierrV = 3
+ call oa_count1(Ci1=Ci1,Co=Co,ierr=ierr)
+ case(4)
+ result = -777.; resultV = -777.
+ ierrV = 5
+ call oa_count1(Ci1=Ci1,Co=Co,ierr=ierr,Ao=Ao,Di1=Di1)
+
+ ! test optional order
+ case(11)
+ result = -777.; resultV = -777.
+ ierrV = oa_OK
+ call oa_layer1(Ai1=Ai1,Ao=Ao,B=B,Ci1=Ci1,Co=Co,Di1=Di1,Di2=Di2,Do=Do,ierr=ierr)
+ case(12)
+ result = -777.; resultV = -777.
+ ierrV = oa_OK
+ call oa_layer1(Ci1=Ci1,Co=Co,ierr=ierr)
+ case(13)
+ result = -777.; resultV = -777.
+ ierrV = oa_OK
+ call oa_layer1(Ci1=Ci1,Co=Co,ierr=ierr,Ao=Ao,Di1=Di1)
+
+ !
test optional argument checking + case(21) + computeA = .true. + computeB = .true. + computeC = .true. + computeD = .true. + result = -777.; resultV = -777. + ierrV = oa_error + ! B missing + call oa_layer1(Ai1=Ai1,Ao=Ao,Ci1=Ci1,Co=Co,Di1=Di1,Di2=Di2,Do=Do,ierr=ierr) + case(22) + computeA = .true. + computeB = .true. + computeC = .true. + computeD = .true. + result = -777.; resultV = -777. + ierrV = oa_error + ! all optional missing + call oa_layer1(Ci1=Ci1,Co=Co,ierr=ierr) + case(23) + computeA = .true. + computeB = .true. + computeC = .true. + computeD = .true. + result = -777.; resultV = -777. + ierrV = oa_error + ! some optional missing + call oa_layer1(Ci1=Ci1,Co=Co,ierr=ierr,B=B,Ao=Ao,Di1=Di1) + case(24) + computeA = .true. + computeB = .true. + computeC = .true. + computeD = .true. + result = -777.; resultV = -777. + ierrV = oa_error + ! one optional missing + call oa_layer1(Ai1=Ai1,Ao=Ao,B=B,Ci1=Ci1,Co=Co,Di1=Di1,Do=Do,ierr=ierr) + + ! test computations individually + case(31) + computeA = .true. + ierrV = oa_A + Ai1 = 5. + resultV = 4. + call oa_layer1(Ai1=Ai1,Ao=Ao,B=B,Ci1=Ci1,Co=Co,Di1=Di1,Di2=Di2,Do=Do,ierr=ierr) + result = Ao + case(32) + computeB = .true. + ierrV = oa_B + B = 15. + resultV = 20. + call oa_layer1(ierr=ierr,Ai1=Ai1,Ao=Ao,B=B,Ci1=Ci1,Co=Co,Di1=Di1,Di2=Di2,Do=Do) + result = B + case(33) + computeC = .true. + ierrV = oa_C + Ci1 = 7. + resultV = 14. + call oa_layer1(B=B,Ci1=Ci1,Co=Co,Di1=Di1,Ai1=Ai1,Ao=Ao,Di2=Di2,Do=Do,ierr=ierr) + result = Co + case(34) + computeD = .true. + ierrV = oa_D + Di1 = 19; Di2=11. + resultV = 30. + call oa_layer1(Ai1=Ai1,Ao=Ao,Ci1=Ci1,Co=Co,Di1=Di1,Di2=Di2,Do=Do,B=B,ierr=ierr) + result = Do + + ! test computations individually + case(41) + computeA = .true. + computeC = .true. + ierrV = oa_A + oa_C + Ai1 = 6. + Ci1 = 8. + resultV = 21. + call oa_layer1(Ai1=Ai1,Ao=Ao,B=B,Ci1=Ci1,Co=Co,Di1=Di1,Di2=Di2,Do=Do,ierr=ierr) + result = Ao + Co + case(42) + computeB = .true. + computeC = .true. + ierrV = oa_B + oa_C + B = -20. + Ci1 = 2. + resultV = -11. + call oa_layer1(ierr=ierr,Ai1=Ai1,Ao=Ao,B=B,Ci1=Ci1,Co=Co,Di1=Di1,Di2=Di2,Do=Do) + result = B + Co + case(43) + computeB = .true. + computeD = .true. + ierrV = oa_B + oa_D + B = 4. + Di1 = 3; Di2=19. + resultV = 31. + call oa_layer1(B=B,Ci1=Ci1,Co=Co,Di1=Di1,Ai1=Ai1,Ao=Ao,Di2=Di2,Do=Do,ierr=ierr) + result = B + Do + case(44) + computeC = .true. + computeD = .true. + ierrV = oa_C + oa_D + Ci1 = 7. + Di1 = 6; Di2=7. + resultV = 27. + call oa_layer1(Ai1=Ai1,Ao=Ao,Ci1=Ci1,Co=Co,Di1=Di1,Di2=Di2,Do=Do,B=B,ierr=ierr) + result = Co + Do + case(45) + computeA = .true. + computeB = .true. + computeC = .true. + computeD = .true. + ierrV = oa_A + oa_B + oa_C + oa_D + Ai1 = 7. + B = 9. + Ci1 = 7. + Di1 = 12; Di2=3. + resultV = 49. + call oa_layer1(Ao=Ao,B=B,Co=Co,Do=Do,Ai1=Ai1,Ci1=Ci1,Di1=Di1,Di2=Di2,ierr=ierr) + result = Ao + B + Co + Do + case(46) + computeA = .true. + computeB = .true. + computeD = .true. + ierrV = oa_A + oa_B + oa_D + Ai1 = 10. + B = 11. + Di1 = 12; Di2=3. + resultV = 40. + call oa_layer1(Ao=Ao,B=B,Co=Co,Do=Do,Ai1=Ai1,Ci1=Ci1,Di1=Di1,Di2=Di2,ierr=ierr) + result = Ao + B + Do + + case DEFAULT + ierr = -1234 + + end select + + ! skip -1234 + if (ierr /= -1234) then + if (ierr == ierrV .and. abs(result-resultV) < errtol ) then + write(6,101) 'PASS','optarg test',n,ierr,ierrV,result,resultV,Ao,B,Co,Do +! write(6,101) 'PASS','optarg test',n,ierr,ierrV,result,resultV + else + write(6,101) 'FAIL','optarg test',n,ierr,ierrV,result,resultV,Ao,B,Co,Do +! 
write(6,101) 'FAIL','optarg test',n,ierr,ierrV,result,resultV + iresult = 1 + endif + endif + + enddo + + 101 format(1x,a,1x,a,1x,i2.2,2i6,3x,6g11.4) + + write(6,*) ' ' + write(6,*) 'optargs COMPLETED SUCCESSFULLY' + if (iresult == 1) then + write(6,*) 'optargs TEST FAILED' + else + write(6,*) 'optargs TEST COMPLETED SUCCESSFULLY' + endif + + !---------------------- + + end program + diff --git a/cicecore/drivers/unittest/optargs/optargs_subs.F90 b/cicecore/drivers/unittest/optargs/optargs_subs.F90 new file mode 100644 index 000000000..7469d6800 --- /dev/null +++ b/cicecore/drivers/unittest/optargs/optargs_subs.F90 @@ -0,0 +1,148 @@ + + module optargs_subs + + implicit none + private + + logical, public :: computeA = .false., & + computeB = .false., & + computeC = .false., & + computeD = .false. + + integer, public :: oa_error = -99, & + oa_OK = 0, & + oa_A = 1, & + oa_B = 2, & + oa_C = 4, & + oa_D = 8 + + public :: oa_layer1, oa_count1 + +!----------------------------------- +CONTAINS +!----------------------------------- + + subroutine oa_count1(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,ierr) + + real*8 , intent(in) , optional :: Ai1, Di1, Di2 + real*8 , intent(out) , optional :: Ao, Do + real*8 , intent(inout), optional :: B + real*8 , intent(in) :: Ci1 + real*8 , intent(out) :: Co + integer, intent(inout) :: ierr + + call oa_count2(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,ierr) + +! write(6,*) 'debug oa_count1 ',ierr + + end subroutine oa_count1 + +!----------------------------------- + + subroutine oa_count2(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,ierr) + + real*8 , intent(in) , optional :: Ai1, Di1, Di2 + real*8 , intent(out) , optional :: Ao, Do + real*8 , intent(inout), optional :: B + real*8 , intent(in) :: Ci1 + real*8 , intent(out) :: Co + integer, intent(inout) :: ierr + + ierr = 3 ! Ci1, Co, ierr have to be passed + if (present(Ai1)) ierr = ierr + 1 + if (present(Ao) ) ierr = ierr + 1 + if (present(B) ) ierr = ierr + 1 + if (present(Di1)) ierr = ierr + 1 + if (present(Di2)) ierr = ierr + 1 + if (present(Do) ) ierr = ierr + 1 + +! 
write(6,*) 'debug oa_count2 ',ierr + + end subroutine oa_count2 + +!----------------------------------- + + subroutine oa_layer1(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,ierr) + + real*8 , intent(in) , optional :: Ai1, Di1, Di2 + real*8 , intent(out) , optional :: Ao, Do + real*8 , intent(inout), optional :: B + real*8 , intent(in) :: Ci1 + real*8 , intent(out) :: Co + integer, intent(inout) :: ierr + + ierr = oa_OK + if (computeA) then + if (.not.(present(Ai1).and.present(Ao))) then + ierr = oa_error + endif + endif + if (computeB) then + if (.not.(present(B))) then + ierr = oa_error + endif + endif + if (computeD) then + if (.not.(present(Di1).and.present(Di2).and.present(Do))) then + ierr = oa_error + endif + endif + + if (ierr == oa_OK) then + call oa_layer2(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,ierr) + endif + + end subroutine oa_layer1 + +!----------------------------------- + + subroutine oa_layer2(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,ierr) + + real*8 , intent(in) , optional :: Ai1, Di1, Di2 + real*8 , intent(out) , optional :: Ao, Do + real*8 , intent(inout), optional :: B + real*8 , intent(in) :: Ci1 + real*8 , intent(out) :: Co + integer, intent(inout) :: ierr + + call oa_compute(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,ierr) + + end subroutine oa_layer2 + +!----------------------------------- + + subroutine oa_compute(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,ierr) + + real*8 , intent(in) , optional :: Ai1, Di1, Di2 + real*8 , intent(out) , optional :: Ao, Do + real*8 , intent(inout), optional :: B + real*8 , intent(in) :: Ci1 + real*8 , intent(out) :: Co + integer, intent(inout) :: ierr + + if (computeA) then + Ao = Ai1 - 1. + ierr = ierr + oa_A + endif + + if (computeB) then + B = B + 5. + ierr = ierr + oa_B + endif + + if (computeC) then + Co = Ci1 * (2.) + ierr = ierr + oa_C + endif + + if (computeD) then + Do = Di1 + Di2 + ierr = ierr + oa_D + endif + + return + end subroutine oa_compute + +!----------------------------------- + + end module optargs_subs diff --git a/cicecore/drivers/unittest/sumchk/CICE_InitMod.F90 b/cicecore/drivers/unittest/sumchk/CICE_InitMod.F90 index 60f71fa8a..ad355d783 100644 --- a/cicecore/drivers/unittest/sumchk/CICE_InitMod.F90 +++ b/cicecore/drivers/unittest/sumchk/CICE_InitMod.F90 @@ -181,11 +181,11 @@ subroutine cice_init if (icepack_warnings_aborted()) call abort_ice(trim(subname), & file=__FILE__,line= __LINE__) - if (tr_aero .or. tr_zaero) call faero_optics !initialize aerosol optical + if (tr_aero .or. tr_zaero) call faero_optics !initialize aerosol optical !property tables - ! Initialize shortwave components using swdn from previous timestep - ! if restarting. These components will be scaled to current forcing + ! Initialize shortwave components using swdn from previous timestep + ! if restarting. These components will be scaled to current forcing ! in prep_radiation. if (trim(runtype) == 'continue' .or. restart) & call init_shortwave ! initialize radiative transfer @@ -222,7 +222,7 @@ subroutine cice_init call init_flux_atm ! initialize atmosphere fluxes sent to coupler call init_flux_ocn ! initialize ocean fluxes sent to coupler - if (write_ic) call accum_hist(dt) ! write initial conditions + if (write_ic) call accum_hist(dt) ! 
write initial conditions end subroutine cice_init @@ -241,11 +241,17 @@ subroutine init_restart use ice_grid, only: tmask use ice_init, only: ice_ic use ice_init_column, only: init_age, init_FY, init_lvl, & +#ifdef UNDEPRECATE_CESMPONDS init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & +#else + init_meltponds_lvl, init_meltponds_topo, & +#endif init_isotope, init_aerosol, init_hbrine, init_bgc, init_fsd use ice_restart_column, only: restart_age, read_restart_age, & restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & +#ifdef UNDEPRECATE_CESMPONDS restart_pond_cesm, read_restart_pond_cesm, & +#endif restart_pond_lvl, read_restart_pond_lvl, & restart_pond_topo, read_restart_pond_topo, & restart_fsd, read_restart_fsd, & @@ -261,7 +267,11 @@ subroutine init_restart i, j , & ! horizontal indices iblk ! block index logical(kind=log_kind) :: & +#ifdef UNDEPRECATE_CESMPONDS tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & +#else + tr_iage, tr_FY, tr_lvl, tr_pond_lvl, & +#endif tr_pond_topo, tr_fsd, tr_iso, tr_aero, tr_brine, & skl_bgc, z_tracers, solve_zsal integer(kind=int_kind) :: & @@ -280,7 +290,11 @@ subroutine init_restart call icepack_query_parameters(skl_bgc_out=skl_bgc, & z_tracers_out=z_tracers, solve_zsal_out=solve_zsal) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & +#ifdef UNDEPRECATE_CESMPONDS tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & +#else + tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & +#endif tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & @@ -291,7 +305,7 @@ subroutine init_restart if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - if (trim(runtype) == 'continue') then + if (trim(runtype) == 'continue') then ! start from core restart file call restartfile() ! given by pointer in ice_in call calendar() ! update time parameters @@ -302,17 +316,17 @@ subroutine init_restart ! call restartfile_v4 (ice_ic) ! CICE v4.1 binary restart file !!! uncomment if EAP restart data exists ! if (kdyn == 2) call read_restart_eap - endif + endif ! tracers - ! ice age tracer - if (tr_iage) then + ! ice age tracer + if (tr_iage) then if (trim(runtype) == 'continue') & restart_age = .true. if (restart_age) then call read_restart_age else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_age(trcrn(:,:,nt_iage,:,iblk)) enddo ! iblk endif @@ -323,7 +337,7 @@ subroutine init_restart if (restart_FY) then call read_restart_FY else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_FY(trcrn(:,:,nt_FY,:,iblk)) enddo ! iblk endif @@ -334,12 +348,13 @@ subroutine init_restart if (restart_lvl) then call read_restart_lvl else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_lvl(iblk,trcrn(:,:,nt_alvl,:,iblk), & trcrn(:,:,nt_vlvl,:,iblk)) enddo ! iblk endif endif +#ifdef UNDEPRECATE_CESMPONDS ! CESM melt ponds if (tr_pond_cesm) then if (trim(runtype) == 'continue') & @@ -347,12 +362,13 @@ subroutine init_restart if (restart_pond_cesm) then call read_restart_pond_cesm else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_cesm(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk)) enddo ! iblk endif endif +#endif ! 
level-ice melt ponds if (tr_pond_lvl) then if (trim(runtype) == 'continue') & @@ -360,7 +376,7 @@ subroutine init_restart if (restart_pond_lvl) then call read_restart_pond_lvl else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_lvl(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk), & trcrn(:,:,nt_ipnd,:,iblk), & @@ -375,7 +391,7 @@ subroutine init_restart if (restart_pond_topo) then call read_restart_pond_topo else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_topo(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk), & trcrn(:,:,nt_ipnd,:,iblk)) @@ -398,7 +414,7 @@ subroutine init_restart if (restart_iso) then call read_restart_iso else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_isotope(trcrn(:,:,nt_isosno:nt_isosno+n_iso-1,:,iblk), & trcrn(:,:,nt_isoice:nt_isoice+n_iso-1,:,iblk)) enddo ! iblk @@ -410,7 +426,7 @@ subroutine init_restart if (restart_aero) then call read_restart_aero else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_aerosol(trcrn(:,:,nt_aero:nt_aero+4*n_aero-1,:,iblk)) enddo ! iblk endif ! .not. restart_aero diff --git a/cicecore/drivers/unittest/sumchk/sumchk.F90 b/cicecore/drivers/unittest/sumchk/sumchk.F90 index f314959cb..aba435b0e 100644 --- a/cicecore/drivers/unittest/sumchk/sumchk.F90 +++ b/cicecore/drivers/unittest/sumchk/sumchk.F90 @@ -1,7 +1,7 @@ program sumchk - ! This tests the CICE ice_global_reductions infrastructure by + ! This tests the CICE ice_global_reductions infrastructure by ! using CICE_InitMod (from the standalone model) to read/initialize ! a CICE grid/configuration. Then methods in ice_global_reductions ! are verified using hardwired inputs with known outputs. @@ -28,7 +28,7 @@ program sumchk integer(int_kind) :: i, j, k, l, m, n, iblock, ib, ie, jb, je integer(int_kind) :: blockID, numBlocks - type (block) :: this_block + type (block) :: this_block real(dbl_kind) ,allocatable :: arrayA(:,:,:),arrayB(:,:,:),arrayC(:,:,:) integer(int_kind),allocatable :: arrayiA(:,:,:),arrayiB(:,:,:) diff --git a/cicecore/shared/ice_arrays_column.F90 b/cicecore/shared/ice_arrays_column.F90 index dbad4292c..c9e8be8db 100644 --- a/cicecore/shared/ice_arrays_column.F90 +++ b/cicecore/shared/ice_arrays_column.F90 @@ -1,7 +1,7 @@ !======================================================================= ! Grid-dependent arrays needed for column package -! These were originally module variables in modules that became part of +! These were originally module variables in modules that became part of ! the column package ! author: Elizabeth C. Hunke, LANL @@ -94,9 +94,9 @@ module ice_arrays_column ! albedo components for history real (kind=dbl_kind), & dimension (:,:,:,:), allocatable, public :: & - albicen, & ! bare ice - albsnon, & ! snow - albpndn, & ! pond + albicen, & ! bare ice + albsnon, & ! snow + albpndn, & ! pond apeffn ! effective pond area used for radiation calculation real (kind=dbl_kind), dimension (:,:,:,:), allocatable, public :: & @@ -128,12 +128,12 @@ module ice_arrays_column ! aerosol optical properties -> band | ! v aerosol ! for combined dust category, use category 4 properties - real (kind=dbl_kind), dimension(:,:), allocatable, public :: & + real (kind=dbl_kind), dimension(:,:), allocatable, public :: & kaer_tab, & ! aerosol mass extinction cross section (m2/kg) waer_tab, & ! aerosol single scatter albedo (fraction) gaer_tab ! 
aerosol asymmetry parameter (cos(theta)) - real (kind=dbl_kind), dimension(:,:), allocatable, public :: & + real (kind=dbl_kind), dimension(:,:), allocatable, public :: & kaer_bc_tab, & ! BC mass extinction cross section (m2/kg) waer_bc_tab, & ! BC single scatter albedo (fraction) gaer_bc_tab ! BC aerosol asymmetry parameter (cos(theta)) @@ -146,7 +146,7 @@ module ice_arrays_column real (kind=dbl_kind), dimension (:), allocatable, public :: & bgrid , & ! biology nondimensional vertical grid points igrid , & ! biology vertical interface points - cgrid , & ! CICE vertical coordinate + cgrid , & ! CICE vertical coordinate icgrid , & ! interface grid for CICE (shortwave variable) swgrid ! grid for ice tracers used in dEdd scheme @@ -187,21 +187,21 @@ module ice_arrays_column ! 2*icepack_max_algae + icepack_max_doc + 7 + icepack_max_dic + icepack_max_don + icepack_max_fe ! Fep(1:icepack_max_fe) = 2*icepack_max_algae + icepack_max_doc + 8 + icepack_max_dic + icepack_max_don + icepack_max_fe: ! 2*icepack_max_algae + icepack_max_doc + 7 + icepack_max_dic + icepack_max_don + 2*icepack_max_fe - ! zaero(1:icepack_max_aero) = 2*icepack_max_algae + icepack_max_doc + 8 + icepack_max_dic + icepack_max_don + 2*icepack_max_fe: + ! zaero(1:icepack_max_aero) = 2*icepack_max_algae + icepack_max_doc + 8 + icepack_max_dic + icepack_max_don + 2*icepack_max_fe: ! 2*icepack_max_algae + icepack_max_doc + 7 + icepack_max_dic + icepack_max_don + 2*icepack_max_fe ! + icepack_max_aero ! humic == 2*icepack_max_algae + icepack_max_doc + 8 + icepack_max_dic + icepack_max_don + 2*icepack_max_fe - ! + icepack_max_aero + ! + icepack_max_aero integer (kind=int_kind), dimension(:,:,:,:), allocatable, public :: & - algal_peak ! vertical location of algal maximum, 0 if no maximum + algal_peak ! vertical location of algal maximum, 0 if no maximum - real (kind=dbl_kind), & + real (kind=dbl_kind), & dimension (:,:,:,:,:), allocatable, public :: & Zoo ! N losses accumulated in timestep (ie. zooplankton/bacteria) ! mmol/m^3 - real (kind=dbl_kind), & + real (kind=dbl_kind), & dimension (:,:,:,:), allocatable, public :: & dhbr_top , & ! brine top change dhbr_bot ! brine bottom change @@ -214,7 +214,7 @@ module ice_arrays_column real (kind=dbl_kind), & dimension (:,:,:,:,:), allocatable, public :: & - bphi , & ! porosity of layers + bphi , & ! porosity of layers bTiz ! layer temperatures interpolated on bio grid (C) real (kind=dbl_kind), & @@ -222,23 +222,23 @@ module ice_arrays_column darcy_V ! darcy velocity positive up (m/s) real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & - zsal_tot , & ! Total ice salinity in per grid cell (g/m^2) - chl_net , & ! Total chla (mg chla/m^2) per grid cell - NO_net ! Total nitrate per grid cell + zsal_tot , & ! Total ice salinity in per grid cell (g/m^2) + chl_net , & ! Total chla (mg chla/m^2) per grid cell + NO_net ! Total nitrate per grid cell logical (kind=log_kind), dimension (:,:,:), allocatable, public :: & - Rayleigh_criteria ! .true. means Ra_c was reached + Rayleigh_criteria ! .true. means Ra_c was reached real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & Rayleigh_real ! .true. = c1, .false. = c0 - real (kind=dbl_kind), & + real (kind=dbl_kind), & dimension (:,:,:,:), allocatable, public :: & sice_rho ! avg sea ice density (kg/m^3) ! ech: diagnostic only? - real (kind=dbl_kind), & + real (kind=dbl_kind), & dimension (:,:,:,:), allocatable, public :: & - fzsaln, & ! category fzsal(kg/m^2/s) + fzsaln, & ! category fzsal(kg/m^2/s) fzsaln_g ! 
salt flux from gravity drainage only real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & @@ -249,26 +249,26 @@ module ice_arrays_column zfswin ! Shortwave flux into layers interpolated on bio grid (W/m^2) real (kind=dbl_kind), dimension (:,:,:,:,:), allocatable, public :: & - iDi , & ! igrid Diffusivity (m^2/s) - iki ! Ice permeability (m^2) + iDi , & ! igrid Diffusivity (m^2/s) + iki ! Ice permeability (m^2) real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & upNO , & ! nitrate uptake rate (mmol/m^2/d) times aice upNH ! ammonium uptake rate (mmol/m^2/d) times aice - + real (kind=dbl_kind), & dimension(:,:,:,:,:), allocatable, public :: & - trcrn_sw ! bgc tracers active in the delta-Eddington shortwave + trcrn_sw ! bgc tracers active in the delta-Eddington shortwave ! calculation on the shortwave grid (swgrid) real (kind=dbl_kind), & dimension (:,:,:,:), allocatable, public :: & - ice_bio_net , & ! depth integrated tracer (mmol/m^2) + ice_bio_net , & ! depth integrated tracer (mmol/m^2) snow_bio_net ! depth integrated snow tracer (mmol/m^2) logical (kind=log_kind), public :: & oceanmixed_ice, & ! if true, use internal ocean mixed layer - restore_bgc ! + restore_bgc ! character(char_len), public :: & fe_data_type ! 'default', 'clim' @@ -280,7 +280,7 @@ module ice_arrays_column optics_file, & ! modal aero optics file optics_file_fieldname ! modal aero optics file fieldname - real (kind=dbl_kind), dimension(:), allocatable, public :: & + real (kind=dbl_kind), dimension(:), allocatable, public :: & R_C2N_DON ! carbon to nitrogen mole ratio of DON pool real (kind=dbl_kind), dimension(:), allocatable, public :: & @@ -353,11 +353,11 @@ subroutine alloc_arrays_column grow_net (nx_block,ny_block,max_blocks), & ! Specific growth rate (/s) per grid cell PP_net (nx_block,ny_block,max_blocks), & ! Total production (mg C/m^2/s) per grid cell hbri (nx_block,ny_block,max_blocks), & ! brine height, area-averaged for comparison with hi (m) - zsal_tot (nx_block,ny_block,max_blocks), & ! Total ice salinity in per grid cell (g/m^2) - chl_net (nx_block,ny_block,max_blocks), & ! Total chla (mg chla/m^2) per grid cell - NO_net (nx_block,ny_block,max_blocks), & ! Total nitrate per grid cell + zsal_tot (nx_block,ny_block,max_blocks), & ! Total ice salinity in per grid cell (g/m^2) + chl_net (nx_block,ny_block,max_blocks), & ! Total chla (mg chla/m^2) per grid cell + NO_net (nx_block,ny_block,max_blocks), & ! Total nitrate per grid cell Rayleigh_criteria & - (nx_block,ny_block,max_blocks), & ! .true. means Ra_c was reached + (nx_block,ny_block,max_blocks), & ! .true. means Ra_c was reached Rayleigh_real(nx_block,ny_block,max_blocks), & ! .true. = c1, .false. = c0 fzsal (nx_block,ny_block,max_blocks), & ! Total flux of salt to ocean at time step for conservation fzsal_g (nx_block,ny_block,max_blocks), & ! Total gravity drainage flux @@ -371,9 +371,9 @@ subroutine alloc_arrays_column alidrn (nx_block,ny_block,ncat,max_blocks), & ! near-ir direct albedo (fraction) alvdfn (nx_block,ny_block,ncat,max_blocks), & ! visible diffuse albedo (fraction) alidfn (nx_block,ny_block,ncat,max_blocks), & ! near-ir diffuse albedo (fraction) - albicen (nx_block,ny_block,ncat,max_blocks), & ! bare ice - albsnon (nx_block,ny_block,ncat,max_blocks), & ! snow - albpndn (nx_block,ny_block,ncat,max_blocks), & ! pond + albicen (nx_block,ny_block,ncat,max_blocks), & ! bare ice + albsnon (nx_block,ny_block,ncat,max_blocks), & ! snow + albpndn (nx_block,ny_block,ncat,max_blocks), & ! 
pond apeffn (nx_block,ny_block,ncat,max_blocks), & ! effective pond area used for radiation calculation snowfracn (nx_block,ny_block,ncat,max_blocks), & ! Category snow fraction used in radiation fswsfcn (nx_block,ny_block,ncat,max_blocks), & ! SW absorbed at ice/snow surface (W m-2) @@ -385,21 +385,21 @@ subroutine alloc_arrays_column fswintn (nx_block,ny_block,ncat,max_blocks), & ! SW absorbed in ice interior, below surface (W m-2) first_ice_real & (nx_block,ny_block,ncat,max_blocks), & ! .true. = c1, .false. = c0 - first_ice (nx_block,ny_block,ncat,max_blocks), & ! distinguishes ice that disappears (e.g. melts) and reappears (e.g. transport) + first_ice (nx_block,ny_block,ncat,max_blocks), & ! distinguishes ice that disappears (melts) and reappears (transport) dhbr_top (nx_block,ny_block,ncat,max_blocks), & ! brine top change dhbr_bot (nx_block,ny_block,ncat,max_blocks), & ! brine bottom change darcy_V (nx_block,ny_block,ncat,max_blocks), & ! darcy velocity positive up (m/s) sice_rho (nx_block,ny_block,ncat,max_blocks), & ! avg sea ice density (kg/m^3) ! ech: diagnostic only? - fzsaln (nx_block,ny_block,ncat,max_blocks), & ! category fzsal(kg/m^2/s) + fzsaln (nx_block,ny_block,ncat,max_blocks), & ! category fzsal(kg/m^2/s) fzsaln_g (nx_block,ny_block,ncat,max_blocks), & ! salt flux from gravity drainage only Iswabsn (nx_block,ny_block,nilyr,ncat,max_blocks), & ! SW radiation absorbed in ice layers (W m-2) Sswabsn (nx_block,ny_block,nslyr,ncat,max_blocks), & ! SW radiation absorbed in snow layers (W m-2) fswpenln (nx_block,ny_block,nilyr+1,ncat,max_blocks), & ! visible SW entering ice layers (W m-2) Zoo (nx_block,ny_block,nblyr+1,ncat,max_blocks), & ! N losses accumulated in timestep (ie. zooplankton/bacteria) zfswin (nx_block,ny_block,nblyr+1,ncat,max_blocks), & ! Shortwave flux into layers interpolated on bio grid (W/m^2) - iDi (nx_block,ny_block,nblyr+1,ncat,max_blocks), & ! igrid Diffusivity (m^2/s) - iki (nx_block,ny_block,nblyr+1,ncat,max_blocks), & ! Ice permeability (m^2) - bphi (nx_block,ny_block,nblyr+2,ncat,max_blocks), & ! porosity of layers + iDi (nx_block,ny_block,nblyr+1,ncat,max_blocks), & ! igrid Diffusivity (m^2/s) + iki (nx_block,ny_block,nblyr+1,ncat,max_blocks), & ! Ice permeability (m^2) + bphi (nx_block,ny_block,nblyr+2,ncat,max_blocks), & ! porosity of layers bTiz (nx_block,ny_block,nblyr+2,ncat,max_blocks), & ! layer temperatures interpolated on bio grid (C) stat=ierr) if (ierr/=0) call abort_ice(subname//': Out of Memory1') @@ -409,9 +409,9 @@ subroutine alloc_arrays_column fbio_snoice (nx_block,ny_block,max_nbtrcr,max_blocks), & ! fluxes from snow to ice fbio_atmice (nx_block,ny_block,max_nbtrcr,max_blocks), & ! fluxes from atm to ice ocean_bio_all(nx_block,ny_block,max_nbtrcr,max_blocks), & ! fixed order, all values even for tracers false - ice_bio_net (nx_block,ny_block,max_nbtrcr,max_blocks), & ! depth integrated tracer (mmol/m^2) + ice_bio_net (nx_block,ny_block,max_nbtrcr,max_blocks), & ! depth integrated tracer (mmol/m^2) snow_bio_net (nx_block,ny_block,max_nbtrcr,max_blocks), & ! depth integrated snow tracer (mmol/m^2) - algal_peak (nx_block,ny_block,max_algae ,max_blocks), & ! vertical location of algal maximum, 0 if no maximum + algal_peak (nx_block,ny_block,max_algae ,max_blocks), & ! vertical location of algal maximum, 0 if no maximum stat=ierr) if (ierr/=0) call abort_ice(subname//': Out of Memory2') @@ -420,7 +420,7 @@ subroutine alloc_arrays_column c_hi_range(ncat) , & ! bgrid(nblyr+2) , & ! 
biology nondimensional vertical grid points
igrid(nblyr+1) , & ! biology vertical interface points
- cgrid(nilyr+1) , & ! CICE vertical coordinate
+ cgrid(nilyr+1) , & ! CICE vertical coordinate
icgrid(nilyr+1) , & ! interface grid for CICE (shortwave variable)
swgrid(nilyr+1) , & ! grid for ice tracers used in dEdd scheme
stat=ierr)
diff --git a/cicecore/shared/ice_calendar.F90 b/cicecore/shared/ice_calendar.F90
index 7684fef67..7bd0c73b2 100644
--- a/cicecore/shared/ice_calendar.F90
+++ b/cicecore/shared/ice_calendar.F90
@@ -4,7 +4,7 @@
!
! Authors: Elizabeth C. Hunke, LANL
! Tony Craig, NCAR
-! Craig MacLachlan, UK Met Office
+! Craig MacLachlan, UK Met Office
!
! 2006 ECH: Removed 'w' option for history; added 'h' and histfreq_n.
! Converted to free form source (F90).
@@ -199,11 +199,15 @@ subroutine init_calendar
hour=0 ! computed in calendar, but needs some reasonable initial value
istep1 = istep0 ! number of steps at current timestep
! real (dumped) or imagined (use to set calendar)
- idate0 = (myear)*10000 + mmonth*100 + mday ! date (yyyymmdd)
+ idate0 = (myear)*10000 + mmonth*100 + mday ! date (yyyymmdd)
stop_now = 0 ! end program execution if stop_now=1
dt_dyn = dt/real(ndtd,kind=dbl_kind) ! dynamics et al timestep
force_restart_now = .false.
+ ! initialize nstreams to zero (will be initialized from namelist in 'init_hist')
+ ! this avoids using it uninitialized in 'calendar' below
+ nstreams = 0
+
#ifdef CESMCOUPLED
! calendar_type set by coupling
#else
@@ -385,7 +389,7 @@ subroutine calendar()
call abort_ice(subname//'ERROR: model year too large')
endif
- idate = (myear)*10000 + mmonth*100 + mday ! date (yyyymmdd)
+ idate = (myear)*10000 + mmonth*100 + mday ! date (yyyymmdd)
yday = daycal(mmonth) + mday ! day of the year
hour = int(msec/seconds_per_hour)
@@ -638,7 +642,6 @@ integer function compute_days_between(year0,month0,day0,year1,month1,day1)
integer (kind=int_kind), intent(in) :: day1 ! end day
! Internal variable
- logical (kind=log_kind) :: isleap ! Leap year logical
integer (kind=int_kind) :: nday0, nday1
character(len=*),parameter :: subname='(compute_days_between)'
@@ -911,7 +914,7 @@ subroutine calendar_time2date(atimesecs,ayear,amon,aday,asec,year_ref,mon_ref,da
tday = 1
tsec = 0
- ! add initial seconds to timesecs and treat lsec_ref as zero
+ ! add initial seconds to timesecs and treat lsec_ref as zero
ltimesecs = atimesecs + real(lsec_ref,kind=dbl_kind)
! first estimate of tyear
diff --git a/cicecore/shared/ice_constants.F90 b/cicecore/shared/ice_constants.F90
index c49732e35..f2da2ef9d 100644
--- a/cicecore/shared/ice_constants.F90
+++ b/cicecore/shared/ice_constants.F90
@@ -1,7 +1,7 @@
!=======================================================================
!
! This module defines a variety of physical and numerical constants
-! used throughout the ice model
+! used throughout the ice model
!
! author Elizabeth C. Hunke, LANL
@@ -33,7 +33,7 @@ module ice_constants
real (kind=dbl_kind), public :: &
shlat = 30.0_dbl_kind ,&! artificial masking edge (deg)
nhlat = -30.0_dbl_kind ! artificial masking edge (deg)
-
+
!-----------------------------------------------------------------
! numbers used outside the column package
!-----------------------------------------------------------------
@@ -91,12 +91,12 @@ module ice_constants
! location of fields for staggered grids
!-----------------------------------------------------------------
- integer (int_kind), parameter, public :: &
- field_loc_unknown = 0, &
- field_loc_noupdate = -1, &
- field_loc_center = 1, &
- field_loc_NEcorner = 2, &
- field_loc_Nface = 3, &
+ integer (int_kind), parameter, public :: &
+ field_loc_unknown = 0, &
+ field_loc_noupdate = -1, &
+ field_loc_center = 1, &
+ field_loc_NEcorner = 2, &
+ field_loc_Nface = 3, &
field_loc_Eface = 4, &
field_loc_Wface = 5
@@ -105,11 +105,11 @@ module ice_constants
! changes of direction across tripole boundary
!-----------------------------------------------------------------
- integer (int_kind), parameter, public :: &
- field_type_unknown = 0, &
- field_type_noupdate = -1, &
- field_type_scalar = 1, &
- field_type_vector = 2, &
+ integer (int_kind), parameter, public :: &
+ field_type_unknown = 0, &
+ field_type_noupdate = -1, &
+ field_type_scalar = 1, &
+ field_type_vector = 2, &
field_type_angle = 3
!-----------------------------------------------------------------
@@ -138,9 +138,10 @@ subroutine ice_init_constants( &
omega_in , & ! angular velocity of earth (rad/sec)
radius_in , & ! earth radius (m)
spval_dbl_in , & ! special value (double precision)
- spval_in , & ! special value for netCDF output
shlat_in , & ! artificial masking edge (deg)
nhlat_in ! artificial masking edge (deg)
+ real (kind=real_kind), intent(in), optional :: &
+ spval_in ! special value for netCDF output
character(len=*),parameter :: subname='(ice_init_constants)'
diff --git a/cicecore/shared/ice_distribution.F90 b/cicecore/shared/ice_distribution.F90
index 1a23b63be..0f3f6c198 100644
--- a/cicecore/shared/ice_distribution.F90
+++ b/cicecore/shared/ice_distribution.F90
@@ -61,7 +61,7 @@ function create_distribution(dist_type, nprocs, work_per_block)
! by calling the appropriate subroutine based on distribution type
! requested. Currently three distributions are supported:
! 2-d Cartesian distribution (cartesian), a load-balanced
-! distribution using a rake algorithm based on an input amount of work
+! distribution using a rake algorithm based on an input amount of work
! per block, and a space-filling-curve algorithm.
character (*), intent(in) :: &
@@ -180,14 +180,6 @@ subroutine create_local_block_ids(block_ids, distribution)
do n=1,size(distribution%blockLocation)
if (distribution%blockLocation(n) == my_task+1) then
block_ids(distribution%blockLocalID(n)) = n
-
- if (debug_blocks .and. my_task == master_task) then
- write(nu_diag,'(2a,3i8)') &
- subname,' block id, proc, local_block: ', &
- block_ids(distribution%blockLocalID(n)), &
- distribution%blockLocation(n), &
- distribution%blockLocalID(n)
- endif
endif
end do
endif
@@ -597,7 +589,7 @@ function create_distrb_cart(nprocs, workPerBlock) result(newDistrb)
newDistrb%nprocs = nprocs
call proc_decomposition(nprocs, nprocsX, nprocsY)
-
+
!----------------------------------------------------------------------
!
@@ -639,7 +631,7 @@ function create_distrb_cart(nprocs, workPerBlock) result(newDistrb)
do j=1,nprocsY
do i=1,nprocsX
- processor = (j-1)*nprocsX + i ! number the processors
+ processor = (j-1)*nprocsX + i ! number the processors
! left to right, bot to top
is = (i-1)*numBlocksXPerProc + 1 !
starting block in i @@ -783,7 +775,7 @@ function create_distrb_rake(nprocs, workPerBlock) result(newDistrb) !---------------------------------------------------------------------- dist = create_distrb_cart(nprocs, workPerBlock) - + !---------------------------------------------------------------------- ! ! if the number of blocks is close to the number of processors, @@ -909,7 +901,7 @@ function create_distrb_rake(nprocs, workPerBlock) result(newDistrb) call ice_distributionRake (workTmp, procTmp, workPerBlock, & priority, dist) end do - + deallocate(workTmp, procTmp, stat=istat) if (istat > 0) then call abort_ice( & @@ -1092,7 +1084,7 @@ function create_distrb_roundrobin(nprocs, workPerBlock) result(newDistrb) integer (int_kind), dimension(:), allocatable :: & proc_tmp ! temp processor id - + character(len=*),parameter :: subname='(create_distrb_roundrobin)' !---------------------------------------------------------------------- @@ -1143,7 +1135,7 @@ function create_distrb_roundrobin(nprocs, workPerBlock) result(newDistrb) do j=1,nblocks_y do i=1,nblocks_x - + globalID = globalID + 1 if (workPerBlock(globalID) /= 0) then @@ -1199,7 +1191,7 @@ function create_distrb_roundrobin(nprocs, workPerBlock) result(newDistrb) !---------------------------------------------------------------------- end function create_distrb_roundrobin - + !*********************************************************************** function create_distrb_spiralcenter(nprocs, workPerBlock) result(newDistrb) @@ -1237,7 +1229,7 @@ function create_distrb_spiralcenter(nprocs, workPerBlock) result(newDistrb) blocklist ! temp block ordered list integer (int_kind), dimension(:,:), allocatable :: & blockchk ! temp block check array - + character(len=*),parameter :: subname='(create_distrb_spiralcenter)' !---------------------------------------------------------------------- @@ -1424,7 +1416,7 @@ function create_distrb_spiralcenter(nprocs, workPerBlock) result(newDistrb) !---------------------------------------------------------------------- end function create_distrb_spiralcenter - + !*********************************************************************** function create_distrb_wghtfile(nprocs, workPerBlock) result(newDistrb) @@ -1461,7 +1453,7 @@ function create_distrb_wghtfile(nprocs, workPerBlock) result(newDistrb) proc_tmp ! temp processor id logical (log_kind) :: up ! direction of pe counting - + character(len=*),parameter :: subname='(create_distrb_wghtfile)' !---------------------------------------------------------------------- @@ -1590,7 +1582,7 @@ function create_distrb_wghtfile(nprocs, workPerBlock) result(newDistrb) !---------------------------------------------------------------------- end function create_distrb_wghtfile - + !*********************************************************************** function create_distrb_sectrobin(nprocs, workPerBlock) result(newDistrb) @@ -1628,7 +1620,7 @@ function create_distrb_sectrobin(nprocs, workPerBlock) result(newDistrb) logical (log_kind), dimension(:), allocatable :: & bfree ! map of assigned blocks - + integer (int_kind) :: cnt, blktogether, i2 integer (int_kind) :: totblocks, nchunks logical (log_kind) :: keepgoing @@ -1704,7 +1696,7 @@ function create_distrb_sectrobin(nprocs, workPerBlock) result(newDistrb) blktogether = max(1,nint(float(totblocks)/float(6*nprocs))) ! write(nu_diag,*) 'ice_distrb_sectrobin totblocks = ',totblocks,nblocks_y*nblocks_x - + !------------------------------ ! southern group of blocks ! 
weave back and forth in i vs j @@ -1897,7 +1889,7 @@ function create_distrb_sectrobin(nprocs, workPerBlock) result(newDistrb) !---------------------------------------------------------------------- end function create_distrb_sectrobin - + !*********************************************************************** function create_distrb_sectcart(nprocs, workPerBlock) result(newDistrb) @@ -1933,7 +1925,7 @@ function create_distrb_sectcart(nprocs, workPerBlock) result(newDistrb) integer (int_kind), dimension(:), allocatable :: & proc_tmp ! temp processor id - + integer (int_kind) :: n character(len=*),parameter :: subname='(create_distrb_sectcart)' @@ -1997,7 +1989,7 @@ function create_distrb_sectcart(nprocs, workPerBlock) result(newDistrb) cnt = 0 do j2=1,nblocks_y do i2=1,nblocks_x/2 - + if (n == 1) then i = i2 j = j2 @@ -2066,7 +2058,7 @@ function create_distrb_sectcart(nprocs, workPerBlock) result(newDistrb) !---------------------------------------------------------------------- end function create_distrb_sectcart - + !********************************************************************** function create_distrb_spacecurve(nprocs,work_per_block) @@ -2400,7 +2392,7 @@ subroutine ice_distributionRake (procWork, procID, blockWork, & integer (int_kind) :: & i, n, &! dummy loop indices np1, &! n+1 corrected for cyclical wrap - iproc, inext, &! processor ids for current and next + iproc, inext, &! processor ids for current and next nprocs, numBlocks, &! number of blocks, processors lastPriority, &! priority for most recent block minPriority, &! minimum priority diff --git a/cicecore/shared/ice_domain_size.F90 b/cicecore/shared/ice_domain_size.F90 index 56381b986..999a35f48 100644 --- a/cicecore/shared/ice_domain_size.F90 +++ b/cicecore/shared/ice_domain_size.F90 @@ -51,11 +51,11 @@ module ice_domain_size !*** values for the parameter below. A value higher than !*** necessary will not cause the code to fail, but will !*** allocate more memory than is necessary. A value that - !*** is too low will cause the code to exit. + !*** is too low will cause the code to exit. !*** A good initial guess is found using !*** max_blocks = (nx_global/block_size_x)*(ny_global/block_size_y)/ !*** num_procs - + !======================================================================= end module ice_domain_size diff --git a/cicecore/shared/ice_fileunits.F90 b/cicecore/shared/ice_fileunits.F90 index ccb518807..1854dda64 100644 --- a/cicecore/shared/ice_fileunits.F90 +++ b/cicecore/shared/ice_fileunits.F90 @@ -84,7 +84,7 @@ module ice_fileunits nu_diag_set = .false. ! flag to indicate whether nu_diag is already set integer (kind=int_kind), public :: & - ice_IOUnitsMinUnit = 11, & ! do not use unit numbers below + ice_IOUnitsMinUnit = 11, & ! do not use unit numbers below ice_IOUnitsMaxUnit = 99 ! or above, set by setup_nml logical (kind=log_kind), dimension(:), allocatable :: & @@ -101,8 +101,8 @@ module ice_fileunits !======================================================================= -! This routine grabs needed unit numbers. -! nu_diag is set to 6 (stdout) but may be reset later by the namelist. +! This routine grabs needed unit numbers. +! nu_diag is set to 6 (stdout) but may be reset later by the namelist. ! nu_nml is obtained separately. subroutine init_fileunits @@ -203,7 +203,7 @@ end subroutine get_fileunit !======================================================================= -! This routine releases unit numbers at the end of a run. +! This routine releases unit numbers at the end of a run. 
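!
! A minimal usage sketch of this unit-reservation scheme (illustrative
! comment only: it assumes a release_fileunit counterpart that takes the
! unit number as intent(in), and the variable and file names are made up):
!
!    integer (kind=int_kind) :: nu_work
!    call get_fileunit(nu_work)           ! reserve a free unit between
!                                         ! ice_IOUnitsMinUnit and ice_IOUnitsMaxUnit
!    open(nu_work, file='work_file.txt')  ! use the reserved unit
!    close(nu_work)
!    call release_fileunit(nu_work)       ! hand the unit back to the pool
!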
subroutine release_all_fileunits diff --git a/cicecore/shared/ice_init_column.F90 b/cicecore/shared/ice_init_column.F90 index 5643b4277..5339aa6ec 100644 --- a/cicecore/shared/ice_init_column.F90 +++ b/cicecore/shared/ice_init_column.F90 @@ -10,7 +10,7 @@ module ice_init_column use ice_blocks, only: nx_block, ny_block use ice_constants use ice_communicate, only: my_task, master_task, ice_barrier - use ice_domain_size, only: ncat, max_blocks + use ice_domain_size, only: ncat, max_blocks use ice_domain_size, only: nblyr, nilyr, nslyr use ice_domain_size, only: n_aero, n_zaero, n_algae use ice_domain_size, only: n_doc, n_dic, n_don @@ -44,7 +44,11 @@ module ice_init_column private public :: init_thermo_vertical, init_shortwave, & init_age, init_FY, init_lvl, init_fsd, & +#ifdef UNDEPRECATE_CESMPONDS init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & +#else + init_meltponds_lvl, init_meltponds_topo, & +#endif init_aerosol, init_bgc, init_hbrine, init_zbgc, input_zbgc, & count_tracers, init_isotope, init_snowtracers @@ -145,7 +149,7 @@ subroutine init_thermo_vertical character(len=*), parameter :: subname='(init_thermo_vertical)' !----------------------------------------------------------------- - ! initialize heat_capacity, l_brine, and salinity profile + ! initialize !----------------------------------------------------------------- call icepack_query_parameters(depressT_out=depressT) @@ -266,12 +270,12 @@ subroutine init_shortwave Iswabsn(:,:,:,:,iblk) = c0 Sswabsn(:,:,:,:,iblk) = c0 - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi - + do j = 1, ny_block ! can be jlo, jhi do i = 1, nx_block ! can be ilo, ihi @@ -393,7 +397,7 @@ subroutine init_shortwave l_print_point=l_print_point, & initonly = .true.) endif - + !----------------------------------------------------------------- ! Define aerosol tracer on shortwave grid !----------------------------------------------------------------- @@ -410,7 +414,7 @@ subroutine init_shortwave enddo ! j !----------------------------------------------------------------- - ! Aggregate albedos + ! Aggregate albedos ! Match loop order in coupling_prep for same order of operations !----------------------------------------------------------------- @@ -524,7 +528,7 @@ end subroutine init_FY ! Initialize ice lvl tracers (call prior to reading restart data) - subroutine init_lvl(iblk, alvl, vlvl) + subroutine init_lvl(iblk, alvl, vlvl) use ice_constants, only: c0, c1 use ice_arrays_column, only: ffracn, dhsn @@ -543,6 +547,7 @@ subroutine init_lvl(iblk, alvl, vlvl) end subroutine init_lvl +#ifdef UNDEPRECATE_CESMPONDS !======================================================================= ! Initialize melt ponds. @@ -558,7 +563,7 @@ subroutine init_meltponds_cesm(apnd, hpnd) hpnd(:,:,:) = c0 end subroutine init_meltponds_cesm - +#endif !======================================================================= ! Initialize melt ponds. @@ -594,7 +599,7 @@ subroutine init_meltponds_topo(apnd, hpnd, ipnd) apnd(:,:,:) = c0 hpnd(:,:,:) = c0 ipnd(:,:,:) = c0 - + end subroutine init_meltponds_topo !======================================================================= @@ -746,7 +751,7 @@ end subroutine init_aerosol ! 
Initialize vertical profile for biogeochemistry - subroutine init_bgc() + subroutine init_bgc() use ice_arrays_column, only: zfswin, trcrn_sw, & ocean_bio_all, ice_bio_net, snow_bio_net, & @@ -776,15 +781,15 @@ subroutine init_bgc() logical (kind=log_kind) :: & RayleighC , & solve_zsal - + type (block) :: & this_block ! block information for current block real(kind=dbl_kind), allocatable :: & trcrn_bgc(:,:) - + real(kind=dbl_kind), dimension(nilyr,ncat) :: & - sicen + sicen real(kind=dbl_kind) :: & RayleighR @@ -809,13 +814,13 @@ subroutine init_bgc() allocate(trcrn_bgc(ntrcr,ncat)) - bphi(:,:,:,:,:) = c0 ! initial porosity for no ice + bphi(:,:,:,:,:) = c0 ! initial porosity for no ice iDi (:,:,:,:,:) = c0 ! interface diffusivity bTiz(:,:,:,:,:) = c0 ! initial bio grid ice temperature iki (:,:,:,:,:) = c0 ! permeability ocean_bio_all(:,:,:,:) = c0 - ice_bio_net (:,:,:,:) = c0 ! integrated ice tracer conc (mmol/m^2 or mg/m^2) + ice_bio_net (:,:,:,:) = c0 ! integrated ice tracer conc (mmol/m^2 or mg/m^2) snow_bio_net (:,:,:,:) = c0 ! integrated snow tracer conc (mmol/m^2 or mg/m^2) zfswin (:,:,:,:,:) = c0 ! shortwave flux on bio grid trcrn_sw (:,:,:,:,:) = c0 ! tracers active in the shortwave calculation @@ -826,20 +831,20 @@ subroutine init_bgc() !----------------------------------------------------------------- ! zsalinity initialization !----------------------------------------------------------------- - - if (solve_zsal) then ! default values + + if (solve_zsal) then ! default values !$OMP PARALLEL DO PRIVATE(iblk,i,j,k,n,ilo,ihi,jlo,jhi,this_block,trcrn_bgc) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi do j = jlo, jhi - do i = ilo, ihi + do i = ilo, ihi call icepack_init_zsalinity(nblyr=nblyr, ntrcr_o=ntrcr_o, & Rayleigh_criteria = RayleighC, & Rayleigh_real = RayleighR, & @@ -858,7 +863,7 @@ subroutine init_bgc() enddo endif enddo ! i - enddo ! j + enddo ! j enddo ! iblk !$OMP END PARALLEL DO call icepack_warnings_flush(nu_diag) @@ -872,15 +877,15 @@ subroutine init_bgc() ! biogeochemistry initialization !----------------------------------------------------------------- - if (.not. restart_bgc) then - + if (.not. restart_bgc) then + !----------------------------------------------------------------- ! 
Initial Ocean Values if not coupled to the ocean bgc !----------------------------------------------------------------- !$OMP PARALLEL DO PRIVATE(iblk,i,j,n,ilo,ihi,jlo,jhi,this_block) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -914,14 +919,14 @@ subroutine init_bgc() !$OMP PARALLEL DO PRIVATE(iblk,i,j,n,ilo,ihi,jlo,jhi,this_block) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi do j = jlo, jhi - do i = ilo, ihi + do i = ilo, ihi call icepack_load_ocean_bio_array(max_nbtrcr=icepack_max_nbtrcr, & max_algae=icepack_max_algae, max_don=icepack_max_don, & @@ -929,7 +934,7 @@ subroutine init_bgc() max_dic=icepack_max_dic, max_aero=icepack_max_aero, & nit =nit (i,j, iblk), amm=amm(i,j, iblk), sil =sil (i,j, iblk), & dmsp=dmsp(i,j, iblk), dms=dms(i,j, iblk), algalN=algalN(i,j,:,iblk), & - doc =doc (i,j,:,iblk), don=don(i,j,:,iblk), dic =dic (i,j,:,iblk), & + doc =doc (i,j,:,iblk), don=don(i,j,:,iblk), dic =dic (i,j,:,iblk), & fed =fed (i,j,:,iblk), fep=fep(i,j,:,iblk), zaeros=zaeros(i,j,:,iblk), & hum=hum (i,j, iblk), ocean_bio_all=ocean_bio_all(i,j,:,iblk)) @@ -943,18 +948,18 @@ subroutine init_bgc() if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - if (.not. restart_bgc) then + if (.not. restart_bgc) then !$OMP PARALLEL DO PRIVATE(iblk,i,j,k,n,ilo,ihi,jlo,jhi,this_block,sicen,trcrn_bgc) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi do j = jlo, jhi - do i = ilo, ihi + do i = ilo, ihi do n = 1, ncat do k = 1, nilyr sicen(k,n) = trcrn(i,j,nt_sice+k-1,n,iblk) @@ -982,7 +987,7 @@ subroutine init_bgc() ! read restart to complete BGC initialization !----------------------------------------------------------------- - if (restart_zsal .or. restart_bgc) call read_restart_bgc + if (restart_zsal .or. restart_bgc) call read_restart_bgc deallocate(trcrn_bgc) @@ -1025,7 +1030,7 @@ subroutine init_hbrine() if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__,line= __LINE__) - first_ice(:,:,:,:) = .true. + first_ice(:,:,:,:) = .true. if (tr_brine) trcrn(:,:,nt_fbri,:,:) = c1 end subroutine init_hbrine @@ -1033,7 +1038,7 @@ end subroutine init_hbrine !======================================================================= ! Namelist variables, set to default values; may be altered at run time -! +! ! author Elizabeth C. Hunke, LANL ! 
Nicole Jeffery, LANL @@ -1055,7 +1060,7 @@ subroutine input_zbgc tr_bgc_N, tr_bgc_C, tr_bgc_chl, & tr_bgc_DON, tr_bgc_Fe, tr_zaero, & tr_bgc_hum, tr_aero - + integer (kind=int_kind) :: & ktherm @@ -1082,7 +1087,7 @@ subroutine input_zbgc restore_bgc, restart_bgc, scale_bgc, solve_zsal, restart_zsal, & tr_bgc_Nit, tr_bgc_C, tr_bgc_chl, tr_bgc_Am, tr_bgc_Sil, & tr_bgc_DMS, tr_bgc_PON, tr_bgc_hum, tr_bgc_DON, tr_bgc_Fe, & - grid_o, grid_o_t, l_sk, grid_oS, optics_file, optics_file_fieldname, & + grid_o, grid_o_t, l_sk, grid_oS, optics_file, optics_file_fieldname, & l_skS, phi_snow, initbio_frac, frazil_scav, & ratio_Si2N_diatoms , ratio_Si2N_sp , ratio_Si2N_phaeo , & ratio_S2N_diatoms , ratio_S2N_sp , ratio_S2N_phaeo , & @@ -1119,9 +1124,9 @@ subroutine input_zbgc fedtype_1 , feptype_1 , zaerotype_bc1 , & zaerotype_bc2 , zaerotype_dust1 , zaerotype_dust2 , & zaerotype_dust3 , zaerotype_dust4 , ratio_C2N_diatoms , & - ratio_C2N_sp , ratio_C2N_phaeo , ratio_chl2N_diatoms, & + ratio_C2N_sp , ratio_C2N_phaeo , ratio_chl2N_diatoms, & ratio_chl2N_sp , ratio_chl2N_phaeo , F_abs_chl_diatoms , & - F_abs_chl_sp , F_abs_chl_phaeo , ratio_C2N_proteins + F_abs_chl_sp , F_abs_chl_phaeo , ratio_C2N_proteins !----------------------------------------------------------------- @@ -1146,22 +1151,22 @@ subroutine input_zbgc restart_bgc = .false. ! biogeochemistry restart restart_zsal = .false. ! salinity restart restart_hbrine = .false. ! hbrine restart - scale_bgc = .false. ! initial bgc tracers proportional to S - skl_bgc = .false. ! solve skeletal biochemistry + scale_bgc = .false. ! initial bgc tracers proportional to S + skl_bgc = .false. ! solve skeletal biochemistry z_tracers = .false. ! solve vertically resolved tracers dEdd_algae = .false. ! dynamic algae contributes to shortwave absorption ! in delta-Eddington calculation - solve_zbgc = .false. ! turn on z layer biochemistry - tr_bgc_PON = .false. !--------------------------------------------- + solve_zbgc = .false. ! turn on z layer biochemistry + tr_bgc_PON = .false. !--------------------------------------------- tr_bgc_Nit = .false. ! biogeochemistry (skl or zbgc) tr_bgc_C = .false. ! if skl_bgc = .true. then skl tr_bgc_chl = .false. ! if z_tracers = .true. then vertically resolved tr_bgc_Sil = .false. ! if z_tracers + solve_zbgc = .true. then - tr_bgc_Am = .false. ! vertically resolved with reactions + tr_bgc_Am = .false. ! vertically resolved with reactions tr_bgc_DMS = .false. !------------------------------------------------ - tr_bgc_DON = .false. ! + tr_bgc_DON = .false. ! tr_bgc_hum = .false. ! - tr_bgc_Fe = .false. ! + tr_bgc_Fe = .false. ! tr_bgc_N = .true. ! ! brine height parameter @@ -1170,17 +1175,17 @@ subroutine input_zbgc ! skl biology parameters bgc_flux_type = 'Jin2006'! type of ocean-ice poston velocity ('constant') - ! z biology parameters - grid_o = c5 ! for bottom flux - grid_o_t = c5 ! for top flux - l_sk = 7.0_dbl_kind ! characteristic diffusive scale (m) + ! z biology parameters + grid_o = c5 ! for bottom flux + grid_o_t = c5 ! for top flux + l_sk = 7.0_dbl_kind ! characteristic diffusive scale (m) initbio_frac = c1 ! fraction of ocean trcr concentration in bio trcrs - frazil_scav = c1 ! increase in initial bio tracer from ocean scavenging - ratio_Si2N_diatoms = 1.8_dbl_kind ! algal Si to N (mol/mol) + frazil_scav = c1 ! increase in initial bio tracer from ocean scavenging + ratio_Si2N_diatoms = 1.8_dbl_kind ! algal Si to N (mol/mol) ratio_Si2N_sp = c0 ! 
diatoms, small plankton, phaeocystis ratio_Si2N_phaeo = c0 ratio_S2N_diatoms = 0.03_dbl_kind ! algal S to N (mol/mol) - ratio_S2N_sp = 0.03_dbl_kind + ratio_S2N_sp = 0.03_dbl_kind ratio_S2N_phaeo = 0.03_dbl_kind ratio_Fe2C_diatoms = 0.0033_dbl_kind ! algal Fe to C (umol/mol) ratio_Fe2C_sp = 0.0033_dbl_kind @@ -1191,7 +1196,7 @@ subroutine input_zbgc ratio_Fe2DON = 0.023_dbl_kind ! Fe to N of DON (nmol/umol) ratio_Fe2DOC_s = p1 ! Fe to C of DOC (nmol/umol) saccharids ratio_Fe2DOC_l = 0.033_dbl_kind ! Fe to C of DOC (nmol/umol) lipids - fr_resp = 0.05_dbl_kind ! frac of algal growth lost due to respiration + fr_resp = 0.05_dbl_kind ! frac of algal growth lost due to respiration tau_min = 5200.0_dbl_kind ! rapid mobile to stationary exchanges (s) tau_max = 1.73e5_dbl_kind ! long time mobile to stationary exchanges (s) algal_vel = 1.11e-8_dbl_kind! 0.5 cm/d(m/s) Lavoie 2005 1.5 cm/day @@ -1200,13 +1205,13 @@ subroutine input_zbgc chlabs_diatoms = 0.03_dbl_kind ! chl absorption (1/m/(mg/m^3)) chlabs_sp = 0.01_dbl_kind chlabs_phaeo = 0.05_dbl_kind - alpha2max_low_diatoms = 0.8_dbl_kind ! light limitation (1/(W/m^2)) + alpha2max_low_diatoms = 0.8_dbl_kind ! light limitation (1/(W/m^2)) alpha2max_low_sp = 0.67_dbl_kind alpha2max_low_phaeo = 0.67_dbl_kind - beta2max_diatoms = 0.018_dbl_kind ! light inhibition (1/(W/m^2)) + beta2max_diatoms = 0.018_dbl_kind ! light inhibition (1/(W/m^2)) beta2max_sp = 0.0025_dbl_kind beta2max_phaeo = 0.01_dbl_kind - mu_max_diatoms = 1.2_dbl_kind ! maximum growth rate (1/day) + mu_max_diatoms = 1.2_dbl_kind ! maximum growth rate (1/day) mu_max_sp = 0.851_dbl_kind mu_max_phaeo = 0.851_dbl_kind grow_Tdep_diatoms = 0.06_dbl_kind ! Temperature dependence of growth (1/C) @@ -1236,10 +1241,10 @@ subroutine input_zbgc K_Fe_diatoms = c1 ! iron half saturation (nM) K_Fe_sp = 0.2_dbl_kind K_Fe_phaeo = p1 - f_don_protein = 0.6_dbl_kind ! fraction of spilled grazing to proteins - kn_bac_protein = 0.03_dbl_kind ! Bacterial degredation of DON (1/d) - f_don_Am_protein = 0.25_dbl_kind ! fraction of remineralized DON to ammonium - f_doc_s = 0.4_dbl_kind ! fraction of mortality to DOC + f_don_protein = 0.6_dbl_kind ! fraction of spilled grazing to proteins + kn_bac_protein = 0.03_dbl_kind ! Bacterial degredation of DON (1/d) + f_don_Am_protein = 0.25_dbl_kind ! fraction of remineralized DON to ammonium + f_doc_s = 0.4_dbl_kind ! fraction of mortality to DOC f_doc_l = 0.4_dbl_kind f_exude_s = c1 ! fraction of exudation to DOC f_exude_l = c1 @@ -1249,15 +1254,15 @@ subroutine input_zbgc fsal = c1 ! Salinity limitation (ppt) op_dep_min = p1 ! Light attenuates for optical depths exceeding min fr_graze_s = p5 ! fraction of grazing spilled or slopped - fr_graze_e = p5 ! fraction of assimilation excreted + fr_graze_e = p5 ! fraction of assimilation excreted fr_mort2min = p5 ! fractionation of mortality to Am fr_dFe = 0.3_dbl_kind ! fraction of remineralized nitrogen ! (in units of algal iron) - k_nitrif = c0 ! nitrification rate (1/day) + k_nitrif = c0 ! nitrification rate (1/day) t_iron_conv = 3065.0_dbl_kind ! desorption loss pFe to dFe (day) - max_loss = 0.9_dbl_kind ! restrict uptake to % of remaining value - max_dfe_doc1 = 0.2_dbl_kind ! max ratio of dFe to saccharides in the ice - !(nM Fe/muM C) + max_loss = 0.9_dbl_kind ! restrict uptake to % of remaining value + max_dfe_doc1 = 0.2_dbl_kind ! max ratio of dFe to saccharides in the ice + !(nM Fe/muM C) fr_resp_s = 0.75_dbl_kind ! DMSPd fraction of respiration loss as DMSPd y_sk_DMS = p5 ! 
fraction conversion given high yield t_sk_conv = 3.0_dbl_kind ! Stefels conversion time (d) @@ -1291,11 +1296,11 @@ subroutine input_zbgc F_abs_chl_diatoms = 2.0_dbl_kind ! scales absorbed radiation for dEdd F_abs_chl_sp = 4.0_dbl_kind F_abs_chl_phaeo = 5.0 - ratio_C2N_proteins = 7.0_dbl_kind ! ratio of C to N in proteins (mol/mol) + ratio_C2N_proteins = 7.0_dbl_kind ! ratio of C to N in proteins (mol/mol) ! z salinity parameters - grid_oS = c5 ! for bottom flux - l_skS = 7.0_dbl_kind ! characteristic diffusive scale (m) + grid_oS = c5 ! for bottom flux + l_skS = 7.0_dbl_kind ! characteristic diffusive scale (m) !----------------------------------------------------------------- ! read from input file @@ -1328,10 +1333,10 @@ subroutine input_zbgc ! broadcast !----------------------------------------------------------------- - call broadcast_scalar(solve_zsal, master_task) - call broadcast_scalar(restart_zsal, master_task) + call broadcast_scalar(solve_zsal, master_task) + call broadcast_scalar(restart_zsal, master_task) call broadcast_scalar(tr_brine, master_task) - call broadcast_scalar(restart_hbrine, master_task) + call broadcast_scalar(restart_hbrine, master_task) call broadcast_scalar(phi_snow, master_task) call broadcast_scalar(grid_oS, master_task) @@ -1349,14 +1354,14 @@ subroutine input_zbgc call broadcast_scalar(tr_bgc_Am, master_task) call broadcast_scalar(tr_bgc_Sil, master_task) call broadcast_scalar(tr_bgc_hum, master_task) - call broadcast_scalar(tr_bgc_DMS, master_task) - call broadcast_scalar(tr_bgc_PON, master_task) - call broadcast_scalar(tr_bgc_DON, master_task) - call broadcast_scalar(tr_bgc_Fe, master_task) + call broadcast_scalar(tr_bgc_DMS, master_task) + call broadcast_scalar(tr_bgc_PON, master_task) + call broadcast_scalar(tr_bgc_DON, master_task) + call broadcast_scalar(tr_bgc_Fe, master_task) call broadcast_scalar(z_tracers, master_task) call broadcast_scalar(tr_zaero, master_task) - call broadcast_scalar(dEdd_algae, master_task) + call broadcast_scalar(dEdd_algae, master_task) call broadcast_scalar(modal_aero, master_task) call broadcast_scalar(optics_file, master_task) call broadcast_scalar(optics_file_fieldname, master_task) @@ -1390,31 +1395,31 @@ subroutine input_zbgc call broadcast_scalar(chlabs_diatoms , master_task) call broadcast_scalar(chlabs_sp , master_task) call broadcast_scalar(chlabs_phaeo , master_task) - call broadcast_scalar(alpha2max_low_diatoms , master_task) + call broadcast_scalar(alpha2max_low_diatoms , master_task) call broadcast_scalar(alpha2max_low_sp , master_task) call broadcast_scalar(alpha2max_low_phaeo , master_task) - call broadcast_scalar(beta2max_diatoms , master_task) - call broadcast_scalar(beta2max_sp , master_task) - call broadcast_scalar(beta2max_phaeo , master_task) - call broadcast_scalar(mu_max_diatoms , master_task) - call broadcast_scalar(mu_max_sp , master_task) - call broadcast_scalar(mu_max_phaeo , master_task) - call broadcast_scalar(grow_Tdep_diatoms, master_task) - call broadcast_scalar(grow_Tdep_sp , master_task) - call broadcast_scalar(grow_Tdep_phaeo , master_task) - call broadcast_scalar(fr_graze_diatoms , master_task) - call broadcast_scalar(fr_graze_sp , master_task) - call broadcast_scalar(fr_graze_phaeo , master_task) - call broadcast_scalar(mort_pre_diatoms , master_task) - call broadcast_scalar(mort_pre_sp , master_task) - call broadcast_scalar(mort_pre_phaeo , master_task) - call broadcast_scalar(mort_Tdep_diatoms, master_task) - call broadcast_scalar(mort_Tdep_sp , master_task) - call 
broadcast_scalar(mort_Tdep_phaeo , master_task) - call broadcast_scalar(k_exude_diatoms , master_task) - call broadcast_scalar(k_exude_sp , master_task) - call broadcast_scalar(k_exude_phaeo , master_task) - call broadcast_scalar(K_Nit_diatoms , master_task) + call broadcast_scalar(beta2max_diatoms , master_task) + call broadcast_scalar(beta2max_sp , master_task) + call broadcast_scalar(beta2max_phaeo , master_task) + call broadcast_scalar(mu_max_diatoms , master_task) + call broadcast_scalar(mu_max_sp , master_task) + call broadcast_scalar(mu_max_phaeo , master_task) + call broadcast_scalar(grow_Tdep_diatoms, master_task) + call broadcast_scalar(grow_Tdep_sp , master_task) + call broadcast_scalar(grow_Tdep_phaeo , master_task) + call broadcast_scalar(fr_graze_diatoms , master_task) + call broadcast_scalar(fr_graze_sp , master_task) + call broadcast_scalar(fr_graze_phaeo , master_task) + call broadcast_scalar(mort_pre_diatoms , master_task) + call broadcast_scalar(mort_pre_sp , master_task) + call broadcast_scalar(mort_pre_phaeo , master_task) + call broadcast_scalar(mort_Tdep_diatoms, master_task) + call broadcast_scalar(mort_Tdep_sp , master_task) + call broadcast_scalar(mort_Tdep_phaeo , master_task) + call broadcast_scalar(k_exude_diatoms , master_task) + call broadcast_scalar(k_exude_sp , master_task) + call broadcast_scalar(k_exude_phaeo , master_task) + call broadcast_scalar(K_Nit_diatoms , master_task) call broadcast_scalar(K_Nit_sp , master_task) call broadcast_scalar(K_Nit_phaeo , master_task) call broadcast_scalar(K_Am_diatoms , master_task) @@ -1430,17 +1435,17 @@ subroutine input_zbgc call broadcast_scalar(kn_bac_protein , master_task) call broadcast_scalar(f_don_Am_protein , master_task) call broadcast_scalar(f_doc_s , master_task) - call broadcast_scalar(f_doc_l , master_task) + call broadcast_scalar(f_doc_l , master_task) call broadcast_scalar(f_exude_s , master_task) call broadcast_scalar(f_exude_l , master_task) - call broadcast_scalar(k_bac_s , master_task) + call broadcast_scalar(k_bac_s , master_task) call broadcast_scalar(k_bac_l , master_task) call broadcast_scalar(T_max , master_task) call broadcast_scalar(fsal , master_task) call broadcast_scalar(op_dep_min , master_task) - call broadcast_scalar(fr_graze_s , master_task) - call broadcast_scalar(fr_graze_e , master_task) - call broadcast_scalar(fr_mort2min , master_task) + call broadcast_scalar(fr_graze_s , master_task) + call broadcast_scalar(fr_graze_e , master_task) + call broadcast_scalar(fr_mort2min , master_task) call broadcast_scalar(fr_dFe , master_task) call broadcast_scalar(k_nitrif , master_task) call broadcast_scalar(t_iron_conv , master_task) @@ -1448,18 +1453,18 @@ subroutine input_zbgc call broadcast_scalar(max_dfe_doc1 , master_task) call broadcast_scalar(fr_resp_s , master_task) call broadcast_scalar(y_sk_DMS , master_task) - call broadcast_scalar(t_sk_conv , master_task) + call broadcast_scalar(t_sk_conv , master_task) call broadcast_scalar(t_sk_ox , master_task) call broadcast_scalar(algaltype_diatoms, master_task) - call broadcast_scalar(algaltype_sp , master_task) - call broadcast_scalar(algaltype_phaeo , master_task) + call broadcast_scalar(algaltype_sp , master_task) + call broadcast_scalar(algaltype_phaeo , master_task) call broadcast_scalar(nitratetype , master_task) call broadcast_scalar(ammoniumtype , master_task) call broadcast_scalar(silicatetype , master_task) - call broadcast_scalar(dmspptype , master_task) - call broadcast_scalar(dmspdtype , master_task) + call 
broadcast_scalar(dmspptype , master_task) + call broadcast_scalar(dmspdtype , master_task) call broadcast_scalar(humtype , master_task) - call broadcast_scalar(doctype_s , master_task) + call broadcast_scalar(doctype_s , master_task) call broadcast_scalar(doctype_l , master_task) call broadcast_scalar(dontype_protein , master_task) call broadcast_scalar(fedtype_1 , master_task) @@ -1479,7 +1484,7 @@ subroutine input_zbgc call broadcast_scalar(F_abs_chl_diatoms , master_task) call broadcast_scalar(F_abs_chl_sp , master_task) call broadcast_scalar(F_abs_chl_phaeo , master_task) - call broadcast_scalar(ratio_C2N_proteins , master_task) + call broadcast_scalar(ratio_C2N_proteins , master_task) !----------------------------------------------------------------- ! zsalinity and brine @@ -1498,7 +1503,7 @@ subroutine input_zbgc write(nu_diag,*) subname,' ERROR: solve_zsal=T but 0 zsalinity tracers' endif abort_flag = 101 - endif + endif if (solve_zsal .and. ((.not. tr_brine) .or. (ktherm /= 1))) then if (my_task == master_task) then @@ -1512,7 +1517,7 @@ subroutine input_zbgc write(nu_diag,*) subname,' ERROR: tr_brine=T but no biology layers compiled' endif abort_flag = 103 - endif + endif !----------------------------------------------------------------- ! biogeochemistry @@ -1547,14 +1552,14 @@ subroutine input_zbgc abort_flag = 107 endif - if (dEdd_algae .AND. trim(shortwave) /= 'dEdd') then + if (dEdd_algae .AND. trim(shortwave) /= 'dEdd') then if (my_task == master_task) then write(nu_diag,*) subname,' ERROR: dEdd_algae = T but shortwave /= dEdd' endif abort_flag = 108 endif - if (dEdd_algae .AND. (.NOT. tr_bgc_N) .AND. (.NOT. tr_zaero)) then + if (dEdd_algae .AND. (.NOT. tr_bgc_N) .AND. (.NOT. tr_zaero)) then if (my_task == master_task) then write(nu_diag,*) subname,' ERROR: need tr_bgc_N or tr_zaero for dEdd_algae' endif @@ -1567,8 +1572,8 @@ subroutine input_zbgc endif abort_flag = 110 endif - - if (modal_aero .AND. trim(shortwave) /= 'dEdd') then + + if (modal_aero .AND. trim(shortwave) /= 'dEdd') then if (my_task == master_task) then write(nu_diag,*) subname,' ERROR: modal_aero = T but shortwave /= dEdd' endif @@ -1813,7 +1818,11 @@ subroutine count_tracers integer (kind=int_kind) :: ntrcr logical (kind=log_kind) :: tr_iage, tr_FY, tr_lvl, tr_pond, tr_aero, tr_fsd logical (kind=log_kind) :: tr_snow +#ifdef UNDEPRECATE_CESMPONDS logical (kind=log_kind) :: tr_iso, tr_pond_cesm, tr_pond_lvl, tr_pond_topo +#else + logical (kind=log_kind) :: tr_iso, tr_pond_lvl, tr_pond_topo +#endif integer (kind=int_kind) :: nt_Tsfc, nt_sice, nt_qice, nt_qsno, nt_iage, nt_FY integer (kind=int_kind) :: nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, nt_aero integer (kind=int_kind) :: nt_fsd, nt_isosno, nt_isoice @@ -1880,7 +1889,7 @@ subroutine count_tracers tr_bgc_N, tr_bgc_C, tr_bgc_chl, & tr_bgc_DON, tr_bgc_Fe, tr_zaero, & tr_bgc_hum - + logical (kind=log_kind) :: & solve_zsal, skl_bgc, z_tracers @@ -1898,7 +1907,11 @@ subroutine count_tracers call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & tr_lvl_out=tr_lvl, tr_aero_out=tr_aero, tr_pond_out=tr_pond, & +#ifdef UNDEPRECATE_CESMPONDS tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & +#else + tr_pond_lvl_out=tr_pond_lvl, & +#endif tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_fsd_out=tr_fsd, & tr_snow_out=tr_snow, tr_iso_out=tr_iso, & tr_bgc_Nit_out=tr_bgc_Nit, tr_bgc_Am_out =tr_bgc_Am, tr_bgc_Sil_out=tr_bgc_Sil, & @@ -1958,7 +1971,7 @@ subroutine count_tracers nt_ipnd = ntrcr ! 
on level-ice ponds (if frzpnd='hlid') endif if (tr_pond_topo) then - ntrcr = ntrcr + 1 ! + ntrcr = ntrcr + 1 ! nt_ipnd = ntrcr ! refrozen pond ice lid thickness endif endif @@ -2001,7 +2014,7 @@ subroutine count_tracers !tcx, modify code so we don't have to reset n_aero here n_aero = 0 !echmod - this is not getting set correctly (overwritten later?) endif - + !----------------------------------------------------------------- ! initialize zbgc tracer indices !----------------------------------------------------------------- @@ -2742,7 +2755,7 @@ subroutine init_zbgc if (skl_bgc .or. z_tracers) then if (tr_bgc_N) then - do mm = 1, n_algae + do mm = 1, n_algae call init_bgc_trcr(nk, nt_fbri, & nt_bgc_N(mm), nlt_bgc_N(mm), & algaltype(mm), nt_depend, & @@ -2762,14 +2775,14 @@ subroutine init_zbgc nt_strata, bio_index) bio_index_o(nlt_bgc_Nit) = icepack_max_algae + 1 endif ! tr_bgc_Nit - + if (tr_bgc_C) then ! ! Algal C is not yet distinct from algal N ! * Reqires exudation and/or changing C:N ratios ! for implementation ! - ! do mm = 1,n_algae + ! do mm = 1,n_algae ! call init_bgc_trcr(nk, nt_fbri, & ! nt_bgc_C(mm), nlt_bgc_C(mm), & ! algaltype(mm), nt_depend, & @@ -2819,7 +2832,7 @@ subroutine init_zbgc trcr_base, n_trcr_strata, & nt_strata, bio_index) bio_index_o(nlt_bgc_Am) = 2*icepack_max_algae + icepack_max_doc + icepack_max_dic + 2 - endif + endif if (tr_bgc_Sil) then call init_bgc_trcr(nk, nt_fbri, & nt_bgc_Sil, nlt_bgc_Sil, & @@ -2828,7 +2841,7 @@ subroutine init_zbgc trcr_base, n_trcr_strata, & nt_strata, bio_index) bio_index_o(nlt_bgc_Sil) = 2*icepack_max_algae + icepack_max_doc + icepack_max_dic + 3 - endif + endif if (tr_bgc_DMS) then ! all together call init_bgc_trcr(nk, nt_fbri, & nt_bgc_DMSPp, nlt_bgc_DMSPp, & @@ -2853,7 +2866,7 @@ subroutine init_zbgc trcr_base, n_trcr_strata, & nt_strata, bio_index) bio_index_o(nlt_bgc_DMS) = 2*icepack_max_algae + icepack_max_doc + icepack_max_dic + 6 - endif + endif if (tr_bgc_PON) then call init_bgc_trcr(nk, nt_fbri, & nt_bgc_PON, nlt_bgc_PON, & @@ -2895,8 +2908,8 @@ subroutine init_zbgc bio_index_o(nlt_bgc_Fep(mm)) = 2*icepack_max_algae + icepack_max_doc + icepack_max_dic & + icepack_max_don + icepack_max_fe + 7 + mm enddo ! mm - endif ! tr_bgc_Fe - + endif ! tr_bgc_Fe + if (tr_bgc_hum) then call init_bgc_trcr(nk, nt_fbri, & nt_bgc_hum, nlt_bgc_hum, & @@ -2905,7 +2918,7 @@ subroutine init_zbgc trcr_base, n_trcr_strata, & nt_strata, bio_index) bio_index_o(nlt_bgc_hum) = 2*icepack_max_algae + icepack_max_doc + 8 + icepack_max_dic & - + icepack_max_don + 2*icepack_max_fe + icepack_max_aero + + icepack_max_don + 2*icepack_max_fe + icepack_max_aero endif endif ! skl_bgc or z_tracers @@ -2929,7 +2942,7 @@ subroutine init_zbgc ! and 2 snow layers (snow surface + interior) nk = nblyr + 1 - nt_depend = 2 + nt_fbri + ntd + nt_depend = 2 + nt_fbri + ntd ! z layer aerosols if (tr_zaero) then @@ -2950,15 +2963,15 @@ subroutine init_zbgc endif ! tr_zaero if (nbtrcr > 0) then - do k = 1,nbtrcr - zbgc_frac_init(k) = c1 - trcr_depend(nt_zbgc_frac+k-1) = 2+nt_fbri + do k = 1,nbtrcr + zbgc_frac_init(k) = c1 + trcr_depend(nt_zbgc_frac+k-1) = 2+nt_fbri trcr_base(nt_zbgc_frac+ k - 1,1) = c0 trcr_base(nt_zbgc_frac+ k - 1,2) = c1 trcr_base(nt_zbgc_frac+ k - 1,3) = c0 - n_trcr_strata(nt_zbgc_frac+ k - 1)= 1 + n_trcr_strata(nt_zbgc_frac+ k - 1)= 1 nt_strata(nt_zbgc_frac+ k - 1,1) = nt_fbri - nt_strata(nt_zbgc_frac+ k - 1,2) = 0 + nt_strata(nt_zbgc_frac+ k - 1,2) = 0 tau_ret(k) = c1 tau_rel(k) = c1 if (bgc_tracer_type(k) >= c0 .and. 
bgc_tracer_type(k) < p5) then @@ -2986,7 +2999,7 @@ subroutine init_zbgc do k = 1, nbtrcr zbgc_init_frac(k) = frazil_scav if (bgc_tracer_type(k) < c0) zbgc_init_frac(k) = initbio_frac - enddo + enddo !----------------------------------------------------------------- ! set values in icepack @@ -3003,7 +3016,7 @@ subroutine init_zbgc !----------------------------------------------------------------- ! final consistency checks - !----------------------------------------------------------------- + !----------------------------------------------------------------- if (nbtrcr > icepack_max_nbtrcr) then write (nu_diag,*) subname,' ' write (nu_diag,*) subname,'nbtrcr > icepack_max_nbtrcr' @@ -3024,13 +3037,13 @@ subroutine init_zbgc write(nu_diag,1020) ' number of bio tracers = ', nbtrcr write(nu_diag,1020) ' number of Isw tracers = ', nbtrcr_sw - + elseif (z_tracers) then - + write(nu_diag,1020) ' number of ztracers = ', nbtrcr write(nu_diag,1020) ' number of Isw tracers = ', nbtrcr_sw write(nu_diag,1000) ' initbio_frac = ', initbio_frac - write(nu_diag,1000) ' frazil_scav = ', frazil_scav + write(nu_diag,1000) ' frazil_scav = ', frazil_scav endif ! skl_bgc or solve_bgc endif ! master_task @@ -3079,7 +3092,7 @@ subroutine init_bgc_trcr(nk, nt_fbri, & integer (kind=int_kind) :: & k , & ! loop index n_strata , & ! temporary values - nt_strata1, & ! + nt_strata1, & ! nt_strata2 real (kind=dbl_kind) :: & @@ -3092,7 +3105,7 @@ subroutine init_bgc_trcr(nk, nt_fbri, & !-------- bgc_tracer_type(nlt_bgc) = bgctype - + if (nk > 1) then ! include vertical bgc in snow do k = nk, nk+1 trcr_depend (nt_bgc + k ) = 2 ! snow volume @@ -3104,10 +3117,10 @@ subroutine init_bgc_trcr(nk, nt_fbri, & nt_strata (nt_bgc + k,2) = 0 enddo - trcr_base1 = c0 - trcr_base2 = c1 + trcr_base1 = c0 + trcr_base2 = c1 trcr_base3 = c0 - n_strata = 1 + n_strata = 1 nt_strata1 = nt_fbri nt_strata2 = 0 else ! nk = 1 diff --git a/cicecore/shared/ice_restart_column.F90 b/cicecore/shared/ice_restart_column.F90 index 6ce393190..a15f9d2c1 100644 --- a/cicecore/shared/ice_restart_column.F90 +++ b/cicecore/shared/ice_restart_column.F90 @@ -29,7 +29,9 @@ module ice_restart_column public :: write_restart_age, read_restart_age, & write_restart_FY, read_restart_FY, & write_restart_lvl, read_restart_lvl, & +#ifdef UNDEPRECATE_CESMPONDS write_restart_pond_cesm, read_restart_pond_cesm, & +#endif write_restart_pond_lvl, read_restart_pond_lvl, & write_restart_pond_topo, read_restart_pond_topo, & write_restart_snow, read_restart_snow, & @@ -39,18 +41,20 @@ module ice_restart_column write_restart_bgc, read_restart_bgc, & write_restart_hbrine, read_restart_hbrine - logical (kind=log_kind), public :: & + logical (kind=log_kind), public :: & restart_age , & ! if .true., read age tracer restart file restart_FY , & ! if .true., read FY tracer restart file restart_lvl , & ! if .true., read lvl tracer restart file +#ifdef UNDEPRECATE_CESMPONDS restart_pond_cesm, & ! if .true., read meltponds restart file +#endif restart_pond_lvl , & ! if .true., read meltponds restart file restart_pond_topo, & ! if .true., read meltponds restart file restart_snow , & ! if .true., read snow tracer restart file restart_fsd , & ! if .true., read floe size restart file restart_iso , & ! if .true., read isotope tracer restart file restart_aero , & ! if .true., read aerosol tracer restart file - restart_zsal , & ! if .true., read Salinity from restart file + restart_zsal , & ! if .true., read Salinity from restart file restart_hbrine , & ! 
if .true., read hbrine from restart file restart_bgc ! if .true., read bgc restart file @@ -256,6 +260,7 @@ subroutine read_restart_lvl() end subroutine read_restart_lvl +#ifdef UNDEPRECATE_CESMPONDS !======================================================================= ! ! Dumps all values needed for restarting @@ -297,7 +302,7 @@ end subroutine write_restart_pond_cesm subroutine read_restart_pond_cesm() - use ice_fileunits, only: nu_restart_pond + use ice_fileunits, only: nu_restart_pond use ice_state, only: trcrn ! local variables @@ -322,7 +327,7 @@ subroutine read_restart_pond_cesm() 'hpnd',ncat,diag,field_loc_center,field_type_scalar) end subroutine read_restart_pond_cesm - +#endif !======================================================================= ! ! Dumps all values needed for restarting @@ -374,7 +379,7 @@ end subroutine write_restart_pond_lvl subroutine read_restart_pond_lvl() use ice_arrays_column, only: dhsn, ffracn - use ice_fileunits, only: nu_restart_pond + use ice_fileunits, only: nu_restart_pond use ice_flux, only: fsnow use ice_state, only: trcrn @@ -454,7 +459,7 @@ end subroutine write_restart_pond_topo subroutine read_restart_pond_topo() - use ice_fileunits, only: nu_restart_pond + use ice_fileunits, only: nu_restart_pond use ice_state, only: trcrn ! local variables @@ -497,7 +502,7 @@ subroutine write_restart_snow() logical (kind=log_kind) :: diag integer (kind=int_kind) :: nt_smice, nt_smliq, nt_rhos, nt_rsnw, k - character*3 ck + character(len=3) :: ck character(len=*),parameter :: subname='(write_restart_snow)' call icepack_query_tracer_indices(nt_smice_out=nt_smice, & @@ -539,7 +544,7 @@ subroutine read_restart_snow() logical (kind=log_kind) :: & diag integer (kind=int_kind) :: nt_smice, nt_smliq, nt_rhos, nt_rsnw, k - character*3 ck + character(len=3) :: ck character(len=*),parameter :: subname='(read_restart_snow)' call icepack_query_tracer_indices(nt_smice_out=nt_smice, & @@ -584,7 +589,7 @@ subroutine write_restart_fsd() logical (kind=log_kind) :: diag integer (kind=int_kind) :: nt_fsd, k - character*3 ck + character(len=3) :: ck character(len=*),parameter :: subname='(write_restart_fsd)' call icepack_query_tracer_indices(nt_fsd_out=nt_fsd) @@ -619,7 +624,7 @@ subroutine read_restart_fsd() logical (kind=log_kind) :: & diag integer (kind=int_kind) :: nt_fsd, k - character*3 ck + character(len=3) :: ck character(len=*),parameter :: subname='(read_restart_fsd)' call icepack_query_tracer_indices(nt_fsd_out=nt_fsd) @@ -655,7 +660,7 @@ subroutine write_restart_iso() logical (kind=log_kind) :: diag integer (kind=int_kind) :: nt_isosno, nt_isoice, k - character*3 ck + character(len=3) :: ck character(len=*),parameter :: subname='(write_restart_iso)' call icepack_query_tracer_indices(nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) @@ -697,7 +702,7 @@ subroutine read_restart_iso() logical (kind=log_kind) :: & diag integer (kind=int_kind) :: nt_isosno, nt_isoice, k - character*3 ck + character(len=3) :: ck character(len=*),parameter :: subname='(read_restart_iso)' call icepack_query_tracer_indices(nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) @@ -880,14 +885,14 @@ subroutine read_restart_hbrine() !$OMP PARALLEL DO PRIVATE(iblk,i,j,n,ilo,ihi,jlo,jhi,this_block) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi do j = jlo, jhi - do i = ilo, ihi + do i = ilo, ihi do n = 1, ncat if (first_ice_real(i,j,n,iblk) >= 
p5) then first_ice (i,j,n,iblk) = .true. @@ -895,7 +900,7 @@ subroutine read_restart_hbrine() first_ice (i,j,n,iblk) = .false. endif enddo ! ncat - enddo ! i + enddo ! i enddo ! j enddo ! iblk !$OMP END PARALLEL DO @@ -941,14 +946,14 @@ subroutine write_restart_hbrine() !$OMP PARALLEL DO PRIVATE(iblk,i,j,n,ilo,ihi,jlo,jhi,this_block) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi do j = jlo, jhi - do i = ilo, ihi + do i = ilo, ihi do n = 1, ncat ! zero out first_ice over land if (tmask(i,j,iblk) .and. first_ice (i,j,n,iblk)) then @@ -987,7 +992,7 @@ subroutine write_restart_bgc() doc, don, dic, fed, fep, zaeros, hum use ice_grid, only: tmask use ice_state, only: trcrn - use ice_flux, only: sss + use ice_flux, only: sss use ice_restart, only: write_restart_field ! local variables @@ -1006,27 +1011,27 @@ subroutine write_restart_bgc() nt_bgc_DMSPp, nt_bgc_Nit, nt_bgc_Sil, & nt_bgc_PON, nt_zbgc_frac, nt_bgc_hum, nbtrcr - integer (kind=int_kind), dimension(icepack_max_algae) :: & - nt_bgc_N , & ! diatoms, phaeocystis, pico/small - nt_bgc_C , & ! diatoms, phaeocystis, pico/small - nt_bgc_chl ! diatoms, phaeocystis, pico/small + integer (kind=int_kind), dimension(icepack_max_algae) :: & + nt_bgc_N , & ! diatoms, phaeocystis, pico/small + nt_bgc_C , & ! diatoms, phaeocystis, pico/small + nt_bgc_chl ! diatoms, phaeocystis, pico/small - integer (kind=int_kind), dimension(icepack_max_doc) :: & + integer (kind=int_kind), dimension(icepack_max_doc) :: & nt_bgc_DOC ! dissolved organic carbon - integer (kind=int_kind), dimension(icepack_max_don) :: & + integer (kind=int_kind), dimension(icepack_max_don) :: & nt_bgc_DON ! dissolved organic nitrogen - integer (kind=int_kind), dimension(icepack_max_dic) :: & + integer (kind=int_kind), dimension(icepack_max_dic) :: & nt_bgc_DIC ! dissolved inorganic carbon - integer (kind=int_kind), dimension(icepack_max_fe) :: & + integer (kind=int_kind), dimension(icepack_max_fe) :: & nt_bgc_Fed, & ! dissolved iron nt_bgc_Fep ! particulate iron - integer (kind=int_kind), dimension(icepack_max_aero) :: & + integer (kind=int_kind), dimension(icepack_max_aero) :: & nt_zaero ! black carbon and other aerosols - + logical (kind=log_kind) :: tr_bgc_Nit, tr_bgc_Am, tr_bgc_Sil,& tr_bgc_DMS, tr_bgc_PON, tr_bgc_N, tr_bgc_C, & tr_bgc_DON, tr_bgc_Fe, tr_zaero , tr_bgc_chl, & @@ -1097,19 +1102,19 @@ subroutine write_restart_bgc() !----------------------------------------------------------------- ! 
Salinity and extras !----------------------------------------------------------------- - if (solve_zsal) then + if (solve_zsal) then do k = 1,nblyr write(nchar,'(i3.3)') k call write_restart_field(nu_dump_bgc,0,trcrn(:,:,nt_bgc_S+k-1,:,:),'ruf8', & 'zSalinity'//trim(nchar),ncat,diag) enddo - + call write_restart_field(nu_dump_bgc,0,sss,'ruf8','sss',1,diag) !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -1183,7 +1188,7 @@ subroutine write_restart_bgc() if (tr_bgc_PON) & call write_restart_field(nu_dump_bgc,0,trcrn(:,:,nt_bgc_PON,:,:), & 'ruf8','bgc_PON',ncat,diag) - + if (tr_bgc_DON) then do k = 1, n_don write(nchar,'(i3.3)') k @@ -1192,19 +1197,19 @@ subroutine write_restart_bgc() enddo endif if (tr_bgc_Fe ) then - do k = 1, n_fed + do k = 1, n_fed write(nchar,'(i3.3)') k call write_restart_field(nu_dump_bgc,0,trcrn(:,:,nt_bgc_Fed (k),:,:), & 'ruf8','bgc_Fed'//trim(nchar),ncat,diag) enddo - do k = 1, n_fep + do k = 1, n_fep write(nchar,'(i3.3)') k call write_restart_field(nu_dump_bgc,0,trcrn(:,:,nt_bgc_Fep (k),:,:), & 'ruf8','bgc_Fep'//trim(nchar),ncat,diag) enddo endif - else + else !----------------------------------------------------------------- ! Z layer BGC @@ -1375,7 +1380,7 @@ subroutine write_restart_bgc() write(nchar,'(i3.3)') k call write_restart_field(nu_dump_bgc,0,dic(:,:,k,:),'ruf8','dic'//trim(nchar),1,diag) enddo !k - endif + endif if (tr_bgc_Nit) & call write_restart_field(nu_dump_bgc,0,nit, 'ruf8','nit', 1,diag) if (tr_bgc_Am) & @@ -1428,7 +1433,7 @@ subroutine read_restart_bgc() use ice_domain_size, only: ncat, n_algae, n_doc, n_dic,& n_don, n_zaero, n_fed, n_fep use ice_fileunits, only: nu_restart_bgc - use ice_flux, only: sss + use ice_flux, only: sss use ice_flux_bgc, only: nit, amm, sil, dmsp, dms, algalN, & doc, don, dic, fed, fep, zaeros, hum use ice_state, only: trcrn @@ -1451,27 +1456,27 @@ subroutine read_restart_bgc() nt_bgc_DMSPp, nt_bgc_Nit, nt_bgc_Sil, & nt_bgc_PON, nt_zbgc_frac, nt_bgc_hum, nbtrcr - integer (kind=int_kind), dimension(icepack_max_algae) :: & - nt_bgc_N , & ! diatoms, phaeocystis, pico/small - nt_bgc_C , & ! diatoms, phaeocystis, pico/small - nt_bgc_chl ! diatoms, phaeocystis, pico/small + integer (kind=int_kind), dimension(icepack_max_algae) :: & + nt_bgc_N , & ! diatoms, phaeocystis, pico/small + nt_bgc_C , & ! diatoms, phaeocystis, pico/small + nt_bgc_chl ! diatoms, phaeocystis, pico/small - integer (kind=int_kind), dimension(icepack_max_doc) :: & + integer (kind=int_kind), dimension(icepack_max_doc) :: & nt_bgc_DOC ! dissolved organic carbon - integer (kind=int_kind), dimension(icepack_max_don) :: & + integer (kind=int_kind), dimension(icepack_max_don) :: & nt_bgc_DON ! dissolved organic nitrogen - integer (kind=int_kind), dimension(icepack_max_dic) :: & + integer (kind=int_kind), dimension(icepack_max_dic) :: & nt_bgc_DIC ! dissolved inorganic carbon - integer (kind=int_kind), dimension(icepack_max_fe) :: & + integer (kind=int_kind), dimension(icepack_max_fe) :: & nt_bgc_Fed, & ! dissolved iron nt_bgc_Fep ! particulate iron - integer (kind=int_kind), dimension(icepack_max_aero) :: & + integer (kind=int_kind), dimension(icepack_max_aero) :: & nt_zaero ! 
black carbon and other aerosols - + logical (kind=log_kind) :: tr_bgc_Nit, tr_bgc_Am, tr_bgc_Sil,& tr_bgc_DMS, tr_bgc_PON, tr_bgc_N, tr_bgc_C, & tr_bgc_DON, tr_bgc_Fe, tr_zaero , tr_bgc_chl, & @@ -1509,7 +1514,7 @@ subroutine read_restart_bgc() ! Salinity and extras !----------------------------------------------------------------- - if (restart_zsal) then + if (restart_zsal) then if (my_task == master_task) write(nu_diag,*) subname,'zSalinity restart' do k = 1,nblyr @@ -1517,21 +1522,21 @@ subroutine read_restart_bgc() call read_restart_field(nu_restart_bgc,0,trcrn(:,:,nt_bgc_S+k-1,:,:),'ruf8', & 'zSalinity'//trim(nchar),ncat,diag,field_loc_center,field_type_scalar) enddo - + if (my_task == master_task) write(nu_diag,*) subname,'sea surface salinity' call read_restart_field(nu_restart_bgc,0,sss,'ruf8','sss',1,diag) call read_restart_field(nu_restart_bgc,0,Rayleigh_real,'ruf8','Rayleigh',1,diag) !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi do j = jlo, jhi - do i = ilo, ihi + do i = ilo, ihi if (Rayleigh_real (i,j,iblk) .GE. c1) then Rayleigh_criteria (i,j,iblk) = .true. elseif (Rayleigh_real (i,j,iblk) < c1) then @@ -1613,13 +1618,13 @@ subroutine read_restart_bgc() enddo endif if (tr_bgc_Fe) then - do k = 1, n_fed + do k = 1, n_fed write(nchar,'(i3.3)') k call read_restart_field(nu_restart_bgc,0, & trcrn(:,:,nt_bgc_Fed (k),:,:), & 'ruf8','bgc_Fed'//trim(nchar),ncat,diag) enddo - do k = 1, n_fep + do k = 1, n_fep write(nchar,'(i3.3)') k call read_restart_field(nu_restart_bgc,0, & trcrn(:,:,nt_bgc_Fep (k),:,:), & @@ -1857,7 +1862,7 @@ subroutine read_restart_bgc() enddo !k endif endif ! restart_bgc - + end subroutine read_restart_bgc !======================================================================= diff --git a/cicecore/shared/ice_restart_shared.F90 b/cicecore/shared/ice_restart_shared.F90 index 6578ef3ad..7c178fec0 100644 --- a/cicecore/shared/ice_restart_shared.F90 +++ b/cicecore/shared/ice_restart_shared.F90 @@ -41,7 +41,7 @@ module ice_restart_shared integer function lenstr(label) - character*(*) label + character(len=*) :: label character(len=*),parameter :: subname='(lenstr)' diff --git a/cicecore/shared/ice_spacecurve.F90 b/cicecore/shared/ice_spacecurve.F90 index 931b2312b..205c50e77 100644 --- a/cicecore/shared/ice_spacecurve.F90 +++ b/cicecore/shared/ice_spacecurve.F90 @@ -4,8 +4,8 @@ module ice_spacecurve ! !DESCRIPTION: -! This module contains routines necessary to -! create space-filling curves. +! This module contains routines necessary to +! create space-filling curves. ! ! !REVISION HISTORY: ! @@ -22,7 +22,7 @@ module ice_spacecurve implicit none private -! !PUBLIC TYPES: +! !PUBLIC TYPES: type, public :: factor_t integer(int_kind) :: numfact ! The # of factors for a value @@ -30,7 +30,7 @@ module ice_spacecurve integer(int_kind), dimension(:), pointer :: used end type -! !PUBLIC MEMBER FUNCTIONS: +! !PUBLIC MEMBER FUNCTIONS: public :: GenSpaceCurve @@ -53,11 +53,10 @@ module ice_spacecurve FindandMark integer(int_kind), dimension(:,:), allocatable :: & - dir, &! direction to move along each level - ordered ! the ordering + ordered ! the ordering integer(int_kind), dimension(:), allocatable :: & pos ! position along each of the axes - + integer(int_kind) :: & maxdim, &! dimensionality of entire space vcnt ! 
visitation count @@ -68,7 +67,7 @@ module ice_spacecurve !EOC !*********************************************************************** -contains +contains !*********************************************************************** !BOP @@ -79,19 +78,19 @@ recursive function Cinco(l,type,ma,md,ja,jd) result(ierr) ! !DESCRIPTION: ! This subroutine implements a Cinco space-filling curve. -! Cinco curves connect a Nb x Nb block of points where +! Cinco curves connect a Nb x Nb block of points where ! -! Nb = 5^p +! Nb = 5^p ! ! !REVISION HISTORY: ! same as module ! -! !INPUT PARAMETERS +! !INPUT PARAMETERS integer(int_kind), intent(in) :: & - l, & ! level of the space-filling curve + l, & ! level of the space-filling curve type, & ! type of SFC curve ma, & ! Major axis [0,1] md, & ! direction of major axis [-1,1] @@ -115,8 +114,8 @@ recursive function Cinco(l,type,ma,md,ja,jd) result(ierr) lmd, &! local major direction (next level) lja, &! local joiner axis (next level) ljd, &! local joiner direction (next level) - ltype, &! type of SFC on next level - ll ! next level down + ltype, &! type of SFC on next level + ll ! next level down character(len=*),parameter :: subname='(Cinco)' @@ -589,8 +588,8 @@ end function Cinco recursive function PeanoM(l,type,ma,md,ja,jd) result(ierr) ! !DESCRIPTION: -! This function implements a meandering Peano -! space-filling curve. A meandering Peano curve +! This function implements a meandering Peano +! space-filling curve. A meandering Peano curve ! connects a Nb x Nb block of points where ! ! Nb = 3^p @@ -947,8 +946,8 @@ end function hilbert function IncrementCurve(ja,jd) result(ierr) ! !DESCRIPTION: -! This function creates the curve which is stored in the -! the ordered array. The curve is implemented by +! This function creates the curve which is stored in the +! the ordered array. The curve is implemented by ! incrementing the curve in the direction [jd] of axis [ja]. ! ! !REVISION HISTORY: @@ -990,7 +989,7 @@ end function IncrementCurve function log2( n) ! !DESCRIPTION: -! This function calculates the log2 of its integer +! This function calculates the log2 of its integer ! input. ! ! !REVISION HISTORY: @@ -999,8 +998,8 @@ function log2( n) ! !INPUT PARAMETERS: integer(int_kind), intent(in) :: n ! integer value to find the log2 - -! !OUTPUT PARAMETERS: + +! !OUTPUT PARAMETERS: integer(int_kind) :: log2 @@ -1030,10 +1029,10 @@ function log2( n) else ! n > 1 log2 = 1 tmp =n - do while (tmp > 1 .and. tmp/2 .ne. 1) + do while (tmp > 1 .and. tmp/2 .ne. 1) tmp=tmp/2 log2=log2+1 - enddo + enddo endif !EOP @@ -1048,9 +1047,9 @@ end function log2 ! !INTERFACE: function IsLoadBalanced(nelem,npart) - + ! !DESCRIPTION: -! This function determines if we can create +! This function determines if we can create ! a perfectly load-balanced partitioning. ! ! !REVISION HISTORY: @@ -1063,7 +1062,7 @@ function IsLoadBalanced(nelem,npart) npart ! size of partition ! !OUTPUT PARAMETERS: - logical :: IsLoadBalanced ! .TRUE. if a perfectly load balanced + logical :: IsLoadBalanced ! .TRUE. if a perfectly load balanced ! partition is possible !EOP !BOC @@ -1080,7 +1079,7 @@ function IsLoadBalanced(nelem,npart) !----------------------------------------------------------------------- tmp1 = nelem/npart - if (npart*tmp1 == nelem ) then + if (npart*tmp1 == nelem ) then IsLoadBalanced=.TRUE. else IsLoadBalanced=.FALSE. 
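
For reference, the generators documented above tile square blocks of edge length Nb = 2^p (hilbert), 3^p (PeanoM) and 5^p (Cinco), so GenSpaceCurve can only decompose grids whose side lengths factor completely into 2, 3 and 5 — the property the Factor and IsFactorable routines in the following hunks test. A minimal standalone sketch of that check (illustrative only; the program and function names below are invented here and are not part of CICE):

! Standalone sketch: a grid dimension is usable by GenSpaceCurve
! only if repeated division by 2, 3 and 5 reduces it to 1
! (hypothetical names; mirrors the logic of Factor/IsFactorable).
program sfc_factor_check
  implicit none
  integer :: n

  do n = 1, 12
     print '(a,i3,a,l2)', ' n =', n, '  factors into 2/3/5 only:', only235(n)
  end do

contains

  function only235(m) result(ok)
    integer, intent(in) :: m
    logical :: ok
    integer :: tmp
    tmp = m
    do while (mod(tmp,2) == 0)   ! strip factors of 2 (hilbert blocks)
       tmp = tmp/2
    end do
    do while (mod(tmp,3) == 0)   ! strip factors of 3 (PeanoM blocks)
       tmp = tmp/3
    end do
    do while (mod(tmp,5) == 0)   ! strip factors of 5 (Cinco blocks)
       tmp = tmp/5
    end do
    ok = (tmp == 1)              ! nothing left over => fully factorable
  end function only235

end program sfc_factor_check

IsLoadBalanced, just above, is the analogous divisibility test at the partition level: a decomposition is perfectly balanced exactly when npart divides nelem.
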
@@ -1129,7 +1128,7 @@ function GenCurve(l,type,ma,md,ja,jd) result(ierr) !----------------------------------------------------------------------- !------------------------------------------------- - ! create the space-filling curve on the next level + ! create the space-filling curve on the next level !------------------------------------------------- if(type == 2) then @@ -1140,7 +1139,7 @@ function GenCurve(l,type,ma,md,ja,jd) result(ierr) if (f3 .and. my_task == master_task) write(nu_diag,*) subname,' calling PeanoM (3)' ierr = PeanoM(l,type,ma,md,ja,jd) f3 = .false. - elseif ( type == 5) then + elseif ( type == 5) then if (f5 .and. my_task == master_task) write(nu_diag,*) subname,' calling Cinco (5)' ierr = Cinco(l,type,ma,md,ja,jd) f5 = .false. @@ -1263,7 +1262,7 @@ end subroutine PrintFactor function Factor(num) result(res) ! !DESCRIPTION: -! This function factors the input value num into a +! This function factors the input value num into a ! product of 2,3, and 5. ! ! !REVISION HISTORY: @@ -1350,8 +1349,8 @@ function Factor(num) result(res) enddo !------------------------------------ - ! make sure that the input value - ! only contains factors of 2,3,and 5 + ! make sure that the input value + ! only contains factors of 2,3,and 5 !------------------------------------ tmp=1 do i=1,n @@ -1373,10 +1372,10 @@ end function Factor ! !INTERFACE: function IsFactorable(n) - + ! !DESCRIPTION: ! This function determines if we can factor -! n into 2,3,and 5. +! n into 2,3,and 5. ! ! !REVISION HISTORY: ! same as module @@ -1420,7 +1419,7 @@ end function IsFactorable subroutine map(l) ! !DESCRIPTION: -! Interface routine between internal subroutines and public +! Interface routine between internal subroutines and public ! subroutines. ! ! !REVISION HISTORY: @@ -1471,7 +1470,7 @@ subroutine PrintCurve(Mesh) ! !DESCRIPTION: -! This subroutine prints the several low order +! This subroutine prints the several low order ! space-filling curves in an easy to read format ! ! !REVISION HISTORY: @@ -1693,7 +1692,7 @@ end subroutine PrintCurve subroutine GenSpaceCurve(Mesh) ! !DESCRIPTION: -! This subroutine is the public interface into the +! This subroutine is the public interface into the ! space-filling curve functionality ! ! !REVISION HISTORY: @@ -1717,7 +1716,7 @@ subroutine GenSpaceCurve(Mesh) dim ! dimension of SFC... currently limited to 2D integer(int_kind) :: gridsize ! number of points on a side - + character(len=*),parameter :: subname='(GenSpaceCurve)' !----------------------------------------------------------------------- @@ -1743,19 +1742,19 @@ subroutine GenSpaceCurve(Mesh) ! Setup the working arrays for the traversal !-------------------------------------------- allocate(pos(0:dim-1)) - + !----------------------------------------------------- ! 
The array ordered will contain the visitation order !----------------------------------------------------- ordered(:,:) = 0 - call map(level) + call map(level) Mesh(:,:) = ordered(:,:) deallocate(pos,ordered) - end subroutine GenSpaceCurve + end subroutine GenSpaceCurve !EOC !----------------------------------------------------------------------- diff --git a/cicecore/version.txt b/cicecore/version.txt index 9e5f9f3e1..154cda3d7 100644 --- a/cicecore/version.txt +++ b/cicecore/version.txt @@ -1 +1 @@ -CICE 6.3.1 +CICE 6.4.0 diff --git a/configuration/scripts/Makefile b/configuration/scripts/Makefile index 0322513d2..a2f17256f 100644 --- a/configuration/scripts/Makefile +++ b/configuration/scripts/Makefile @@ -74,7 +74,7 @@ AR := ar .SUFFIXES: -.PHONY: all cice libcice targets target db_files db_flags clean realclean helloworld calchk sumchk bcstchk gridavgchk +.PHONY: all cice libcice targets target db_files db_flags clean realclean helloworld calchk sumchk bcstchk gridavgchk optargs all: $(EXEC) cice: $(EXEC) @@ -93,7 +93,7 @@ targets: @echo " " @echo "Supported Makefile Targets are: cice, libcice, makdep, depends, clean, realclean" @echo " Diagnostics: targets, db_files, db_flags" - @echo " Unit Tests : helloworld, calchk, sumchk, bcstchk, gridavgchk" + @echo " Unit Tests : helloworld, calchk, sumchk, bcstchk, gridavgchk, optargs" target: targets db_files: @@ -157,6 +157,10 @@ HWOBJS := helloworld.o helloworld: $(HWOBJS) $(LD) -o $(EXEC) $(LDFLAGS) $(HWOBJS) $(ULIBS) $(SLIBS) +OAOBJS := optargs.o optargs_subs.o +optargs: $(OAOBJS) + $(LD) -o $(EXEC) $(LDFLAGS) $(OAOBJS) $(ULIBS) $(SLIBS) + #------------------------------------------------------------------------------- # build rules: MACFILE, cmd-line, or env vars must provide the needed macros #------------------------------------------------------------------------------- diff --git a/configuration/scripts/cice.batch.csh b/configuration/scripts/cice.batch.csh index 43ce00010..fbe172f51 100755 --- a/configuration/scripts/cice.batch.csh +++ b/configuration/scripts/cice.batch.csh @@ -11,43 +11,7 @@ endif set jobfile = $1 -set ntasks = ${ICE_NTASKS} -set nthrds = ${ICE_NTHRDS} -set maxtpn = ${ICE_MACHINE_TPNODE} -set acct = ${ICE_ACCOUNT} - -@ ncores = ${ntasks} * ${nthrds} -@ taskpernode = ${maxtpn} / $nthrds -if (${taskpernode} == 0) set taskpernode = 1 -@ nnodes = ${ntasks} / ${taskpernode} -if (${nnodes} * ${taskpernode} < ${ntasks}) @ nnodes = $nnodes + 1 -set taskpernodelimit = ${taskpernode} -if (${taskpernodelimit} > ${ntasks}) set taskpernodelimit = ${ntasks} -@ corespernode = ${taskpernodelimit} * ${nthrds} - -set ptile = $taskpernode -if ($ptile > ${maxtpn} / 2) @ ptile = ${maxtpn} / 2 - -set runlength = ${ICE_RUNLENGTH} -if ($?ICE_MACHINE_MAXRUNLENGTH) then - if (${runlength} > ${ICE_MACHINE_MAXRUNLENGTH}) then - set runlength = ${ICE_MACHINE_MAXRUNLENGTH} - endif -endif - -set queue = "${ICE_QUEUE}" -set batchtime = "00:15:00" -if (${runlength} == 0) set batchtime = "00:29:00" -if (${runlength} == 1) set batchtime = "00:59:00" -if (${runlength} == 2) set batchtime = "2:00:00" -if (${runlength} == 3) set batchtime = "3:00:00" -if (${runlength} == 4) set batchtime = "4:00:00" -if (${runlength} == 5) set batchtime = "5:00:00" -if (${runlength} == 6) set batchtime = "6:00:00" -if (${runlength} == 7) set batchtime = "7:00:00" -if (${runlength} >= 8) set batchtime = "8:00:00" - -set shortcase = `echo ${ICE_CASENAME} | cut -c1-15` +source ${ICE_SCRIPTS}/setup_machparams.csh #========================================== @@ -284,7 +248,7 
@@ cat >> ${jobfile} << EOFB #SBATCH --nodes ${nnodes} #SBATCH --ntasks ${ntasks} #SBATCH --cpus-per-task ${nthrds} -#SBATCH --mem-per-cpu=5G +#SBATCH --mem-per-cpu=${batchmem}G #SBATCH --comment="image=eccc/eccc_all_default_ubuntu-18.04-amd64_latest" EOFB diff --git a/configuration/scripts/cice.launch.csh b/configuration/scripts/cice.launch.csh index a4b6ca37d..bc9ff2b99 100755 --- a/configuration/scripts/cice.launch.csh +++ b/configuration/scripts/cice.launch.csh @@ -8,18 +8,7 @@ echo "running cice.launch.csh" set jobfile = $1 -set ntasks = ${ICE_NTASKS} -set nthrds = ${ICE_NTHRDS} -set maxtpn = ${ICE_MACHINE_TPNODE} - -@ ncores = ${ntasks} * ${nthrds} -@ taskpernode = ${maxtpn} / $nthrds -if (${taskpernode} == 0) set taskpernode = 1 -@ nnodes = ${ntasks} / ${taskpernode} -if (${nnodes} * ${taskpernode} < ${ntasks}) @ nnodes = $nnodes + 1 -set taskpernodelimit = ${taskpernode} -if (${taskpernodelimit} > ${ntasks}) set taskpernodelimit = ${ntasks} -@ corespernode = ${taskpernodelimit} * ${nthrds} +source ${ICE_SCRIPTS}/setup_machparams.csh #========================================== if (${ICE_MACHINE} =~ cheyenne*) then diff --git a/configuration/scripts/cice.settings b/configuration/scripts/cice.settings index 9b57aab3f..76ae6ad9e 100644 --- a/configuration/scripts/cice.settings +++ b/configuration/scripts/cice.settings @@ -32,6 +32,7 @@ setenv ICE_BFBCOMP undefined setenv ICE_BFBTYPE restart setenv ICE_SPVAL undefined setenv ICE_RUNLENGTH -1 +setenv ICE_MEMUSE -1 setenv ICE_ACCOUNT undefined setenv ICE_QUEUE undefined diff --git a/configuration/scripts/ice_in b/configuration/scripts/ice_in index 27a333d86..ec582873a 100644 --- a/configuration/scripts/ice_in +++ b/configuration/scripts/ice_in @@ -100,8 +100,6 @@ restart_FY = .false. tr_lvl = .true. restart_lvl = .false. - tr_pond_cesm = .false. - restart_pond_cesm = .false. tr_pond_topo = .false. restart_pond_topo = .false. tr_pond_lvl = .true. @@ -149,7 +147,7 @@ Ktens = 0. e_yieldcurve = 2. e_plasticpot = 2. 
- visc_method = 'avg_strength' + visc_method = 'avg_zeta' elasticDamp = 0.36d0 deltaminEVP = 1e-11 deltaminVP = 2e-9 diff --git a/configuration/scripts/machines/Macros.cheyenne_gnu b/configuration/scripts/machines/Macros.cheyenne_gnu index 082130f77..5d3859ec8 100644 --- a/configuration/scripts/machines/Macros.cheyenne_gnu +++ b/configuration/scripts/machines/Macros.cheyenne_gnu @@ -12,7 +12,7 @@ FFLAGS := -fconvert=big-endian -fbacktrace -ffree-line-length-none FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) - FFLAGS += -O0 -g -fcheck=bounds -finit-real=nan -fimplicit-none -ffpe-trap=invalid,zero,overflow + FFLAGS += -O0 -g -fcheck=bounds -finit-real=nan -fimplicit-none -ffpe-trap=invalid,zero,overflow --std f2008 # FFLAGS += -O0 -g -fcheck=all -finit-real=snan -fimplicit-none -ffpe-trap=invalid,zero,overflow CFLAGS += -O0 endif diff --git a/configuration/scripts/machines/Macros.cheyenne_intel b/configuration/scripts/machines/Macros.cheyenne_intel index 52fc07ebb..6fb3a002a 100644 --- a/configuration/scripts/machines/Macros.cheyenne_intel +++ b/configuration/scripts/machines/Macros.cheyenne_intel @@ -13,7 +13,7 @@ FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) # FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created -link_mpi=dbg - FFLAGS += -O0 -g -check all -fpe0 -ftrapuv -fp-model except -check noarg_temp_created -link_mpi=dbg + FFLAGS += -O0 -g -check all -fpe0 -ftrapuv -fp-model except -check noarg_temp_created -link_mpi=dbg -stand f08 # FFLAGS += -O0 -g -check all -fpe0 -ftrapuv -fp-model except -check noarg_temp_created -init=snan,arrays -link_mpi=dbg else FFLAGS += -O2 diff --git a/configuration/scripts/machines/env.badger_intel b/configuration/scripts/machines/env.badger_intel index 6d53bf978..5532b26d6 100755 --- a/configuration/scripts/machines/env.badger_intel +++ b/configuration/scripts/machines/env.badger_intel @@ -35,9 +35,9 @@ setenv ICE_MACHINE_MACHINFO "Penguin Intel Xeon Broadwell" setenv ICE_MACHINE_ENVNAME intel setenv ICE_MACHINE_ENVINFO "(Note: can vary) ifort 19.0.4.243 20190416, openmpi/2.1.2, netcdf4.4.0" setenv ICE_MACHINE_MAKE gmake -setenv ICE_MACHINE_WKDIR /net/scratch3/$user/CICE_RUNS +setenv ICE_MACHINE_WKDIR /net/scratch4/$user/CICE_RUNS setenv ICE_MACHINE_INPUTDATA /usr/projects/climate/eclare/DATA/Consortium -setenv ICE_MACHINE_BASELINE /net/scratch3/$user/CICE_BASELINE +setenv ICE_MACHINE_BASELINE /net/scratch4/$user/CICE_BASELINE setenv ICE_MACHINE_SUBMIT "sbatch " #setenv ICE_MACHINE_ACCT e3sm setenv ICE_MACHINE_ACCT climatehilat diff --git a/configuration/scripts/machines/env.cheyenne_gnu b/configuration/scripts/machines/env.cheyenne_gnu index c962c35f3..fb29543f8 100755 --- a/configuration/scripts/machines/env.cheyenne_gnu +++ b/configuration/scripts/machines/env.cheyenne_gnu @@ -31,8 +31,9 @@ endif if ($?ICE_BFBTYPE) then if ($ICE_BFBTYPE =~ qcchk*) then - module load python - source /glade/u/apps/opt/ncar_pylib/ncar_pylib.csh default + module load conda +# conda env create -f ../../configuration/scripts/tests/qctest.yml + conda activate qctest endif endif diff --git a/configuration/scripts/machines/env.cheyenne_intel b/configuration/scripts/machines/env.cheyenne_intel index 89a8920b6..2c6eedec6 100755 --- a/configuration/scripts/machines/env.cheyenne_intel +++ b/configuration/scripts/machines/env.cheyenne_intel @@ -31,8 +31,9 @@ endif if ($?ICE_BFBTYPE) then if ($ICE_BFBTYPE =~ qcchk*) then - module load python - source /glade/u/apps/opt/ncar_pylib/ncar_pylib.csh default + module load conda +# conda env 
create -f ../../configuration/scripts/tests/qctest.yml + conda activate qctest endif endif diff --git a/configuration/scripts/machines/env.cheyenne_pgi b/configuration/scripts/machines/env.cheyenne_pgi index 5caa9d992..e6e339f08 100755 --- a/configuration/scripts/machines/env.cheyenne_pgi +++ b/configuration/scripts/machines/env.cheyenne_pgi @@ -31,8 +31,9 @@ endif if ($?ICE_BFBTYPE) then if ($ICE_BFBTYPE =~ qcchk*) then - module load python - source /glade/u/apps/opt/ncar_pylib/ncar_pylib.csh default + module load conda +# conda env create -f ../../configuration/scripts/tests/qctest.yml + conda activate qctest endif endif diff --git a/configuration/scripts/machines/env.gpsc3_intel b/configuration/scripts/machines/env.gpsc3_intel index 2c8d49275..87c7834a4 100644 --- a/configuration/scripts/machines/env.gpsc3_intel +++ b/configuration/scripts/machines/env.gpsc3_intel @@ -26,7 +26,7 @@ setenv ICE_MACHINE_ENVNAME intel setenv ICE_MACHINE_MAKE make setenv ICE_MACHINE_WKDIR ~/data/site3/cice/runs/ setenv ICE_MACHINE_INPUTDATA /home/ords/cmdd/cmde/sice500/ -setenv ICE_MACHINE_BASELINE ~/data/site3/cice/baseline/ +setenv ICE_MACHINE_BASELINE ~/data/site3/cice/baselines/ setenv ICE_MACHINE_SUBMIT "sbatch" setenv ICE_MACHINE_TPNODE 44 setenv ICE_MACHINE_ACCT "eccc_cmdd" diff --git a/configuration/scripts/machines/env.ppp5_intel b/configuration/scripts/machines/env.ppp5_intel index 79dbf2a1b..c4987124a 100644 --- a/configuration/scripts/machines/env.ppp5_intel +++ b/configuration/scripts/machines/env.ppp5_intel @@ -12,7 +12,9 @@ source /fs/ssm/main/opt/ssmuse/ssmuse-1.11/ssmuse_1.11_all/bin/ssmuse-boot.csh > set ssmuse=`which ssmuse-csh` # Intel compiler + Intel MPI source $ssmuse -d /fs/ssm/main/opt/intelcomp/inteloneapi-2022.1.2/intelcomp+mpi+mkl +# source /etc/profile.d/modules.csh # module use /home/sice500/modulefiles +# setenv I_MPI_LIBRARY_KIND debug # module load -s icc mpi setenv FOR_DUMP_CORE_FILE 1 setenv I_MPI_DEBUG_COREDUMP 1 diff --git a/configuration/scripts/machines/env.ppp6_gnu b/configuration/scripts/machines/env.ppp6_gnu index 39cc27740..69ed6ff8b 100644 --- a/configuration/scripts/machines/env.ppp6_gnu +++ b/configuration/scripts/machines/env.ppp6_gnu @@ -20,7 +20,7 @@ setenv ICE_MACHINE_ENVNAME gnu setenv ICE_MACHINE_MAKE make setenv ICE_MACHINE_WKDIR ~/data/site6/cice/runs/ setenv ICE_MACHINE_INPUTDATA /space/hall6/sitestore/eccc/cmd/e/sice500/ -setenv ICE_MACHINE_BASELINE ~/data/site6/cice/baseline/ +setenv ICE_MACHINE_BASELINE ~/data/site6/cice/baselines/ setenv ICE_MACHINE_SUBMIT qsub setenv ICE_MACHINE_TPNODE 80 setenv ICE_MACHINE_ACCT unused diff --git a/configuration/scripts/machines/env.ppp6_gnu-impi b/configuration/scripts/machines/env.ppp6_gnu-impi index f2a523bf1..461e09a43 100644 --- a/configuration/scripts/machines/env.ppp6_gnu-impi +++ b/configuration/scripts/machines/env.ppp6_gnu-impi @@ -29,7 +29,7 @@ setenv ICE_MACHINE_ENVNAME gnu-impi setenv ICE_MACHINE_MAKE make setenv ICE_MACHINE_WKDIR ~/data/site6/cice/runs/ setenv ICE_MACHINE_INPUTDATA /space/hall6/sitestore/eccc/cmd/e/sice500/ -setenv ICE_MACHINE_BASELINE ~/data/site6/cice/baseline/ +setenv ICE_MACHINE_BASELINE ~/data/site6/cice/baselines/ setenv ICE_MACHINE_SUBMIT qsub setenv ICE_MACHINE_TPNODE 80 setenv ICE_MACHINE_ACCT unused diff --git a/configuration/scripts/machines/env.ppp6_intel b/configuration/scripts/machines/env.ppp6_intel index dfaeb855f..ef9396575 100644 --- a/configuration/scripts/machines/env.ppp6_intel +++ b/configuration/scripts/machines/env.ppp6_intel @@ -12,7 +12,9 @@ source 
/fs/ssm/main/opt/ssmuse/ssmuse-1.11/ssmuse_1.11_all/bin/ssmuse-boot.csh > set ssmuse=`which ssmuse-csh` # Intel compiler + Intel MPI source $ssmuse -d /fs/ssm/main/opt/intelcomp/inteloneapi-2022.1.2/intelcomp+mpi+mkl +# source /etc/profile.d/modules.csh # module use /home/sice500/modulefiles +# setenv I_MPI_LIBRARY_KIND debug # module load -s icc mpi setenv FOR_DUMP_CORE_FILE 1 setenv I_MPI_DEBUG_COREDUMP 1 diff --git a/configuration/scripts/machines/env.ppp6_intel19 b/configuration/scripts/machines/env.ppp6_intel19 index d41242630..6cdd9a036 100644 --- a/configuration/scripts/machines/env.ppp6_intel19 +++ b/configuration/scripts/machines/env.ppp6_intel19 @@ -30,7 +30,7 @@ setenv ICE_MACHINE_ENVNAME intel19 setenv ICE_MACHINE_MAKE make setenv ICE_MACHINE_WKDIR ~/data/site6/cice/runs/ setenv ICE_MACHINE_INPUTDATA /space/hall6/sitestore/eccc/cmd/e/sice500/ -setenv ICE_MACHINE_BASELINE ~/data/site6/cice/baseline/ +setenv ICE_MACHINE_BASELINE ~/data/site6/cice/baselines/ setenv ICE_MACHINE_SUBMIT "qsub" setenv ICE_MACHINE_TPNODE 80 setenv ICE_MACHINE_ACCT P0000000 diff --git a/configuration/scripts/machines/env.robert_intel b/configuration/scripts/machines/env.robert_intel index 43c11a529..d3d9c1eae 100644 --- a/configuration/scripts/machines/env.robert_intel +++ b/configuration/scripts/machines/env.robert_intel @@ -12,7 +12,9 @@ source /fs/ssm/main/opt/ssmuse/ssmuse-1.11/ssmuse_1.11_all/bin/ssmuse-boot.csh > set ssmuse=`which ssmuse-csh` # Intel compiler + Intel MPI source $ssmuse -d /fs/ssm/main/opt/intelcomp/inteloneapi-2022.1.2/intelcomp+mpi+mkl +# source /etc/profile.d/modules.csh # module use /home/sice500/modulefiles +# setenv I_MPI_LIBRARY_KIND debug # module load -s icc mpi setenv FOR_DUMP_CORE_FILE 1 setenv I_MPI_DEBUG_COREDUMP 1 diff --git a/configuration/scripts/machines/env.underhill_intel b/configuration/scripts/machines/env.underhill_intel index 90192853e..bc3eec857 100644 --- a/configuration/scripts/machines/env.underhill_intel +++ b/configuration/scripts/machines/env.underhill_intel @@ -12,7 +12,9 @@ source /fs/ssm/main/opt/ssmuse/ssmuse-1.11/ssmuse_1.11_all/bin/ssmuse-boot.csh > set ssmuse=`which ssmuse-csh` # Intel compiler + Intel MPI source $ssmuse -d /fs/ssm/main/opt/intelcomp/inteloneapi-2022.1.2/intelcomp+mpi+mkl +# source /etc/profile.d/modules.csh # module use /home/sice500/modulefiles +# setenv I_MPI_LIBRARY_KIND debug # module load -s icc mpi setenv FOR_DUMP_CORE_FILE 1 setenv I_MPI_DEBUG_COREDUMP 1 diff --git a/configuration/scripts/machines/environment.yml b/configuration/scripts/machines/environment.yml index 57bdacfec..e76ff692f 100644 --- a/configuration/scripts/machines/environment.yml +++ b/configuration/scripts/machines/environment.yml @@ -12,7 +12,7 @@ dependencies: # Python dependencies for plotting scripts - numpy - matplotlib-base - - basemap + - cartopy - netcdf4 # Python dependencies for building the HTML documentation - sphinx diff --git a/configuration/scripts/options/set_env.memlarge b/configuration/scripts/options/set_env.memlarge new file mode 100644 index 000000000..2572e3ae7 --- /dev/null +++ b/configuration/scripts/options/set_env.memlarge @@ -0,0 +1,2 @@ +setenv ICE_MEMUSE 8 + diff --git a/configuration/scripts/options/set_env.memmed b/configuration/scripts/options/set_env.memmed new file mode 100644 index 000000000..5d7169268 --- /dev/null +++ b/configuration/scripts/options/set_env.memmed @@ -0,0 +1,2 @@ +setenv ICE_MEMUSE 4 + diff --git a/configuration/scripts/options/set_env.memsmall b/configuration/scripts/options/set_env.memsmall new 
file mode 100644 index 000000000..dc9e3c1ee --- /dev/null +++ b/configuration/scripts/options/set_env.memsmall @@ -0,0 +1,2 @@ +setenv ICE_MEMUSE 1 + diff --git a/configuration/scripts/options/set_env.optargs b/configuration/scripts/options/set_env.optargs new file mode 100644 index 000000000..84d48137f --- /dev/null +++ b/configuration/scripts/options/set_env.optargs @@ -0,0 +1,2 @@ +setenv ICE_DRVOPT unittest/optargs +setenv ICE_TARGET optargs diff --git a/configuration/scripts/options/set_nml.alt01 b/configuration/scripts/options/set_nml.alt01 index 24947dcda..6c2bf2159 100644 --- a/configuration/scripts/options/set_nml.alt01 +++ b/configuration/scripts/options/set_nml.alt01 @@ -6,13 +6,12 @@ distribution_wght = 'block' tr_iage = .false. tr_FY = .false. tr_lvl = .true. -tr_pond_cesm = .false. tr_pond_topo = .false. tr_pond_lvl = .false. tr_aero = .false. kcatbound = 1 kitd = 0 -ktherm = 0 +ktherm = 1 conduct = 'bubbly' kdyn = 1 seabed_stress = .true. diff --git a/configuration/scripts/options/set_nml.alt02 b/configuration/scripts/options/set_nml.alt02 index a478809ca..3c4d9c383 100644 --- a/configuration/scripts/options/set_nml.alt02 +++ b/configuration/scripts/options/set_nml.alt02 @@ -5,7 +5,6 @@ distribution_type = 'sectrobin' tr_iage = .true. tr_FY = .true. tr_lvl = .true. -tr_pond_cesm = .false. tr_pond_topo = .false. tr_pond_lvl = .false. tr_aero = .false. diff --git a/configuration/scripts/options/set_nml.alt03 b/configuration/scripts/options/set_nml.alt03 index c2ca38f32..22c3c28b0 100644 --- a/configuration/scripts/options/set_nml.alt03 +++ b/configuration/scripts/options/set_nml.alt03 @@ -6,7 +6,6 @@ conserv_check = .true. tr_iage = .false. tr_FY = .false. tr_lvl = .false. -tr_pond_cesm = .false. tr_pond_topo = .true. tr_pond_lvl = .false. tr_aero = .true. diff --git a/configuration/scripts/options/set_nml.alt04 b/configuration/scripts/options/set_nml.alt04 index d1bc6ad02..a07f70e66 100644 --- a/configuration/scripts/options/set_nml.alt04 +++ b/configuration/scripts/options/set_nml.alt04 @@ -6,7 +6,6 @@ distribution_wght = 'block' tr_iage = .true. tr_FY = .true. tr_lvl = .true. -tr_pond_cesm = .false. tr_pond_topo = .false. tr_pond_lvl = .true. tr_aero = .true. diff --git a/configuration/scripts/options/set_nml.alt05 b/configuration/scripts/options/set_nml.alt05 index 6793b5954..d97207dfa 100644 --- a/configuration/scripts/options/set_nml.alt05 +++ b/configuration/scripts/options/set_nml.alt05 @@ -2,7 +2,6 @@ ice_ic = 'internal' tr_iage = .false. tr_FY = .false. tr_lvl = .false. -tr_pond_cesm = .true. tr_pond_topo = .false. tr_pond_lvl = .false. tr_aero = .false. diff --git a/configuration/scripts/options/set_nml.alt07 b/configuration/scripts/options/set_nml.alt07 index 3355b6019..cb48dab1d 100644 --- a/configuration/scripts/options/set_nml.alt07 +++ b/configuration/scripts/options/set_nml.alt07 @@ -2,5 +2,5 @@ kdyn = 1 evp_algorithm = 'standard_2d' ndte = 300 capping_method = 'sum' -visc_method = 'avg_zeta' +visc_method = 'avg_strength' diff --git a/configuration/scripts/options/set_nml.boxadv b/configuration/scripts/options/set_nml.boxadv index 716884031..ca05970e3 100644 --- a/configuration/scripts/options/set_nml.boxadv +++ b/configuration/scripts/options/set_nml.boxadv @@ -13,12 +13,11 @@ ice_data_dist = 'box2001' tr_iage = .true. tr_FY = .false. tr_lvl = .true. -tr_pond_cesm = .false. tr_pond_topo = .false. tr_pond_lvl = .false. tr_aero = .false. 
kitd = 1 -ktherm = 0 +ktherm = 1 kdyn = 2 kstrength = 0 krdg_partic = 0 diff --git a/configuration/scripts/options/set_nml.boxnodyn b/configuration/scripts/options/set_nml.boxnodyn index 0b9a214f1..71abfdaea 100644 --- a/configuration/scripts/options/set_nml.boxnodyn +++ b/configuration/scripts/options/set_nml.boxnodyn @@ -38,12 +38,11 @@ ns_boundary_type = 'open' tr_iage = .false. tr_FY = .false. tr_lvl = .false. -tr_pond_cesm = .false. tr_pond_topo = .false. tr_pond_lvl = .false. tr_aero = .false. kitd = 0 -ktherm = 0 +ktherm = 1 kdyn = 1 revised_evp = .false. kstrength = 1 diff --git a/configuration/scripts/options/set_nml.boxrestore b/configuration/scripts/options/set_nml.boxrestore index fd6a9e59e..7bc4efa26 100644 --- a/configuration/scripts/options/set_nml.boxrestore +++ b/configuration/scripts/options/set_nml.boxrestore @@ -18,12 +18,11 @@ f_aice = 'd' tr_iage = .true. tr_FY = .true. tr_lvl = .true. -tr_pond_cesm = .false. tr_pond_topo = .false. tr_pond_lvl = .false. tr_aero = .false. kitd = 1 -ktherm = 0 +ktherm = 1 kdyn = 1 revised_evp = .true. kstrength = 0 diff --git a/configuration/scripts/options/set_nml.gridcd b/configuration/scripts/options/set_nml.gridcd index 104801879..7889e64f4 100644 --- a/configuration/scripts/options/set_nml.gridcd +++ b/configuration/scripts/options/set_nml.gridcd @@ -1,2 +1,5 @@ grid_ice = 'C_override_D' +# visc_method=avg_zeta causes some gridcd tests to abort, use avg_strength for now +visc_method = 'avg_strength' + diff --git a/configuration/scripts/options/set_nml.qc b/configuration/scripts/options/set_nml.qc index 70ba1b429..feefb376d 100644 --- a/configuration/scripts/options/set_nml.qc +++ b/configuration/scripts/options/set_nml.qc @@ -13,3 +13,4 @@ diagfreq = 24 histfreq = 'd','x','x','x','x' f_hi = 'd' hist_avg = .false. 
+distribution_wght = 'blockall' diff --git a/configuration/scripts/setup_machparams.csh b/configuration/scripts/setup_machparams.csh new file mode 100755 index 000000000..db9f00244 --- /dev/null +++ b/configuration/scripts/setup_machparams.csh @@ -0,0 +1,64 @@ +#!/bin/csh -f + +# inputs +# mpi tasks +set ntasks = ${ICE_NTASKS} +# threads +set nthrds = ${ICE_NTHRDS} +# max tasks per node +set maxtpn = ${ICE_MACHINE_TPNODE} +# batch charge account +set acct = ${ICE_ACCOUNT} + +# compute total cores needed and distribution of cores on nodes +# ncores = total cores needed (tasks * threads) +# taskpernode = number of MPI tasks per node based on size of node and threads +# nodes = number of total nodes needed based on tasks/threads +# taskpernodelimit = min(taskpernode, ntasks), when using less than 1 node +# corespernode = number of cores per node used +@ ncores = ${ntasks} * ${nthrds} +@ taskpernode = ${maxtpn} / $nthrds +if (${taskpernode} == 0) set taskpernode = 1 +@ nnodes = ${ntasks} / ${taskpernode} +if (${nnodes} * ${taskpernode} < ${ntasks}) @ nnodes = $nnodes + 1 +set taskpernodelimit = ${taskpernode} +if (${taskpernodelimit} > ${ntasks}) set taskpernodelimit = ${ntasks} +@ corespernode = ${taskpernodelimit} * ${nthrds} + +set runlength = ${ICE_RUNLENGTH} +if ($?ICE_MACHINE_MAXRUNLENGTH) then + if (${runlength} > ${ICE_MACHINE_MAXRUNLENGTH}) then + set runlength = ${ICE_MACHINE_MAXRUNLENGTH} + endif +endif + +set memuse = ${ICE_MEMUSE} +if ($?ICE_MACHINE_MAXMEMUSE) then + if (${memuse} > ${ICE_MACHINE_MAXMEMUSE}) then + set memuse = ${ICE_MACHINE_MAXMEMUSE} + endif +endif + +set queue = "${ICE_QUEUE}" +set batchtime = "00:15:00" +if (${runlength} == 0) set batchtime = "00:29:00" +if (${runlength} == 1) set batchtime = "00:59:00" +if (${runlength} == 2) set batchtime = "2:00:00" +if (${runlength} == 3) set batchtime = "3:00:00" +if (${runlength} == 4) set batchtime = "4:00:00" +if (${runlength} == 5) set batchtime = "5:00:00" +if (${runlength} == 6) set batchtime = "6:00:00" +if (${runlength} == 7) set batchtime = "7:00:00" +if (${runlength} >= 8) set batchtime = "8:00:00" +set batchmem = "5" +if (${memuse} == 1) set batchmem = "5" +if (${memuse} == 2) set batchmem = "10" +if (${memuse} == 3) set batchmem = "15" +if (${memuse} == 4) set batchmem = "20" +if (${memuse} == 5) set batchmem = "50" +if (${memuse} == 6) set batchmem = "100" +if (${memuse} == 7) set batchmem = "150" +if (${memuse} >= 8) set batchmem = "200" + +set shortcase = `echo ${ICE_CASENAME} | cut -c1-15` + diff --git a/configuration/scripts/tests/QC/cice.t-test.py b/configuration/scripts/tests/QC/cice.t-test.py index c84583baa..b941c4912 100755 --- a/configuration/scripts/tests/QC/cice.t-test.py +++ b/configuration/scripts/tests/QC/cice.t-test.py @@ -379,8 +379,8 @@ def plot_data(data, lat, lon, units, case, plot_type): try: # Load the necessary plotting libraries import matplotlib.pyplot as plt - from mpl_toolkits.basemap import Basemap - from mpl_toolkits.axes_grid1 import make_axes_locatable + import cartopy.crs as ccrs + import cartopy.feature as cfeature except ImportError: logger.warning('Error loading necessary Python modules in plot_data function') return @@ -389,87 +389,200 @@ def plot_data(data, lat, lon, units, case, plot_type): import warnings warnings.filterwarnings("ignore", category=UserWarning) - # Create the figure and axis - fig, axes = plt.subplots(nrows=1, ncols=2,figsize=(14, 8)) - - # Plot the northern hemisphere data as a scatter plot - # Create the basemap, and draw boundaries - plt.sca(axes[0]) - 
m = Basemap(projection='npstere', boundinglat=35,lon_0=270, resolution='l') - m.drawcoastlines() - m.fillcontinents() - m.drawcountries() + # define north and south polar stereographic coord ref system + npstereo = ccrs.NorthPolarStereo(central_longitude=-90.0) # define projection + spstereo = ccrs.SouthPolarStereo(central_longitude= 90.0) # define projection + + # define figure + fig = plt.figure(figsize=[14,7]) + + # add axis for each hemisphere + ax1 = fig.add_subplot(121,projection=npstereo) + ax2 = fig.add_subplot(122,projection=spstereo) + + # set plot extents + ax1.set_extent([-180.,180.,35.,90.],ccrs.PlateCarree()) + ax2.set_extent([-180.,180.,-90.,-35.],ccrs.PlateCarree()) + + # add land features NH plot + ax1.add_feature(cfeature.LAND, color='lightgray') + ax1.add_feature(cfeature.BORDERS) + ax1.add_feature(cfeature.COASTLINE) + + # add land features SH plot + ax2.add_feature(cfeature.LAND, color='lightgray') + ax2.add_feature(cfeature.BORDERS) + ax2.add_feature(cfeature.COASTLINE) + + # add grid lines + dlon = 30.0 + dlat = 15.0 + mpLons = np.arange(-180. ,180.0+dlon,dlon) + mpLats = np.arange(-90.,90.0+dlat ,dlat) + + g1 = ax1.gridlines(xlocs=mpLons,ylocs=mpLats, + draw_labels=True, + x_inline=False,y_inline=False) + + g2 = ax2.gridlines(xlocs=mpLons,ylocs=mpLats, + draw_labels=True, + x_inline=False,y_inline=False) + + + # Specify Min/max colors for each hemisphere + # check for minus to see if it is a difference plot + if '\n- ' in case: # this is a difference plot + # specify colormap + mycmap = 'seismic' # blue,white,red with white centered colormap + + # determine max absolute value to use for color range + # intent is use same min/max with center zero + dmin = np.abs(data.min()) + dmax = np.abs(data.max()) + clim = np.max([dmin,dmax]) + + # this specifies both hemispheres the same range. + cminNH = -clim + cmaxNH = clim + cminSH = -clim + cmaxSH = clim + + else: # not a difference plot + # specify colormap + mycmap = 'jet' + + # arbitrary limits for each Hemisphere + cminNH = 0.0 + cmaxNH = 5.0 + cminSH = 0.0 + cmaxSH = 2.0 if plot_type == 'scatter': - x, y = m(lon,lat) - sc = m.scatter(x, y, c=data, cmap='jet', lw=0, s=4) - else: - # Create new arrays to add 1 additional longitude value to prevent a - # small amount of whitespace around longitude of 0/360 degrees. - lon_cyc = np.zeros((lon.shape[0],lon.shape[1]+1)) - mask = np.zeros((data.shape[0],data.shape[1]+1)) - lat_cyc = np.zeros((lat.shape[0],lat.shape[1]+1)) - - mask[:,0:-1] = data.mask[:,:] - mask[:,-1] = data.mask[:,0] - lon_cyc[:,0:-1] = lon[:,:]; lon_cyc[:,-1] = lon[:,0] - lat_cyc[:,0:-1] = lat[:,:]; lat_cyc[:,-1] = lat[:,0] - - lon1 = np.ma.masked_array(lon_cyc, mask=mask) - lat1 = np.ma.masked_array(lat_cyc, mask=mask) - - d = np.zeros((data.shape[0],data.shape[1]+1)) - d[:,0:-1] = data[:,:] - d[:,-1] = data[:,0] - d1 = np.ma.masked_array(d,mask=mask) - - x, y = m(lon1.data, lat1.data) + # plot NH + scNH = ax1.scatter(lon,lat,c=data,cmap=mycmap,s=4,edgecolors='none', + vmin=cminNH, vmax=cmaxNH, + transform=ccrs.PlateCarree()) + + # plot SH + scSH = ax2.scatter(lon,lat,c=data,cmap=mycmap,s=4,edgecolors='none', + vmin=cminSH, vmax=cmaxSH, + transform=ccrs.PlateCarree()) + else: if plot_type == 'contour': - sc = m.contourf(x, y, d1, cmap='jet') - else: # pcolor - sc = m.pcolor(x, y, d1, cmap='jet') + print("contour plot deprecated. 
using pcolor.") + + scNH = ax1.pcolormesh(lon,lat,data,cmap=mycmap, + vmin=cminNH, vmax=cmaxNH, + transform=ccrs.PlateCarree()) + + scSH = ax2.pcolormesh(lon,lat,data,cmap=mycmap, + vmin=cminSH, vmax=cmaxSH, + transform=ccrs.PlateCarree()) + + #else: + # # Create new arrays to add 1 additional longitude value to prevent a + # # small amount of whitespace around seam + # lon_cyc = np.zeros((lon.shape[0],lon.shape[1]+1)) + # lat_cyc = np.zeros((lat.shape[0],lat.shape[1]+1)) + # data1 = np.zeros((data.shape[0],data.shape[1]+1)) + # mask = np.zeros((data.shape[0],data.shape[1]+1)) + + # mask[:,0:-1] = data.mask[:,:] + # mask[:,-1] = data.mask[:,0] + # lon_cyc[:,0:-1] = lon[:,:] + # lon_cyc[:,-1] = lon[:,0] + # lat_cyc[:,0:-1] = lat[:,:] + # lat_cyc[:,-1] = lat[:,0] + # data1[:,0:-1] = data[:,:] + # data1[:,-1] = data[:,0] + + # lon1 = np.ma.masked_array(lon_cyc, mask=mask) + # lat1 = np.ma.masked_array(lat_cyc, mask=mask) + # data1 = np.ma.masked_array(data1, mask=mask) + + # if plot_type == 'contour': + # # plotting around -180/180 and 0/360 is a challenge. + # # need to use lons in both 0-360 and +- 180 + # # make lons +/- 180 + # lon1_pm180 = np.where(lon1 < 180.0, lon1, lon1-360.0) + # lon1_pm180 = np.ma.masked_where(lon1.mask,lon1_pm180) + + # # get 90-270 lons from the lon 0-360 array (lon1) + # # note: use 91, 269 to prevent small amount of white space in contour plots + # lonmask = np.logical_or(lon1 <= 91.0,lon1 >= 269.0) + # lons_90_270 = np.ma.masked_where(lonmask,lon1) + # lats_90_270 = np.ma.MaskedArray(lat1,mask=lons_90_270.mask) + # data_90_270 = np.ma.MaskedArray(data1,mask=lons_90_270.mask) + # data_90_270.mask = np.logical_or(data1.mask,data_90_270.mask) + + # # get -92-92 lons from +/- 180 (lon1_pm180) + # # note: use 92 to prevent small amount of white space in contour plots + # lonmask = np.logical_or(lon1_pm180 <= -92.0, lon1_pm180 >= 92.0) + # lons_m90_90 = np.ma.masked_where(lonmask,lon1_pm180) + # lats_m90_90 = np.ma.MaskedArray(lat1,mask=lons_m90_90.mask) + # data_m90_90 = np.ma.MaskedArray(data1,mask=lons_m90_90.mask) + # data_m90_90.mask = np.logical_or(data1.mask,data_m90_90.mask) + + # # plot NH 90-270 + # sc = ax1.contourf(lons_90_270, lats_90_270, data_90_270, cmap=mycmap, + # transform=ccrs.PlateCarree(), + # extend='both') + # # plot NH -90-90 + # sc = ax1.contourf(lons_m90_90, lats_m90_90, data_m90_90, cmap=mycmap, + # transform=ccrs.PlateCarree(), + # extend='both') + + # # plot SH 90-270 + # sc = ax2.contourf(lons_90_270, lats_90_270, data_90_270, cmap=mycmap, + # transform=ccrs.PlateCarree(), + # extend='both') + # # plot SH -90-90 + # sc = ax2.contourf(lons_m90_90, lats_m90_90, data_m90_90, cmap=mycmap, + # transform=ccrs.PlateCarree(), + # extend='both') + + + #plt.suptitle('CICE Mean Ice Thickness\n{}'.format(case), y=0.95) + plt.suptitle(f'CICE Mean Ice Thickness\n{case:s}') + + # add more whitespace between plots for colorbar. 
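+    # note: wspace is expressed as a fraction of the average axes width, so 0.4 leaves room for the per-hemisphere colorbar axes added below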
+ plt.subplots_adjust(wspace=0.4) + + # add separate axes for colorbars + # first get position/size of current axes + pos1 = ax1.get_position() + pos2 = ax2.get_position() + + # now add new colormap axes using the position ax1, ax2 as reference + cax1 = fig.add_axes([pos1.x0+pos1.width+0.03, + pos1.y0, + 0.02, + pos1.height]) + + cax2 = fig.add_axes([pos2.x0+pos2.width+0.03, + pos2.y0, + 0.02, + pos2.height]) - m.drawparallels(np.arange(-90.,120.,15.),labels=[1,0,0,0]) # draw parallels - m.drawmeridians(np.arange(0.,420.,30.),labels=[1,1,1,1]) # draw meridians - # Plot the southern hemisphere data as a scatter plot - plt.sca(axes[1]) - m = Basemap(projection='spstere', boundinglat=-45,lon_0=270, resolution='l') - m.drawcoastlines() - m.fillcontinents() - m.drawcountries() + if '\n- ' in case: + # If making a difference plot, use scientific notation for colorbar + cbNH = plt.colorbar(scNH, cax=cax1, orientation="vertical", + pad=0.1, format="%.1e") + cbSH = plt.colorbar(scSH, cax=cax2, orientation="vertical", + pad=0.1, format="%.1e") - if plot_type == 'scatter': - x, y = m(lon,lat) - sc = m.scatter(x, y, c=data, cmap='jet', lw=0, s=4) else: - x, y = m(lon1.data, lat1.data) + #pass + # If plotting non-difference data, do not use scientific notation for colorbar + cbNH = plt.colorbar(scNH, cax=cax1, orientation="vertical", + pad=0.1, format="%.2f") + cbSH = plt.colorbar(scSH, cax=cax2, orientation="vertical", + pad=0.1, format="%.2f") - # Bandaid for a bug in the version of Basemap used during development - outside = (x <= m.xmin) | (x >= m.xmax) | (y <= m.ymin) | (y >= m.ymax) - tmp = np.ma.masked_where(outside,d1) - - if plot_type == 'contour': - sc = m.contourf(x, y, tmp, cmap='jet') - else: # pcolor - sc = m.pcolor(x, y, tmp, cmap='jet') - - m.drawparallels(np.arange(-90.,120.,15.),labels=[1,0,0,0]) # draw parallels - m.drawmeridians(np.arange(0.,420.,30.),labels=[1,1,1,1]) # draw meridians - - plt.suptitle('CICE Mean Ice Thickness\n{}'.format(case), y=0.95) - - # Make some room at the bottom of the figure, and create a colorbar - fig.subplots_adjust(bottom=0.2) - cbar_ax = fig.add_axes([0.11,0.1,0.8,0.05]) - if '\n- ' in case: - # If making a difference plot, use scientific notation for colorbar - cb = plt.colorbar(sc, cax=cbar_ax, orientation="horizontal", format="%.2e") - else: - # If plotting non-difference data, do not use scientific notation for colorbar - cb = plt.colorbar(sc, cax=cbar_ax, orientation="horizontal", format="%.2f") - cb.set_label(units, x=1.0) + cbNH.set_label(units, loc='center') + cbSH.set_label(units, loc='center') outfile = 'ice_thickness_{}.png'.format(case.replace('\n- ','_minus_')) logger.info('Creating map of the data ({})'.format(outfile)) @@ -489,7 +602,8 @@ def plot_two_stage_failures(data, lat, lon): logger.info('Creating map of the failures (two_stage_test_failure_map.png)') # Load the necessary plotting libraries import matplotlib.pyplot as plt - from mpl_toolkits.basemap import Basemap + import cartopy.crs as ccrs + import cartopy.feature as cfeature from mpl_toolkits.axes_grid1 import make_axes_locatable from matplotlib.colors import LinearSegmentedColormap @@ -497,15 +611,19 @@ def plot_two_stage_failures(data, lat, lon): import warnings warnings.filterwarnings("ignore", category=UserWarning) - # Create the figure and axis + # Create the figure fig = plt.figure(figsize=(12, 8)) - ax = fig.add_axes([0.05, 0.08, 0.9, 0.9]) - - # Create the basemap, and draw boundaries - m = Basemap(projection='moll', lon_0=0., resolution='l') - 
m.drawmapboundary(fill_color='white') - m.drawcoastlines() - m.drawcountries() + + # define plot projection and create axis + pltprj = ccrs.Mollweide(central_longitude=0.0) + ax = fig.add_subplot(111,projection=pltprj) + + # add land + ax.add_feature(cfeature.LAND, color='lightgray') + ax.add_feature(cfeature.BORDERS) + ax.add_feature(cfeature.COASTLINE) + #gshhs = cfeature.GSHHSFeature(scale='auto',facecolor='lightgray',edgecolor='none') + #ax.add_feature(gshhs) # Create the custom colormap colors = [(0, 0, 1), (1, 0, 0)] # Blue, Red @@ -513,11 +631,20 @@ def plot_two_stage_failures(data, lat, lon): cm = LinearSegmentedColormap.from_list(cmap_name, colors, N=2) # Plot the data as a scatter plot - x, y = m(lon, lat) - sc = m.scatter(x, y, c=int_data, cmap=cm, lw=0, vmin=0, vmax=1, s=4) - - m.drawmeridians(np.arange(0, 360, 60), labels=[0, 0, 0, 1], fontsize=10) - m.drawparallels(np.arange(-90, 90, 30), labels=[1, 0, 0, 0], fontsize=10) + sc = ax.scatter(lon,lat,c=int_data,cmap=cm,s=4,lw=0, + vmin=0.,vmax=1., + transform=ccrs.PlateCarree()) + + # add grid lines + dlon = 60.0 + dlat = 30.0 + mpLons = np.arange(-180. ,180.0+dlon,dlon) + mpLats = np.arange(-90.,90.0+dlat ,dlat) + mpLabels = {"left": "y", + "bottom": "x"} + + ax.gridlines(xlocs=mpLons,ylocs=mpLats, + draw_labels=mpLabels) plt.title('CICE Two-Stage Test Failures') diff --git a/configuration/scripts/tests/omp_suite.ts b/configuration/scripts/tests/omp_suite.ts index 937a3ec90..5d5e18376 100644 --- a/configuration/scripts/tests/omp_suite.ts +++ b/configuration/scripts/tests/omp_suite.ts @@ -39,7 +39,7 @@ smoke gx1 18x1 reprosum,run10day,cmplogrest,thread smoke gx1 18x1 seabedprob,reprosum,run10day,cmplogrest,thread smoke_gx1_15x2_reprosum_run10day_seabedprob smoke gx3 8x1 fsd12,reprosum,run10day,cmplogrest,thread smoke_gx3_14x2_fsd12_reprosum_run10day smoke gx3 8x1 isotope,reprosum,run10day,cmplogrest,thread smoke_gx3_11x2_isotope_reprosum_run10day -smoke gx3 8x1 snwitdrdg,snwgrain,icdefault,reprosum,run10day,cmplogrest,thread smoke_gx3_8x4_icdefault_reprosum_run10day_snwitdrdg_snwgrain +smoke gx3 8x1 snwitdrdg,snwgrain,icdefault,reprosum,run10day,cmplogrest,thread smoke_gx3_8x4_icdefault_reprosum_run10day_snwgrain_snwitdrdg smoke gx3 8x1 dynpicard,reprosum,run10day,cmplogrest,thread smoke_gx3_6x4_dynpicard_reprosum_run10day smoke gx3 8x1 zsal,reprosum,run10day,cmplogrest,thread smoke_gx3_8x3_reprosum_run10day_zsal smoke gx3 4x2x25x29x4 reprosum,run10day smoke_gx3_1x1x100x116x1_reprosum_run10day_thread @@ -79,8 +79,8 @@ smoke gbox128 14x2 boxrestore,reprosum,run10day,gridc smoke gbox80 4x5 box2001,reprosum,run10day,gridc smoke gbox80 11x3 boxslotcyl,reprosum,run10day,gridc -smoke gx3 4x2 diag1,reprosum,run10day,cmplogrest,gridc smoke_gx3_8x4_gridc_diag1_reprosum_run10day -smoke gx3 4x1 diag1,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_8x4_gridc_diag1_reprosum_run10day +smoke gx3 4x2 diag1,reprosum,run10day,cmplogrest,gridc smoke_gx3_8x4_diag1_gridc_reprosum_run10day +smoke gx3 4x1 diag1,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_8x4_diag1_gridc_reprosum_run10day smoke gx3 8x1 alt01,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_6x2_alt01_gridc_reprosum_run10day smoke gx3 8x1 alt02,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_8x2_alt02_gridc_reprosum_run10day #smoke gx3 8x1 alt03,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_12x2_alt03_droundrobin_gridc_reprosum_run10day @@ -93,7 +93,7 @@ smoke gx1 18x1 reprosum,run10day,cmplogrest,thread,gridc smoke gx1 18x1 
seabedprob,reprosum,run10day,cmplogrest,thread,gridc smoke_gx1_15x2_gridc_reprosum_run10day_seabedprob smoke gx3 8x1 fsd12,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_14x2_fsd12_gridc_reprosum_run10day smoke gx3 8x1 isotope,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_11x2_gridc_isotope_reprosum_run10day -smoke gx3 8x1 snwitdrdg,snwgrain,icdefault,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_8x4_gridc_icdefault_reprosum_run10day_snwitdrdg_snwgrain +smoke gx3 8x1 snwitdrdg,snwgrain,icdefault,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_8x4_gridc_icdefault_reprosum_run10day_snwgrain_snwitdrdg #smoke gx3 8x1 dynpicard,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_6x4_dynpicard_gridc_reprosum_run10day smoke gx3 8x1 zsal,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_8x3_gridc_reprosum_run10day_zsal smoke gx3 4x2x25x29x4 reprosum,run10day,gridc smoke_gx3_1x1x100x116x1_gridc_reprosum_run10day_thread @@ -133,8 +133,8 @@ smoke gbox128 14x2 boxrestore,reprosum,run10day,gridcd smoke gbox80 4x5 box2001,reprosum,run10day,gridcd smoke gbox80 11x3 boxslotcyl,reprosum,run10day,gridcd -smoke gx3 4x2 diag1,reprosum,run10day,cmplogrest,gridcd smoke_gx3_8x4_gridcd_diag1_reprosum_run10day -smoke gx3 4x1 diag1,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx3_8x4_gridcd_diag1_reprosum_run10day +smoke gx3 4x2 diag1,reprosum,run10day,cmplogrest,gridcd smoke_gx3_8x4_diag1_gridcd_reprosum_run10day +smoke gx3 4x1 diag1,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx3_8x4_diag1_gridcd_reprosum_run10day smoke gx3 8x1 alt01,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx3_6x2_alt01_gridcd_reprosum_run10day smoke gx3 8x1 alt02,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx3_8x2_alt02_gridcd_reprosum_run10day #smoke gx3 8x1 alt03,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx3_12x2_alt03_droundrobin_gridcd_reprosum_run10day @@ -147,7 +147,7 @@ smoke gx1 18x1 reprosum,run10day,cmplogrest,thread,gridcd smoke gx1 18x1 seabedprob,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx1_15x2_gridcd_reprosum_run10day_seabedprob smoke gx3 8x1 fsd12,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx3_14x2_fsd12_gridcd_reprosum_run10day smoke gx3 8x1 isotope,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx3_11x2_gridcd_isotope_reprosum_run10day -smoke gx3 8x1 snwitdrdg,snwgrain,icdefault,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx3_8x4_gridcd_icdefault_reprosum_run10day_snwitdrdg_snwgrain +smoke gx3 8x1 snwitdrdg,snwgrain,icdefault,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx3_8x4_gridcd_icdefault_reprosum_run10day_snwgrain_snwitdrdg #smoke gx3 8x1 dynpicard,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx3_6x4_dynpicard_gridcd_reprosum_run10day smoke gx3 8x1 zsal,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx3_8x3_gridcd_reprosum_run10day_zsal smoke gx3 4x2x25x29x4 reprosum,run10day,gridcd smoke_gx3_1x1x100x116x1_gridcd_reprosum_run10day_thread diff --git a/configuration/scripts/tests/qctest.yml b/configuration/scripts/tests/qctest.yml new file mode 100644 index 000000000..72479a563 --- /dev/null +++ b/configuration/scripts/tests/qctest.yml @@ -0,0 +1,11 @@ +name: qctest +channels: + - conda-forge + - nodefaults +dependencies: +# Python dependencies for plotting scripts + - numpy + - matplotlib-base + - cartopy + - netcdf4 + diff --git a/configuration/scripts/tests/unittest_suite.ts b/configuration/scripts/tests/unittest_suite.ts index 76c9f4312..319c91aa6 100644 --- a/configuration/scripts/tests/unittest_suite.ts +++ 
b/configuration/scripts/tests/unittest_suite.ts @@ -1,5 +1,6 @@ # Test Grid PEs Sets BFB-compare unittest gx3 1x1 helloworld +unittest gx3 1x1 optargs unittest gx3 1x1 calchk,short unittest gx3 4x1x25x29x4 sumchk unittest gx3 1x1x25x29x16 sumchk diff --git a/configuration/tools/cice4_restart_conversion/convert_restarts.f90 b/configuration/tools/cice4_restart_conversion/convert_restarts.f90 index 51f8027b2..30c952510 100644 --- a/configuration/tools/cice4_restart_conversion/convert_restarts.f90 +++ b/configuration/tools/cice4_restart_conversion/convert_restarts.f90 @@ -39,7 +39,12 @@ program convert_restarts logical (kind=log_kind), parameter :: & oceanmixed_ice = .true., & ! if true, read/write ocean mixed layer fields heat_capacity = .true., & ! if true, ice has nonzero heat capacity +#ifdef UNDEPRECATE_0LAYER ! if false, use zero-layer thermodynamics +#else + ! heat_capacity = .false. (zero-layer thermodynamics) + ! has been deprecated in CICE and Icepack +#endif diag = .true. ! write min/max diagnostics for fields ! file names diff --git a/doc/source/cice_index.rst b/doc/source/cice_index.rst index df8e4616b..99679e791 100644 --- a/doc/source/cice_index.rst +++ b/doc/source/cice_index.rst @@ -251,7 +251,7 @@ either Celsius or Kelvin units). "flux_bio_ai", "all biogeochemistry fluxes passed to ocean, grid cell mean", "" "flw", "incoming longwave radiation", "W/m\ :math:`^2`" "flwout", "outgoing longwave radiation", "W/m\ :math:`^2`" - "fm", "Coriolis parameter * mass in U cell", "kg/s" + "fmU", "Coriolis parameter * mass in U cell", "kg/s" "formdrag", "calculate form drag", "" "fpond", "fresh water flux to ponds", "kg/m\ :math:`^2`/s" "fr_resp", "bgc respiration fraction", "0.05" @@ -307,7 +307,6 @@ either Celsius or Kelvin units). "Gstar", "piecewise-linear ridging participation function parameter", "0.15" "**H**", "", "" "halo_info", "information for updating ghost cells", "" - "heat_capacity", "if true, use salinity-dependent thermodynamics", "T" "hfrazilmin", "minimum thickness of new frazil ice", "0.05 m" "hi_min", "minimum ice thickness for thinnest ice category", "0.01 m" "hi_ssl", "ice surface scattering layer thickness", "0.05 m" @@ -355,6 +354,8 @@ either Celsius or Kelvin units). "icells", "number of grid cells with specified property (for vectorization)", "" "iceruf", "ice surface roughness at atmosphere interface", "5.\ :math:`\times`\ 10\ :math:`^{-4}` m" "iceruf_ocn", "under-ice roughness (at ocean interface)", "0.03 m" + "iceemask", "ice extent mask (E-cell)", "" + "icenmask", "ice extent mask (N-cell)", "" "icetmask", "ice extent mask (T-cell)", "" "iceumask", "ice extent mask (U-cell)", "" "idate", "the date at the end of the current time step (yyyymmdd)", "" @@ -386,10 +387,9 @@ either Celsius or Kelvin units). "krdg_partic", "ridging participation function", "1" "krdg_redist", "ridging redistribution function", "1" "krgdn", "mean ridge thickness per thickness of ridging ice", "" - "kseaice", "thermal conductivity of ice for zero-layer thermodynamics", "2.0 W/m/deg" "ksno", "thermal conductivity of snow", "0.30 W/m/deg" "kstrength", "ice strength formulation (1= :cite:`Rothrock75`, 0 = :cite:`Hibler79`)", "1" - "ktherm", "thermodynamic formulation (0 = zero-layer, 1 = :cite:`Bitz99`, 2 = mushy)", "" + "ktherm", "thermodynamic formulation (-1 = off, 1 = :cite:`Bitz99`, 2 = mushy)", "" "**L**", "", "" "l_brine", "flag for brine pocket effects", "" "l_fixed_area", "flag for prescribing remapping fluxes", "" @@ -655,17 +655,17 @@ 
"Sswabs", "shortwave radiation absorbed in snow layers", "W/m\ :math:`^2`" "stefan-boltzmann", "Stefan-Boltzmann constant", "5.67\ :math:`\times`\ 10\ :math:`^{-8}` W/m\ :math:`^2`\ K\ :math:`^4`" "stop_now", "if 1, end program execution", "" - "strairx(y)", "stress on ice by air in the x(y)-direction (centered in U cell)", "N/m\ :math:`^2`" + "strairx(y)U", "stress on ice by air in the x(y)-direction (centered in U cell)", "N/m\ :math:`^2`" "strairx(y)T", "stress on ice by air, x(y)-direction (centered in T cell)", "N/m\ :math:`^2`" "strax(y)", "wind stress components from data", "N/m\ :math:`^2`" "strength", "ice strength", "N/m" "stress12", "internal ice stress, :math:`\sigma_{12}`", "N/m" "stressm", "internal ice stress, :math:`\sigma_{11}-\sigma_{22}`", "N/m" "stressp", "internal ice stress, :math:`\sigma_{11}+\sigma_{22}`", "N/m" - "strintx(y)", "divergence of internal ice stress, x(y)", "N/m\ :math:`^2`" - "strocnx(y)", "ice–ocean stress in the x(y)-direction (U-cell)", "N/m\ :math:`^2`" + "strintx(y)U", "divergence of internal ice stress, x(y)", "N/m\ :math:`^2`" + "strocnx(y)U", "ice–ocean stress in the x(y)-direction (U-cell)", "N/m\ :math:`^2`" "strocnx(y)T", "ice–ocean stress, x(y)-dir. (T-cell)", "N/m\ :math:`^2`" - "strtltx(y)", "surface stress due to sea surface slope", "N/m\ :math:`^2`" + "strtltx(y)U", "surface stress due to sea surface slope", "N/m\ :math:`^2`" "swv(n)dr(f)", "incoming shortwave radiation, visible (near IR), direct (diffuse)", "W/m\ :math:`^2`" "**T**", "", "" "Tair", "air temperature at 10 m", "K" @@ -738,7 +738,7 @@ either Celsius or Kelvin units). "vice(n)", "volume per unit area of ice (in category n)", "m" "vicen_init", "ice volume at beginning of timestep", "m" "viscosity_dyn", "dynamic viscosity of brine", ":math:`1.79\times 10^{-3}` kg/m/s" - "visc_method", "method for calculating viscosities (‘avg_strength’ or ‘avg_zeta’)", "avg_strength" + "visc_method", "method for calculating viscosities (‘avg_strength’ or ‘avg_zeta’)", "avg_zeta" "vocn", "ocean current in the y-direction", "m/s" "vonkar", "von Karman constant", "0.4" "vraftn", "volume of rafted ice", "m" @@ -772,3 +772,9 @@ either Celsius or Kelvin units). "zref", "reference height for stability", "10. m" "zTrf", "reference height for :math:`T_{ref}`, :math:`Q_{ref}`, :math:`U_{ref}`", "2. m" "zvir", "gas constant (water vapor)/gas constant (air) - 1", "0.606" + +.. + ktherm=0 has been deprecated + "heat_capacity", "if true, use salinity-dependent thermodynamics", "T" + "kseaice", "thermal conductivity of ice for zero-layer thermodynamics", "2.0 W/m/deg" + "ktherm", "thermodynamic formulation (0 = zero-layer, 1 = :cite:`Bitz99`, 2 = mushy)", "" diff --git a/doc/source/conf.py b/doc/source/conf.py index 8b9aecaa6..a1b2871ae 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -62,9 +62,9 @@ # built documents. # # The short X.Y version. -version = u'6.3.1' +version = u'6.4.0' # The full version, including alpha/beta/rc tags. -version = u'6.3.1' +version = u'6.4.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/doc/source/science_guide/sg_tracers.rst b/doc/source/science_guide/sg_tracers.rst index 215c13d08..b75edfb00 100644 --- a/doc/source/science_guide/sg_tracers.rst +++ b/doc/source/science_guide/sg_tracers.rst @@ -78,8 +78,6 @@ is not in use. 
"tr_FY", "1", "aice", "nt_FY", " " "tr_lvl", "2", "aice", "nt_alvl", " " " ", " ", "vice", "nt_vlvl", " " - "tr_pond_cesm", "2", "aice", "nt_apnd", " " - " ", " ", "apnd", "nt_vpnd", " " "tr_pond_lvl", "3", "aice", "nt_apnd", " " " ", " ", "apnd", "nt_vpnd", " " " ", " ", "apnd", "nt_ipnd", " " @@ -113,7 +111,9 @@ is not in use. "tr_zaero", "n_zaero", "fbri or (a,v)ice", "nt_zaero", "nlt_zaero" " ", "1", "fbri", "nt_zbgc_frac", " " - +.. + "tr_pond_cesm", "2", "aice", "nt_apnd", " " + " ", " ", "apnd", "nt_vpnd", " " Users may add any number of additional tracers that are transported conservatively, provided that the dependency ``trcr_depend`` is defined appropriately. diff --git a/doc/source/user_guide/ug_case_settings.rst b/doc/source/user_guide/ug_case_settings.rst index 6b10a2165..64264613c 100644 --- a/doc/source/user_guide/ug_case_settings.rst +++ b/doc/source/user_guide/ug_case_settings.rst @@ -353,8 +353,8 @@ tracer_nml "``tr_iage``", "logical", "ice age", "``.false.``" "``tr_iso``", "logical", "isotopes", "``.false.``" "``tr_lvl``", "logical", "level ice area and volume", "``.false.``" - "``tr_pond_cesm``", "logical", "CESM melt ponds", "``.false.``" "``tr_pond_lvl``", "logical", "level-ice melt ponds", "``.false.``" + "``tr_pond_cesm``", " ", "DEPRECATED", " " "``tr_pond_topo``", "logical", "topo melt ponds", "``.false.``" "``tr_snow``", "logical", "advanced snow physics", "``.false.``" "``restart_aero``", "logical", "restart tracer values from file", "``.false.``" @@ -363,12 +363,15 @@ tracer_nml "``restart_FY``", "logical", "restart tracer values from file", "``.false.``" "``restart_iso``", "logical", "restart tracer values from file", "``.false.``" "``restart_lvl``", "logical", "restart tracer values from file", "``.false.``" - "``restart_pond_cesm``", "logical", "restart tracer values from file", "``.false.``" "``restart_pond_lvl``", "logical", "restart tracer values from file", "``.false.``" "``restart_pond_topo``", "logical", "restart tracer values from file", "``.false.``" "``restart_snow``", "logical", "restart snow tracer values from file", "``.false.``" "", "", "", "" +.. + "``tr_pond_cesm``", "logical", "CESM melt ponds", "``.false.``" + "``restart_pond_cesm``", "logical", "restart tracer values from file", "``.false.``" + thermo_nml ~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -388,7 +391,6 @@ thermo_nml "", "``1``", "linear remapping ITD approximation", "" "``ksno``", "real", "snow thermal conductivity", "0.3" "``ktherm``", "``-1``", "thermodynamic model disabled", "1" - "", "``0``", "zero-layer thermodynamic model", "" "", "``1``", "Bitz and Lipscomb thermodynamic model", "" "", "``2``", "mushy-layer thermodynamic model", "" "``phi_c_slow_mode``", ":math:`0<\phi_c < 1`", "critical liquid fraction", "0.05" @@ -396,6 +398,10 @@ thermo_nml "``Rac_rapid_mode``", "real", "critical Rayleigh number", "10.0" "", "", "", "" +.. + ktherm=0 has been deprecated + "", "``0``", "zero-layer thermodynamic model", "" + .. _dynamics_nml: dynamics_nml @@ -482,7 +488,7 @@ dynamics_nml "", "``geostropic``", "computed from ocean velocity", "" "``threshold_hw``", "real", "Max water depth for grounding (see :cite:`Amundrud04`)", "30." 
"``use_mean_vrel``", "logical", "Use mean of two previous iterations for vrel in VP", "``.true.``" - "``visc_method``", "``avg_strength``", "average strength for viscosities on U grid", "``avg_strength``" + "``visc_method``", "``avg_strength``", "average strength for viscosities on U grid", "``avg_zeta``" "", "``avg_zeta``", "average zeta for viscosities on U grid", "" "``yield_curve``", "``ellipse``", "elliptical yield curve", "``ellipse``" "", "", "", "" diff --git a/doc/source/user_guide/ug_implementation.rst b/doc/source/user_guide/ug_implementation.rst index 500209326..a3f7d11bc 100644 --- a/doc/source/user_guide/ug_implementation.rst +++ b/doc/source/user_guide/ug_implementation.rst @@ -5,7 +5,9 @@ Implementation ======================== CICE is written in FORTRAN90 and runs on platforms using UNIX, LINUX, -and other operating systems. The code is based on a two-dimensional +and other operating systems. The current coding standard is Fortran2003 +with use of Fortran2008 feature CONTIGUOUS in the 1d evp solver. +The code is based on a two-dimensional horizontal orthogonal grid that is broken into two-dimensional horizontal blocks and parallelized over blocks with MPI and OpenMP threads. The code also includes some optimizations diff --git a/doc/source/user_guide/ug_running.rst b/doc/source/user_guide/ug_running.rst index aca7d4933..3f3cd3495 100644 --- a/doc/source/user_guide/ug_running.rst +++ b/doc/source/user_guide/ug_running.rst @@ -535,10 +535,10 @@ On macOS: .. code-block:: bash - # Download the Miniconda installer to ~/Downloads/miniconda.sh - curl -L https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh -o ~/Downloads/miniconda.sh + # Download the Miniconda installer to ~/miniconda.sh + curl -L https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh -o ~/miniconda.sh # Install Miniconda - bash ~/Downloads/miniconda.sh + bash ~/miniconda.sh # Follow the prompts diff --git a/doc/source/user_guide/ug_testing.rst b/doc/source/user_guide/ug_testing.rst index b8d42ad6d..284de72f1 100644 --- a/doc/source/user_guide/ug_testing.rst +++ b/doc/source/user_guide/ug_testing.rst @@ -325,7 +325,7 @@ If a user adds ``--set`` to the suite, all tests in that suite will add that opt ./cice.setup --suite base_suite,decomp_suite --mach wolf --env gnu --testid myid -s debug -The option settings defined in the suite have precendence over the command line +The option settings defined at the command line have precedence over the test suite values if there are conflicts. The predefined test suites are defined under **configuration/scripts/tests** and @@ -347,7 +347,6 @@ Lines that begin with # or are blank are ignored. For example, smoke col 1x1 debug,run1year restart col 1x1 debug restart col 1x1 diag1 - restart col 1x1 pondcesm restart col 1x1 pondlvl restart col 1x1 pondtopo @@ -474,7 +473,7 @@ Test Suite Examples ./results.csh If there are conflicts between the ``--set`` options in the suite and on the command line, - the suite will take precedence. + the command line options will take precedence. 5) **Multiple test suites from a single command line** @@ -1051,6 +1050,14 @@ To install the necessary Python packages, the ``pip`` Python utility can be used pip install --user netCDF4 pip install --user numpy pip install --user matplotlib + pip install --user cartopy + +You can also setup a conda env with the same utitities + +.. 
code-block:: bash + + conda env create -f configuration/scripts/tests/qctest.yml + conda activate qctest To run the validation test, set up a baseline run with the original baseline model and then a perturbation run based on recent model changes. Use ``--set qc`` in both runs in addition diff --git a/icepack b/icepack index 76ecd418d..3a039e598 160000 --- a/icepack +++ b/icepack @@ -1 +1 @@ -Subproject commit 76ecd418d2efad7e74fe35c4ec85f0830923bda6 +Subproject commit 3a039e598e6395333a278bb1822f03e9bc954ac6 From b893ee9424dd1256cda22e3cffbb0e3cf0754bfb Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Wed, 2 Nov 2022 15:45:10 -0600 Subject: [PATCH 71/90] add initializations in ice_state * initialize vsnon/vsnon_init and vicen/vicen_init --- cicecore/cicedynB/general/ice_state.F90 | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/cicecore/cicedynB/general/ice_state.F90 b/cicecore/cicedynB/general/ice_state.F90 index 47b360e99..a7842ed5e 100644 --- a/cicecore/cicedynB/general/ice_state.F90 +++ b/cicecore/cicedynB/general/ice_state.F90 @@ -194,6 +194,10 @@ subroutine alloc_state trcr_base = c0 aicen = c0 aicen_init = c0 + vicen = c0 + vicen_init = c0 + vsnon = c0 + vsnon_init = c0 end subroutine alloc_state From 775794598b4dddd89c26236f096b3cf7eea7cdae Mon Sep 17 00:00:00 2001 From: Denise Worthen Date: Fri, 25 Nov 2022 08:58:08 -0500 Subject: [PATCH 72/90] Update CICE (#54) * update to include recent PRs to Consortium/main --- .github/PULL_REQUEST_TEMPLATE.md | 3 + .../cicedynB/analysis/ice_diagnostics.F90 | 156 ++-- cicecore/cicedynB/analysis/ice_history.F90 | 2 +- .../cicedynB/analysis/ice_history_fsd.F90 | 53 +- .../cicedynB/analysis/ice_history_pond.F90 | 30 +- .../cicedynB/analysis/ice_history_shared.F90 | 64 +- cicecore/cicedynB/dynamics/ice_dyn_eap.F90 | 150 ++- cicecore/cicedynB/dynamics/ice_dyn_evp.F90 | 534 ++++++----- cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 | 54 +- cicecore/cicedynB/dynamics/ice_dyn_shared.F90 | 48 +- cicecore/cicedynB/dynamics/ice_dyn_vp.F90 | 877 +++++++++--------- .../dynamics/ice_transport_driver.F90 | 15 - .../cicedynB/dynamics/ice_transport_remap.F90 | 1 + cicecore/cicedynB/general/ice_flux.F90 | 22 +- cicecore/cicedynB/general/ice_forcing.F90 | 197 +++- cicecore/cicedynB/general/ice_forcing_bgc.F90 | 144 ++- cicecore/cicedynB/general/ice_init.F90 | 159 +--- cicecore/cicedynB/general/ice_state.F90 | 10 +- cicecore/cicedynB/general/ice_step_mod.F90 | 142 ++- .../infrastructure/comm/mpi/ice_boundary.F90 | 64 ++ .../comm/mpi/ice_global_reductions.F90 | 69 -- .../comm/serial/ice_boundary.F90 | 64 ++ .../comm/serial/ice_global_reductions.F90 | 69 -- .../cicedynB/infrastructure/ice_domain.F90 | 1 + cicecore/cicedynB/infrastructure/ice_grid.F90 | 443 ++++++--- .../infrastructure/ice_restart_driver.F90 | 50 +- .../io/io_binary/ice_restart.F90 | 71 -- .../io/io_netcdf/ice_restart.F90 | 15 - .../infrastructure/io/io_pio2/ice_restart.F90 | 15 - .../drivers/direct/hadgem3/CICE_InitMod.F90 | 30 - .../drivers/direct/hadgem3/CICE_RunMod.F90 | 15 - .../direct/nemo_concepts/CICE_InitMod.F90 | 30 - .../direct/nemo_concepts/CICE_RunMod.F90 | 15 - cicecore/drivers/mct/cesm1/CICE_InitMod.F90 | 30 - cicecore/drivers/mct/cesm1/CICE_RunMod.F90 | 15 - .../drivers/mct/cesm1/ice_import_export.F90 | 6 +- .../drivers/mct/cesm1/ice_prescribed_mod.F90 | 10 +- cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 | 30 - cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 | 15 - .../drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 187 ++-- .../drivers/nuopc/cmeps/ice_import_export.F90 | 31 +- 
cicecore/drivers/nuopc/cmeps/ice_mesh_mod.F90 | 54 +- .../nuopc/cmeps/ice_prescribed_mod.F90 | 10 +- cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 | 30 - cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 | 15 - cicecore/drivers/nuopc/dmi/cice_cap.info | 4 +- .../drivers/standalone/cice/CICE_InitMod.F90 | 30 - .../drivers/standalone/cice/CICE_RunMod.F90 | 15 - .../unittest/gridavgchk/CICE_InitMod.F90 | 30 - .../drivers/unittest/sumchk/CICE_InitMod.F90 | 30 - cicecore/drivers/unittest/sumchk/sumchk.F90 | 64 -- cicecore/shared/ice_arrays_column.F90 | 1 + cicecore/shared/ice_init_column.F90 | 29 - cicecore/shared/ice_restart_column.F90 | 74 -- configuration/scripts/cice.batch.csh | 17 +- configuration/scripts/cice.launch.csh | 70 +- configuration/scripts/ice_in | 9 +- .../scripts/machines/Macros.gust_cray | 71 ++ .../scripts/machines/Macros.gust_intel | 68 ++ .../scripts/machines/Macros.narwhal_aocc | 2 +- .../scripts/machines/Macros.narwhal_intelhpcx | 57 ++ .../scripts/machines/env.badger_intel | 0 .../scripts/machines/env.banting_gnu | 0 .../scripts/machines/env.banting_intel | 0 .../scripts/machines/env.cheyenne_gnu | 0 .../scripts/machines/env.cheyenne_intel | 0 .../scripts/machines/env.cheyenne_pgi | 0 .../scripts/machines/env.compy_intel | 0 .../scripts/machines/env.conda_linux | 0 .../scripts/machines/env.conda_macos | 0 configuration/scripts/machines/env.cori_intel | 0 configuration/scripts/machines/env.daley_gnu | 0 .../scripts/machines/env.daley_intel | 0 configuration/scripts/machines/env.fram_intel | 0 configuration/scripts/machines/env.freya_gnu | 0 .../scripts/machines/env.freya_intel | 0 configuration/scripts/machines/env.gaea_intel | 0 .../scripts/machines/env.gaffney_gnu | 0 .../scripts/machines/env.gaffney_intel | 0 configuration/scripts/machines/env.gust_cray | 72 ++ configuration/scripts/machines/env.gust_intel | 72 ++ configuration/scripts/machines/env.hera_intel | 0 .../scripts/machines/env.high_Sierra_gnu | 0 .../scripts/machines/env.hobart_intel | 0 configuration/scripts/machines/env.hobart_nag | 0 configuration/scripts/machines/env.izumi_gnu | 0 .../scripts/machines/env.izumi_intel | 0 configuration/scripts/machines/env.izumi_nag | 0 configuration/scripts/machines/env.izumi_pgi | 0 .../scripts/machines/env.koehr_intel | 0 .../scripts/machines/env.mustang_intel18 | 0 .../scripts/machines/env.mustang_intel19 | 0 .../scripts/machines/env.mustang_intel20 | 0 .../scripts/machines/env.narwhal_aocc | 19 +- .../scripts/machines/env.narwhal_cray | 20 +- .../scripts/machines/env.narwhal_gnu | 22 +- .../scripts/machines/env.narwhal_intel | 20 +- .../scripts/machines/env.narwhal_intelhpcx | 52 ++ configuration/scripts/machines/env.nrlssc_gnu | 4 +- configuration/scripts/machines/env.onyx_cray | 0 configuration/scripts/machines/env.onyx_gnu | 0 configuration/scripts/machines/env.onyx_intel | 0 .../scripts/machines/env.orion_intel | 0 .../scripts/machines/env.phase3_intel | 0 configuration/scripts/machines/env.ppp5_intel | 7 + configuration/scripts/machines/env.ppp6_gnu | 3 +- .../scripts/machines/env.ppp6_gnu-impi | 7 + configuration/scripts/machines/env.ppp6_intel | 7 + .../scripts/machines/env.ppp6_intel19 | 7 + .../scripts/machines/env.robert_intel | 2 + .../scripts/machines/env.testmachine_intel | 0 .../scripts/machines/env.travisCI_gnu | 0 .../scripts/machines/env.underhill_intel | 2 + configuration/scripts/options/set_nml.bgczm | 30 + .../{set_nml.boxsymw => set_nml.boxclosed} | 5 - .../scripts/options/set_nml.boxforcee | 5 + .../scripts/options/set_nml.boxforcen | 5 + 
.../scripts/options/set_nml.boxforcene | 5 + .../scripts/options/set_nml.boxislandsn | 46 - .../scripts/options/set_nml.boxislandsne | 46 - .../{set_nml.boxislandse => set_nml.boxopen} | 6 - configuration/scripts/options/set_nml.boxsyme | 58 -- configuration/scripts/options/set_nml.boxsymn | 58 -- .../scripts/options/set_nml.boxsymne | 58 -- configuration/scripts/options/set_nml.boxsyms | 58 -- configuration/scripts/options/set_nml.gbox12 | 2 +- configuration/scripts/options/set_nml.gbox180 | 2 +- configuration/scripts/options/set_nml.gbox80 | 2 +- configuration/scripts/options/set_nml.modal | 4 + configuration/scripts/options/set_nml.scale1 | 2 + configuration/scripts/options/set_nml.vargrid | 13 + configuration/scripts/tests/base_suite.ts | 3 +- configuration/scripts/tests/baseline.script | 6 +- configuration/scripts/tests/cice.results.csh | 18 +- configuration/scripts/tests/gridsys_suite.ts | 41 +- configuration/scripts/tests/io_suite.ts | 10 +- configuration/scripts/tests/nothread_suite.ts | 2 +- configuration/scripts/tests/omp_suite.ts | 12 +- .../convert_restarts.f90 | 4 - doc/source/cice_index.rst | 23 +- doc/source/developer_guide/dg_other.rst | 8 +- doc/source/intro/quickstart.rst | 11 +- doc/source/science_guide/sg_dynamics.rst | 3 +- doc/source/science_guide/sg_fundvars.rst | 2 +- doc/source/user_guide/ug_case_settings.rst | 14 +- doc/source/user_guide/ug_implementation.rst | 34 +- icepack | 2 +- 147 files changed, 2777 insertions(+), 2753 deletions(-) create mode 100644 configuration/scripts/machines/Macros.gust_cray create mode 100644 configuration/scripts/machines/Macros.gust_intel create mode 100644 configuration/scripts/machines/Macros.narwhal_intelhpcx mode change 100755 => 100644 configuration/scripts/machines/env.badger_intel mode change 100755 => 100644 configuration/scripts/machines/env.banting_gnu mode change 100755 => 100644 configuration/scripts/machines/env.banting_intel mode change 100755 => 100644 configuration/scripts/machines/env.cheyenne_gnu mode change 100755 => 100644 configuration/scripts/machines/env.cheyenne_intel mode change 100755 => 100644 configuration/scripts/machines/env.cheyenne_pgi mode change 100755 => 100644 configuration/scripts/machines/env.compy_intel mode change 100755 => 100644 configuration/scripts/machines/env.conda_linux mode change 100755 => 100644 configuration/scripts/machines/env.conda_macos mode change 100755 => 100644 configuration/scripts/machines/env.cori_intel mode change 100755 => 100644 configuration/scripts/machines/env.daley_gnu mode change 100755 => 100644 configuration/scripts/machines/env.daley_intel mode change 100755 => 100644 configuration/scripts/machines/env.fram_intel mode change 100755 => 100644 configuration/scripts/machines/env.freya_gnu mode change 100755 => 100644 configuration/scripts/machines/env.freya_intel mode change 100755 => 100644 configuration/scripts/machines/env.gaea_intel mode change 100755 => 100644 configuration/scripts/machines/env.gaffney_gnu mode change 100755 => 100644 configuration/scripts/machines/env.gaffney_intel create mode 100644 configuration/scripts/machines/env.gust_cray create mode 100644 configuration/scripts/machines/env.gust_intel mode change 100755 => 100644 configuration/scripts/machines/env.hera_intel mode change 100755 => 100644 configuration/scripts/machines/env.high_Sierra_gnu mode change 100755 => 100644 configuration/scripts/machines/env.hobart_intel mode change 100755 => 100644 configuration/scripts/machines/env.hobart_nag mode change 100755 => 100644 
configuration/scripts/machines/env.izumi_gnu mode change 100755 => 100644 configuration/scripts/machines/env.izumi_intel mode change 100755 => 100644 configuration/scripts/machines/env.izumi_nag mode change 100755 => 100644 configuration/scripts/machines/env.izumi_pgi mode change 100755 => 100644 configuration/scripts/machines/env.koehr_intel mode change 100755 => 100644 configuration/scripts/machines/env.mustang_intel18 mode change 100755 => 100644 configuration/scripts/machines/env.mustang_intel19 mode change 100755 => 100644 configuration/scripts/machines/env.mustang_intel20 mode change 100755 => 100644 configuration/scripts/machines/env.narwhal_aocc mode change 100755 => 100644 configuration/scripts/machines/env.narwhal_cray mode change 100755 => 100644 configuration/scripts/machines/env.narwhal_gnu mode change 100755 => 100644 configuration/scripts/machines/env.narwhal_intel create mode 100644 configuration/scripts/machines/env.narwhal_intelhpcx mode change 100755 => 100644 configuration/scripts/machines/env.nrlssc_gnu mode change 100755 => 100644 configuration/scripts/machines/env.onyx_cray mode change 100755 => 100644 configuration/scripts/machines/env.onyx_gnu mode change 100755 => 100644 configuration/scripts/machines/env.onyx_intel mode change 100755 => 100644 configuration/scripts/machines/env.orion_intel mode change 100755 => 100644 configuration/scripts/machines/env.phase3_intel mode change 100755 => 100644 configuration/scripts/machines/env.testmachine_intel mode change 100755 => 100644 configuration/scripts/machines/env.travisCI_gnu create mode 100644 configuration/scripts/options/set_nml.bgczm rename configuration/scripts/options/{set_nml.boxsymw => set_nml.boxclosed} (89%) create mode 100644 configuration/scripts/options/set_nml.boxforcee create mode 100644 configuration/scripts/options/set_nml.boxforcen create mode 100644 configuration/scripts/options/set_nml.boxforcene delete mode 100644 configuration/scripts/options/set_nml.boxislandsn delete mode 100644 configuration/scripts/options/set_nml.boxislandsne rename configuration/scripts/options/{set_nml.boxislandse => set_nml.boxopen} (84%) delete mode 100644 configuration/scripts/options/set_nml.boxsyme delete mode 100644 configuration/scripts/options/set_nml.boxsymn delete mode 100644 configuration/scripts/options/set_nml.boxsymne delete mode 100644 configuration/scripts/options/set_nml.boxsyms create mode 100644 configuration/scripts/options/set_nml.modal create mode 100644 configuration/scripts/options/set_nml.scale1 create mode 100644 configuration/scripts/options/set_nml.vargrid diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 6256f9bc6..77cf56ddc 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -18,6 +18,9 @@ please refer to: umax_stab) then !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - if (abs(work1(i,j,iblk) - umaxn) < puny) then - write(nu_diag,*) ' ' - write(nu_diag,*) 'Warning, large ice speed' - write(nu_diag,*) 'my_task, iblk, i, j, umaxn:', & - my_task, iblk, i, j, umaxn - endif - enddo - enddo + do j = 1, ny_block + do i = 1, nx_block + if (abs(work1(i,j,iblk) - umaxn) < puny) then + write(nu_diag,*) ' ' + write(nu_diag,*) 'Warning, large ice speed' + write(nu_diag,*) 'my_task, iblk, i, j, umaxn:', & + my_task, iblk, i, j, umaxn + endif + enddo + enddo enddo !$OMP END PARALLEL DO elseif (umaxs > umax_stab) then !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks - do j = 1, 
ny_block - do i = 1, nx_block - if (abs(work1(i,j,iblk) - umaxs) < puny) then - write(nu_diag,*) ' ' - write(nu_diag,*) 'Warning, large ice speed' - write(nu_diag,*) 'my_task, iblk, i, j, umaxs:', & - my_task, iblk, i, j, umaxs - endif - enddo - enddo + do j = 1, ny_block + do i = 1, nx_block + if (abs(work1(i,j,iblk) - umaxs) < puny) then + write(nu_diag,*) ' ' + write(nu_diag,*) 'Warning, large ice speed' + write(nu_diag,*) 'my_task, iblk, i, j, umaxs:', & + my_task, iblk, i, j, umaxs + endif + enddo + enddo enddo !$OMP END PARALLEL DO endif ! umax @@ -1357,14 +1349,14 @@ subroutine init_mass_diags do n=1,n_aero !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - work1(i,j,iblk) = trcr(i,j,nt_aero +4*(n-1),iblk)*vsno(i,j,iblk) & - + trcr(i,j,nt_aero+1+4*(n-1),iblk)*vsno(i,j,iblk) & - + trcr(i,j,nt_aero+2+4*(n-1),iblk)*vice(i,j,iblk) & - + trcr(i,j,nt_aero+3+4*(n-1),iblk)*vice(i,j,iblk) - enddo - enddo + do j = 1, ny_block + do i = 1, nx_block + work1(i,j,iblk) = trcr(i,j,nt_aero +4*(n-1),iblk)*vsno(i,j,iblk) & + + trcr(i,j,nt_aero+1+4*(n-1),iblk)*vsno(i,j,iblk) & + + trcr(i,j,nt_aero+2+4*(n-1),iblk)*vice(i,j,iblk) & + + trcr(i,j,nt_aero+3+4*(n-1),iblk)*vice(i,j,iblk) + enddo + enddo enddo !$OMP END PARALLEL DO totaeron(n)= global_sum(work1, distrb_info, field_loc_center, tarean) @@ -1377,17 +1369,17 @@ subroutine init_mass_diags totps = c0 !$OMP PARALLEL DO PRIVATE(iblk,i,j,n) do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - work1(i,j,iblk) = c0 - do n = 1, ncat - work1(i,j,iblk) = work1(i,j,iblk) & - + aicen(i,j,n,iblk) & - * trcrn(i,j,nt_apnd,n,iblk) & - * trcrn(i,j,nt_hpnd,n,iblk) + do j = 1, ny_block + do i = 1, nx_block + work1(i,j,iblk) = c0 + do n = 1, ncat + work1(i,j,iblk) = work1(i,j,iblk) & + + aicen(i,j,n,iblk) & + * trcrn(i,j,nt_apnd,n,iblk) & + * trcrn(i,j,nt_hpnd,n,iblk) + enddo + enddo enddo - enddo - enddo enddo !$OMP END PARALLEL DO totpn = global_sum(work1, distrb_info, field_loc_center, tarean) diff --git a/cicecore/cicedynB/analysis/ice_history.F90 b/cicecore/cicedynB/analysis/ice_history.F90 index caaa56295..f5e7d0d16 100644 --- a/cicecore/cicedynB/analysis/ice_history.F90 +++ b/cicecore/cicedynB/analysis/ice_history.F90 @@ -3546,7 +3546,7 @@ subroutine accum_hist (dt) call accum_hist_drag (iblk) ! floe size distribution - call accum_hist_fsd (iblk) + call accum_hist_fsd (dt, iblk) ! 
advanced snow physics call accum_hist_snow (iblk) diff --git a/cicecore/cicedynB/analysis/ice_history_fsd.F90 b/cicecore/cicedynB/analysis/ice_history_fsd.F90 index 50fee99e7..b52db4e05 100644 --- a/cicecore/cicedynB/analysis/ice_history_fsd.F90 +++ b/cicecore/cicedynB/analysis/ice_history_fsd.F90 @@ -137,7 +137,7 @@ subroutine init_hist_fsd_2D do ns = 1, nstreams if (f_wave_sig_ht(1:1) /= 'x') & - call define_hist_field(n_wave_sig_ht,"wave_sig_ht","1",tstr2D, tcstr, & + call define_hist_field(n_wave_sig_ht,"wave_sig_ht","m",tstr2D, tcstr, & "significant height of wind and swell waves", & "from attenuated spectrum in ice", c1, c0, & ns, f_wave_sig_ht) @@ -147,7 +147,7 @@ subroutine init_hist_fsd_2D "for waves", c1, c0, & ns, f_aice_ww) if (f_diam_ww(1:1) /= 'x') & - call define_hist_field(n_diam_ww,"diam_ww","1",tstr2D, tcstr, & + call define_hist_field(n_diam_ww,"diam_ww","m",tstr2D, tcstr, & "Average (number) diameter of floes > Dmin", & "for waves", c1, c0, & ns, f_diam_ww) @@ -216,27 +216,27 @@ subroutine init_hist_fsd_3Df if (histfreq(ns) /= 'x') then if (f_afsd(1:1) /= 'x') & - call define_hist_field(n_afsd,"afsd", "1", tstr3Df, tcstr, & + call define_hist_field(n_afsd,"afsd", "1/m", tstr3Df, tcstr, & "areal floe size distribution", & "per unit bin width ", c1, c0, ns, f_afsd) if (f_dafsd_newi(1:1) /= 'x') & - call define_hist_field(n_dafsd_newi,"dafsd_newi","1",tstr3Df, tcstr, & + call define_hist_field(n_dafsd_newi,"dafsd_newi","1/s",tstr3Df, tcstr, & "Change in fsd: new ice", & "Avg over freq period", c1, c0, ns, f_dafsd_newi) if (f_dafsd_latg(1:1) /= 'x') & - call define_hist_field(n_dafsd_latg,"dafsd_latg","1",tstr3Df, tcstr, & + call define_hist_field(n_dafsd_latg,"dafsd_latg","1/s",tstr3Df, tcstr, & "Change in fsd: lateral growth", & "Avg over freq period", c1, c0, ns, f_dafsd_latg) if (f_dafsd_latm(1:1) /= 'x') & - call define_hist_field(n_dafsd_latm,"dafsd_latm","1",tstr3Df, tcstr, & + call define_hist_field(n_dafsd_latm,"dafsd_latm","1/s",tstr3Df, tcstr, & "Change in fsd: lateral melt", & "Avg over freq period", c1, c0, ns, f_dafsd_latm) if (f_dafsd_wave(1:1) /= 'x') & - call define_hist_field(n_dafsd_wave,"dafsd_wave","1",tstr3Df, tcstr, & + call define_hist_field(n_dafsd_wave,"dafsd_wave","1/s",tstr3Df, tcstr, & "Change in fsd: waves", & "Avg over freq period", c1, c0, ns, f_dafsd_wave) if (f_dafsd_weld(1:1) /= 'x') & - call define_hist_field(n_dafsd_weld,"dafsd_weld","1",tstr3Df, tcstr, & + call define_hist_field(n_dafsd_weld,"dafsd_weld","1/s",tstr3Df, tcstr, & "Change in fsd: welding", & "Avg over freq period", c1, c0, ns, f_dafsd_weld) endif ! if (histfreq(ns) /= 'x') @@ -272,7 +272,7 @@ subroutine init_hist_fsd_4Df if (histfreq(ns) /= 'x') then if (f_afsdn(1:1) /= 'x') & - call define_hist_field(n_afsdn,"afsdn","1",tstr4Df, tcstr, & + call define_hist_field(n_afsdn,"afsdn","1/m",tstr4Df, tcstr, & "areal floe size and thickness distribution", & "per unit bin width", c1, c0, ns, f_afsdn) @@ -288,16 +288,19 @@ end subroutine init_hist_fsd_4Df ! accumulate average ice quantities or snapshots ! author: Elizabeth C. 
Hunke, LANL - subroutine accum_hist_fsd (iblk) + subroutine accum_hist_fsd (dt, iblk) use ice_blocks, only: nx_block, ny_block use ice_constants, only: c0, c1, c2, c4 use ice_history_shared, only: a2D, a3Df, a4Df, nfsd_hist, & ncat_hist, accum_hist_field, n3Dacum, n4Dscum - use ice_state, only: trcrn, aicen_init, vicen, aice_init + use ice_state, only: trcrn, aicen, vicen, aice use ice_arrays_column, only: wave_sig_ht, floe_rad_c, floe_binwidth, & d_afsd_newi, d_afsd_latg, d_afsd_latm, d_afsd_wave, d_afsd_weld + real (kind=dbl_kind), intent(in) :: & + dt ! time step + integer (kind=int_kind), intent(in) :: & iblk ! block index @@ -342,7 +345,7 @@ subroutine accum_hist_fsd (iblk) worka(i,j) = c0 do n = 1, ncat_hist do k = 1, nfsd_hist - worka(i,j) = worka(i,j) + aicen_init(i,j,n,iblk)*trcrn(i,j,nt_fsd+k-1,n,iblk) + worka(i,j) = worka(i,j) + aicen(i,j,n,iblk)*trcrn(i,j,nt_fsd+k-1,n,iblk) end do end do end do @@ -357,7 +360,7 @@ subroutine accum_hist_fsd (iblk) workb = c0 do n = 1, ncat_hist do k = 1, nfsd_hist - workc = aicen_init(i,j,n,iblk)*trcrn(i,j,nt_fsd+k-1,n,iblk) & + workc = aicen(i,j,n,iblk)*trcrn(i,j,nt_fsd+k-1,n,iblk) & / (c4*floeshape*floe_rad_c(k)**2) ! number-mean radius worka(i,j) = worka(i,j) + workc * floe_rad_c(k) @@ -380,7 +383,7 @@ subroutine accum_hist_fsd (iblk) workb = c0 do n = 1, ncat_hist do k = 1, nfsd_hist - workb = workb + aicen_init(i,j,n,iblk)*trcrn(i,j,nt_fsd+k-1,n,iblk) + workb = workb + aicen(i,j,n,iblk)*trcrn(i,j,nt_fsd+k-1,n,iblk) worka(i,j) = worka(i,j) + vicen(i,j,n,iblk)*trcrn(i,j,nt_fsd+k-1,n,iblk) end do end do @@ -398,12 +401,12 @@ subroutine accum_hist_fsd (iblk) do j = 1, ny_block do i = 1, nx_block worka(i,j) = c0 - if (aice_init(i,j,iblk) > puny) then + if (aice(i,j,iblk) > puny) then do k = 1, nfsd_hist do n = 1, ncat_hist worka(i,j) = worka(i,j) & + (trcrn(i,j,nt_fsd+k-1,n,iblk) * floe_rad_c(k) & - * aicen_init(i,j,n,iblk)/aice_init(i,j,iblk)) + * aicen(i,j,n,iblk)/aice(i,j,iblk)) end do end do endif @@ -416,12 +419,12 @@ subroutine accum_hist_fsd (iblk) do j = 1, ny_block do i = 1, nx_block worka(i,j) = c0 - if (aice_init(i,j,iblk) > puny) then + if (aice(i,j,iblk) > puny) then do k = 1, nfsd_hist do n = 1, ncat_hist worka(i,j) = worka(i,j) & + (c8*floeshape*trcrn(i,j,nt_fsd+k-1,n,iblk)*floe_rad_c(k) & - *aicen_init(i,j,n,iblk)/(c4*floeshape*floe_rad_c(k)**2 *aice_init(i,j,iblk))) + *aicen(i,j,n,iblk)/(c4*floeshape*floe_rad_c(k)**2 *aice(i,j,iblk))) end do end do endif @@ -442,7 +445,7 @@ subroutine accum_hist_fsd (iblk) worke(i,j,k)=c0 do n = 1, ncat_hist worke(i,j,k) = worke(i,j,k) + (trcrn(i,j,nt_fsd+k-1,n,iblk) & - * aicen_init(i,j,n,iblk)/floe_binwidth(k)) + * aicen(i,j,n,iblk)/floe_binwidth(k)) end do end do end do @@ -452,19 +455,19 @@ subroutine accum_hist_fsd (iblk) if (f_dafsd_newi(1:1)/= 'x') & call accum_hist_field(n_dafsd_newi-n3Dacum, iblk, nfsd_hist, & - d_afsd_newi(:,:,1:nfsd_hist,iblk), a3Df) + d_afsd_newi(:,:,1:nfsd_hist,iblk)/dt, a3Df) if (f_dafsd_latg(1:1)/= 'x') & call accum_hist_field(n_dafsd_latg-n3Dacum, iblk, nfsd_hist, & - d_afsd_latg(:,:,1:nfsd_hist,iblk), a3Df) + d_afsd_latg(:,:,1:nfsd_hist,iblk)/dt, a3Df) if (f_dafsd_latm(1:1)/= 'x') & call accum_hist_field(n_dafsd_latm-n3Dacum, iblk, nfsd_hist, & - d_afsd_latm(:,:,1:nfsd_hist,iblk), a3Df) + d_afsd_latm(:,:,1:nfsd_hist,iblk)/dt, a3Df) if (f_dafsd_wave(1:1)/= 'x') & call accum_hist_field(n_dafsd_wave-n3Dacum, iblk, nfsd_hist, & - d_afsd_wave(:,:,1:nfsd_hist,iblk), a3Df) + d_afsd_wave(:,:,1:nfsd_hist,iblk)/dt, a3Df) if (f_dafsd_weld(1:1)/= 'x') & call 
accum_hist_field(n_dafsd_weld-n3Dacum, iblk, nfsd_hist, & - d_afsd_weld(:,:,1:nfsd_hist,iblk), a3Df) + d_afsd_weld(:,:,1:nfsd_hist,iblk)/dt, a3Df) endif ! a3Df allocated ! 4D floe size, thickness category fields @@ -476,7 +479,7 @@ subroutine accum_hist_fsd (iblk) do j = 1, ny_block do i = 1, nx_block workd(i,j,k,n) = trcrn(i,j,nt_fsd+k-1,n,iblk) & - * aicen_init(i,j,n,iblk)/floe_binwidth(k) + * aicen(i,j,n,iblk)/floe_binwidth(k) end do end do end do diff --git a/cicecore/cicedynB/analysis/ice_history_pond.F90 b/cicecore/cicedynB/analysis/ice_history_pond.F90 index f6e4b8737..8818ff94e 100644 --- a/cicecore/cicedynB/analysis/ice_history_pond.F90 +++ b/cicecore/cicedynB/analysis/ice_history_pond.F90 @@ -268,13 +268,8 @@ subroutine accum_hist_pond (iblk) integer (kind=int_kind) :: & nt_apnd, nt_hpnd, nt_alvl, nt_ipnd -#ifdef UNDEPRECATE_CESMPONDS - logical (kind=log_kind) :: & - tr_pond_cesm, tr_pond_lvl, tr_pond_topo -#else logical (kind=log_kind) :: & tr_pond_lvl, tr_pond_topo -#endif real (kind=dbl_kind) :: & puny @@ -289,13 +284,8 @@ subroutine accum_hist_pond (iblk) !--------------------------------------------------------------- call icepack_query_parameters(puny_out=puny) -#ifdef UNDEPRECATE_CESMPONDS - call icepack_query_tracer_flags(tr_pond_cesm_out=tr_pond_cesm, & - tr_pond_lvl_out=tr_pond_lvl, tr_pond_topo_out=tr_pond_topo) -#else call icepack_query_tracer_flags(tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo) -#endif call icepack_query_tracer_indices(nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, & nt_alvl_out=nt_alvl, nt_ipnd_out=nt_ipnd) call icepack_warnings_flush(nu_diag) @@ -303,27 +293,9 @@ subroutine accum_hist_pond (iblk) file=__FILE__, line=__LINE__) if (allocated(a2D)) then -#ifdef UNDEPRECATE_CESMPONDS - if (tr_pond_cesm) then - if (f_apond(1:1)/= 'x') & - call accum_hist_field(n_apond, iblk, & - trcr(:,:,nt_apnd,iblk), a2D) - if (f_apond_ai(1:1)/= 'x') & - call accum_hist_field(n_apond_ai, iblk, & - aice(:,:,iblk) * trcr(:,:,nt_apnd,iblk), a2D) - if (f_hpond(1:1)/= 'x') & - call accum_hist_field(n_hpond, iblk, & - trcr(:,:,nt_apnd,iblk) & - * trcr(:,:,nt_hpnd,iblk), a2D) - if (f_hpond_ai(1:1)/= 'x') & - call accum_hist_field(n_hpond_ai, iblk, & - aice(:,:,iblk) * trcr(:,:,nt_apnd,iblk) & - * trcr(:,:,nt_hpnd,iblk), a2D) - elseif (tr_pond_lvl) then -#else if (tr_pond_lvl) then -#endif + if (f_apond(1:1)/= 'x') & call accum_hist_field(n_apond, iblk, & trcr(:,:,nt_alvl,iblk) * trcr(:,:,nt_apnd,iblk), a2D) diff --git a/cicecore/cicedynB/analysis/ice_history_shared.F90 b/cicecore/cicedynB/analysis/ice_history_shared.F90 index ee48a9996..d9c62edde 100644 --- a/cicecore/cicedynB/analysis/ice_history_shared.F90 +++ b/cicecore/cicedynB/analysis/ice_history_shared.F90 @@ -156,57 +156,25 @@ module ice_history_shared igrdz(nvar_grdz) ! true if category/vertical grid field is written character (len=25), public, parameter :: & - tcstr = 'area: tarea' , & ! vcellmeas for T cell quantities - ucstr = 'area: uarea' , & ! vcellmeas for U cell quantities - ncstr = 'area: narea' , & ! vcellmeas for N cell quantities - ecstr = 'area: earea' , & ! vcellmeas for E cell quantities - tstr2D = 'TLON TLAT time' , & ! vcoord for T cell quantities, 2D - ustr2D = 'ULON ULAT time' , & ! vcoord for U cell quantities, 2D - nstr2D = 'NLON NLAT time' , & ! vcoord for N cell quantities, 2D - estr2D = 'ELON ELAT time' , & ! vcoord for E cell quantities, 2D - tstr3Dz = 'TLON TLAT VGRDi time',& ! vcoord for T cell quantities, 3D - ustr3Dz = 'ULON ULAT VGRDi time',& ! 
vcoord for U cell quantities, 3D - nstr3Dz = 'NLON NLAT VGRDi time',& ! vcoord for N cell quantities, 3D - estr3Dz = 'ELON ELAT VGRDi time',& ! vcoord for E cell quantities, 3D - tstr3Dc = 'TLON TLAT NCAT time',& ! vcoord for T cell quantities, 3D - ustr3Dc = 'ULON ULAT NCAT time',& ! vcoord for U cell quantities, 3D - nstr3Dc = 'NLON NLAT NCAT time',& ! vcoord for N cell quantities, 3D - estr3Dc = 'ELON ELAT NCAT time',& ! vcoord for E cell quantities, 3D - tstr3Db = 'TLON TLAT VGRDb time',& ! vcoord for T cell quantities, 3D - ustr3Db = 'ULON ULAT VGRDb time',& ! vcoord for U cell quantities, 3D - nstr3Db = 'NLON NLAT VGRDb time',& ! vcoord for N cell quantities, 3D - estr3Db = 'ELON ELAT VGRDb time',& ! vcoord for E cell quantities, 3D - tstr3Da = 'TLON TLAT VGRDa time',& ! vcoord for T cell quantities, 3D - ustr3Da = 'ULON ULAT VGRDa time',& ! vcoord for U cell quantities, 3D - nstr3Da = 'NLON NLAT VGRDa time',& ! vcoord for N cell quantities, 3D - estr3Da = 'ELON ELAT VGRDa time',& ! vcoord for E cell quantities, 3D - tstr3Df = 'TLON TLAT NFSD time',& ! vcoord for T cell quantities, 3D - ustr3Df = 'ULON ULAT NFSD time',& ! vcoord for U cell quantities, 3D - nstr3Df = 'NLON NLAT NFSD time',& ! vcoord for N cell quantities, 3D - estr3Df = 'ELON ELAT NFSD time',& ! vcoord for E cell quantities, 3D - -!ferret + ! T grids + tcstr = 'area: tarea' , & ! vcellmeas for T cell quantities + tstr2D = 'TLON TLAT time' , & ! vcoord for T cell, 2D + tstr3Dc = 'TLON TLAT NCAT time', & ! vcoord for T cell, 3D, ncat + tstr3Da = 'TLON TLAT VGRDa time', & ! vcoord for T cell, 3D, ice-snow-bio + tstr3Db = 'TLON TLAT VGRDb time', & ! vcoord for T cell, 3D, ice-bio + tstr3Df = 'TLON TLAT NFSD time', & ! vcoord for T cell, 3D, fsd tstr4Di = 'TLON TLAT VGRDi NCAT', & ! vcoord for T cell, 4D, ice - ustr4Di = 'ULON ULAT VGRDi NCAT', & ! vcoord for U cell, 4D, ice - nstr4Di = 'NLON NLAT VGRDi NCAT', & ! vcoord for N cell, 4D, ice - estr4Di = 'ELON ELAT VGRDi NCAT', & ! vcoord for E cell, 4D, ice tstr4Ds = 'TLON TLAT VGRDs NCAT', & ! vcoord for T cell, 4D, snow - ustr4Ds = 'ULON ULAT VGRDs NCAT', & ! vcoord for U cell, 4D, snow - nstr4Ds = 'NLON NLAT VGRDs NCAT', & ! vcoord for N cell, 4D, snow - estr4Ds = 'ELON ELAT VGRDs NCAT', & ! vcoord for E cell, 4D, snow tstr4Df = 'TLON TLAT NFSD NCAT', & ! vcoord for T cell, 4D, fsd - ustr4Df = 'ULON ULAT NFSD NCAT', & ! vcoord for U cell, 4D, fsd - nstr4Df = 'NLON NLAT NFSD NCAT', & ! vcoord for N cell, 4D, fsd - estr4Df = 'ELON ELAT NFSD NCAT' ! vcoord for E cell, 4D, fsd -!ferret -! tstr4Di = 'TLON TLAT VGRDi NCAT time', & ! ferret can not handle time -! ustr4Di = 'ULON ULAT VGRDi NCAT time', & ! index on 4D variables. -! tstr4Ds = 'TLON TLAT VGRDs NCAT time', & ! Use 'ferret' lines instead -! ustr4Ds = 'ULON ULAT VGRDs NCAT time', & ! (below also) -! tstr4Db = 'TLON TLAT VGRDb NCAT time', & -! ustr4Db = 'ULON ULAT VGRDb NCAT time', & -! tstr4Df = 'TLON TLAT NFSD NCAT time', & -! ustr4Df = 'ULON ULAT NFSD NCAT time', & + ! U grids + ucstr = 'area: uarea' , & ! vcellmeas for U cell quantities + ustr2D = 'ULON ULAT time' , & ! vcoord for U cell, 2D + ! N grids + ncstr = 'area: narea' , & ! vcellmeas for N cell quantities + nstr2D = 'NLON NLAT time' , & ! vcoord for N cell, 2D + ! E grids + ecstr = 'area: earea' , & ! vcellmeas for E cell quantities + estr2D = 'ELON ELAT time' ! vcoord for E cell, 2D !--------------------------------------------------------------- ! 
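!
! The coordinate and cell-measure strings kept above, grouped by grid, are
! the ones still referenced when history variables are registered; each is
! passed straight into define_hist_field. For example, the T-cell 2D
! strings are used in ice_history_fsd.F90 earlier in this patch:
!
!      call define_hist_field(n_wave_sig_ht,"wave_sig_ht","m",tstr2D, tcstr, &
!           "significant height of wind and swell waves", &
!           "from attenuated spectrum in ice", c1, c0, &
!           ns, f_wave_sig_ht)
!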
flags: write to output file if true or histfreq value diff --git a/cicecore/cicedynB/dynamics/ice_dyn_eap.F90 b/cicecore/cicedynB/dynamics/ice_dyn_eap.F90 index f71d959da..28a047c4e 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_eap.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_eap.F90 @@ -134,19 +134,19 @@ subroutine eap (dt) dyn_prep1, dyn_prep2, stepu, dyn_finish, & seabed_stress_factor_LKD, seabed_stress_factor_prob, & seabed_stress_method, seabed_stress, & - stack_fields, unstack_fields + stack_fields, unstack_fields, iceTmask, iceUmask use ice_flux, only: rdg_conv, strairxT, strairyT, & strairxU, strairyU, uocn, vocn, ss_tltx, ss_tlty, fmU, & strtltxU, strtltyU, strocnxU, strocnyU, strintxU, strintyU, taubxU, taubyU, & - strocnxT, strocnyT, strax, stray, & + strax, stray, & TbU, hwater, & stressp_1, stressp_2, stressp_3, stressp_4, & stressm_1, stressm_2, stressm_3, stressm_4, & stress12_1, stress12_2, stress12_3, stress12_4 use ice_grid, only: tmask, umask, dxT, dyT, dxhy, dyhx, cxp, cyp, cxm, cym, & - tarear, uarear, grid_average_X2Y, iceumask, & + tarear, uarear, grid_average_X2Y, & grid_atm_dynu, grid_atm_dynv, grid_ocn_dynu, grid_ocn_dynv - use ice_state, only: aice, vice, vsno, uvel, vvel, divu, shear, & + use ice_state, only: aice, aiU, vice, vsno, uvel, vvel, divu, shear, & aice_init, aice0, aicen, vicen, strength use ice_timers, only: timer_dynamics, timer_bound, & ice_timer_start, ice_timer_stop @@ -163,31 +163,33 @@ subroutine eap (dt) i, j, ij integer (kind=int_kind), dimension(max_blocks) :: & - icellt , & ! no. of cells where icetmask = 1 - icellu ! no. of cells where iceumask = 1 + icellT , & ! no. of cells where iceTmask = .true. + icellU ! no. of cells where iceUmask = .true. integer (kind=int_kind), dimension (nx_block*ny_block, max_blocks) :: & - indxti , & ! compressed index in i-direction - indxtj , & ! compressed index in j-direction - indxui , & ! compressed index in i-direction - indxuj ! compressed index in j-direction + indxTi , & ! compressed index in i-direction + indxTj , & ! compressed index in j-direction + indxUi , & ! compressed index in i-direction + indxUj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & uocnU , & ! i ocean current (m/s) vocnU , & ! j ocean current (m/s) ss_tltxU , & ! sea surface slope, x-direction (m/m) ss_tltyU , & ! sea surface slope, y-direction (m/m) + cdn_ocnU , & ! ocn drag coefficient tmass , & ! total mass of ice and snow (kg/m^2) waterxU , & ! for ocean stress calculation, x (m/s) wateryU , & ! for ocean stress calculation, y (m/s) forcexU , & ! work array: combined atm stress and ocn tilt, x forceyU , & ! work array: combined atm stress and ocn tilt, y - aiU , & ! ice fraction on u-grid umass , & ! total mass of ice and snow (u grid) umassdti ! mass of U-cell/dte (kg/m^2 s) real (kind=dbl_kind), allocatable :: & - fld2(:,:,:,:) ! temporary for stacking fields for halo update + fld2(:,:,:,:), & ! temporary for stacking fields for halo update + fld3(:,:,:,:), & ! temporary for stacking fields for halo update + fld4(:,:,:,:) ! temporary for stacking fields for halo update real (kind=dbl_kind), dimension(nx_block,ny_block,8):: & strtmp ! stress combinations for momentum equation @@ -196,7 +198,6 @@ subroutine eap (dt) calc_strair integer (kind=int_kind), dimension (nx_block,ny_block,max_blocks) :: & - icetmask , & ! ice extent mask (T-cell) halomask ! ice mask for halo update type (ice_halo) :: & @@ -205,10 +206,6 @@ subroutine eap (dt) type (block) :: & this_block ! 
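!
! Declaration notes for the block above: the dynamics ice masks now come
! from ice_dyn_shared as logicals (iceTmask, iceUmask) instead of a local
! integer icetmask plus a grid-module iceumask, and the u-grid ice
! fraction aiU is now a state variable rather than a local work array:
!
!      use ice_dyn_shared, only: ..., iceTmask, iceUmask
!      use ice_state, only: aice, aiU, vice, vsno, ...
!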
block information for current block - real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & - work1 , & ! temporary - work2 ! temporary - character(len=*), parameter :: subname = '(eap)' call ice_timer_start(timer_dynamics) ! dynamics @@ -218,6 +215,8 @@ subroutine eap (dt) !----------------------------------------------------------------- allocate(fld2(nx_block,ny_block,2,max_blocks)) + allocate(fld3(nx_block,ny_block,3,max_blocks)) + allocate(fld4(nx_block,ny_block,4,max_blocks)) ! This call is needed only if dt changes during runtime. ! call set_evp_parameters (dt) @@ -256,13 +255,13 @@ subroutine eap (dt) ilo, ihi, jlo, jhi, & aice (:,:,iblk), vice (:,:,iblk), & vsno (:,:,iblk), tmask (:,:,iblk), & - tmass (:,:,iblk), icetmask(:,:,iblk)) + tmass (:,:,iblk), iceTmask(:,:,iblk)) enddo ! iblk !$OMP END PARALLEL DO call ice_timer_start(timer_bound) - call ice_HaloUpdate (icetmask, halo_info, & + call ice_HaloUpdate (iceTmask, halo_info, & field_loc_center, field_type_scalar) call ice_timer_stop(timer_bound) @@ -270,8 +269,18 @@ subroutine eap (dt) ! convert fields from T to U grid !----------------------------------------------------------------- - call grid_average_X2Y('F', tmass , 'T' , umass, 'U') - call grid_average_X2Y('F', aice_init, 'T' , aiU , 'U') + call stack_fields(tmass, aice_init, cdn_ocn, fld3) + call ice_HaloUpdate (fld3, halo_info, & + field_loc_center, field_type_scalar) + call stack_fields(uocn, vocn, ss_tltx, ss_tlty, fld4) + call ice_HaloUpdate (fld4, halo_info, & + field_loc_center, field_type_vector) + call unstack_fields(fld3, tmass, aice_init, cdn_ocn) + call unstack_fields(fld4, uocn, vocn, ss_tltx, ss_tlty) + + call grid_average_X2Y('S', tmass , 'T' , umass , 'U') + call grid_average_X2Y('S', aice_init, 'T' , aiU , 'U') + call grid_average_X2Y('S', cdn_ocn , 'T' , cdn_ocnU, 'U') call grid_average_X2Y('S', uocn , grid_ocn_dynu, uocnU , 'U') call grid_average_X2Y('S', vocn , grid_ocn_dynv, vocnU , 'U') call grid_average_X2Y('S', ss_tltx , grid_ocn_dynu, ss_tltxU, 'U') @@ -314,16 +323,16 @@ subroutine eap (dt) call dyn_prep2 (nx_block, ny_block, & ilo, ihi, jlo, jhi, & - icellt (iblk), icellu (iblk), & - indxti (:,iblk), indxtj (:,iblk), & - indxui (:,iblk), indxuj (:,iblk), & + icellT (iblk), icellU (iblk), & + indxTi (:,iblk), indxTj (:,iblk), & + indxUi (:,iblk), indxUj (:,iblk), & aiU (:,:,iblk), umass (:,:,iblk), & umassdti (:,:,iblk), fcor_blk (:,:,iblk), & umask (:,:,iblk), & uocnU (:,:,iblk), vocnU (:,:,iblk), & strairxU (:,:,iblk), strairyU (:,:,iblk), & ss_tltxU (:,:,iblk), ss_tltyU (:,:,iblk), & - icetmask (:,:,iblk), iceumask (:,:,iblk), & + iceTmask (:,:,iblk), iceUmask (:,:,iblk), & fmU (:,:,iblk), dt, & strtltxU (:,:,iblk), strtltyU (:,:,iblk), & strocnxU (:,:,iblk), strocnyU (:,:,iblk), & @@ -347,7 +356,7 @@ subroutine eap (dt) do j = 1, ny_block do i = 1, nx_block - if (icetmask(i,j,iblk)==0) then + if (.not.iceTmask(i,j,iblk)) then if (tmask(i,j,iblk)) then ! structure tensor a11_1(i,j,iblk) = p5 @@ -364,7 +373,7 @@ subroutine eap (dt) a12_2(i,j,iblk) = c0 a12_3(i,j,iblk) = c0 a12_4(i,j,iblk) = c0 - endif ! icetmask + endif ! iceTmask enddo ! i enddo ! j @@ -374,9 +383,9 @@ subroutine eap (dt) !----------------------------------------------------------------- strength(:,:,iblk) = c0 ! 
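!
! The stacked exchange above bundles several 2D fields into a single halo
! call instead of one call per field; the pattern, using the fld3 buffer
! allocated at the top of the routine, is:
!
!      call stack_fields(tmass, aice_init, cdn_ocn, fld3)
!      call ice_HaloUpdate (fld3, halo_info, &
!                           field_loc_center, field_type_scalar)
!      call unstack_fields(fld3, tmass, aice_init, cdn_ocn)
!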
initialize - do ij = 1, icellt(iblk) - i = indxti(ij, iblk) - j = indxtj(ij, iblk) + do ij = 1, icellT(iblk) + i = indxTi(ij, iblk) + j = indxTj(ij, iblk) call icepack_ice_strength(ncat=ncat, & aice = aice (i,j, iblk), & vice = vice (i,j, iblk), & @@ -405,7 +414,7 @@ subroutine eap (dt) if (maskhalo_dyn) then call ice_timer_start(timer_bound) halomask = 0 - where (iceumask) halomask = 1 + where (iceUmask) halomask = 1 call ice_HaloUpdate (halomask, halo_info, & field_loc_center, field_type_scalar) call ice_timer_stop(timer_bound) @@ -421,8 +430,8 @@ subroutine eap (dt) !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) do iblk = 1, nblocks call seabed_stress_factor_LKD (nx_block , ny_block , & - icellu (iblk), & - indxui (:,iblk), indxuj(:,iblk), & + icellU (iblk), & + indxUi (:,iblk), indxUj(:,iblk), & vice (:,:,iblk), aice(:,:,iblk), & hwater(:,:,iblk), TbU (:,:,iblk)) enddo @@ -432,8 +441,8 @@ subroutine eap (dt) !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) do iblk = 1, nblocks call seabed_stress_factor_prob (nx_block , ny_block , & - icellt(iblk), indxti(:,iblk), indxtj(:,iblk), & - icellu(iblk), indxui(:,iblk), indxuj(:,iblk), & + icellT(iblk), indxTi(:,iblk), indxTj(:,iblk), & + icellU(iblk), indxUi(:,iblk), indxUj(:,iblk), & aicen(:,:,:,iblk), vicen(:,:,:,iblk), & hwater (:,:,iblk), TbU (:,:,iblk)) enddo @@ -453,8 +462,8 @@ subroutine eap (dt) ! call ice_timer_start(timer_tmp1,iblk) call stress_eap (nx_block, ny_block, & ksub, ndte, & - icellt (iblk), & - indxti (:,iblk), indxtj (:,iblk), & + icellT (iblk), & + indxTi (:,iblk), indxTj (:,iblk), & arlx1i, denom1, & uvel (:,:,iblk), vvel (:,:,iblk), & dxT (:,:,iblk), dyT (:,:,iblk), & @@ -491,8 +500,8 @@ subroutine eap (dt) ! call ice_timer_start(timer_tmp2,iblk) call stepu (nx_block, ny_block, & - icellu (iblk), Cdn_ocn (:,:,iblk), & - indxui (:,iblk), indxuj (:,iblk), & + icellU (iblk), Cdn_ocnU (:,:,iblk), & + indxUi (:,iblk), indxUj (:,iblk), & aiU (:,:,iblk), strtmp (:,:,:), & uocnU (:,:,iblk), vocnU (:,:,iblk), & waterxU (:,:,iblk), wateryU (:,:,iblk), & @@ -513,8 +522,8 @@ subroutine eap (dt) ! call ice_timer_start(timer_tmp3,iblk) if (mod(ksub,10) == 1) then ! only called every 10th timestep call stepa (nx_block , ny_block , & - dtei , icellt (iblk), & - indxti (:,iblk), indxtj (:,iblk), & + dtei , icellT (iblk), & + indxTi (:,iblk), indxTj (:,iblk), & a11 (:,:,iblk), a12 (:,:,iblk), & a11_1 (:,:,iblk), a11_2 (:,:,iblk), & a11_3 (:,:,iblk), a11_4 (:,:,iblk), & @@ -545,7 +554,7 @@ subroutine eap (dt) enddo ! subcycling - deallocate(fld2) + deallocate(fld2,fld3,fld4) if (maskhalo_dyn) call ice_HaloDestroy(halo_info_mask) !----------------------------------------------------------------- @@ -557,8 +566,8 @@ subroutine eap (dt) call dyn_finish & (nx_block, ny_block, & - icellu (iblk), Cdn_ocn (:,:,iblk), & - indxui (:,iblk), indxuj (:,iblk), & + icellU (iblk), Cdn_ocnU(:,:,iblk), & + indxUi (:,iblk), indxUj (:,iblk), & uvel (:,:,iblk), vvel (:,:,iblk), & uocnU (:,:,iblk), vocnU (:,:,iblk), & aiU (:,:,iblk), fmU (:,:,iblk), & @@ -567,27 +576,6 @@ subroutine eap (dt) enddo !$OMP END PARALLEL DO - ! strocn computed on U, N, E as needed. Map strocn U divided by aiU to T - ! TODO: This should be done elsewhere as part of generalization? - ! 
conservation requires aiU be divided before averaging - work1 = c0 - work2 = c0 - !$OMP PARALLEL DO PRIVATE(iblk,ij,i,j) - do iblk = 1, nblocks - do ij = 1, icellu(iblk) - i = indxui(ij,iblk) - j = indxuj(ij,iblk) - work1(i,j,iblk) = strocnxU(i,j,iblk)/aiU(i,j,iblk) - work2(i,j,iblk) = strocnyU(i,j,iblk)/aiU(i,j,iblk) - enddo - enddo - call ice_HaloUpdate (work1, halo_info, & - field_loc_NEcorner, field_type_vector) - call ice_HaloUpdate (work2, halo_info, & - field_loc_NEcorner, field_type_vector) - call grid_average_X2Y('F', work1, 'U', strocnxT, 'T') ! shift - call grid_average_X2Y('F', work2, 'U', strocnyT, 'T') - call ice_timer_stop(timer_dynamics) ! dynamics end subroutine eap @@ -1165,8 +1153,8 @@ end FUNCTION s22ks subroutine stress_eap (nx_block, ny_block, & ksub, ndte, & - icellt, & - indxti, indxtj, & + icellT, & + indxTi, indxTj, & arlx1i, denom1, & uvel, vvel, & dxT, dyT, & @@ -1198,11 +1186,11 @@ subroutine stress_eap (nx_block, ny_block, & nx_block, ny_block, & ! block dimensions ksub , & ! subcycling step ndte , & ! number of subcycles - icellt ! no. of cells where icetmask = 1 + icellT ! no. of cells where iceTmask = .true. integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & - indxti , & ! compressed index in i-direction - indxtj ! compressed index in j-direction + indxTi , & ! compressed index in i-direction + indxTj ! compressed index in j-direction real (kind=dbl_kind), intent(in) :: & arlx1i , & ! dte/2T (original) or 1/alpha1 (revised) @@ -1285,9 +1273,9 @@ subroutine stress_eap (nx_block, ny_block, & strtmp(:,:,:) = c0 - do ij = 1, icellt - i = indxti(ij) - j = indxtj(ij) + do ij = 1, icellT + i = indxTi(ij) + j = indxTj(ij) !----------------------------------------------------------------- ! strain rates @@ -1889,8 +1877,8 @@ end subroutine update_stress_rdg ! Solves evolution equation for structure tensor (A19, A20) subroutine stepa (nx_block, ny_block, & - dtei, icellt, & - indxti, indxtj, & + dtei, icellT, & + indxTi, indxTj, & a11, a12, & a11_1, a11_2, a11_3, a11_4, & a12_1, a12_2, a12_3, a12_4, & @@ -1903,14 +1891,14 @@ subroutine stepa (nx_block, ny_block, & integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellt ! no. of cells where icetmask = 1 + icellT ! no. of cells where iceTmask = .true. real (kind=dbl_kind), intent(in) :: & dtei ! 1/dte, where dte is subcycling timestep (1/s) integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & - indxti , & ! compressed index in i-direction - indxtj ! compressed index in j-direction + indxTi , & ! compressed index in i-direction + indxTj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & ! ice stress tensor (kg/s^2) in each corner of T cell @@ -1940,9 +1928,9 @@ subroutine stepa (nx_block, ny_block, & dteikth = c1 / (dtei + kth) p5kth = p5 * kth - do ij = 1, icellt - i = indxti(ij) - j = indxtj(ij) + do ij = 1, icellT + i = indxTi(ij) + j = indxTj(ij) ! 
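!
! The icellT/indxTi/indxTj triplet used above implements a gather: rather
! than sweeping every (i,j) and testing the mask, the stress and structure
! tensor loops visit only the cells where iceTmask is true, e.g.
!
!      do ij = 1, icellT
!         i = indxTi(ij)
!         j = indxTj(ij)
!         ! ... pointwise work on (i,j) ...
!      enddo
!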
ne call calc_ffrac(stressp_1(i,j), stressm_1(i,j), & diff --git a/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 b/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 index c2060285a..8eab5e260 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 @@ -85,7 +85,7 @@ subroutine evp (dt) use ice_flux, only: rdg_conv, rdg_shear, strairxT, strairyT, & strairxU, strairyU, uocn, vocn, ss_tltx, ss_tlty, fmU, & strtltxU, strtltyU, strocnxU, strocnyU, strintxU, strintyU, taubxU, taubyU, & - strocnxT, strocnyT, strax, stray, & + strax, stray, & TbU, hwater, & strairxN, strairyN, fmN, & strtltxN, strtltyN, strocnxN, strocnyN, strintxN, strintyN, taubxN, taubyN, & @@ -99,14 +99,13 @@ subroutine evp (dt) stresspT, stressmT, stress12T, & stresspU, stressmU, stress12U use ice_grid, only: tmask, umask, umaskCD, nmask, emask, uvm, epm, npm, & - iceumask, iceemask, icenmask, & dxE, dxN, dxT, dxU, dyE, dyN, dyT, dyU, & ratiodxN, ratiodxNr, ratiodyE, ratiodyEr, & dxhy, dyhx, cxp, cyp, cxm, cym, & tarear, uarear, earear, narear, grid_average_X2Y, uarea, & grid_type, grid_ice, & grid_atm_dynu, grid_atm_dynv, grid_ocn_dynu, grid_ocn_dynv - use ice_state, only: aice, vice, vsno, uvel, vvel, uvelN, vvelN, & + use ice_state, only: aice, aiU, vice, vsno, uvel, vvel, uvelN, vvelN, & uvelE, vvelE, divu, shear, & aice_init, aice0, aicen, vicen, strength use ice_timers, only: timer_dynamics, timer_bound, & @@ -116,6 +115,7 @@ subroutine evp (dt) use ice_dyn_shared, only: evp_algorithm, stack_fields, unstack_fields, & DminTarea, visc_method, deformations, deformationsC_T, deformationsCD_T, & strain_rates_U, & + iceTmask, iceUmask, iceEmask, iceNmask, & dyn_haloUpdate real (kind=dbl_kind), intent(in) :: & @@ -130,32 +130,32 @@ subroutine evp (dt) i, j, ij ! local indices integer (kind=int_kind), dimension(max_blocks) :: & - icellt , & ! no. of cells where icetmask = 1 - icelln , & ! no. of cells where icenmask = .true. - icelle , & ! no. of cells where iceemask = .true. - icellu ! no. of cells where iceumask = .true. + icellT , & ! no. of cells where iceTmask = .true. + icellN , & ! no. of cells where iceNmask = .true. + icellE , & ! no. of cells where iceEmask = .true. + icellU ! no. of cells where iceUmask = .true. integer (kind=int_kind), dimension (nx_block*ny_block, max_blocks) :: & - indxti , & ! compressed index in i-direction - indxtj , & ! compressed index in j-direction - indxei , & ! compressed index in i-direction - indxej , & ! compressed index in j-direction - indxni , & ! compressed index in i-direction - indxnj , & ! compressed index in j-direction - indxui , & ! compressed index in i-direction - indxuj ! compressed index in j-direction + indxTi , & ! compressed index in i-direction + indxTj , & ! compressed index in j-direction + indxEi , & ! compressed index in i-direction + indxEj , & ! compressed index in j-direction + indxNi , & ! compressed index in i-direction + indxNj , & ! compressed index in j-direction + indxUi , & ! compressed index in i-direction + indxUj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & uocnU , & ! i ocean current (m/s) vocnU , & ! j ocean current (m/s) ss_tltxU , & ! sea surface slope, x-direction (m/m) ss_tltyU , & ! sea surface slope, y-direction (m/m) + cdn_ocnU , & ! ocn drag coefficient tmass , & ! total mass of ice and snow (kg/m^2) waterxU , & ! for ocean stress calculation, x (m/s) wateryU , & ! for ocean stress calculation, y (m/s) forcexU , & ! 
work array: combined atm stress and ocn tilt, x forceyU , & ! work array: combined atm stress and ocn tilt, y - aiU , & ! ice fraction on u-grid umass , & ! total mass of ice and snow (u grid) umassdti ! mass of U-cell/dte (kg/m^2 s) @@ -164,6 +164,7 @@ subroutine evp (dt) vocnN , & ! j ocean current (m/s) ss_tltxN , & ! sea surface slope, x-direction (m/m) ss_tltyN , & ! sea surface slope, y-direction (m/m) + cdn_ocnN , & ! ocn drag coefficient waterxN , & ! for ocean stress calculation, x (m/s) wateryN , & ! for ocean stress calculation, y (m/s) forcexN , & ! work array: combined atm stress and ocn tilt, x @@ -177,6 +178,7 @@ subroutine evp (dt) vocnE , & ! j ocean current (m/s) ss_tltxE , & ! sea surface slope, x-direction (m/m) ss_tltyE , & ! sea surface slope, y-direction (m/m) + cdn_ocnE , & ! ocn drag coefficient waterxE , & ! for ocean stress calculation, x (m/s) wateryE , & ! for ocean stress calculation, y (m/s) forcexE , & ! work array: combined atm stress and ocn tilt, x @@ -186,9 +188,9 @@ subroutine evp (dt) emassdti ! mass of E-cell/dte (kg/m^2 s) real (kind=dbl_kind), allocatable :: & - fld2(:,:,:,:) , & ! 2 bundled fields - fld3(:,:,:,:) , & ! 3 bundled fields - fld4(:,:,:,:) ! 4 bundled fields + fld2(:,:,:,:), & ! 2 bundled fields + fld3(:,:,:,:), & ! 3 bundled fields + fld4(:,:,:,:) ! 4 bundled fields real (kind=dbl_kind), allocatable :: & strengthU(:,:,:), & ! strength averaged to U points @@ -208,7 +210,6 @@ subroutine evp (dt) calc_strair ! calculate air/ice stress integer (kind=int_kind), dimension (nx_block,ny_block,max_blocks) :: & - icetmask, & ! ice extent mask (T-cell) halomask ! generic halo mask type (ice_halo) :: & @@ -217,10 +218,6 @@ subroutine evp (dt) type (block) :: & this_block ! block information for current block - real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & - work1, & ! temporary - work2 ! temporary - logical (kind=log_kind), save :: & first_time = .true. ! first time logical @@ -303,13 +300,13 @@ subroutine evp (dt) ilo, ihi, jlo, jhi, & aice (:,:,iblk), vice (:,:,iblk), & vsno (:,:,iblk), tmask (:,:,iblk), & - tmass (:,:,iblk), icetmask(:,:,iblk)) + tmass (:,:,iblk), iceTmask(:,:,iblk)) enddo ! iblk !$OMP END PARALLEL DO call ice_timer_start(timer_bound) - call ice_HaloUpdate (icetmask, halo_info, & + call ice_HaloUpdate (iceTmask, halo_info, & field_loc_center, field_type_scalar) call ice_timer_stop(timer_bound) @@ -317,22 +314,34 @@ subroutine evp (dt) ! convert fields from T to U grid !----------------------------------------------------------------- - call grid_average_X2Y('F', tmass , 'T' , umass , 'U') - call grid_average_X2Y('F', aice_init, 'T' , aiU , 'U') + call stack_fields(tmass, aice_init, cdn_ocn, fld3) + call ice_HaloUpdate (fld3, halo_info, & + field_loc_center, field_type_scalar) + call stack_fields(uocn, vocn, ss_tltx, ss_tlty, fld4) + call ice_HaloUpdate (fld4, halo_info, & + field_loc_center, field_type_vector) + call unstack_fields(fld3, tmass, aice_init, cdn_ocn) + call unstack_fields(fld4, uocn, vocn, ss_tltx, ss_tlty) + + call grid_average_X2Y('S', tmass , 'T' , umass , 'U') + call grid_average_X2Y('S', aice_init, 'T' , aiU , 'U') + call grid_average_X2Y('S', cdn_ocn , 'T' , cdn_ocnU, 'U') call grid_average_X2Y('S', uocn , grid_ocn_dynu, uocnU , 'U') call grid_average_X2Y('S', vocn , grid_ocn_dynv, vocnU , 'U') call grid_average_X2Y('S', ss_tltx , grid_ocn_dynu, ss_tltxU, 'U') call grid_average_X2Y('S', ss_tlty , grid_ocn_dynv, ss_tltyU, 'U') if (grid_ice == 'CD' .or. 
grid_ice == 'C') then - call grid_average_X2Y('F', tmass , 'T' , emass , 'E') - call grid_average_X2Y('F', aice_init, 'T' , aie , 'E') + call grid_average_X2Y('S', tmass , 'T' , emass , 'E') + call grid_average_X2Y('S', aice_init, 'T' , aie , 'E') + call grid_average_X2Y('S', cdn_ocn , 'T' , cdn_ocnE, 'E') call grid_average_X2Y('S', uocn , grid_ocn_dynu, uocnE , 'E') call grid_average_X2Y('S', vocn , grid_ocn_dynv, vocnE , 'E') call grid_average_X2Y('S', ss_tltx , grid_ocn_dynu, ss_tltxE, 'E') call grid_average_X2Y('S', ss_tlty , grid_ocn_dynv, ss_tltyE, 'E') - call grid_average_X2Y('F', tmass , 'T' , nmass , 'N') - call grid_average_X2Y('F', aice_init, 'T' , ain , 'N') + call grid_average_X2Y('S', tmass , 'T' , nmass , 'N') + call grid_average_X2Y('S', aice_init, 'T' , ain , 'N') + call grid_average_X2Y('S', cdn_ocn , 'T' , cdn_ocnN, 'N') call grid_average_X2Y('S', uocn , grid_ocn_dynu, uocnN , 'N') call grid_average_X2Y('S', vocn , grid_ocn_dynv, vocnN , 'N') call grid_average_X2Y('S', ss_tltx , grid_ocn_dynu, ss_tltxN, 'N') @@ -390,16 +399,16 @@ subroutine evp (dt) if (trim(grid_ice) == 'B') then call dyn_prep2 (nx_block, ny_block, & ilo, ihi, jlo, jhi, & - icellt (iblk), icellu (iblk), & - indxti (:,iblk), indxtj (:,iblk), & - indxui (:,iblk), indxuj (:,iblk), & + icellT (iblk), icellU (iblk), & + indxTi (:,iblk), indxTj (:,iblk), & + indxUi (:,iblk), indxUj (:,iblk), & aiU (:,:,iblk), umass (:,:,iblk), & umassdti (:,:,iblk), fcor_blk (:,:,iblk), & umask (:,:,iblk), & uocnU (:,:,iblk), vocnU (:,:,iblk), & strairxU (:,:,iblk), strairyU (:,:,iblk), & ss_tltxU (:,:,iblk), ss_tltyU (:,:,iblk), & - icetmask (:,:,iblk), iceumask (:,:,iblk), & + iceTmask (:,:,iblk), iceUmask (:,:,iblk), & fmU (:,:,iblk), dt, & strtltxU (:,:,iblk), strtltyU (:,:,iblk), & strocnxU (:,:,iblk), strocnyU (:,:,iblk), & @@ -420,16 +429,16 @@ subroutine evp (dt) elseif (trim(grid_ice) == 'CD' .or. grid_ice == 'C') then call dyn_prep2 (nx_block, ny_block, & ilo, ihi, jlo, jhi, & - icellt (iblk), icellu (iblk), & - indxti (:,iblk), indxtj (:,iblk), & - indxui (:,iblk), indxuj (:,iblk), & + icellT (iblk), icellU (iblk), & + indxTi (:,iblk), indxTj (:,iblk), & + indxUi (:,iblk), indxUj (:,iblk), & aiU (:,:,iblk), umass (:,:,iblk), & umassdti (:,:,iblk), fcor_blk (:,:,iblk), & umaskCD (:,:,iblk), & uocnU (:,:,iblk), vocnU (:,:,iblk), & strairxU (:,:,iblk), strairyU (:,:,iblk), & ss_tltxU (:,:,iblk), ss_tltyU (:,:,iblk), & - icetmask (:,:,iblk), iceumask (:,:,iblk), & + iceTmask (:,:,iblk), iceUmask (:,:,iblk), & fmU (:,:,iblk), dt, & strtltxU (:,:,iblk), strtltyU (:,:,iblk), & strocnxU (:,:,iblk), strocnyU (:,:,iblk), & @@ -453,9 +462,9 @@ subroutine evp (dt) !----------------------------------------------------------------- strength(:,:,iblk) = c0 ! 
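!
! Mapping note: the first argument of grid_average_X2Y selects the
! averaging flavor; these T-to-U/E/N interpolations now use 'S' (simple
! neighbor average) rather than 'F' (the flux form, which weights by cell
! area), so masses, ice fractions and drag coefficients are treated the
! same way as the ocean fields, e.g.
!
!      call grid_average_X2Y('S', cdn_ocn , 'T' , cdn_ocnN, 'N')
!
! The flavor descriptions follow the conventions in ice_grid.F90 and
! should be verified there.
!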
initialize - do ij = 1, icellt(iblk) - i = indxti(ij, iblk) - j = indxtj(ij, iblk) + do ij = 1, icellT(iblk) + i = indxTi(ij, iblk) + j = indxTj(ij, iblk) call icepack_ice_strength(ncat = ncat, & aice = aice (i,j, iblk), & vice = vice (i,j, iblk), & @@ -485,16 +494,16 @@ subroutine evp (dt) call dyn_prep2 (nx_block, ny_block, & ilo, ihi, jlo, jhi, & - icellt (iblk), icelln (iblk), & - indxti (:,iblk), indxtj (:,iblk), & - indxni (:,iblk), indxnj (:,iblk), & + icellT (iblk), icellN (iblk), & + indxTi (:,iblk), indxTj (:,iblk), & + indxNi (:,iblk), indxNj (:,iblk), & aiN (:,:,iblk), nmass (:,:,iblk), & nmassdti (:,:,iblk), fcorN_blk (:,:,iblk), & nmask (:,:,iblk), & uocnN (:,:,iblk), vocnN (:,:,iblk), & strairxN (:,:,iblk), strairyN (:,:,iblk), & ss_tltxN (:,:,iblk), ss_tltyN (:,:,iblk), & - icetmask (:,:,iblk), icenmask (:,:,iblk), & + iceTmask (:,:,iblk), iceNmask (:,:,iblk), & fmN (:,:,iblk), dt, & strtltxN (:,:,iblk), strtltyN (:,:,iblk), & strocnxN (:,:,iblk), strocnyN (:,:,iblk), & @@ -518,16 +527,16 @@ subroutine evp (dt) call dyn_prep2 (nx_block, ny_block, & ilo, ihi, jlo, jhi, & - icellt (iblk), icelle (iblk), & - indxti (:,iblk), indxtj (:,iblk), & - indxei (:,iblk), indxej (:,iblk), & + icellT (iblk), icellE (iblk), & + indxTi (:,iblk), indxTj (:,iblk), & + indxEi (:,iblk), indxEj (:,iblk), & aiE (:,:,iblk), emass (:,:,iblk), & emassdti (:,:,iblk), fcorE_blk (:,:,iblk), & emask (:,:,iblk), & uocnE (:,:,iblk), vocnE (:,:,iblk), & strairxE (:,:,iblk), strairyE (:,:,iblk), & ss_tltxE (:,:,iblk), ss_tltyE (:,:,iblk), & - icetmask (:,:,iblk), iceemask (:,:,iblk), & + iceTmask (:,:,iblk), iceEmask (:,:,iblk), & fmE (:,:,iblk), dt, & strtltxE (:,:,iblk), strtltyE (:,:,iblk), & strocnxE (:,:,iblk), strocnyE (:,:,iblk), & @@ -548,12 +557,12 @@ subroutine evp (dt) do i=1,nx_block do j=1,ny_block - if (.not.iceumask(i,j,iblk)) then + if (.not.iceUmask(i,j,iblk)) then stresspU (i,j,iblk) = c0 stressmU (i,j,iblk) = c0 stress12U(i,j,iblk) = c0 endif - if (icetmask(i,j,iblk) == 0) then + if (.not.iceTmask(i,j,iblk)) then stresspT (i,j,iblk) = c0 stressmT (i,j,iblk) = c0 stress12T(i,j,iblk) = c0 @@ -612,7 +621,7 @@ subroutine evp (dt) if (maskhalo_dyn) then halomask = 0 if (grid_ice == 'B') then - where (iceumask) halomask = 1 + where (iceUmask) halomask = 1 elseif (grid_ice == 'C' .or. grid_ice == 'CD') then !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block,i,j) SCHEDULE(runtime) do iblk = 1, nblocks @@ -623,11 +632,11 @@ subroutine evp (dt) jhi = this_block%jhi do j = jlo,jhi do i = ilo,ihi - if (icetmask(i ,j ,iblk) /= 0 .or. & - icetmask(i-1,j ,iblk) /= 0 .or. & - icetmask(i+1,j ,iblk) /= 0 .or. & - icetmask(i ,j-1,iblk) /= 0 .or. & - icetmask(i ,j+1,iblk) /= 0) then + if (iceTmask(i ,j ,iblk) .or. & + iceTmask(i-1,j ,iblk) .or. & + iceTmask(i+1,j ,iblk) .or. & + iceTmask(i ,j-1,iblk) .or. 
& + iceTmask(i ,j+1,iblk)) then halomask(i,j,iblk) = 1 endif enddo @@ -654,8 +663,8 @@ subroutine evp (dt) !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) do iblk = 1, nblocks call seabed_stress_factor_LKD (nx_block , ny_block , & - icellu (iblk), & - indxui (:,iblk), indxuj(:,iblk), & + icellU (iblk), & + indxUi (:,iblk), indxUj(:,iblk), & vice (:,:,iblk), aice(:,:,iblk), & hwater(:,:,iblk), TbU (:,:,iblk)) enddo @@ -665,8 +674,8 @@ subroutine evp (dt) !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) do iblk = 1, nblocks call seabed_stress_factor_prob (nx_block , ny_block , & - icellt(iblk), indxti(:,iblk), indxtj(:,iblk), & - icellu(iblk), indxui(:,iblk), indxuj(:,iblk), & + icellT(iblk), indxTi(:,iblk), indxTj(:,iblk), & + icellU(iblk), indxUi(:,iblk), indxUj(:,iblk), & aicen(:,:,:,iblk), vicen(:,:,:,iblk) , & hwater (:,:,iblk), TbU (:,:,iblk)) enddo @@ -679,13 +688,13 @@ subroutine evp (dt) !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) do iblk = 1, nblocks call seabed_stress_factor_LKD (nx_block , ny_block, & - icelle (iblk), & - indxei (:,iblk), indxej(:,iblk), & + icellE (iblk), & + indxEi (:,iblk), indxEj(:,iblk), & vice (:,:,iblk), aice(:,:,iblk), & hwater(:,:,iblk), TbE (:,:,iblk)) call seabed_stress_factor_LKD (nx_block , ny_block, & - icelln (iblk), & - indxni (:,iblk), indxnj(:,iblk), & + icellN (iblk), & + indxNi (:,iblk), indxNj(:,iblk), & vice (:,:,iblk), aice(:,:,iblk), & hwater(:,:,iblk), TbN (:,:,iblk)) enddo @@ -695,13 +704,13 @@ subroutine evp (dt) !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) do iblk = 1, nblocks call seabed_stress_factor_prob (nx_block , ny_block , & - icellt(iblk), indxti(:,iblk), indxtj(:,iblk), & - icellu(iblk), indxui(:,iblk), indxuj(:,iblk), & + icellT(iblk), indxTi(:,iblk), indxTj(:,iblk), & + icellU(iblk), indxUi(:,iblk), indxUj(:,iblk), & aicen(:,:,:,iblk), vicen(:,:,:,iblk) , & hwater (:,:,iblk), TbU (:,:,iblk) , & TbE (:,:,iblk), TbN (:,:,iblk) , & - icelle(iblk), indxei(:,iblk), indxej(:,iblk), & - icelln(iblk), indxni(:,iblk), indxnj(:,iblk) ) + icellE(iblk), indxEi(:,iblk), indxEj(:,iblk), & + icellN(iblk), indxNi(:,iblk), indxNj(:,iblk) ) enddo !$OMP END PARALLEL DO endif @@ -724,8 +733,8 @@ subroutine evp (dt) call ice_timer_start(timer_evp_1d) call ice_dyn_evp_1d_copyin( & nx_block,ny_block,nblocks,nx_global+2*nghost,ny_global+2*nghost, & - icetmask, iceumask, & - cdn_ocn,aiU,uocnU,vocnU,forcexU,forceyU,TbU, & + iceTmask, iceUmask, & + cdn_ocnU,aiU,uocnU,vocnU,forcexU,forceyU,TbU, & umassdti,fmU,uarear,tarear,strintxU,strintyU,uvel_init,vvel_init,& strength,uvel,vvel,dxT,dyT, & stressp_1 ,stressp_2, stressp_3, stressp_4, & @@ -745,9 +754,10 @@ subroutine evp (dt) else ! evp_algorithm == standard_2d (Standard CICE) call ice_timer_start(timer_evp_2d) - do ksub = 1,ndte ! subcycling - if (grid_ice == "B") then + if (grid_ice == "B") then + + do ksub = 1,ndte ! subcycling !$OMP PARALLEL DO PRIVATE(iblk,strtmp) SCHEDULE(runtime) do iblk = 1, nblocks @@ -756,8 +766,8 @@ subroutine evp (dt) ! stress tensor equation, total surface stress !----------------------------------------------------------------- call stress (nx_block , ny_block , & - icellt (iblk), & - indxti (:,iblk), indxtj (:,iblk), & + icellT (iblk), & + indxTi (:,iblk), indxTj (:,iblk), & uvel (:,:,iblk), vvel (:,:,iblk), & dxT (:,:,iblk), dyT (:,:,iblk), & dxhy (:,:,iblk), dyhx (:,:,iblk), & @@ -773,28 +783,12 @@ subroutine evp (dt) stress12_3(:,:,iblk), stress12_4(:,:,iblk), & strtmp (:,:,:) ) - !----------------------------------------------------------------- - ! 
on last subcycle, save quantities for mechanical redistribution - !----------------------------------------------------------------- - if (ksub == ndte) then - call deformations (nx_block , ny_block , & - icellt (iblk), & - indxti (:,iblk), indxtj (:,iblk), & - uvel (:,:,iblk), vvel (:,:,iblk), & - dxT (:,:,iblk), dyT (:,:,iblk), & - cxp (:,:,iblk), cyp (:,:,iblk), & - cxm (:,:,iblk), cym (:,:,iblk), & - tarear (:,:,iblk), & - shear (:,:,iblk), divu (:,:,iblk), & - rdg_conv(:,:,iblk), rdg_shear(:,:,iblk) ) - endif - !----------------------------------------------------------------- ! momentum equation !----------------------------------------------------------------- call stepu (nx_block , ny_block , & - icellu (iblk), Cdn_ocn (:,:,iblk), & - indxui (:,iblk), indxuj (:,iblk), & + icellU (iblk), Cdn_ocnU(:,:,iblk), & + indxUi (:,iblk), indxUj (:,iblk), & aiU (:,:,iblk), strtmp (:,:,:), & uocnU (:,:,iblk), vocnU (:,:,iblk), & waterxU (:,:,iblk), wateryU (:,:,iblk), & @@ -810,7 +804,37 @@ subroutine evp (dt) enddo ! iblk !$OMP END PARALLEL DO - elseif (grid_ice == "C") then + ! U fields at NE corner + ! calls ice_haloUpdate, controls bundles and masks + call dyn_haloUpdate (halo_info, halo_info_mask, & + field_loc_NEcorner, field_type_vector, & + uvel, vvel) + + enddo ! sub cycling + + !----------------------------------------------------------------- + ! save quantities for mechanical redistribution + !----------------------------------------------------------------- + + !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) + do iblk = 1, nblocks + call deformations (nx_block , ny_block , & + icellT (iblk), & + indxTi (:,iblk), indxTj (:,iblk), & + uvel (:,:,iblk), vvel (:,:,iblk), & + dxT (:,:,iblk), dyT (:,:,iblk), & + cxp (:,:,iblk), cyp (:,:,iblk), & + cxm (:,:,iblk), cym (:,:,iblk), & + tarear (:,:,iblk), & + shear (:,:,iblk), divu (:,:,iblk), & + rdg_conv(:,:,iblk), rdg_shear(:,:,iblk) ) + enddo + !$OMP END PARALLEL DO + + + elseif (grid_ice == "C") then + + do ksub = 1,ndte ! subcycling !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks @@ -820,8 +844,8 @@ subroutine evp (dt) ! NOTE these are actually strain rates * area (m^2/s) !----------------------------------------------------------------- call strain_rates_U (nx_block , ny_block , & - icellu (iblk), & - indxui (:,iblk), indxuj (:,iblk), & + icellU (iblk), & + indxUi (:,iblk), indxUj (:,iblk), & uvelE (:,:,iblk), vvelE (:,:,iblk), & uvelN (:,:,iblk), vvelN (:,:,iblk), & uvel (:,:,iblk), vvel (:,:,iblk), & @@ -837,15 +861,15 @@ subroutine evp (dt) !$OMP END PARALLEL DO ! calls ice_haloUpdate, controls bundles and masks - call dyn_HaloUpdate (halo_info, halo_info_mask, & + call dyn_haloUpdate (halo_info, halo_info_mask, & field_loc_NEcorner, field_type_scalar, & shearU) !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks call stressC_T (nx_block , ny_block , & - icellt (iblk), & - indxti (:,iblk), indxtj (:,iblk), & + icellT (iblk), & + indxTi (:,iblk), indxTj (:,iblk), & uvelE (:,:,iblk), vvelE (:,:,iblk), & uvelN (:,:,iblk), vvelN (:,:,iblk), & dxN (:,:,iblk), dyE (:,:,iblk), & @@ -855,29 +879,11 @@ subroutine evp (dt) zetax2T (:,:,iblk), etax2T (:,:,iblk), & stresspT (:,:,iblk), stressmT (:,:,iblk)) - !----------------------------------------------------------------- - ! 
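!
! Restructure note: deformations is no longer computed inside the
! subcycling loop behind an if (ksub == ndte) test; each grid variant now
! runs all ndte subcycles, with the NE-corner velocity halo update inside
! the loop, and computes the deformation diagnostics once afterwards:
!
!      do ksub = 1, ndte
!         ! stress, momentum, halo updates
!      enddo
!      call deformations (...)   ! once, after the last subcycle
!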
on last subcycle, save quantities for mechanical redistribution - !----------------------------------------------------------------- - if (ksub == ndte) then - - call deformationsC_T (nx_block , ny_block , & - icellt (iblk), & - indxti (:,iblk), indxtj (:,iblk), & - uvelE (:,:,iblk), vvelE (:,:,iblk), & - uvelN (:,:,iblk), vvelN (:,:,iblk), & - dxN (:,:,iblk), dyE (:,:,iblk), & - dxT (:,:,iblk), dyT (:,:,iblk), & - tarear (:,:,iblk), uarea (:,:,iblk), & - shearU (:,:,iblk), & - shear (:,:,iblk), divu (:,:,iblk), & - rdg_conv(:,:,iblk), rdg_shear(:,:,iblk)) - - endif enddo !$OMP END PARALLEL DO ! calls ice_haloUpdate, controls bundles and masks - call dyn_HaloUpdate (halo_info, halo_info_mask, & + call dyn_haloUpdate (halo_info, halo_info_mask, & field_loc_center, field_type_scalar, & zetax2T, etax2T, stresspT, stressmT) @@ -890,8 +896,8 @@ subroutine evp (dt) !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks call stressC_U (nx_block , ny_block , & - icellu (iblk), & - indxui (:,iblk), indxuj (:,iblk), & + icellU (iblk), & + indxUi (:,iblk), indxUj (:,iblk), & uarea (:,:,iblk), & etax2U (:,:,iblk), deltaU (:,:,iblk), & strengthU (:,:,iblk), shearU (:,:,iblk), & @@ -900,7 +906,7 @@ subroutine evp (dt) !$OMP END PARALLEL DO ! calls ice_haloUpdate, controls bundles and masks - call dyn_HaloUpdate (halo_info , halo_info_mask, & + call dyn_haloUpdate (halo_info , halo_info_mask, & field_loc_NEcorner, field_type_scalar, & stress12U) @@ -908,8 +914,8 @@ subroutine evp (dt) do iblk = 1, nblocks call div_stress_Ex (nx_block , ny_block , & - icelle (iblk), & - indxei (:,iblk), indxej (:,iblk), & + icellE (iblk), & + indxEi (:,iblk), indxEj (:,iblk), & dxE (:,:,iblk), dyE (:,:,iblk), & dxU (:,:,iblk), dyT (:,:,iblk), & earear (:,:,iblk) , & @@ -917,8 +923,8 @@ subroutine evp (dt) stress12U (:,:,iblk), strintxE (:,:,iblk) ) call div_stress_Ny (nx_block , ny_block , & - icelln (iblk), & - indxni (:,iblk), indxnj (:,iblk), & + icellN (iblk), & + indxNi (:,iblk), indxNj (:,iblk), & dxN (:,:,iblk), dyN (:,:,iblk), & dxT (:,:,iblk), dyU (:,:,iblk), & narear (:,:,iblk) , & @@ -932,8 +938,8 @@ subroutine evp (dt) do iblk = 1, nblocks call stepu_C (nx_block , ny_block , & ! u, E point - icelle (iblk), Cdn_ocn (:,:,iblk), & - indxei (:,iblk), indxej (:,iblk), & + icellE (iblk), Cdn_ocnE (:,:,iblk), & + indxEi (:,iblk), indxEj (:,iblk), & aiE (:,:,iblk), & uocnE (:,:,iblk), vocnE (:,:,iblk), & waterxE (:,:,iblk), forcexE (:,:,iblk), & @@ -944,8 +950,8 @@ subroutine evp (dt) TbE (:,:,iblk)) call stepv_C (nx_block, ny_block, & ! v, N point - icelln (iblk), Cdn_ocn (:,:,iblk), & - indxni (:,iblk), indxnj (:,iblk), & + icellN (iblk), Cdn_ocnN (:,:,iblk), & + indxNi (:,iblk), indxNj (:,iblk), & aiN (:,:,iblk), & uocnN (:,:,iblk), vocnN (:,:,iblk), & wateryN (:,:,iblk), forceyN (:,:,iblk), & @@ -958,10 +964,10 @@ subroutine evp (dt) !$OMP END PARALLEL DO ! calls ice_haloUpdate, controls bundles and masks - call dyn_HaloUpdate (halo_info, halo_info_mask, & + call dyn_haloUpdate (halo_info, halo_info_mask, & field_loc_Eface, field_type_vector, & uvelE) - call dyn_HaloUpdate (halo_info, halo_info_mask, & + call dyn_haloUpdate (halo_info, halo_info_mask, & field_loc_Nface, field_type_vector, & vvelN) @@ -971,10 +977,10 @@ subroutine evp (dt) vvelE(:,:,:) = vvelE(:,:,:)*epm(:,:,:) ! 
calls ice_haloUpdate, controls bundles and masks - call dyn_HaloUpdate (halo_info, halo_info_mask, & + call dyn_haloUpdate (halo_info, halo_info_mask, & field_loc_Nface, field_type_vector, & uvelN) - call dyn_HaloUpdate (halo_info, halo_info_mask, & + call dyn_haloUpdate (halo_info, halo_info_mask, & field_loc_Eface, field_type_vector, & vvelE) @@ -983,14 +989,43 @@ subroutine evp (dt) uvel(:,:,:) = uvel(:,:,:)*uvm(:,:,:) vvel(:,:,:) = vvel(:,:,:)*uvm(:,:,:) + ! U fields at NE corner + ! calls ice_haloUpdate, controls bundles and masks + call dyn_haloUpdate (halo_info, halo_info_mask, & + field_loc_NEcorner, field_type_vector, & + uvel, vvel) + + enddo ! subcycling - elseif (grid_ice == "CD") then + !----------------------------------------------------------------- + ! save quantities for mechanical redistribution + !----------------------------------------------------------------- + + !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) + do iblk = 1, nblocks + call deformationsC_T (nx_block , ny_block , & + icellT (iblk), & + indxTi (:,iblk), indxTj (:,iblk), & + uvelE (:,:,iblk), vvelE (:,:,iblk), & + uvelN (:,:,iblk), vvelN (:,:,iblk), & + dxN (:,:,iblk), dyE (:,:,iblk), & + dxT (:,:,iblk), dyT (:,:,iblk), & + tarear (:,:,iblk), uarea (:,:,iblk), & + shearU (:,:,iblk), & + shear (:,:,iblk), divu (:,:,iblk), & + rdg_conv(:,:,iblk), rdg_shear(:,:,iblk)) + enddo + !$OMP END PARALLEL DO + + elseif (grid_ice == "CD") then + + do ksub = 1,ndte ! subcycling !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks call stressCD_T (nx_block , ny_block , & - icellt (iblk), & - indxti (:,iblk), indxtj (:,iblk), & + icellT (iblk), & + indxTi (:,iblk), indxTj (:,iblk), & uvelE (:,:,iblk), vvelE (:,:,iblk), & uvelN (:,:,iblk), vvelN (:,:,iblk), & dxN (:,:,iblk), dyE (:,:,iblk), & @@ -1001,26 +1036,11 @@ subroutine evp (dt) stresspT (:,:,iblk), stressmT (:,:,iblk), & stress12T (:,:,iblk) ) - !----------------------------------------------------------------- - ! on last subcycle, save quantities for mechanical redistribution - !----------------------------------------------------------------- - if (ksub == ndte) then - call deformationsCD_T (nx_block , ny_block , & - icellt (iblk), & - indxti (:,iblk), indxtj (:,iblk), & - uvelE (:,:,iblk), vvelE (:,:,iblk), & - uvelN (:,:,iblk), vvelN (:,:,iblk), & - dxN (:,:,iblk), dyE (:,:,iblk), & - dxT (:,:,iblk), dyT (:,:,iblk), & - tarear (:,:,iblk), & - shear (:,:,iblk), divu (:,:,iblk), & - rdg_conv(:,:,iblk), rdg_shear(:,:,iblk)) - endif enddo !$OMP END PARALLEL DO ! calls ice_haloUpdate, controls bundles and masks - call dyn_HaloUpdate (halo_info, halo_info_mask, & + call dyn_haloUpdate (halo_info, halo_info_mask, & field_loc_center, field_type_scalar, & zetax2T, etax2T) @@ -1038,8 +1058,8 @@ subroutine evp (dt) ! 
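!
! dyn_haloUpdate above consults halo_info_mask when maskhalo_dyn is set,
! so halo exchanges skip regions with no ice. The integer halomask is
! built from the logical masks earlier in the routine, e.g. for the B
! grid (the C/CD grids widen the stencil by one T cell):
!
!      halomask = 0
!      where (iceUmask) halomask = 1
!      call ice_HaloUpdate (halomask, halo_info, &
!                           field_loc_center, field_type_scalar)
!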
NOTE these are actually strain rates * area (m^2/s) !----------------------------------------------------------------- call strain_rates_U (nx_block , ny_block , & - icellu (iblk), & - indxui (:,iblk), indxuj (:,iblk), & + icellU (iblk), & + indxUi (:,iblk), indxUj (:,iblk), & uvelE (:,:,iblk), vvelE (:,:,iblk), & uvelN (:,:,iblk), vvelN (:,:,iblk), & uvel (:,:,iblk), vvel (:,:,iblk), & @@ -1052,8 +1072,8 @@ subroutine evp (dt) shearU (:,:,iblk), DeltaU (:,:,iblk) ) call stressCD_U (nx_block , ny_block , & - icellu (iblk), & - indxui (:,iblk), indxuj (:,iblk), & + icellU (iblk), & + indxUi (:,iblk), indxUj (:,iblk), & uarea (:,:,iblk), & zetax2U (:,:,iblk), etax2U (:,:,iblk), & strengthU(:,:,iblk), & @@ -1065,10 +1085,10 @@ subroutine evp (dt) !$OMP END PARALLEL DO ! calls ice_haloUpdate, controls bundles and masks - call dyn_HaloUpdate (halo_info, halo_info_mask, & + call dyn_haloUpdate (halo_info, halo_info_mask, & field_loc_center, field_type_scalar, & stresspT, stressmT, stress12T) - call dyn_HaloUpdate (halo_info, halo_info_mask, & + call dyn_haloUpdate (halo_info, halo_info_mask, & field_loc_NEcorner,field_type_scalar, & stresspU, stressmU, stress12U) @@ -1076,8 +1096,8 @@ subroutine evp (dt) do iblk = 1, nblocks call div_stress_Ex (nx_block , ny_block , & - icelle (iblk), & - indxei (:,iblk), indxej (:,iblk), & + icellE (iblk), & + indxEi (:,iblk), indxEj (:,iblk), & dxE (:,:,iblk), dyE (:,:,iblk), & dxU (:,:,iblk), dyT (:,:,iblk), & earear (:,:,iblk) , & @@ -1085,8 +1105,8 @@ subroutine evp (dt) stress12U (:,:,iblk), strintxE (:,:,iblk) ) call div_stress_Ey (nx_block , ny_block , & - icelle (iblk), & - indxei (:,iblk), indxej (:,iblk), & + icellE (iblk), & + indxEi (:,iblk), indxEj (:,iblk), & dxE (:,:,iblk), dyE (:,:,iblk), & dxU (:,:,iblk), dyT (:,:,iblk), & earear (:,:,iblk) , & @@ -1094,8 +1114,8 @@ subroutine evp (dt) stress12T (:,:,iblk), strintyE (:,:,iblk) ) call div_stress_Nx (nx_block , ny_block , & - icelln (iblk), & - indxni (:,iblk), indxnj (:,iblk), & + icellN (iblk), & + indxNi (:,iblk), indxNj (:,iblk), & dxN (:,:,iblk), dyN (:,:,iblk), & dxT (:,:,iblk), dyU (:,:,iblk), & narear (:,:,iblk) , & @@ -1103,8 +1123,8 @@ subroutine evp (dt) stress12T (:,:,iblk), strintxN (:,:,iblk) ) call div_stress_Ny (nx_block , ny_block , & - icelln (iblk), & - indxni (:,iblk), indxnj (:,iblk), & + icellN (iblk), & + indxNi (:,iblk), indxNj (:,iblk), & dxN (:,:,iblk), dyN (:,:,iblk), & dxT (:,:,iblk), dyU (:,:,iblk), & narear (:,:,iblk) , & @@ -1118,8 +1138,8 @@ subroutine evp (dt) do iblk = 1, nblocks call stepuv_CD (nx_block , ny_block , & ! E point - icelle (iblk), Cdn_ocn (:,:,iblk), & - indxei (:,iblk), indxej (:,iblk), & + icellE (iblk), Cdn_ocnE (:,:,iblk), & + indxEi (:,iblk), indxEj (:,iblk), & aiE (:,:,iblk), & uocnE (:,:,iblk), vocnE (:,:,iblk), & waterxE (:,:,iblk), wateryE (:,:,iblk), & @@ -1132,8 +1152,8 @@ subroutine evp (dt) TbE (:,:,iblk)) call stepuv_CD (nx_block , ny_block , & ! N point - icelln (iblk), Cdn_ocn (:,:,iblk), & - indxni (:,iblk), indxnj (:,iblk), & + icellN (iblk), Cdn_ocnN (:,:,iblk), & + indxNi (:,iblk), indxNj (:,iblk), & aiN (:,:,iblk), & uocnN (:,:,iblk), vocnN (:,:,iblk), & waterxN (:,:,iblk), wateryN (:,:,iblk), & @@ -1148,10 +1168,10 @@ subroutine evp (dt) !$OMP END PARALLEL DO ! 
calls ice_haloUpdate, controls bundles and masks - call dyn_HaloUpdate (halo_info, halo_info_mask, & + call dyn_haloUpdate (halo_info, halo_info_mask, & field_loc_Eface, field_type_vector, & uvelE, vvelE) - call dyn_HaloUpdate (halo_info, halo_info_mask, & + call dyn_haloUpdate (halo_info, halo_info_mask, & field_loc_Nface, field_type_vector, & uvelN, vvelN) @@ -1160,16 +1180,34 @@ subroutine evp (dt) uvel(:,:,:) = uvel(:,:,:)*uvm(:,:,:) vvel(:,:,:) = vvel(:,:,:)*uvm(:,:,:) + ! U fields at NE corner + ! calls ice_haloUpdate, controls bundles and masks + call dyn_haloUpdate (halo_info, halo_info_mask, & + field_loc_NEcorner, field_type_vector, & + uvel, vvel) + + enddo ! subcycling - endif ! grid_ice + !----------------------------------------------------------------- + ! save quantities for mechanical redistribution + !----------------------------------------------------------------- - ! U fields at NE corner - ! calls ice_haloUpdate, controls bundles and masks - call dyn_HaloUpdate (halo_info, halo_info_mask, & - field_loc_NEcorner, field_type_vector, & - uvel, vvel) + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + call deformationsCD_T (nx_block , ny_block , & + icellT (iblk), & + indxTi (:,iblk), indxTj (:,iblk), & + uvelE (:,:,iblk), vvelE (:,:,iblk), & + uvelN (:,:,iblk), vvelN (:,:,iblk), & + dxN (:,:,iblk), dyE (:,:,iblk), & + dxT (:,:,iblk), dyT (:,:,iblk), & + tarear (:,:,iblk), & + shear (:,:,iblk), divu (:,:,iblk), & + rdg_conv(:,:,iblk), rdg_shear(:,:,iblk)) + enddo + !$OMP END PARALLEL DO + endif ! grid_ice - enddo ! subcycling call ice_timer_stop(timer_evp_2d) endif ! evp_algorithm @@ -1260,8 +1298,8 @@ subroutine evp (dt) do iblk = 1, nblocks call dyn_finish & (nx_block , ny_block , & - icellu (iblk), Cdn_ocn (:,:,iblk), & - indxui (:,iblk), indxuj (:,iblk), & + icellU (iblk), Cdn_ocnU(:,:,iblk), & + indxUi (:,iblk), indxUj (:,iblk), & uvel (:,:,iblk), vvel (:,:,iblk), & uocnU (:,:,iblk), vocnU (:,:,iblk), & aiU (:,:,iblk), fmU (:,:,iblk), & @@ -1276,8 +1314,8 @@ subroutine evp (dt) call dyn_finish & (nx_block , ny_block , & - icelln (iblk), Cdn_ocn (:,:,iblk), & - indxni (:,iblk), indxnj (:,iblk), & + icellN (iblk), Cdn_ocnN(:,:,iblk), & + indxNi (:,iblk), indxNj (:,iblk), & uvelN (:,:,iblk), vvelN (:,:,iblk), & uocnN (:,:,iblk), vocnN (:,:,iblk), & aiN (:,:,iblk), fmN (:,:,iblk), & @@ -1285,8 +1323,8 @@ subroutine evp (dt) call dyn_finish & (nx_block , ny_block , & - icelle (iblk), Cdn_ocn (:,:,iblk), & - indxei (:,iblk), indxej (:,iblk), & + icellE (iblk), Cdn_ocnE(:,:,iblk), & + indxEi (:,iblk), indxEj (:,iblk), & uvelE (:,:,iblk), vvelE (:,:,iblk), & uocnE (:,:,iblk), vocnE (:,:,iblk), & aiE (:,:,iblk), fmE (:,:,iblk), & @@ -1297,30 +1335,12 @@ subroutine evp (dt) endif - ! strocn computed on U, N, E as needed. Map strocn U divided by aiU to T - ! TODO: This should be done elsewhere as part of generalization? - ! TODO: Rename strocn[x,y]T since it's different than strocn[x,y][U,N,E] - ! 
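!
! dyn_finish now receives the ocean drag interpolated to the same point as
! the velocity being finished (Cdn_ocnU, Cdn_ocnE, Cdn_ocnN instead of the
! T-point Cdn_ocn), keeping the ice-ocean stress consistent on each grid;
! the E-point call, abbreviated:
!
!      call dyn_finish (nx_block, ny_block, &
!           icellE(iblk), Cdn_ocnE(:,:,iblk), &
!           indxEi(:,iblk), indxEj(:,iblk), &
!           uvelE(:,:,iblk), vvelE(:,:,iblk), &
!           ...)   ! remaining arguments as in the U-point call
!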
conservation requires aiU be divided before averaging - work1 = c0 - work2 = c0 - !$OMP PARALLEL DO PRIVATE(iblk,ij,i,j) SCHEDULE(runtime) - do iblk = 1, nblocks - do ij = 1, icellu(iblk) - i = indxui(ij,iblk) - j = indxuj(ij,iblk) - work1(i,j,iblk) = strocnxU(i,j,iblk)/aiU(i,j,iblk) - work2(i,j,iblk) = strocnyU(i,j,iblk)/aiU(i,j,iblk) - enddo - enddo - !$OMP END PARALLEL DO - call ice_HaloUpdate (work1, halo_info, & - field_loc_NEcorner, field_type_vector) - call ice_HaloUpdate (work2, halo_info, & - field_loc_NEcorner, field_type_vector) - call grid_average_X2Y('F', work1, 'U', strocnxT, 'T') ! shift - call grid_average_X2Y('F', work2, 'U', strocnyT, 'T') - if (grid_ice == 'CD' .or. grid_ice == 'C') then + call ice_HaloUpdate (strintxE, halo_info, & + field_loc_Eface, field_type_vector) + call ice_HaloUpdate (strintyN, halo_info, & + field_loc_Nface, field_type_vector) + call ice_timer_stop(timer_bound) call grid_average_X2Y('S', strintxE, 'E', strintxU, 'U') ! diagnostic call grid_average_X2Y('S', strintyN, 'N', strintyU, 'U') ! diagnostic endif @@ -1337,8 +1357,8 @@ end subroutine evp ! author: Elizabeth C. Hunke, LANL subroutine stress (nx_block, ny_block, & - icellt, & - indxti, indxtj, & + icellT, & + indxTi, indxTj, & uvel, vvel, & dxT, dyT, & dxhy, dyhx, & @@ -1358,11 +1378,11 @@ subroutine stress (nx_block, ny_block, & integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellt ! no. of cells where icetmask = 1 + icellT ! no. of cells where iceTmask = .true. integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & - indxti , & ! compressed index in i-direction - indxtj ! compressed index in j-direction + indxTi , & ! compressed index in i-direction + indxTj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & strength , & ! ice strength (N/m) @@ -1418,9 +1438,9 @@ subroutine stress (nx_block, ny_block, & str(:,:,:) = c0 - do ij = 1, icellt - i = indxti(ij) - j = indxtj(ij) + do ij = 1, icellT + i = indxTi(ij) + j = indxTj(ij) !----------------------------------------------------------------- ! strain rates @@ -1639,8 +1659,8 @@ end subroutine stress ! for solving the sea ice momentum equation. Ocean Model., 101, 59-67. subroutine stressC_T (nx_block, ny_block , & - icellt , & - indxti , indxtj , & + icellT , & + indxTi , indxTj , & uvelE , vvelE , & uvelN , vvelN , & dxN , dyE , & @@ -1655,11 +1675,11 @@ subroutine stressC_T (nx_block, ny_block , & integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellt ! no. of cells where icetmask = 1 + icellT ! no. of cells where iceTmask = .true. integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & - indxti , & ! compressed index in i-direction - indxtj ! compressed index in j-direction + indxTi , & ! compressed index in i-direction + indxTj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & uvelE , & ! 
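!
! (On the halo updates added before the E-to-U and N-to-U averages above:
! strintxE and strintyN are computed only on owned cells, and the
! grid_average_X2Y stencil reads neighboring face values, so the halos
! are refreshed first; this keeps the strintxU/strintyU diagnostics well
! defined along block edges. This rationale is inferred from the stencil,
! not stated in the patch.)
!
!      call ice_HaloUpdate (strintxE, halo_info, &
!                           field_loc_Eface, field_type_vector)
!      call grid_average_X2Y('S', strintxE, 'E', strintxU, 'U') ! diagnostic
!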
x-component of velocity (m/s) at the E point @@ -1702,17 +1722,17 @@ subroutine stressC_T (nx_block, ny_block , & !----------------------------------------------------------------- call strain_rates_T (nx_block , ny_block , & - icellt , & - indxti(:) , indxtj (:) , & + icellT , & + indxTi(:) , indxTj (:) , & uvelE (:,:), vvelE (:,:), & uvelN (:,:), vvelN (:,:), & dxN (:,:), dyE (:,:), & dxT (:,:), dyT (:,:), & divT (:,:), tensionT(:,:) ) - do ij = 1, icellt - i = indxti(ij) - j = indxtj(ij) + do ij = 1, icellT + i = indxTi(ij) + j = indxTj(ij) !----------------------------------------------------------------- ! Square of shear strain rate at T obtained from interpolation of @@ -1764,8 +1784,8 @@ end subroutine stressC_T ! for solving the sea ice momentum equation. Ocean Model., 101, 59-67. subroutine stressC_U (nx_block , ny_block, & - icellu, & - indxui , indxuj, & + icellU, & + indxUi , indxUj, & uarea , & etax2U , deltaU, & strengthU, shearU, & @@ -1776,11 +1796,11 @@ subroutine stressC_U (nx_block , ny_block, & integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellu ! no. of cells where iceumask = 1 + icellU ! no. of cells where iceUmask = 1 integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & - indxui , & ! compressed index in i-direction - indxuj ! compressed index in j-direction + indxUi , & ! compressed index in i-direction + indxUj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & uarea , & ! area of U point @@ -1813,17 +1833,17 @@ subroutine stressC_U (nx_block , ny_block, & !----------------------------------------------------------------- if (visc_method == 'avg_zeta') then - do ij = 1, icellu - i = indxui(ij) - j = indxuj(ij) + do ij = 1, icellU + i = indxUi(ij) + j = indxUj(ij) stress12(i,j) = (stress12(i,j)*(c1-arlx1i*revp) & + arlx1i*p5*etax2U(i,j)*shearU(i,j)) * denom1 enddo elseif (visc_method == 'avg_strength') then - do ij = 1, icellu - i = indxui(ij) - j = indxuj(ij) + do ij = 1, icellU + i = indxUi(ij) + j = indxUj(ij) DminUarea = deltaminEVP*uarea(i,j) ! only need etax2U here, but other terms are calculated with etax2U ! minimal extra calculations here even though it seems like there is @@ -1844,8 +1864,8 @@ end subroutine stressC_U ! Nov 2021 subroutine stressCD_T (nx_block, ny_block, & - icellt, & - indxti, indxtj, & + icellT, & + indxTi, indxTj, & uvelE, vvelE, & uvelN, vvelN, & dxN, dyE, & @@ -1861,11 +1881,11 @@ subroutine stressCD_T (nx_block, ny_block, & integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellt ! no. of cells where icetmask = 1 + icellT ! no. of cells where iceTmask = .true. integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & - indxti , & ! compressed index in i-direction - indxtj ! compressed index in j-direction + indxTi , & ! compressed index in i-direction + indxTj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & uvelE , & ! 
x-component of velocity (m/s) at the E point @@ -1908,8 +1928,8 @@ subroutine stressCD_T (nx_block, ny_block, & !----------------------------------------------------------------- call strain_rates_T (nx_block , ny_block , & - icellt , & - indxti(:) , indxtj (:) , & + icellT , & + indxTi(:) , indxTj (:) , & uvelE (:,:), vvelE (:,:), & uvelN (:,:), vvelN (:,:), & dxN (:,:), dyE (:,:), & @@ -1917,9 +1937,9 @@ subroutine stressCD_T (nx_block, ny_block, & divT (:,:), tensionT(:,:), & shearT(:,:), DeltaT (:,:) ) - do ij = 1, icellt - i = indxti(ij) - j = indxtj(ij) + do ij = 1, icellT + i = indxTi(ij) + j = indxTj(ij) !----------------------------------------------------------------- ! viscosities and replacement pressure at T point @@ -1954,8 +1974,8 @@ end subroutine stressCD_T ! Nov 2021 subroutine stressCD_U (nx_block, ny_block, & - icellu, & - indxui, indxuj, & + icellU, & + indxUi, indxUj, & uarea, & zetax2U, etax2U, & strengthU, & @@ -1970,11 +1990,11 @@ subroutine stressCD_U (nx_block, ny_block, & integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellu ! no. of cells where iceumask = 1 + icellU ! no. of cells where iceUmask = 1 integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & - indxui , & ! compressed index in i-direction - indxuj ! compressed index in j-direction + indxUi , & ! compressed index in i-direction + indxUj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & uarea , & ! area of U-cell (m^2) @@ -2004,9 +2024,9 @@ subroutine stressCD_U (nx_block, ny_block, & character(len=*), parameter :: subname = '(stressCD_U)' - do ij = 1, icellu - i = indxui(ij) - j = indxuj(ij) + do ij = 1, icellU + i = indxUi(ij) + j = indxUj(ij) !----------------------------------------------------------------- ! viscosities and replacement pressure at U point diff --git a/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 b/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 index fe04a3d63..e874611bd 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 @@ -1017,7 +1017,7 @@ end subroutine dealloc1d !======================================================================= subroutine ice_dyn_evp_1d_copyin(nx, ny, nblk, nx_glob, ny_glob, & - I_icetmask, I_iceumask, I_cdn_ocn, I_aiu, I_uocn, I_vocn, & + I_iceTmask, I_iceUmask, I_cdn_ocn, I_aiu, I_uocn, I_vocn, & I_forcex, I_forcey, I_Tbu, I_umassdti, I_fm, I_uarear, I_tarear, & I_strintx, I_strinty, I_uvel_init, I_vvel_init, I_strength, & I_uvel, I_vvel, I_dxT, I_dyT, I_stressp_1, I_stressp_2, & @@ -1035,9 +1035,7 @@ subroutine ice_dyn_evp_1d_copyin(nx, ny, nblk, nx_glob, ny_glob, & integer(int_kind), intent(in) :: nx, ny, nblk, nx_glob, ny_glob logical(kind=log_kind), dimension(nx, ny, nblk), intent(in) :: & - I_iceumask - integer(kind=int_kind), dimension(nx, ny, nblk), intent(in) :: & - I_icetmask + I_iceTmask, I_iceUmask real(kind=dbl_kind), dimension(nx, ny, nblk), intent(in) :: & I_cdn_ocn, I_aiu, I_uocn, I_vocn, I_forcex, I_forcey, I_Tbu, & I_umassdti, I_fm, I_uarear, I_tarear, I_strintx, I_strinty, & @@ -1049,9 +1047,7 @@ subroutine ice_dyn_evp_1d_copyin(nx, ny, nblk, nx_glob, ny_glob, & ! 
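! The 1d solver gathers every block-decomposed field onto the master task
! before running its kernel; a sketch of that gather pattern, with
! I_fld/G_fld as illustrative placeholders for the argument pairs below:
!
!    call gather_global_ext(G_fld, I_fld, master_task, distrb_info)
!    if (my_task == master_task) then
!       ! pack G_fld into 1D vectors and run the EVP kernel
!    endif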
local variables logical(kind=log_kind), dimension(nx_glob, ny_glob) :: & - G_iceumask - integer(kind=int_kind), dimension(nx_glob, ny_glob) :: & - G_icetmask + G_iceTmask, G_iceUmask real(kind=dbl_kind), dimension(nx_glob, ny_glob) :: & G_cdn_ocn, G_aiu, G_uocn, G_vocn, G_forcex, G_forcey, G_Tbu, & G_umassdti, G_fm, G_uarear, G_tarear, G_strintx, G_strinty, & @@ -1063,8 +1059,8 @@ subroutine ice_dyn_evp_1d_copyin(nx, ny, nblk, nx_glob, ny_glob, & character(len=*), parameter :: & subname = '(ice_dyn_evp_1d_copyin)' - call gather_global_ext(G_icetmask, I_icetmask, master_task, distrb_info ) - call gather_global_ext(G_iceumask, I_iceumask, master_task, distrb_info ) + call gather_global_ext(G_iceTmask, I_iceTmask, master_task, distrb_info ) + call gather_global_ext(G_iceUmask, I_iceUmask, master_task, distrb_info ) call gather_global_ext(G_cdn_ocn, I_cdn_ocn, master_task, distrb_info ) call gather_global_ext(G_aiu, I_aiu, master_task, distrb_info ) call gather_global_ext(G_uocn, I_uocn, master_task, distrb_info ) @@ -1101,9 +1097,9 @@ subroutine ice_dyn_evp_1d_copyin(nx, ny, nblk, nx_glob, ny_glob, & ! all calculations id done on master task if (my_task == master_task) then ! find number of active points and allocate 1D vectors - call calc_na(nx_glob, ny_glob, NA_len, G_icetmask, G_iceumask) + call calc_na(nx_glob, ny_glob, NA_len, G_iceTmask, G_iceUmask) call alloc1d(NA_len) - call calc_2d_indices(nx_glob, ny_glob, NA_len, G_icetmask, G_iceumask) + call calc_2d_indices(nx_glob, ny_glob, NA_len, G_iceTmask, G_iceUmask) call calc_navel(nx_glob, ny_glob, NA_len, NAVEL_len) call alloc1d_navel(NAVEL_len) ! initialize OpenMP. FIXME: ought to be called from main @@ -1120,7 +1116,7 @@ subroutine ice_dyn_evp_1d_copyin(nx, ny, nblk, nx_glob, ny_glob, & G_stressp_2, G_stressp_3, G_stressp_4, G_stressm_1, & G_stressm_2, G_stressm_3, G_stressm_4, G_stress12_1, & G_stress12_2, G_stress12_3, G_stress12_4) - call calc_halo_parent(nx_glob, ny_glob, NA_len, NAVEL_len, G_icetmask) + call calc_halo_parent(nx_glob, ny_glob, NA_len, NAVEL_len, G_iceTmask) end if end subroutine ice_dyn_evp_1d_copyin @@ -1336,7 +1332,7 @@ end subroutine ice_dyn_evp_1d_kernel !======================================================================= - subroutine calc_na(nx, ny, na, icetmask, iceumask) + subroutine calc_na(nx, ny, na, iceTmask, iceUmask) ! Calculate number of active points use ice_blocks, only : nghost @@ -1344,10 +1340,8 @@ subroutine calc_na(nx, ny, na, icetmask, iceumask) implicit none integer(kind=int_kind), intent(in) :: nx, ny - integer(kind=int_kind), dimension(nx, ny), intent(in) :: & - icetmask logical(kind=log_kind), dimension(nx, ny), intent(in) :: & - iceumask + iceTmask, iceUmask integer(kind=int_kind), intent(out) :: na ! local variables @@ -1360,7 +1354,7 @@ subroutine calc_na(nx, ny, na, icetmask, iceumask) ! NOTE: T mask includes northern and eastern ghost cells do j = 1 + nghost, ny do i = 1 + nghost, nx - if (icetmask(i,j) == 1 .or. iceumask(i,j)) na = na + 1 + if (iceTmask(i,j) .or. 
iceUmask(i,j)) na = na + 1 end do end do @@ -1368,17 +1362,15 @@ end subroutine calc_na !======================================================================= - subroutine calc_2d_indices(nx, ny, na, icetmask, iceumask) + subroutine calc_2d_indices(nx, ny, na, iceTmask, iceUmask) use ice_blocks, only : nghost implicit none integer(kind=int_kind), intent(in) :: nx, ny, na - integer(kind=int_kind), dimension(nx, ny), intent(in) :: & - icetmask logical(kind=log_kind), dimension(nx, ny), intent(in) :: & - iceumask + iceTmask, iceUmask ! local variables @@ -1394,12 +1386,12 @@ subroutine calc_2d_indices(nx, ny, na, icetmask, iceumask) ! NOTE: T mask includes northern and eastern ghost cells do j = 1 + nghost, ny do i = 1 + nghost, nx - if (icetmask(i,j) == 1 .or. iceumask(i,j)) then + if (iceTmask(i,j) .or. iceUmask(i,j)) then Nmaskt = Nmaskt + 1 indi(Nmaskt) = i indj(Nmaskt) = j - if (icetmask(i,j) /= 1) skiptcell(Nmaskt) = .true. - if (.not. iceumask(i,j)) skipucell(Nmaskt) = .true. + if (.not. iceTmask(i,j)) skiptcell(Nmaskt) = .true. + if (.not. iceUmask(i,j)) skipucell(Nmaskt) = .true. ! NOTE: U mask does not include northern and eastern ! ghost cells. Skip northern and eastern ghost cells if (i == nx) skipucell(Nmaskt) = .true. @@ -1588,13 +1580,13 @@ end subroutine convert_2d_1d !======================================================================= - subroutine calc_halo_parent(nx, ny, na, navel, I_icetmask) + subroutine calc_halo_parent(nx, ny, na, navel, I_iceTmask) implicit none integer(kind=int_kind), intent(in) :: nx, ny, na, navel - integer(kind=int_kind), dimension(nx, ny), intent(in) :: & - I_icetmask + logical(kind=log_kind), dimension(nx, ny), intent(in) :: & + I_iceTmask ! local variables @@ -1619,10 +1611,10 @@ subroutine calc_halo_parent(nx, ny, na, navel, I_icetmask) j = int((indij(iw) - 1) / (nx)) + 1 i = indij(iw) - (j - 1) * nx ! if within ghost zone - if (i == nx .and. I_icetmask(2, j) == 1) Ihalo(iw) = 2 + (j - 1) * nx - if (i == 1 .and. I_icetmask(nx - 1, j) == 1) Ihalo(iw) = (nx - 1) + (j - 1) * nx - if (j == ny .and. I_icetmask(i, 2) == 1) Ihalo(iw) = i + nx - if (j == 1 .and. I_icetmask(i, ny - 1) == 1) Ihalo(iw) = i + (ny - 2) * nx + if (i == nx .and. I_iceTmask(2, j) ) Ihalo(iw) = 2 + (j - 1) * nx + if (i == 1 .and. I_iceTmask(nx - 1, j) ) Ihalo(iw) = (nx - 1) + (j - 1) * nx + if (j == ny .and. I_iceTmask(i, 2) ) Ihalo(iw) = i + nx + if (j == 1 .and. I_iceTmask(i, ny - 1) ) Ihalo(iw) = i + (ny - 2) * nx end do ! relate halo indices to indij vector diff --git a/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 b/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 index 95d2eedb1..187ec55cc 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 @@ -106,6 +106,12 @@ module ice_dyn_shared uvelE_init , & ! x-component of velocity (m/s), beginning of timestep vvelE_init ! y-component of velocity (m/s), beginning of timestep + logical (kind=log_kind), dimension (:,:,:), allocatable, public :: & + iceTmask, & ! ice extent mask (T-cell) + iceUmask, & ! ice extent mask (U-cell) + iceNmask, & ! ice extent mask (N-cell) + iceEmask ! ice extent mask (E-cell) + real (kind=dbl_kind), allocatable, public :: & DminTarea(:,:,:) ! deltamin * tarea (m^2/s) @@ -168,6 +174,8 @@ subroutine alloc_dyn_shared allocate( & uvel_init (nx_block,ny_block,max_blocks), & ! x-component of velocity (m/s), beginning of timestep vvel_init (nx_block,ny_block,max_blocks), & ! 
y-component of velocity (m/s), beginning of timestep
+         iceTmask   (nx_block,ny_block,max_blocks), & ! T mask for dynamics
+         iceUmask   (nx_block,ny_block,max_blocks), & ! U mask for dynamics
          stat=ierr)
       if (ierr/=0) call abort_ice(subname//': Out of memory')
 
@@ -177,6 +185,8 @@ subroutine alloc_dyn_shared
          vvelE_init (nx_block,ny_block,max_blocks), & ! y-component of velocity (m/s), beginning of timestep
          uvelN_init (nx_block,ny_block,max_blocks), & ! x-component of velocity (m/s), beginning of timestep
          vvelN_init (nx_block,ny_block,max_blocks), & ! y-component of velocity (m/s), beginning of timestep
+         iceEmask   (nx_block,ny_block,max_blocks), & ! E mask for dynamics
+         iceNmask   (nx_block,ny_block,max_blocks), & ! N mask for dynamics
          stat=ierr)
       if (ierr/=0) call abort_ice(subname//': Out of memory')
       endif
@@ -199,7 +209,7 @@ subroutine init_dyn (dt)
          stresspT, stressmT, stress12T, &
          stresspU, stressmU, stress12U
       use ice_state, only: uvel, vvel, uvelE, vvelE, uvelN, vvelN, divu, shear
-      use ice_grid, only: ULAT, NLAT, ELAT, tarea, iceumask, iceemask, icenmask
+      use ice_grid, only: ULAT, NLAT, ELAT, tarea
 
       real (kind=dbl_kind), intent(in) :: &
          dt ! time step
@@ -310,10 +320,10 @@ subroutine init_dyn (dt)
             endif
 
             ! ice extent mask on velocity points
-            iceumask(i,j,iblk) = .false.
+            iceUmask(i,j,iblk) = .false.
             if (grid_ice == 'CD' .or. grid_ice == 'C') then
-               iceemask(i,j,iblk) = .false.
-               icenmask(i,j,iblk) = .false.
+               iceEmask(i,j,iblk) = .false.
+               iceNmask(i,j,iblk) = .false.
             end if
          enddo ! i
       enddo ! j
@@ -394,7 +404,7 @@ subroutine dyn_prep1 (nx_block, ny_block, &
       real (kind=dbl_kind), dimension (nx_block,ny_block), intent(out) :: &
          Tmass ! total mass of ice and snow (kg/m^2)
 
-      integer (kind=int_kind), dimension (nx_block,ny_block), intent(out) :: &
+      logical (kind=log_kind), dimension (nx_block,ny_block), intent(out) :: &
          iceTmask ! ice extent mask (T-cell)
 
       ! local variables
@@ -438,7 +448,7 @@ subroutine dyn_prep1 (nx_block, ny_block, &
       !-----------------------------------------------------------------
       ! augmented mask (land + open ocean)
       !-----------------------------------------------------------------
-            iceTmask (i,j) = 0
+            iceTmask (i,j) = .false.
          enddo
       enddo
 
@@ -450,10 +460,10 @@ subroutine dyn_prep1 (nx_block, ny_block, &
            if (tmphm(i-1,j+1) .or. tmphm(i,j+1) .or. tmphm(i+1,j+1) .or. &
               tmphm(i-1,j)   .or. tmphm(i,j)   .or. tmphm(i+1,j)   .or. &
               tmphm(i-1,j-1) .or. tmphm(i,j-1) .or. tmphm(i+1,j-1) ) then
-               iceTmask(i,j) = 1
+               iceTmask(i,j) = .true.
            endif
 
-           if (.not.Tmask(i,j)) iceTmask(i,j) = 0
+           if (.not.Tmask(i,j)) iceTmask(i,j) = .false.
         enddo
      enddo
 
@@ -504,8 +514,8 @@ subroutine dyn_prep2 (nx_block, ny_block, &
        ilo,ihi,jlo,jhi ! beginning and end of physical domain
 
      integer (kind=int_kind), intent(out) :: &
-        icellT , & ! no. of cells where iceTmask = 1
-        icellX     ! no. of cells where iceXmask = 1
+        icellT , & ! no. of cells where iceTmask = .true.
+        icellX     ! no. of cells where iceXmask = .true.
 
      integer (kind=int_kind), dimension (nx_block*ny_block), intent(out) :: &
        indxTi , & ! compressed index in i-direction on T grid
@@ -516,7 +526,7 @@ subroutine dyn_prep2 (nx_block, ny_block, &
      logical (kind=log_kind), dimension (nx_block,ny_block), intent(in) :: &
        Xmask      ! land/boundary mask, thickness (X-grid-cell)
 
-     integer (kind=int_kind), dimension (nx_block,ny_block), intent(in) :: &
+     logical (kind=log_kind), dimension (nx_block,ny_block), intent(in) :: &
       iceTmask   ! 
ice extent mask (T-cell) logical (kind=log_kind), dimension (nx_block,ny_block), intent(inout) :: & @@ -590,7 +600,7 @@ subroutine dyn_prep2 (nx_block, ny_block, & taubx (i,j) = c0 tauby (i,j) = c0 - if (iceTmask(i,j)==0) then + if (.not.iceTmask(i,j)) then stressp_1 (i,j) = c0 stressp_2 (i,j) = c0 stressp_3 (i,j) = c0 @@ -608,7 +618,7 @@ subroutine dyn_prep2 (nx_block, ny_block, & enddo ! j !----------------------------------------------------------------- - ! Identify cells where iceTmask = 1 + ! Identify cells where iceTmask = .true. ! Note: The icellT mask includes north and east ghost cells ! where stresses are needed. !----------------------------------------------------------------- @@ -616,7 +626,7 @@ subroutine dyn_prep2 (nx_block, ny_block, & icellT = 0 do j = jlo, jhi+1 do i = ilo, ihi+1 - if (iceTmask(i,j) == 1) then + if (iceTmask(i,j)) then icellT = icellT + 1 indxTi(icellT) = i indxTj(icellT) = j @@ -729,7 +739,7 @@ subroutine stepu (nx_block, ny_block, & integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellU ! total count when iceumask is true + icellU ! total count when iceUmask is true integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & indxUi , & ! compressed index in i-direction @@ -1166,7 +1176,7 @@ subroutine dyn_finish (nx_block, ny_block, & integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellU ! total count when iceumask is true + icellU ! total count when iceUmask is true integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & indxUi , & ! compressed index in i-direction @@ -1635,7 +1645,7 @@ subroutine deformations (nx_block, ny_block, & integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellT ! no. of cells where iceTmask = 1 + icellT ! no. of cells where iceTmask = .true. integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & indxTi , & ! compressed index in i-direction @@ -1733,7 +1743,7 @@ subroutine deformationsCD_T (nx_block, ny_block, & integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellT ! no. of cells where iceTmask = 1 + icellT ! no. of cells where iceTmask = .true. integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & indxTi , & ! compressed index in i-direction @@ -1830,7 +1840,7 @@ subroutine deformationsC_T (nx_block, ny_block, & integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellT ! no. of cells where iceTmask = 1 + icellT ! no. of cells where iceTmask = .true. integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & indxTi , & ! compressed index in i-direction diff --git a/cicecore/cicedynB/dynamics/ice_dyn_vp.F90 b/cicecore/cicedynB/dynamics/ice_dyn_vp.F90 index 17fd0b73f..6534e7568 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_vp.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_vp.F90 @@ -51,7 +51,7 @@ module ice_dyn_vp seabed_stress, Ktens, stack_fields, unstack_fields use ice_fileunits, only: nu_diag use ice_flux, only: fmU - use ice_global_reductions, only: global_sum, global_allreduce_sum + use ice_global_reductions, only: global_sum use ice_grid, only: dxT, dyT, dxhy, dyhx, cxp, cyp, cxm, cym, uarear use ice_exit, only: abort_ice use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted @@ -96,17 +96,19 @@ module ice_dyn_vp ! module variables integer (kind=int_kind), allocatable :: & - icellt(:) , & ! no. of cells where icetmask = 1 - icellu(:) ! no. 
of cells where iceumask = 1 + icellT(:) , & ! no. of cells where iceTmask = .true. + icellU(:) ! no. of cells where iceUmask = .true. integer (kind=int_kind), allocatable :: & - indxti(:,:) , & ! compressed index in i-direction - indxtj(:,:) , & ! compressed index in j-direction - indxui(:,:) , & ! compressed index in i-direction - indxuj(:,:) ! compressed index in j-direction + indxTi(:,:) , & ! compressed index in i-direction + indxTj(:,:) , & ! compressed index in j-direction + indxUi(:,:) , & ! compressed index in i-direction + indxUj(:,:) ! compressed index in j-direction real (kind=dbl_kind), allocatable :: & - fld2(:,:,:,:) ! work array for boundary updates + fld2(:,:,:,:), & ! work array for boundary updates + fld3(:,:,:,:), & ! work array for boundary updates + fld4(:,:,:,:) ! work array for boundary updates !======================================================================= @@ -136,12 +138,14 @@ subroutine init_vp this_block ! block information for current block ! Initialize module variables - allocate(icellt(max_blocks), icellu(max_blocks)) - allocate(indxti(nx_block*ny_block, max_blocks), & - indxtj(nx_block*ny_block, max_blocks), & - indxui(nx_block*ny_block, max_blocks), & - indxuj(nx_block*ny_block, max_blocks)) + allocate(icellT(max_blocks), icellU(max_blocks)) + allocate(indxTi(nx_block*ny_block, max_blocks), & + indxTj(nx_block*ny_block, max_blocks), & + indxUi(nx_block*ny_block, max_blocks), & + indxUj(nx_block*ny_block, max_blocks)) allocate(fld2(nx_block,ny_block,2,max_blocks)) + allocate(fld3(nx_block,ny_block,3,max_blocks)) + allocate(fld4(nx_block,ny_block,4,max_blocks)) end subroutine init_vp @@ -167,19 +171,19 @@ subroutine implicit_solver (dt) use ice_blocks, only: block, get_block, nx_block, ny_block use ice_domain, only: blocks_ice, halo_info, maskhalo_dyn use ice_domain_size, only: max_blocks, ncat - use ice_dyn_shared, only: deformations + use ice_dyn_shared, only: deformations, iceTmask, iceUmask use ice_flux, only: rdg_conv, rdg_shear, strairxT, strairyT, & strairxU, strairyU, uocn, vocn, ss_tltx, ss_tlty, fmU, & strtltxU, strtltyU, strocnxU, strocnyU, strintxU, strintyU, taubxU, taubyU, & - strocnxT, strocnyT, strax, stray, & + strax, stray, & TbU, hwater, & stressp_1, stressp_2, stressp_3, stressp_4, & stressm_1, stressm_2, stressm_3, stressm_4, & stress12_1, stress12_2, stress12_3, stress12_4 use ice_grid, only: tmask, umask, dxT, dyT, cxp, cyp, cxm, cym, & - tarear, grid_type, grid_average_X2Y, iceumask, & + tarear, grid_type, grid_average_X2Y, & grid_atm_dynu, grid_atm_dynv, grid_ocn_dynu, grid_ocn_dynv - use ice_state, only: aice, vice, vsno, uvel, vvel, divu, shear, & + use ice_state, only: aice, aiU, vice, vsno, uvel, vvel, divu, shear, & aice_init, aice0, aicen, vicen, strength use ice_timers, only: timer_dynamics, timer_bound, & ice_timer_start, ice_timer_stop @@ -200,6 +204,7 @@ subroutine implicit_solver (dt) vocnU , & ! j ocean current (m/s) ss_tltxU , & ! sea surface slope, x-direction (m/m) ss_tltyU , & ! sea surface slope, y-direction (m/m) + cdn_ocnU , & ! ocn drag coefficient tmass , & ! total mass of ice and snow (kg/m^2) waterxU , & ! for ocean stress calculation, x (m/s) wateryU , & ! for ocean stress calculation, y (m/s) @@ -210,7 +215,6 @@ subroutine implicit_solver (dt) Cb , & ! seabed stress coefficient fpresx , & ! fixed point residual vector, x components: fx = uvel - uprev_k fpresy , & ! fixed point residual vector, y components: fy = vvel - vprev_k - aiU , & ! ice fraction on u-grid umass , & ! 
total mass of ice and snow (u grid) umassdti ! mass of U-cell/dte (kg/m^2 s) @@ -222,7 +226,6 @@ subroutine implicit_solver (dt) logical (kind=log_kind) :: calc_strair integer (kind=int_kind), dimension (nx_block,ny_block,max_blocks) :: & - icetmask, & ! ice extent mask (T-cell) halomask ! generic halo mask type (ice_halo) :: & @@ -234,10 +237,6 @@ subroutine implicit_solver (dt) real (kind=dbl_kind), allocatable :: & sol(:) ! solution vector - real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & - work1, & ! temporary - work2 ! temporary - character(len=*), parameter :: subname = '(implicit_solver)' call ice_timer_start(timer_dynamics) ! dynamics @@ -290,13 +289,13 @@ subroutine implicit_solver (dt) ilo, ihi, jlo, jhi, & aice (:,:,iblk), vice (:,:,iblk), & vsno (:,:,iblk), tmask (:,:,iblk), & - tmass (:,:,iblk), icetmask(:,:,iblk)) + tmass (:,:,iblk), iceTmask(:,:,iblk)) enddo ! iblk !$OMP END PARALLEL DO call ice_timer_start(timer_bound) - call ice_HaloUpdate (icetmask, halo_info, & + call ice_HaloUpdate (iceTmask, halo_info, & field_loc_center, field_type_scalar) call ice_timer_stop(timer_bound) @@ -304,12 +303,22 @@ subroutine implicit_solver (dt) ! convert fields from T to U grid !----------------------------------------------------------------- - call grid_average_X2Y('F',tmass , 'T', umass, 'U') - call grid_average_X2Y('F',aice_init, 'T', aiU , 'U') - call grid_average_X2Y('S',uocn , grid_ocn_dynu, uocnU , 'U') - call grid_average_X2Y('S',vocn , grid_ocn_dynv, vocnU , 'U') - call grid_average_X2Y('S',ss_tltx, grid_ocn_dynu, ss_tltxU, 'U') - call grid_average_X2Y('S',ss_tlty, grid_ocn_dynv, ss_tltyU, 'U') + call stack_fields(tmass, aice_init, cdn_ocn, fld3) + call ice_HaloUpdate (fld3, halo_info, & + field_loc_center, field_type_scalar) + call stack_fields(uocn, vocn, ss_tltx, ss_tlty, fld4) + call ice_HaloUpdate (fld4, halo_info, & + field_loc_center, field_type_vector) + call unstack_fields(fld3, tmass, aice_init, cdn_ocn) + call unstack_fields(fld4, uocn, vocn, ss_tltx, ss_tlty) + + call grid_average_X2Y('S',tmass , 'T' , umass , 'U') + call grid_average_X2Y('S',aice_init, 'T' , aiU , 'U') + call grid_average_X2Y('S',cdn_ocn , 'T' , cdn_ocnU, 'U') + call grid_average_X2Y('S',uocn , grid_ocn_dynu, uocnU , 'U') + call grid_average_X2Y('S',vocn , grid_ocn_dynv, vocnU , 'U') + call grid_average_X2Y('S',ss_tltx , grid_ocn_dynu, ss_tltxU, 'U') + call grid_average_X2Y('S',ss_tlty , grid_ocn_dynv, ss_tltyU, 'U') !---------------------------------------------------------------- ! Set wind stress to values supplied via NEMO or other forcing @@ -332,9 +341,7 @@ subroutine implicit_solver (dt) call grid_average_X2Y('F',strairyT,'T',strairyU,'U') endif -! tcraig, tcx, threading here leads to some non-reproducbile results and failures in icepack_ice_strength -! 
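! The stacked halo exchange introduced above trades several small messages
! for one larger one; the fields bundled into a work array are exchanged
! with a single grid location and field type, so they must be co-located.
! The pattern, as used above:
!
!    call stack_fields(tmass, aice_init, cdn_ocn, fld3)
!    call ice_HaloUpdate (fld3, halo_info, &
!                         field_loc_center, field_type_scalar)
!    call unstack_fields(fld3, tmass, aice_init, cdn_ocn)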
need to do more debugging - !$TCXOMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block,ij,i,j) + !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block,ij,i,j) do iblk = 1, nblocks !----------------------------------------------------------------- @@ -349,16 +356,16 @@ subroutine implicit_solver (dt) call dyn_prep2 (nx_block, ny_block, & ilo, ihi, jlo, jhi, & - icellt(iblk), icellu(iblk), & - indxti (:,iblk), indxtj (:,iblk), & - indxui (:,iblk), indxuj (:,iblk), & + icellT(iblk), icellU(iblk), & + indxTi (:,iblk), indxTj (:,iblk), & + indxUi (:,iblk), indxUj (:,iblk), & aiU (:,:,iblk), umass (:,:,iblk), & umassdti (:,:,iblk), fcor_blk (:,:,iblk), & umask (:,:,iblk), & uocnU (:,:,iblk), vocnU (:,:,iblk), & strairxU (:,:,iblk), strairyU (:,:,iblk), & ss_tltxU (:,:,iblk), ss_tltyU (:,:,iblk), & - icetmask (:,:,iblk), iceumask (:,:,iblk), & + iceTmask (:,:,iblk), iceUmask (:,:,iblk), & fmU (:,:,iblk), dt, & strtltxU (:,:,iblk), strtltyU (:,:,iblk), & strocnxU (:,:,iblk), strocnyU (:,:,iblk), & @@ -377,8 +384,8 @@ subroutine implicit_solver (dt) TbU (:,:,iblk)) call calc_bfix (nx_block , ny_block , & - icellu(iblk) , & - indxui (:,iblk), indxuj (:,iblk), & + icellU(iblk) , & + indxUi (:,iblk), indxUj (:,iblk), & umassdti (:,:,iblk), & forcexU (:,:,iblk), forceyU (:,:,iblk), & uvel_init (:,:,iblk), vvel_init (:,:,iblk), & @@ -389,9 +396,9 @@ subroutine implicit_solver (dt) !----------------------------------------------------------------- strength(:,:,iblk) = c0 ! initialize - do ij = 1, icellt(iblk) - i = indxti(ij, iblk) - j = indxtj(ij, iblk) + do ij = 1, icellT(iblk) + i = indxTi(ij, iblk) + j = indxTj(ij, iblk) call icepack_ice_strength (ncat, & aice (i,j, iblk), & vice (i,j, iblk), & @@ -402,7 +409,7 @@ subroutine implicit_solver (dt) enddo ! ij enddo ! iblk - !$TCXOMP END PARALLEL DO + !$OMP END PARALLEL DO call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -421,7 +428,7 @@ subroutine implicit_solver (dt) if (maskhalo_dyn) then call ice_timer_start(timer_bound) halomask = 0 - where (iceumask) halomask = 1 + where (iceUmask) halomask = 1 call ice_HaloUpdate (halomask, halo_info, & field_loc_center, field_type_scalar) call ice_timer_stop(timer_bound) @@ -436,8 +443,8 @@ subroutine implicit_solver (dt) !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks call seabed_stress_factor_LKD (nx_block, ny_block, & - icellu (iblk), & - indxui(:,iblk), indxuj(:,iblk), & + icellU (iblk), & + indxUi(:,iblk), indxUj(:,iblk), & vice(:,:,iblk), aice(:,:,iblk), & hwater(:,:,iblk), TbU(:,:,iblk)) enddo @@ -448,8 +455,8 @@ subroutine implicit_solver (dt) do iblk = 1, nblocks call seabed_stress_factor_prob (nx_block, ny_block, & - icellt(iblk), indxti(:,iblk), indxtj(:,iblk), & - icellu(iblk), indxui(:,iblk), indxuj(:,iblk), & + icellT(iblk), indxTi(:,iblk), indxTj(:,iblk), & + icellU(iblk), indxUi(:,iblk), indxUj(:,iblk), & aicen(:,:,:,iblk), vicen(:,:,:,iblk), & hwater(:,:,iblk), TbU(:,:,iblk)) enddo @@ -465,7 +472,7 @@ subroutine implicit_solver (dt) ntot = 0 do iblk = 1, nblocks - ntot = ntot + icellu(iblk) + ntot = ntot + icellU(iblk) enddo ntot = 2 * ntot ! times 2 because of u and v @@ -474,9 +481,9 @@ subroutine implicit_solver (dt) !----------------------------------------------------------------- ! 
Start of nonlinear iteration !----------------------------------------------------------------- - call anderson_solver (icellt , icellu , & - indxti , indxtj , & - indxui , indxuj , & + call anderson_solver (icellT , icellU , & + indxTi , indxTj , & + indxUi , indxUj , & aiU , ntot , & uocnU , vocnU , & waterxU , wateryU, & @@ -484,7 +491,7 @@ subroutine implicit_solver (dt) umassdti, sol , & fpresx , fpresy , & zetax2 , etax2 , & - rep_prs , & + rep_prs , cdn_ocnU,& Cb, halo_info_mask) !----------------------------------------------------------------- ! End of nonlinear iteration @@ -500,8 +507,8 @@ subroutine implicit_solver (dt) !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks call stress_vp (nx_block , ny_block , & - icellt(iblk) , & - indxti (:,iblk), indxtj (:,iblk), & + icellT(iblk) , & + indxTi (:,iblk), indxTj (:,iblk), & uvel (:,:,iblk), vvel (:,:,iblk), & dxT (:,:,iblk), dyT (:,:,iblk), & cxp (:,:,iblk), cyp (:,:,iblk), & @@ -523,8 +530,8 @@ subroutine implicit_solver (dt) !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks call deformations (nx_block , ny_block , & - icellt(iblk) , & - indxti (:,iblk), indxtj (:,iblk), & + icellT(iblk) , & + indxTi (:,iblk), indxTj (:,iblk), & uvel (:,:,iblk), vvel (:,:,iblk), & dxT (:,:,iblk), dyT (:,:,iblk), & cxp (:,:,iblk), cyp (:,:,iblk), & @@ -542,8 +549,8 @@ subroutine implicit_solver (dt) !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks call calc_seabed_stress (nx_block , ny_block , & - icellu(iblk) , & - indxui (:,iblk), indxuj (:,iblk), & + icellU(iblk) , & + indxUi (:,iblk), indxUj (:,iblk), & uvel (:,:,iblk), vvel (:,:,iblk), & Cb (:,:,iblk), & taubxU (:,:,iblk), taubyU (:,:,iblk)) @@ -628,8 +635,8 @@ subroutine implicit_solver (dt) call dyn_finish & (nx_block, ny_block, & - icellu (iblk), Cdn_ocn (:,:,iblk), & - indxui (:,iblk), indxuj (:,iblk), & + icellU (iblk), Cdn_ocnU(:,:,iblk), & + indxUi (:,iblk), indxUj (:,iblk), & uvel (:,:,iblk), vvel (:,:,iblk), & uocnU (:,:,iblk), vocnU (:,:,iblk), & aiU (:,:,iblk), fmU (:,:,iblk), & @@ -640,27 +647,6 @@ subroutine implicit_solver (dt) enddo !$OMP END PARALLEL DO - ! strocn computed on U, N, E as needed. Map strocn U divided by aiU to T - ! TODO: This should be done elsewhere as part of generalization? - ! conservation requires aiU be divided before averaging - work1 = c0 - work2 = c0 - !$OMP PARALLEL DO PRIVATE(iblk,ij,i,j) - do iblk = 1, nblocks - do ij = 1, icellu(iblk) - i = indxui(ij,iblk) - j = indxuj(ij,iblk) - work1(i,j,iblk) = strocnxU(i,j,iblk)/aiU(i,j,iblk) - work2(i,j,iblk) = strocnyU(i,j,iblk)/aiU(i,j,iblk) - enddo - enddo - call ice_HaloUpdate (work1, halo_info, & - field_loc_NEcorner, field_type_vector) - call ice_HaloUpdate (work2, halo_info, & - field_loc_NEcorner, field_type_vector) - call grid_average_X2Y('F',work1,'U',strocnxT,'T') ! shift - call grid_average_X2Y('F',work2,'U',strocnyT,'T') - ! shift velocity components from CD grid locations (N, E) to B grid location (U) for transport ! commented out in order to focus on EVP for now within the cdgrid ! should be used when routine is ready @@ -685,9 +671,9 @@ end subroutine implicit_solver ! H. F. Walker, “Anderson Acceleration: Algorithms and Implementations” ! [Online]. 
Available: https://users.wpi.edu/~walker/Papers/anderson_accn_algs_imps.pdf - subroutine anderson_solver (icellt , icellu , & - indxti , indxtj , & - indxui , indxuj , & + subroutine anderson_solver (icellT , icellU , & + indxTi , indxTj , & + indxUi , indxUj , & aiU , ntot , & uocn , vocn , & waterxU , wateryU, & @@ -695,10 +681,9 @@ subroutine anderson_solver (icellt , icellu , & umassdti, sol , & fpresx , fpresy , & zetax2 , etax2 , & - rep_prs , & + rep_prs , cdn_ocn, & Cb, halo_info_mask) - use ice_arrays_column, only: Cdn_ocn use ice_blocks, only: nx_block, ny_block use ice_boundary, only: ice_HaloUpdate use ice_constants, only: c1 @@ -715,19 +700,20 @@ subroutine anderson_solver (icellt , icellu , & ntot ! size of problem for Anderson integer (kind=int_kind), dimension(max_blocks), intent(in) :: & - icellt , & ! no. of cells where icetmask = 1 - icellu ! no. of cells where iceumask = 1 + icellT , & ! no. of cells where iceTmask = .true. + icellU ! no. of cells where iceUmask = .true. integer (kind=int_kind), dimension (nx_block*ny_block, max_blocks), intent(in) :: & - indxti , & ! compressed index in i-direction - indxtj , & ! compressed index in j-direction - indxui , & ! compressed index in i-direction - indxuj ! compressed index in j-direction + indxTi , & ! compressed index in i-direction + indxTj , & ! compressed index in j-direction + indxUi , & ! compressed index in i-direction + indxUj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks), intent(in) :: & aiU , & ! ice fraction on u-grid uocn , & ! i ocean current (m/s) vocn , & ! j ocean current (m/s) + cdn_ocn , & ! ocn drag coefficient waterxU , & ! for ocean stress calculation, x (m/s) wateryU , & ! for ocean stress calculation, y (m/s) bxfix , & ! part of bx that is constant during Picard @@ -783,9 +769,6 @@ subroutine anderson_solver (icellt , icellu , & stress_Pr, & ! x,y-derivatives of the replacement pressure diag_rheo ! contributions of the rhelogy term to the diagonal - real (kind=dbl_kind), dimension (max_blocks) :: & - L2norm ! array used to compute l^2 norm of grid function - real (kind=dbl_kind), dimension (ntot) :: & res , & ! current residual res_old , & ! previous residual @@ -821,7 +804,6 @@ subroutine anderson_solver (icellt , icellu , & ! Initialization res_num = 0 - L2norm = c0 !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks @@ -850,8 +832,8 @@ subroutine anderson_solver (icellt , icellu , & vprev_k(:,:,iblk) = vvel(:,:,iblk) call calc_zeta_dPr (nx_block , ny_block , & - icellt (iblk), & - indxti (:,iblk), indxtj (:,iblk), & + icellT (iblk), & + indxTi (:,iblk), indxTj (:,iblk), & uprev_k (:,:,iblk), vprev_k (:,:,iblk), & dxT (:,:,iblk), dyT (:,:,iblk), & dxhy (:,:,iblk), dyhx (:,:,iblk), & @@ -862,8 +844,8 @@ subroutine anderson_solver (icellt , icellu , & rep_prs(:,:,iblk,:), stress_Pr (:,:,:)) call calc_vrel_Cb (nx_block , ny_block , & - icellu (iblk), Cdn_ocn (:,:,iblk), & - indxui (:,iblk), indxuj (:,iblk), & + icellU (iblk), Cdn_ocn (:,:,iblk), & + indxUi (:,iblk), indxUj (:,iblk), & aiU (:,:,iblk), TbU (:,:,iblk), & uocn (:,:,iblk), vocn (:,:,iblk), & ulin (:,:,iblk), vlin (:,:,iblk), & @@ -871,8 +853,8 @@ subroutine anderson_solver (icellt , icellu , & ! 
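! Outline of the fixed-point iteration this solver performs (illustrative
! pseudo-Fortran; one g() evaluation stands for the linearize-and-solve
! update assembled below):
!
!    do it_nl = 1, maxits_nonlin
!       uprev_k = uvel ; vprev_k = vvel
!       ! (uvel,vvel) <- g(uprev_k,vprev_k) via preconditioned FGMRES
!       fpresx = uvel - uprev_k          ! fixed point residual
!       fpresy = vvel - vprev_k
!       ! stop when the residual norm is small enough
!    enddo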
prepare b vector (RHS) call calc_bvec (nx_block , ny_block , & - icellu (iblk), & - indxui (:,iblk), indxuj (:,iblk), & + icellU (iblk), & + indxUi (:,iblk), indxUj (:,iblk), & stress_Pr (:,:,:), uarear (:,:,iblk), & waterxU (:,:,iblk), wateryU (:,:,iblk), & bxfix (:,:,iblk), byfix (:,:,iblk), & @@ -881,9 +863,9 @@ subroutine anderson_solver (icellt , icellu , & ! Compute nonlinear residual norm (PDE residual) call matvec (nx_block , ny_block , & - icellu (iblk) , icellt (iblk), & - indxui (:,iblk) , indxuj (:,iblk), & - indxti (:,iblk) , indxtj (:,iblk), & + icellU (iblk) , icellT (iblk), & + indxUi (:,iblk) , indxUj (:,iblk), & + indxTi (:,iblk) , indxTj (:,iblk), & dxT (:,:,iblk) , dyT (:,:,iblk), & dxhy (:,:,iblk) , dyhx (:,:,iblk), & cxp (:,:,iblk) , cyp (:,:,iblk), & @@ -895,15 +877,17 @@ subroutine anderson_solver (icellt , icellu , & uarear (:,:,iblk) , & Au (:,:,iblk) , Av (:,:,iblk)) call residual_vec (nx_block , ny_block , & - icellu (iblk), & - indxui (:,iblk), indxuj (:,iblk), & + icellU (iblk), & + indxUi (:,iblk), indxUj (:,iblk), & bx (:,:,iblk), by (:,:,iblk), & Au (:,:,iblk), Av (:,:,iblk), & - Fx (:,:,iblk), Fy (:,:,iblk), & - L2norm (iblk)) + Fx (:,:,iblk), Fy (:,:,iblk)) enddo !$OMP END PARALLEL DO - nlres_norm = sqrt(global_sum(sum(L2norm), distrb_info)) + nlres_norm = global_norm(nx_block, ny_block, & + icellU , & + indxUi , indxUj , & + Fx , Fy ) if (my_task == master_task .and. monitor_nonlin) then write(nu_diag, '(a,i4,a,d26.16)') "monitor_nonlin: iter_nonlin= ", it_nl, & " nonlin_res_L2norm= ", nlres_norm @@ -923,8 +907,8 @@ subroutine anderson_solver (icellt , icellu , & soly = vprev_k call arrays_to_vec (nx_block , ny_block , & nblocks , max_blocks , & - icellu (:), ntot , & - indxui (:,:), indxuj (:,:), & + icellU (:), ntot , & + indxUi (:,:), indxUj (:,:), & uprev_k (:,:,:), vprev_k (:,:,:), & sol (:)) @@ -938,8 +922,8 @@ subroutine anderson_solver (icellt , icellu , & do iblk = 1, nblocks ! first compute diagonal contributions due to rheology term call formDiag_step1 (nx_block , ny_block , & - icellu (iblk) , & - indxui (:,iblk) , indxuj(:,iblk), & + icellU (iblk) , & + indxUi (:,iblk) , indxUj(:,iblk), & dxT (:,:,iblk) , dyT (:,:,iblk), & dxhy (:,:,iblk) , dyhx(:,:,iblk), & cxp (:,:,iblk) , cyp (:,:,iblk), & @@ -948,8 +932,8 @@ subroutine anderson_solver (icellt , icellu , & diag_rheo(:,:,:)) ! second compute the full diagonal call formDiag_step2 (nx_block , ny_block , & - icellu (iblk), & - indxui (:,iblk), indxuj (:,iblk), & + icellU (iblk), & + indxUi (:,iblk), indxUj (:,iblk), & diag_rheo (:,:,:), vrel (:,:,iblk), & umassdti (:,:,iblk), & uarear (:,:,iblk), Cb (:,:,iblk), & @@ -972,8 +956,8 @@ subroutine anderson_solver (icellt , icellu , & ! 
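! Layout of the 1D vectors handed to the linear and Anderson solvers: the
! u and v components of each active U point are interleaved, which is why
! ntot is twice the total icellU count. Sketch of the packing done by
! arrays_to_vec, per block:
!
!    tot = 0
!    do ij = 1, icellU(iblk)
!       i = indxUi(ij, iblk)
!       j = indxUj(ij, iblk)
!       tot = tot + 1 ; outvec(tot) = uvel(i,j,iblk)
!       tot = tot + 1 ; outvec(tot) = vvel(i,j,iblk)
!    enddo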
Put FGMRES solution solx,soly in fpfunc vector (needed for Anderson) call arrays_to_vec (nx_block , ny_block , & nblocks , max_blocks , & - icellu (:), ntot , & - indxui (:,:), indxuj (:,:), & + icellU (:), ntot , & + indxUi (:,:), indxUj (:,:), & solx (:,:,:), soly (:,:,:), & fpfunc (:)) elseif (fpfunc_andacc == 2) then @@ -989,20 +973,14 @@ subroutine anderson_solver (icellt , icellu , & #else call vec_to_arrays (nx_block , ny_block , & nblocks , max_blocks , & - icellu (:), ntot , & - indxui (:,:), indxuj(:,:) , & + icellU (:), ntot , & + indxUi (:,:), indxUj(:,:) , & res (:), & fpresx (:,:,:), fpresy (:,:,:)) - !$OMP PARALLEL DO PRIVATE(iblk) - do iblk = 1, nblocks - call calc_L2norm_squared (nx_block , ny_block , & - icellu (iblk), & - indxui (:,iblk), indxuj (:,iblk), & - fpresx(:,:,iblk), fpresy(:,:,iblk), & - L2norm (iblk)) - enddo - !$OMP END PARALLEL DO - fpres_norm = sqrt(global_sum(sum(L2norm), distrb_info)) + fpres_norm = global_norm(nx_block, ny_block, & + icellU , & + indxUi , indxUj , & + fpresx , fpresy ) #endif if (my_task == master_task .and. monitor_nonlin) then write(nu_diag, '(a,i4,a,d26.16)') "monitor_nonlin: iter_nonlin= ", it_nl, & @@ -1108,8 +1086,8 @@ subroutine anderson_solver (icellt , icellu , & !----------------------------------------------------------------------- call vec_to_arrays (nx_block , ny_block , & nblocks , max_blocks , & - icellu (:), ntot , & - indxui (:,:), indxuj (:,:), & + icellU (:), ntot , & + indxUi (:,:), indxUj (:,:), & sol (:), & uvel (:,:,:), vvel (:,:,:)) @@ -1131,14 +1109,12 @@ subroutine anderson_solver (icellt , icellu , & do iblk = 1, nblocks fpresx(:,:,iblk) = uvel(:,:,iblk) - uprev_k(:,:,iblk) fpresy(:,:,iblk) = vvel(:,:,iblk) - vprev_k(:,:,iblk) - call calc_L2norm_squared (nx_block , ny_block , & - icellu (iblk), & - indxui (:,iblk), indxuj (:,iblk), & - fpresx(:,:,iblk), fpresy(:,:,iblk), & - L2norm (iblk)) enddo !$OMP END PARALLEL DO - prog_norm = sqrt(global_sum(sum(L2norm), distrb_info)) + prog_norm = global_norm(nx_block, ny_block, & + icellU , & + indxUi , indxUj , & + fpresx , fpresy ) if (my_task == master_task .and. monitor_nonlin) then write(nu_diag, '(a,i4,a,d26.16)') "monitor_nonlin: iter_nonlin= ", it_nl, & " progress_res_L2norm= ", prog_norm @@ -1153,8 +1129,8 @@ end subroutine anderson_solver ! Computes the viscosities and dPr/dx, dPr/dy subroutine calc_zeta_dPr (nx_block, ny_block, & - icellt , & - indxti , indxtj , & + icellT , & + indxTi , indxTj , & uvel , vvel , & dxT , dyT , & dxhy , dyhx , & @@ -1169,11 +1145,11 @@ subroutine calc_zeta_dPr (nx_block, ny_block, & integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellt ! no. of cells where icetmask = 1 + icellT ! no. of cells where iceTmask = .true. integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & - indxti , & ! compressed index in i-direction - indxtj ! compressed index in j-direction + indxTi , & ! compressed index in i-direction + indxTj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & strength , & ! ice strength (N/m) @@ -1215,14 +1191,14 @@ subroutine calc_zeta_dPr (nx_block, ny_block, & character(len=*), parameter :: subname = '(calc_zeta_dPr)' ! Initialize stPr, zetax2 and etax2 to zero - ! (for cells where icetmask is false) + ! 
(for cells where iceTmask is false) stPr = c0 zetax2 = c0 etax2 = c0 - do ij = 1, icellt - i = indxti(ij) - j = indxtj(ij) + do ij = 1, icellT + i = indxTi(ij) + j = indxTj(ij) !----------------------------------------------------------------- ! strain rates @@ -1348,8 +1324,8 @@ end subroutine calc_zeta_dPr ! viscous-plastic sea ice stresses, Geosci. Model Dev., 13, 1763–1769, subroutine stress_vp (nx_block , ny_block , & - icellt , & - indxti , indxtj , & + icellT , & + indxTi , indxTj , & uvel , vvel , & dxT , dyT , & cxp , cyp , & @@ -1367,11 +1343,11 @@ subroutine stress_vp (nx_block , ny_block , & integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellt ! no. of cells where icetmask = 1 + icellT ! no. of cells where iceTmask = .true. integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & - indxti , & ! compressed index in i-direction - indxtj ! compressed index in j-direction + indxTi , & ! compressed index in i-direction + indxTj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & uvel , & ! x-component of velocity (m/s) @@ -1406,9 +1382,9 @@ subroutine stress_vp (nx_block , ny_block , & character(len=*), parameter :: subname = '(stress_vp)' - do ij = 1, icellt - i = indxti(ij) - j = indxtj(ij) + do ij = 1, icellT + i = indxTi(ij) + j = indxTj(ij) !----------------------------------------------------------------- ! strain rates @@ -1458,8 +1434,8 @@ end subroutine stress_vp ! Compute vrel and seabed stress coefficients subroutine calc_vrel_Cb (nx_block, ny_block, & - icellu , Cw , & - indxui , indxuj , & + icellU , Cw , & + indxUi , indxUj , & aiU , TbU , & uocn , vocn , & uvel , vvel , & @@ -1469,11 +1445,11 @@ subroutine calc_vrel_Cb (nx_block, ny_block, & integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellu ! total count when iceumask is true + icellU ! total count when iceUmask = .true. integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & - indxui , & ! compressed index in i-direction - indxuj ! compressed index in j-direction + indxUi , & ! compressed index in i-direction + indxUj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & TbU, & ! seabed stress factor (N/m^2) @@ -1505,9 +1481,9 @@ subroutine calc_vrel_Cb (nx_block, ny_block, & if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - do ij = 1, icellu - i = indxui(ij) - j = indxuj(ij) + do ij = 1, icellU + i = indxUi(ij) + j = indxUj(ij) ! (magnitude of relative ocean current)*rhow*drag*aice vrel(i,j) = aiU(i,j)*rhow*Cw(i,j)*sqrt((uocn(i,j) - uvel(i,j))**2 + & @@ -1523,19 +1499,19 @@ end subroutine calc_vrel_Cb ! Compute seabed stress (diagnostic) subroutine calc_seabed_stress (nx_block, ny_block, & - icellu , & - indxui , indxuj , & + icellU , & + indxUi , indxUj , & uvel , vvel , & Cb , & taubxU , taubyU) integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellu ! total count when iceumask is true + icellU ! total count when iceUmask = .true. integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & - indxui , & ! compressed index in i-direction - indxuj ! compressed index in j-direction + indxUi , & ! compressed index in i-direction + indxUj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & uvel , & ! 
x-component of velocity (m/s) @@ -1553,9 +1529,9 @@ subroutine calc_seabed_stress (nx_block, ny_block, & character(len=*), parameter :: subname = '(calc_seabed_stress)' - do ij = 1, icellu - i = indxui(ij) - j = indxuj(ij) + do ij = 1, icellU + i = indxUi(ij) + j = indxUj(ij) taubxU(i,j) = -uvel(i,j)*Cb(i,j) taubyU(i,j) = -vvel(i,j)*Cb(i,j) @@ -1570,9 +1546,9 @@ end subroutine calc_seabed_stress ! Av = A(u,v)_[y] * vvel (y components of A(u,v) * (u,v)) subroutine matvec (nx_block, ny_block, & - icellu , icellt , & - indxui , indxuj , & - indxti , indxtj , & + icellU , icellT , & + indxUi , indxUj , & + indxTi , indxTj , & dxT , dyT , & dxhy , dyhx , & cxp , cyp , & @@ -1588,14 +1564,14 @@ subroutine matvec (nx_block, ny_block, & integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellu, & ! total count when iceumask is true - icellt ! no. of cells where icetmask = 1 + icellU, & ! total count when iceUmask = .true. + icellT ! total count when iceTmask = .true. integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & - indxui , & ! compressed index in i-direction - indxuj , & ! compressed index in j-direction - indxti , & ! compressed index in i-direction - indxtj ! compressed index in j-direction + indxUi , & ! compressed index in i-direction + indxUj , & ! compressed index in j-direction + indxTi , & ! compressed index in i-direction + indxTj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & dxT , & ! width of T-cell through the middle (m) @@ -1664,9 +1640,9 @@ subroutine matvec (nx_block, ny_block, & str(:,:,:) = c0 - do ij = 1, icellt - i = indxti(ij) - j = indxtj(ij) + do ij = 1, icellT + i = indxTi(ij) + j = indxTj(ij) !----------------------------------------------------------------- ! strain rates @@ -1807,15 +1783,15 @@ subroutine matvec (nx_block, ny_block, & str(i,j,8) = strp_tmp - strm_tmp + str12sn & - dyhx(i,j)*(csigpsw + csigmsw) + dxhy(i,j)*csig12sw - enddo ! ij - icellt + enddo ! ij - icellT !----------------------------------------------------------------- ! Form Au and Av !----------------------------------------------------------------- - do ij = 1, icellu - i = indxui(ij) - j = indxuj(ij) + do ij = 1, icellU + i = indxUi(ij) + j = indxUj(ij) ccaimp = umassdti(i,j) + vrel(i,j) * cosw + Cb(i,j) ! kg/m^2 s @@ -1829,7 +1805,7 @@ subroutine matvec (nx_block, ny_block, & Au(i,j) = ccaimp*uvel(i,j) - ccb*vvel(i,j) - strintx Av(i,j) = ccaimp*vvel(i,j) + ccb*uvel(i,j) - strinty - enddo ! ij - icellu + enddo ! ij - icellU end subroutine matvec @@ -1839,8 +1815,8 @@ end subroutine matvec ! does not depend on (u,v) and thus do not change during the nonlinear iteration subroutine calc_bfix (nx_block , ny_block , & - icellu , & - indxui , indxuj , & + icellU , & + indxUi , indxUj , & umassdti , & forcexU , forceyU , & uvel_init, vvel_init, & @@ -1848,11 +1824,11 @@ subroutine calc_bfix (nx_block , ny_block , & integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellu ! no. of cells where iceumask = 1 + icellU ! no. of cells where iceUmask = .true. integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & - indxui , & ! compressed index in i-direction - indxuj ! compressed index in j-direction + indxUi , & ! compressed index in i-direction + indxUj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & uvel_init,& ! 
x-component of velocity (m/s), beginning of time step @@ -1872,9 +1848,9 @@ subroutine calc_bfix (nx_block , ny_block , & character(len=*), parameter :: subname = '(calc_bfix)' - do ij = 1, icellu - i = indxui(ij) - j = indxuj(ij) + do ij = 1, icellU + i = indxUi(ij) + j = indxUj(ij) bxfix(i,j) = umassdti(i,j)*uvel_init(i,j) + forcexU(i,j) byfix(i,j) = umassdti(i,j)*vvel_init(i,j) + forceyU(i,j) @@ -1889,8 +1865,8 @@ end subroutine calc_bfix ! depending on (u,v) subroutine calc_bvec (nx_block, ny_block, & - icellu , & - indxui , indxuj , & + icellU , & + indxUi , indxUj , & stPr , uarear , & waterxU , wateryU , & bxfix , byfix , & @@ -1899,11 +1875,11 @@ subroutine calc_bvec (nx_block, ny_block, & integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellu ! total count when iceumask is true + icellU ! total count when iceUmask = .true. integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & - indxui , & ! compressed index in i-direction - indxuj ! compressed index in j-direction + indxUi , & ! compressed index in i-direction + indxUj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & uarear , & ! 1/uarea @@ -1941,9 +1917,9 @@ subroutine calc_bvec (nx_block, ny_block, & if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - do ij = 1, icellu - i = indxui(ij) - j = indxuj(ij) + do ij = 1, icellU + i = indxUi(ij) + j = indxUj(ij) ! ice/ocean stress taux = vrel(i,j)*waterxU(i,j) ! NOTE this is not the entire @@ -1969,20 +1945,19 @@ end subroutine calc_bvec ! Av = A(u,v)_[y] * vvel (y components of A(u,v) * (u,v)) subroutine residual_vec (nx_block , ny_block, & - icellu , & - indxui , indxuj , & + icellU , & + indxUi , indxUj , & bx , by , & Au , Av , & - Fx , Fy , & - sum_squared) + Fx , Fy ) integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellu ! total count when iceumask is true + icellU ! total count when iceUmask = .true. integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & - indxui , & ! compressed index in i-direction - indxuj ! compressed index in j-direction + indxUi , & ! compressed index in i-direction + indxUj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & bx , & ! b vector, bx = taux + bxfix (N/m^2) @@ -1994,9 +1969,6 @@ subroutine residual_vec (nx_block , ny_block, & Fx , & ! x residual vector, Fx = bx - Au (N/m^2) Fy ! y residual vector, Fy = by - Av (N/m^2) - real (kind=dbl_kind), intent(out), optional :: & - sum_squared ! sum of squared residual vector components - ! local variables integer (kind=int_kind) :: & @@ -2005,22 +1977,15 @@ subroutine residual_vec (nx_block , ny_block, & character(len=*), parameter :: subname = '(residual_vec)' !----------------------------------------------------------------- - ! compute residual and sum its squared components + ! compute residual !----------------------------------------------------------------- - if (present(sum_squared)) then - sum_squared = c0 - endif - - do ij = 1, icellu - i = indxui(ij) - j = indxuj(ij) + do ij = 1, icellU + i = indxUi(ij) + j = indxUj(ij) Fx(i,j) = bx(i,j) - Au(i,j) Fy(i,j) = by(i,j) - Av(i,j) - if (present(sum_squared)) then - sum_squared = sum_squared + Fx(i,j)**2 + Fy(i,j)**2 - endif enddo ! ij end subroutine residual_vec @@ -2031,8 +1996,8 @@ end subroutine residual_vec ! 
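! The residual assembled by residual_vec above is, per active U point,
!
!    Fx(i,j) = bx(i,j) - Au(i,j)
!    Fy(i,j) = by(i,j) - Av(i,j)
!
! and its global L2 norm, computed with global_norm, drives the nonlinear
! stopping test. The two formDiag routines that follow extract the diagonal
! of the same operator A for use in the preconditioner.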
Part 1: compute the contributions to the diagonal from the rheology term subroutine formDiag_step1 (nx_block, ny_block, & - icellu , & - indxui , indxuj , & + icellU , & + indxUi , indxUj , & dxT , dyT , & dxhy , dyhx , & cxp , cyp , & @@ -2042,11 +2007,11 @@ subroutine formDiag_step1 (nx_block, ny_block, & integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellu ! no. of cells where icetmask = 1 + icellU ! no. of cells where iceUmask = .true. integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & - indxui , & ! compressed index in i-direction - indxuj ! compressed index in j-direction + indxUi , & ! compressed index in i-direction + indxUj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & dxT , & ! width of T-cell through the middle (m) @@ -2202,10 +2167,10 @@ subroutine formDiag_step1 (nx_block, ny_block, & dj = 1 endif - do ij = 1, icellu + do ij = 1, icellU - iu = indxui(ij) - ju = indxuj(ij) + iu = indxUi(ij) + ju = indxUj(ij) i = iu + di j = ju + dj @@ -2406,8 +2371,8 @@ end subroutine formDiag_step1 ! Part 2: compute diagonal subroutine formDiag_step2 (nx_block, ny_block, & - icellu , & - indxui , indxuj , & + icellU , & + indxUi , indxUj , & Drheo , vrel , & umassdti, & uarear , Cb , & @@ -2415,11 +2380,11 @@ subroutine formDiag_step2 (nx_block, ny_block, & integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions - icellu ! total count when iceumask is true + icellU ! total count when iceUmask = .true. integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & - indxui , & ! compressed index in i-direction - indxuj ! compressed index in j-direction + indxUi , & ! compressed index in i-direction + indxUj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & vrel, & ! coefficient for tauw @@ -2465,9 +2430,9 @@ subroutine formDiag_step2 (nx_block, ny_block, & ! Drheo(i,j,7) corresponds to str(i+1,j,7) ! Drheo(i,j,8) corresponds to str(i+1,j+1,8)) - do ij = 1, icellu - i = indxui(ij) - j = indxuj(ij) + do ij = 1, icellU + i = indxUi(ij) + j = indxUj(ij) ccaimp = umassdti(i,j) + vrel(i,j) * cosw + Cb(i,j) ! kg/m^2 s @@ -2484,50 +2449,121 @@ end subroutine formDiag_step2 !======================================================================= -! Compute squared l^2 norm of a grid function (tpu,tpv) +! Compute global l^2 norm of a vector field (field_x, field_y) - subroutine calc_L2norm_squared (nx_block, ny_block, & - icellu , & - indxui , indxuj , & - tpu , tpv , & - L2norm) + function global_norm (nx_block, ny_block, & + icellU , & + indxUi , indxUj , & + field_x , field_y ) & + result(norm) + + use ice_domain, only: distrb_info + use ice_domain_size, only: max_blocks integer (kind=int_kind), intent(in) :: & - nx_block, ny_block, & ! block dimensions - icellu ! total count when iceumask is true + nx_block, ny_block ! block dimensions - integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & - indxui , & ! compressed index in i-direction - indxuj ! compressed index in j-direction + integer (kind=int_kind), dimension (max_blocks), intent(in) :: & + icellU ! total count when iceumask is true - real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & - tpu , & ! x-component of vector grid function - tpv ! y-component of vector grid function + integer (kind=int_kind), dimension (nx_block*ny_block,max_blocks), intent(in) :: & + indxUi , & ! 
compressed index in i-direction
+         indxUj       ! compressed index in j-direction
+
+      real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks), intent(in) :: &
+         field_x  , & ! x-component of vector field
+         field_y      ! y-component of vector field
 
-      real (kind=dbl_kind), intent(out) :: &
-         L2norm   ! squared l^2 norm of vector grid function (tpu,tpv)
+      real (kind=dbl_kind) :: &
+         norm     ! l^2 norm of vector field
 
      ! local variables
 
-      integer (kind=int_kind) :: &
-         i, j, ij
-
-      character(len=*), parameter :: subname = '(calc_L2norm_squared)'
+      character(len=*), parameter :: subname = '(global_norm)'
 
-      !-----------------------------------------------------------------
-      ! compute squared l^2 norm of vector grid function (tpu,tpv)
-      !-----------------------------------------------------------------
-
-      L2norm = c0
+      norm = sqrt(global_dot_product (nx_block , ny_block , &
+                                      icellU   , &
+                                      indxUi   , indxUj   , &
+                                      field_x  , field_y  , &
+                                      field_x  , field_y  ))
 
-      do ij = 1, icellu
-         i = indxui(ij)
-         j = indxuj(ij)
+   end function global_norm
 
-         L2norm = L2norm + tpu(i,j)**2 + tpv(i,j)**2
-      enddo ! ij
+!=======================================================================
+
+! Compute global dot product of two grid vectors, each split into X and Y components
+
+   function global_dot_product (nx_block , ny_block , &
+                                icellU   , &
+                                indxUi   , indxUj   , &
+                                vector1_x, vector1_y, &
+                                vector2_x, vector2_y) &
+            result(dot_product)
+
+      use ice_domain, only: distrb_info
+      use ice_domain_size, only: max_blocks
+      use ice_fileunits, only: bfbflag
+
+      integer (kind=int_kind), intent(in) :: &
+         nx_block, ny_block ! block dimensions
+
+      integer (kind=int_kind), dimension (max_blocks), intent(in) :: &
+         icellU       ! total count when iceUmask = .true.
+
+      integer (kind=int_kind), dimension (nx_block*ny_block,max_blocks), intent(in) :: &
+         indxUi   , & ! compressed index in i-direction
+         indxUj       ! compressed index in j-direction
+
+      real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks), intent(in) :: &
+         vector1_x, & ! x-component of first vector
+         vector1_y, & ! y-component of first vector
+         vector2_x, & ! x-component of second vector
+         vector2_y    ! y-component of second vector
+
+      real (kind=dbl_kind) :: &
+         dot_product  ! dot product of the two vector fields
+
+      ! local variables
+
+      integer (kind=int_kind) :: &
+         i, j, ij, iblk
+
+      real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: &
+         prod         ! temporary array of pointwise products
+
+      real (kind=dbl_kind), dimension(max_blocks) :: &
+         dot          ! per-block partial sums
+
+      character(len=*), parameter :: subname = '(global_dot_product)'
+
+      prod = c0
+      dot  = c0
+
+      !$OMP PARALLEL DO PRIVATE(i, j, ij, iblk)
+      do iblk = 1, nblocks
+         do ij = 1, icellU(iblk)
+            i = indxUi(ij, iblk)
+            j = indxUj(ij, iblk)
+            prod(i,j,iblk) = vector1_x(i,j,iblk)*vector2_x(i,j,iblk) + vector1_y(i,j,iblk)*vector2_y(i,j,iblk)
+            dot(iblk) = dot(iblk) + prod(i,j,iblk)
+         enddo ! ij
+      enddo
+      !$OMP END PARALLEL DO
+
+      ! 
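! How the norm helpers compose, e.g. for the nonlinear residual norm used
! by the Picard solver:
!
!    nlres_norm = global_norm(nx_block, ny_block, icellU, &
!                             indxUi, indxUj, Fx, Fy)
!
! i.e. the square root of the dot product of (Fx,Fy) with itself, as
! evaluated below.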
Use local summation result unless bfbflag is active + if (bfbflag == 'off') then + dot_product = global_sum(sum(dot), distrb_info) + else + dot_product = global_sum(prod, distrb_info, field_loc_NEcorner) + endif - end subroutine calc_L2norm_squared + end function global_dot_product !======================================================================= @@ -2535,8 +2571,8 @@ end subroutine calc_L2norm_squared subroutine arrays_to_vec (nx_block, ny_block , & nblocks , max_blocks, & - icellu , ntot , & - indxui , indxuj , & + icellU , ntot , & + indxUi , indxUj , & tpu , tpv , & outvec) @@ -2547,11 +2583,11 @@ subroutine arrays_to_vec (nx_block, ny_block , & ntot ! size of problem for Anderson integer (kind=int_kind), dimension (max_blocks), intent(in) :: & - icellu + icellU integer (kind=int_kind), dimension (nx_block*ny_block, max_blocks), intent(in) :: & - indxui , & ! compressed index in i-direction - indxuj ! compressed index in j-direction + indxUi , & ! compressed index in i-direction + indxUj ! compressed index in j-direction real (kind=dbl_kind), dimension (nx_block,ny_block, max_blocks), intent(in) :: & tpu , & ! x-component of vector @@ -2575,9 +2611,9 @@ subroutine arrays_to_vec (nx_block, ny_block , & tot = 0 do iblk = 1, nblocks - do ij = 1, icellu(iblk) - i = indxui(ij, iblk) - j = indxuj(ij, iblk) + do ij = 1, icellU(iblk) + i = indxUi(ij, iblk) + j = indxUj(ij, iblk) tot = tot + 1 outvec(tot) = tpu(i, j, iblk) tot = tot + 1 @@ -2593,8 +2629,8 @@ end subroutine arrays_to_vec subroutine vec_to_arrays (nx_block, ny_block , & nblocks , max_blocks, & - icellu , ntot , & - indxui , indxuj , & + icellU , ntot , & + indxUi , indxUj , & invec , & tpu , tpv) @@ -2605,11 +2641,11 @@ subroutine vec_to_arrays (nx_block, ny_block , & ntot ! size of problem for Anderson integer (kind=int_kind), dimension (max_blocks), intent(in) :: & - icellu + icellU integer (kind=int_kind), dimension (nx_block*ny_block, max_blocks), intent(in) :: & - indxui , & ! compressed index in i-direction - indxuj ! compressed index in j-direction + indxUi , & ! compressed index in i-direction + indxUj ! compressed index in j-direction real (kind=dbl_kind), dimension (ntot), intent(in) :: & invec ! input 1D vector @@ -2634,9 +2670,9 @@ subroutine vec_to_arrays (nx_block, ny_block , & tot = 0 do iblk = 1, nblocks - do ij = 1, icellu(iblk) - i = indxui(ij, iblk) - j = indxuj(ij, iblk) + do ij = 1, icellU(iblk) + i = indxUi(ij, iblk) + j = indxUj(ij, iblk) tot = tot + 1 tpu(i, j, iblk) = invec(tot) tot = tot + 1 @@ -2780,6 +2816,7 @@ subroutine fgmres (zetax2 , etax2 , & real (kind=dbl_kind) :: & norm_residual , & ! current L^2 norm of residual vector inverse_norm , & ! inverse of the norm of a vector + norm_rhs , & ! L^2 norm of right-hand-side vector nu, t ! local temporary values integer (kind=int_kind) :: & @@ -2819,14 +2856,25 @@ subroutine fgmres (zetax2 , etax2 , & arnoldi_basis_x = c0 arnoldi_basis_y = c0 + ! solution is zero if RHS is zero + norm_rhs = global_norm(nx_block, ny_block, & + icellU , & + indxUi , indxUj , & + bx , by ) + if (norm_rhs == c0) then + solx = bx + soly = by + return + endif + ! 
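
Why global_dot_product keeps two summation paths: floating-point addition is not associative, so accumulating one partial sum per block and combining those (fast) need not match summing the full prod array in a fixed order (what the bfbflag branch buys). A minimal standalone sketch of the effect; dbl_kind is defined locally here and the values are contrived to expose the rounding:

    program sum_order_demo
      implicit none
      integer, parameter :: dbl_kind = selected_real_kind(13)
      real (kind=dbl_kind) :: a(3), left_to_right, regrouped
      a = (/ 1.0_dbl_kind, 1.0e-16_dbl_kind, -1.0_dbl_kind /)
      left_to_right = (a(1) + a(2)) + a(3)   ! one running sum: 0.0
      regrouped     =  a(1) + (a(2) + a(3))  ! different grouping: ~1.1e-16
      print *, left_to_right, regrouped
    end program sum_order_demo

The same data summed under two groupings differs in the last bits, which is exactly the kind of difference a change in block decomposition or task count introduces unless the reproducible path is taken.
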
Residual of the initial iterate !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks call matvec (nx_block , ny_block , & - icellu (iblk) , icellt (iblk), & - indxui (:,iblk) , indxuj (:,iblk), & - indxti (:,iblk) , indxtj (:,iblk), & + icellU (iblk) , icellT (iblk), & + indxUi (:,iblk) , indxUj (:,iblk), & + indxTi (:,iblk) , indxTj (:,iblk), & dxT (:,:,iblk) , dyT (:,:,iblk), & dxhy (:,:,iblk) , dyhx (:,:,iblk), & cxp (:,:,iblk) , cyp (:,:,iblk), & @@ -2838,8 +2886,8 @@ subroutine fgmres (zetax2 , etax2 , & uarear (:,:,iblk) , & workspace_x(:,:,iblk) , workspace_y(:,:,iblk)) call residual_vec (nx_block , ny_block , & - icellu (iblk), & - indxui (:,iblk), indxuj (:,iblk), & + icellU (iblk), & + indxUi (:,iblk), indxUj (:,iblk), & bx (:,:,iblk), by (:,:,iblk), & workspace_x(:,:,iblk), workspace_y(:,:,iblk), & arnoldi_basis_x (:,:,iblk, 1), & @@ -2850,18 +2898,11 @@ subroutine fgmres (zetax2 , etax2 , & ! Start outer (restarts) loop do ! Compute norm of initial residual - !$OMP PARALLEL DO PRIVATE(iblk) - do iblk = 1, nblocks - call calc_L2norm_squared(nx_block , ny_block , & - icellu (iblk), & - indxui (:,iblk), indxuj(:,iblk), & - arnoldi_basis_x(:,:,iblk, 1) , & - arnoldi_basis_y(:,:,iblk, 1) , & - norm_squared(iblk)) - - enddo - !$OMP END PARALLEL DO - norm_residual = sqrt(global_sum(sum(norm_squared), distrb_info)) + norm_residual = global_norm(nx_block, ny_block, & + icellU , & + indxUi , indxUj , & + arnoldi_basis_x(:,:,:, 1), & + arnoldi_basis_y(:,:,:, 1)) if (my_task == master_task .and. monitor_fgmres) then write(nu_diag, '(a,i4,a,d26.16)') "monitor_fgmres: iter_fgmres= ", nbiter, & @@ -2877,9 +2918,9 @@ subroutine fgmres (zetax2 , etax2 , & inverse_norm = c1 / norm_residual !$OMP PARALLEL DO PRIVATE(iblk,ij,i,j) do iblk = 1, nblocks - do ij = 1, icellu(iblk) - i = indxui(ij, iblk) - j = indxuj(ij, iblk) + do ij = 1, icellU(iblk) + i = indxUi(ij, iblk) + j = indxUj(ij, iblk) arnoldi_basis_x(i, j, iblk, 1) = arnoldi_basis_x(i, j, iblk, 1) * inverse_norm arnoldi_basis_y(i, j, iblk, 1) = arnoldi_basis_y(i, j, iblk, 1) * inverse_norm @@ -2907,6 +2948,7 @@ subroutine fgmres (zetax2 , etax2 , & call precondition(zetax2 , etax2 , & Cb , vrel , & umassdti , & + halo_info_mask, & arnoldi_basis_x(:,:,:,initer), & arnoldi_basis_y(:,:,:,initer), & diagx , diagy , & @@ -2931,9 +2973,9 @@ subroutine fgmres (zetax2 , etax2 , & !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks call matvec (nx_block , ny_block , & - icellu (iblk) , icellt (iblk), & - indxui (:,iblk) , indxuj (:,iblk), & - indxti (:,iblk) , indxtj (:,iblk), & + icellU (iblk) , icellT (iblk), & + indxUi (:,iblk) , indxUj (:,iblk), & + indxTi (:,iblk) , indxTj (:,iblk), & dxT (:,:,iblk) , dyT (:,:,iblk), & dxhy (:,:,iblk) , dyhx (:,:,iblk), & cxp (:,:,iblk) , cyp (:,:,iblk), & @@ -2955,17 +2997,11 @@ subroutine fgmres (zetax2 , etax2 , & hessenberg) ! Compute norm of new Arnoldi vector and update Hessenberg matrix - !$OMP PARALLEL DO PRIVATE(iblk) - do iblk = 1, nblocks - call calc_L2norm_squared(nx_block , ny_block , & - icellu (iblk), & - indxui (:,iblk), indxuj(:, iblk) , & - arnoldi_basis_x(:,:,iblk, nextit), & - arnoldi_basis_y(:,:,iblk, nextit), & - norm_squared(iblk)) - enddo - !$OMP END PARALLEL DO - hessenberg(nextit,initer) = sqrt(global_sum(sum(norm_squared), distrb_info)) + hessenberg(nextit,initer) = global_norm(nx_block, ny_block, & + icellU , & + indxUi , indxUj , & + arnoldi_basis_x(:,:,:, nextit), & + arnoldi_basis_y(:,:,:, nextit)) ! Watch out for happy breakdown if (.not. 
almost_zero( hessenberg(nextit,initer) ) ) then @@ -2973,9 +3009,9 @@ subroutine fgmres (zetax2 , etax2 , & inverse_norm = c1 / hessenberg(nextit,initer) !$OMP PARALLEL DO PRIVATE(iblk,ij,i,j) do iblk = 1, nblocks - do ij = 1, icellu(iblk) - i = indxui(ij, iblk) - j = indxuj(ij, iblk) + do ij = 1, icellU(iblk) + i = indxUi(ij, iblk) + j = indxUj(ij, iblk) arnoldi_basis_x(i, j, iblk, nextit) = arnoldi_basis_x(i, j, iblk, nextit)*inverse_norm arnoldi_basis_y(i, j, iblk, nextit) = arnoldi_basis_y(i, j, iblk, nextit)*inverse_norm @@ -3039,9 +3075,9 @@ subroutine fgmres (zetax2 , etax2 , & t = rhs_hess(it) !$OMP PARALLEL DO PRIVATE(iblk,ij,i,j) do iblk = 1, nblocks - do ij = 1, icellu(iblk) - i = indxui(ij, iblk) - j = indxuj(ij, iblk) + do ij = 1, icellU(iblk) + i = indxUi(ij, iblk) + j = indxUj(ij, iblk) solx(i, j, iblk) = solx(i, j, iblk) + t * orig_basis_x(i, j, iblk, it) soly(i, j, iblk) = soly(i, j, iblk) + t * orig_basis_y(i, j, iblk, it) @@ -3083,9 +3119,9 @@ subroutine fgmres (zetax2 , etax2 , & do it = 1, nextit !$OMP PARALLEL DO PRIVATE(iblk,ij,i,j) do iblk = 1, nblocks - do ij = 1, icellu(iblk) - i = indxui(ij, iblk) - j = indxuj(ij, iblk) + do ij = 1, icellU(iblk) + i = indxUi(ij, iblk) + j = indxUj(ij, iblk) workspace_x(i, j, iblk) = workspace_x(i, j, iblk) + rhs_hess(it) * arnoldi_basis_x(i, j, iblk, it) workspace_y(i, j, iblk) = workspace_x(i, j, iblk) + rhs_hess(it) * arnoldi_basis_y(i, j, iblk, it) @@ -3109,6 +3145,7 @@ end subroutine fgmres subroutine pgmres (zetax2 , etax2 , & Cb , vrel , & umassdti , & + halo_info_mask, & bx , by , & diagx , diagy , & tolerance, maxinner, & @@ -3116,6 +3153,11 @@ subroutine pgmres (zetax2 , etax2 , & solx , soly , & nbiter) + use ice_boundary, only: ice_HaloUpdate + use ice_domain, only: maskhalo_dyn, halo_info + use ice_fileunits, only: bfbflag + use ice_timers, only: ice_timer_start, ice_timer_stop, timer_bound + real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks,4), intent(in) :: & zetax2 , & ! zetax2 = 2*zeta (bulk viscosity) etax2 ! etax2 = 2*eta (shear viscosity) @@ -3125,6 +3167,9 @@ subroutine pgmres (zetax2 , etax2 , & Cb , & ! seabed stress coefficient umassdti ! mass of U-cell/dte (kg/m^2 s) + type (ice_halo), intent(in) :: & + halo_info_mask ! ghost cell update info for masked halo + real (kind=dbl_kind), dimension(nx_block, ny_block, max_blocks), intent(in) :: & bx , & ! Right hand side of the linear system (x components) by ! Right hand side of the linear system (y components) @@ -3217,9 +3262,9 @@ subroutine pgmres (zetax2 , etax2 , & !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks call matvec (nx_block , ny_block , & - icellu (iblk) , icellt (iblk), & - indxui (:,iblk) , indxuj (:,iblk), & - indxti (:,iblk) , indxtj (:,iblk), & + icellU (iblk) , icellT (iblk), & + indxUi (:,iblk) , indxUj (:,iblk), & + indxTi (:,iblk) , indxTj (:,iblk), & dxT (:,:,iblk) , dyT (:,:,iblk), & dxhy (:,:,iblk) , dyhx (:,:,iblk), & cxp (:,:,iblk) , cyp (:,:,iblk), & @@ -3231,8 +3276,8 @@ subroutine pgmres (zetax2 , etax2 , & uarear (:,:,iblk) , & workspace_x(:,:,iblk) , workspace_y(:,:,iblk)) call residual_vec (nx_block , ny_block , & - icellu (iblk), & - indxui (:,iblk), indxuj (:,iblk), & + icellU (iblk), & + indxUi (:,iblk), indxUj (:,iblk), & bx (:,:,iblk), by (:,:,iblk), & workspace_x(:,:,iblk), workspace_y(:,:,iblk), & arnoldi_basis_x (:,:,iblk, 1), & @@ -3243,18 +3288,11 @@ subroutine pgmres (zetax2 , etax2 , & ! Start outer (restarts) loop do ! 
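
On the "happy breakdown" guard above (pgmres repeats it below): if the norm of the newly generated Arnoldi vector is essentially zero, the Krylov subspace is exhausted and dividing by that norm would inject Inf/NaN into the basis, so the normalization must be skipped. A standalone sketch of the guard; this almost_zero is a simple threshold stand-in, not the module's bit-level test:

    program happy_breakdown_demo
      implicit none
      integer, parameter :: dbl_kind = selected_real_kind(13)
      real (kind=dbl_kind) :: v(4), vnorm
      v = 0.0_dbl_kind                  ! degenerate new basis vector
      vnorm = sqrt(sum(v*v))
      if (.not. almost_zero(vnorm)) then
         v = v / vnorm                  ! safe to normalize
      else
         print *, 'happy breakdown: solution lies in the current subspace'
      endif
    contains
      logical function almost_zero(x)
        real (kind=dbl_kind), intent(in) :: x
        almost_zero = abs(x) < 1.0e3_dbl_kind*tiny(x)   ! illustrative threshold
      end function almost_zero
    end program happy_breakdown_demo
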
Compute norm of initial residual - !$OMP PARALLEL DO PRIVATE(iblk) - do iblk = 1, nblocks - call calc_L2norm_squared(nx_block , ny_block , & - icellu (iblk), & - indxui (:,iblk), indxuj(:, iblk), & - arnoldi_basis_x(:,:,iblk, 1), & - arnoldi_basis_y(:,:,iblk, 1), & - norm_squared(iblk)) - - enddo - !$OMP END PARALLEL DO - norm_residual = sqrt(global_sum(sum(norm_squared), distrb_info)) + norm_residual = global_norm(nx_block, ny_block, & + icellU , & + indxUi , indxUj , & + arnoldi_basis_x(:,:,:, 1), & + arnoldi_basis_y(:,:,:, 1)) if (my_task == master_task .and. monitor_pgmres) then write(nu_diag, '(a,i4,a,d26.16)') "monitor_pgmres: iter_pgmres= ", nbiter, & @@ -3270,9 +3308,9 @@ subroutine pgmres (zetax2 , etax2 , & inverse_norm = c1 / norm_residual !$OMP PARALLEL DO PRIVATE(iblk,ij,i,j) do iblk = 1, nblocks - do ij = 1, icellu(iblk) - i = indxui(ij, iblk) - j = indxuj(ij, iblk) + do ij = 1, icellU(iblk) + i = indxUi(ij, iblk) + j = indxUj(ij, iblk) arnoldi_basis_x(i, j, iblk, 1) = arnoldi_basis_x(i, j, iblk, 1) * inverse_norm arnoldi_basis_y(i, j, iblk, 1) = arnoldi_basis_y(i, j, iblk, 1) * inverse_norm @@ -3301,21 +3339,36 @@ subroutine pgmres (zetax2 , etax2 , & call precondition(zetax2 , etax2 , & Cb , vrel , & umassdti , & + halo_info_mask, & arnoldi_basis_x(:,:,:,initer), & arnoldi_basis_y(:,:,:,initer), & diagx , diagy , & precond_type, & workspace_x , workspace_y) - ! NOTE: halo updates for (workspace_x, workspace_y) - ! are skipped here for efficiency since this is just a preconditioner + ! Update workspace with boundary values + ! NOTE: skipped for efficiency since this is just a preconditioner + ! unless bfbflag is active + if (bfbflag /= 'off') then + call stack_fields(workspace_x, workspace_y, fld2) + call ice_timer_start(timer_bound) + if (maskhalo_dyn) then + call ice_HaloUpdate (fld2, halo_info_mask, & + field_loc_NEcorner, field_type_vector) + else + call ice_HaloUpdate (fld2, halo_info, & + field_loc_NEcorner, field_type_vector) + endif + call ice_timer_stop(timer_bound) + call unstack_fields(fld2, workspace_x, workspace_y) + endif !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks call matvec (nx_block , ny_block , & - icellu (iblk) , icellt (iblk), & - indxui (:,iblk) , indxuj (:,iblk), & - indxti (:,iblk) , indxtj (:,iblk), & + icellU (iblk) , icellT (iblk), & + indxUi (:,iblk) , indxUj (:,iblk), & + indxTi (:,iblk) , indxTj (:,iblk), & dxT (:,:,iblk) , dyT (:,:,iblk), & dxhy (:,:,iblk) , dyhx (:,:,iblk), & cxp (:,:,iblk) , cyp (:,:,iblk), & @@ -3337,17 +3390,11 @@ subroutine pgmres (zetax2 , etax2 , & hessenberg) ! Compute norm of new Arnoldi vector and update Hessenberg matrix - !$OMP PARALLEL DO PRIVATE(iblk) - do iblk = 1, nblocks - call calc_L2norm_squared(nx_block , ny_block , & - icellu (iblk), & - indxui (:,iblk), indxuj(:, iblk) , & - arnoldi_basis_x(:,:,iblk, nextit), & - arnoldi_basis_y(:,:,iblk, nextit), & - norm_squared(iblk)) - enddo - !$OMP END PARALLEL DO - hessenberg(nextit,initer) = sqrt(global_sum(sum(norm_squared), distrb_info)) + hessenberg(nextit,initer) = global_norm(nx_block, ny_block, & + icellU , & + indxUi , indxUj , & + arnoldi_basis_x(:,:,:, nextit), & + arnoldi_basis_y(:,:,:, nextit)) ! Watch out for happy breakdown if (.not. 
almost_zero( hessenberg(nextit,initer) ) ) then @@ -3355,9 +3402,9 @@ subroutine pgmres (zetax2 , etax2 , & inverse_norm = c1 / hessenberg(nextit,initer) !$OMP PARALLEL DO PRIVATE(iblk,ij,i,j) do iblk = 1, nblocks - do ij = 1, icellu(iblk) - i = indxui(ij, iblk) - j = indxuj(ij, iblk) + do ij = 1, icellU(iblk) + i = indxUi(ij, iblk) + j = indxUj(ij, iblk) arnoldi_basis_x(i, j, iblk, nextit) = arnoldi_basis_x(i, j, iblk, nextit)*inverse_norm arnoldi_basis_y(i, j, iblk, nextit) = arnoldi_basis_y(i, j, iblk, nextit)*inverse_norm @@ -3423,9 +3470,9 @@ subroutine pgmres (zetax2 , etax2 , & t = rhs_hess(it) !$OMP PARALLEL DO PRIVATE(iblk,ij,i,j) do iblk = 1, nblocks - do ij = 1, icellu(iblk) - i = indxui(ij, iblk) - j = indxuj(ij, iblk) + do ij = 1, icellU(iblk) + i = indxUi(ij, iblk) + j = indxUj(ij, iblk) workspace_x(i, j, iblk) = workspace_x(i, j, iblk) + t * arnoldi_basis_x(i, j, iblk, it) workspace_y(i, j, iblk) = workspace_y(i, j, iblk) + t * arnoldi_basis_y(i, j, iblk, it) @@ -3438,6 +3485,7 @@ subroutine pgmres (zetax2 , etax2 , & call precondition(zetax2 , etax2 , & Cb , vrel , & umassdti , & + halo_info_mask, & workspace_x , workspace_y, & diagx , diagy , & precond_type, & @@ -3479,9 +3527,9 @@ subroutine pgmres (zetax2 , etax2 , & do it = 1, nextit !$OMP PARALLEL DO PRIVATE(iblk,ij,i,j) do iblk = 1, nblocks - do ij = 1, icellu(iblk) - i = indxui(ij, iblk) - j = indxuj(ij, iblk) + do ij = 1, icellU(iblk) + i = indxUi(ij, iblk) + j = indxUj(ij, iblk) workspace_x(i, j, iblk) = workspace_x(i, j, iblk) + rhs_hess(it) * arnoldi_basis_x(i, j, iblk, it) workspace_y(i, j, iblk) = workspace_x(i, j, iblk) + rhs_hess(it) * arnoldi_basis_y(i, j, iblk, it) @@ -3504,6 +3552,7 @@ end subroutine pgmres subroutine precondition(zetax2 , etax2, & Cb , vrel , & umassdti , & + halo_info_mask, & vx , vy , & diagx , diagy, & precond_type, & @@ -3518,6 +3567,9 @@ subroutine precondition(zetax2 , etax2, & Cb , & ! seabed stress coefficient umassdti ! mass of U-cell/dte (kg/m^2 s) + type (ice_halo), intent(in) :: & + halo_info_mask ! ghost cell update info for masked halo + real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks), intent(in) :: & vx , & ! input vector (x components) vy ! input vector (y components) @@ -3560,9 +3612,9 @@ subroutine precondition(zetax2 , etax2, & elseif (precond_type == 'diag') then ! Jacobi preconditioner (diagonal) !$OMP PARALLEL DO PRIVATE(iblk,ij,i,j) do iblk = 1, nblocks - do ij = 1, icellu(iblk) - i = indxui(ij, iblk) - j = indxuj(ij, iblk) + do ij = 1, icellU(iblk) + i = indxUi(ij, iblk) + j = indxUj(ij, iblk) wx(i,j,iblk) = vx(i,j,iblk) / diagx(i,j,iblk) wy(i,j,iblk) = vy(i,j,iblk) / diagy(i,j,iblk) @@ -3579,6 +3631,7 @@ subroutine precondition(zetax2 , etax2, & call pgmres (zetax2, etax2 , & Cb , vrel , & umassdti , & + halo_info_mask , & vx , vy , & diagx , diagy , & tolerance, maxinner, & @@ -3626,46 +3679,27 @@ subroutine orthogonalize(ortho_type , initer , & ij , & ! compressed index i, j ! grid indices - real (kind=dbl_kind), dimension (max_blocks) :: & - local_dot ! local array value to accumulate dot product of grid function over blocks - - real (kind=dbl_kind), dimension(maxinner) :: & - dotprod_local ! local array to accumulate several dot product computations - character(len=*), parameter :: subname = '(orthogonalize)' if (trim(ortho_type) == 'cgs') then ! Classical Gram-Schmidt ! Classical Gram-Schmidt orthogonalisation process ! 
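
The 'diag' branch of precondition above is a plain Jacobi preconditioner: approximating the system matrix by its diagonal makes applying M**(-1) an elementwise division, which is why the formDiag routines above only ever need to assemble diagx and diagy. A minimal sketch under that assumption (array names are illustrative):

    program jacobi_precond_demo
      implicit none
      integer, parameter :: dbl_kind = selected_real_kind(13)
      integer :: i
      real (kind=dbl_kind), dimension(4) :: diagx, vx, wx
      diagx = (/ 2.0_dbl_kind, 4.0_dbl_kind, 5.0_dbl_kind, 10.0_dbl_kind /)
      vx    = 1.0_dbl_kind
      do i = 1, size(vx)
         wx(i) = vx(i) / diagx(i)   ! w = D**(-1) v, one entry at a time
      enddo
      print *, wx                   ! 0.5  0.25  0.2  0.1
    end program jacobi_precond_demo
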
First loop of Gram-Schmidt (compute coefficients) - dotprod_local = c0 do it = 1, initer - local_dot = c0 - - !$OMP PARALLEL DO PRIVATE(iblk,ij,i,j) - do iblk = 1, nblocks - do ij = 1, icellu(iblk) - i = indxui(ij, iblk) - j = indxuj(ij, iblk) - - local_dot(iblk) = local_dot(iblk) + & - (arnoldi_basis_x(i, j, iblk, it) * arnoldi_basis_x(i, j, iblk, nextit)) + & - (arnoldi_basis_y(i, j, iblk, it) * arnoldi_basis_y(i, j, iblk, nextit)) - enddo ! ij - enddo - !$OMP END PARALLEL DO - - dotprod_local(it) = sum(local_dot) + hessenberg(it, initer) = global_dot_product(nx_block, ny_block, & + icellU , & + indxUi , indxUj , & + arnoldi_basis_x(:,:,:, it) , & + arnoldi_basis_y(:,:,:, it) , & + arnoldi_basis_x(:,:,:, nextit), & + arnoldi_basis_y(:,:,:, nextit)) end do - - hessenberg(1:initer, initer) = global_allreduce_sum(dotprod_local(1:initer), distrb_info) - ! Second loop of Gram-Schmidt (orthonormalize) do it = 1, initer !$OMP PARALLEL DO PRIVATE(iblk,ij,i,j) do iblk = 1, nblocks - do ij = 1, icellu(iblk) - i = indxui(ij, iblk) - j = indxuj(ij, iblk) + do ij = 1, icellU(iblk) + i = indxUi(ij, iblk) + j = indxUj(ij, iblk) arnoldi_basis_x(i, j, iblk, nextit) = arnoldi_basis_x(i, j, iblk, nextit) & - hessenberg(it,initer) * arnoldi_basis_x(i, j, iblk, it) @@ -3678,28 +3712,19 @@ subroutine orthogonalize(ortho_type , initer , & elseif (trim(ortho_type) == 'mgs') then ! Modified Gram-Schmidt ! Modified Gram-Schmidt orthogonalisation process do it = 1, initer - local_dot = c0 - - !$OMP PARALLEL DO PRIVATE(iblk,ij,i,j) - do iblk = 1, nblocks - do ij = 1, icellu(iblk) - i = indxui(ij, iblk) - j = indxuj(ij, iblk) - - local_dot(iblk) = local_dot(iblk) + & - (arnoldi_basis_x(i, j, iblk, it) * arnoldi_basis_x(i, j, iblk, nextit)) + & - (arnoldi_basis_y(i, j, iblk, it) * arnoldi_basis_y(i, j, iblk, nextit)) - enddo ! ij - enddo - !$OMP END PARALLEL DO - - hessenberg(it,initer) = global_sum(sum(local_dot), distrb_info) + hessenberg(it, initer) = global_dot_product(nx_block, ny_block, & + icellU , & + indxUi , indxUj , & + arnoldi_basis_x(:,:,:, it) , & + arnoldi_basis_y(:,:,:, it) , & + arnoldi_basis_x(:,:,:, nextit), & + arnoldi_basis_y(:,:,:, nextit)) !$OMP PARALLEL DO PRIVATE(iblk,ij,i,j) do iblk = 1, nblocks - do ij = 1, icellu(iblk) - i = indxui(ij, iblk) - j = indxuj(ij, iblk) + do ij = 1, icellU(iblk) + i = indxUi(ij, iblk) + j = indxUj(ij, iblk) arnoldi_basis_x(i, j, iblk, nextit) = arnoldi_basis_x(i, j, iblk, nextit) & - hessenberg(it,initer) * arnoldi_basis_x(i, j, iblk, it) diff --git a/cicecore/cicedynB/dynamics/ice_transport_driver.F90 b/cicecore/cicedynB/dynamics/ice_transport_driver.F90 index 43fe5af13..30fe546e0 100644 --- a/cicecore/cicedynB/dynamics/ice_transport_driver.F90 +++ b/cicecore/cicedynB/dynamics/ice_transport_driver.F90 @@ -1534,13 +1534,8 @@ subroutine state_to_work (nx_block, ny_block, & integer (kind=int_kind) :: & nt_alvl, nt_apnd, nt_fbri -#ifdef UNDEPRECATE_CESMPONDS - logical (kind=log_kind) :: & - tr_pond_cesm, tr_pond_lvl, tr_pond_topo -#else logical (kind=log_kind) :: & tr_pond_lvl, tr_pond_topo -#endif integer (kind=int_kind) :: & i, j, n, it, & ! 
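
Back to the orthogonalization refactor above: classical Gram-Schmidt ('cgs') computes every coefficient against the unmodified new vector, which is what allowed the old code to batch all the dot products into one global_allreduce_sum; modified Gram-Schmidt ('mgs') folds each projection into the vector before taking the next dot product, costing one global reduction per basis vector but behaving more robustly under rounding. A toy standalone MGS step (3-vectors, two basis vectors):

    program mgs_demo
      implicit none
      integer, parameter :: dbl_kind = selected_real_kind(13)
      integer :: it
      real (kind=dbl_kind) :: q(3,2), w(3), h(2)
      q(:,1) = (/ 1.0_dbl_kind, 0.0_dbl_kind, 0.0_dbl_kind /)
      q(:,2) = (/ 0.0_dbl_kind, 1.0_dbl_kind, 0.0_dbl_kind /)
      w      = (/ 1.0_dbl_kind, 2.0_dbl_kind, 3.0_dbl_kind /)
      do it = 1, 2
         h(it) = dot_product(q(:,it), w)   ! coefficient uses the updated w
         w     = w - h(it)*q(:,it)         ! subtract immediately: the MGS twist
      enddo
      print *, w    ! (0, 0, 3): the component orthogonal to q1 and q2
    end program mgs_demo
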
counting indices @@ -1548,13 +1543,8 @@ subroutine state_to_work (nx_block, ny_block, & character(len=*), parameter :: subname = '(state_to_work)' -#ifdef UNDEPRECATE_CESMPONDS - call icepack_query_tracer_flags(tr_pond_cesm_out=tr_pond_cesm, & - tr_pond_lvl_out=tr_pond_lvl, tr_pond_topo_out=tr_pond_topo) -#else call icepack_query_tracer_flags(tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo) -#endif call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_apnd_out=nt_apnd, & nt_fbri_out=nt_fbri) call icepack_warnings_flush(nu_diag) @@ -1613,13 +1603,8 @@ subroutine state_to_work (nx_block, ny_block, & * trcrn(i,j,it ,n) enddo enddo -#ifdef UNDEPRECATE_CESMPONDS - elseif (trcr_depend(it) == 2+nt_apnd .and. & - tr_pond_cesm .or. tr_pond_topo) then -#else elseif (trcr_depend(it) == 2+nt_apnd .and. & tr_pond_topo) then -#endif do j = 1, ny_block do i = 1, nx_block works(i,j,narrays+it) = aicen(i,j ,n) & diff --git a/cicecore/cicedynB/dynamics/ice_transport_remap.F90 b/cicecore/cicedynB/dynamics/ice_transport_remap.F90 index 6fd037b7b..286a51711 100644 --- a/cicecore/cicedynB/dynamics/ice_transport_remap.F90 +++ b/cicecore/cicedynB/dynamics/ice_transport_remap.F90 @@ -633,6 +633,7 @@ subroutine horizontal_remap (dt, ntrace, & endif ! nghost + ! tcraig, this OMP loop sometimes fails with cce/14.0.3, compiler bug?? !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block,n, & !$OMP edgearea_e,edgearea_n,edge,iflux,jflux, & !$OMP xp,yp,indxing,indxjng,mflxe,mflxn, & diff --git a/cicecore/cicedynB/general/ice_flux.F90 b/cicecore/cicedynB/general/ice_flux.F90 index a7e5aa584..8d190753e 100644 --- a/cicecore/cicedynB/general/ice_flux.F90 +++ b/cicecore/cicedynB/general/ice_flux.F90 @@ -35,6 +35,16 @@ module ice_flux ! Dynamics component ! All variables are assumed to be on the atm or ocn thermodynamic ! grid except as noted + ! + ! scale_fluxes divides several of these by aice "in place", so + ! the state of some of these variables is not well defined. In the + ! future, we need to refactor and add "_iavg" versions of the + ! fields to clearly differentiate fields that have been divided + ! by aice and others that are not. The challenge is that we need + ! to go thru each field carefully to see which version is used. + ! For instance, in diagnostics, there are places where these + ! fields are multiplied by aice to compute things properly. + ! strocn[x,y]T_iavg is the first field defined using _iavg. !----------------------------------------------------------------- real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & @@ -56,8 +66,8 @@ module ice_flux ! out to ocean T-cell (kg/m s^2) ! Note, CICE_IN_NEMO uses strocnx and strocny for coupling - strocnxT, & ! ice-ocean stress, x-direction at T points, per ice fraction - strocnyT ! ice-ocean stress, y-direction at T points, per ice fraction + strocnxT_iavg, & ! ice-ocean stress, x-direction at T points, per ice fraction (scaled flux) + strocnyT_iavg ! ice-ocean stress, y-direction at T points, per ice fraction (scaled flux) ! diagnostic @@ -389,8 +399,8 @@ subroutine alloc_flux hwater (nx_block,ny_block,max_blocks), & ! water depth for seabed stress calc (landfast ice) strairxT (nx_block,ny_block,max_blocks), & ! stress on ice by air, x-direction strairyT (nx_block,ny_block,max_blocks), & ! stress on ice by air, y-direction - strocnxT (nx_block,ny_block,max_blocks), & ! ice-ocean stress, x-direction - strocnyT (nx_block,ny_block,max_blocks), & ! 
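
On the _iavg convention documented in ice_flux.F90 above: once scale_fluxes has divided a field by the ice fraction, its values are per unit ice area, and any diagnostic that wants the grid-cell mean back has to multiply by aice again — hence the push to make the distinction explicit in the name. A standalone sketch of the two conventions; strocnxT_sum is a hypothetical name for the un-divided field, and puny plays the usual small-number guard:

    program iavg_demo
      implicit none
      integer, parameter :: dbl_kind = selected_real_kind(13)
      real (kind=dbl_kind), parameter :: puny = 1.0e-11_dbl_kind
      real (kind=dbl_kind) :: aice, strocnxT_sum, strocnxT_iavg
      aice          = 0.5_dbl_kind    ! ice covers half the cell
      strocnxT_sum  = 0.02_dbl_kind   ! grid-cell mean stress (N/m^2)
      if (aice > puny) then
         strocnxT_iavg = strocnxT_sum / aice   ! per unit ice area: 0.04
      else
         strocnxT_iavg = 0.0_dbl_kind
      endif
      print *, strocnxT_iavg * aice   ! recovers the cell mean, 0.02
    end program iavg_demo
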
ice-ocean stress, y-direction + strocnxT_iavg(nx_block,ny_block,max_blocks), & ! ice-ocean stress, x-direction, per ice area + strocnyT_iavg(nx_block,ny_block,max_blocks), & ! ice-ocean stress, y-direction, per ice area sig1 (nx_block,ny_block,max_blocks), & ! normalized principal stress component sig2 (nx_block,ny_block,max_blocks), & ! normalized principal stress component sigP (nx_block,ny_block,max_blocks), & ! internal ice pressure (N/m) @@ -765,8 +775,8 @@ subroutine init_coupler_flux ! fluxes sent to ocean !----------------------------------------------------------------- - strocnxT(:,:,:) = c0 ! ice-ocean stress, x-direction (T-cell) - strocnyT(:,:,:) = c0 ! ice-ocean stress, y-direction (T-cell) + strocnxT_iavg (:,:,:) = c0 ! ice-ocean stress, x-direction (T-cell) + strocnyT_iavg (:,:,:) = c0 ! ice-ocean stress, y-direction (T-cell) fresh (:,:,:) = c0 fsalt (:,:,:) = c0 fpond (:,:,:) = c0 diff --git a/cicecore/cicedynB/general/ice_forcing.F90 b/cicecore/cicedynB/general/ice_forcing.F90 index edff03b9f..ff79778c5 100755 --- a/cicecore/cicedynB/general/ice_forcing.F90 +++ b/cicecore/cicedynB/general/ice_forcing.F90 @@ -22,7 +22,7 @@ module ice_forcing use ice_boundary, only: ice_HaloUpdate use ice_blocks, only: nx_block, ny_block use ice_domain, only: halo_info - use ice_domain_size, only: ncat, max_blocks, nx_global, ny_global + use ice_domain_size, only: ncat, max_blocks, nx_global, ny_global, nfreq use ice_communicate, only: my_task, master_task use ice_calendar, only: istep, istep1, & msec, mday, mmonth, myear, yday, daycal, & @@ -116,6 +116,9 @@ module ice_forcing topmelt_data, & botmelt_data + real (kind=dbl_kind), dimension(:,:,:,:,:), allocatable :: & + wave_spectrum_data ! field values at 2 temporal data points + character(char_len), public :: & atm_data_format, & ! 'bin'=binary or 'nc'=netcdf ocn_data_format, & ! 'bin'=binary or 'nc'=netcdf @@ -229,6 +232,7 @@ subroutine alloc_forcing ocn_frc_m(nx_block,ny_block, max_blocks,nfld,12), & ! ocn data for 12 months topmelt_file(ncat), & botmelt_file(ncat), & + wave_spectrum_data(nx_block,ny_block,nfreq,2,max_blocks), & stat=ierr) if (ierr/=0) call abort_ice('(alloc_forcing): Out of Memory') @@ -1527,6 +1531,44 @@ subroutine interpolate_data (field_data, field) end subroutine interpolate_data +!======================================================================= + + subroutine interpolate_wavespec_data (field_data, field) + +! Linear interpolation + +! author: Elizabeth C. Hunke, LANL + + use ice_domain, only: nblocks + + real (kind=dbl_kind), dimension(nx_block,ny_block,nfreq,2,max_blocks), intent(in) :: & + field_data ! 2 values used for interpolation + + real (kind=dbl_kind), dimension(nx_block,ny_block,nfreq,max_blocks), intent(out) :: & + field ! interpolated field + + ! 
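
interpolate_wavespec_data, continuing below, applies the same two-point linear interpolation in time as interpolate_data, just with an extra frequency dimension: the weights c1intp and c2intp sum to one and split the interval between the two bracketing records, with c1intp weighting the earlier record. A scalar standalone check, weights hard-coded for a time one quarter of the way from record 1 to record 2:

    program interp_demo
      implicit none
      integer, parameter :: dbl_kind = selected_real_kind(13)
      real (kind=dbl_kind) :: c1intp, c2intp, rec1, rec2, field
      c2intp = 0.25_dbl_kind              ! fraction of the interval elapsed
      c1intp = 1.0_dbl_kind - c2intp
      rec1 = 10.0_dbl_kind                ! field value at the earlier record
      rec2 = 14.0_dbl_kind                ! field value at the later record
      field = c1intp*rec1 + c2intp*rec2
      print *, field                      ! 11.0
    end program interp_demo
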
local variables + + integer (kind=int_kind) :: i,j, iblk, freq + + character(len=*), parameter :: subname = '(interpolate data)' + + !$OMP PARALLEL DO PRIVATE(iblk,i,j) + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + do freq = 1, nfreq + field(i,j,freq,iblk) = c1intp * field_data(i,j,freq,1,iblk) & + + c2intp * field_data(i,j,freq,2,iblk) + enddo + enddo + enddo + enddo + !$OMP END PARALLEL DO + + end subroutine interpolate_wavespec_data + + !======================================================================= subroutine file_year (data_file, yr) @@ -4129,8 +4171,8 @@ subroutine ocn_data_ncar_init_3D work1(:,:,:) = ocn_frc_m(:,:,:,n ,m) work2(:,:,:) = ocn_frc_m(:,:,:,n+1,m) - call grid_average_X2Y('F',work1,'T',ocn_frc_m(:,:,:,n ,m),'U') - call grid_average_X2Y('F',work2,'T',ocn_frc_m(:,:,:,n+1,m),'U') + call grid_average_X2Y('A',work1,'T',ocn_frc_m(:,:,:,n ,m),'U') + call grid_average_X2Y('A',work2,'T',ocn_frc_m(:,:,:,n+1,m),'U') enddo ! month loop enddo ! field loop @@ -4373,7 +4415,7 @@ subroutine ocn_data_hadgem(dt) use ice_domain, only: nblocks use ice_domain_size, only: max_blocks use ice_flux, only: sst, uocn, vocn - use ice_grid, only: grid_average_X2Y, ANGLET + use ice_grid, only: ANGLET real (kind=dbl_kind), intent(in) :: & dt ! time step @@ -5566,7 +5608,6 @@ subroutine get_wave_spec use ice_arrays_column, only: wave_spectrum, & dwavefreq, wavefreq use ice_constants, only: c0 - use ice_domain_size, only: nfreq use ice_timers, only: ice_timer_start, ice_timer_stop, timer_fsd ! local variables @@ -5592,7 +5633,6 @@ subroutine get_wave_spec ! if no wave data is provided, wave_spectrum is zero everywhere wave_spectrum(:,:,:,:) = c0 - wave_spec_dir = ocn_data_dir debug_forcing = .false. ! wave spectrum and frequencies @@ -5610,10 +5650,7 @@ subroutine get_wave_spec file=__FILE__, line=__LINE__) else #ifdef USE_NETCDF - call ice_open_nc(wave_spec_file,fid) - call ice_read_nc_xyf (fid, 1, 'efreq', wave_spectrum(:,:,:,:), debug_forcing, & - field_loc_center, field_type_scalar) - call ice_close_nc(fid) + call wave_spec_data #else write (nu_diag,*) "wave spectrum file not available, requires cpp USE_NETCDF" write (nu_diag,*) "wave spectrum file not available, using default profile" @@ -5628,6 +5665,146 @@ subroutine get_wave_spec end subroutine get_wave_spec +!======================================================================= +! +! Read in wave spectrum forcing as a function of time. 6 hourly +! LR started working from JRA55_data routine +! Changed fields, and changed 3 hourly to 6 hourly +! + subroutine wave_spec_data + + use ice_blocks, only: block, get_block + use ice_global_reductions, only: global_minval, global_maxval + use ice_domain, only: nblocks, distrb_info, blocks_ice + use ice_arrays_column, only: wave_spectrum, & + dwavefreq, wavefreq + use ice_read_write, only: ice_read_nc_xyf + use ice_grid, only: hm, tlon, tlat, tmask, umask + use ice_calendar, only: days_per_year, use_leap_years + + integer (kind=int_kind) :: & + ncid , & ! netcdf file id + i, j, freq , & + ixm,ixx,ixp , & ! record numbers for neighboring months + recnum , & ! record number + maxrec , & ! maximum record number + recslot , & ! spline slot for current record + midmonth , & ! middle day of month + dataloc , & ! = 1 for data located in middle of time interval + ! = 2 for date located at end of time interval + iblk , & ! block index + ilo,ihi,jlo,jhi, & ! beginning and end of physical domain + yr ! current forcing year + + real (kind=dbl_kind) :: & + sec6hr , & ! 
number of seconds in 3 hours + secday , & ! number of seconds in day + vmin, vmax + + logical (kind=log_kind) :: readm, read6,debug_n_d + + type (block) :: & + this_block ! block information for current block + + real(kind=dbl_kind), dimension(nfreq) :: & + wave_spectrum_profile ! wave spectrum + + character(len=64) :: fieldname !netcdf field name + character(char_len_long) :: spec_file + character(char_len) :: wave_spec_type + logical (kind=log_kind) :: wave_spec + character(len=*), parameter :: subname = '(wave_spec_data)' + + + + debug_n_d = .false. !usually false + + call icepack_query_parameters(secday_out=secday) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + call icepack_init_wave(nfreq, & + wave_spectrum_profile, & + wavefreq, dwavefreq) + + + !spec_file = trim(ocn_data_dir)//'/'//trim(wave_spec_file) + spec_file = trim(wave_spec_file) + wave_spectrum_data = c0 + wave_spectrum = c0 + yr = fyear ! current year + !------------------------------------------------------------------- + ! 6-hourly data + ! + ! Assume that the 6-hourly value is located at the end of the + ! 6-hour period. This is the convention for NCEP reanalysis data. + ! E.g. record 1 gives conditions at 6 am GMT on 1 January. + !------------------------------------------------------------------- + + dataloc = 2 ! data located at end of interval + sec6hr = secday/c4 ! seconds in 6 hours + !maxrec = 2920 ! 365*8; for leap years = 366*8 + + if (use_leap_years) days_per_year = 366 !overrides setting of 365 in ice_calendar + maxrec = days_per_year*4 + + if(days_per_year == 365 .and. (mod(yr, 4) == 0)) then + call abort_ice('days_per_year should be set to 366 for leap years') + end if + + ! current record number + recnum = 4*int(yday) - 3 + int(real(msec,kind=dbl_kind)/sec6hr) + + ! Compute record numbers for surrounding data (2 on each side) + + ixm = mod(recnum+maxrec-2,maxrec) + 1 + ixx = mod(recnum-1, maxrec) + 1 + + ! Compute interpolation coefficients + ! If data is located at the end of the time interval, then the + ! data value for the current record goes in slot 2 + + recslot = 2 + ixp = -99 + call interp_coeff (recnum, recslot, sec6hr, dataloc) + + ! Read + read6 = .false. + if (istep==1 .or. oldrecnum .ne. recnum) read6 = .true. + !------------------------------------------------------------------- + ! File is NETCDF + ! file variable names are: + ! efreq (wave spectrum, energy as a function of wave frequency UNITS) + !------------------------------------------------------------------- + call ice_open_nc(spec_file,ncid) + + call ice_read_nc_xyf(ncid,recnum,'efreq',wave_spectrum_data(:,:,:,1,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + call ice_read_nc_xyf(ncid,recnum,'efreq',wave_spectrum_data(:,:,:,2,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + call ice_close_nc(ncid) + + + ! Interpolate + call interpolate_wavespec_data (wave_spectrum_data, wave_spectrum) + + ! Save record number + oldrecnum = recnum + + if (local_debug) then + if (my_task == master_task) write (nu_diag,*) & + 'wave_spec_data ',spec_file + if (my_task.eq.master_task) & + write (nu_diag,*) 'maxrec',maxrec + write (nu_diag,*) 'days_per_year', days_per_year + + endif ! local debug + + end subroutine wave_spec_data + !======================================================================= ! 
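
The record bookkeeping in wave_spec_data above follows the convention stated in its header comment: each 6-hourly record is stamped at the end of its window, four records per day, so the current record is 4*int(yday) - 3 + int(msec/sec6hr), where sec6hr is the 21600 seconds in six hours. A standalone worked check of that arithmetic:

    program recnum_demo
      implicit none
      integer, parameter :: dbl_kind = selected_real_kind(13)
      real (kind=dbl_kind), parameter :: secday = 86400.0_dbl_kind
      real (kind=dbl_kind) :: sec6hr, yday
      integer :: msec, recnum
      sec6hr = secday/4.0_dbl_kind    ! 21600 s in 6 hours
      yday   = 2.0_dbl_kind           ! 2 January
      msec   = 43200                  ! 12:00 GMT
      recnum = 4*int(yday) - 3 + int(real(msec,kind=dbl_kind)/sec6hr)
      print *, recnum                 ! 7: the record covering 12:00-18:00 on 2 Jan
    end program recnum_demo
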
initial snow aging lookup table diff --git a/cicecore/cicedynB/general/ice_forcing_bgc.F90 b/cicecore/cicedynB/general/ice_forcing_bgc.F90 index fc440834c..2f07d05f1 100644 --- a/cicecore/cicedynB/general/ice_forcing_bgc.F90 +++ b/cicecore/cicedynB/general/ice_forcing_bgc.F90 @@ -889,68 +889,116 @@ subroutine faero_optics ! this data is used in bulk aerosol treatment in dEdd radiation kaer_tab = reshape((/ & ! aerosol mass extinction cross section (m2/kg) - 11580.61872, 5535.41835, 2793.79690, & - 25798.96479, 11536.03871, 4688.24207, & - 196.49772, 204.14078, 214.42287, & - 2665.85867, 2256.71027, 820.36024, & - 840.78295, 1028.24656, 1163.03298, & - 387.51211, 414.68808, 450.29814/), & +! 11580.61872, 5535.41835, 2793.79690, & +! 25798.96479, 11536.03871, 4688.24207, & +! 196.49772, 204.14078, 214.42287, & +! 2665.85867, 2256.71027, 820.36024, & +! 840.78295, 1028.24656, 1163.03298, & +! 387.51211, 414.68808, 450.29814/), & + 11580.61872_dbl_kind, 5535.41835_dbl_kind, 2793.79690_dbl_kind, & + 25798.96479_dbl_kind, 11536.03871_dbl_kind, 4688.24207_dbl_kind, & + 196.49772_dbl_kind, 204.14078_dbl_kind, 214.42287_dbl_kind, & + 2665.85867_dbl_kind, 2256.71027_dbl_kind, 820.36024_dbl_kind, & + 840.78295_dbl_kind, 1028.24656_dbl_kind, 1163.03298_dbl_kind, & + 387.51211_dbl_kind, 414.68808_dbl_kind, 450.29814_dbl_kind/), & (/icepack_nspint,icepack_max_aero/)) waer_tab = reshape((/ & ! aerosol single scatter albedo (fraction) - 0.29003, 0.17349, 0.06613, & - 0.51731, 0.41609, 0.21324, & - 0.84467, 0.94216, 0.95666, & - 0.97764, 0.99402, 0.98552, & - 0.94146, 0.98527, 0.99093, & - 0.90034, 0.96543, 0.97678/), & +! 0.29003, 0.17349, 0.06613, & +! 0.51731, 0.41609, 0.21324, & +! 0.84467, 0.94216, 0.95666, & +! 0.97764, 0.99402, 0.98552, & +! 0.94146, 0.98527, 0.99093, & +! 0.90034, 0.96543, 0.97678/), & + 0.29003_dbl_kind, 0.17349_dbl_kind, 0.06613_dbl_kind, & + 0.51731_dbl_kind, 0.41609_dbl_kind, 0.21324_dbl_kind, & + 0.84467_dbl_kind, 0.94216_dbl_kind, 0.95666_dbl_kind, & + 0.97764_dbl_kind, 0.99402_dbl_kind, 0.98552_dbl_kind, & + 0.94146_dbl_kind, 0.98527_dbl_kind, 0.99093_dbl_kind, & + 0.90034_dbl_kind, 0.96543_dbl_kind, 0.97678_dbl_kind/), & (/icepack_nspint,icepack_max_aero/)) gaer_tab = reshape((/ & ! aerosol asymmetry parameter (cos(theta)) - 0.35445, 0.19838, 0.08857, & - 0.52581, 0.32384, 0.14970, & - 0.83162, 0.78306, 0.74375, & - 0.68861, 0.70836, 0.54171, & - 0.70239, 0.66115, 0.71983, & - 0.78734, 0.73580, 0.64411/), & +! 0.35445, 0.19838, 0.08857, & +! 0.52581, 0.32384, 0.14970, & +! 0.83162, 0.78306, 0.74375, & +! 0.68861, 0.70836, 0.54171, & +! 0.70239, 0.66115, 0.71983, & +! 0.78734, 0.73580, 0.64411/), & + 0.35445_dbl_kind, 0.19838_dbl_kind, 0.08857_dbl_kind, & + 0.52581_dbl_kind, 0.32384_dbl_kind, 0.14970_dbl_kind, & + 0.83162_dbl_kind, 0.78306_dbl_kind, 0.74375_dbl_kind, & + 0.68861_dbl_kind, 0.70836_dbl_kind, 0.54171_dbl_kind, & + 0.70239_dbl_kind, 0.66115_dbl_kind, 0.71983_dbl_kind, & + 0.78734_dbl_kind, 0.73580_dbl_kind, 0.64411_dbl_kind/), & (/icepack_nspint,icepack_max_aero/)) ! this data is used in MODAL AEROSOL treatment in dEdd radiation kaer_bc_tab = reshape((/ & ! 
aerosol mass extinction cross section (m2/kg) - 12955.44732, 5946.89461, 2772.33366, & - 12085.30664, 7438.83131, 3657.13084, & - 9753.99698, 7342.87139, 4187.79304, & - 7815.74879, 6659.65096, 4337.98863, & - 6381.28194, 5876.78408, 4254.65054, & - 5326.93163, 5156.74532, 4053.66581, & - 4538.09763, 4538.60875, 3804.10884, & - 3934.17604, 4020.20799, 3543.27199, & - 3461.20656, 3587.80962, 3289.98060, & - 3083.03396, 3226.27231, 3052.91441/), & +! 12955.44732, 5946.89461, 2772.33366, & +! 12085.30664, 7438.83131, 3657.13084, & +! 9753.99698, 7342.87139, 4187.79304, & +! 7815.74879, 6659.65096, 4337.98863, & +! 6381.28194, 5876.78408, 4254.65054, & +! 5326.93163, 5156.74532, 4053.66581, & +! 4538.09763, 4538.60875, 3804.10884, & +! 3934.17604, 4020.20799, 3543.27199, & +! 3461.20656, 3587.80962, 3289.98060, & +! 3083.03396, 3226.27231, 3052.91441/), & + 12955.4473151973_dbl_kind, 5946.89461205564_dbl_kind, 2772.33366387720_dbl_kind, & + 12085.3066388712_dbl_kind, 7438.83131367992_dbl_kind, 3657.13084442081_dbl_kind, & + 9753.99697536893_dbl_kind, 7342.87139082553_dbl_kind, 4187.79303607928_dbl_kind, & + 7815.74879345131_dbl_kind, 6659.65096365965_dbl_kind, 4337.98863414228_dbl_kind, & + 6381.28194381772_dbl_kind, 5876.78408231865_dbl_kind, 4254.65053724305_dbl_kind, & + 5326.93163497508_dbl_kind, 5156.74531505734_dbl_kind, 4053.66581550147_dbl_kind, & + 4538.09762614960_dbl_kind, 4538.60874501597_dbl_kind, 3804.10884202567_dbl_kind, & + 3934.17604000777_dbl_kind, 4020.20798667897_dbl_kind, 3543.27199302277_dbl_kind, & + 3461.20655708248_dbl_kind, 3587.80961820605_dbl_kind, 3289.98060303894_dbl_kind, & + 3083.03396032095_dbl_kind, 3226.27231329114_dbl_kind, 3052.91440681137_dbl_kind/), & (/icepack_nspint,10/)) waer_bc_tab = reshape((/ & ! aerosol single scatter albedo (fraction) - 0.26107, 0.15861, 0.06535, & - 0.37559, 0.30318, 0.19483, & - 0.42224, 0.36913, 0.27875, & - 0.44777, 0.40503, 0.33026, & - 0.46444, 0.42744, 0.36426, & - 0.47667, 0.44285, 0.38827, & - 0.48635, 0.45428, 0.40617, & - 0.49440, 0.46328, 0.42008, & - 0.50131, 0.47070, 0.43128, & - 0.50736, 0.47704, 0.44056/), & +! 0.26107, 0.15861, 0.06535, & +! 0.37559, 0.30318, 0.19483, & +! 0.42224, 0.36913, 0.27875, & +! 0.44777, 0.40503, 0.33026, & +! 0.46444, 0.42744, 0.36426, & +! 0.47667, 0.44285, 0.38827, & +! 0.48635, 0.45428, 0.40617, & +! 0.49440, 0.46328, 0.42008, & +! 0.50131, 0.47070, 0.43128, & +! 0.50736, 0.47704, 0.44056/), & + 0.261071919959011_dbl_kind, 0.158608047940651_dbl_kind, 0.0653546447770291_dbl_kind, & + 0.375593873543050_dbl_kind, 0.303181671502553_dbl_kind, 0.194832290545495_dbl_kind, & + 0.422240383488477_dbl_kind, 0.369134186611324_dbl_kind, 0.278752556671685_dbl_kind, & + 0.447772153910671_dbl_kind, 0.405033725319593_dbl_kind, 0.330260831965086_dbl_kind, & + 0.464443094570456_dbl_kind, 0.427439117980081_dbl_kind, 0.364256689383418_dbl_kind, & + 0.476668995985241_dbl_kind, 0.442854173154887_dbl_kind, 0.388270470928338_dbl_kind, & + 0.486347881475941_dbl_kind, 0.454284736567521_dbl_kind, 0.406167596922937_dbl_kind, & + 0.494397834153785_dbl_kind, 0.463279526357470_dbl_kind, 0.420084410794128_dbl_kind, & + 0.501307856563459_dbl_kind, 0.470696914968199_dbl_kind, 0.431284889617716_dbl_kind, & + 0.507362336297419_dbl_kind, 0.477038272961243_dbl_kind, 0.440559363958571_dbl_kind/), & (/icepack_nspint,10/)) gaer_bc_tab = reshape((/ & ! 
aerosol asymmetry parameter (cos(theta)) - 0.28328, 0.19644, 0.10498, & - 0.44488, 0.32615, 0.19612, & - 0.54724, 0.41611, 0.26390, & - 0.61711, 0.48475, 0.31922, & - 0.66673, 0.53923, 0.36632, & - 0.70296, 0.58337, 0.40732, & - 0.73002, 0.61960, 0.44344, & - 0.75064, 0.64959, 0.47551, & - 0.76663, 0.67461, 0.50415, & - 0.77926, 0.69561, 0.52981/),& +! 0.28328, 0.19644, 0.10498, & +! 0.44488, 0.32615, 0.19612, & +! 0.54724, 0.41611, 0.26390, & +! 0.61711, 0.48475, 0.31922, & +! 0.66673, 0.53923, 0.36632, & +! 0.70296, 0.58337, 0.40732, & +! 0.73002, 0.61960, 0.44344, & +! 0.75064, 0.64959, 0.47551, & +! 0.76663, 0.67461, 0.50415, & +! 0.77926, 0.69561, 0.52981/),& + 0.283282988564031_dbl_kind, 0.196444209821980_dbl_kind, 0.104976473902976_dbl_kind, & + 0.444877326083453_dbl_kind, 0.326147707342261_dbl_kind, 0.196121968923488_dbl_kind, & + 0.547243414035631_dbl_kind, 0.416106187964493_dbl_kind, 0.263903486903711_dbl_kind, & + 0.617111563012282_dbl_kind, 0.484745531707601_dbl_kind, 0.319218974395050_dbl_kind, & + 0.666728525631754_dbl_kind, 0.539228555802301_dbl_kind, 0.366323180358996_dbl_kind, & + 0.702956870835387_dbl_kind, 0.583372441336763_dbl_kind, 0.407316408184865_dbl_kind, & + 0.730016668453191_dbl_kind, 0.619595539349710_dbl_kind, 0.443436944107423_dbl_kind, & + 0.750635997128011_dbl_kind, 0.649589805870541_dbl_kind, 0.475512089138887_dbl_kind, & + 0.766634959089444_dbl_kind, 0.674609076223658_dbl_kind, 0.504145461809103_dbl_kind, & + 0.779256641759228_dbl_kind, 0.695614224933709_dbl_kind, 0.529805346632687_dbl_kind/), & (/icepack_nspint,10/)) bcenh(:,:,:) = c0 diff --git a/cicecore/cicedynB/general/ice_init.F90 b/cicecore/cicedynB/general/ice_init.F90 index c2cc986f8..45ae58d8b 100644 --- a/cicecore/cicedynB/general/ice_init.F90 +++ b/cicecore/cicedynB/general/ice_init.F90 @@ -73,15 +73,9 @@ subroutine input_data npt, dt, ndtd, days_per_year, use_leap_years, & write_ic, dump_last, npt_unit use ice_arrays_column, only: oceanmixed_ice -#ifdef UNDEPRECATE_CESMPONDS - use ice_restart_column, only: restart_age, restart_FY, restart_lvl, & - restart_pond_cesm, restart_pond_lvl, restart_pond_topo, restart_aero, & - restart_fsd, restart_iso, restart_snow -#else use ice_restart_column, only: restart_age, restart_FY, restart_lvl, & restart_pond_lvl, restart_pond_topo, restart_aero, & restart_fsd, restart_iso, restart_snow -#endif use ice_restart_shared, only: & restart, restart_ext, restart_coszen, restart_dir, restart_file, pointer_file, & runid, runtype, use_restart_time, restart_format, lcdf64 @@ -110,8 +104,8 @@ subroutine input_data grid_ice, grid_ice_thrm, grid_ice_dynu, grid_ice_dynv, & grid_ocn, grid_ocn_thrm, grid_ocn_dynu, grid_ocn_dynv, & grid_atm, grid_atm_thrm, grid_atm_dynu, grid_atm_dynv, & - dxrect, dyrect, & - pgl_global_ext + dxrect, dyrect, dxscale, dyscale, scale_dxdy, & + lonrefrect, latrefrect, pgl_global_ext use ice_dyn_shared, only: ndte, kdyn, revised_evp, yield_curve, & evp_algorithm, visc_method, & seabed_stress, seabed_stress_method, & @@ -165,23 +159,12 @@ subroutine input_data logical (kind=log_kind) :: tr_iage, tr_FY, tr_lvl, tr_pond logical (kind=log_kind) :: tr_iso, tr_aero, tr_fsd, tr_snow -#ifdef UNDEPRECATE_CESMPONDS - logical (kind=log_kind) :: tr_pond_cesm, tr_pond_lvl, tr_pond_topo -#else logical (kind=log_kind) :: tr_pond_lvl, tr_pond_topo -#endif integer (kind=int_kind) :: numin, numax ! 
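
A note on the _dbl_kind suffixes added to the aerosol optics tables above: an unsuffixed literal such as 0.29003 is a default-precision (single) constant in Fortran, so it is rounded to roughly seven digits before being converted into the double-precision array, perturbing the tables in their low-order bits. Tagging the literals preserves the intended values. A standalone demonstration, assuming default real is 4 bytes (the usual case without promotion flags):

    program literal_kind_demo
      implicit none
      integer, parameter :: dbl_kind = selected_real_kind(13)
      real (kind=dbl_kind) :: from_single, from_double
      from_single = 0.29003             ! rounded to default real first
      from_double = 0.29003_dbl_kind    ! full double-precision value
      print *, from_double - from_single   ! nonzero, on the order of 1e-8
    end program literal_kind_demo
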
unit number limits -#ifdef UNDEPRECATE_CESMPONDS - integer (kind=int_kind) :: rpcesm, rplvl, rptopo -#else integer (kind=int_kind) :: rplvl, rptopo -#endif real (kind=dbl_kind) :: Cf, ksno, puny character (len=char_len) :: abort_list -#ifdef CESMCOUPLED - character (len=64) :: tmpstr -#endif character (len=128) :: tmpstr2 character(len=*), parameter :: subname='(input_data)' @@ -212,6 +195,8 @@ subroutine input_data bathymetry_file, use_bathymetry, nfsd, bathymetry_format, & ncat, nilyr, nslyr, nblyr, & kcatbound, gridcpl_file, dxrect, dyrect, & + dxscale, dyscale, lonrefrect, latrefrect, & + scale_dxdy, & close_boundaries, orca_halogrid, grid_ice, kmt_type, & grid_atm, grid_ocn @@ -219,9 +204,6 @@ subroutine input_data tr_iage, restart_age, & tr_FY, restart_FY, & tr_lvl, restart_lvl, & -#ifdef UNDEPRECATE_CESMPONDS - tr_pond_cesm, restart_pond_cesm, & -#endif tr_pond_lvl, restart_pond_lvl, & tr_pond_topo, restart_pond_topo, & tr_snow, restart_snow, & @@ -398,6 +380,11 @@ subroutine input_data ksno = 0.3_dbl_kind ! snow thermal conductivity dxrect = 0.0_dbl_kind ! user defined grid spacing in cm in x direction dyrect = 0.0_dbl_kind ! user defined grid spacing in cm in y direction + lonrefrect = -156.50_dbl_kind ! lower left corner lon for rectgrid + latrefrect = 71.35_dbl_kind ! lower left corner lat for rectgrid + scale_dxdy = .false. ! apply dxscale, dyscale to rectgrid + dxscale = 1.0_dbl_kind ! user defined rectgrid x-grid scale factor (e.g., 1.02) + dyscale = 1.0_dbl_kind ! user defined rectgrid y-grid scale factor (e.g., 1.02) close_boundaries = .false. ! true = set land on edges of grid seabed_stress= .false. ! if true, seabed stress for landfast is on seabed_stress_method = 'LKD'! LKD = Lemieux et al 2015, probabilistic = Dupont et al. in prep @@ -412,7 +399,7 @@ subroutine input_data deltaminEVP = 1e-11_dbl_kind ! minimum delta for viscosities (EVP, Hunke 2001) deltaminVP = 2e-9_dbl_kind ! minimum delta for viscosities (VP, Hibler 1979) capping_method = 'max' ! method for capping of viscosities (max=Hibler 1979,sum=Kreyscher2000) - maxits_nonlin = 4 ! max nb of iteration for nonlinear solver + maxits_nonlin = 10 ! max nb of iteration for nonlinear solver precond = 'pgmres' ! preconditioner for fgmres: 'ident' (identity), 'diag' (diagonal), ! 'pgmres' (Jacobi-preconditioned GMRES) dim_fgmres = 50 ! size of fgmres Krylov subspace @@ -424,7 +411,7 @@ subroutine input_data monitor_pgmres = .false. ! print pgmres residual norm ortho_type = 'mgs' ! orthogonalization procedure 'cgs' or 'mgs' reltol_nonlin = 1e-8_dbl_kind ! nonlinear stopping criterion: reltol_nonlin*res(k=0) - reltol_fgmres = 1e-2_dbl_kind ! fgmres stopping criterion: reltol_fgmres*res(k) + reltol_fgmres = 1e-1_dbl_kind ! fgmres stopping criterion: reltol_fgmres*res(k) reltol_pgmres = 1e-6_dbl_kind ! pgmres stopping criterion: reltol_pgmres*res(k) algo_nonlin = 'picard' ! nonlinear algorithm: 'picard' (Picard iteration), 'anderson' (Anderson acceleration) fpfunc_andacc = 1 ! fixed point function for Anderson acceleration: @@ -438,11 +425,7 @@ subroutine input_data conserv_check = .false. ! tracer conservation check shortwave = 'ccsm3' ! 'ccsm3' or 'dEdd' (delta-Eddington) albedo_type = 'ccsm3' ! 'ccsm3' or 'constant' -#ifdef UNDEPRECATE_0LAYER - ktherm = 1 ! -1 = OFF, 0 = 0-layer, 1 = BL99, 2 = mushy thermo -#else ktherm = 1 ! -1 = OFF, 1 = BL99, 2 = mushy thermo -#endif conduct = 'bubbly' ! 'MU71' or 'bubbly' (Pringle et al 2007) coriolis = 'latitude' ! latitude dependent, or 'constant' ssh_stress = 'geostrophic' ! 
'geostrophic' or 'coupled' @@ -552,10 +535,6 @@ subroutine input_data restart_FY = .false. ! ice age restart tr_lvl = .false. ! level ice restart_lvl = .false. ! level ice restart -#ifdef UNDEPRECATE_CESMPONDS - tr_pond_cesm = .false. ! CESM melt ponds - restart_pond_cesm = .false. ! melt ponds restart -#endif tr_pond_lvl = .false. ! level-ice melt ponds restart_pond_lvl = .false. ! melt ponds restart tr_pond_topo = .false. ! explicit melt ponds (topographic) @@ -853,6 +832,11 @@ subroutine input_data call broadcast_scalar(grid_format, master_task) call broadcast_scalar(dxrect, master_task) call broadcast_scalar(dyrect, master_task) + call broadcast_scalar(scale_dxdy, master_task) + call broadcast_scalar(dxscale, master_task) + call broadcast_scalar(dyscale, master_task) + call broadcast_scalar(lonrefrect, master_task) + call broadcast_scalar(latrefrect, master_task) call broadcast_scalar(close_boundaries, master_task) call broadcast_scalar(grid_type, master_task) call broadcast_scalar(grid_ice, master_task) @@ -968,6 +952,7 @@ subroutine input_data call broadcast_scalar(albsnowi, master_task) call broadcast_scalar(ahmax, master_task) call broadcast_scalar(atmbndy, master_task) + call broadcast_scalar(default_season, master_task) call broadcast_scalar(fyear_init, master_task) call broadcast_scalar(ycycle, master_task) call broadcast_scalar(atm_data_format, master_task) @@ -1021,10 +1006,6 @@ subroutine input_data call broadcast_scalar(restart_FY, master_task) call broadcast_scalar(tr_lvl, master_task) call broadcast_scalar(restart_lvl, master_task) -#ifdef UNDEPRECATE_CESMPONDS - call broadcast_scalar(tr_pond_cesm, master_task) - call broadcast_scalar(restart_pond_cesm, master_task) -#endif call broadcast_scalar(tr_pond_lvl, master_task) call broadcast_scalar(restart_pond_lvl, master_task) call broadcast_scalar(tr_pond_topo, master_task) @@ -1117,9 +1098,6 @@ subroutine input_data restart_age = .false. restart_fy = .false. restart_lvl = .false. -#ifdef UNDEPRECATE_CESMPONDS - restart_pond_cesm = .false. -#endif restart_pond_lvl = .false. restart_pond_topo = .false. restart_snow = .false. @@ -1236,29 +1214,15 @@ subroutine input_data endif endif -#ifdef UNDEPRECATE_CESMPONDS - rpcesm = 0 -#endif rplvl = 0 rptopo = 0 -#ifdef UNDEPRECATE_CESMPONDS - if (tr_pond_cesm) rpcesm = 1 -#endif if (tr_pond_lvl ) rplvl = 1 if (tr_pond_topo) rptopo = 1 tr_pond = .false. ! explicit melt ponds -#ifdef UNDEPRECATE_CESMPONDS - if (rpcesm + rplvl + rptopo > 0) tr_pond = .true. -#else if (rplvl + rptopo > 0) tr_pond = .true. -#endif -#ifdef UNDEPRECATE_CESMPONDS - if (rpcesm + rplvl + rptopo > 1) then -#else if (rplvl + rptopo > 1) then -#endif if (my_task == master_task) then write(nu_diag,*) subname//' ERROR: Must use only one melt pond scheme' endif @@ -1482,13 +1446,6 @@ subroutine input_data abort_list = trim(abort_list)//":16" endif -#ifdef UNDEPRECATE_CESMPONDS - if (tr_pond_cesm) then - if (my_task == master_task) write(nu_diag,*) subname//' ERROR: formdrag=T and frzpnd=cesm' - abort_list = trim(abort_list)//":17" - endif -#endif - if (.not. tr_lvl) then if (my_task == master_task) write(nu_diag,*) subname//' ERROR: formdrag=T and tr_lvl=F' abort_list = trim(abort_list)//":18" @@ -1570,7 +1527,12 @@ subroutine input_data wave_spec = .false. if (tr_fsd .and. (trim(wave_spec_type) /= 'none')) wave_spec = .true. - + if (tr_fsd .and. 
(trim(wave_spec_type) == 'none')) then + if (my_task == master_task) then + write(nu_diag,*) subname//' WARNING: tr_fsd=T but wave_spec=F - not recommended' + endif + end if + ! compute grid locations for thermo, u and v fields grid_ice_thrm = 'T' @@ -1907,10 +1869,6 @@ subroutine input_data tmpstr2 = ' : Bitz and Lipscomb 1999 thermo' elseif (ktherm == 2) then tmpstr2 = ' : mushy-layer thermo' -#ifdef UNDEPRECATE_0LAYER - elseif (ktherm == 0) then - tmpstr2 = ' : zero-layer thermo' -#endif elseif (ktherm < 0) then tmpstr2 = ' : Thermodynamics disabled' else @@ -2062,18 +2020,18 @@ subroutine input_data if (wave_spec) then tmpstr2 = ' : use wave spectrum for floe size distribution' else - tmpstr2 = ' : floe size distribution does not use wave spectrum' + tmpstr2 = 'WARNING : floe size distribution does not use wave spectrum' endif write(nu_diag,1010) ' wave_spec = ', wave_spec,trim(tmpstr2) if (wave_spec) then if (trim(wave_spec_type) == 'none') then tmpstr2 = ' : no wave data provided, no wave-ice interactions' elseif (trim(wave_spec_type) == 'profile') then - tmpstr2 = ' : use fixed dummy wave spectrum for testing' + tmpstr2 = ' : use fixed dummy wave spectrum for testing, sea surface height generated using constant phase (1 iteration of wave fracture)' elseif (trim(wave_spec_type) == 'constant') then - tmpstr2 = ' : constant wave spectrum data file provided for testing' + tmpstr2 = ' : wave spectrum data file provided, sea surface height generated using constant phase (1 iteration of wave fracture)' elseif (trim(wave_spec_type) == 'random') then - tmpstr2 = ' : wave data file provided, spectrum generated using random number' + tmpstr2 = ' : wave spectrum data file provided, sea surface height generated using random number (multiple iterations of wave fracture to convergence)' else tmpstr2 = ' : unknown value' endif @@ -2091,14 +2049,7 @@ subroutine input_data write(nu_diag,*) ' ' write(nu_diag,*) ' Melt ponds' write(nu_diag,*) '--------------------------------' -#ifdef UNDEPRECATE_CESMPONDS - if (tr_pond_cesm) then - write(nu_diag,1010) ' tr_pond_cesm = ', tr_pond_cesm,' : CESM pond formulation' - write(nu_diag,1002) ' pndaspect = ', pndaspect,' : ratio of pond depth to area fraction' - elseif (tr_pond_lvl) then -#else if (tr_pond_lvl) then -#endif write(nu_diag,1010) ' tr_pond_lvl = ', tr_pond_lvl,' : level-ice pond formulation' write(nu_diag,1002) ' pndaspect = ', pndaspect,' : ratio of pond depth to area fraction' write(nu_diag,1000) ' dpscale = ', dpscale,' : time scale for flushing in permeable ice' @@ -2211,9 +2162,6 @@ subroutine input_data if (tr_lvl) write(nu_diag,1010) ' tr_lvl = ', tr_lvl,' : ridging related tracers' if (tr_pond_lvl) write(nu_diag,1010) ' tr_pond_lvl = ', tr_pond_lvl,' : level-ice pond formulation' if (tr_pond_topo) write(nu_diag,1010) ' tr_pond_topo = ', tr_pond_topo,' : topo pond formulation' -#ifdef UNDEPRECATE_CESMPONDS - if (tr_pond_cesm) write(nu_diag,1010) ' tr_pond_cesm = ', tr_pond_cesm,' : CESM pond formulation' -#endif if (tr_snow) write(nu_diag,1010) ' tr_snow = ', tr_snow,' : advanced snow physics' if (tr_iage) write(nu_diag,1010) ' tr_iage = ', tr_iage,' : chronological ice age' if (tr_FY) write(nu_diag,1010) ' tr_FY = ', tr_FY,' : first-year ice area' @@ -2338,9 +2286,6 @@ subroutine input_data write(nu_diag,1011) ' restart_age = ', restart_age write(nu_diag,1011) ' restart_FY = ', restart_FY write(nu_diag,1011) ' restart_lvl = ', restart_lvl -#ifdef UNDEPRECATE_CESMPONDS - write(nu_diag,1011) ' restart_pond_cesm= ', restart_pond_cesm -#endif 
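
Returning to the rectangular-grid scale factors introduced near the top of input_data (scale_dxdy with dxscale and dyscale defaulting to 1.0, e.g. 1.02): a factor of 1.02 means each successive cell spacing grows by 2%, so the spacings form a geometric progression. A standalone sketch of that idea — this illustrates geometric stretching from dxrect only; the exact rectgrid construction is not shown in this patch:

    program rectgrid_stretch_demo
      implicit none
      integer, parameter :: dbl_kind = selected_real_kind(13)
      integer :: i, nx
      real (kind=dbl_kind) :: dxrect, dxscale, width
      dxrect  = 16.0e5_dbl_kind   ! first cell width (cm), as in the namelist
      dxscale = 1.02_dbl_kind     ! each cell 2% wider than its neighbor
      nx      = 100
      width   = 0.0_dbl_kind
      do i = 1, nx
         width = width + dxrect*dxscale**(i-1)
      enddo
      ! matches the closed-form geometric series
      print *, width, dxrect*(dxscale**nx - 1.0_dbl_kind)/(dxscale - 1.0_dbl_kind)
    end program rectgrid_stretch_demo
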
write(nu_diag,1011) ' restart_pond_lvl = ', restart_pond_lvl write(nu_diag,1011) ' restart_pond_topo= ', restart_pond_topo write(nu_diag,1011) ' restart_snow = ', restart_snow @@ -2439,11 +2384,7 @@ subroutine input_data call icepack_init_tracer_flags(tr_iage_in=tr_iage, tr_FY_in=tr_FY, & tr_lvl_in=tr_lvl, tr_iso_in=tr_iso, tr_aero_in=tr_aero, & tr_fsd_in=tr_fsd, tr_snow_in=tr_snow, tr_pond_in=tr_pond, & -#ifdef UNDEPRECATE_CESMPONDS - tr_pond_cesm_in=tr_pond_cesm, tr_pond_lvl_in=tr_pond_lvl, tr_pond_topo_in=tr_pond_topo) -#else tr_pond_lvl_in=tr_pond_lvl, tr_pond_topo_in=tr_pond_topo) -#endif call icepack_init_tracer_sizes(ncat_in=ncat, nilyr_in=nilyr, nslyr_in=nslyr, nblyr_in=nblyr, & nfsd_in=nfsd, n_algae_in=n_algae, n_iso_in=n_iso, n_aero_in=n_aero, & n_DOC_in=n_DOC, n_DON_in=n_DON, & @@ -2500,18 +2441,10 @@ subroutine init_state it , & ! tracer index iblk ! block index -#ifdef UNDEPRECATE_0LAYER - logical (kind=log_kind) :: & - heat_capacity ! from icepack -#endif integer (kind=int_kind) :: ntrcr logical (kind=log_kind) :: tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero -#ifdef UNDEPRECATE_CESMPONDS - logical (kind=log_kind) :: tr_pond_cesm, tr_pond_lvl, tr_pond_topo -#else logical (kind=log_kind) :: tr_pond_lvl, tr_pond_topo -#endif logical (kind=log_kind) :: tr_snow, tr_fsd integer (kind=int_kind) :: nt_Tsfc, nt_sice, nt_qice, nt_qsno, nt_iage, nt_FY integer (kind=int_kind) :: nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd @@ -2525,17 +2458,10 @@ subroutine init_state !----------------------------------------------------------------- -#ifdef UNDEPRECATE_0LAYER - call icepack_query_parameters(heat_capacity_out=heat_capacity) -#endif call icepack_query_tracer_sizes(ntrcr_out=ntrcr) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & tr_lvl_out=tr_lvl, tr_iso_out=tr_iso, tr_aero_out=tr_aero, & -#ifdef UNDEPRECATE_CESMPONDS - tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, tr_pond_topo_out=tr_pond_topo, & -#else tr_pond_lvl_out=tr_pond_lvl, tr_pond_topo_out=tr_pond_topo, & -#endif tr_snow_out=tr_snow, tr_fsd_out=tr_fsd) call icepack_query_tracer_indices(nt_Tsfc_out=nt_Tsfc, nt_sice_out=nt_sice, & nt_qice_out=nt_qice, nt_qsno_out=nt_qsno, nt_iage_out=nt_iage, nt_fy_out=nt_fy, & @@ -2569,25 +2495,6 @@ subroutine init_state file=__FILE__, line=__LINE__) endif -#ifdef UNDEPRECATE_0LAYER - if (.not.heat_capacity) then - - if (nilyr > 1) then - write(nu_diag,*) subname//' ERROR: Must have nilyr = 1 if heat_capacity=F' - write(nu_diag,*) subname//' ERROR: nilyr =', nilyr - call abort_ice(error_message=subname//' Too many ice layers', & - file=__FILE__, line=__LINE__) - endif - - if (nslyr > 1) then - write(nu_diag,*) subname//' ERROR: Must have nslyr = 1 if heat_capacity=F' - write(nu_diag,*) subname//' ERROR: nslyr =', nslyr - call abort_ice(error_message=subname//' Too many snow layers', & - file=__FILE__, line=__LINE__) - endif - - endif ! heat_capacity = F -#endif endif ! my_task !----------------------------------------------------------------- @@ -2606,12 +2513,6 @@ subroutine init_state if (tr_FY) trcr_depend(nt_FY) = 0 ! area-weighted first-year ice area if (tr_lvl) trcr_depend(nt_alvl) = 0 ! level ice area if (tr_lvl) trcr_depend(nt_vlvl) = 1 ! level ice volume -#ifdef UNDEPRECATE_CESMPONDS - if (tr_pond_cesm) then - trcr_depend(nt_apnd) = 0 ! melt pond area - trcr_depend(nt_hpnd) = 2+nt_apnd ! melt pond depth - endif -#endif if (tr_pond_lvl) then trcr_depend(nt_apnd) = 2+nt_alvl ! melt pond area trcr_depend(nt_hpnd) = 2+nt_apnd ! 
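
The trcr_depend values assigned above encode what each tracer rides on: 0 for area tracers (weighted by aicen), 1 for ice-volume tracers (vicen), and 2+nt for a tracer carried on another tracer nt — so pond depth (2+nt_apnd) is conserved as a product down the chain aicen*apnd*hpnd. A standalone sketch of decoding one such dependency; the indices and values are hypothetical, and the real transport logic also handles the strata set up just below:

    program trcr_depend_demo
      implicit none
      integer, parameter :: dbl_kind = selected_real_kind(13)
      integer :: nt_apnd, depend
      real (kind=dbl_kind) :: aicen, apnd, weight
      nt_apnd = 7                    ! hypothetical tracer index
      depend  = 2 + nt_apnd          ! pond depth depends on pond area
      aicen   = 0.8_dbl_kind
      apnd    = 0.3_dbl_kind         ! pond area fraction tracer value
      weight  = 0.0_dbl_kind
      if (depend > 2) then
         weight = aicen*apnd         ! weight chain for the dependent tracer
      endif
      print *, 'parent tracer index:', depend - 2, '  weight:', weight
    end program trcr_depend_demo
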
melt pond depth @@ -2673,12 +2574,6 @@ subroutine init_state nt_strata (it,2) = 0 enddo -#ifdef UNDEPRECATE_CESMPONDS - if (tr_pond_cesm) then - n_trcr_strata(nt_hpnd) = 1 ! melt pond depth - nt_strata (nt_hpnd,1) = nt_apnd ! on melt pond area - endif -#endif if (tr_pond_lvl) then n_trcr_strata(nt_apnd) = 1 ! melt pond area nt_strata (nt_apnd,1) = nt_alvl ! on level ice area diff --git a/cicecore/cicedynB/general/ice_state.F90 b/cicecore/cicedynB/general/ice_state.F90 index 4a3f83ce2..862f0a8bc 100644 --- a/cicecore/cicedynB/general/ice_state.F90 +++ b/cicecore/cicedynB/general/ice_state.F90 @@ -54,7 +54,8 @@ module ice_state real (kind=dbl_kind), dimension(:,:,:), allocatable, & public :: & - aice , & ! concentration of ice + aice , & ! concentration of ice on T grid + aiU , & ! concentration of ice on U grid vice , & ! volume per unit area of ice (m) vsno ! volume per unit area of snow (m) @@ -149,7 +150,8 @@ subroutine alloc_state file=__FILE__, line=__LINE__) allocate ( & - aice (nx_block,ny_block,max_blocks) , & ! concentration of ice + aice (nx_block,ny_block,max_blocks) , & ! concentration of ice T grid + aiU (nx_block,ny_block,max_blocks) , & ! concentration of ice U grid vice (nx_block,ny_block,max_blocks) , & ! volume per unit area of ice (m) vsno (nx_block,ny_block,max_blocks) , & ! volume per unit area of snow (m) aice0 (nx_block,ny_block,max_blocks) , & ! concentration of open water @@ -188,6 +190,10 @@ subroutine alloc_state trcr_base = c0 aicen = c0 aicen_init = c0 + vicen = c0 + vicen_init = c0 + vsnon = c0 + vsnon_init = c0 end subroutine alloc_state diff --git a/cicecore/cicedynB/general/ice_step_mod.F90 b/cicecore/cicedynB/general/ice_step_mod.F90 index b6f8741c0..8dd6fe49a 100644 --- a/cicecore/cicedynB/general/ice_step_mod.F90 +++ b/cicecore/cicedynB/general/ice_step_mod.F90 @@ -12,7 +12,14 @@ module ice_step_mod use ice_kinds_mod + use ice_blocks, only: block, get_block + use ice_blocks, only: nx_block, ny_block use ice_constants, only: c0, c1, c1000, c4, p25 + use ice_constants, only: field_loc_center, field_loc_NEcorner, & + field_loc_Nface, field_loc_Eface, & + field_type_scalar, field_type_vector + use ice_domain, only: halo_info, nblocks, blocks_ice + use ice_domain_size, only: max_blocks use ice_exit, only: abort_ice use ice_fileunits, only: nu_diag use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted @@ -39,6 +46,10 @@ module ice_step_mod step_snow, prep_radiation, step_radiation, ocean_mixed_layer, & update_state, biogeochemistry, step_dyn_wave, step_prep + real (kind=dbl_kind), dimension (:,:,:), allocatable :: & + uvelT_icep, & ! uvel for wind stress computation in icepack + vvelT_icep ! vvel for wind stress computation in icepack + !======================================================================= contains @@ -73,6 +84,13 @@ subroutine step_prep use ice_flux, only: uatm, vatm, uatmT, vatmT use ice_grid, only: grid_atm_dynu, grid_atm_dynv, grid_average_X2Y + use ice_state, only: uvel, vvel + + logical (kind=log_kind) :: & + highfreq ! highfreq flag + + logical (kind=log_kind), save :: & + first_call = .true. ! first call flag character(len=*), parameter :: subname = '(step_prep)' @@ -85,6 +103,26 @@ subroutine step_prep call grid_average_X2Y('S',uatm,grid_atm_dynu,uatmT,'T') call grid_average_X2Y('S',vatm,grid_atm_dynv,vatmT,'T') + !----------------------------------------------------------------- + ! 
Compute uvelT_icep, vvelT_icep + !----------------------------------------------------------------- + + if (first_call) then + allocate(uvelT_icep(nx_block,ny_block,max_blocks)) + allocate(vvelT_icep(nx_block,ny_block,max_blocks)) + uvelT_icep = c0 + vvelT_icep = c0 + endif + + call icepack_query_parameters(highfreq_out=highfreq) + + if (highfreq) then + call grid_average_X2Y('A', uvel, 'U', uvelT_icep, 'T') + call grid_average_X2Y('A', vvel, 'U', vvelT_icep, 'T') + endif + + first_call = .false. + end subroutine step_prep !======================================================================= @@ -95,8 +133,6 @@ end subroutine step_prep subroutine prep_radiation (iblk) - use ice_blocks, only: block, get_block - use ice_domain, only: blocks_ice use ice_domain_size, only: ncat, nilyr, nslyr use ice_flux, only: scale_factor, swvdr, swvdf, swidr, swidf, & alvdr_ai, alvdf_ai, alidr_ai, alidf_ai, & @@ -188,14 +224,9 @@ subroutine step_therm1 (dt, iblk) hfreebd, hdraft, hridge, distrdg, hkeel, dkeel, lfloe, dfloe, & fswsfcn, fswintn, Sswabsn, Iswabsn, meltsliqn, meltsliq, & fswthrun, fswthrun_vdr, fswthrun_vdf, fswthrun_idr, fswthrun_idf - use ice_blocks, only: block, get_block -#ifdef CICE_IN_NEMO - use ice_blocks, only: nx_block, ny_block -#endif use ice_calendar, only: yday - use ice_domain, only: blocks_ice use ice_domain_size, only: ncat, nilyr, nslyr, n_iso, n_aero - use ice_flux, only: frzmlt, sst, Tf, strocnxT, strocnyT, rside, fbot, Tbot, Tsnice, & + use ice_flux, only: frzmlt, sst, Tf, strocnxT_iavg, strocnyT_iavg, rside, fbot, Tbot, Tsnice, & meltsn, melttn, meltbn, congeln, snoicen, uatmT, vatmT, fside, & wind, rhoa, potT, Qa, zlvl, zlvs, strax, stray, flatn, fsensn, fsurfn, fcondtopn, & flw, fsnow, fpond, sss, mlt_onset, frz_onset, fcondbotn, fcondbot, fsloss, & @@ -209,7 +240,7 @@ subroutine step_therm1 (dt, iblk) Qa_iso, Qref_iso, fiso_evap, HDO_ocn, H2_16O_ocn, H2_18O_ocn use ice_grid, only: lmask_n, lmask_s, tmask use ice_state, only: aice, aicen, aicen_init, vicen_init, & - vice, vicen, vsno, vsnon, trcrn, uvel, vvel, vsnon_init + vice, vicen, vsno, vsnon, trcrn, vsnon_init #ifdef CICE_IN_NEMO use ice_state, only: aice_init #endif @@ -243,16 +274,10 @@ subroutine step_therm1 (dt, iblk) nt_isosno, nt_isoice, nt_rsnw, nt_smice, nt_smliq logical (kind=log_kind) :: & -#ifdef UNDEPRECATE_CESMPONDS - tr_iage, tr_FY, tr_iso, tr_aero, tr_pond, tr_pond_cesm, & -#else tr_iage, tr_FY, tr_iso, tr_aero, tr_pond, & -#endif tr_pond_lvl, tr_pond_topo, calc_Tsfc, highfreq, tr_snow real (kind=dbl_kind) :: & - uvel_center, & ! cell-centered velocity, x component (m/s) - vvel_center, & ! cell-centered velocity, y component (m/s) puny ! a very small number real (kind=dbl_kind), dimension(n_aero,2,ncat) :: & @@ -275,11 +300,7 @@ subroutine step_therm1 (dt, iblk) call icepack_query_tracer_sizes(ntrcr_out=ntrcr) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_iso_out=tr_iso, & -#ifdef UNDEPRECATE_CESMPONDS - tr_aero_out=tr_aero, tr_pond_out=tr_pond, tr_pond_cesm_out=tr_pond_cesm, & -#else tr_aero_out=tr_aero, tr_pond_out=tr_pond, & -#endif tr_pond_lvl_out=tr_pond_lvl, tr_pond_topo_out=tr_pond_topo, & tr_snow_out=tr_snow) call icepack_query_tracer_indices( & @@ -336,16 +357,6 @@ subroutine step_therm1 (dt, iblk) do j = jlo, jhi do i = ilo, ihi - if (highfreq) then ! include ice velocity in calculation of wind stress - uvel_center = p25*(uvel(i,j ,iblk) + uvel(i-1,j ,iblk) & ! cell-centered velocity - + uvel(i,j-1,iblk) + uvel(i-1,j-1,iblk)) ! 
assumes wind components - vvel_center = p25*(vvel(i,j ,iblk) + vvel(i-1,j ,iblk) & ! are also cell-centered - + vvel(i,j-1,iblk) + vvel(i-1,j-1,iblk)) - else - uvel_center = c0 ! not used - vvel_center = c0 - endif ! highfreq - if (tr_snow) then do n = 1, ncat do k = 1, nslyr @@ -391,8 +402,8 @@ subroutine step_therm1 (dt, iblk) vicen = vicen (i,j,:,iblk), & vsno = vsno (i,j, iblk), & vsnon = vsnon (i,j,:,iblk), & - uvel = uvel_center , & - vvel = vvel_center , & + uvel = uvelT_icep (i,j, iblk), & + vvel = vvelT_icep (i,j, iblk), & Tsfc = trcrn (i,j,nt_Tsfc,:,iblk), & zqsn = trcrn (i,j,nt_qsno:nt_qsno+nslyr-1,:,iblk), & zqin = trcrn (i,j,nt_qice:nt_qice+nilyr-1,:,iblk), & @@ -450,8 +461,8 @@ subroutine step_therm1 (dt, iblk) sst = sst (i,j, iblk), & sss = sss (i,j, iblk), & Tf = Tf (i,j, iblk), & - strocnxT = strocnxT (i,j, iblk), & - strocnyT = strocnyT (i,j, iblk), & + strocnxT = strocnxT_iavg(i,j, iblk), & + strocnyT = strocnyT_iavg(i,j, iblk), & fbot = fbot (i,j, iblk), & Tbot = Tbot (i,j, iblk), & Tsnice = Tsnice (i,j, iblk), & @@ -604,9 +615,7 @@ subroutine step_therm2 (dt, iblk) wave_spectrum, wavefreq, dwavefreq, & first_ice, bgrid, cgrid, igrid, floe_rad_c, floe_binwidth, & d_afsd_latg, d_afsd_newi, d_afsd_latm, d_afsd_weld - use ice_blocks, only: block, get_block use ice_calendar, only: yday - use ice_domain, only: blocks_ice use ice_domain_size, only: ncat, nilyr, nslyr, nblyr, nfsd use ice_flux, only: fresh, frain, fpond, frzmlt, frazil, frz_onset, & update_ocn_f, fsalt, Tf, sss, salinz, fhocn, rside, fside, & @@ -746,8 +755,6 @@ end subroutine step_therm2 subroutine update_state (dt, daidt, dvidt, dagedt, offset) - use ice_blocks, only: nx_block, ny_block - use ice_domain, only: nblocks use ice_domain_size, only: ncat ! use ice_grid, only: tmask use ice_state, only: aicen, trcrn, vicen, vsnon, & @@ -864,8 +871,6 @@ subroutine step_dyn_wave (dt) use ice_arrays_column, only: wave_spectrum, & d_afsd_wave, floe_rad_l, floe_rad_c, wavefreq, dwavefreq - use ice_blocks, only: block, get_block - use ice_domain, only: blocks_ice, nblocks use ice_domain_size, only: ncat, nfsd, nfreq use ice_state, only: trcrn, aicen, aice, vice use ice_timers, only: ice_timer_start, ice_timer_stop, timer_column, & @@ -939,16 +944,34 @@ end subroutine step_dyn_wave subroutine step_dyn_horiz (dt) + use ice_boundary, only: ice_HaloUpdate use ice_dyn_evp, only: evp use ice_dyn_eap, only: eap use ice_dyn_vp, only: implicit_solver use ice_dyn_shared, only: kdyn + use ice_flux, only: strocnxU, strocnyU, strocnxT_iavg, strocnyT_iavg use ice_flux, only: init_history_dyn + use ice_grid, only: grid_average_X2Y + use ice_state, only: aiU use ice_transport_driver, only: advection, transport_upwind, transport_remap real (kind=dbl_kind), intent(in) :: & dt ! dynamics time step + ! local variables + + type (block) :: & + this_block ! block information for current block + + integer (kind=int_kind) :: & + ilo,ihi,jlo,jhi, & ! beginning and end of physical domain + iblk, & ! block index + i, j ! horizontal indices + + real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & + work1, & ! temporary + work2 ! temporary + character(len=*), parameter :: subname = '(step_dyn_horiz)' call init_history_dyn ! initialize dynamic history variables @@ -961,6 +984,38 @@ subroutine step_dyn_horiz (dt) if (kdyn == 2) call eap (dt) if (kdyn == 3) call implicit_solver (dt) + !----------------------------------------------------------------- + ! 
Compute strocnxT_iavg, strocnyT_iavg for thermo and coupling
+      !-----------------------------------------------------------------
+
+      ! strocn is computed on U, N, E as needed. Map strocnU, divided by aiU, to T;
+      ! conservation requires the division by aiU to happen before the averaging
+      work1 = c0
+      work2 = c0
+      !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block)
+      do iblk = 1, nblocks
+         this_block = get_block(blocks_ice(iblk), iblk)
+         ilo = this_block%ilo
+         ihi = this_block%ihi
+         jlo = this_block%jlo
+         jhi = this_block%jhi
+         do j = jlo, jhi
+         do i = ilo, ihi
+            if (aiU(i,j,iblk) /= c0) then
+               work1(i,j,iblk) = strocnxU(i,j,iblk)/aiU(i,j,iblk)
+               work2(i,j,iblk) = strocnyU(i,j,iblk)/aiU(i,j,iblk)
+            endif
+         enddo
+         enddo
+      enddo
+      !$OMP END PARALLEL DO
+      call ice_HaloUpdate (work1, halo_info, &
+                           field_loc_NEcorner, field_type_vector)
+      call ice_HaloUpdate (work2, halo_info, &
+                           field_loc_NEcorner, field_type_vector)
+      call grid_average_X2Y('F', work1, 'U', strocnxT_iavg, 'T') ! shift
+      call grid_average_X2Y('F', work2, 'U', strocnyT_iavg, 'T')
+
       !-----------------------------------------------------------------
       ! Horizontal ice transport
       !-----------------------------------------------------------------
@@ -983,8 +1038,6 @@ end subroutine step_dyn_horiz

 subroutine step_dyn_ridge (dt, ndtd, iblk)

       use ice_arrays_column, only: hin_max, fzsal, first_ice
-      use ice_blocks, only: block, get_block
-      use ice_domain, only: blocks_ice
       use ice_domain_size, only: ncat, nilyr, nslyr, n_aero, nblyr
       use ice_flux, only: &
          rdg_conv, rdg_shear, dardg1dt, dardg2dt,
@@ -1107,9 +1160,7 @@ end subroutine step_dyn_ridge

 subroutine step_snow (dt, iblk)

-      use ice_blocks, only: block, get_block
       use ice_calendar, only: nstreams
-      use ice_domain, only: blocks_ice
       use ice_domain_size, only: ncat, nslyr, nilyr
       use ice_flux, only: snwcnt, wind, fresh, fhocn, fsloss, fsnow
       use ice_state, only: trcrn, vsno, vsnon, vicen, aicen, aice
@@ -1225,9 +1276,7 @@ subroutine step_radiation (dt, iblk)

       use ice_arrays_column, only: &
          alvdrn, alidrn, alvdfn, alidfn, apeffn, trcrn_sw, snowfracn, &
          kaer_tab, waer_tab, gaer_tab, kaer_bc_tab, waer_bc_tab, &
          gaer_bc_tab, bcenh, swgrid, igrid
-      use ice_blocks, only: block, get_block
       use ice_calendar, only: calendar_type, days_per_year, nextsw_cday, yday, msec
-      use ice_domain, only: blocks_ice
       use ice_domain_size, only: ncat, n_aero, nilyr, nslyr, n_zaero, n_algae, nblyr
       use ice_flux, only: swvdr, swvdf, swidr, swidf, coszen, fsnow
       use ice_grid, only: TLAT, TLON, tmask
@@ -1415,7 +1464,6 @@ end subroutine step_radiation

 subroutine ocean_mixed_layer (dt, iblk)

       use ice_arrays_column, only: Cdn_atm, Cdn_atm_ratio
-      use ice_blocks, only: nx_block, ny_block
       use ice_flux, only: sst, Tf, Qa, uatmT, vatmT, wind, potT, rhoa, zlvl, &
          frzmlt, fhocn, fswthru, flw, flwout_ocn, fsens_ocn, flat_ocn, evap_ocn, &
          alvdr_ocn, alidr_ocn, alvdf_ocn, alidf_ocn, swidf, swvdf, swidr, swvdr, &
@@ -1572,8 +1620,6 @@ subroutine biogeochemistry (dt, iblk)

       use ice_arrays_column, only: upNO, upNH, iDi, iki, zfswin, &
          trcrn_sw, zsal_tot, darcy_V, grow_net, &
          PP_net, hbri,dhbr_bot, dhbr_top, Zoo, &
          fbio_snoice, fbio_atmice, ocean_bio, &
          first_ice, fswpenln, bphi, bTiz, ice_bio_net, &
          snow_bio_net, fswthrun, Rayleigh_criteria, &
          ocean_bio_all, sice_rho, fzsal, fzsal_g, &
          bgrid, igrid, icgrid, cgrid
-      use ice_blocks, only: block, get_block
-      use ice_domain, only: blocks_ice
       use ice_domain_size, only: nblyr, nilyr, nslyr, n_algae, n_zaero, ncat, &
         n_doc, n_dic, n_don, n_fed, n_fep
       use ice_flux, only: meltbn, melttn, congeln, snoicen, &
diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90 b/cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90
index 2b64f8932..9fda67dad 100644
--- a/cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90
+++ 
b/cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90 @@ -81,6 +81,7 @@ module ice_boundary module procedure ice_HaloUpdate2DR8, & ice_HaloUpdate2DR4, & ice_HaloUpdate2DI4, & + ice_HaloUpdate2DL1, & ice_HaloUpdate3DR8, & ice_HaloUpdate3DR4, & ice_HaloUpdate3DI4, & @@ -2384,6 +2385,69 @@ subroutine ice_HaloUpdate2DI4(array, halo, & end subroutine ice_HaloUpdate2DI4 +!*********************************************************************** + + subroutine ice_HaloUpdate2DL1(array, halo, & + fieldLoc, fieldKind, & + fillValue) + +! This routine updates ghost cells for an input array and is a +! member of a group of routines under the generic interface +! ice\_HaloUpdate. This routine is the specific interface +! for 2d horizontal logical arrays. + + type (ice_halo), intent(in) :: & + halo ! precomputed halo structure containing all + ! information needed for halo update + + integer (int_kind), intent(in) :: & + fieldKind, &! id for type of field (scalar, vector, angle) + fieldLoc ! id for location on horizontal grid + ! (center, NEcorner, Nface, Eface) + + integer (int_kind), intent(in), optional :: & + fillValue ! optional value to put in ghost cells + ! where neighbor points are unknown + ! (e.g. eliminated land blocks or + ! closed boundaries) + + logical (log_kind), dimension(:,:,:), intent(inout) :: & + array ! array containing field for which halo + ! needs to be updated + +!----------------------------------------------------------------------- +! +! local variables +! +!----------------------------------------------------------------------- + + integer (int_kind), dimension(:,:,:), allocatable :: & + iarray ! integer array for logical + + character(len=*), parameter :: subname = '(ice_HaloUpdate2DL1)' + +!----------------------------------------------------------------------- +! +! copy logical into integer array and call haloupdate on integer array +! +!----------------------------------------------------------------------- + + allocate(iarray(size(array,dim=1),size(array,dim=2),size(array,dim=3))) + iarray(:,:,:) = 0 + where (array) iarray = 1 + + call ice_HaloUpdate(iarray, halo, & + fieldLoc, fieldKind, & + fillValue) + + array = .false. + where (iarray /= 0) array = .true. + deallocate(iarray) + +!----------------------------------------------------------------------- + + end subroutine ice_HaloUpdate2DL1 + !*********************************************************************** subroutine ice_HaloUpdate3DR8(array, halo, & diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_global_reductions.F90 b/cicecore/cicedynB/infrastructure/comm/mpi/ice_global_reductions.F90 index a5fed760b..4b94389f7 100644 --- a/cicecore/cicedynB/infrastructure/comm/mpi/ice_global_reductions.F90 +++ b/cicecore/cicedynB/infrastructure/comm/mpi/ice_global_reductions.F90 @@ -36,7 +36,6 @@ module ice_global_reductions private public :: global_sum, & - global_allreduce_sum, & global_sum_prod, & global_maxval, & global_minval @@ -56,12 +55,6 @@ module ice_global_reductions global_sum_scalar_int end interface - interface global_allreduce_sum - module procedure global_allreduce_sum_vector_dbl!, & - ! module procedure global_allreduce_sum_vector_real, & ! not yet implemented - ! module procedure global_allreduce_sum_vector_int ! 
not yet implemented - end interface - interface global_sum_prod module procedure global_sum_prod_dbl, & global_sum_prod_real, & @@ -707,68 +700,6 @@ function global_sum_scalar_int(scalar, dist) & end function global_sum_scalar_int -!*********************************************************************** - - function global_allreduce_sum_vector_dbl(vector, dist) & - result(globalSums) - -! Computes the global sums of sets of scalars (elements of 'vector') -! distributed across a parallel machine. -! -! This is actually the specific interface for the generic global_allreduce_sum -! function corresponding to double precision vectors. The generic -! interface is identical but will handle real and integer vectors. - - real (dbl_kind), dimension(:), intent(in) :: & - vector ! vector whose components are to be summed - - type (distrb), intent(in) :: & - dist ! block distribution - - real (dbl_kind), dimension(size(vector)) :: & - globalSums ! resulting array of global sums - -!----------------------------------------------------------------------- -! -! local variables -! -!----------------------------------------------------------------------- - - integer (int_kind) :: & - numProcs, &! number of processor participating - numBlocks, &! number of local blocks - communicator, &! communicator for this distribution - numElem ! number of elements in vector - - real (dbl_kind), dimension(:,:), allocatable :: & - work ! temporary local array - - character(len=*), parameter :: subname = '(global_allreduce_sum_vector_dbl)' - -!----------------------------------------------------------------------- -! -! get communicator for MPI calls -! -!----------------------------------------------------------------------- - - call ice_distributionGet(dist, & - numLocalBlocks = numBlocks, & - nprocs = numProcs, & - communicator = communicator) - - numElem = size(vector) - allocate(work(1,numElem)) - work(1,:) = vector - globalSums = c0 - - call compute_sums_dbl(work,globalSums,communicator,numProcs) - - deallocate(work) - -!----------------------------------------------------------------------- - - end function global_allreduce_sum_vector_dbl - !*********************************************************************** function global_sum_prod_dbl (array1, array2, dist, field_loc, & diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 b/cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 index cafe4dc05..f10a9f432 100644 --- a/cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 +++ b/cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 @@ -68,6 +68,7 @@ module ice_boundary module procedure ice_HaloUpdate2DR8, & ice_HaloUpdate2DR4, & ice_HaloUpdate2DI4, & + ice_HaloUpdate2DL1, & ice_HaloUpdate3DR8, & ice_HaloUpdate3DR4, & ice_HaloUpdate3DI4, & @@ -1541,6 +1542,69 @@ subroutine ice_HaloUpdate2DI4(array, halo, & end subroutine ice_HaloUpdate2DI4 +!*********************************************************************** + + subroutine ice_HaloUpdate2DL1(array, halo, & + fieldLoc, fieldKind, & + fillValue) + +! This routine updates ghost cells for an input array and is a +! member of a group of routines under the generic interface +! ice\_HaloUpdate. This routine is the specific interface +! for 2d horizontal logical arrays. + + type (ice_halo), intent(in) :: & + halo ! precomputed halo structure containing all + ! information needed for halo update + + integer (int_kind), intent(in) :: & + fieldKind, &! id for type of field (scalar, vector, angle) + fieldLoc ! 
id for location on horizontal grid + ! (center, NEcorner, Nface, Eface) + + integer (int_kind), intent(in), optional :: & + fillValue ! optional value to put in ghost cells + ! where neighbor points are unknown + ! (e.g. eliminated land blocks or + ! closed boundaries) + + logical (log_kind), dimension(:,:,:), intent(inout) :: & + array ! array containing field for which halo + ! needs to be updated + +!----------------------------------------------------------------------- +! +! local variables +! +!----------------------------------------------------------------------- + + integer (int_kind), dimension(:,:,:), allocatable :: & + iarray ! integer array for logical + + character(len=*), parameter :: subname = '(ice_HaloUpdate2DL1)' + +!----------------------------------------------------------------------- +! +! copy logical into integer array and call haloupdate on integer array +! +!----------------------------------------------------------------------- + + allocate(iarray(size(array,dim=1),size(array,dim=2),size(array,dim=3))) + iarray(:,:,:) = 0 + where (array) iarray = 1 + + call ice_HaloUpdate(iarray, halo, & + fieldLoc, fieldKind, & + fillValue) + + array = .false. + where (iarray /= 0) array = .true. + deallocate(iarray) + +!----------------------------------------------------------------------- + + end subroutine ice_HaloUpdate2DL1 + !*********************************************************************** subroutine ice_HaloUpdate3DR8(array, halo, & diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_global_reductions.F90 b/cicecore/cicedynB/infrastructure/comm/serial/ice_global_reductions.F90 index e859ea2bd..5fcd45876 100644 --- a/cicecore/cicedynB/infrastructure/comm/serial/ice_global_reductions.F90 +++ b/cicecore/cicedynB/infrastructure/comm/serial/ice_global_reductions.F90 @@ -37,7 +37,6 @@ module ice_global_reductions private public :: global_sum, & - global_allreduce_sum, & global_sum_prod, & global_maxval, & global_minval @@ -57,12 +56,6 @@ module ice_global_reductions global_sum_scalar_int end interface - interface global_allreduce_sum - module procedure global_allreduce_sum_vector_dbl!, & - ! module procedure global_allreduce_sum_vector_real, & ! not yet implemented - ! module procedure global_allreduce_sum_vector_int ! not yet implemented - end interface - interface global_sum_prod module procedure global_sum_prod_dbl, & global_sum_prod_real, & @@ -708,68 +701,6 @@ function global_sum_scalar_int(scalar, dist) & end function global_sum_scalar_int -!*********************************************************************** - - function global_allreduce_sum_vector_dbl(vector, dist) & - result(globalSums) - -! Computes the global sums of sets of scalars (elements of 'vector') -! distributed across a parallel machine. -! -! This is actually the specific interface for the generic global_allreduce_sum -! function corresponding to double precision vectors. The generic -! interface is identical but will handle real and integer vectors. - - real (dbl_kind), dimension(:), intent(in) :: & - vector ! vector whose components are to be summed - - type (distrb), intent(in) :: & - dist ! block distribution - - real (dbl_kind), dimension(size(vector)) :: & - globalSums ! resulting array of global sums - -!----------------------------------------------------------------------- -! -! local variables -! -!----------------------------------------------------------------------- - - integer (int_kind) :: & - numProcs, &! number of processor participating - numBlocks, &! 
number of local blocks
-      communicator,      &! communicator for this distribution
-      numElem             ! number of elements in vector
-
-   real (dbl_kind), dimension(:,:), allocatable :: &
-      work                ! temporary local array
-
-   character(len=*), parameter :: subname = '(global_allreduce_sum_vector_dbl)'
-
-!-----------------------------------------------------------------------
-!
-!  get communicator for MPI calls
-!
-!-----------------------------------------------------------------------
-
-   call ice_distributionGet(dist, &
-        numLocalBlocks = numBlocks, &
-        nprocs = numProcs, &
-        communicator = communicator)
-
-   numElem = size(vector)
-   allocate(work(1,numElem))
-   work(1,:) = vector
-   globalSums = c0
-
-   call compute_sums_dbl(work,globalSums,communicator,numProcs)
-
-   deallocate(work)
-
-!-----------------------------------------------------------------------
-
- end function global_allreduce_sum_vector_dbl
-
 !***********************************************************************

 function global_sum_prod_dbl (array1, array2, dist, field_loc, &
diff --git a/cicecore/cicedynB/infrastructure/ice_domain.F90 b/cicecore/cicedynB/infrastructure/ice_domain.F90
index ac56356e5..10254aa93 100644
--- a/cicecore/cicedynB/infrastructure/ice_domain.F90
+++ b/cicecore/cicedynB/infrastructure/ice_domain.F90
@@ -234,6 +234,7 @@ subroutine init_domain_blocks
 #if (defined CESMCOUPLED)
          nprocs = get_num_procs()
 #else
+         write(nu_diag,*) subname,'ERROR: nprocs, get_num_procs = ',nprocs,get_num_procs()
          call abort_ice(subname//'ERROR: Input nprocs not same as system request')
 #endif
       else if (nghost < 1) then
diff --git a/cicecore/cicedynB/infrastructure/ice_grid.F90 b/cicecore/cicedynB/infrastructure/ice_grid.F90
index 523c7ea2c..dfccdd413 100644
--- a/cicecore/cicedynB/infrastructure/ice_grid.F90
+++ b/cicecore/cicedynB/infrastructure/ice_grid.F90
@@ -129,6 +129,15 @@ module ice_grid
       dxrect, & ! user_specified spacing (cm) in x-direction (uniform HTN)
       dyrect    ! user_specified spacing (cm) in y-direction (uniform HTE)

+   ! growth factor for variable spaced grid
+   real (kind=dbl_kind), public :: &
+      dxscale, & ! scale factor for grid spacing in x direction (e.g., 1.02)
+      dyscale    ! scale factor for grid spacing in y direction (e.g., 1.02)
+
+   real (kind=dbl_kind), public :: &
+      lonrefrect, & ! lower left lon for rectgrid
+      latrefrect    ! lower left lat for rectgrid
+
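The dxscale/dyscale factors declared above act as a per-cell growth rate: moving away from the domain center, each cell is the neighboring cell's width multiplied by the factor, i.e. a geometric progression dx(i) = dxrect * dxscale**d(i), where d(i) is the distance in cells from the nearest center column. A minimal standalone sketch of that closed form (not part of the patch; the grid size and values are illustrative only):

   program show_dxscale
      implicit none
      integer, parameter :: dbl_kind = selected_real_kind(13)
      integer, parameter :: nx = 6                                 ! illustrative, even grid
      real (kind=dbl_kind), parameter :: dxrect  = 16.e5_dbl_kind  ! cm, center spacing
      real (kind=dbl_kind), parameter :: dxscale = 1.02_dbl_kind   ! growth factor
      real (kind=dbl_kind) :: dx(nx)
      integer :: i, center1, center2
      center1 = nx/2         ! for even nx the two middle columns stay
      center2 = center1 + 1  ! at dxrect and the scaling grows outward
      do i = 1, nx
         dx(i) = dxrect * dxscale**(min(abs(i-center1), abs(i-center2)))
      enddo
      print '(6f12.2)', dx   ! 1664640.00 1632000.00 1600000.00 1600000.00 1632000.00 1664640.00 (cm)
   end program show_dxscale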
    ! Corners of grid boxes for history output
    real (kind=dbl_kind), dimension (:,:,:,:), allocatable, public :: &
       lont_bounds, & ! longitude of gridbox corners for T point
@@ -171,7 +180,8 @@ module ice_grid

    logical (kind=log_kind), public :: &
       use_bathymetry, & ! flag for reading in bathymetry_file
-      pgl_global_ext    ! flag for init primary grid lengths (global ext.)
+      pgl_global_ext, & ! flag for init primary grid lengths (global ext.)
+      scale_dxdy        ! flag to apply scale factor to vary dx/dy in rectgrid

    logical (kind=log_kind), dimension (:,:,:), allocatable, public :: &
       tmask  , & ! land/boundary mask, thickness (T-cell)
@@ -182,11 +192,6 @@ module ice_grid
       lmask_n, & ! northern hemisphere mask
       lmask_s    ! southern hemisphere mask

-   logical (kind=log_kind), dimension (:,:,:), allocatable, public :: &
-      iceumask, & ! ice extent mask (U-cell)
-      icenmask, & ! ice extent mask (N-cell)
-      iceemask    ! ice extent mask (E-cell)
-
    real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: &
       rndex_global ! global index for local subdomain (dbl)

@@ -267,7 +272,6 @@ subroutine alloc_grid
         umaskCD  (nx_block,ny_block,max_blocks), & ! land/boundary mask, velocity (U-cell)
         nmask    (nx_block,ny_block,max_blocks), & ! land/boundary mask (N-cell)
         emask    (nx_block,ny_block,max_blocks), & ! land/boundary mask (E-cell)
-        iceumask (nx_block,ny_block,max_blocks), & ! u mask for dynamics
         lmask_n  (nx_block,ny_block,max_blocks), & ! northern hemisphere mask
         lmask_s  (nx_block,ny_block,max_blocks), & ! southern hemisphere mask
         rndex_global(nx_block,ny_block,max_blocks), & ! global index for local subdomain (dbl)
@@ -288,8 +292,6 @@ subroutine alloc_grid
      if (grid_ice == 'CD' .or. grid_ice == 'C') then
         allocate( &
-        iceemask (nx_block,ny_block,max_blocks), & ! e mask for dynamics
-        icenmask (nx_block,ny_block,max_blocks), & ! n mask for dynamics
         ratiodxN (nx_block,ny_block,max_blocks), &
         ratiodyE (nx_block,ny_block,max_blocks), &
         ratiodxNr(nx_block,ny_block,max_blocks), &
@@ -475,6 +477,8 @@ subroutine init_grid2
    ! lat, lon, cell widths, angle, land mask
    !-----------------------------------------------------------------

+      l_readCenter = .false.
+
       call icepack_query_parameters(pi_out=pi, pi2_out=pi2, puny_out=puny)
       call icepack_warnings_flush(nu_diag)
       if (icepack_warnings_aborted()) call abort_ice(error_message=subname, &
@@ -945,7 +949,6 @@ subroutine popgrid_nc
       call ice_open_nc(kmt_file,fid_kmt)

       diag = .true.       ! write diagnostic info
-      l_readCenter = .false.
    !-----------------------------------------------------------------
    ! topography
    !-----------------------------------------------------------------
@@ -1360,7 +1363,7 @@ subroutine rectgrid
          imid, jmid

       real (kind=dbl_kind) :: &
-         length, &
+         length, &
          rad_to_deg

       real (kind=dbl_kind), dimension(:,:), allocatable :: &
@@ -1383,69 +1386,71 @@ subroutine rectgrid

       allocate(work_g1(nx_global,ny_global))

-      ! Weddell Sea
-      ! lower left corner of grid is 55W, 75S
-
-      ! Barrow AK
-      ! lower left corner of grid is 156.5W, 71.35N
-
-      if (my_task == master_task) then
-         work_g1 = c0
-         length = dxrect*cm_to_m/radius*rad_to_deg
-
-!         work_g1(1,:) = -55._dbl_kind   ! Weddell Sea
-         work_g1(1,:) = -156.5_dbl_kind ! Barrow AK
-
-         do j = 1, ny_global
-         do i = 2, nx_global
-            work_g1(i,j) = work_g1(i-1,j) + length   ! ULON
-         enddo
-         enddo
-         work_g1(:,:) = work_g1(:,:) / rad_to_deg
-      endif
-      call gridbox_verts(work_g1,lont_bounds)
-      call scatter_global(ULON, work_g1, master_task, distrb_info, &
-                          field_loc_NEcorner, field_type_scalar)
-      call ice_HaloExtrapolate(ULON, distrb_info, &
-                               ew_boundary_type, ns_boundary_type)
-
-      if (my_task == master_task) then
-         work_g1 = c0
-         length = dyrect*cm_to_m/radius*rad_to_deg
-
-!         work_g1(:,1) = -75._dbl_kind   ! Weddell Sea
-         work_g1(:,1) = 71.35_dbl_kind  ! Barrow AK
+      if (scale_dxdy) then
+         ! scale grid spacing from the center outward.
+         ! this differs from the original method in that it
+         ! needs to define the grid spacing before lat/lon;
+         ! the original rectgrid defines lat/lon first.
+         call rectgrid_scale_dxdy
+      else
+         ! rectgrid without grid scaling:
+         ! the original method, extended to use the namelist lat/lon reference

+         if (my_task == master_task) then
+            work_g1 = c0
+            length = dxrect*cm_to_m/radius*rad_to_deg
+
+            work_g1(1,:) = lonrefrect  ! reference lon from namelist
+
+            do j = 1, ny_global
+            do i = 2, nx_global
+               work_g1(i,j) = work_g1(i-1,j) + length   ! ULON
+            enddo
+            enddo
+            work_g1(:,:) = work_g1(:,:) / rad_to_deg
+         endif
+         call scatter_global(ULON, work_g1, master_task, distrb_info, &
+                             field_loc_NEcorner, field_type_scalar)
+         call ice_HaloExtrapolate(ULON, distrb_info, &
+                                  ew_boundary_type, ns_boundary_type)
+
+         if (my_task == master_task) then
+            work_g1 = c0
+            length = dyrect*cm_to_m/radius*rad_to_deg
+
+            work_g1(:,1) = latrefrect  ! reference latitude from namelist
+
+            do i = 1, nx_global
+            do j = 2, ny_global
+               work_g1(i,j) = work_g1(i,j-1) + length   ! ULAT
+            enddo
+            enddo
+            work_g1(:,:) = work_g1(:,:) / rad_to_deg
+         endif
+         call scatter_global(ULAT, work_g1, master_task, distrb_info, &
+                             field_loc_NEcorner, field_type_scalar)
+         call ice_HaloExtrapolate(ULAT, distrb_info, &
+                                  ew_boundary_type, ns_boundary_type)
+
+         if (my_task == master_task) then
+            do j = 1, ny_global
+            do i = 1, nx_global
+               work_g1(i,j) = dxrect              ! HTN
+            enddo
+            enddo
+         endif
+         call primary_grid_lengths_HTN(work_g1)  ! dxU, dxT, dxN, dxE
+
+         if (my_task == master_task) then
+            do j = 1, ny_global
+            do i = 1, nx_global
+               work_g1(i,j) = dyrect              ! HTE
+            enddo
+            enddo
+         endif
+         call primary_grid_lengths_HTE(work_g1)  ! dyU, dyT, dyN, dyE
+
+      endif ! scale_dxdy

    !-----------------------------------------------------------------
    ! Construct T-cell land mask
    !-----------------------------------------------------------------
@@ -1526,6 +1531,168 @@ end subroutine rectgrid

+!=======================================================================
+
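A variable-spaced rectangular case is then selected at run time through the inputs this patch introduces. As a usage illustration only — the namelist group name grid_nml and the exact layout are assumptions, since the namelist-reading side is not shown in this patch — the settings might look like the following, reusing the Barrow AK reference point that the uniform branch above previously hard-coded:

   &grid_nml
      grid_type  = 'rectangular'
      scale_dxdy = .true.
      dxrect     = 16.e5     ! cm
      dyrect     = 16.e5     ! cm
      dxscale    = 1.02
      dyscale    = 1.02
      lonrefrect = -156.5    ! degrees, lower left lon
      latrefrect = 71.35     ! degrees, lower left lat
   /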
+      subroutine rectgrid_scale_dxdy
+
+        ! generate a variable spaced rectangular grid.
+        ! extend spacing from center of grid outward.
+        use ice_constants, only: c0, c1, c2, radius, cm_to_m, &
+             field_loc_center, field_loc_NEcorner, field_type_scalar
+
+        integer (kind=int_kind) :: &
+           i, j, iblk, &
+           imid, jmid, &
+           center1, center2 ! array centers for expanding dx, dy
+
+        real (kind=dbl_kind) :: &
+           length, &
+           rad_to_deg
+
+        real (kind=dbl_kind), dimension(:,:), allocatable :: &
+           work_g1
+
+        character(len=*), parameter :: subname = '(rectgrid_scale_dxdy)'
+
+        call icepack_query_parameters(rad_to_deg_out=rad_to_deg)
+
+        allocate(work_g1(nx_global,ny_global))
+
+        ! determine dx spacing
+        ! strategy: initialize with dxrect.
+        ! if want to scale the grid, work from the center outwards,
+        ! multiplying the neighbor cell by the scale factor.
+        ! this assumes dx varies in the x direction only
+        ! (i.e., at a given x, dx is the same for every y).
+        if (my_task == master_task) then
+
+           ! initialize with initial dxrect
+           work_g1(:,:) = dxrect
+
+           ! check if nx is even or odd
+           ! if even, middle 2 columns are center
+           ! if odd, middle 1 column is center
+           if (mod(nx_global,2) == 0) then ! nx_global is even
+
+              ! with an even number of x locations,
+              ! the center two x columns are the center
+              center1 = nx_global/2   ! integer math
+              center2 = center1 + 1   ! integer math
+
+           else ! nx_global = odd
+              ! only one center index. set center2=center1
+              center1 = ceiling(real(nx_global)/2,int_kind)
+              center2 = center1
+           endif
+
+           ! note loop over only half the x grid points (center1)-1
+           ! working from the center outward.
+           do j = 1, ny_global
+           do i = 1, center1-1
+              ! work from center1 to left
+              work_g1(center1-i,j) = dxscale*work_g1(center1-i+1,j)
+
+              ! work from center2 to right
+              work_g1(center2+i,j) = dxscale*work_g1(center2+i-1,j)
+           enddo ! i
+           enddo ! j
+
+        endif ! my_task == master_task
+
+
+        ! note work_g1 is converted to meters in primary_grid_lengths_HTN
+        call primary_grid_lengths_HTN(work_g1) ! dxU, dxT, dxN, dxE
+
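The even/odd bookkeeping above is easy to sanity-check in isolation; a standalone sketch (not part of the patch) exercising the same center-selection arithmetic:

   program centers
      implicit none
      integer :: nx, center1, center2
      do nx = 6, 7
         if (mod(nx,2) == 0) then        ! even: two middle columns share the center
            center1 = nx/2
            center2 = center1 + 1
         else                            ! odd: a single middle column
            center1 = ceiling(real(nx)/2)
            center2 = center1
         endif
         print '(3(a,i0))', 'nx=', nx, '  center1=', center1, '  center2=', center2
      enddo
   end program centers

   ! prints:  nx=6  center1=3  center2=4
   !          nx=7  center1=4  center2=4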
+        ! make ULON array
+        if (my_task == master_task) then
+
+           ! make first column reference lon in radians.
+           ! the remaining work_g1 is still dx in meters
+           work_g1(1,:) = lonrefrect/rad_to_deg ! radians
+
+           ! loop over remaining points and add spacing to successive
+           ! x locations
+           do j = 1, ny_global
+           do i = 2, nx_global  ! start from i=2. i=1 is lonrefrect
+              length = work_g1(i,j)/radius           ! grid spacing in radians
+              work_g1(i,j) = work_g1(i-1,j) + length ! ULON
+           enddo ! i
+           enddo ! j
+        endif ! my_task == master_task
+        call scatter_global(ULON, work_g1, master_task, distrb_info, &
+                            field_loc_NEcorner, field_type_scalar)
+        call ice_HaloExtrapolate(ULON, distrb_info, &
+                                 ew_boundary_type, ns_boundary_type)
+
+        ! determine dy spacing
+        ! strategy: initialize with dyrect.
+        ! if want to scale the grid, work from the center outwards,
+        ! multiplying the neighbor cell by the scale factor.
+        ! this assumes dy varies in the y direction only
+        ! (i.e., at a given y, dy is the same for every x).
+        if (my_task == master_task) then
+
+           ! initialize with initial dyrect
+           work_g1(:,:) = dyrect
+
+           ! check if ny is even or odd
+           ! if even, middle 2 rows are center
+           ! if odd, middle 1 row is center
+           if (mod(ny_global,2) == 0) then ! ny_global is even
+
+              ! with an even number of y locations,
+              ! the center two y rows are the center
+              center1 = ny_global/2   ! integer math
+              center2 = center1 + 1   ! integer math
+
+           else ! ny_global = odd
+              ! only one center index. set center2=center1
+              center1 = ceiling(real(ny_global)/2,int_kind)
+              center2 = center1
+           endif
+
+           ! note loop over only half the y grid points (center1)-1
+           ! working from the center outward.
+           do i = 1, nx_global
+           do j = 1, center1-1
+              ! work from center1 to bottom
+              work_g1(i,center1-j) = dyscale*work_g1(i,center1-j+1)
+
+              ! work from center2 to top
+              work_g1(i,center2+j) = dyscale*work_g1(i,center2+j-1)
+           enddo ! j
+           enddo ! i
+        endif ! my_task == master_task
+        ! note work_g1 is converted to meters in primary_grid_lengths_HTE
+        call primary_grid_lengths_HTE(work_g1) ! dyU, dyT, dyN, dyE
+
+        ! make ULAT array
+        if (my_task == master_task) then
+
+           ! make first row reference lat in radians.
+           ! the remaining work_g1 is still dy in meters
+           work_g1(:,1) = latrefrect/rad_to_deg ! radians
+
+
+           ! loop over remaining points and add spacing to successive
+           ! y locations
+           do j = 2, ny_global  ! start from j=2. j=1 is latrefrect
+           do i = 1, nx_global
+              length = work_g1(i,j)/radius           ! grid spacing in radians
+              work_g1(i,j) = work_g1(i,j-1) + length ! ULAT
+           enddo ! i
+           enddo ! j
+        endif !
mytask == master_task + call scatter_global(ULAT, work_g1, master_task, distrb_info, & + field_loc_NEcorner, field_type_scalar) + call ice_HaloExtrapolate(ULAT, distrb_info, & + ew_boundary_type, ns_boundary_type) + + + deallocate(work_g1) + + end subroutine rectgrid_scale_dxdy + !======================================================================= ! Complex land mask for testing box cases @@ -2513,13 +2680,23 @@ subroutine grid_average_X2Y_NEversion(type,work1a,grid1a,work1b,grid1b,work2,gri ! state masked case('NE2US') - call grid_average_X2YS_2('NE2US',work1a,narea,npm,work1b,earea,epm,work2) + call grid_average_X2Y_2('NE2US',work1a,narea,npm,work1b,earea,epm,work2) case('EN2US') - call grid_average_X2YS_2('NE2US',work1b,narea,npm,work1a,earea,epm,work2) + call grid_average_X2Y_2('NE2US',work1b,narea,npm,work1a,earea,epm,work2) case('NE2TS') - call grid_average_X2YS_2('NE2TS',work1a,narea,npm,work1b,earea,epm,work2) + call grid_average_X2Y_2('NE2TS',work1a,narea,npm,work1b,earea,epm,work2) case('EN2TS') - call grid_average_X2YS_2('NE2TS',work1b,narea,npm,work1a,earea,epm,work2) + call grid_average_X2Y_2('NE2TS',work1b,narea,npm,work1a,earea,epm,work2) + + ! state unmasked + case('NE2UA') + call grid_average_X2Y_2('NE2UA',work1a,narea,npm,work1b,earea,epm,work2) + case('EN2UA') + call grid_average_X2Y_2('NE2UA',work1b,narea,npm,work1a,earea,epm,work2) + case('NE2TA') + call grid_average_X2Y_2('NE2TA',work1a,narea,npm,work1b,earea,epm,work2) + case('EN2TA') + call grid_average_X2Y_2('NE2TA',work1b,narea,npm,work1a,earea,epm,work2) case default call abort_ice(subname//'ERROR: unknown X2Y '//trim(X2Y)) @@ -3413,36 +3590,6 @@ subroutine grid_average_X2YF(dir,work1,wght1,work2,wght2) end subroutine grid_average_X2YF -!======================================================================= -! Compute the minimum of adjacent values of a field at specific indices, -! depending on the grid location (U, E, N) -! - real(kind=dbl_kind) function grid_neighbor_min(field, i, j, grid_location) result(mini) - - real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & - field ! field defined at T point - - integer (kind=int_kind), intent(in) :: & - i, j - - character(len=*), intent(in) :: & - grid_location ! grid location at which to compute the minumum (U, E, N) - - character(len=*), parameter :: subname = '(grid_neighbor_min)' - - select case (trim(grid_location)) - case('U') - mini = min(field(i,j), field(i+1,j), field(i,j+1), field(i+1,j+1)) - case('E') - mini = min(field(i,j), field(i+1,j)) - case('N') - mini = min(field(i,j), field(i,j+1)) - case default - call abort_ice(subname // ' unknown grid_location: ' // grid_location) - end select - - end function grid_neighbor_min - !======================================================================= ! Shifts quantities from one grid to another ! State masked version, simple weighted averager @@ -3451,7 +3598,7 @@ end function grid_neighbor_min ! ! author: T. Craig - subroutine grid_average_X2YS_2(dir,work1a,wght1a,mask1a,work1b,wght1b,mask1b,work2) + subroutine grid_average_X2Y_2(dir,work1a,wght1a,mask1a,work1b,wght1b,mask1b,work2) use ice_constants, only: c0 @@ -3478,7 +3625,7 @@ subroutine grid_average_X2YS_2(dir,work1a,wght1a,mask1a,work1b,wght1b,mask1b,wor type (block) :: & this_block ! 
block information for current block

-      character(len=*), parameter :: subname = '(grid_average_X2YS_2)'
+      character(len=*), parameter :: subname = '(grid_average_X2Y_2)'

       work2(:,:,:) = c0

@@ -3534,11 +3681,91 @@ subroutine grid_average_X2YS_2(dir,work1a,wght1a,mask1a,work1b,wght1b,mask1b,wor
          enddo
          !$OMP END PARALLEL DO

+      case('NE2UA')
+         !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block,wtmp)
+         do iblk = 1, nblocks
+            this_block = get_block(blocks_ice(iblk),iblk)
+            ilo = this_block%ilo
+            ihi = this_block%ihi
+            jlo = this_block%jlo
+            jhi = this_block%jhi
+            do j = jlo, jhi
+            do i = ilo, ihi
+               wtmp = (wght1a(i  ,j  ,iblk) &
+                     + wght1a(i+1,j  ,iblk) &
+                     + wght1b(i  ,j  ,iblk) &
+                     + wght1b(i  ,j+1,iblk))
+               if (wtmp /= c0) &
+                  work2(i,j,iblk) = (work1a(i  ,j  ,iblk)*wght1a(i  ,j  ,iblk) &
+                                   + work1a(i+1,j  ,iblk)*wght1a(i+1,j  ,iblk) &
+                                   + work1b(i  ,j  ,iblk)*wght1b(i  ,j  ,iblk) &
+                                   + work1b(i  ,j+1,iblk)*wght1b(i  ,j+1,iblk)) &
+                                   / wtmp
+            enddo
+            enddo
+         enddo
+         !$OMP END PARALLEL DO
+
+      case('NE2TA')
+         !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block,wtmp)
+         do iblk = 1, nblocks
+            this_block = get_block(blocks_ice(iblk),iblk)
+            ilo = this_block%ilo
+            ihi = this_block%ihi
+            jlo = this_block%jlo
+            jhi = this_block%jhi
+            do j = jlo, jhi
+            do i = ilo, ihi
+               wtmp = (wght1a(i  ,j-1,iblk) &
+                     + wght1a(i  ,j  ,iblk) &
+                     + wght1b(i-1,j  ,iblk) &
+                     + wght1b(i  ,j  ,iblk))
+               if (wtmp /= c0) &
+                  work2(i,j,iblk) = (work1a(i  ,j-1,iblk)*wght1a(i  ,j-1,iblk) &
+                                   + work1a(i  ,j  ,iblk)*wght1a(i  ,j  ,iblk) &
+                                   + work1b(i-1,j  ,iblk)*wght1b(i-1,j  ,iblk) &
+                                   + work1b(i  ,j  ,iblk)*wght1b(i  ,j  ,iblk)) &
+                                   / wtmp
+            enddo
+            enddo
+         enddo
+         !$OMP END PARALLEL DO
+
       case default
          call abort_ice(subname//'ERROR: unknown option '//trim(dir))
       end select

-      end subroutine grid_average_X2YS_2
+      end subroutine grid_average_X2Y_2
+
+!=======================================================================
+! Compute the minimum of adjacent values of a field at specific indices,
+! depending on the grid location (U, E, N)
+!
+      real(kind=dbl_kind) function grid_neighbor_min(field, i, j, grid_location) result(mini)
+
+      real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: &
+         field     ! field defined at T point
+
+      integer (kind=int_kind), intent(in) :: &
+         i, j
+
+      character(len=*), intent(in) :: &
+         grid_location ! grid location at which to compute the minimum (U, E, N)
+
+      character(len=*), parameter :: subname = '(grid_neighbor_min)'
+
+      select case (trim(grid_location))
+      case('U')
+         mini = min(field(i,j), field(i+1,j), field(i,j+1), field(i+1,j+1))
+      case('E')
+         mini = min(field(i,j), field(i+1,j))
+      case('N')
+         mini = min(field(i,j), field(i,j+1))
+      case default
+         call abort_ice(subname // ' unknown grid_location: ' // grid_location)
+      end select
+
+      end function grid_neighbor_min

 !=======================================================================
!
Compute the maximum of adjacent values of a field at specific indices, diff --git a/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 b/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 index 64b8d2101..bd5a49eaf 100644 --- a/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 +++ b/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 @@ -55,15 +55,16 @@ subroutine dumpfile(filename_spec) use ice_blocks, only: nx_block, ny_block use ice_domain, only: nblocks use ice_domain_size, only: nilyr, nslyr, ncat, max_blocks + use ice_dyn_shared, only: iceUmask, iceEmask, iceNmask use ice_flux, only: scale_factor, swvdr, swvdf, swidr, swidf, & - strocnxT, strocnyT, sst, frzmlt, & + strocnxT_iavg, strocnyT_iavg, sst, frzmlt, & stressp_1, stressp_2, stressp_3, stressp_4, & stressm_1, stressm_2, stressm_3, stressm_4, & stress12_1, stress12_2, stress12_3, stress12_4, & stresspT, stressmT, stress12T, & stresspU, stressmU, stress12U use ice_flux, only: coszen - use ice_grid, only: grid_ice, tmask, iceumask, iceemask, icenmask + use ice_grid, only: grid_ice, tmask use ice_state, only: aicen, vicen, vsnon, trcrn, uvel, vvel, & uvelE, vvelE, uvelN, vvelN @@ -175,8 +176,8 @@ subroutine dumpfile(filename_spec) !----------------------------------------------------------------- ! ocean stress (for bottom heat flux in thermo) !----------------------------------------------------------------- - call write_restart_field(nu_dump,0,strocnxT,'ruf8','strocnxT',1,diag) - call write_restart_field(nu_dump,0,strocnyT,'ruf8','strocnyT',1,diag) + call write_restart_field(nu_dump,0,strocnxT_iavg,'ruf8','strocnxT',1,diag) + call write_restart_field(nu_dump,0,strocnyT_iavg,'ruf8','strocnyT',1,diag) !----------------------------------------------------------------- ! internal stress @@ -220,7 +221,7 @@ subroutine dumpfile(filename_spec) do j = 1, ny_block do i = 1, nx_block work1(i,j,iblk) = c0 - if (iceumask(i,j,iblk)) work1(i,j,iblk) = c1 + if (iceUmask(i,j,iblk)) work1(i,j,iblk) = c1 enddo enddo enddo @@ -234,7 +235,7 @@ subroutine dumpfile(filename_spec) do j = 1, ny_block do i = 1, nx_block work1(i,j,iblk) = c0 - if (icenmask(i,j,iblk)) work1(i,j,iblk) = c1 + if (iceNmask(i,j,iblk)) work1(i,j,iblk) = c1 enddo enddo enddo @@ -246,7 +247,7 @@ subroutine dumpfile(filename_spec) do j = 1, ny_block do i = 1, nx_block work1(i,j,iblk) = c0 - if (iceemask(i,j,iblk)) work1(i,j,iblk) = c1 + if (iceEmask(i,j,iblk)) work1(i,j,iblk) = c1 enddo enddo enddo @@ -276,16 +277,16 @@ subroutine restartfile (ice_ic) use ice_domain, only: nblocks, halo_info use ice_domain_size, only: nilyr, nslyr, ncat, & max_blocks + use ice_dyn_shared, only: iceUmask, iceEmask, iceNmask use ice_flux, only: scale_factor, swvdr, swvdf, swidr, swidf, & - strocnxT, strocnyT, sst, frzmlt, & + strocnxT_iavg, strocnyT_iavg, sst, frzmlt, & stressp_1, stressp_2, stressp_3, stressp_4, & stressm_1, stressm_2, stressm_3, stressm_4, & stress12_1, stress12_2, stress12_3, stress12_4, & stresspT, stressmT, stress12T, & stresspU, stressmU, stress12U use ice_flux, only: coszen - use ice_grid, only: tmask, grid_type, grid_ice, & - iceumask, iceemask, icenmask + use ice_grid, only: tmask, grid_type, grid_ice, grid_average_X2Y use ice_state, only: trcr_depend, aice, vice, vsno, trcr, & aice0, aicen, vicen, vsnon, trcrn, aice_init, uvel, vvel, & uvelE, vvelE, uvelN, vvelN, & @@ -428,9 +429,9 @@ subroutine restartfile (ice_ic) if (my_task == master_task) & write(nu_diag,*) 'min/max ocean stress components' - call read_restart_field(nu_restart,0,strocnxT,'ruf8', & 
+ call read_restart_field(nu_restart,0,strocnxT_iavg,'ruf8', & 'strocnxT',1,diag,field_loc_center, field_type_vector) - call read_restart_field(nu_restart,0,strocnyT,'ruf8', & + call read_restart_field(nu_restart,0,strocnyT_iavg,'ruf8', & 'strocnyT',1,diag,field_loc_center, field_type_vector) !----------------------------------------------------------------- @@ -529,12 +530,12 @@ subroutine restartfile (ice_ic) call read_restart_field(nu_restart,0,work1,'ruf8', & 'iceumask',1,diag,field_loc_center, field_type_scalar) - iceumask(:,:,:) = .false. + iceUmask(:,:,:) = .false. !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks do j = 1, ny_block do i = 1, nx_block - if (work1(i,j,iblk) > p5) iceumask(i,j,iblk) = .true. + if (work1(i,j,iblk) > p5) iceUmask(i,j,iblk) = .true. enddo enddo enddo @@ -546,12 +547,12 @@ subroutine restartfile (ice_ic) call read_restart_field(nu_restart,0,work1,'ruf8', & 'icenmask',1,diag,field_loc_center, field_type_scalar) - icenmask(:,:,:) = .false. + iceNmask(:,:,:) = .false. !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks do j = 1, ny_block do i = 1, nx_block - if (work1(i,j,iblk) > p5) icenmask(i,j,iblk) = .true. + if (work1(i,j,iblk) > p5) iceNmask(i,j,iblk) = .true. enddo enddo enddo @@ -562,12 +563,12 @@ subroutine restartfile (ice_ic) call read_restart_field(nu_restart,0,work1,'ruf8', & 'iceemask',1,diag,field_loc_center, field_type_scalar) - iceemask(:,:,:) = .false. + iceEmask(:,:,:) = .false. !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks do j = 1, ny_block do i = 1, nx_block - if (work1(i,j,iblk) > p5) iceemask(i,j,iblk) = .true. + if (work1(i,j,iblk) > p5) iceEmask(i,j,iblk) = .true. enddo enddo enddo @@ -707,13 +708,14 @@ subroutine restartfile_v4 (ice_ic) use ice_domain, only: nblocks, distrb_info use ice_domain_size, only: nilyr, nslyr, ncat, nx_global, ny_global, & max_blocks + use ice_dyn_shared, only: iceUmask use ice_flux, only: scale_factor, swvdr, swvdf, swidr, swidf, & - strocnxT, strocnyT, sst, frzmlt, & + strocnxT_iavg, strocnyT_iavg, sst, frzmlt, & stressp_1, stressp_2, stressp_3, stressp_4, & stressm_1, stressm_2, stressm_3, stressm_4, & stress12_1, stress12_2, stress12_3, stress12_4 use ice_gather_scatter, only: scatter_global_stress - use ice_grid, only: tmask, iceumask + use ice_grid, only: tmask use ice_read_write, only: ice_open, ice_read, ice_read_global use ice_state, only: trcr_depend, aice, vice, vsno, trcr, & aice0, aicen, vicen, vsnon, trcrn, aice_init, uvel, vvel, & @@ -873,9 +875,9 @@ subroutine restartfile_v4 (ice_ic) if (my_task == master_task) & write(nu_diag,*) 'min/max ocean stress components' - call ice_read(nu_restart,0,strocnxT,'ruf8',diag, & + call ice_read(nu_restart,0,strocnxT_iavg,'ruf8',diag, & field_loc_center, field_type_vector) - call ice_read(nu_restart,0,strocnyT,'ruf8',diag, & + call ice_read(nu_restart,0,strocnyT_iavg,'ruf8',diag, & field_loc_center, field_type_vector) !----------------------------------------------------------------- @@ -942,12 +944,12 @@ subroutine restartfile_v4 (ice_ic) call ice_read(nu_restart,0,work1,'ruf8',diag, & field_loc_center, field_type_scalar) - iceumask(:,:,:) = .false. + iceUmask(:,:,:) = .false. !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks do j = 1, ny_block do i = 1, nx_block - if (work1(i,j,iblk) > p5) iceumask(i,j,iblk) = .true. + if (work1(i,j,iblk) > p5) iceUmask(i,j,iblk) = .true. 
enddo enddo enddo diff --git a/cicecore/cicedynB/infrastructure/io/io_binary/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_binary/ice_restart.F90 index b2b438ebe..3e7abe3a3 100644 --- a/cicecore/cicedynB/infrastructure/io/io_binary/ice_restart.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_binary/ice_restart.F90 @@ -58,11 +58,7 @@ subroutine init_restart_read(ice_ic) logical (kind=log_kind) :: & solve_zsal, tr_fsd, & -#ifdef UNDEPRECATE_CESMPONDS - tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_pond_cesm, & -#else tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, & -#endif tr_pond_topo, tr_pond_lvl, tr_brine, tr_snow character(len=char_len_long) :: & @@ -87,11 +83,7 @@ subroutine init_restart_read(ice_ic) nbtrcr_out=nbtrcr) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_lvl_out=tr_lvl, tr_fsd_out=tr_fsd, & -#ifdef UNDEPRECATE_CESMPONDS - tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & -#else tr_iso_out=tr_iso, tr_aero_out=tr_aero, & -#endif tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, & tr_snow_out=tr_snow, tr_brine_out=tr_brine) call icepack_warnings_flush(nu_diag) @@ -236,28 +228,6 @@ subroutine init_restart_read(ice_ic) endif endif -#ifdef UNDEPRECATE_CESMPONDS - if (tr_pond_cesm) then - if (my_task == master_task) then - n = index(filename0,trim(restart_file)) - if (n == 0) call abort_ice(subname//'ERROR: pond_cesm restart: filename discrepancy') - string1 = trim(filename0(1:n-1)) - string2 = trim(filename0(n+lenstr(restart_file):lenstr(filename0))) - write(filename,'(a,a,a,a)') & - string1(1:lenstr(string1)), & - restart_file(1:lenstr(restart_file)),'.pond_cesm', & - string2(1:lenstr(string2)) - if (restart_ext) then - call ice_open_ext(nu_restart_pond,filename,0) - else - call ice_open(nu_restart_pond,filename,0) - endif - read (nu_restart_pond) iignore,rignore,rignore - write(nu_diag,*) 'Reading ',filename(1:lenstr(filename)) - endif - endif -#endif - if (tr_pond_lvl) then if (my_task == master_task) then n = index(filename0,trim(restart_file)) @@ -424,11 +394,7 @@ subroutine init_restart_write(filename_spec) logical (kind=log_kind) :: & solve_zsal, tr_fsd, & -#ifdef UNDEPRECATE_CESMPONDS - tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_pond_cesm, & -#else tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, & -#endif tr_pond_topo, tr_pond_lvl, tr_brine, tr_snow integer (kind=int_kind) :: & @@ -444,11 +410,7 @@ subroutine init_restart_write(filename_spec) nbtrcr_out=nbtrcr) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_lvl_out=tr_lvl, tr_fsd_out=tr_fsd, & -#ifdef UNDEPRECATE_CESMPONDS - tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & -#else tr_iso_out=tr_iso, tr_aero_out=tr_aero, & -#endif tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, & tr_snow_out=tr_snow, tr_brine_out=tr_brine) call icepack_warnings_flush(nu_diag) @@ -581,28 +543,6 @@ subroutine init_restart_write(filename_spec) endif -#ifdef UNDEPRECATE_CESMPONDS - if (tr_pond_cesm) then - - write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & - restart_dir(1:lenstr(restart_dir)), & - restart_file(1:lenstr(restart_file)),'.pond_cesm.', & - myear,'-',mmonth,'-',mday,'-',msec - - if (restart_ext) then - call ice_open_ext(nu_dump_pond,filename,0) - else - call ice_open(nu_dump_pond,filename,0) - endif - - if (my_task == master_task) then - write(nu_dump_pond) istep1,timesecs,time_forc - write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) - endif - - endif -#endif - if (tr_pond_lvl) then 
write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & @@ -871,11 +811,7 @@ subroutine final_restart() logical (kind=log_kind) :: & solve_zsal, & -#ifdef UNDEPRECATE_CESMPONDS - tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_pond_cesm, & -#else tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, & -#endif tr_pond_topo, tr_pond_lvl, tr_brine, tr_snow integer (kind=int_kind) :: & @@ -889,11 +825,7 @@ subroutine final_restart() nbtrcr_out=nbtrcr) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_lvl_out=tr_lvl, & -#ifdef UNDEPRECATE_CESMPONDS - tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & -#else tr_iso_out=tr_iso, tr_aero_out=tr_aero, & -#endif tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, & tr_snow_out=tr_snow, tr_brine_out=tr_brine) call icepack_warnings_flush(nu_diag) @@ -908,9 +840,6 @@ subroutine final_restart() if (tr_iage) close(nu_dump_age) if (tr_FY) close(nu_dump_FY) if (tr_lvl) close(nu_dump_lvl) -#ifdef UNDEPRECATE_CESMPONDS - if (tr_pond_cesm) close(nu_dump_pond) -#endif if (tr_pond_lvl) close(nu_dump_pond) if (tr_pond_topo) close(nu_dump_pond) if (tr_snow) close(nu_dump_snow) diff --git a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 index f647bd96b..ed49a48f5 100644 --- a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 @@ -145,11 +145,7 @@ subroutine init_restart_write(filename_spec) logical (kind=log_kind) :: & solve_zsal, skl_bgc, z_tracers, tr_fsd, & -#ifdef UNDEPRECATE_CESMPONDS - tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_pond_cesm, & -#else tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, & -#endif tr_pond_topo, tr_pond_lvl, tr_brine, tr_snow, & tr_bgc_N, tr_bgc_C, tr_bgc_Nit, & tr_bgc_Sil, tr_bgc_DMS, & @@ -185,11 +181,7 @@ subroutine init_restart_write(filename_spec) nbtrcr_out=nbtrcr) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_lvl_out=tr_lvl, tr_fsd_out=tr_fsd, & -#ifdef UNDEPRECATE_CESMPONDS - tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & -#else tr_iso_out=tr_iso, tr_aero_out=tr_aero, & -#endif tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, & tr_snow_out=tr_snow, tr_brine_out=tr_brine, & tr_bgc_N_out=tr_bgc_N, tr_bgc_C_out=tr_bgc_C, tr_bgc_Nit_out=tr_bgc_Nit, & @@ -416,13 +408,6 @@ subroutine init_restart_write(filename_spec) call define_rest_field(ncid,'vlvl',dims) end if -#ifdef UNDEPRECATE_CESMPONDS - if (tr_pond_cesm) then - call define_rest_field(ncid,'apnd',dims) - call define_rest_field(ncid,'hpnd',dims) - end if -#endif - if (tr_pond_topo) then call define_rest_field(ncid,'apnd',dims) call define_rest_field(ncid,'hpnd',dims) diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 index 1124cc048..679a2b6e6 100644 --- a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 @@ -151,11 +151,7 @@ subroutine init_restart_write(filename_spec) solve_zsal, skl_bgc, z_tracers logical (kind=log_kind) :: & -#ifdef UNDEPRECATE_CESMPONDS - tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_pond_cesm, & -#else tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, & -#endif tr_pond_topo, tr_pond_lvl, tr_brine, tr_snow, & tr_bgc_N, tr_bgc_C, tr_bgc_Nit, & tr_bgc_Sil, tr_bgc_DMS, & @@ -191,11 +187,7 @@ subroutine init_restart_write(filename_spec) call 
icepack_query_tracer_sizes(nbtrcr_out=nbtrcr) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_lvl_out=tr_lvl, & -#ifdef UNDEPRECATE_CESMPONDS - tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & -#else tr_iso_out=tr_iso, tr_aero_out=tr_aero, & -#endif tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, & tr_snow_out=tr_snow, tr_brine_out=tr_brine, & tr_bgc_N_out=tr_bgc_N, tr_bgc_C_out=tr_bgc_C, tr_bgc_Nit_out=tr_bgc_Nit, & @@ -420,13 +412,6 @@ subroutine init_restart_write(filename_spec) call define_rest_field(File,'vlvl',dims) end if -#ifdef UNDEPRECATE_CESMPONDS - if (tr_pond_cesm) then - call define_rest_field(File,'apnd',dims) - call define_rest_field(File,'hpnd',dims) - end if -#endif - if (tr_pond_topo) then call define_rest_field(File,'apnd',dims) call define_rest_field(File,'hpnd',dims) diff --git a/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 b/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 index 87dc8d9a1..0b8ed689e 100644 --- a/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 @@ -235,17 +235,10 @@ subroutine init_restart use ice_grid, only: tmask use ice_init, only: ice_ic use ice_init_column, only: init_age, init_FY, init_lvl, & -#ifdef UNDEPRECATE_CESMPONDS - init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & -#else init_meltponds_lvl, init_meltponds_topo, & -#endif init_aerosol, init_hbrine, init_bgc, init_fsd use ice_restart_column, only: restart_age, read_restart_age, & restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & -#ifdef UNDEPRECATE_CESMPONDS - restart_pond_cesm, read_restart_pond_cesm, & -#endif restart_pond_lvl, read_restart_pond_lvl, & restart_pond_topo, read_restart_pond_topo, & restart_fsd, read_restart_fsd, & @@ -260,11 +253,7 @@ subroutine init_restart i, j , & ! horizontal indices iblk ! block index logical(kind=log_kind) :: & -#ifdef UNDEPRECATE_CESMPONDS - tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & -#else tr_iage, tr_FY, tr_lvl, tr_pond_lvl, & -#endif tr_pond_topo, tr_fsd, tr_aero, tr_brine, & skl_bgc, z_tracers, solve_zsal integer(kind=int_kind) :: & @@ -283,11 +272,7 @@ subroutine init_restart call icepack_query_parameters(skl_bgc_out=skl_bgc, & z_tracers_out=z_tracers, solve_zsal_out=solve_zsal) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & -#ifdef UNDEPRECATE_CESMPONDS - tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & -#else tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & -#endif tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & tr_fsd_out=tr_fsd) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & @@ -346,21 +331,6 @@ subroutine init_restart enddo ! iblk endif endif -#ifdef UNDEPRECATE_CESMPONDS - ! CESM melt ponds - if (tr_pond_cesm) then - if (trim(runtype) == 'continue') & - restart_pond_cesm = .true. - if (restart_pond_cesm) then - call read_restart_pond_cesm - else - do iblk = 1, nblocks - call init_meltponds_cesm(trcrn(:,:,nt_apnd,:,iblk), & - trcrn(:,:,nt_hpnd,:,iblk)) - enddo ! iblk - endif - endif -#endif ! 
level-ice melt ponds if (tr_pond_lvl) then if (trim(runtype) == 'continue') & diff --git a/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 b/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 index 91f7985bd..b67e1a223 100644 --- a/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 @@ -145,11 +145,7 @@ subroutine ice_step use ice_history_bgc, only: init_history_bgc use ice_restart, only: final_restart use ice_restart_column, only: write_restart_age, write_restart_FY, & -#ifdef UNDEPRECATE_CESMPONDS - write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & -#else write_restart_lvl, write_restart_pond_lvl, & -#endif write_restart_pond_topo, write_restart_aero, write_restart_fsd, & write_restart_bgc, write_restart_hbrine use ice_restart_driver, only: dumpfile @@ -172,11 +168,7 @@ subroutine ice_step logical (kind=log_kind) :: & tr_iage, tr_FY, tr_lvl, tr_fsd, & -#ifdef UNDEPRECATE_CESMPONDS - tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_aero, & -#else tr_pond_lvl, tr_pond_topo, tr_brine, tr_aero, & -#endif calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec character(len=*), parameter :: subname = '(ice_step)' @@ -185,11 +177,7 @@ subroutine ice_step solve_zsal_out=solve_zsal, z_tracers_out=z_tracers, ktherm_out=ktherm, & wave_spec_out=wave_spec) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & -#ifdef UNDEPRECATE_CESMPONDS - tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & -#else tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & -#endif tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & tr_fsd_out=tr_fsd) call icepack_warnings_flush(nu_diag) @@ -333,9 +321,6 @@ subroutine ice_step if (tr_iage) call write_restart_age if (tr_FY) call write_restart_FY if (tr_lvl) call write_restart_lvl -#ifdef UNDEPRECATE_CESMPONDS - if (tr_pond_cesm) call write_restart_pond_cesm -#endif if (tr_pond_lvl) call write_restart_pond_lvl if (tr_pond_topo) call write_restart_pond_topo if (tr_fsd) call write_restart_fsd diff --git a/cicecore/drivers/direct/nemo_concepts/CICE_InitMod.F90 b/cicecore/drivers/direct/nemo_concepts/CICE_InitMod.F90 index 87dc8d9a1..0b8ed689e 100644 --- a/cicecore/drivers/direct/nemo_concepts/CICE_InitMod.F90 +++ b/cicecore/drivers/direct/nemo_concepts/CICE_InitMod.F90 @@ -235,17 +235,10 @@ subroutine init_restart use ice_grid, only: tmask use ice_init, only: ice_ic use ice_init_column, only: init_age, init_FY, init_lvl, & -#ifdef UNDEPRECATE_CESMPONDS - init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & -#else init_meltponds_lvl, init_meltponds_topo, & -#endif init_aerosol, init_hbrine, init_bgc, init_fsd use ice_restart_column, only: restart_age, read_restart_age, & restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & -#ifdef UNDEPRECATE_CESMPONDS - restart_pond_cesm, read_restart_pond_cesm, & -#endif restart_pond_lvl, read_restart_pond_lvl, & restart_pond_topo, read_restart_pond_topo, & restart_fsd, read_restart_fsd, & @@ -260,11 +253,7 @@ subroutine init_restart i, j , & ! horizontal indices iblk ! 
block index logical(kind=log_kind) :: & -#ifdef UNDEPRECATE_CESMPONDS - tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & -#else tr_iage, tr_FY, tr_lvl, tr_pond_lvl, & -#endif tr_pond_topo, tr_fsd, tr_aero, tr_brine, & skl_bgc, z_tracers, solve_zsal integer(kind=int_kind) :: & @@ -283,11 +272,7 @@ subroutine init_restart call icepack_query_parameters(skl_bgc_out=skl_bgc, & z_tracers_out=z_tracers, solve_zsal_out=solve_zsal) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & -#ifdef UNDEPRECATE_CESMPONDS - tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & -#else tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & -#endif tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & tr_fsd_out=tr_fsd) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & @@ -346,21 +331,6 @@ subroutine init_restart enddo ! iblk endif endif -#ifdef UNDEPRECATE_CESMPONDS - ! CESM melt ponds - if (tr_pond_cesm) then - if (trim(runtype) == 'continue') & - restart_pond_cesm = .true. - if (restart_pond_cesm) then - call read_restart_pond_cesm - else - do iblk = 1, nblocks - call init_meltponds_cesm(trcrn(:,:,nt_apnd,:,iblk), & - trcrn(:,:,nt_hpnd,:,iblk)) - enddo ! iblk - endif - endif -#endif ! level-ice melt ponds if (tr_pond_lvl) then if (trim(runtype) == 'continue') & diff --git a/cicecore/drivers/direct/nemo_concepts/CICE_RunMod.F90 b/cicecore/drivers/direct/nemo_concepts/CICE_RunMod.F90 index ea6a65165..c9875d769 100644 --- a/cicecore/drivers/direct/nemo_concepts/CICE_RunMod.F90 +++ b/cicecore/drivers/direct/nemo_concepts/CICE_RunMod.F90 @@ -145,11 +145,7 @@ subroutine ice_step use ice_history_bgc, only: init_history_bgc use ice_restart, only: final_restart use ice_restart_column, only: write_restart_age, write_restart_FY, & -#ifdef UNDEPRECATE_CESMPONDS - write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & -#else write_restart_lvl, write_restart_pond_lvl, & -#endif write_restart_pond_topo, write_restart_aero, write_restart_fsd, & write_restart_bgc, write_restart_hbrine use ice_restart_driver, only: dumpfile @@ -172,11 +168,7 @@ subroutine ice_step logical (kind=log_kind) :: & tr_iage, tr_FY, tr_lvl, tr_fsd, & -#ifdef UNDEPRECATE_CESMPONDS - tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_aero, & -#else tr_pond_lvl, tr_pond_topo, tr_brine, tr_aero, & -#endif calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec character(len=*), parameter :: subname = '(ice_step)' @@ -185,11 +177,7 @@ subroutine ice_step solve_zsal_out=solve_zsal, z_tracers_out=z_tracers, ktherm_out=ktherm, & wave_spec_out=wave_spec) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & -#ifdef UNDEPRECATE_CESMPONDS - tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & -#else tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & -#endif tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & tr_fsd_out=tr_fsd) call icepack_warnings_flush(nu_diag) @@ -333,9 +321,6 @@ subroutine ice_step if (tr_iage) call write_restart_age if (tr_FY) call write_restart_FY if (tr_lvl) call write_restart_lvl -#ifdef UNDEPRECATE_CESMPONDS - if (tr_pond_cesm) call write_restart_pond_cesm -#endif if (tr_pond_lvl) call write_restart_pond_lvl if (tr_pond_topo) call write_restart_pond_topo if (tr_fsd) call write_restart_fsd diff --git a/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 b/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 index cfc5bece9..a8bf96ad2 100644 --- 
a/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 +++ b/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 @@ -264,17 +264,10 @@ subroutine init_restart use ice_grid, only: tmask use ice_init, only: ice_ic use ice_init_column, only: init_age, init_FY, init_lvl, init_snowtracers, & -#ifdef UNDEPRECATE_CESMPONDS - init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & -#else init_meltponds_lvl, init_meltponds_topo, & -#endif init_isotope, init_aerosol, init_hbrine, init_bgc, init_fsd use ice_restart_column, only: restart_age, read_restart_age, & restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & -#ifdef UNDEPRECATE_CESMPONDS - restart_pond_cesm, read_restart_pond_cesm, & -#endif restart_pond_lvl, read_restart_pond_lvl, & restart_pond_topo, read_restart_pond_topo, & restart_snow, read_restart_snow, & @@ -291,11 +284,7 @@ subroutine init_restart i, j , & ! horizontal indices iblk ! block index logical(kind=log_kind) :: & -#ifdef UNDEPRECATE_CESMPONDS - tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & -#else tr_iage, tr_FY, tr_lvl, tr_pond_lvl, & -#endif tr_pond_topo, tr_snow, tr_fsd, tr_iso, tr_aero, tr_brine, & skl_bgc, z_tracers, solve_zsal integer(kind=int_kind) :: & @@ -315,11 +304,7 @@ subroutine init_restart call icepack_query_parameters(skl_bgc_out=skl_bgc, & z_tracers_out=z_tracers, solve_zsal_out=solve_zsal) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & -#ifdef UNDEPRECATE_CESMPONDS - tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & -#else tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & -#endif tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & tr_snow_out=tr_snow, tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & @@ -381,21 +366,6 @@ subroutine init_restart enddo ! iblk endif endif -#ifdef UNDEPRECATE_CESMPONDS - ! CESM melt ponds - if (tr_pond_cesm) then - if (trim(runtype) == 'continue') & - restart_pond_cesm = .true. - if (restart_pond_cesm) then - call read_restart_pond_cesm - else - do iblk = 1, nblocks - call init_meltponds_cesm(trcrn(:,:,nt_apnd,:,iblk), & - trcrn(:,:,nt_hpnd,:,iblk)) - enddo ! iblk - endif - endif -#endif ! 
level-ice melt ponds if (tr_pond_lvl) then if (trim(runtype) == 'continue') & diff --git a/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 b/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 index b96086c6d..5836479b4 100644 --- a/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 +++ b/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 @@ -137,11 +137,7 @@ subroutine ice_step use ice_history_bgc, only: init_history_bgc use ice_restart, only: final_restart use ice_restart_column, only: write_restart_age, write_restart_FY, & -#ifdef UNDEPRECATE_CESMPONDS - write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & -#else write_restart_lvl, write_restart_pond_lvl, & -#endif write_restart_pond_topo, write_restart_aero, write_restart_fsd, & write_restart_iso, write_restart_bgc, write_restart_hbrine, & write_restart_snow @@ -166,11 +162,7 @@ subroutine ice_step logical (kind=log_kind) :: & tr_iage, tr_FY, tr_lvl, tr_fsd, tr_snow, & -#ifdef UNDEPRECATE_CESMPONDS - tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & -#else tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & -#endif calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec character(len=*), parameter :: subname = '(ice_step)' @@ -188,11 +180,7 @@ subroutine ice_step solve_zsal_out=solve_zsal, z_tracers_out=z_tracers, ktherm_out=ktherm, & wave_spec_out=wave_spec) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & -#ifdef UNDEPRECATE_CESMPONDS - tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & -#else tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & -#endif tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & tr_iso_out=tr_iso, tr_fsd_out=tr_fsd, tr_snow_out=tr_snow) call icepack_warnings_flush(nu_diag) @@ -403,9 +391,6 @@ subroutine ice_step if (tr_iage) call write_restart_age if (tr_FY) call write_restart_FY if (tr_lvl) call write_restart_lvl -#ifdef UNDEPRECATE_CESMPONDS - if (tr_pond_cesm) call write_restart_pond_cesm -#endif if (tr_pond_lvl) call write_restart_pond_lvl if (tr_pond_topo) call write_restart_pond_topo if (tr_snow) call write_restart_snow diff --git a/cicecore/drivers/mct/cesm1/ice_import_export.F90 b/cicecore/drivers/mct/cesm1/ice_import_export.F90 index 7ac4f0bb7..868ed42b4 100644 --- a/cicecore/drivers/mct/cesm1/ice_import_export.F90 +++ b/cicecore/drivers/mct/cesm1/ice_import_export.F90 @@ -9,7 +9,7 @@ module ice_import_export use ice_constants , only: field_type_vector, c100 use ice_constants , only: p001, p5 use ice_blocks , only: block, get_block, nx_block, ny_block - use ice_flux , only: strairxT, strairyT, strocnxT, strocnyT + use ice_flux , only: strairxT, strairyT, strocnxT_iavg, strocnyT_iavg use ice_flux , only: alvdr, alidr, alvdf, alidf, Tref, Qref, Uref use ice_flux , only: flat, fsens, flwout, evap, fswabs, fhocn, fswthru use ice_flux , only: fresh, fsalt, zlvl, uatm, vatm, potT, Tair, Qa @@ -571,8 +571,8 @@ subroutine ice_export( i2x ) + workx*sin(ANGLET(i,j,iblk)) ! ice/ocean stress (on POP T-grid: convert to lat-lon) - workx = -strocnxT(i,j,iblk) ! N/m^2 - worky = -strocnyT(i,j,iblk) ! N/m^2 + workx = -strocnxT_iavg(i,j,iblk) ! N/m^2 + worky = -strocnyT_iavg(i,j,iblk) ! 
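! The renamed strocnxT_iavg/strocnyT_iavg fields carry the ice-ocean stress
! at T points per unit ice area (as the _iavg suffix suggests); the sign
! flip above turns the stress felt by the ice into the stress the ice
! exerts on the ocean, and the tauxo/tauyo lines that follow rotate that
! vector from the model grid to geographic components using the T-cell
! angle ANGLET.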
N/m^2 tauxo(i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) & - worky*sin(ANGLET(i,j,iblk)) tauyo(i,j,iblk) = worky*cos(ANGLET(i,j,iblk)) & diff --git a/cicecore/drivers/mct/cesm1/ice_prescribed_mod.F90 b/cicecore/drivers/mct/cesm1/ice_prescribed_mod.F90 index 78b7d15c4..599249083 100644 --- a/cicecore/drivers/mct/cesm1/ice_prescribed_mod.F90 +++ b/cicecore/drivers/mct/cesm1/ice_prescribed_mod.F90 @@ -585,11 +585,11 @@ subroutine ice_prescribed_phys ! set non-computed fluxes, ice velocities, ice-ocn stresses to zero !-------------------------------------------------------------------- - frzmlt (:,:,:) = c0 - uvel (:,:,:) = c0 - vvel (:,:,:) = c0 - strocnxT (:,:,:) = c0 - strocnyT (:,:,:) = c0 + frzmlt (:,:,:) = c0 + uvel (:,:,:) = c0 + vvel (:,:,:) = c0 + strocnxT_iavg(:,:,:) = c0 + strocnyT_iavg(:,:,:) = c0 !----------------------------------------------------------------- ! other atm and ocn fluxes diff --git a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 index 3d5e5cc2a..5fbde9cce 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 @@ -222,17 +222,10 @@ subroutine init_restart() use ice_grid, only: tmask use ice_init, only: ice_ic use ice_init_column, only: init_age, init_FY, init_lvl, init_snowtracers, & -#ifdef UNDEPRECATE_CESMPONDS - init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & -#else init_meltponds_lvl, init_meltponds_topo, & -#endif init_isotope, init_aerosol, init_hbrine, init_bgc, init_fsd use ice_restart_column, only: restart_age, read_restart_age, & restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & -#ifdef UNDEPRECATE_CESMPONDS - restart_pond_cesm, read_restart_pond_cesm, & -#endif restart_pond_lvl, read_restart_pond_lvl, & restart_pond_topo, read_restart_pond_topo, & restart_snow, read_restart_snow, & @@ -249,11 +242,7 @@ subroutine init_restart() i, j , & ! horizontal indices iblk ! block index logical(kind=log_kind) :: & -#ifdef UNDEPRECATE_CESMPONDS - tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & -#else tr_iage, tr_FY, tr_lvl, tr_pond_lvl, & -#endif tr_pond_topo, tr_fsd, tr_iso, tr_aero, tr_brine, tr_snow, & skl_bgc, z_tracers, solve_zsal integer(kind=int_kind) :: & @@ -274,11 +263,7 @@ subroutine init_restart() call icepack_query_parameters(skl_bgc_out=skl_bgc, & z_tracers_out=z_tracers, solve_zsal_out=solve_zsal) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & -#ifdef UNDEPRECATE_CESMPONDS - tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & -#else tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & -#endif tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & tr_snow_out=tr_snow, tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & @@ -340,21 +325,6 @@ subroutine init_restart() enddo ! iblk endif endif -#ifdef UNDEPRECATE_CESMPONDS - ! CESM melt ponds - if (tr_pond_cesm) then - if (trim(runtype) == 'continue') & - restart_pond_cesm = .true. - if (restart_pond_cesm) then - call read_restart_pond_cesm - else - do iblk = 1, nblocks - call init_meltponds_cesm(trcrn(:,:,nt_apnd,:,iblk), & - trcrn(:,:,nt_hpnd,:,iblk)) - enddo ! iblk - endif - endif -#endif ! 
level-ice melt ponds if (tr_pond_lvl) then if (trim(runtype) == 'continue') & diff --git a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 index c68583648..e908f509f 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 @@ -121,11 +121,7 @@ subroutine ice_step use ice_history_bgc, only: init_history_bgc use ice_restart, only: final_restart use ice_restart_column, only: write_restart_age, write_restart_FY, & -#ifdef UNDEPRECATE_CESMPONDS - write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & -#else write_restart_lvl, write_restart_pond_lvl, & -#endif write_restart_pond_topo, write_restart_aero, write_restart_fsd, & write_restart_iso, write_restart_bgc, write_restart_hbrine, & write_restart_snow @@ -150,11 +146,7 @@ subroutine ice_step logical (kind=log_kind) :: & tr_iage, tr_FY, tr_lvl, tr_fsd, tr_snow, & -#ifdef UNDEPRECATE_CESMPONDS - tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & -#else tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & -#endif calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec character(len=*), parameter :: subname = '(ice_step)' @@ -172,11 +164,7 @@ subroutine ice_step solve_zsal_out=solve_zsal, z_tracers_out=z_tracers, ktherm_out=ktherm, & wave_spec_out=wave_spec) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & -#ifdef UNDEPRECATE_CESMPONDS - tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & -#else tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & -#endif tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & tr_iso_out=tr_iso, tr_fsd_out=tr_fsd, tr_snow_out=tr_snow) call icepack_warnings_flush(nu_diag) @@ -382,9 +370,6 @@ subroutine ice_step if (tr_iage) call write_restart_age if (tr_FY) call write_restart_FY if (tr_lvl) call write_restart_lvl -#ifdef UNDEPRECATE_CESMPONDS - if (tr_pond_cesm) call write_restart_pond_cesm -#endif if (tr_pond_lvl) call write_restart_pond_lvl if (tr_pond_topo) call write_restart_pond_topo if (tr_snow) call write_restart_snow diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index 182308973..afdee5590 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -17,12 +17,13 @@ module ice_comp_nuopc use NUOPC_Model , only : NUOPC_ModelGet, SetVM use ice_constants , only : ice_init_constants, c0 use ice_shr_methods , only : chkerr, state_setscalar, state_getscalar, state_diagnose, alarmInit - use ice_shr_methods , only : set_component_logging, get_component_instance, state_flddebug + use ice_shr_methods , only : get_component_instance, state_flddebug + use ice_import_export , only : ice_import, ice_export, ice_advertise_fields, ice_realize_fields use ice_domain_size , only : nx_global, ny_global use ice_grid , only : grid_type, init_grid2 use ice_communicate , only : init_communicate, my_task, master_task, mpi_comm_ice - use ice_calendar , only : force_restart_now, write_ic + use ice_calendar , only : force_restart_now, write_ic, init_calendar use ice_calendar , only : idate, mday, mmonth, myear, year_init use ice_calendar , only : msec, dt, calendar, calendar_type, nextsw_cday, istep use ice_calendar , only : ice_calendar_noleap, ice_calendar_gregorian @@ -40,7 +41,10 @@ module ice_comp_nuopc #ifdef CESMCOUPLED use shr_const_mod use shr_orb_mod , only : shr_orb_decl, shr_orb_params, SHR_ORB_UNDEF_REAL, 
SHR_ORB_UNDEF_INT - use ice_scam , only : scmlat, scmlon, scol_mask, scol_frac, scol_ni, scol_nj + use ice_scam , only : scmlat, scmlon, scol_mask, scol_frac, scol_ni, scol_nj, scol_area + use nuopc_shr_methods , only : set_component_logging +#else + use ice_shr_methods , only : set_component_logging #endif use ice_timers use CICE_InitMod , only : cice_init1, cice_init2 @@ -87,6 +91,15 @@ module ice_comp_nuopc integer :: dbug = 0 logical :: profile_memory = .false. + logical :: mastertask + integer :: start_ymd ! Start date (YYYYMMDD) + integer :: start_tod ! start time of day (s) + integer :: curr_ymd ! Current date (YYYYMMDD) + integer :: curr_tod ! Current time of day (s) + integer :: stop_ymd ! stop date (YYYYMMDD) + integer :: stop_tod ! stop time of day (sec) + integer :: ref_ymd ! Reference date (YYYYMMDD) + integer :: ref_tod ! reference time of day (s) integer , parameter :: debug_import = 0 ! internal debug level integer , parameter :: debug_export = 0 ! internal debug level character(*), parameter :: modName = "(ice_comp_nuopc)" @@ -201,12 +214,8 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) real(kind=dbl_kind) :: atmiter_conv_driver integer (kind=int_kind) :: natmiter integer (kind=int_kind) :: natmiter_driver - character(len=char_len) :: tfrz_option_driver ! tfrz_option from driver attributes - character(len=char_len) :: tfrz_option ! tfrz_option from cice namelist - integer(int_kind) :: ktherm integer :: localPet integer :: npes - logical :: mastertask type(ESMF_VM) :: vm integer :: lmpicom ! local communicator type(ESMF_Time) :: currTime ! Current time @@ -215,14 +224,6 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) type(ESMF_Time) :: refTime ! Ref time type(ESMF_TimeInterval) :: timeStep ! Model timestep type(ESMF_CalKind_Flag) :: esmf_caltype ! esmf calendar type - integer :: start_ymd ! Start date (YYYYMMDD) - integer :: start_tod ! start time of day (s) - integer :: curr_ymd ! Current date (YYYYMMDD) - integer :: curr_tod ! Current time of day (s) - integer :: stop_ymd ! stop date (YYYYMMDD) - integer :: stop_tod ! stop time of day (sec) - integer :: ref_ymd ! Reference date (YYYYMMDD) - integer :: ref_tod ! reference time of day (s) integer :: yy,mm,dd ! Temporaries for time query integer :: dtime ! time step integer :: shrlogunit ! original log unit @@ -232,12 +233,15 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) integer :: iblk, jblk ! indices integer :: ig, jg ! indices integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + character(len=char_len_long) :: diag_filename = 'unset' character(len=char_len_long) :: logmsg character(len=char_len_long) :: single_column_lnd_domainfile real(dbl_kind) :: scol_lon real(dbl_kind) :: scol_lat real(dbl_kind) :: scol_spval + character(len=char_len) :: tfrz_option ! tfrz_option from cice namelist + character(len=char_len) :: tfrz_option_driver ! 
tfrz_option from cice namelist character(len=*), parameter :: subname=trim(modName)//':(InitializeAdvertise) ' !-------------------------------- @@ -541,12 +545,20 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) call NUOPC_CompAttributeGet(gcomp, name='scol_nj', value=cvalue, rc=rc) if (chkerr(rc,__LINE__,u_FILE_u)) return read(cvalue,*) scol_nj + call NUOPC_CompAttributeGet(gcomp, name='scol_area', value=cvalue, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scol_area call ice_mesh_create_scolumn(scmlon, scmlat, ice_mesh, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return scol_valid = (scol_mask == 1) if (.not. scol_valid) then + ! Read the cice namelist as part of the call to cice_init1 + ! Note that if single_column is true and scol_valid is not - will never get here + call t_startf ('cice_init1') + call cice_init1 + call t_stopf ('cice_init1') ! Advertise fields call ice_advertise_fields(gcomp, importState, exportState, flds_scalar_name, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return @@ -559,13 +571,18 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) RETURN end if end if - ! Read the cice namelist as part of the call to cice_init1 ! Note that if single_column is true and scol_valid is not - will never get here - call t_startf ('cice_init1') call cice_init1 call t_stopf ('cice_init1') + + !----------------------------------------------------------------- + ! Advertise fields + !----------------------------------------------------------------- + call ice_advertise_fields(gcomp, importState, exportState, flds_scalar_name, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + ! Form of ocean freezing temperature ! 'minus1p8' = -1.8 C @@ -624,6 +641,12 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) call cice_init1 call t_stopf ('cice_init1') + !----------------------------------------------------------------- + ! Advertise fields + !----------------------------------------------------------------- + call ice_advertise_fields(gcomp, importState, exportState, flds_scalar_name, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + #endif !---------------------------------------------------------------------------- @@ -675,6 +698,43 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) end if end if + call t_stopf ('cice_init_total') + + end subroutine InitializeAdvertise + + !=============================================================================== + + subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) + + ! Arguments + type(ESMF_GridComp) :: gcomp + type(ESMF_State) :: importState + type(ESMF_State) :: exportState + type(ESMF_Clock) :: clock + integer, intent(out) :: rc + + ! Local variables + integer :: n + integer :: fieldcount + type(ESMF_Field) :: lfield + character(len=char_len_long) :: cvalue + real(dbl_kind) :: scol_lon + real(dbl_kind) :: scol_lat + real(dbl_kind) :: scol_spval + real(dbl_kind), pointer :: fldptr1d(:) + real(dbl_kind), pointer :: fldptr2d(:,:) + integer :: rank + character(len=char_len) :: tfrz_option ! 
tfrz_option from cice namelist + integer(int_kind) :: ktherm + + character(len=char_len_long) :: single_column_lnd_domainfile + character(len=char_len_long) , pointer :: lfieldnamelist(:) => null() + character(len=*), parameter :: subname=trim(modName)//':(InitializeRealize) ' + !-------------------------------- + + rc = ESMF_SUCCESS + if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) + !---------------------------------------------------------------------------- ! Second cice initialization phase -after initializing grid info !---------------------------------------------------------------------------- @@ -685,29 +745,6 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) call t_startf ('cice_init2') call cice_init2() call t_stopf ('cice_init2') - - !---------------------------------------------------------------------------- - ! reset shr logging to my log file - !---------------------------------------------------------------------------- - - call icepack_query_parameters(ktherm_out=ktherm) - call icepack_query_parameters(tfrz_option_out=tfrz_option) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - - ! Now write output to nu_diag - this must happen AFTER call to cice_init - if (mastertask) then - write(nu_diag,'(a,d21.14)') trim(subname)//' cice init nextsw_cday = ',nextsw_cday - write(nu_diag,'(a)') trim(subname)//' tfrz_option = '//trim(tfrz_option) - if (ktherm == 2 .and. trim(tfrz_option) /= 'mushy') then - write(nu_diag,*) trim(subname),' Warning: Using ktherm = 2 and tfrz_option = ', trim(tfrz_option) - endif - write(nu_diag,'(a )') trim(subname)//' inst_name = '//trim(inst_name) - write(nu_diag,'(a,i8 )') trim(subname)//' inst_index = ',inst_index - write(nu_diag,'(a )') trim(subname)//' inst_suffix = ',trim(inst_suffix) - endif - !--------------------------------------------------------------------------- ! use EClock to reset calendar information on initial start !--------------------------------------------------------------------------- @@ -758,6 +795,30 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) end if call calendar() ! update calendar info + + !---------------------------------------------------------------------------- + ! reset shr logging to my log file + !---------------------------------------------------------------------------- + + call icepack_query_parameters(ktherm_out=ktherm) + call icepack_query_parameters(tfrz_option_out=tfrz_option) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + ! Now write output to nu_diag - this must happen AFTER call to cice_init + if (mastertask) then + write(nu_diag,'(a,d21.14)') trim(subname)//' cice init nextsw_cday = ',nextsw_cday + write(nu_diag,'(a)') trim(subname)//' tfrz_option = '//trim(tfrz_option) + if (ktherm == 2 .and. trim(tfrz_option) /= 'mushy') then + write(nu_diag,*) trim(subname),' Warning: Using ktherm = 2 and tfrz_option = ', trim(tfrz_option) + endif + write(nu_diag,'(a )') trim(subname)//' inst_name = '//trim(inst_name) + write(nu_diag,'(a,i8 )') trim(subname)//' inst_index = ',inst_index + write(nu_diag,'(a )') trim(subname)//' inst_suffix = ',trim(inst_suffix) + endif + + if (write_ic) then call accum_hist(dt) ! 
write initial conditions end if @@ -769,50 +830,6 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) call ice_prescribed_init(clock, ice_mesh, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - !----------------------------------------------------------------- - ! Advertise fields - !----------------------------------------------------------------- - - ! NOTE: the advertise phase needs to be called after the ice - ! initialization since the number of ice categories is needed for - ! ice_fraction_n and mean_sw_pen_to_ocn_ifrac_n - call ice_advertise_fields(gcomp, importState, exportState, flds_scalar_name, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - - call t_stopf ('cice_init_total') - - end subroutine InitializeAdvertise - - !=============================================================================== - - subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) - - ! Arguments - type(ESMF_GridComp) :: gcomp - type(ESMF_State) :: importState - type(ESMF_State) :: exportState - type(ESMF_Clock) :: clock - integer, intent(out) :: rc - - ! Local variables - integer :: n - integer :: fieldcount - type(ESMF_Field) :: lfield - character(len=char_len_long) :: cvalue - real(dbl_kind) :: scol_lon - real(dbl_kind) :: scol_lat - real(dbl_kind) :: scol_spval - real(dbl_kind), pointer :: fldptr1d(:) - real(dbl_kind), pointer :: fldptr2d(:,:) - integer :: rank - character(len=char_len_long) :: single_column_lnd_domainfile - character(len=char_len_long) , pointer :: lfieldnamelist(:) => null() - character(len=*), parameter :: subname=trim(modName)//':(InitializeRealize) ' - !-------------------------------- - - rc = ESMF_SUCCESS - if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) - #ifdef CESMCOUPLED ! if single column is not valid - set all export state fields to zero and return if (single_column .and. .not. scol_valid) then @@ -848,7 +865,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) ! *** RETURN HERE *** ! 
******************* RETURN - else + else if(single_column) then write(nu_diag,'(a,3(f10.5,2x))')' (ice_comp_nuopc) single column mode lon/lat/frac is ',& scmlon,scmlat,scol_frac end if diff --git a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 index f5a00cdf8..e4db010de 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 @@ -11,7 +11,7 @@ module ice_import_export use ice_domain_size , only : nx_global, ny_global, block_size_x, block_size_y, max_blocks, ncat use ice_domain_size , only : nfreq, nfsd use ice_exit , only : abort_ice - use ice_flux , only : strairxT, strairyT, strocnxT, strocnyT + use ice_flux , only : strairxT, strairyT, strocnxT_iavg, strocnyT_iavg use ice_flux , only : alvdr, alidr, alvdf, alidf, Tref, Qref, Uref use ice_flux , only : flat, fsens, flwout, evap, fswabs, fhocn, fswthru use ice_flux , only : fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf @@ -36,9 +36,9 @@ module ice_import_export use ice_shr_methods , only : chkerr, state_reset use icepack_intfc , only : icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc , only : icepack_query_parameters, icepack_query_tracer_flags - use icepack_intfc , only : icepack_query_tracer_indices use icepack_intfc , only : icepack_liquidus_temperature use icepack_intfc , only : icepack_sea_freezing_temperature + use icepack_intfc , only : icepack_query_tracer_indices use icepack_parameters , only : puny, c2 use cice_wrapper_mod , only : t_startf, t_stopf, t_barrierf #ifdef CESMCOUPLED @@ -307,6 +307,7 @@ end subroutine ice_advertise_fields !============================================================================== subroutine ice_realize_fields(gcomp, mesh, flds_scalar_name, flds_scalar_num, rc) + use ice_scam, only : single_column ! input/output variables type(ESMF_GridComp) :: gcomp @@ -320,7 +321,7 @@ subroutine ice_realize_fields(gcomp, mesh, flds_scalar_name, flds_scalar_num, rc type(ESMF_State) :: exportState type(ESMF_Field) :: lfield integer :: numOwnedElements - integer :: i, j, iblk, n, k + integer :: i, j, iblk, n integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain type(block) :: this_block ! block information for current block real(dbl_kind), allocatable :: mesh_areas(:) @@ -361,10 +362,10 @@ subroutine ice_realize_fields(gcomp, mesh, flds_scalar_name, flds_scalar_num, rc tag=subname//':CICE_Import',& mesh=mesh, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - #ifdef CESMCOUPLED ! Get mesh areas from second field - using second field since the ! first field is the scalar field + if (single_column) return call ESMF_MeshGet(mesh, numOwnedElements=numOwnedElements, rc=rc) if (chkerr(rc,__LINE__,u_FILE_u)) return @@ -889,6 +890,8 @@ end subroutine ice_import !=============================================================================== subroutine ice_export( exportState, rc ) + use ice_scam, only : single_column + ! input/output variables type(ESMF_State), intent(inout) :: exportState integer , intent(out) :: rc @@ -911,12 +914,13 @@ subroutine ice_export( exportState, rc ) real (kind=dbl_kind) :: ailohi(nx_block,ny_block,max_blocks) ! fractional ice area real (kind=dbl_kind) :: floediam(nx_block,ny_block,max_blocks) real (kind=dbl_kind) :: floethick(nx_block,ny_block,max_blocks) ! 
ice thickness - real (kind=dbl_kind) :: Tffresh logical (kind=log_kind) :: tr_fsd integer (kind=int_kind) :: nt_fsd + real (kind=dbl_kind) :: Tffresh real (kind=dbl_kind), allocatable :: tempfld(:,:,:) real (kind=dbl_kind), pointer :: dataptr_ifrac_n(:,:) real (kind=dbl_kind), pointer :: dataptr_swpen_n(:,:) + logical (kind=log_kind), save :: first_call = .true. character(len=*),parameter :: subname = 'ice_export' !----------------------------------------------------- @@ -963,6 +967,9 @@ subroutine ice_export( exportState, rc ) ! ice fraction ailohi(i,j,iblk) = min(aice(i,j,iblk), c1) + ! surface temperature + Tsrf(i,j,iblk) = Tffresh + trcr(i,j,1,iblk) !Kelvin (original ???) + if (tr_fsd) then ! floe thickness (m) if (aice(i,j,iblk) > puny) then @@ -984,9 +991,6 @@ subroutine ice_export( exportState, rc ) floediam(i,j,iblk) = MAX(c2*floe_rad_c(1),workx) endif - ! surface temperature - Tsrf(i,j,iblk) = Tffresh + trcr(i,j,1,iblk) !Kelvin (original ???) - ! wind stress (on POP T-grid: convert to lat-lon) workx = strairxT(i,j,iblk) ! N/m^2 worky = strairyT(i,j,iblk) ! N/m^2 @@ -994,8 +998,8 @@ subroutine ice_export( exportState, rc ) tauya(i,j,iblk) = worky*cos(ANGLET(i,j,iblk)) + workx*sin(ANGLET(i,j,iblk)) ! ice/ocean stress (on POP T-grid: convert to lat-lon) - workx = -strocnxT(i,j,iblk) ! N/m^2 - worky = -strocnyT(i,j,iblk) ! N/m^2 + workx = -strocnxT_iavg(i,j,iblk) ! N/m^2 + worky = -strocnyT_iavg(i,j,iblk) ! N/m^2 tauxo(i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) - worky*sin(ANGLET(i,j,iblk)) tauyo(i,j,iblk) = worky*cos(ANGLET(i,j,iblk)) + workx*sin(ANGLET(i,j,iblk)) enddo @@ -1042,8 +1046,11 @@ subroutine ice_export( exportState, rc ) !--------------------------------- ! Zero out fields with tmask for proper coupler accumulation in ice free areas - call state_reset(exportState, c0, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (first_call .or. .not.single_column) then + call state_reset(exportState, c0, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + first_call = .false. + endif ! Create a temporary field allocate(tempfld(nx_block,ny_block,nblocks)) diff --git a/cicecore/drivers/nuopc/cmeps/ice_mesh_mod.F90 b/cicecore/drivers/nuopc/cmeps/ice_mesh_mod.F90 index 0b1b9349c..a9b19df6b 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_mesh_mod.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_mesh_mod.F90 @@ -427,7 +427,7 @@ subroutine ice_mesh_create_scolumn(scol_lon, scol_lat, ice_mesh, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! Allocate module variable ocn_gridcell_frac - allocate(ocn_gridcell_frac(nx_block,ny_block,max_blocks)) + allocate(ocn_gridcell_frac(2,2,1)) ocn_gridcell_frac(:,:,:) = scol_frac end subroutine ice_mesh_create_scolumn @@ -560,7 +560,7 @@ subroutine ice_mesh_check(gcomp, ice_mesh, rc) ! Check CICE mesh use ice_constants, only : c1,c0,c360 - use ice_grid , only : tlon, tlat + use ice_grid , only : tlon, tlat, hm ! input/output parameters type(ESMF_GridComp) , intent(inout) :: gcomp @@ -569,7 +569,8 @@ subroutine ice_mesh_check(gcomp, ice_mesh, rc) ! local variables type(ESMF_DistGrid) :: distGrid - integer :: n,c,g,i,j,m ! indices + type(ESMF_Array) :: elemMaskArray + integer :: n,i,j ! indices integer :: iblk, jblk ! indices integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain type(block) :: this_block ! 
block information for current block @@ -578,11 +579,15 @@ subroutine ice_mesh_check(gcomp, ice_mesh, rc) real(dbl_kind), pointer :: ownedElemCoords(:) real(dbl_kind), pointer :: lat(:), latMesh(:) real(dbl_kind), pointer :: lon(:), lonMesh(:) + integer , pointer :: model_mask(:) real(dbl_kind) :: diff_lon real(dbl_kind) :: diff_lat real(dbl_kind) :: rad_to_deg real(dbl_kind) :: tmplon, eps_imesh logical :: isPresent, isSet + logical :: mask_error + integer :: mask_internal + integer :: mask_file character(len=char_len_long) :: cvalue character(len=char_len_long) :: logmsg character(len=*), parameter :: subname = ' ice_mesh_check: ' @@ -606,7 +611,7 @@ subroutine ice_mesh_check(gcomp, ice_mesh, rc) allocate(ownedElemCoords(spatialDim*numownedelements)) allocate(lonmesh(numOwnedElements)) allocate(latmesh(numOwnedElements)) - call ESMF_MeshGet(ice_mesh, ownedElemCoords=ownedElemCoords) + call ESMF_MeshGet(ice_mesh, ownedElemCoords=ownedElemCoords, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return do n = 1,numOwnedElements lonMesh(n) = ownedElemCoords(2*n-1) @@ -650,8 +655,45 @@ enddo enddo -100 format('ERROR: CICE n, lonmesh, lon, diff_lon = ',i6,2(f21.13,3x),d21.5) -101 format('ERROR: CICE n, latmesh, lat, diff_lat = ',i6,2(f21.13,3x),d21.5) + ! obtain internally generated ice mask for error checks + allocate(model_mask(numOwnedElements)) + call ESMF_MeshGet(ice_mesh, elementdistGrid=distGrid, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + elemMaskArray = ESMF_ArrayCreate(distGrid, model_mask, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ESMF_MeshGet(ice_mesh, elemMaskArray=elemMaskArray, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + mask_error = .false. + n=0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + mask_internal = nint(hm(i,j,iblk),kind=dbl_kind) + mask_file = model_mask(n) + if (mask_internal /= mask_file) then + write(6,102) n,mask_internal,mask_file + mask_error = .true. + end if + enddo !i + enddo !j + enddo !iblk + if (mask_error) then + call abort_ice(error_message=subname, file=__FILE__, line=__LINE__) + end if + + call ESMF_ArrayDestroy(elemMaskArray, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + +100 format('ERROR: CICE n, mesh_lon , lon, diff_lon = ',i8,2(f21.13,3x),d21.5) +101 format('ERROR: CICE n, mesh_lat , lat, diff_lat = ',i8,2(f21.13,3x),d21.5) +102 format('ERROR: CICE n, mask_internal, mask_file = ',i8,2(i2,2x)) ! deallocate memory deallocate(ownedElemCoords) diff --git a/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 b/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 index 84973e9dd..06b090ece 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 @@ -472,11 +472,11 @@ subroutine ice_prescribed_phys() ! set non-computed fluxes, ice velocities, ice-ocn stresses to zero !-------------------------------------------------------------------- - frzmlt (:,:,:) = c0 - uvel (:,:,:) = c0 - vvel (:,:,:) = c0 - strocnxT (:,:,:) = c0 - strocnyT (:,:,:) = c0 + frzmlt (:,:,:) = c0 + uvel (:,:,:) = c0 + vvel (:,:,:) = c0 + strocnxT_iavg(:,:,:) = c0 + strocnyT_iavg(:,:,:) = c0 !----------------------------------------------------------------- !
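! In prescribed-ice mode the dynamics are not run, so the velocities and
! ice-ocean stresses above are simply zeroed each step and only the
! thermodynamic fluxes below are recomputed.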
other atm and ocn fluxes diff --git a/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 b/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 index 78d462d4c..22596429d 100644 --- a/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 @@ -271,17 +271,10 @@ subroutine init_restart use ice_grid, only: tmask use ice_init, only: ice_ic use ice_init_column, only: init_age, init_FY, init_lvl, init_snowtracers, & -#ifdef UNDEPRECATE_CESMPONDS - init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & -#else init_meltponds_lvl, init_meltponds_topo, & -#endif init_isotope, init_aerosol, init_hbrine, init_bgc, init_fsd use ice_restart_column, only: restart_age, read_restart_age, & restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & -#ifdef UNDEPRECATE_CESMPONDS - restart_pond_cesm, read_restart_pond_cesm, & -#endif restart_pond_lvl, read_restart_pond_lvl, & restart_pond_topo, read_restart_pond_topo, & restart_snow, read_restart_snow, & @@ -298,11 +291,7 @@ subroutine init_restart i, j , & ! horizontal indices iblk ! block index logical(kind=log_kind) :: & -#ifdef UNDEPRECATE_CESMPONDS - tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & -#else tr_iage, tr_FY, tr_lvl, tr_pond_lvl, & -#endif tr_pond_topo, tr_snow, tr_fsd, tr_iso, tr_aero, tr_brine, & skl_bgc, z_tracers, solve_zsal integer(kind=int_kind) :: & @@ -322,11 +311,7 @@ subroutine init_restart call icepack_query_parameters(skl_bgc_out=skl_bgc, & z_tracers_out=z_tracers, solve_zsal_out=solve_zsal) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & -#ifdef UNDEPRECATE_CESMPONDS - tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & -#else tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & -#endif tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & tr_snow_out=tr_snow, tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & @@ -388,21 +373,6 @@ subroutine init_restart enddo ! iblk endif endif -#ifdef UNDEPRECATE_CESMPONDS - ! CESM melt ponds - if (tr_pond_cesm) then - if (trim(runtype) == 'continue') & - restart_pond_cesm = .true. - if (restart_pond_cesm) then - call read_restart_pond_cesm - else - do iblk = 1, nblocks - call init_meltponds_cesm(trcrn(:,:,nt_apnd,:,iblk), & - trcrn(:,:,nt_hpnd,:,iblk)) - enddo ! iblk - endif - endif -#endif ! 
level-ice melt ponds if (tr_pond_lvl) then if (trim(runtype) == 'continue') & diff --git a/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 b/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 index 6e799723e..77bb7738e 100644 --- a/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 @@ -157,11 +157,7 @@ subroutine ice_step use ice_history_bgc, only: init_history_bgc use ice_restart, only: final_restart use ice_restart_column, only: write_restart_age, write_restart_FY, & -#ifdef UNDEPRECATE_CESMPONDS - write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & -#else write_restart_lvl, write_restart_pond_lvl, & -#endif write_restart_pond_topo, write_restart_aero, write_restart_fsd, & write_restart_iso, write_restart_bgc, write_restart_hbrine, & write_restart_snow @@ -184,11 +180,7 @@ subroutine ice_step logical (kind=log_kind) :: & tr_iage, tr_FY, tr_lvl, tr_fsd, tr_snow, & -#ifdef UNDEPRECATE_CESMPONDS - tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & -#else tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & -#endif calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec character(len=*), parameter :: subname = '(ice_step)' @@ -206,11 +198,7 @@ subroutine ice_step solve_zsal_out=solve_zsal, z_tracers_out=z_tracers, ktherm_out=ktherm, & wave_spec_out=wave_spec) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & -#ifdef UNDEPRECATE_CESMPONDS - tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & -#else tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & -#endif tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & tr_iso_out=tr_iso, tr_fsd_out=tr_fsd, tr_snow_out=tr_snow) call icepack_warnings_flush(nu_diag) @@ -412,9 +400,6 @@ subroutine ice_step if (tr_iage) call write_restart_age if (tr_FY) call write_restart_FY if (tr_lvl) call write_restart_lvl -#ifdef UNDEPRECATE_CESMPONDS - if (tr_pond_cesm) call write_restart_pond_cesm -#endif if (tr_pond_lvl) call write_restart_pond_lvl if (tr_pond_topo) call write_restart_pond_topo if (tr_snow) call write_restart_snow diff --git a/cicecore/drivers/nuopc/dmi/cice_cap.info b/cicecore/drivers/nuopc/dmi/cice_cap.info index 0ec1dea5a..c4c6bea55 100644 --- a/cicecore/drivers/nuopc/dmi/cice_cap.info +++ b/cicecore/drivers/nuopc/dmi/cice_cap.info @@ -1023,8 +1023,8 @@ module cice_cap dataPtr_vice (i1,j1,iblk) = vice(i,j,iblk) ! sea ice volume dataPtr_vsno (i1,j1,iblk) = vsno(i,j,iblk) ! snow volume dataPtr_fswthru (i1,j1,iblk) = fswthru(i,j,iblk) ! short wave penetration through ice - ui = strocnxT(i,j,iblk) - vj = strocnyT(i,j,iblk) + ui = strocnxT_iavg(i,j,iblk) + vj = strocnyT_iavg(i,j,iblk) angT = ANGLET(i,j,iblk) dataPtr_strocnxT(i1,j1,iblk) = ui*cos(-angT) + vj*sin(angT) ! ice ocean stress dataPtr_strocnyT(i1,j1,iblk) = -ui*sin(angT) + vj*cos(-angT) ! 
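! A minimal, self-contained sketch of the grid-to-geographic vector
! rotation used by the coupler caps above; the angle and vector here are
! made-up test values, not CICE fields, and the program is purely
! illustrative (note the dmi cap applies the rotation with the opposite
! sign of the angle to the mct/cmeps caps).
program rotate_demo
  implicit none
  integer, parameter :: dbl_kind = selected_real_kind(13)
  real(dbl_kind) :: angT, ui, vj, xgeo, ygeo, xback, yback
  angT = 0.3_dbl_kind        ! hypothetical T-cell rotation angle (radians)
  ui   = 1.0_dbl_kind        ! grid-relative vector components
  vj   = 2.0_dbl_kind
  ! rotate grid -> geographic by +angT (the mct/cmeps convention)
  xgeo = ui*cos(angT) - vj*sin(angT)
  ygeo = vj*cos(angT) + ui*sin(angT)
  ! the inverse rotation (by -angT) recovers the original components
  xback =  xgeo*cos(angT) + ygeo*sin(angT)
  yback = -xgeo*sin(angT) + ygeo*cos(angT)
  print *, 'round-trip error: ', abs(xback-ui) + abs(yback-vj)
end program rotate_demo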
ice ocean stress diff --git a/cicecore/drivers/standalone/cice/CICE_InitMod.F90 b/cicecore/drivers/standalone/cice/CICE_InitMod.F90 index 07a151a01..9c30b15a3 100644 --- a/cicecore/drivers/standalone/cice/CICE_InitMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_InitMod.F90 @@ -266,17 +266,10 @@ subroutine init_restart use ice_grid, only: tmask use ice_init, only: ice_ic use ice_init_column, only: init_age, init_FY, init_lvl, init_snowtracers, & -#ifdef UNDEPRECATE_CESMPONDS - init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & -#else init_meltponds_lvl, init_meltponds_topo, & -#endif init_isotope, init_aerosol, init_hbrine, init_bgc, init_fsd use ice_restart_column, only: restart_age, read_restart_age, & restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & -#ifdef UNDEPRECATE_CESMPONDS - restart_pond_cesm, read_restart_pond_cesm, & -#endif restart_pond_lvl, read_restart_pond_lvl, & restart_pond_topo, read_restart_pond_topo, & restart_snow, read_restart_snow, & @@ -293,11 +286,7 @@ subroutine init_restart i, j , & ! horizontal indices iblk ! block index logical(kind=log_kind) :: & -#ifdef UNDEPRECATE_CESMPONDS - tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & -#else tr_iage, tr_FY, tr_lvl, tr_pond_lvl, & -#endif tr_pond_topo, tr_snow, tr_fsd, tr_iso, tr_aero, tr_brine, & skl_bgc, z_tracers, solve_zsal integer(kind=int_kind) :: & @@ -317,11 +306,7 @@ subroutine init_restart call icepack_query_parameters(skl_bgc_out=skl_bgc, & z_tracers_out=z_tracers, solve_zsal_out=solve_zsal) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & -#ifdef UNDEPRECATE_CESMPONDS - tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & -#else tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & -#endif tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & tr_snow_out=tr_snow, tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & @@ -383,21 +368,6 @@ subroutine init_restart enddo ! iblk endif endif -#ifdef UNDEPRECATE_CESMPONDS - ! CESM melt ponds - if (tr_pond_cesm) then - if (trim(runtype) == 'continue') & - restart_pond_cesm = .true. - if (restart_pond_cesm) then - call read_restart_pond_cesm - else - do iblk = 1, nblocks - call init_meltponds_cesm(trcrn(:,:,nt_apnd,:,iblk), & - trcrn(:,:,nt_hpnd,:,iblk)) - enddo ! iblk - endif - endif -#endif ! 
level-ice melt ponds if (tr_pond_lvl) then if (trim(runtype) == 'continue') & diff --git a/cicecore/drivers/standalone/cice/CICE_RunMod.F90 b/cicecore/drivers/standalone/cice/CICE_RunMod.F90 index 00c7921d1..ae7f7ab1f 100644 --- a/cicecore/drivers/standalone/cice/CICE_RunMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_RunMod.F90 @@ -151,11 +151,7 @@ subroutine ice_step use ice_history_bgc, only: init_history_bgc use ice_restart, only: final_restart use ice_restart_column, only: write_restart_age, write_restart_FY, & -#ifdef UNDEPRECATE_CESMPONDS - write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & -#else write_restart_lvl, write_restart_pond_lvl, & -#endif write_restart_pond_topo, write_restart_aero, write_restart_fsd, & write_restart_iso, write_restart_bgc, write_restart_hbrine, & write_restart_snow @@ -178,11 +174,7 @@ subroutine ice_step logical (kind=log_kind) :: & tr_iage, tr_FY, tr_lvl, tr_fsd, tr_snow, & -#ifdef UNDEPRECATE_CESMPONDS - tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & -#else tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & -#endif calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec character(len=*), parameter :: subname = '(ice_step)' @@ -200,11 +192,7 @@ subroutine ice_step solve_zsal_out=solve_zsal, z_tracers_out=z_tracers, ktherm_out=ktherm, & wave_spec_out=wave_spec) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & -#ifdef UNDEPRECATE_CESMPONDS - tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & -#else tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & -#endif tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & tr_iso_out=tr_iso, tr_fsd_out=tr_fsd, tr_snow_out=tr_snow) call icepack_warnings_flush(nu_diag) @@ -408,9 +396,6 @@ subroutine ice_step if (tr_iage) call write_restart_age if (tr_FY) call write_restart_FY if (tr_lvl) call write_restart_lvl -#ifdef UNDEPRECATE_CESMPONDS - if (tr_pond_cesm) call write_restart_pond_cesm -#endif if (tr_pond_lvl) call write_restart_pond_lvl if (tr_pond_topo) call write_restart_pond_topo if (tr_snow) call write_restart_snow diff --git a/cicecore/drivers/unittest/gridavgchk/CICE_InitMod.F90 b/cicecore/drivers/unittest/gridavgchk/CICE_InitMod.F90 index ad355d783..84d1a3a60 100644 --- a/cicecore/drivers/unittest/gridavgchk/CICE_InitMod.F90 +++ b/cicecore/drivers/unittest/gridavgchk/CICE_InitMod.F90 @@ -241,17 +241,10 @@ subroutine init_restart use ice_grid, only: tmask use ice_init, only: ice_ic use ice_init_column, only: init_age, init_FY, init_lvl, & -#ifdef UNDEPRECATE_CESMPONDS - init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & -#else init_meltponds_lvl, init_meltponds_topo, & -#endif init_isotope, init_aerosol, init_hbrine, init_bgc, init_fsd use ice_restart_column, only: restart_age, read_restart_age, & restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & -#ifdef UNDEPRECATE_CESMPONDS - restart_pond_cesm, read_restart_pond_cesm, & -#endif restart_pond_lvl, read_restart_pond_lvl, & restart_pond_topo, read_restart_pond_topo, & restart_fsd, read_restart_fsd, & @@ -267,11 +260,7 @@ subroutine init_restart i, j , & ! horizontal indices iblk ! 
block index logical(kind=log_kind) :: & -#ifdef UNDEPRECATE_CESMPONDS - tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & -#else tr_iage, tr_FY, tr_lvl, tr_pond_lvl, & -#endif tr_pond_topo, tr_fsd, tr_iso, tr_aero, tr_brine, & skl_bgc, z_tracers, solve_zsal integer(kind=int_kind) :: & @@ -290,11 +279,7 @@ subroutine init_restart call icepack_query_parameters(skl_bgc_out=skl_bgc, & z_tracers_out=z_tracers, solve_zsal_out=solve_zsal) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & -#ifdef UNDEPRECATE_CESMPONDS - tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & -#else tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & -#endif tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & @@ -354,21 +339,6 @@ subroutine init_restart enddo ! iblk endif endif -#ifdef UNDEPRECATE_CESMPONDS - ! CESM melt ponds - if (tr_pond_cesm) then - if (trim(runtype) == 'continue') & - restart_pond_cesm = .true. - if (restart_pond_cesm) then - call read_restart_pond_cesm - else - do iblk = 1, nblocks - call init_meltponds_cesm(trcrn(:,:,nt_apnd,:,iblk), & - trcrn(:,:,nt_hpnd,:,iblk)) - enddo ! iblk - endif - endif -#endif ! level-ice melt ponds if (tr_pond_lvl) then if (trim(runtype) == 'continue') & diff --git a/cicecore/drivers/unittest/sumchk/CICE_InitMod.F90 b/cicecore/drivers/unittest/sumchk/CICE_InitMod.F90 index ad355d783..84d1a3a60 100644 --- a/cicecore/drivers/unittest/sumchk/CICE_InitMod.F90 +++ b/cicecore/drivers/unittest/sumchk/CICE_InitMod.F90 @@ -241,17 +241,10 @@ subroutine init_restart use ice_grid, only: tmask use ice_init, only: ice_ic use ice_init_column, only: init_age, init_FY, init_lvl, & -#ifdef UNDEPRECATE_CESMPONDS - init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & -#else init_meltponds_lvl, init_meltponds_topo, & -#endif init_isotope, init_aerosol, init_hbrine, init_bgc, init_fsd use ice_restart_column, only: restart_age, read_restart_age, & restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & -#ifdef UNDEPRECATE_CESMPONDS - restart_pond_cesm, read_restart_pond_cesm, & -#endif restart_pond_lvl, read_restart_pond_lvl, & restart_pond_topo, read_restart_pond_topo, & restart_fsd, read_restart_fsd, & @@ -267,11 +260,7 @@ subroutine init_restart i, j , & ! horizontal indices iblk ! block index logical(kind=log_kind) :: & -#ifdef UNDEPRECATE_CESMPONDS - tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & -#else tr_iage, tr_FY, tr_lvl, tr_pond_lvl, & -#endif tr_pond_topo, tr_fsd, tr_iso, tr_aero, tr_brine, & skl_bgc, z_tracers, solve_zsal integer(kind=int_kind) :: & @@ -290,11 +279,7 @@ subroutine init_restart call icepack_query_parameters(skl_bgc_out=skl_bgc, & z_tracers_out=z_tracers, solve_zsal_out=solve_zsal) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & -#ifdef UNDEPRECATE_CESMPONDS - tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & -#else tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & -#endif tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & @@ -354,21 +339,6 @@ subroutine init_restart enddo ! iblk endif endif -#ifdef UNDEPRECATE_CESMPONDS - ! CESM melt ponds - if (tr_pond_cesm) then - if (trim(runtype) == 'continue') & - restart_pond_cesm = .true. 
- if (restart_pond_cesm) then - call read_restart_pond_cesm - else - do iblk = 1, nblocks - call init_meltponds_cesm(trcrn(:,:,nt_apnd,:,iblk), & - trcrn(:,:,nt_hpnd,:,iblk)) - enddo ! iblk - endif - endif -#endif ! level-ice melt ponds if (tr_pond_lvl) then if (trim(runtype) == 'continue') & diff --git a/cicecore/drivers/unittest/sumchk/sumchk.F90 b/cicecore/drivers/unittest/sumchk/sumchk.F90 index aba435b0e..d9ea72d8c 100644 --- a/cicecore/drivers/unittest/sumchk/sumchk.F90 +++ b/cicecore/drivers/unittest/sumchk/sumchk.F90 @@ -58,9 +58,6 @@ program sumchk integer(int_kind),parameter :: ntests3 = 3 character(len=8) :: errorflag3(ntests3) character(len=32) :: stringflag3(ntests3) - integer(int_kind),parameter :: ntests4 = 1 - character(len=8) :: errorflag4(ntests4) - character(len=32) :: stringflag4(ntests4) integer(int_kind) :: npes, ierr, ntask @@ -100,7 +97,6 @@ program sumchk errorflag1 = passflag errorflag2 = passflag errorflag3 = passflag - errorflag4 = passflag npes = get_num_procs() if (my_task == master_task) then @@ -600,63 +596,6 @@ program sumchk endif enddo - ! --------------------------- - ! Test Vector Reductions - ! --------------------------- - - if (my_task == master_task) write(6,*) ' ' - - n = 1 ; stringflag4(n) = 'dble sum vector' - allocate(vec8(3)) - allocate(sum8(3)) - - minval = -5. - maxval = 8. - - vec8(1) = 1. - - ! fill one gridcell with a min and max value - ntask = max(npes-1,1)-1 - if (my_task == ntask) then - vec8(1) = minval - endif - ntask = min(npes,2)-1 - if (my_task == ntask) then - vec8(1) = maxval - endif - vec8(2) = 2. * vec8(1) - vec8(3) = 3. * vec8(1) - - ! compute correct results - if (npes == 1) then - minval = maxval - corval = maxval - else - corval = (npes - 2) * 1.0 + minval + maxval - endif - - do k = 1,ntests4 - string = stringflag4(k) - sum8 = -888e12 - if (k == 1) then - sum8 = global_allreduce_sum(vec8, distrb_info) - else - call abort_ice(subname//' illegal k vector',file=__FILE__,line=__LINE__) - endif - - if (my_task == master_task) then - write(6,'(1x,a,3g16.8)') string, sum8(1),sum8(2),sum8(3) - endif - - if (sum8(1) /= corval .or. sum8(2) /= 2.*corval .or. sum8(3) /= 3.*corval) then - errorflag4(k) = failflag - errorflag0 = failflag - if (my_task == master_task) then - write(6,*) '**** ERROR ', sum8(1),sum8(2),sum8(3),corval - endif - endif - enddo - ! --------------------------- if (my_task == master_task) then @@ -670,9 +609,6 @@ program sumchk do k = 1,ntests3 write(6,*) errorflag3(k),stringflag3(k) enddo - do k = 1,ntests4 - write(6,*) errorflag4(k),stringflag4(k) - enddo write(6,*) ' ' write(6,*) 'SUMCHK COMPLETED SUCCESSFULLY' if (errorflag0 == passflag) then diff --git a/cicecore/shared/ice_arrays_column.F90 b/cicecore/shared/ice_arrays_column.F90 index c9e8be8db..b4727d3fd 100644 --- a/cicecore/shared/ice_arrays_column.F90 +++ b/cicecore/shared/ice_arrays_column.F90 @@ -24,6 +24,7 @@ module ice_arrays_column public :: alloc_arrays_column ! icepack_atmo.F90 + ! Cdn variables on the T-grid real (kind=dbl_kind), public, & dimension (:,:,:), allocatable :: & Cdn_atm , & ! 
atm drag coefficient diff --git a/cicecore/shared/ice_init_column.F90 b/cicecore/shared/ice_init_column.F90 index 5339aa6ec..06ab79cdb 100644 --- a/cicecore/shared/ice_init_column.F90 +++ b/cicecore/shared/ice_init_column.F90 @@ -44,11 +44,7 @@ module ice_init_column private public :: init_thermo_vertical, init_shortwave, & init_age, init_FY, init_lvl, init_fsd, & -#ifdef UNDEPRECATE_CESMPONDS - init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & -#else init_meltponds_lvl, init_meltponds_topo, & -#endif init_aerosol, init_bgc, init_hbrine, init_zbgc, input_zbgc, & count_tracers, init_isotope, init_snowtracers @@ -547,23 +543,6 @@ subroutine init_lvl(iblk, alvl, vlvl) end subroutine init_lvl -#ifdef UNDEPRECATE_CESMPONDS -!======================================================================= - -! Initialize melt ponds. - - subroutine init_meltponds_cesm(apnd, hpnd) - - real(kind=dbl_kind), dimension(:,:,:), intent(out) :: & - apnd , & ! melt pond area fraction - hpnd ! melt pond depth - character(len=*),parameter :: subname='(init_meltponds_cesm)' - - apnd(:,:,:) = c0 - hpnd(:,:,:) = c0 - - end subroutine init_meltponds_cesm -#endif !======================================================================= ! Initialize melt ponds. @@ -1818,11 +1797,7 @@ subroutine count_tracers integer (kind=int_kind) :: ntrcr logical (kind=log_kind) :: tr_iage, tr_FY, tr_lvl, tr_pond, tr_aero, tr_fsd logical (kind=log_kind) :: tr_snow -#ifdef UNDEPRECATE_CESMPONDS - logical (kind=log_kind) :: tr_iso, tr_pond_cesm, tr_pond_lvl, tr_pond_topo -#else logical (kind=log_kind) :: tr_iso, tr_pond_lvl, tr_pond_topo -#endif integer (kind=int_kind) :: nt_Tsfc, nt_sice, nt_qice, nt_qsno, nt_iage, nt_FY integer (kind=int_kind) :: nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, nt_aero integer (kind=int_kind) :: nt_fsd, nt_isosno, nt_isoice @@ -1907,11 +1882,7 @@ subroutine count_tracers call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & tr_lvl_out=tr_lvl, tr_aero_out=tr_aero, tr_pond_out=tr_pond, & -#ifdef UNDEPRECATE_CESMPONDS - tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & -#else tr_pond_lvl_out=tr_pond_lvl, & -#endif tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_fsd_out=tr_fsd, & tr_snow_out=tr_snow, tr_iso_out=tr_iso, & tr_bgc_Nit_out=tr_bgc_Nit, tr_bgc_Am_out =tr_bgc_Am, tr_bgc_Sil_out=tr_bgc_Sil, & diff --git a/cicecore/shared/ice_restart_column.F90 b/cicecore/shared/ice_restart_column.F90 index a15f9d2c1..86ff170c7 100644 --- a/cicecore/shared/ice_restart_column.F90 +++ b/cicecore/shared/ice_restart_column.F90 @@ -29,9 +29,6 @@ module ice_restart_column public :: write_restart_age, read_restart_age, & write_restart_FY, read_restart_FY, & write_restart_lvl, read_restart_lvl, & -#ifdef UNDEPRECATE_CESMPONDS - write_restart_pond_cesm, read_restart_pond_cesm, & -#endif write_restart_pond_lvl, read_restart_pond_lvl, & write_restart_pond_topo, read_restart_pond_topo, & write_restart_snow, read_restart_snow, & @@ -45,9 +42,6 @@ module ice_restart_column restart_age , & ! if .true., read age tracer restart file restart_FY , & ! if .true., read FY tracer restart file restart_lvl , & ! if .true., read lvl tracer restart file -#ifdef UNDEPRECATE_CESMPONDS - restart_pond_cesm, & ! if .true., read meltponds restart file -#endif restart_pond_lvl , & ! if .true., read meltponds restart file restart_pond_topo, & ! if .true., read meltponds restart file restart_snow , & ! 
if .true., read snow tracer restart file @@ -260,74 +254,6 @@ subroutine read_restart_lvl() end subroutine read_restart_lvl -#ifdef UNDEPRECATE_CESMPONDS -!======================================================================= -! -! Dumps all values needed for restarting -! -! authors Elizabeth C. Hunke, LANL -! David A. Bailey, NCAR - - subroutine write_restart_pond_cesm() - - use ice_fileunits, only: nu_dump_pond - use ice_state, only: trcrn - - ! local variables - - logical (kind=log_kind) :: diag - integer (kind=int_kind) :: nt_apnd, nt_hpnd - character(len=*),parameter :: subname='(write_restart_pond_cesm)' - - call icepack_query_tracer_indices(nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - - diag = .true. - - call write_restart_field(nu_dump_pond,0,trcrn(:,:,nt_apnd,:,:),'ruf8', & - 'apnd',ncat,diag) - call write_restart_field(nu_dump_pond,0,trcrn(:,:,nt_hpnd,:,:),'ruf8', & - 'hpnd',ncat,diag) - - end subroutine write_restart_pond_cesm - -!======================================================================= - -! Reads all values needed for a meltpond volume restart -! -! authors Elizabeth C. Hunke, LANL -! David A. Bailey, NCAR - - subroutine read_restart_pond_cesm() - - use ice_fileunits, only: nu_restart_pond - use ice_state, only: trcrn - - ! local variables - - logical (kind=log_kind) :: & - diag - integer (kind=int_kind) :: nt_apnd, nt_hpnd - character(len=*),parameter :: subname='(read_restart_pond_cesm)' - - call icepack_query_tracer_indices(nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - - diag = .true. - - if (my_task == master_task) write(nu_diag,*) subname,'min/max cesm ponds' - - call read_restart_field(nu_restart_pond,0,trcrn(:,:,nt_apnd,:,:),'ruf8', & - 'apnd',ncat,diag,field_loc_center,field_type_scalar) - call read_restart_field(nu_restart_pond,0,trcrn(:,:,nt_hpnd,:,:),'ruf8', & - 'hpnd',ncat,diag,field_loc_center,field_type_scalar) - - end subroutine read_restart_pond_cesm -#endif !======================================================================= ! ! Dumps all values needed for restarting diff --git a/configuration/scripts/cice.batch.csh b/configuration/scripts/cice.batch.csh index fbe172f51..1cf23da45 100755 --- a/configuration/scripts/cice.batch.csh +++ b/configuration/scripts/cice.batch.csh @@ -33,6 +33,19 @@ cat >> ${jobfile} << EOFB #PBS -l walltime=${batchtime} EOFB +else if (${ICE_MACHINE} =~ gust*) then +cat >> ${jobfile} << EOFB +#PBS -q ${queue} +#PBS -l job_priority=regular +#PBS -N ${ICE_CASENAME} +#PBS -A ${acct} +#PBS -l select=${nnodes}:ncpus=${corespernode}:mpiprocs=${taskpernodelimit}:ompthreads=${nthrds} +#PBS -l walltime=${batchtime} +#PBS -j oe +#PBS -W umask=022 +#PBS -o ${ICE_CASEDIR} +EOFB + else if (${ICE_MACHINE} =~ hobart*) then cat >> ${jobfile} << EOFB #PBS -j oe @@ -90,8 +103,8 @@ else if (${ICE_MACHINE} =~ nrlssc*) then # nrlssc queue system has nodes with different task per node if (${taskpernode} <= 12) set tpnstr = 'twelve' if (${taskpernode} == 20) set tpnstr = 'twenty' -if (${taskpernode} == 24) set tpnstr = 'twentyfour' -if (${taskpernode} == 28) set tpnstr = 'twentyeight' +if (${taskpernode} >= 24) set tpnstr = 'twentyfour' +#if (${taskpernode} == 28) set tpnstr = 'twentyeight' cat >> ${jobfile} <> ${jobfile} << EOFR ./cice >&! 
\$ICE_RUNLOG_FILE @@ -23,7 +23,19 @@ EOFR endif #======= -else if (${ICE_MACHINE} =~ hobart* || ${ICE_MACHINE} =~ izumi*) then +else if (${ICE_MACHCOMP} =~ gust*) then +if (${ICE_COMMDIR} =~ serial*) then +cat >> ${jobfile} << EOFR +./cice >&! \$ICE_RUNLOG_FILE +EOFR +else +cat >> ${jobfile} << EOFR +mpiexec --cpu-bind depth -n ${ntasks} -ppn ${taskpernodelimit} -d ${nthrds} ./cice >&! \$ICE_RUNLOG_FILE +EOFR +endif + +#======= +else if (${ICE_MACHCOMP} =~ hobart* || ${ICE_MACHCOMP} =~ izumi*) then if (${ICE_COMMDIR} =~ serial*) then cat >> ${jobfile} << EOFR ./cice >&! \$ICE_RUNLOG_FILE @@ -35,7 +47,7 @@ EOFR endif #======= -else if (${ICE_MACHINE} =~ gaffney* || ${ICE_MACHINE} =~ koehr* || ${ICE_MACHINE} =~ mustang*) then +else if (${ICE_MACHCOMP} =~ gaffney* || ${ICE_MACHCOMP} =~ koehr* || ${ICE_MACHCOMP} =~ mustang*) then if (${ICE_COMMDIR} =~ serial*) then cat >> ${jobfile} << EOFR ./cice >&! \$ICE_RUNLOG_FILE @@ -47,7 +59,7 @@ EOFR endif #======= -else if (${ICE_MACHINE} =~ nrlssc*) then +else if (${ICE_MACHCOMP} =~ nrlssc*) then if (${ICE_COMMDIR} =~ serial*) then cat >> ${jobfile} << EOFR ./cice >&! \$ICE_RUNLOG_FILE @@ -59,13 +71,19 @@ EOFR endif #======= -else if (${ICE_MACHINE} =~ onyx* || ${ICE_MACHINE} =~ narwhal) then +else if (${ICE_MACHCOMP} =~ narwhal_*hpcx*) then cat >> ${jobfile} << EOFR -aprun -n ${ntasks} -N ${taskpernodelimit} -d ${nthrds} ./cice >&! \$ICE_RUNLOG_FILE +mpirun -np ${ntasks} -hostfile \$PBS_NODEFILE \${EXTRA_OMPI_SETTINGS} ./cice >&! \$ICE_RUNLOG_FILE +EOFR + +#======= +else if (${ICE_MACHCOMP} =~ onyx* || ${ICE_MACHCOMP} =~ narwhal*) then +cat >> ${jobfile} << EOFR +aprun -q -n ${ntasks} -N ${taskpernodelimit} -d ${nthrds} ./cice >&! \$ICE_RUNLOG_FILE EOFR #======= -else if (${ICE_MACHINE} =~ cori*) then +else if (${ICE_MACHCOMP} =~ cori*) then if (${ICE_COMMDIR} =~ serial*) then cat >> ${jobfile} << EOFR ./cice >&! \$ICE_RUNLOG_FILE @@ -77,7 +95,7 @@ EOFR endif #======= -else if (${ICE_MACHINE} =~ compy*) then +else if (${ICE_MACHCOMP} =~ compy*) then if (${ICE_COMMDIR} =~ serial*) then cat >> ${jobfile} << EOFR ./cice >&! \$ICE_RUNLOG_FILE @@ -89,7 +107,7 @@ EOFR endif #======= -else if (${ICE_MACHINE} =~ badger*) then +else if (${ICE_MACHCOMP} =~ badger*) then if (${ICE_COMMDIR} =~ serial*) then cat >> ${jobfile} << EOFR ./cice >&! \$ICE_RUNLOG_FILE @@ -101,7 +119,7 @@ EOFR endif #======= -else if (${ICE_MACHINE} =~ fram*) then +else if (${ICE_MACHCOMP} =~ fram*) then if (${ICE_COMMDIR} =~ serial*) then cat >> ${jobfile} << EOFR ./cice >&! \$ICE_RUNLOG_FILE @@ -113,7 +131,7 @@ EOFR endif #======= -else if (${ICE_MACHINE} =~ cesium*) then +else if (${ICE_MACHCOMP} =~ cesium*) then if (${ICE_COMMDIR} =~ serial*) then cat >> ${jobfile} << EOFR ./cice >&! \$ICE_RUNLOG_FILE @@ -125,7 +143,7 @@ EOFR endif #======= -else if (${ICE_MACHINE} =~ millikan*) then +else if (${ICE_MACHCOMP} =~ millikan*) then if (${ICE_COMMDIR} =~ serial*) then cat >> ${jobfile} << EOFR ./cice >&! \$ICE_RUNLOG_FILE @@ -137,7 +155,7 @@ EOFR endif #======= -else if (${ICE_MACHINE} =~ daley* || ${ICE_MACHINE} =~ banting*) then +else if (${ICE_MACHCOMP} =~ daley* || ${ICE_MACHCOMP} =~ banting*) then if (${ICE_COMMDIR} =~ serial*) then cat >> ${jobfile} << EOFR ./cice >&! 
\$ICE_RUNLOG_FILE @@ -149,7 +167,7 @@ EOFR endif #======= -else if (${ICE_MACHINE} =~ ppp5* || ${ICE_MACHINE} =~ ppp6* || ${ICE_MACHINE} =~ robert* || ${ICE_MACHINE} =~ underhill*) then +else if (${ICE_MACHCOMP} =~ ppp5* || ${ICE_MACHCOMP} =~ ppp6* || ${ICE_MACHCOMP} =~ robert* || ${ICE_MACHCOMP} =~ underhill*) then if (${ICE_COMMDIR} =~ serial*) then cat >> ${jobfile} << EOFR ./cice >&! \$ICE_RUNLOG_FILE @@ -161,7 +179,7 @@ EOFR endif #======= -else if (${ICE_MACHINE} =~ ppp3*) then +else if (${ICE_MACHCOMP} =~ ppp3*) then if (${ICE_COMMDIR} =~ serial*) then cat >> ${jobfile} << EOFR ./cice >&! \$ICE_RUNLOG_FILE @@ -173,7 +191,7 @@ EOFR endif #======= -else if (${ICE_MACHINE} =~ gpsc3*) then +else if (${ICE_MACHCOMP} =~ gpsc3*) then if (${ICE_COMMDIR} =~ serial*) then cat >> ${jobfile} << EOFR ./cice >&! \$ICE_RUNLOG_FILE @@ -186,7 +204,7 @@ endif #======= -else if (${ICE_MACHINE} =~ freya*) then +else if (${ICE_MACHCOMP} =~ freya*) then if (${ICE_COMMDIR} =~ serial*) then cat >> ${jobfile} << EOFR aprun -n 1 -N 1 -d 1 ./cice >&! \$ICE_RUNLOG_FILE @@ -198,45 +216,45 @@ EOFR endif #======= -else if (${ICE_MACHINE} =~ gaea*) then +else if (${ICE_MACHCOMP} =~ gaea*) then cat >> ${jobfile} << EOFR srun -n ${ntasks} -c ${nthrds} ./cice >&! \$ICE_RUNLOG_FILE EOFR #======= -else if (${ICE_MACHINE} =~ hera*) then +else if (${ICE_MACHCOMP} =~ hera*) then cat >> ${jobfile} << EOFR srun -n ${ntasks} -c ${nthrds} ./cice >&! \$ICE_RUNLOG_FILE EOFR #======= -else if (${ICE_MACHINE} =~ orion*) then +else if (${ICE_MACHCOMP} =~ orion*) then cat >> ${jobfile} << EOFR srun -n ${ntasks} -c ${nthrds} ./cice >&! \$ICE_RUNLOG_FILE EOFR #======= -else if (${ICE_MACHINE} =~ high_Sierra*) then +else if (${ICE_MACHCOMP} =~ high_Sierra*) then cat >> ${jobfile} << EOFR mpirun -np ${ntasks} ./cice >&! \$ICE_RUNLOG_FILE #./cice >&! \$ICE_RUNLOG_FILE EOFR #======= -else if (${ICE_MACHINE} =~ phase3*) then +else if (${ICE_MACHCOMP} =~ phase3*) then cat >> ${jobfile} << EOFR mpirun -np ${ntasks} ./cice >&! \$ICE_RUNLOG_FILE #./cice >&! \$ICE_RUNLOG_FILE EOFR #======= -else if (${ICE_MACHINE} =~ testmachine*) then +else if (${ICE_MACHCOMP} =~ testmachine*) then cat >> ${jobfile} << EOFR ./cice >&! \$ICE_RUNLOG_FILE EOFR #======= -else if (${ICE_MACHINE} =~ travisCI*) then +else if (${ICE_MACHCOMP} =~ travisCI*) then if (${ICE_COMMDIR} =~ serial*) then cat >> ${jobfile} << EOFR ./cice >&! \$ICE_RUNLOG_FILE @@ -251,7 +269,7 @@ endif #EOFR #======= -else if (${ICE_MACHINE} =~ conda*) then +else if (${ICE_MACHCOMP} =~ conda*) then if (${ICE_COMMDIR} =~ serial*) then cat >> ${jobfile} << EOFR ./cice >&! \$ICE_RUNLOG_FILE @@ -265,7 +283,7 @@ endif #======= else - echo "${0} ERROR ${ICE_MACHINE} unknown" + echo "${0} ERROR ${ICE_MACHCOMP} unknown" exit -1 endif #======= diff --git a/configuration/scripts/ice_in b/configuration/scripts/ice_in index ec582873a..8262f34ec 100644 --- a/configuration/scripts/ice_in +++ b/configuration/scripts/ice_in @@ -75,6 +75,11 @@ kcatbound = 0 dxrect = 30.e5 dyrect = 30.e5 + lonrefrect = -156.50d0 + latrefrect = 71.35d0 + scale_dxdy = .false. + dxscale = 1.d0 + dyscale = 1.d0 close_boundaries = .false. ncat = 5 nfsd = 1 @@ -162,7 +167,7 @@ kridge = 1 ktransport = 1 ssh_stress = 'geostrophic' - maxits_nonlin = 4 + maxits_nonlin = 10 precond = 'pgmres' dim_fgmres = 50 dim_pgmres = 5 @@ -173,7 +178,7 @@ monitor_pgmres = .false. ortho_type = 'mgs' reltol_nonlin = 1e-8 - reltol_fgmres = 1e-2 + reltol_fgmres = 1e-1 reltol_pgmres = 1e-6 algo_nonlin = 'picard' use_mean_vrel = .true. 
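Note on the ice_in hunk above: it adds reference coordinates (lonrefrect, latrefrect) and optional grid stretching (scale_dxdy, dxscale, dyscale) for the rectangular grid, and retunes the implicit-solver defaults (maxits_nonlin 4 to 10, reltol_fgmres 1e-2 to 1e-1). A minimal Fortran sketch of how such namelist entries are declared, defaulted, and read follows; the grid_nml group name, the internal-file read, and the error handling are illustrative stand-ins, not the actual ice_init.F90 code.

program grid_nml_sketch
  implicit none
  integer, parameter :: dbl_kind = selected_real_kind(13)
  real (kind=dbl_kind) :: dxrect, dyrect          ! base cell size (cm)
  real (kind=dbl_kind) :: lonrefrect, latrefrect  ! reference lon/lat of the domain (degrees)
  logical              :: scale_dxdy              ! enable variable spacing
  real (kind=dbl_kind) :: dxscale, dyscale        ! per-cell stretch factors
  namelist /grid_nml/ dxrect, dyrect, lonrefrect, latrefrect, &
                      scale_dxdy, dxscale, dyscale
  character(len=120) :: nml_text
  integer :: nml_error
  ! defaults matching the ice_in values above
  dxrect = 30.e5_dbl_kind       ; dyrect = 30.e5_dbl_kind
  lonrefrect = -156.50_dbl_kind ; latrefrect = 71.35_dbl_kind
  scale_dxdy = .false.
  dxscale = 1.0_dbl_kind        ; dyscale = 1.0_dbl_kind
  ! CICE reads from the ice_in unit; an internal file keeps this sketch self-contained
  nml_text = '&grid_nml scale_dxdy=.true., dxscale=1.02, dyscale=1.01 /'
  read(nml_text, nml=grid_nml, iostat=nml_error)
  if (nml_error /= 0) stop 'error reading grid_nml'
  print *, 'scale_dxdy =', scale_dxdy, ' dxscale =', dxscale, ' dyscale =', dyscale
end program grid_nml_sketch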
diff --git a/configuration/scripts/machines/Macros.gust_cray b/configuration/scripts/machines/Macros.gust_cray new file mode 100644 index 000000000..eadc14884 --- /dev/null +++ b/configuration/scripts/machines/Macros.gust_cray @@ -0,0 +1,71 @@ +#============================================================================== +# Makefile macros for NCAR gust, cray compiler +#============================================================================== + +CPP := ftn -e P +CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} +CFLAGS := -c -O2 + +FIXEDFLAGS := -132 +FREEFLAGS := +FFLAGS := -hbyteswapio +FFLAGS_NOOPT:= -O0 +LDFLAGS := -hbyteswapio + +ifeq ($(ICE_BLDDEBUG), true) + FFLAGS += -O0 -hfp0 -g -Rbcdps -Ktrap=fp +else + FFLAGS += -O2 -hfp0 +endif + +SCC := cc +SFC := ftn +MPICC := cc +MPIFC := ftn + +ifeq ($(ICE_COMMDIR), mpi) + FC := $(MPIFC) + CC := $(MPICC) +else + FC := $(SFC) + CC := $(SCC) +endif +LD:= $(FC) + +NETCDF_PATH := $(NETCDF) + +PIO_CONFIG_OPTS:= --enable-filesystem-hints=gpfs + +PNETCDF_PATH := $(PNETCDF) +#PNETCDF_PATH := /glade/u/apps/ch/opt/pio/2.2/mpt/2.15f/intel/17.0.1/lib + +INCLDIR := $(INCLDIR) + +LIB_NETCDF := $(NETCDF)/lib +#LIB_PNETCDF := $(PNETCDF_PATH)/lib +#LIB_MPI := $(IMPILIBDIR) + +#SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff -L$(LIB_PNETCDF) -lpnetcdf -lgptl +SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff + +ifeq ($(ICE_THREADED), true) + LDFLAGS += -fopenmp + CFLAGS += -fopenmp + FFLAGS += -fopenmp +else + LDFLAGS += -hnoomp +# CFLAGS += -hnoomp + FFLAGS += -hnoomp +endif + +#ifeq ($(ICE_IOTYPE), pio1) +# LIB_PIO := $(PIO_LIBDIR) +# SLIBS := $(SLIBS) -L$(LIB_PIO) -lpio +#endif + +ifeq ($(ICE_IOTYPE), pio2) + CPPDEFS := $(CPPDEFS) -DGPTL +# LIB_PIO := $(PIO_LIBDIR) +# SLIBS := $(SLIBS) -L$(LIB_PIO) -lpiof -lpioc -lgptl +endif + diff --git a/configuration/scripts/machines/Macros.gust_intel b/configuration/scripts/machines/Macros.gust_intel new file mode 100644 index 000000000..18d5d10d0 --- /dev/null +++ b/configuration/scripts/machines/Macros.gust_intel @@ -0,0 +1,68 @@ +#============================================================================== +# Makefile macros for NCAR gust, intel compiler +#============================================================================== + +CPP := fpp +CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} +CFLAGS := -c -O2 -fp-model precise + +FIXEDFLAGS := -132 +FREEFLAGS := -FR +FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -traceback +FFLAGS_NOOPT:= -O0 + +ifeq ($(ICE_BLDDEBUG), true) + FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created -link_mpi=dbg +# FFLAGS += -O0 -g -check all -fpe0 -ftrapuv -fp-model except -check noarg_temp_created -link_mpi=dbg -stand f08 +# FFLAGS += -O0 -g -check all -fpe0 -ftrapuv -fp-model except -check noarg_temp_created -init=snan,arrays -link_mpi=dbg +else + FFLAGS += -O2 +endif + +SCC := cc +SFC := ftn +MPICC := cc +MPIFC := ftn + +ifeq ($(ICE_COMMDIR), mpi) + FC := $(MPIFC) + CC := $(MPICC) +else + FC := $(SFC) + CC := $(SCC) +endif +LD:= $(FC) + +NETCDF_PATH := $(NETCDF) + +PIO_CONFIG_OPTS:= --enable-filesystem-hints=gpfs + +PNETCDF_PATH := $(PNETCDF) +#PNETCDF_PATH := /glade/u/apps/ch/opt/pio/2.2/mpt/2.15f/intel/17.0.1/lib + +INCLDIR := $(INCLDIR) + +LIB_NETCDF := $(NETCDF)/lib +#LIB_PNETCDF := $(PNETCDF_PATH)/lib +#LIB_MPI := $(IMPILIBDIR) + +#SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff -L$(LIB_PNETCDF) -lpnetcdf -lgptl +SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff + +ifeq ($(ICE_THREADED), true) + LDFLAGS += -qopenmp + 
CFLAGS += -qopenmp + FFLAGS += -qopenmp +endif + +#ifeq ($(ICE_IOTYPE), pio1) +# LIB_PIO := $(PIO_LIBDIR) +# SLIBS := $(SLIBS) -L$(LIB_PIO) -lpio +#endif + +ifeq ($(ICE_IOTYPE), pio2) + CPPDEFS := $(CPPDEFS) -DGPTL +# LIB_PIO := $(PIO_LIBDIR) +# SLIBS := $(SLIBS) -L$(LIB_PIO) -lpiof -lpioc -lgptl +endif + diff --git a/configuration/scripts/machines/Macros.narwhal_aocc b/configuration/scripts/machines/Macros.narwhal_aocc index 95f301e85..b22aeda64 100644 --- a/configuration/scripts/machines/Macros.narwhal_aocc +++ b/configuration/scripts/machines/Macros.narwhal_aocc @@ -49,7 +49,7 @@ INCLDIR := $(INCLDIR) ifeq ($(ICE_THREADED), true) LDFLAGS += -mp - CFLAGS += -mp +# CFLAGS += -mp FFLAGS += -mp else LDFLAGS += -nomp diff --git a/configuration/scripts/machines/Macros.narwhal_intelhpcx b/configuration/scripts/machines/Macros.narwhal_intelhpcx new file mode 100644 index 000000000..0e2a94b20 --- /dev/null +++ b/configuration/scripts/machines/Macros.narwhal_intelhpcx @@ -0,0 +1,57 @@ +#============================================================================== +# Macros file for NAVYDSRC narwhal, intel compiler with hpcx hcoll openmpi +#============================================================================== + +CPP := fpp +CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} +CFLAGS := -c -O2 -fp-model precise -fcommon + +FIXEDFLAGS := -132 +FREEFLAGS := -FR +FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -traceback +# -mcmodel medium -shared-intel +FFLAGS_NOOPT:= -O0 + +ifeq ($(ICE_BLDDEBUG), true) + FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created +# FFLAGS += -O0 -g -check all -fpe0 -ftrapuv -fp-model except -check noarg_temp_created -init=snan,arrays +else + FFLAGS += -O2 +endif + +SCC := cc +SFC := ftn +MPICC := mpicc +MPIFC := mpif90 + +ifeq ($(ICE_COMMDIR), mpi) + FC := $(MPIFC) + CC := $(MPICC) +else + FC := $(SFC) + CC := $(SCC) +endif +LD:= $(FC) + +# defined by module +NETCDF_PATH := $(NETCDF_DIR) +#PNETCDF_PATH := $(PNETCDF) +#PNETCDF_PATH := /glade/apps/opt/pnetcdf/1.3.0/intel/default +#LAPACK_LIBDIR := /glade/apps/opt/lapack/3.4.2/intel/12.1.5/lib + +#PIO_CONFIG_OPTS:= --enable-filesystem-hints=gpfs + +INCLDIR := $(INCLDIR) +INCLDIR += -I$(NETCDF_PATH)/include + +LIB_NETCDF := $(NETCDF_PATH)/lib +#LIB_PNETCDF := $(PNETCDF_PATH)/lib +#LIB_MPI := $(IMPILIBDIR) +SLIBS := $(HCOLL_LIBS) -L$(NETCDF_PATH)/lib -lnetcdf -lnetcdff + +ifeq ($(ICE_THREADED), true) + LDFLAGS += -qopenmp + CFLAGS += -qopenmp + FFLAGS += -qopenmp +endif + diff --git a/configuration/scripts/machines/env.badger_intel b/configuration/scripts/machines/env.badger_intel old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.banting_gnu b/configuration/scripts/machines/env.banting_gnu old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.banting_intel b/configuration/scripts/machines/env.banting_intel old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.cheyenne_gnu b/configuration/scripts/machines/env.cheyenne_gnu old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.cheyenne_intel b/configuration/scripts/machines/env.cheyenne_intel old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.cheyenne_pgi b/configuration/scripts/machines/env.cheyenne_pgi old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.compy_intel b/configuration/scripts/machines/env.compy_intel old mode 100755 new mode 100644 diff 
--git a/configuration/scripts/machines/env.conda_linux b/configuration/scripts/machines/env.conda_linux old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.conda_macos b/configuration/scripts/machines/env.conda_macos old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.cori_intel b/configuration/scripts/machines/env.cori_intel old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.daley_gnu b/configuration/scripts/machines/env.daley_gnu old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.daley_intel b/configuration/scripts/machines/env.daley_intel old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.fram_intel b/configuration/scripts/machines/env.fram_intel old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.freya_gnu b/configuration/scripts/machines/env.freya_gnu old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.freya_intel b/configuration/scripts/machines/env.freya_intel old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.gaea_intel b/configuration/scripts/machines/env.gaea_intel old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.gaffney_gnu b/configuration/scripts/machines/env.gaffney_gnu old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.gaffney_intel b/configuration/scripts/machines/env.gaffney_intel old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.gust_cray b/configuration/scripts/machines/env.gust_cray new file mode 100644 index 000000000..8246cd9b9 --- /dev/null +++ b/configuration/scripts/machines/env.gust_cray @@ -0,0 +1,72 @@ +#!/bin/csh -f + +set inp = "undefined" +if ($#argv == 1) then + set inp = $1 +endif + +if ("$inp" != "-nomodules") then + +source ${MODULESHOME}/init/csh + +module --force purge +module load ncarenv/22.10 +module load craype/2.7.17 +module load cce/14.0.3 +module load ncarcompilers/0.7.1 +module load cray-mpich/8.1.19 +module load hdf5/1.12.2 +module load netcdf/4.8.1 +module load cray-libsci/22.08.1.1 + +if ($?ICE_IOTYPE) then +if ($ICE_IOTYPE =~ pio*) then + module unload netcdf + module load netcdf-mpi/4.8.1 + module load pnetcdf/1.12.2 + if ($ICE_IOTYPE == "pio1") then + module load pio/1.10.1 + else + module load pio/2.5.9 + endif +endif +endif + +if ($?ICE_BFBTYPE) then +if ($ICE_BFBTYPE =~ qcchk*) then + module load conda +# conda env create -f ../../configuration/scripts/tests/qctest.yml + conda activate qctest +endif +endif + +# For perftools with mpiexec +# module load perftools-base +# module load perftools +#setenv PALS_TRANSFER FALSE + +endif + +limit coredumpsize unlimited +limit stacksize unlimited +setenv PALS_QUIET TRUE + +# May be needed for OpenMP memory +setenv OMP_STACKSIZE 64M +# OMP runtime diagnostics +#setenv OMP_DISPLAY_ENV TRUE + +setenv ICE_MACHINE_MACHNAME gust +setenv ICE_MACHINE_MACHINFO "Cray XE Milan Slingshot 11" +setenv ICE_MACHINE_ENVNAME crayg +setenv ICE_MACHINE_ENVINFO "cce 14.0.3, cray-mpich 2.19, netcdf4.8.1, pnetcdf1.12.2, pio2.5.9" +setenv ICE_MACHINE_MAKE gmake +setenv ICE_MACHINE_WKDIR /glade/gust/scratch/$user/CICE_RUNS +setenv ICE_MACHINE_INPUTDATA /glade/work/tcraig/cice-consortium-data +setenv ICE_MACHINE_BASELINE /glade/gust/scratch/$user/CICE_BASELINE +setenv ICE_MACHINE_SUBMIT "qsub" +setenv ICE_MACHINE_ACCT P00000000 +setenv ICE_MACHINE_QUEUE "main" +setenv ICE_MACHINE_TPNODE 128 +setenv 
ICE_MACHINE_BLDTHRDS 8 +setenv ICE_MACHINE_QSTAT "qstat " diff --git a/configuration/scripts/machines/env.gust_intel b/configuration/scripts/machines/env.gust_intel new file mode 100644 index 000000000..08a99b940 --- /dev/null +++ b/configuration/scripts/machines/env.gust_intel @@ -0,0 +1,72 @@ +#!/bin/csh -f + +set inp = "undefined" +if ($#argv == 1) then + set inp = $1 +endif + +if ("$inp" != "-nomodules") then + +source ${MODULESHOME}/init/csh + +module --force purge +module load ncarenv/22.10 +module load craype/2.7.17 +module load intel/2021.6.0 +module load ncarcompilers/0.7.1 +module load cray-mpich/8.1.19 +module load hdf5/1.12.2 +module load netcdf/4.8.1 +module load cray-libsci/22.08.1.1 + +if ($?ICE_IOTYPE) then +if ($ICE_IOTYPE =~ pio*) then + module unload netcdf + module load netcdf-mpi/4.8.1 + module load pnetcdf/1.12.2 + if ($ICE_IOTYPE == "pio1") then + module load pio/1.10.1 + else + module load pio/2.5.9 + endif +endif +endif + +if ($?ICE_BFBTYPE) then +if ($ICE_BFBTYPE =~ qcchk*) then + module load conda +# conda env create -f ../../configuration/scripts/tests/qctest.yml + conda activate qctest +endif +endif + +# For perftools with mpiexec +# module load perftools-base +# module load perftools +#setenv PALS_TRANSFER FALSE + +endif + +limit coredumpsize unlimited +limit stacksize unlimited +setenv PALS_QUIET TRUE + +# May be needed for OpenMP memory +setenv OMP_STACKSIZE 64M +# OMP runtime diagnostics +#setenv OMP_DISPLAY_ENV TRUE + +setenv ICE_MACHINE_MACHNAME gust +setenv ICE_MACHINE_MACHINFO "Cray XE Milan Slingshot 11" +setenv ICE_MACHINE_ENVNAME intel +setenv ICE_MACHINE_ENVINFO "ifort 2021.6.0 20220226, cray-mpich 2.19, netcdf4.8.1, pnetcdf1.12.2, pio2.5.9" +setenv ICE_MACHINE_MAKE gmake +setenv ICE_MACHINE_WKDIR /glade/gust/scratch/$user/CICE_RUNS +setenv ICE_MACHINE_INPUTDATA /glade/work/tcraig/cice-consortium-data +setenv ICE_MACHINE_BASELINE /glade/gust/scratch/$user/CICE_BASELINE +setenv ICE_MACHINE_SUBMIT "qsub" +setenv ICE_MACHINE_ACCT P00000000 +setenv ICE_MACHINE_QUEUE "main" +setenv ICE_MACHINE_TPNODE 128 +setenv ICE_MACHINE_BLDTHRDS 8 +setenv ICE_MACHINE_QSTAT "qstat " diff --git a/configuration/scripts/machines/env.hera_intel b/configuration/scripts/machines/env.hera_intel old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.high_Sierra_gnu b/configuration/scripts/machines/env.high_Sierra_gnu old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.hobart_intel b/configuration/scripts/machines/env.hobart_intel old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.hobart_nag b/configuration/scripts/machines/env.hobart_nag old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.izumi_gnu b/configuration/scripts/machines/env.izumi_gnu old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.izumi_intel b/configuration/scripts/machines/env.izumi_intel old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.izumi_nag b/configuration/scripts/machines/env.izumi_nag old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.izumi_pgi b/configuration/scripts/machines/env.izumi_pgi old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.koehr_intel b/configuration/scripts/machines/env.koehr_intel old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.mustang_intel18 b/configuration/scripts/machines/env.mustang_intel18 old mode 100755 new mode 100644 
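Note on the new env.gust_* files above: both set OMP_STACKSIZE to 64M with the comment "May be needed for OpenMP memory". The reason is that each OpenMP thread gets its own stack, and threaded Fortran code using large automatic arrays can exhaust a small default per-thread stack. A standalone illustration, not CICE code; the array size and thread count below are arbitrary and only demonstrate the failure mode that raising OMP_STACKSIZE avoids.

program omp_stack_demo
  use omp_lib, only: omp_get_thread_num
  implicit none
  integer :: i
  ! each iteration's worker holds a large automatic array on its
  ! thread's private stack; with a small default stack this can
  ! crash, and raising OMP_STACKSIZE (e.g. to 64M) fixes it
  !$omp parallel do
  do i = 1, 8
     call worker(i)
  end do
  !$omp end parallel do
contains
  subroutine worker(n)
    integer, intent(in) :: n
    real(kind=8) :: work(1000000)   ! ~8 MB automatic (stack) array
    work = real(n, kind=8)
    print *, 'thread', omp_get_thread_num(), 'sum', sum(work)
  end subroutine worker
end program omp_stack_demo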
diff --git a/configuration/scripts/machines/env.mustang_intel19 b/configuration/scripts/machines/env.mustang_intel19 old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.mustang_intel20 b/configuration/scripts/machines/env.mustang_intel20 old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.narwhal_aocc b/configuration/scripts/machines/env.narwhal_aocc old mode 100755 new mode 100644 index 4016a1d7d..875296520 --- a/configuration/scripts/machines/env.narwhal_aocc +++ b/configuration/scripts/machines/env.narwhal_aocc @@ -14,21 +14,18 @@ module unload PrgEnv-cray module unload PrgEnv-gnu module unload PrgEnv-intel module unload PrgEnv-nvidia -module load PrgEnv-aocc/8.1.0 -module load cray-pals/1.0.17 -module load bct-env/0.1 -module unload aocc -module load aocc/2.2.0.1 -module unload cray-mpich -module load cray-mpich/8.1.5 - +module load PrgEnv-aocc +module load cpe/22.03 +module load aocc/3.0.0 +module unload cray-pals +module load cray-pals/1.2.2 module unload cray-hdf5 module unload cray-hdf5-parallel module unload cray-netcdf-hdf5parallel module unload cray-parallel-netcdf module unload netcdf -module load cray-netcdf/4.7.4.4 -module load cray-hdf5/1.12.0.4 +module load cray-netcdf/4.8.1.1 +module load cray-hdf5/1.12.1.1 setenv NETCDF_PATH ${NETCDF_DIR} limit coredumpsize unlimited @@ -41,7 +38,7 @@ endif setenv ICE_MACHINE_MACHNAME narwhal setenv ICE_MACHINE_MACHINFO "Cray EX AMD EPYC 7H12 2.6GHz, Slingshot-10 Interconnect" setenv ICE_MACHINE_ENVNAME intel -setenv ICE_MACHINE_ENVINFO "aocc_3.0.0-Build#78 2020_12_10 clang/flang 12.0.0, cray-mpich/8.1.5, netcdf/4.7.4.4" +setenv ICE_MACHINE_ENVINFO "aocc_3.0.0-Build#78 2020_12_10 clang/flang 12.0.0, cray-mpich/8.1.14, netcdf/4.8.1.1" setenv ICE_MACHINE_MAKE gmake setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS setenv ICE_MACHINE_INPUTDATA /p/work1/projects/RASM/cice_consortium diff --git a/configuration/scripts/machines/env.narwhal_cray b/configuration/scripts/machines/env.narwhal_cray old mode 100755 new mode 100644 index d0fcc9ba7..a9e5bd14a --- a/configuration/scripts/machines/env.narwhal_cray +++ b/configuration/scripts/machines/env.narwhal_cray @@ -14,21 +14,17 @@ module unload PrgEnv-cray module unload PrgEnv-gnu module unload PrgEnv-intel module unload PrgEnv-nvidia -module load PrgEnv-cray/8.1.0 -module load cray-pals/1.0.17 -module load bct-env/0.1 -module unload cce -module load cce/12.0.3 -module unload cray-mpich -module load cray-mpich/8.1.9 - +module load PrgEnv-cray +module load cpe/22.03 +module unload cray-pals +module load cray-pals/1.2.2 module unload cray-hdf5 module unload cray-hdf5-parallel module unload cray-netcdf-hdf5parallel module unload cray-parallel-netcdf module unload netcdf -module load cray-netcdf/4.7.4.4 -module load cray-hdf5/1.12.0.4 +module load cray-netcdf/4.8.1.1 +module load cray-hdf5/1.12.1.1 setenv NETCDF_PATH ${NETCDF_DIR} limit coredumpsize unlimited @@ -39,9 +35,9 @@ setenv OMP_WAIT_POLICY PASSIVE endif setenv ICE_MACHINE_MACHNAME narwhal -setenv ICE_MACHINE_MACHINFO "Cray EX AMD EPYC 7H12" +setenv ICE_MACHINE_MACHINFO "Cray EX AMD EPYC 7H12 2.6GHz, Slingshot-10 Interconnect" setenv ICE_MACHINE_ENVNAME cray -setenv ICE_MACHINE_ENVINFO "cce 12.0.3, cray-mpich/8.1.9, netcdf/4.7.4.4" +setenv ICE_MACHINE_ENVINFO "cce 13.0.2, cray-mpich/8.1.14, netcdf/4.8.1.1" setenv ICE_MACHINE_MAKE gmake setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS setenv ICE_MACHINE_INPUTDATA /p/work1/projects/RASM/cice_consortium diff --git 
a/configuration/scripts/machines/env.narwhal_gnu b/configuration/scripts/machines/env.narwhal_gnu old mode 100755 new mode 100644 index 51a272f4e..701920161 --- a/configuration/scripts/machines/env.narwhal_gnu +++ b/configuration/scripts/machines/env.narwhal_gnu @@ -14,21 +14,17 @@ module unload PrgEnv-cray module unload PrgEnv-gnu module unload PrgEnv-intel module unload PrgEnv-nvidia -module load PrgEnv-gnu/8.1.0 -module load cray-pals/1.0.17 -module load bct-env/0.1 -module unload gcc -module load gcc/11.2.0 -module unload cray-mpich -module load cray-mpich/8.1.9 - +module load PrgEnv-gnu +module load cpe/22.03 +module unload cray-pals +module load cray-pals/1.2.2 module unload cray-hdf5 module unload cray-hdf5-parallel module unload cray-netcdf-hdf5parallel module unload cray-parallel-netcdf module unload netcdf -module load cray-netcdf/4.7.4.4 -module load cray-hdf5/1.12.0.4 +module load cray-netcdf/4.8.1.1 +module load cray-hdf5/1.12.1.1 setenv NETCDF_PATH ${NETCDF_DIR} limit coredumpsize unlimited @@ -39,9 +35,9 @@ setenv OMP_WAIT_POLICY PASSIVE endif setenv ICE_MACHINE_MACHNAME narwhal -setenv ICE_MACHINE_MACHINFO "Cray EX AMD EPYC 7H12" -setenv ICE_MACHINE_ENVNAME intel -setenv ICE_MACHINE_ENVINFO "gnu fortran/c 11.2.0, cray-mpich/8.1.9, netcdf/4.7.4.4" +setenv ICE_MACHINE_MACHINFO "Cray EX AMD EPYC 7H12 2.6GHz, Slingshot-10 Interconnect" +setenv ICE_MACHINE_ENVNAME gnu +setenv ICE_MACHINE_ENVINFO "gnu c/fortran 11.2.0 20210728, cray-mpich/8.1.14, netcdf/4.8.1.1" setenv ICE_MACHINE_MAKE gmake setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS setenv ICE_MACHINE_INPUTDATA /p/work1/projects/RASM/cice_consortium diff --git a/configuration/scripts/machines/env.narwhal_intel b/configuration/scripts/machines/env.narwhal_intel old mode 100755 new mode 100644 index f79d962ff..4cc60acac --- a/configuration/scripts/machines/env.narwhal_intel +++ b/configuration/scripts/machines/env.narwhal_intel @@ -14,21 +14,17 @@ module unload PrgEnv-cray module unload PrgEnv-gnu module unload PrgEnv-intel module unload PrgEnv-nvidia -module load PrgEnv-intel/8.0.0 -module load cray-pals/1.0.17 -module load bct-env/0.1 -module unload intel -module load intel/2021.1 -module unload cray-mpich -module load cray-mpich/8.1.9 - +module load PrgEnv-intel +module load cpe/22.03 +module unload cray-pals +module load cray-pals/1.2.2 module unload cray-hdf5 module unload cray-hdf5-parallel module unload cray-netcdf-hdf5parallel module unload cray-parallel-netcdf module unload netcdf -module load cray-netcdf/4.7.4.4 -module load cray-hdf5/1.12.0.4 +module load cray-netcdf/4.8.1.1 +module load cray-hdf5/1.12.1.1 setenv NETCDF_PATH ${NETCDF_DIR} limit coredumpsize unlimited @@ -39,9 +35,9 @@ setenv OMP_WAIT_POLICY PASSIVE endif setenv ICE_MACHINE_MACHNAME narwhal -setenv ICE_MACHINE_MACHINFO "Cray EX AMD EPYC 7H12" +setenv ICE_MACHINE_MACHINFO "Cray EX AMD EPYC 7H12 2.6GHz, Slingshot-10 Interconnect" setenv ICE_MACHINE_ENVNAME intel -setenv ICE_MACHINE_ENVINFO "ifort 2021.1 Beta 20201112, cray-mpich/8.1.9, netcdf/4.7.4.4" +setenv ICE_MACHINE_ENVINFO "ifort 2021.4.0 20210910, cray-mpich/8.1.14, netcdf/4.8.1.1" setenv ICE_MACHINE_MAKE gmake setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS setenv ICE_MACHINE_INPUTDATA /p/work1/projects/RASM/cice_consortium diff --git a/configuration/scripts/machines/env.narwhal_intelhpcx b/configuration/scripts/machines/env.narwhal_intelhpcx new file mode 100644 index 000000000..5b7a33ece --- /dev/null +++ b/configuration/scripts/machines/env.narwhal_intelhpcx @@ -0,0 +1,52 @@ +#!/bin/csh -f + +set 
inp = "undefined" +if ($#argv == 1) then + set inp = $1 +endif + +if ("$inp" != "-nomodules") then + +source ${MODULESHOME}/init/csh + +module unload hpcx-ompi-intel +module unload libfabric +module unload PrgEnv-aocc +module unload PrgEnv-cray +module unload PrgEnv-gnu +module unload PrgEnv-intel +module unload PrgEnv-nvidia +module unload craype-network-ofi +module unload craype-network-ucx +module load hpcx-ompi-intel +module load libfabric + +module unload cray-hdf5 +module unload cray-hdf5-parallel +module unload cray-netcdf-hdf5parallel +module unload cray-parallel-netcdf +module unload netcdf +module load cray-netcdf/4.8.1.1 +module load cray-hdf5/1.12.1.1 + +limit coredumpsize unlimited +limit stacksize unlimited +setenv OMP_STACKSIZE 128M +setenv OMP_WAIT_POLICY PASSIVE + +endif + +setenv ICE_MACHINE_MACHNAME narwhal +setenv ICE_MACHINE_MACHINFO "Cray EX AMD EPYC 7H12 2.6GHz, Slingshot-10 Interconnect" +setenv ICE_MACHINE_ENVNAME intel +setenv ICE_MACHINE_ENVINFO "ifort 2021.4.0 20210910, hpcx2.9.0 openmpi, netcdf/4.8.1.1" +setenv ICE_MACHINE_MAKE gmake +setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS +setenv ICE_MACHINE_INPUTDATA /p/work1/projects/RASM/cice_consortium +setenv ICE_MACHINE_BASELINE $WORKDIR/CICE_BASELINE +setenv ICE_MACHINE_SUBMIT "qsub " +setenv ICE_MACHINE_ACCT P00000000 +setenv ICE_MACHINE_QUEUE "debug" +setenv ICE_MACHINE_TPNODE 128 # tasks per node +setenv ICE_MACHINE_BLDTHRDS 12 +setenv ICE_MACHINE_QSTAT "qstat " diff --git a/configuration/scripts/machines/env.nrlssc_gnu b/configuration/scripts/machines/env.nrlssc_gnu old mode 100755 new mode 100644 index f050a61b8..1f8dd4441 --- a/configuration/scripts/machines/env.nrlssc_gnu +++ b/configuration/scripts/machines/env.nrlssc_gnu @@ -1,7 +1,7 @@ #!/bin/csh -f -setenv ICE_MACHINE_MACHNAME NRLSSC -setenv ICE_MACHINE_MACHINFO "NRLSSC" +setenv ICE_MACHINE_MACHNAME nrlssc +setenv ICE_MACHINE_MACHINFO "nrlssc" setenv ICE_MACHINE_ENVNAME gnu setenv ICE_MACHINE_ENVINFO "gnu" setenv ICE_MACHINE_MAKE gmake diff --git a/configuration/scripts/machines/env.onyx_cray b/configuration/scripts/machines/env.onyx_cray old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.onyx_gnu b/configuration/scripts/machines/env.onyx_gnu old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.onyx_intel b/configuration/scripts/machines/env.onyx_intel old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.orion_intel b/configuration/scripts/machines/env.orion_intel old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.phase3_intel b/configuration/scripts/machines/env.phase3_intel old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.ppp5_intel b/configuration/scripts/machines/env.ppp5_intel index c4987124a..c6e272692 100644 --- a/configuration/scripts/machines/env.ppp5_intel +++ b/configuration/scripts/machines/env.ppp5_intel @@ -18,6 +18,12 @@ source $ssmuse -d /fs/ssm/main/opt/intelcomp/inteloneapi-2022.1.2/intelcomp+mpi+ # module load -s icc mpi setenv FOR_DUMP_CORE_FILE 1 setenv I_MPI_DEBUG_COREDUMP 1 +# Reproducible collectives +if (${ICE_BASEGEN} != ${ICE_SPVAL} || ${ICE_BASECOM} != ${ICE_SPVAL}) then + setenv I_MPI_CBWR 1 +endif +# Stop being buggy +setenv I_MPI_FABRICS ofi # NetCDF source $ssmuse -d main/opt/hdf5-netcdf4/serial/shared/inteloneapi-2022.1.2/01 @@ -32,6 +38,7 @@ setenv ICE_MACHINE_MAKE make setenv ICE_MACHINE_WKDIR ~/data/ppp5/cice/runs/ setenv ICE_MACHINE_INPUTDATA 
/space/hall5/sitestore/eccc/cmd/e/sice500/ setenv ICE_MACHINE_BASELINE ~/data/ppp5/cice/baselines/ +setenv ICE_MACHINE_MAXRUNLENGTH 6 setenv ICE_MACHINE_SUBMIT qsub setenv ICE_MACHINE_TPNODE 80 setenv ICE_MACHINE_ACCT unused diff --git a/configuration/scripts/machines/env.ppp6_gnu b/configuration/scripts/machines/env.ppp6_gnu index 69ed6ff8b..94f478774 100644 --- a/configuration/scripts/machines/env.ppp6_gnu +++ b/configuration/scripts/machines/env.ppp6_gnu @@ -8,7 +8,7 @@ endif if ("$inp" != "-nomodules") then # OpenMPI -source /usr/mpi/gcc/openmpi-4.1.2a1/bin/mpivars.csh +setenv PATH "/home/phb001/.local_rhel-8-icelake-64_gcc/bin:$PATH" # OpenMP setenv OMP_STACKSIZE 64M @@ -21,6 +21,7 @@ setenv ICE_MACHINE_MAKE make setenv ICE_MACHINE_WKDIR ~/data/site6/cice/runs/ setenv ICE_MACHINE_INPUTDATA /space/hall6/sitestore/eccc/cmd/e/sice500/ setenv ICE_MACHINE_BASELINE ~/data/site6/cice/baselines/ +setenv ICE_MACHINE_MAXRUNLENGTH 6 setenv ICE_MACHINE_SUBMIT qsub setenv ICE_MACHINE_TPNODE 80 setenv ICE_MACHINE_ACCT unused diff --git a/configuration/scripts/machines/env.ppp6_gnu-impi b/configuration/scripts/machines/env.ppp6_gnu-impi index 461e09a43..61cd46ed9 100644 --- a/configuration/scripts/machines/env.ppp6_gnu-impi +++ b/configuration/scripts/machines/env.ppp6_gnu-impi @@ -18,6 +18,12 @@ setenv I_MPI_F90 gfortran setenv I_MPI_FC gfortran setenv I_MPI_CC gcc setenv I_MPI_CXX g++ +# Reproducible collectives +if (${ICE_BASEGEN} != ${ICE_SPVAL} || ${ICE_BASECOM} != ${ICE_SPVAL}) then + setenv I_MPI_CBWR 1 +endif +# Stop being buggy +setenv I_MPI_FABRICS ofi # OpenMP setenv OMP_STACKSIZE 64M @@ -30,6 +36,7 @@ setenv ICE_MACHINE_MAKE make setenv ICE_MACHINE_WKDIR ~/data/site6/cice/runs/ setenv ICE_MACHINE_INPUTDATA /space/hall6/sitestore/eccc/cmd/e/sice500/ setenv ICE_MACHINE_BASELINE ~/data/site6/cice/baselines/ +setenv ICE_MACHINE_MAXRUNLENGTH 6 setenv ICE_MACHINE_SUBMIT qsub setenv ICE_MACHINE_TPNODE 80 setenv ICE_MACHINE_ACCT unused diff --git a/configuration/scripts/machines/env.ppp6_intel b/configuration/scripts/machines/env.ppp6_intel index ef9396575..faecb9b5a 100644 --- a/configuration/scripts/machines/env.ppp6_intel +++ b/configuration/scripts/machines/env.ppp6_intel @@ -18,6 +18,12 @@ source $ssmuse -d /fs/ssm/main/opt/intelcomp/inteloneapi-2022.1.2/intelcomp+mpi+ # module load -s icc mpi setenv FOR_DUMP_CORE_FILE 1 setenv I_MPI_DEBUG_COREDUMP 1 +# Reproducible collectives +if (${ICE_BASEGEN} != ${ICE_SPVAL} || ${ICE_BASECOM} != ${ICE_SPVAL}) then + setenv I_MPI_CBWR 1 +endif +# Stop being buggy +setenv I_MPI_FABRICS ofi # NetCDF source $ssmuse -d main/opt/hdf5-netcdf4/serial/shared/inteloneapi-2022.1.2/01 @@ -32,6 +38,7 @@ setenv ICE_MACHINE_MAKE make setenv ICE_MACHINE_WKDIR ~/data/ppp6/cice/runs/ setenv ICE_MACHINE_INPUTDATA /space/hall6/sitestore/eccc/cmd/e/sice500/ setenv ICE_MACHINE_BASELINE ~/data/ppp6/cice/baselines/ +setenv ICE_MACHINE_MAXRUNLENGTH 6 setenv ICE_MACHINE_SUBMIT qsub setenv ICE_MACHINE_TPNODE 80 setenv ICE_MACHINE_ACCT unused diff --git a/configuration/scripts/machines/env.ppp6_intel19 b/configuration/scripts/machines/env.ppp6_intel19 index 6cdd9a036..554b31e42 100644 --- a/configuration/scripts/machines/env.ppp6_intel19 +++ b/configuration/scripts/machines/env.ppp6_intel19 @@ -17,6 +17,12 @@ setenv FOR_DUMP_CORE_FILE 1 source $ssmuse -d /fs/ssm/hpco/exp/intelpsxe-impi-19.0.3.199 setenv FI_PROVIDER verbs setenv I_MPI_DEBUG_COREDUMP 1 +# Reproducible collectives +if (${ICE_BASEGEN} != ${ICE_SPVAL} || ${ICE_BASECOM} != ${ICE_SPVAL}) then + setenv I_MPI_CBWR 1 
+endif +# Stop being buggy +setenv I_MPI_FABRICS ofi # NetCDF source $ssmuse -d hpco/exp/hdf5-netcdf4/serial/static/intel-19.0.3.199/02 @@ -31,6 +37,7 @@ setenv ICE_MACHINE_MAKE make setenv ICE_MACHINE_WKDIR ~/data/site6/cice/runs/ setenv ICE_MACHINE_INPUTDATA /space/hall6/sitestore/eccc/cmd/e/sice500/ setenv ICE_MACHINE_BASELINE ~/data/site6/cice/baselines/ +setenv ICE_MACHINE_MAXRUNLENGTH 6 setenv ICE_MACHINE_SUBMIT "qsub" setenv ICE_MACHINE_TPNODE 80 setenv ICE_MACHINE_ACCT P0000000 diff --git a/configuration/scripts/machines/env.robert_intel b/configuration/scripts/machines/env.robert_intel index d3d9c1eae..592d765fe 100644 --- a/configuration/scripts/machines/env.robert_intel +++ b/configuration/scripts/machines/env.robert_intel @@ -18,6 +18,8 @@ source $ssmuse -d /fs/ssm/main/opt/intelcomp/inteloneapi-2022.1.2/intelcomp+mpi+ # module load -s icc mpi setenv FOR_DUMP_CORE_FILE 1 setenv I_MPI_DEBUG_COREDUMP 1 +# Stop being buggy +setenv I_MPI_FABRICS ofi # NetCDF source $ssmuse -d main/opt/hdf5-netcdf4/serial/shared/inteloneapi-2022.1.2/01 diff --git a/configuration/scripts/machines/env.testmachine_intel b/configuration/scripts/machines/env.testmachine_intel old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.travisCI_gnu b/configuration/scripts/machines/env.travisCI_gnu old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.underhill_intel b/configuration/scripts/machines/env.underhill_intel index bc3eec857..dee0ab92b 100644 --- a/configuration/scripts/machines/env.underhill_intel +++ b/configuration/scripts/machines/env.underhill_intel @@ -18,6 +18,8 @@ source $ssmuse -d /fs/ssm/main/opt/intelcomp/inteloneapi-2022.1.2/intelcomp+mpi+ # module load -s icc mpi setenv FOR_DUMP_CORE_FILE 1 setenv I_MPI_DEBUG_COREDUMP 1 +# Stop being buggy +setenv I_MPI_FABRICS ofi # NetCDF source $ssmuse -d main/opt/hdf5-netcdf4/serial/shared/inteloneapi-2022.1.2/01 diff --git a/configuration/scripts/options/set_nml.bgczm b/configuration/scripts/options/set_nml.bgczm new file mode 100644 index 000000000..2644576cd --- /dev/null +++ b/configuration/scripts/options/set_nml.bgczm @@ -0,0 +1,30 @@ +nilyr = 7 +nslyr = 1 +ncat = 5 +nblyr = 7 +n_aero = 0 +n_zaero = 3 +n_algae = 3 +n_doc = 2 +n_dic = 0 +n_don = 1 +n_fed = 1 +n_fep = 1 +ice_ic = 'none' +tr_brine = .true. +tr_zaero = .true. +z_tracers = .true. +solve_zbgc = .true. +scale_bgc = .true. +bgc_data_type = 'default' +tr_bgc_Nit = .true. +tr_bgc_C = .true. +tr_bgc_Am = .true. +tr_bgc_Sil = .true. +tr_bgc_DMS = .true. +tr_bgc_PON = .true. +tr_bgc_hum = .true. +tr_bgc_DON = .true. +tr_bgc_Fe = .true. +modal_aero = .true. +# dEdd_algae = .true. diff --git a/configuration/scripts/options/set_nml.boxsymw b/configuration/scripts/options/set_nml.boxclosed similarity index 89% rename from configuration/scripts/options/set_nml.boxsymw rename to configuration/scripts/options/set_nml.boxclosed index 4be1f5f95..d55faa302 100644 --- a/configuration/scripts/options/set_nml.boxsymw +++ b/configuration/scripts/options/set_nml.boxclosed @@ -23,11 +23,6 @@ kridge = -1 ktransport = -1 coriolis = 'zero' atmbndy = 'constant' -atm_data_type = 'uniform_west' -ocn_data_type = 'calm' -ice_data_type = 'uniform' -ice_data_conc = 'parabolic' -ice_data_dist = 'uniform' calc_strair = .false. rotate_wind = .false. restore_ice = .false. 
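Note on the option-file changes here and below: the monolithic set_nml.boxsym*/boxislands* files are dropped or renamed to shared bases (boxclosed, boxopen), and the forcing choices move into small composable fragments (boxforcee, boxforcen, boxforcene), so a box test stacks options such as boxclosed,boxforcee instead of duplicating near-identical files. Schematically, a 'uniform_*' atm_data_type selects a constant wind direction in the forcing code; the routine below is an illustration only (the name, the 5 m/s magnitude, and the component split are assumptions, not the actual ice_forcing.F90 implementation).

subroutine uniform_wind_sketch(atm_data_type, uatm, vatm)
  implicit none
  character(len=*), intent(in)  :: atm_data_type
  real(kind=8),     intent(out) :: uatm, vatm      ! wind components (m/s)
  real(kind=8),     parameter   :: windspd = 5.0d0 ! assumed uniform speed
  select case (trim(atm_data_type))
  case ('uniform_east')
     uatm = windspd             ; vatm = 0.0d0
  case ('uniform_north')
     uatm = 0.0d0               ; vatm = windspd
  case ('uniform_northeast')
     uatm = windspd/sqrt(2.0d0) ; vatm = windspd/sqrt(2.0d0)
  case ('uniform_south')
     uatm = 0.0d0               ; vatm = -windspd
  case default                  ! 'calm' and anything unrecognized
     uatm = 0.0d0               ; vatm = 0.0d0
  end select
end subroutine uniform_wind_sketch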
diff --git a/configuration/scripts/options/set_nml.boxforcee b/configuration/scripts/options/set_nml.boxforcee new file mode 100644 index 000000000..357fa69d5 --- /dev/null +++ b/configuration/scripts/options/set_nml.boxforcee @@ -0,0 +1,5 @@ +atm_data_type = 'uniform_east' +ocn_data_type = 'calm' +ice_data_type = 'uniform' +ice_data_conc = 'parabolic' +ice_data_dist = 'uniform' diff --git a/configuration/scripts/options/set_nml.boxforcen b/configuration/scripts/options/set_nml.boxforcen new file mode 100644 index 000000000..87cbaea40 --- /dev/null +++ b/configuration/scripts/options/set_nml.boxforcen @@ -0,0 +1,5 @@ +atm_data_type = 'uniform_north' +ocn_data_type = 'calm' +ice_data_type = 'uniform' +ice_data_conc = 'parabolic' +ice_data_dist = 'uniform' diff --git a/configuration/scripts/options/set_nml.boxforcene b/configuration/scripts/options/set_nml.boxforcene new file mode 100644 index 000000000..396fabde0 --- /dev/null +++ b/configuration/scripts/options/set_nml.boxforcene @@ -0,0 +1,5 @@ +atm_data_type = 'uniform_northeast' +ocn_data_type = 'calm' +ice_data_type = 'uniform' +ice_data_conc = 'parabolic' +ice_data_dist = 'uniform' diff --git a/configuration/scripts/options/set_nml.boxislandsn b/configuration/scripts/options/set_nml.boxislandsn deleted file mode 100644 index dd386ce5a..000000000 --- a/configuration/scripts/options/set_nml.boxislandsn +++ /dev/null @@ -1,46 +0,0 @@ -npt = 48 -kmt_type = 'boxislands' -ice_ic = 'internal' -use_leap_years = .false. -histfreq = 'd','x','x','x','x' -grid_type = 'rectangular' -dxrect = 16.e5 -dyrect = 16.e5 -close_boundaries = .false. -ew_boundary_type = 'cyclic' -ns_boundary_type = 'open' -ktherm = -1 -kstrength = 0 -kdyn = 1 -kridge = -1 -ktransport = -1 -coriolis = 'constant' -atmbndy = 'constant' -atm_data_type = 'uniform_north' -ocn_data_type = 'calm' -ice_data_type = 'uniform' -ice_data_conc = 'parabolic' -ice_data_dist = 'uniform' -rotate_wind = .false. -calc_strair = .false. -restore_ice = .false. -tr_iage = .false. -tr_FY = .false. -tr_pond_lvl = .false. -f_aice = 'd' -f_hi = 'd' -f_hs = 'd' -f_Tsfc = 'd' -f_uvel = 'd' -f_vvel = 'd' -f_uatm = 'd' -f_vatm = 'd' -f_uocn = 'd' -f_vocn = 'd' -f_strairx = 'd' -f_strairy = 'd' -f_strocnx = 'd' -f_strocny = 'd' -f_divu = 'd' -f_sig1 = 'd' -f_sig2 = 'd' diff --git a/configuration/scripts/options/set_nml.boxislandsne b/configuration/scripts/options/set_nml.boxislandsne deleted file mode 100644 index 75db55722..000000000 --- a/configuration/scripts/options/set_nml.boxislandsne +++ /dev/null @@ -1,46 +0,0 @@ -npt = 48 -kmt_type = 'boxislands' -ice_ic = 'internal' -use_leap_years = .false. -histfreq = 'd','x','x','x','x' -grid_type = 'rectangular' -dxrect = 16.e5 -dyrect = 16.e5 -close_boundaries = .false. -ew_boundary_type = 'cyclic' -ns_boundary_type = 'open' -ktherm = -1 -kstrength = 0 -kdyn = 1 -kridge = -1 -ktransport = -1 -coriolis = 'constant' -atmbndy = 'constant' -atm_data_type = 'uniform_northeast' -ocn_data_type = 'calm' -ice_data_type = 'uniform' -ice_data_conc = 'parabolic' -ice_data_dist = 'uniform' -rotate_wind = .false. -calc_strair = .false. -restore_ice = .false. -tr_iage = .false. -tr_FY = .false. -tr_pond_lvl = .false. 
-f_aice = 'd' -f_hi = 'd' -f_hs = 'd' -f_Tsfc = 'd' -f_uvel = 'd' -f_vvel = 'd' -f_uatm = 'd' -f_vatm = 'd' -f_uocn = 'd' -f_vocn = 'd' -f_strairx = 'd' -f_strairy = 'd' -f_strocnx = 'd' -f_strocny = 'd' -f_divu = 'd' -f_sig1 = 'd' -f_sig2 = 'd' diff --git a/configuration/scripts/options/set_nml.boxislandse b/configuration/scripts/options/set_nml.boxopen similarity index 84% rename from configuration/scripts/options/set_nml.boxislandse rename to configuration/scripts/options/set_nml.boxopen index 4a8a47705..bdc832f7d 100644 --- a/configuration/scripts/options/set_nml.boxislandse +++ b/configuration/scripts/options/set_nml.boxopen @@ -1,5 +1,4 @@ npt = 48 -kmt_type = 'boxislands' ice_ic = 'internal' use_leap_years = .false. histfreq = 'd','x','x','x','x' @@ -16,11 +15,6 @@ kridge = -1 ktransport = -1 coriolis = 'constant' atmbndy = 'constant' -atm_data_type = 'uniform_east' -ocn_data_type = 'calm' -ice_data_type = 'uniform' -ice_data_conc = 'parabolic' -ice_data_dist = 'uniform' rotate_wind = .false. calc_strair = .false. restore_ice = .false. diff --git a/configuration/scripts/options/set_nml.boxsyme b/configuration/scripts/options/set_nml.boxsyme deleted file mode 100644 index 3ff31d2c4..000000000 --- a/configuration/scripts/options/set_nml.boxsyme +++ /dev/null @@ -1,58 +0,0 @@ -days_per_year = 360 -use_leap_years = .false. -npt_unit = 'd' -npt = 5 -ice_ic = 'internal' -restart_ext = .true. -histfreq = 'd','1','x','x','x' -grid_type = 'rectangular' -kmt_type = 'default' -dxrect = 16.e5 -dyrect = 16.e5 -close_boundaries = .true. -ew_boundary_type = 'open' -ns_boundary_type = 'open' -tr_iage = .false. -tr_FY = .false. -tr_lvl = .false. -tr_pond_lvl = .false. -ktherm = -1 -kstrength = 0 -kdyn = 1 -kridge = -1 -ktransport = -1 -coriolis = 'zero' -atmbndy = 'constant' -atm_data_type = 'uniform_east' -ocn_data_type = 'calm' -ice_data_type = 'uniform' -ice_data_conc = 'parabolic' -ice_data_dist = 'uniform' -calc_strair = .false. -rotate_wind = .false. -restore_ice = .false. -f_aice = 'd1' -f_hi = 'd1' -f_hs = 'd' -f_Tsfc = 'd' -f_uvel = 'd1' -f_vvel = 'd1' -f_uatm = 'd' -f_vatm = 'd' -f_uocn = 'd' -f_vocn = 'd' -f_strairx = 'd1' -f_strairy = 'd1' -f_strtltx = 'd1' -f_strtlty = 'd1' -f_strcorx = 'd1' -f_strcory = 'd1' -f_strocnx = 'd1' -f_strocny = 'd1' -f_strintx = 'd1' -f_strinty = 'd1' -f_taubx = 'd1' -f_tauby = 'd1' -f_divu = 'd1' -f_sig1 = 'd1' -f_sig2 = 'd1' diff --git a/configuration/scripts/options/set_nml.boxsymn b/configuration/scripts/options/set_nml.boxsymn deleted file mode 100644 index 90ef74813..000000000 --- a/configuration/scripts/options/set_nml.boxsymn +++ /dev/null @@ -1,58 +0,0 @@ -days_per_year = 360 -use_leap_years = .false. -npt_unit = 'd' -npt = 5 -ice_ic = 'internal' -restart_ext = .true. -histfreq = 'd','1','x','x','x' -grid_type = 'rectangular' -kmt_type = 'default' -dxrect = 16.e5 -dyrect = 16.e5 -close_boundaries = .true. -ew_boundary_type = 'open' -ns_boundary_type = 'open' -tr_iage = .false. -tr_FY = .false. -tr_lvl = .false. -tr_pond_lvl = .false. -ktherm = -1 -kstrength = 0 -kdyn = 1 -kridge = -1 -ktransport = -1 -coriolis = 'zero' -atmbndy = 'constant' -atm_data_type = 'uniform_north' -ocn_data_type = 'calm' -ice_data_type = 'uniform' -ice_data_conc = 'parabolic' -ice_data_dist = 'uniform' -calc_strair = .false. -rotate_wind = .false. -restore_ice = .false. 
-f_aice = 'd1' -f_hi = 'd1' -f_hs = 'd' -f_Tsfc = 'd' -f_uvel = 'd1' -f_vvel = 'd1' -f_uatm = 'd' -f_vatm = 'd' -f_uocn = 'd' -f_vocn = 'd' -f_strairx = 'd1' -f_strairy = 'd1' -f_strtltx = 'd1' -f_strtlty = 'd1' -f_strcorx = 'd1' -f_strcory = 'd1' -f_strocnx = 'd1' -f_strocny = 'd1' -f_strintx = 'd1' -f_strinty = 'd1' -f_taubx = 'd1' -f_tauby = 'd1' -f_divu = 'd1' -f_sig1 = 'd1' -f_sig2 = 'd1' diff --git a/configuration/scripts/options/set_nml.boxsymne b/configuration/scripts/options/set_nml.boxsymne deleted file mode 100644 index 5c7374976..000000000 --- a/configuration/scripts/options/set_nml.boxsymne +++ /dev/null @@ -1,58 +0,0 @@ -days_per_year = 360 -use_leap_years = .false. -npt_unit = 'd' -npt = 5 -ice_ic = 'internal' -restart_ext = .true. -histfreq = 'd','1','x','x','x' -grid_type = 'rectangular' -kmt_type = 'default' -dxrect = 16.e5 -dyrect = 16.e5 -close_boundaries = .true. -ew_boundary_type = 'open' -ns_boundary_type = 'open' -tr_iage = .false. -tr_FY = .false. -tr_lvl = .false. -tr_pond_lvl = .false. -ktherm = -1 -kstrength = 0 -kdyn = 1 -kridge = -1 -ktransport = -1 -coriolis = 'constant' -atmbndy = 'constant' -atm_data_type = 'uniform_northeast' -ocn_data_type = 'calm' -ice_data_type = 'uniform' -ice_data_conc = 'parabolic' -ice_data_dist = 'uniform' -calc_strair = .false. -rotate_wind = .false. -restore_ice = .false. -f_aice = 'd1' -f_hi = 'd1' -f_hs = 'd' -f_Tsfc = 'd' -f_uvel = 'd1' -f_vvel = 'd1' -f_uatm = 'd' -f_vatm = 'd' -f_uocn = 'd' -f_vocn = 'd' -f_strairx = 'd1' -f_strairy = 'd1' -f_strtltx = 'd1' -f_strtlty = 'd1' -f_strcorx = 'd1' -f_strcory = 'd1' -f_strocnx = 'd1' -f_strocny = 'd1' -f_strintx = 'd1' -f_strinty = 'd1' -f_taubx = 'd1' -f_tauby = 'd1' -f_divu = 'd1' -f_sig1 = 'd1' -f_sig2 = 'd1' diff --git a/configuration/scripts/options/set_nml.boxsyms b/configuration/scripts/options/set_nml.boxsyms deleted file mode 100644 index 7fc0fc5a0..000000000 --- a/configuration/scripts/options/set_nml.boxsyms +++ /dev/null @@ -1,58 +0,0 @@ -days_per_year = 360 -use_leap_years = .false. -npt_unit = 'd' -npt = 5 -ice_ic = 'internal' -restart_ext = .true. -histfreq = 'd','1','x','x','x' -grid_type = 'rectangular' -kmt_type = 'default' -dxrect = 16.e5 -dyrect = 16.e5 -close_boundaries = .true. -ew_boundary_type = 'open' -ns_boundary_type = 'open' -tr_iage = .false. -tr_FY = .false. -tr_lvl = .false. -tr_pond_lvl = .false. -ktherm = -1 -kstrength = 0 -kdyn = 1 -kridge = -1 -ktransport = -1 -coriolis = 'zero' -atmbndy = 'constant' -atm_data_type = 'uniform_south' -ocn_data_type = 'calm' -ice_data_type = 'uniform' -ice_data_conc = 'parabolic' -ice_data_dist = 'uniform' -calc_strair = .false. -rotate_wind = .false. -restore_ice = .false. 
-f_aice = 'd1' -f_hi = 'd1' -f_hs = 'd' -f_Tsfc = 'd' -f_uvel = 'd1' -f_vvel = 'd1' -f_uatm = 'd' -f_vatm = 'd' -f_uocn = 'd' -f_vocn = 'd' -f_strairx = 'd1' -f_strairy = 'd1' -f_strtltx = 'd1' -f_strtlty = 'd1' -f_strcorx = 'd1' -f_strcory = 'd1' -f_strocnx = 'd1' -f_strocny = 'd1' -f_strintx = 'd1' -f_strinty = 'd1' -f_taubx = 'd1' -f_tauby = 'd1' -f_divu = 'd1' -f_sig1 = 'd1' -f_sig2 = 'd1' diff --git a/configuration/scripts/options/set_nml.gbox12 b/configuration/scripts/options/set_nml.gbox12 index 96701522f..b8a07840b 100644 --- a/configuration/scripts/options/set_nml.gbox12 +++ b/configuration/scripts/options/set_nml.gbox12 @@ -2,7 +2,7 @@ ice_ic = 'internal' grid_type = 'rectangular' kmt_type = 'default' atm_data_type = 'box2001' -ocn_data_type = 'calm' +ocn_data_type = 'box2001' ice_data_type = 'box2001' ice_data_conc = 'box2001' ice_data_dist = 'box2001' diff --git a/configuration/scripts/options/set_nml.gbox180 b/configuration/scripts/options/set_nml.gbox180 index 96701522f..b8a07840b 100644 --- a/configuration/scripts/options/set_nml.gbox180 +++ b/configuration/scripts/options/set_nml.gbox180 @@ -2,7 +2,7 @@ ice_ic = 'internal' grid_type = 'rectangular' kmt_type = 'default' atm_data_type = 'box2001' -ocn_data_type = 'calm' +ocn_data_type = 'box2001' ice_data_type = 'box2001' ice_data_conc = 'box2001' ice_data_dist = 'box2001' diff --git a/configuration/scripts/options/set_nml.gbox80 b/configuration/scripts/options/set_nml.gbox80 index 96701522f..b8a07840b 100644 --- a/configuration/scripts/options/set_nml.gbox80 +++ b/configuration/scripts/options/set_nml.gbox80 @@ -2,7 +2,7 @@ ice_ic = 'internal' grid_type = 'rectangular' kmt_type = 'default' atm_data_type = 'box2001' -ocn_data_type = 'calm' +ocn_data_type = 'box2001' ice_data_type = 'box2001' ice_data_conc = 'box2001' ice_data_dist = 'box2001' diff --git a/configuration/scripts/options/set_nml.modal b/configuration/scripts/options/set_nml.modal new file mode 100644 index 000000000..767160a53 --- /dev/null +++ b/configuration/scripts/options/set_nml.modal @@ -0,0 +1,4 @@ +shortwave = 'dEdd' +tr_aero = .true. +modal_aero = .true. + diff --git a/configuration/scripts/options/set_nml.scale1 b/configuration/scripts/options/set_nml.scale1 new file mode 100644 index 000000000..af75bc151 --- /dev/null +++ b/configuration/scripts/options/set_nml.scale1 @@ -0,0 +1,2 @@ +dxscale = 1.d0 +dyscale = 1.d0 diff --git a/configuration/scripts/options/set_nml.vargrid b/configuration/scripts/options/set_nml.vargrid new file mode 100644 index 000000000..29dfcc8bb --- /dev/null +++ b/configuration/scripts/options/set_nml.vargrid @@ -0,0 +1,13 @@ +scale_dxdy = .true. +dxscale = 1.02d0 +dyscale = 1.01d0 +f_dxt = .true. +f_dyt = .true. +f_dxu = .true. +f_dyu = .true. +f_dxe = .true. +f_dye = .true. +f_dxn = .true. +f_dyn = .true. +f_HTN = .true. +f_HTE = .true. 
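A note on the new ``vargrid`` option above: it exercises the variable grid spacing capability added in this patch, where ``dxscale`` and ``dyscale`` scale the base spacings ``dxrect`` and ``dyrect`` multiplicatively outward from the domain center (see the ug_implementation.rst addition later in this patch). The following standalone Fortran sketch, not taken from the CICE sources and with illustrative names and sizes, shows the kind of spacing such a factor produces:

    program vargrid_sketch
       ! Hedged sketch: per-cell multiplicative scaling of a base grid
       ! spacing, symmetric about the domain center.  nx, imid, and the
       ! literal values are illustrative, not CICE code.
       implicit none
       integer, parameter :: nx = 8
       real(kind=8) :: dxrect, dxscale, dx(nx)
       integer :: i, imid
       dxrect  = 16.d5       ! base spacing in cm, as in the box options above
       dxscale = 1.02d0      ! factor from set_nml.vargrid above
       imid    = nx/2
       do i = 1, nx
          dx(i) = dxrect * dxscale**abs(i - imid)   ! grows away from center
       end do
       print *, dx
    end program vargrid_sketch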
diff --git a/configuration/scripts/tests/base_suite.ts b/configuration/scripts/tests/base_suite.ts index d4bbe8031..3007380ab 100644 --- a/configuration/scripts/tests/base_suite.ts +++ b/configuration/scripts/tests/base_suite.ts @@ -43,8 +43,9 @@ smoke gbox128 4x4 boxrestore,short,debug restart gbox80 1x1 box2001 smoke gbox80 1x1 boxslotcyl smoke gbox12 1x1x12x12x1 boxchan,diag1,debug +restart gx3 8x2 modal smoke gx3 8x2 bgcz -smoke gx3 8x2 bgcz,debug +smoke gx3 8x2 bgczm,debug smoke gx3 8x1 bgcskl,debug #smoke gx3 4x1 bgcz,thread smoke_gx3_8x2_bgcz restart gx1 4x2 bgcsklclim,medium diff --git a/configuration/scripts/tests/baseline.script b/configuration/scripts/tests/baseline.script index bb8f50a1f..2700fe71f 100644 --- a/configuration/scripts/tests/baseline.script +++ b/configuration/scripts/tests/baseline.script @@ -29,7 +29,7 @@ if (${ICE_BASECOM} != ${ICE_SPVAL}) then set base_file = `ls -1t ${ICE_BASELINE}/${ICE_BASECOM}/${ICE_TESTNAME}/cice.runlog* | head -1` echo "" - echo "bfb Log Compare Mode:" + echo "Regression Log Compare Mode:" echo "base_file: ${base_file}" echo "test_file: ${test_file}" @@ -58,7 +58,7 @@ if (${ICE_BASECOM} != ${ICE_SPVAL}) then endif echo "" - echo "Regression Compare Mode:" + echo "Regression Restart Compare Mode:" echo "base_dir: ${base_dir}" echo "test_dir: ${test_dir}" @@ -71,7 +71,7 @@ if (${ICE_BASECOM} != ${ICE_SPVAL}) then set base_file = `ls -1t ${ICE_BASELINE}/${ICE_BASECOM}/${ICE_TESTNAME}/cice.runlog* | head -1` echo "" - echo "bfb Log Compare Mode:" + echo "Regression Log Compare Mode:" echo "base_file: ${base_file}" echo "test_file: ${test_file}" diff --git a/configuration/scripts/tests/cice.results.csh b/configuration/scripts/tests/cice.results.csh index 13580c0be..4074115cb 100644 --- a/configuration/scripts/tests/cice.results.csh +++ b/configuration/scripts/tests/cice.results.csh @@ -5,12 +5,13 @@ cat ./results.log | grep -iv "#machinfo" | grep -iv "#envinfo" set pends = `cat ./results.log | grep PEND | wc -l` set misses = `cat ./results.log | grep MISS | wc -l` set failures = `cat ./results.log | grep FAIL | wc -l` -set failbuild = `cat ./results.log | grep FAIL | grep " build " | wc -l` -set failrun = `cat ./results.log | grep FAIL | grep " run " | wc -l` -set failtest = `cat ./results.log | grep FAIL | grep " test " | wc -l` -set failcomp = `cat ./results.log | grep FAIL | grep " compare " | wc -l` -set failbfbc = `cat ./results.log | grep FAIL | grep " bfbcomp " | wc -l` -set failgen = `cat ./results.log | grep FAIL | grep " generate " | wc -l` +set failbuild = `cat ./results.log | grep FAIL | grep " build" | wc -l` +set failrun = `cat ./results.log | grep FAIL | grep " run" | wc -l` +set failtest = `cat ./results.log | grep FAIL | grep " test" | wc -l` +set failcomp = `cat ./results.log | grep FAIL | grep " compare" | wc -l` +set failclog = `cat ./results.log | grep FAIL | grep " complog" | wc -l` +set failbfbc = `cat ./results.log | grep FAIL | grep " bfbcomp" | wc -l` +set failgen = `cat ./results.log | grep FAIL | grep " generate" | wc -l` set success = `cat ./results.log | grep 'PASS\|COPY' | wc -l` set comments = `cat ./results.log | grep "#" | wc -l` set alltotal = `cat ./results.log | wc -l` @@ -29,9 +30,13 @@ echo " #failbuild = $failbuild" >> results.log echo " #failrun = $failrun" >> results.log echo " #failtest = $failtest" >> results.log echo " #failcomp = $failcomp" >> results.log +echo " #failclog = $failclog" >> results.log echo " #failbfbc = $failbfbc" >> results.log echo " #failgen = $failgen" >> results.log +set stamp 
= `date '+%y%m%d-%H%M%S'` +cp results.log results.$stamp.log + echo "" echo "Descriptors:" echo " PASS - successful completion" @@ -49,6 +54,7 @@ echo "$failures of $chkcnt tests FAILED" #echo " $failrun of $failures FAILED run" #echo " $failtest of $failures FAILED test" #echo " $failcomp of $failures FAILED compare" +#echo " $failclog of $failures FAILED compare" #echo " $failbfbc of $failures FAILED bfbcomp" #echo " $failgen of $failures FAILED generate" exit $failures diff --git a/configuration/scripts/tests/gridsys_suite.ts b/configuration/scripts/tests/gridsys_suite.ts index 10d8ae36c..d9752073f 100644 --- a/configuration/scripts/tests/gridsys_suite.ts +++ b/configuration/scripts/tests/gridsys_suite.ts @@ -8,15 +8,10 @@ smoke gbox80 1x1 box2001 smoke gbox80 2x2 boxwallblock smoke gbox80 1x1 boxslotcyl smoke gbox80 2x4 boxnodyn -#smoke gbox80 2x2 boxsymn,run1day -smoke gbox80 4x2 boxsyme,run1day -#smoke gbox80 4x1 boxsymne,run1day -#smoke gbox80 2x2 boxsymn,run1day,kmtislands -smoke gbox80 4x1 boxsyme,run1day,kmtislands -#smoke gbox80 4x2 boxsymne,run1day,kmtislands -#smoke gbox80 8x1 boxislandsn,run1day -smoke gbox80 4x2 boxislandse,run1day -#smoke gbox80 2x4 boxislandsne,run1day +smoke gbox80 4x2 boxclosed,boxforcee,run1day +smoke gbox80 4x1 boxclosed,boxforcene,run1day,kmtislands +smoke gbox80 4x2 boxopen,kmtislands,boxforcee,run1day +smoke gbox80 2x2 boxclosed,boxforcen,run1day,vargrid smoke gx3 1x1x100x116x1 reprosum,run10day smoke gx3 1x1x25x29x16 reprosum,run10day,dwblockall smoke_gx3_1x1x100x116x1_reprosum_run10day smoke gx3 1x1x5x4x580 reprosum,run10day,dwblockall smoke_gx3_1x1x100x116x1_reprosum_run10day @@ -35,15 +30,10 @@ smoke gbox80 1x1 box2001,gridcd smoke gbox80 2x2 boxwallblock,gridcd smoke gbox80 1x1 boxslotcyl,gridcd smoke gbox80 2x4 boxnodyn,gridcd -#smoke gbox80 2x2 boxsymn,run1day,gridcd -smoke gbox80 4x2 boxsyme,run1day,gridcd -#smoke gbox80 4x1 boxsymne,run1day,gridcd -#smoke gbox80 2x2 boxsymn,run1day,kmtislands,gridcd -smoke gbox80 4x1 boxsyme,run1day,kmtislands,gridcd -#smoke gbox80 4x2 boxsymne,run1day,kmtislands,gridcd -#smoke gbox80 8x1 boxislandsn,run1day,gridcd -smoke gbox80 4x2 boxislandse,run1day,gridcd -#smoke gbox80 2x4 boxislandsne,run1day,gridcd +smoke gbox80 4x2 boxclosed,boxforcee,run1day,gridcd +smoke gbox80 4x1 boxclosed,boxforcene,run1day,kmtislands,gridcd +smoke gbox80 4x2 boxopen,kmtislands,boxforcee,run1day,gridcd +smoke gbox80 2x2 boxclosed,boxforcen,run1day,vargrid,gridcd smoke gx3 1x1x100x116x1 reprosum,run10day,gridcd smoke gx3 1x1x25x29x16 reprosum,run10day,dwblockall,gridcd smoke_gx3_1x1x100x116x1_gridcd_reprosum_run10day smoke gx3 1x1x5x4x580 reprosum,run10day,dwblockall,gridcd smoke_gx3_1x1x100x116x1_gridcd_reprosum_run10day @@ -62,15 +52,10 @@ smoke gbox80 1x1 box2001,gridc smoke gbox80 2x2 boxwallblock,gridc smoke gbox80 1x1 boxslotcyl,gridc smoke gbox80 2x4 boxnodyn,gridc -#smoke gbox80 2x2 boxsymn,run1day,gridc -smoke gbox80 4x2 boxsyme,run1day,gridc -#smoke gbox80 4x1 boxsymne,run1day,gridc -#smoke gbox80 2x2 boxsymn,run1day,kmtislands,gridc -smoke gbox80 4x1 boxsyme,run1day,kmtislands,gridc -#smoke gbox80 4x2 boxsymne,run1day,kmtislands,gridc -#smoke gbox80 8x1 boxislandsn,run1day,gridc -smoke gbox80 4x2 boxislandse,run1day,gridc -#smoke gbox80 2x4 boxislandsne,run1day,gridc +smoke gbox80 4x2 boxclosed,boxforcee,run1day,gridc +smoke gbox80 4x1 boxclosed,boxforcene,run1day,kmtislands,gridc +smoke gbox80 4x2 boxopen,kmtislands,boxforcee,run1day,gridc +smoke gbox80 2x2 boxclosed,boxforcen,run1day,vargrid,gridc smoke gx3 
1x1x100x116x1 reprosum,run10day,gridc smoke gx3 1x1x25x29x16 reprosum,run10day,dwblockall,gridc smoke_gx3_1x1x100x116x1_gridc_reprosum_run10day smoke gx3 1x1x5x4x580 reprosum,run10day,dwblockall,gridc smoke_gx3_1x1x100x116x1_gridc_reprosum_run10day @@ -78,4 +63,4 @@ smoke gx3 1x1x5x4x580 reprosum,run10day,gridc smoke smoke gx1 32x1x16x16x32 reprosum,run10day,gridc smoke gx1 32x1x16x16x32 reprosum,run10day,cmplogrest,dwblockall,gridc smoke_gx1_32x1x16x16x32_gridc_reprosum_run10day smoke gx1 32x1x16x12x40 reprosum,run10day,cmplogrest,dwblockall,gridc smoke_gx1_32x1x16x16x32_gridc_reprosum_run10day -smoke gx1 32x1x16x12x40 reprosum,run10day,cmplogrest,gridc smoke_gx1_32x1x16x16x32_gridc_reprosum_run10day +smoke gx1 32x1x16x12x40 reprosum,run10day,cmplogrest,gridc smoke_gx1_32x1x16x16x32_gridc_reprosum_run10day diff --git a/configuration/scripts/tests/io_suite.ts b/configuration/scripts/tests/io_suite.ts index 01ea21ec2..84d064f32 100644 --- a/configuration/scripts/tests/io_suite.ts +++ b/configuration/scripts/tests/io_suite.ts @@ -23,7 +23,7 @@ restart gx3 8x4 alt04,histall,ionetcdf,cdf64 restart gx3 8x4 alt05,histall,ionetcdf,precision8,cdf64 restart gx3 16x2 alt06,histall,ionetcdf restart gx3 16x2 alt07,histall,ionetcdf -restart gx3 30x1 bgcz,histall,ionetcdf +restart gx3 30x1 bgczm,histall,ionetcdf restart gx3 15x2 bgcskl,histall,ionetcdf,precision8 restart gx3 31x1 isotope,histall,ionetcdf,cdf64 restart gx3 14x2 fsd12,histall,ionetcdf,precision8 @@ -37,7 +37,7 @@ restart gx3 8x4 alt04,histall,iopio1,precision8,cdf64 restart gx3 8x4 alt05,histall,iopio1,cdf64 restart gx3 32x1 alt06,histall,iopio1,precision8 restart gx3 32x1 alt07,histall,iopio1,precision8 -restart gx3 16x2 bgcz,histall,iopio1,precision8 +restart gx3 16x2 bgczm,histall,iopio1,precision8 restart gx3 30x1 bgcskl,histall,iopio1 restart gx3 8x4 isotope,histall,iopio1,precision8,cdf64 restart gx3 12x2 fsd12,histall,iopio1,cdf64 @@ -51,7 +51,7 @@ restart gx3 8x4 alt04,histall,iopio2 restart gx3 8x4 alt05,histall,iopio2,precision8,cdf64 restart gx3 16x2 alt06,histall,iopio2,cdf64 restart gx3 16x2 alt07,histall,iopio2,cdf64 -restart gx3 16x2 bgcz,histall,iopio2,cdf64 +restart gx3 16x2 bgczm,histall,iopio2,cdf64 restart gx3 30x1 bgcskl,histall,iopio2,precision8 restart gx3 8x4 isotope,histall,iopio2 restart gx3 12x2 fsd12,histall,iopio2,precision8,cdf64 @@ -65,7 +65,7 @@ restart gx3 8x4 alt04,histall,iopio1p,precision8 restart gx3 8x4 alt05,histall,iopio1p restart gx3 6x4 alt06,histall,iopio1p,precision8,cdf64 restart gx3 6x4 alt07,histall,iopio1p,precision8,cdf64 -restart gx3 16x2 bgcz,histall,iopio1p,precision8,cdf64 +restart gx3 16x2 bgczm,histall,iopio1p,precision8,cdf64 restart gx3 30x1 bgcskl,histall,iopio1p,cdf64 restart gx3 8x4 isotope,histall,iopio1p,precision8 restart gx3 12x2 fsd12,histall,iopio1p @@ -79,7 +79,7 @@ restart gx3 8x4 alt04,histall,iopio2p,cdf64 restart gx3 8x4 alt05,histall,iopio2p,precision8 restart gx3 24x1 alt06,histall,iopio2p restart gx3 24x1 alt07,histall,iopio2p -restart gx3 16x2 bgcz,histall,iopio2p +restart gx3 16x2 bgczm,histall,iopio2p restart gx3 30x1 bgcskl,histall,iopio2p,precision8,cdf64 restart gx3 8x4 isotope,histall,iopio2p,cdf64 restart gx3 12x2 fsd12,histall,iopio2p,precision8 diff --git a/configuration/scripts/tests/nothread_suite.ts b/configuration/scripts/tests/nothread_suite.ts index 93839b000..a262ec135 100644 --- a/configuration/scripts/tests/nothread_suite.ts +++ b/configuration/scripts/tests/nothread_suite.ts @@ -54,7 +54,7 @@ restart gx3 12x1 short #restart gx1 24x1 short smoke 
gx3 16x1 bgcz -smoke gx3 16x1 bgcz,debug +smoke gx3 16x1 bgczm,debug smoke gx3 24x1 bgcskl,debug #tcraig, hangs nodes intermittently on izumi #restart gx1 128x1 bgcsklclim,medium diff --git a/configuration/scripts/tests/omp_suite.ts b/configuration/scripts/tests/omp_suite.ts index 5d5e18376..686fa72db 100644 --- a/configuration/scripts/tests/omp_suite.ts +++ b/configuration/scripts/tests/omp_suite.ts @@ -8,7 +8,7 @@ smoke gx3 4x4 alt04,reprosum,run10day smoke gx3 4x4 alt05,reprosum,run10day smoke gx3 8x2 alt06,reprosum,run10day smoke gx3 7x2 alt07,reprosum,run10day -smoke gx3 8x2 bgcz,reprosum,run10day +smoke gx3 8x2 bgczm,reprosum,run10day smoke gx1 15x2 reprosum,run10day smoke gx1 15x2 seabedprob,reprosum,run10day smoke gx3 14x2 fsd12,reprosum,run10day @@ -34,7 +34,7 @@ smoke gx3 8x1 alt04,reprosum,run10day,cmplogrest,thread smoke gx3 8x1 alt05,reprosum,run10day,cmplogrest,thread smoke_gx3_4x4_alt05_reprosum_run10day smoke gx3 8x1 alt06,reprosum,run10day,cmplogrest,thread smoke_gx3_8x2_alt06_reprosum_run10day smoke gx3 8x1 alt07,reprosum,run10day,cmplogrest,thread smoke_gx3_7x2_alt07_reprosum_run10day -smoke gx3 8x1 bgcz,reprosum,run10day,cmplogrest,thread smoke_gx3_8x2_bgcz_reprosum_run10day +smoke gx3 8x1 bgczm,reprosum,run10day,cmplogrest,thread smoke_gx3_8x2_bgczm_reprosum_run10day smoke gx1 18x1 reprosum,run10day,cmplogrest,thread smoke_gx1_15x2_reprosum_run10day smoke gx1 18x1 seabedprob,reprosum,run10day,cmplogrest,thread smoke_gx1_15x2_reprosum_run10day_seabedprob smoke gx3 8x1 fsd12,reprosum,run10day,cmplogrest,thread smoke_gx3_14x2_fsd12_reprosum_run10day @@ -62,7 +62,7 @@ smoke gx3 4x4 alt04,reprosum,run10day,gridc smoke gx3 4x4 alt05,reprosum,run10day,gridc smoke gx3 8x2 alt06,reprosum,run10day,gridc smoke gx3 7x2 alt07,reprosum,run10day,gridc -smoke gx3 8x2 bgcz,reprosum,run10day,gridc +smoke gx3 8x2 bgczm,reprosum,run10day,gridc smoke gx1 15x2 reprosum,run10day,gridc smoke gx1 15x2 seabedprob,reprosum,run10day,gridc smoke gx3 14x2 fsd12,reprosum,run10day,gridc @@ -88,7 +88,7 @@ smoke gx3 8x1 alt04,reprosum,run10day,cmplogrest,thread,grid smoke gx3 8x1 alt05,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_4x4_alt05_gridc_reprosum_run10day smoke gx3 8x1 alt06,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_8x2_alt06_gridc_reprosum_run10day smoke gx3 8x1 alt07,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_7x2_alt07_gridc_reprosum_run10day -smoke gx3 8x1 bgcz,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_8x2_bgcz_gridc_reprosum_run10day +smoke gx3 8x1 bgczm,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_8x2_bgczm_gridc_reprosum_run10day smoke gx1 18x1 reprosum,run10day,cmplogrest,thread,gridc smoke_gx1_15x2_gridc_reprosum_run10day smoke gx1 18x1 seabedprob,reprosum,run10day,cmplogrest,thread,gridc smoke_gx1_15x2_gridc_reprosum_run10day_seabedprob smoke gx3 8x1 fsd12,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_14x2_fsd12_gridc_reprosum_run10day @@ -116,7 +116,7 @@ smoke gx3 4x4 alt04,reprosum,run10day,gridcd smoke gx3 4x4 alt05,reprosum,run10day,gridcd smoke gx3 8x2 alt06,reprosum,run10day,gridcd smoke gx3 7x2 alt07,reprosum,run10day,gridcd -smoke gx3 8x2 bgcz,reprosum,run10day,gridcd +smoke gx3 8x2 bgczm,reprosum,run10day,gridcd smoke gx1 15x2 reprosum,run10day,gridcd smoke gx1 15x2 seabedprob,reprosum,run10day,gridcd smoke gx3 14x2 fsd12,reprosum,run10day,gridcd @@ -142,7 +142,7 @@ smoke gx3 8x1 alt04,reprosum,run10day,cmplogrest,thread,grid smoke gx3 8x1 alt05,reprosum,run10day,cmplogrest,thread,gridcd 
smoke_gx3_4x4_alt05_gridcd_reprosum_run10day smoke gx3 8x1 alt06,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx3_8x2_alt06_gridcd_reprosum_run10day smoke gx3 8x1 alt07,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx3_7x2_alt07_gridcd_reprosum_run10day -smoke gx3 8x1 bgcz,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx3_8x2_bgcz_gridcd_reprosum_run10day +smoke gx3 8x1 bgczm,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx3_8x2_bgczm_gridcd_reprosum_run10day smoke gx1 18x1 reprosum,run10day,cmplogrest,thread,gridcd smoke_gx1_15x2_gridcd_reprosum_run10day smoke gx1 18x1 seabedprob,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx1_15x2_gridcd_reprosum_run10day_seabedprob smoke gx3 8x1 fsd12,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx3_14x2_fsd12_gridcd_reprosum_run10day diff --git a/configuration/tools/cice4_restart_conversion/convert_restarts.f90 b/configuration/tools/cice4_restart_conversion/convert_restarts.f90 index 30c952510..3717f7d86 100644 --- a/configuration/tools/cice4_restart_conversion/convert_restarts.f90 +++ b/configuration/tools/cice4_restart_conversion/convert_restarts.f90 @@ -39,12 +39,8 @@ program convert_restarts logical (kind=log_kind), parameter :: & oceanmixed_ice = .true., & ! if true, read/write ocean mixed layer fields heat_capacity = .true., & ! if true, ice has nonzero heat capacity -#ifdef UNDEPRECATE_0LAYER - ! if false, use zero-layer thermodynamics -#else ! heat_capacity = .false. (zero-layer thermodynamics) ! has been deprecated in CICE and Icepack -#endif diag = .true. ! write min/max diagnostics for fields ! file names diff --git a/doc/source/cice_index.rst b/doc/source/cice_index.rst index 99679e791..a8b9d08f1 100644 --- a/doc/source/cice_index.rst +++ b/doc/source/cice_index.rst @@ -9,7 +9,7 @@ This index defines many of the symbols used frequently in the CICE model code. Namelist variables are partly included here but also documented elsewhere, see Section :ref:`tabnamelist`. All quantities in the code are expressed in MKS units (temperatures may take -either Celsius or Kelvin units). +either Celsius or Kelvin units). Deprecated parameters are listed at the end. .. csv-table:: *Alphabetical Index* :header: " ", " ", " " @@ -72,7 +72,7 @@ either Celsius or Kelvin units). "awtvdf", "weighting factor for visible, diffuse albedo", "0.63282" "awtvdr", "weighting factor for visible, direct albedo", "0.00318" "**B**", "", "" - "bfb_flag", "for bit-for-bit reproducible diagnostics", "" + "bfbflag", "for bit-for-bit reproducible diagnostics, and reproducible outputs when using the VP solver", "" "bgc_data_dir", "data directory for bgc", "" "bgc_data_type", "source of silicate, nitrate data", "" "bgc_flux_type", "ice–ocean flux velocity", "" @@ -330,7 +330,7 @@ either Celsius or Kelvin units). "hpmin", "minimum melt pond depth (shortwave)", "0.005 m" "hpondn", "melt pond depth", "m" "hs_min", "minimum thickness for which :math:`T_s` is computed", "1.\ :math:`\times`\ 10\ :math:`^{-4}` m" - "hs0", "snow depth at which transition to ice occurs (dEdd)", "0.03 m" + "hs0", "snow depth at which transition to ice occurs (dEdd)", "m" "hs1", "snow depth of transition to pond ice", "0.03 m" "hs_ssl", "snow surface scattering layer thickness", "0.04 m" "Hstar", "determines mean thickness of ridged ice", "25. m" @@ -354,10 +354,10 @@ either Celsius or Kelvin units). 
"icells", "number of grid cells with specified property (for vectorization)", "" "iceruf", "ice surface roughness at atmosphere interface", "5.\ :math:`\times`\ 10\ :math:`^{-4}` m" "iceruf_ocn", "under-ice roughness (at ocean interface)", "0.03 m" - "iceemask", "ice extent mask (E-cell)", "" - "icenmask", "ice extent mask (N-cell)", "" - "icetmask", "ice extent mask (T-cell)", "" - "iceumask", "ice extent mask (U-cell)", "" + "iceEmask", "dynamics ice extent mask (E-cell)", "" + "iceNmask", "dynamics ice extent mask (N-cell)", "" + "iceTmask", "dynamics ice extent mask (T-cell)", "" + "iceUmask", "dynamics ice extent mask (U-cell)", "" "idate", "the date at the end of the current time step (yyyymmdd)", "" "idate0", "initial date", "" "ierr", "general-use error flag", "" @@ -699,7 +699,6 @@ either Celsius or Kelvin units). "tr_FY", "if true, use first-year area tracer", "" "tr_iage", "if true, use ice age tracer", "" "tr_lvl", "if true, use level ice area and volume tracers", "" - "tr_pond_cesm", "if true, use CESM melt pond scheme", "" "tr_pond_lvl", "if true, use level-ice melt pond scheme", "" "tr_pond_topo", "if true, use topo melt pond scheme", "" "trcr", "ice tracers", "" @@ -772,9 +771,11 @@ either Celsius or Kelvin units). "zref", "reference height for stability", "10. m" "zTrf", "reference height for :math:`T_{ref}`, :math:`Q_{ref}`, :math:`U_{ref}`", "2. m" "zvir", "gas constant (water vapor)/gas constant (air) - 1", "0.606" - -.. - ktherm=0 has been deprecated + "**Deprecated options and parameters**", "", "" "heat_capacity", "if true, use salinity-dependent thermodynamics", "T" "kseaice", "thermal conductivity of ice for zero-layer thermodynamics", "2.0 W/m/deg" "ktherm", "thermodynamic formulation (0 = zero-layer, 1 = :cite:`Bitz99`, 2 = mushy)", "" + "tr_pond_cesm", "if true, use CESM melt pond scheme", "" + +.. + new deprecation comments diff --git a/doc/source/developer_guide/dg_other.rst b/doc/source/developer_guide/dg_other.rst index 8d4f9716e..308c2629c 100644 --- a/doc/source/developer_guide/dg_other.rst +++ b/doc/source/developer_guide/dg_other.rst @@ -30,11 +30,11 @@ To run with an interactive debugger, the following general steps should be taken Reproducible Sums ---------------------- -Reproducible sums in the CICE diagnostics are set with the namelist `bfbflag`. -CICE prognostics results do NOT depend on the global sum implementation. The +Reproducible sums in CICE are set with the namelist `bfbflag`. +CICE prognostics results do NOT depend on the global sum implementation when using the default dynamics solver (EVP) or the EAP solver. With these solvers, the results are bit-for-bit identical with any `bfbflag`. The `bfbflag` only impacts the results and performance of the global diagnostics written to the CICE -log file. For best performance, the off (or lsum8 which is equivalent) setting is recommended. +log file (for all dynamics solvers), as well as the model results when using the VP solver. For best performance, the off setting is recommended. This will probably not produce bit-for-bit results with different decompositions. For bit-for-bit results, the reprosum setting is recommended. This should be only slightly slower than the lsum8 implementation. @@ -50,7 +50,7 @@ The `bfbflag` namelist is a character string with several valid settings. The tradeoff in these settings is the likelihood for bit-for-bit results versus their cost. The `bfbflag` settings are implemented as follows, -off is the default and equivalent to lsum8. 
+off is the default and mostly equivalent to lsum8 (some computations in the VP solver use a different code path when lsum8 is chosen). lsum4 is a local sum computed with single precision (4 byte) data and a scalar mpi allreduce. This is extremely unlikely to be bit-for-bit for different decompositions. diff --git a/doc/source/intro/quickstart.rst b/doc/source/intro/quickstart.rst index 56d19ee70..b68fc32d7 100644 --- a/doc/source/intro/quickstart.rst +++ b/doc/source/intro/quickstart.rst @@ -6,13 +6,14 @@ Quick Start =========== -Download the model from the CICE-Consortium repository, - https://github.com/CICE-Consortium/CICE +Clone the model from the CICE-Consortium repository:: -Instructions for working in github with CICE (and Icepack) can be -found in the `CICE Git and Workflow Guide `_. + git clone --recurse-submodules https://github.com/CICE-Consortium/CICE -You will probably have to download some inputdata, see the `CICE wiki `_ or :ref:`force`. +Instructions for working with Git and GitHub for CICE (and Icepack) can be +found in the `CICE Git Workflow Guide `_. + +You will probably have to download some input data; see the `CICE wiki `_ or :ref:`force`. Software requirements are noted in this :ref:`software` section. diff --git a/doc/source/science_guide/sg_dynamics.rst b/doc/source/science_guide/sg_dynamics.rst index 241fa05fe..e6b918538 100644 --- a/doc/source/science_guide/sg_dynamics.rst +++ b/doc/source/science_guide/sg_dynamics.rst @@ -92,8 +92,7 @@ dynamics into CICE is described in detail in The VP solver implementation mostly follows :cite:`Lemieux08`, with FGMRES :cite:`Saad93` as the linear solver and GMRES as the preconditioner. -Note that the VP solver has not yet been tested on the ``tx1`` grid or with -threading enabled. +Note that the VP solver has not yet been tested on the ``tx1`` grid. The EVP, rEVP, EAP and VP approaches are all available with the B grid. However, at the moment, only the EVP and rEVP schemes are possible with the C grid. diff --git a/doc/source/science_guide/sg_fundvars.rst b/doc/source/science_guide/sg_fundvars.rst index e80b68f4a..5b5703266 100644 --- a/doc/source/science_guide/sg_fundvars.rst +++ b/doc/source/science_guide/sg_fundvars.rst @@ -14,7 +14,7 @@ modeling is to describe the evolution of the ice thickness distribution In addition to an ice thickness distribution, CICE includes an optional capability for a floe size distribution. -Ice floe horizontal size may change through vertical and lateral growth and melting of existing floes, freezing of new ice, wave breaking, and welding of floes in freezing conditions. The floe size distribution (FSD) is a probability function that characterizes this variability. The scheme is based on the theoretical framework described in :cite:`Horvat15` for a joint floe size and thickness distribution (FSTD), and was implemented by :cite:`Roach18` and :cite:`Roach19`. The joint floe size distribution is carried as an area-weighted tracer, defined as the fraction of ice belonging to a given thickness category with lateral floe size belong to a given floe size class. This development includes interactions between sea ice and ocean surface waves. Input data on ocean surface wave spectra at a single time is provided for testing, but as with the other CICE datasets, it should not be used for production runs or publications. +Ice floe horizontal size may change through vertical and lateral growth and melting of existing floes, freezing of new ice, wave breaking, and welding of floes in freezing conditions.
The floe size distribution (FSD) is a probability function that characterizes this variability. The scheme is based on the theoretical framework described in :cite:`Horvat15` for a joint floe size and thickness distribution (FSTD), and was implemented by :cite:`Roach18` and :cite:`Roach19`. The joint floe size distribution is carried as an area-weighted tracer, defined as the fraction of ice belonging to a given thickness category with lateral floe size belonging to a given floe size class. This development includes interactions between sea ice and ocean surface waves. Input data on ocean surface wave spectra at a single time is provided for testing, but as with the other CICE datasets, it should not be used for production runs or publications. It is not recommended to use the FSD without ocean surface waves. Additional information about the ITD and joint FSTD for CICE can be found in the `Icepack documentation `_. diff --git a/doc/source/user_guide/ug_case_settings.rst b/doc/source/user_guide/ug_case_settings.rst index 64264613c..a34c69822 100644 --- a/doc/source/user_guide/ug_case_settings.rst +++ b/doc/source/user_guide/ug_case_settings.rst @@ -247,7 +247,9 @@ grid_nml "", "``pop``", "pop thickness file in cm in ascii format", "" "``close_boundaries``", "logical", "force two gridcell wide land mask on boundaries for rectangular grids", "``.false.``" "``dxrect``", "real", "x-direction grid spacing for rectangular grid in cm", "0.0" + "``dxscale``", "real", "user-defined rectgrid x-grid scale factor", "1.0" "``dyrect``", "real", "y-direction grid spacing for rectangular grid in cm", "0.0" + "``dyscale``", "real", "user-defined rectgrid y-grid scale factor", "1.0" "``gridcpl_file``", "string", "input file for coupling grid info", "'unknown_gridcpl_file'" "``grid_atm``", "``A``", "atm forcing/coupling grid, all fields on T grid", "``A``" "", "``B``", "atm forcing/coupling grid, thermo fields on T grid, dyn fields on U grid", "" @@ -277,12 +279,15 @@ grid_nml "", "default", "ocean/land mask set internally, land in upper left and lower right of domain, ", "" "", "file", "ocean/land mask setup read from file, see kmt_file", "" "", "wall", "ocean/land mask set at right edge of domain", "" + "``latrefrect``", "real", "lower left corner lat for rectgrid in deg", "71.35" + "``lonrefrect``", "real", "lower left corner lon for rectgrid in deg", "-156.5" "``nblyr``", "integer", "number of zbgc layers", "0" "``ncat``", "integer", "number of ice thickness categories", "0" "``nfsd``", "integer", "number of floe size categories", "1" "``nilyr``", "integer", "number of vertical layers in ice", "0" "``nslyr``", "integer", "number of vertical layers in snow", "0" "``orca_halogrid``", "logical", "use orca haloed grid for data/grid read", "``.false.``" + "``scale_dxdy``", "logical", "apply dxscale, dyscale to rectgrid", "``.false.``" "``use_bathymetry``", "logical", "use read in bathymetry file for seabedstress option", "``.false.``" "", "", "", "" @@ -398,9 +403,6 @@ thermo_nml "``Rac_rapid_mode``", "real", "critical Rayleigh number", "10.0" "", "", "", "" -.. - ktherm=0 has been deprecated - "", "``0``", "zero-layer thermodynamic model", "" ..
_dynamics_nml: @@ -464,7 +466,7 @@ dynamics_nml "``k1``", "real", "1st free parameter for landfast parameterization", "7.5" "``k2``", "real", "2nd free parameter (N/m\ :math:`^3`) for landfast parameterization", "15.0" "``maxits_fgmres``", "integer", "maximum number of restarts for FGMRES solver", "1" - "``maxits_nonlin``", "integer", "maximum number of nonlinear iterations for VP solver", "1000" + "``maxits_nonlin``", "integer", "maximum number of nonlinear iterations for VP solver", "10" "``maxits_pgmres``", "integer", "maximum number of restarts for PGMRES preconditioner", "1" "``monitor_fgmres``", "logical", "write velocity norm at each FGMRES iteration", "``.false.``" "``monitor_nonlin``", "logical", "write velocity norm at each nonlinear iteration", "``.false.``" @@ -477,7 +479,7 @@ dynamics_nml "", "``ident``", "Don't use a preconditioner for the FGMRES solver", "" "", "``pgmres``", "Use GMRES as preconditioner for FGMRES solver", "" "``Pstar``", "real", "constant in Hibler strength formula (N/m\ :math:`^2`)", "2.75e4" - "``reltol_fgmres``", "real", "relative tolerance for FGMRES solver", "1e-2" + "``reltol_fgmres``", "real", "relative tolerance for FGMRES solver", "1e-1" "``reltol_nonlin``", "real", "relative tolerance for nonlinear solver", "1e-8" "``reltol_pgmres``", "real", "relative tolerance for PGMRES preconditioner", "1e-6" "``revised_evp``", "logical", "use revised EVP formulation", "``.false.``" @@ -533,7 +535,7 @@ ponds_nml "``frzpnd``", "``cesm``", "CESM pond refreezing forumulation", "``cesm``" "", "``hlid``", "Stefan refreezing with pond ice thickness", "" "``hp1``", "real", "critical ice lid thickness for topo ponds in m", "0.01" - "``hs0``", "real", "snow depth of transition to bare sea ice in m", "0.03" + "``hs0``", "real", "snow depth of transition to bare sea ice in m", "" "``hs1``", "real", "snow depth of transition to pond ice in m", "0.03" "``pndaspect``", "real", "aspect ratio of pond changes (depth:area)", "0.8" "``rfracmax``", ":math:`0 \le r_{max} \le 1`", "maximum melt water added to ponds", "0.85" diff --git a/doc/source/user_guide/ug_implementation.rst b/doc/source/user_guide/ug_implementation.rst index a3f7d11bc..a7cc66948 100644 --- a/doc/source/user_guide/ug_implementation.rst +++ b/doc/source/user_guide/ug_implementation.rst @@ -312,6 +312,34 @@ Special treatment is also required in the scattering routine, and when computing global sums one of each pair of coincident points has to be excluded. +***************** +Rectangular grids +***************** + +Rectangular test grids can be defined for CICE. They are generated +internally and defined by several namelist +settings including ``grid_type`` = ``rectangular``, ``nx_global``, ``ny_global``, +``dxrect``, ``dyrect``, ``lonrefrect``, and ``latrefrect``. Forcing and +initial conditions can be set via the namelist settings ``atm_data_type``, ``ocn_data_type``, +``ice_data_type``, ``ice_data_conc``, ``ice_data_dist``. Variable grid spacing +is also supported with the namelist settings ``scale_dxdy``, which turns on +the option, and ``dxscale`` and ``dyscale``, which set the variable grid scaling +factors. Values of 1.0 will produce constant grid spacing. For rectangular grids, +``lonrefrect`` and ``latrefrect`` define the lower left longitude and latitude +value of the grid, ``dxrect`` and ``dyrect`` define the base grid spacing, and +``dxscale`` and ``dyscale`` provide the grid space scaling.
The base spacing +is set in the center of the rectangular domain and the scaling is applied symmetrically +outward as a multiplicative factor in the x and y directions. + +Several predefined rectangular grids are available in CICE with +**cice.setup --grid** including ``gbox12``, ``gbox80``, ``gbox128``, and ``gbox180`` +where 12, 80, 128, and 180 are the number of gridcells in each direction. +Several predefined options also exist, set with **cice.setup --set**, to +establish varied idealized configurations of box tests including ``box2001``, +``boxadv``, ``boxchan``, ``boxnodyn``, ``boxrestore``, ``boxslotcyl``, +``boxopen``, ``boxclosed``, and ``boxforcee``. See **cice.setup --help** for a current +list of supported settings. + ************** Vertical Grids ************** @@ -398,16 +426,16 @@ respectively) are useful in conditional statements. In addition to the land masks, two other masks are implemented in *dyn\_prep* in order to reduce the dynamics component’s work on a global grid. At each time step the logical masks ``iceTmask`` and ``iceUmask`` are determined from the current ice extent, such that they have the value “true” wherever ice exists. They also include a border of cells around the ice pack for numerical purposes. These masks are used in the dynamics component to prevent unnecessary calculations on grid points where there is no ice. They are not used in the thermodynamics component, so that ice may form in previously ice-free cells. Like the -land masks ``hm`` and ``uvm``, the ice extent masks ``icetmask`` and ``iceumask`` +land masks ``hm`` and ``uvm``, the ice extent masks ``iceTmask`` and ``iceUmask`` are for T-cells and U-cells, respectively. Note that the ice extent masks -``iceemask`` and ``icenmask`` are also defined when using the C or CD grid. +``iceEmask`` and ``iceNmask`` are also defined when using the C or CD grid. Improved parallel performance may result from utilizing halo masks for boundary updates of the full ice state, incremental remapping transport, diff --git a/icepack b/icepack index 3a039e598..18fc1c9b7 160000 --- a/icepack +++ b/icepack @@ -1 +1 @@ -Subproject commit 3a039e598e6395333a278bb1822f03e9bc954ac6 +Subproject commit 18fc1c9b79d81604eafdb1fac4ddd039b78ad390 From ed472ab7daa2b4ddd50f79300c22cc4cdc11f13c Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Mon, 9 Jan 2023 12:53:05 +0000 Subject: [PATCH 73/90] fix for nudiag_set allow nudiag_set to be available outside of cesm; may prefer to fix in coupling interface --- cicecore/shared/ice_fileunits.F90 | 2 -- 1 file changed, 2 deletions(-) diff --git a/cicecore/shared/ice_fileunits.F90 b/cicecore/shared/ice_fileunits.F90 index 7e425e5e7..72a40f513 100644 --- a/cicecore/shared/ice_fileunits.F90 +++ b/cicecore/shared/ice_fileunits.F90 @@ -81,10 +81,8 @@ module ice_fileunits integer (kind=int_kind), public :: & nu_diag = ice_stdout ! diagnostics output file, unit number may be overwritten -#ifdef CESMCOUPLED logical (kind=log_kind), public :: & nu_diag_set = .false. ! flag to indicate whether nu_diag is already set -#endif integer (kind=int_kind), public :: & ice_IOUnitsMinUnit = 11, & !
do not use unit numbers below From e628a9a696bd146a0036e946884b9f9102ebd8e4 Mon Sep 17 00:00:00 2001 From: Denise Worthen Date: Mon, 23 Jan 2023 07:58:06 -0500 Subject: [PATCH 74/90] Update CICE for latest Consortium/main (#56) --- .github/workflows/test-cice.yml | 14 + cice.setup | 17 +- .../analysis/ice_diagnostics.F90 | 45 +- .../analysis/ice_diagnostics_bgc.F90 | 0 .../analysis/ice_history.F90 | 65 +- .../analysis/ice_history_bgc.F90 | 31 +- .../analysis/ice_history_drag.F90 | 30 +- .../analysis/ice_history_fsd.F90 | 30 +- .../analysis/ice_history_mechred.F90 | 30 +- .../analysis/ice_history_pond.F90 | 36 +- .../analysis/ice_history_shared.F90 | 2 +- .../analysis/ice_history_snow.F90 | 34 +- .../dynamics/ice_dyn_eap.F90 | 129 ++-- .../dynamics/ice_dyn_evp.F90 | 617 ++++++++++-------- .../dynamics/ice_dyn_evp_1d.F90 | 2 +- .../dynamics/ice_dyn_shared.F90 | 45 +- .../dynamics/ice_dyn_vp.F90 | 20 +- .../dynamics/ice_transport_driver.F90 | 0 .../dynamics/ice_transport_remap.F90 | 0 .../general/ice_flux.F90 | 0 .../general/ice_flux_bgc.F90 | 0 .../general/ice_forcing.F90 | 16 +- .../general/ice_forcing_bgc.F90 | 0 .../general/ice_init.F90 | 271 +++++--- .../general/ice_state.F90 | 0 .../general/ice_step_mod.F90 | 0 .../infrastructure/comm/mpi/ice_boundary.F90 | 3 - .../infrastructure/comm/mpi/ice_broadcast.F90 | 0 .../comm/mpi/ice_communicate.F90 | 0 .../infrastructure/comm/mpi/ice_exit.F90 | 0 .../comm/mpi/ice_gather_scatter.F90 | 0 .../comm/mpi/ice_global_reductions.F90 | 0 .../infrastructure/comm/mpi/ice_reprosum.F90 | 0 .../infrastructure/comm/mpi/ice_timers.F90 | 0 .../comm/serial/ice_boundary.F90 | 3 - .../comm/serial/ice_broadcast.F90 | 0 .../comm/serial/ice_communicate.F90 | 0 .../infrastructure/comm/serial/ice_exit.F90 | 0 .../comm/serial/ice_gather_scatter.F90 | 0 .../comm/serial/ice_global_reductions.F90 | 0 .../comm/serial/ice_reprosum.F90 | 0 .../infrastructure/comm/serial/ice_timers.F90 | 0 .../infrastructure/ice_blocks.F90 | 0 .../infrastructure/ice_domain.F90 | 34 +- .../infrastructure/ice_grid.F90 | 109 ++-- .../infrastructure/ice_memusage.F90 | 0 .../infrastructure/ice_memusage_gptl.c | 2 +- .../infrastructure/ice_read_write.F90 | 0 .../infrastructure/ice_restart_driver.F90 | 156 +++-- .../infrastructure/ice_restoring.F90 | 0 .../infrastructure/ice_shr_reprosum86.c | 0 .../io/io_binary/ice_history_write.F90 | 0 .../io/io_binary/ice_restart.F90 | 0 .../io/io_netcdf/ice_history_write.F90 | 0 .../io/io_netcdf/ice_restart.F90 | 0 .../io/io_pio2/ice_history_write.F90 | 0 .../infrastructure/io/io_pio2/ice_pio.F90 | 0 .../infrastructure/io/io_pio2/ice_restart.F90 | 15 +- cicecore/cicedynB | 1 + .../drivers/direct/hadgem3/CICE_InitMod.F90 | 14 +- .../direct/nemo_concepts/CICE_InitMod.F90 | 14 +- cicecore/drivers/mct/cesm1/CICE_InitMod.F90 | 12 +- cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 | 13 +- .../drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 6 +- .../drivers/nuopc/cmeps/ice_import_export.F90 | 4 +- cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 | 31 +- .../drivers/standalone/cice/CICE_InitMod.F90 | 12 +- .../unittest/gridavgchk/CICE_InitMod.F90 | 12 +- .../drivers/unittest/sumchk/CICE_InitMod.F90 | 13 +- cicecore/shared/ice_fileunits.F90 | 64 +- cicecore/shared/ice_init_column.F90 | 9 + cicecore/version.txt | 2 +- configuration/scripts/cice.build | 12 +- configuration/scripts/ice_in | 2 + configuration/scripts/options/set_nml.dyneap | 1 + .../scripts/options/set_nml.saltflux | 2 + configuration/scripts/tests/base_suite.ts | 3 +- configuration/scripts/tests/decomp_suite.ts | 2 + 
configuration/scripts/tests/first_suite.ts | 15 +- configuration/scripts/tests/gridsys_suite.ts | 13 +- configuration/scripts/tests/omp_suite.ts | 112 ++-- configuration/scripts/tests/perf_suite.ts | 33 +- doc/source/cice_index.rst | 3 +- doc/source/conf.py | 4 +- doc/source/developer_guide/dg_dynamics.rst | 22 +- doc/source/developer_guide/dg_forcing.rst | 2 +- doc/source/science_guide/sg_horiztrans.rst | 9 +- doc/source/science_guide/sg_tracers.rst | 2 +- doc/source/user_guide/ug_case_settings.rst | 21 +- doc/source/user_guide/ug_implementation.rst | 2 +- doc/source/user_guide/ug_testing.rst | 83 ++- doc/source/user_guide/ug_troubleshooting.rst | 20 + icepack | 2 +- 93 files changed, 1446 insertions(+), 882 deletions(-) rename cicecore/{cicedynB => cicedyn}/analysis/ice_diagnostics.F90 (98%) rename cicecore/{cicedynB => cicedyn}/analysis/ice_diagnostics_bgc.F90 (100%) rename cicecore/{cicedynB => cicedyn}/analysis/ice_history.F90 (98%) rename cicecore/{cicedynB => cicedyn}/analysis/ice_history_bgc.F90 (99%) rename cicecore/{cicedynB => cicedyn}/analysis/ice_history_drag.F90 (92%) rename cicecore/{cicedynB => cicedyn}/analysis/ice_history_fsd.F90 (95%) rename cicecore/{cicedynB => cicedyn}/analysis/ice_history_mechred.F90 (94%) rename cicecore/{cicedynB => cicedyn}/analysis/ice_history_pond.F90 (92%) rename cicecore/{cicedynB => cicedyn}/analysis/ice_history_shared.F90 (99%) rename cicecore/{cicedynB => cicedyn}/analysis/ice_history_snow.F90 (93%) rename cicecore/{cicedynB => cicedyn}/dynamics/ice_dyn_eap.F90 (98%) rename cicecore/{cicedynB => cicedyn}/dynamics/ice_dyn_evp.F90 (83%) rename cicecore/{cicedynB => cicedyn}/dynamics/ice_dyn_evp_1d.F90 (99%) rename cicecore/{cicedynB => cicedyn}/dynamics/ice_dyn_shared.F90 (99%) rename cicecore/{cicedynB => cicedyn}/dynamics/ice_dyn_vp.F90 (99%) rename cicecore/{cicedynB => cicedyn}/dynamics/ice_transport_driver.F90 (100%) rename cicecore/{cicedynB => cicedyn}/dynamics/ice_transport_remap.F90 (100%) rename cicecore/{cicedynB => cicedyn}/general/ice_flux.F90 (100%) rename cicecore/{cicedynB => cicedyn}/general/ice_flux_bgc.F90 (100%) rename cicecore/{cicedynB => cicedyn}/general/ice_forcing.F90 (99%) rename cicecore/{cicedynB => cicedyn}/general/ice_forcing_bgc.F90 (100%) rename cicecore/{cicedynB => cicedyn}/general/ice_init.F90 (94%) rename cicecore/{cicedynB => cicedyn}/general/ice_state.F90 (100%) rename cicecore/{cicedynB => cicedyn}/general/ice_step_mod.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/mpi/ice_boundary.F90 (99%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/mpi/ice_broadcast.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/mpi/ice_communicate.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/mpi/ice_exit.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/mpi/ice_gather_scatter.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/mpi/ice_global_reductions.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/mpi/ice_reprosum.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/mpi/ice_timers.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/serial/ice_boundary.F90 (99%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/serial/ice_broadcast.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/serial/ice_communicate.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/serial/ice_exit.F90 (100%) rename cicecore/{cicedynB => 
cicedyn}/infrastructure/comm/serial/ice_gather_scatter.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/serial/ice_global_reductions.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/serial/ice_reprosum.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/serial/ice_timers.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/ice_blocks.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/ice_domain.F90 (97%) rename cicecore/{cicedynB => cicedyn}/infrastructure/ice_grid.F90 (99%) rename cicecore/{cicedynB => cicedyn}/infrastructure/ice_memusage.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/ice_memusage_gptl.c (98%) rename cicecore/{cicedynB => cicedyn}/infrastructure/ice_read_write.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/ice_restart_driver.F90 (92%) rename cicecore/{cicedynB => cicedyn}/infrastructure/ice_restoring.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/ice_shr_reprosum86.c (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/io/io_binary/ice_history_write.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/io/io_binary/ice_restart.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/io/io_netcdf/ice_history_write.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/io/io_netcdf/ice_restart.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/io/io_pio2/ice_history_write.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/io/io_pio2/ice_pio.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/io/io_pio2/ice_restart.F90 (98%) create mode 120000 cicecore/cicedynB create mode 100644 configuration/scripts/options/set_nml.dyneap create mode 100644 configuration/scripts/options/set_nml.saltflux diff --git a/.github/workflows/test-cice.yml b/.github/workflows/test-cice.yml index c60a07721..70fdc4c14 100644 --- a/.github/workflows/test-cice.yml +++ b/.github/workflows/test-cice.yml @@ -74,6 +74,20 @@ jobs: ln -s ${GITHUB_WORKSPACE}/../CICE ${HOME}/cice # ls -al ${HOME}/ # ls -al ${GITHUB_WORKSPACE}/ + - name: check for tabs + run: | + cd $HOME/cice/cicecore + set cnt = 0 + set ffiles = `find -P . -iname "*.f*"` + set cfiles = `find -P . 
-iname "*.c*"` + foreach file ($ffiles $cfiles) + set fcnt = `sed -n '/\t/p' $file | wc -l` + @ cnt = $cnt + $fcnt + if ($fcnt > 0) then + echo "TAB found: $fcnt $file" + endif + end + exit $cnt - name: setup conda env shell: /bin/bash {0} run: | diff --git a/cice.setup b/cice.setup index 586fe3464..30da0ed2e 100755 --- a/cice.setup +++ b/cice.setup @@ -455,7 +455,22 @@ if ( ${dosuite} == 0 ) then set sets = "" else - set tarrays = `echo ${testsuite} | sed 's/,/ /g' | fmt -1 | sort -u` + # generate unique set of suites in tarrays in order they are set + set tarrays0 = `echo ${testsuite} | sed 's/,/ /g' | fmt -1 ` + #echo "${0}: tarrays0 = ${tarrays0}" + set tarrays = $tarrays0[1] + foreach t1 ( ${tarrays0} ) + set found = 0 + foreach t2 ( ${tarrays} ) + if ( ${t1} == ${t2} ) then + set found = 1 + endif + end + if ( ${found} == 0 ) then + set tarrays = ( ${tarrays} ${t1} ) + endif + end + #echo "${0}: tarrays = ${tarrays}" set testsuitecnt = 0 foreach tarray ( ${tarrays} ) @ testsuitecnt = ${testsuitecnt} + 1 diff --git a/cicecore/cicedynB/analysis/ice_diagnostics.F90 b/cicecore/cicedyn/analysis/ice_diagnostics.F90 similarity index 98% rename from cicecore/cicedynB/analysis/ice_diagnostics.F90 rename to cicecore/cicedyn/analysis/ice_diagnostics.F90 index 8879d6632..53631b2d4 100644 --- a/cicecore/cicedynB/analysis/ice_diagnostics.F90 +++ b/cicecore/cicedyn/analysis/ice_diagnostics.F90 @@ -87,6 +87,8 @@ module ice_diagnostics totms , & ! total ice/snow water mass (sh) totmin , & ! total ice water mass (nh) totmis , & ! total ice water mass (sh) + totsn , & ! total salt mass (nh) + totss , & ! total salt mass (sh) toten , & ! total ice/snow energy (J) totes ! total ice/snow energy (J) @@ -154,7 +156,7 @@ subroutine runtime_diags (dt) rhofresh, lfresh, lvap, ice_ref_salinity, Tffresh character (len=char_len) :: & - snwredist + snwredist, saltflux_option ! hemispheric state quantities real (kind=dbl_kind) :: & @@ -162,6 +164,8 @@ subroutine runtime_diags (dt) umaxs, hmaxs, shmaxs, areas, snwmxs, extents, shmaxst, & etotn, mtotn, micen, msnwn, pmaxn, ketotn, & etots, mtots, mices, msnws, pmaxs, ketots, & + stotn, & + stots, & urmsn, albtotn, arean_alb, mpndn, ptotn, spondn, & urmss, albtots, areas_alb, mpnds, ptots, sponds @@ -226,7 +230,7 @@ subroutine runtime_diags (dt) awtvdr_out=awtvdr, awtidr_out=awtidr, awtvdf_out=awtvdf, awtidf_out=awtidf, & rhofresh_out=rhofresh, lfresh_out=lfresh, lvap_out=lvap, & ice_ref_salinity_out=ice_ref_salinity,snwredist_out=snwredist, & - snwgrain_out=snwgrain) + snwgrain_out=snwgrain, saltflux_option_out=saltflux_option) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -512,6 +516,15 @@ subroutine runtime_diags (dt) etots = global_sum(work1, distrb_info, & field_loc_center, tareas) + ! total salt volume + call total_salt (work2) + + stotn = global_sum(work2, distrb_info, & + field_loc_center, tarean) + stots = global_sum(work2, distrb_info, & + field_loc_center, tareas) + + !----------------------------------------------------------------- ! various fluxes !----------------------------------------------------------------- @@ -785,12 +798,22 @@ subroutine runtime_diags (dt) swerrs = (fswnets - fswdns) / (fswnets - c1) ! salt mass - msltn = micen*ice_ref_salinity*p001 - mslts = mices*ice_ref_salinity*p001 + if (saltflux_option == 'prognostic') then + ! compute the total salt mass + msltn = stotn*rhoi*p001 + mslts = stots*rhoi*p001 + + ! 
change in salt mass + delmsltn = rhoi*(stotn-totsn)*p001 + delmslts = rhoi*(stots-totss)*p001 + else + msltn = micen*ice_ref_salinity*p001 + mslts = mices*ice_ref_salinity*p001 - ! change in salt mass - delmsltn = delmxn*ice_ref_salinity*p001 - delmslts = delmxs*ice_ref_salinity*p001 + ! change in salt mass + delmsltn = delmxn*ice_ref_salinity*p001 + delmslts = delmxs*ice_ref_salinity*p001 + endif ! salt error serrn = (sfsaltn + delmsltn) / (msltn + c1) @@ -1275,7 +1298,7 @@ subroutine init_mass_diags rhoi, rhos, rhofresh real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & - work1 + work1, work2 character(len=*), parameter :: subname = '(init_mass_diags)' @@ -1310,6 +1333,12 @@ subroutine init_mass_diags toten = global_sum(work1, distrb_info, field_loc_center, tarean) totes = global_sum(work1, distrb_info, field_loc_center, tareas) + ! north/south salt + call total_salt (work2) + totsn = global_sum(work2, distrb_info, field_loc_center, tarean) + totss = global_sum(work2, distrb_info, field_loc_center, tareas) + + if (print_points) then do n = 1, npnt if (my_task == pmloc(n)) then diff --git a/cicecore/cicedynB/analysis/ice_diagnostics_bgc.F90 b/cicecore/cicedyn/analysis/ice_diagnostics_bgc.F90 similarity index 100% rename from cicecore/cicedynB/analysis/ice_diagnostics_bgc.F90 rename to cicecore/cicedyn/analysis/ice_diagnostics_bgc.F90 diff --git a/cicecore/cicedynB/analysis/ice_history.F90 b/cicecore/cicedyn/analysis/ice_history.F90 similarity index 98% rename from cicecore/cicedynB/analysis/ice_history.F90 rename to cicecore/cicedyn/analysis/ice_history.F90 index f5e7d0d16..f19158f6a 100644 --- a/cicecore/cicedynB/analysis/ice_history.F90 +++ b/cicecore/cicedyn/analysis/ice_history.F90 @@ -81,6 +81,7 @@ subroutine init_hist (dt) use ice_history_fsd, only: init_hist_fsd_2D, init_hist_fsd_3Df, & init_hist_fsd_4Df, f_afsd, f_afsdn use ice_restart_shared, only: restart + use ice_fileunits, only: goto_nml real (kind=dbl_kind), intent(in) :: & dt ! time step @@ -102,7 +103,11 @@ subroutine init_hist (dt) character (len=25) :: & cstr_gat, cstr_gau, cstr_gav, & ! mask area name for t, u, v atm grid (ga) cstr_got, cstr_gou, cstr_gov ! mask area name for t, u, v ocn grid (go) - character(len=char_len) :: description + character (len=25) :: & + gridstr2D, gridstr ! temporary string names + character(len=char_len) :: description + character(len=char_len_long) :: tmpstr2 ! for namelist check + character(len=char_len) :: nml_name ! text namelist name character(len=*), parameter :: subname = '(init_hist)' @@ -226,24 +231,39 @@ subroutine init_hist (dt) file=__FILE__, line=__LINE__) if (my_task == master_task) then - write(nu_diag,*) subname,' Reading icefields_nml' + nml_name = 'icefields_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + ! open file call get_fileunit(nu_nml) open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: icefields_nml open file '// & + call abort_ice(subname//'ERROR: '//trim(nml_name)//' open file '// & trim(nml_filename), & file=__FILE__, line=__LINE__) endif + ! seek to this namelist + call goto_nml(nu_nml,trim(nml_name),nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & + file=__FILE__, line=__LINE__) + endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=icefields_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! 
backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: ' // trim(nml_name) // ' reading ' // & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: icefields_nml reading ', & - file=__FILE__, line=__LINE__) - endif + close(nu_nml) call release_fileunit(nu_nml) endif @@ -418,10 +438,6 @@ subroutine init_hist (dt) f_taubyE = f_tauby endif -#ifndef ncdf - f_bounds = .false. -#endif - ! write dimensions for 3D or 4D history variables ! note: list of variables checked here is incomplete if (f_aicen(1:1) /= 'x' .or. f_vicen(1:1) /= 'x' .or. & @@ -1307,21 +1323,25 @@ subroutine init_hist (dt) select case (grid_ice) case('B') description = ", on U grid (NE corner values)" + gridstr2d = trim(ustr2D) + gridstr = trim(ucstr) case ('CD','C') description = ", on T grid" + gridstr2d = trim(tstr2D) + gridstr = trim(tcstr) end select - call define_hist_field(n_sig1,"sig1","1",ustr2D, ucstr, & + call define_hist_field(n_sig1,"sig1","1",gridstr2d, gridstr, & "norm. principal stress 1", & "sig1 is instantaneous" // trim(description), c1, c0, & ns1, f_sig1) - call define_hist_field(n_sig2,"sig2","1",ustr2D, ucstr, & + call define_hist_field(n_sig2,"sig2","1",gridstr2d, gridstr, & "norm. principal stress 2", & "sig2 is instantaneous" // trim(description), c1, c0, & ns1, f_sig2) - call define_hist_field(n_sigP,"sigP","1",ustr2D, ucstr, & + call define_hist_field(n_sigP,"sigP","1",gridstr2d, gridstr, & "ice pressure", & "sigP is instantaneous" // trim(description), c1, c0, & ns1, f_sigP) @@ -2162,12 +2182,13 @@ subroutine accum_hist (dt) real (kind=dbl_kind) :: awtvdr, awtidr, awtvdf, awtidf, puny, secday, rad_to_deg real (kind=dbl_kind) :: Tffresh, rhoi, rhos, rhow, ice_ref_salinity - real (kind=dbl_kind) :: rho_ice, rho_ocn, Tice, Sbr, phi, rhob, dfresh, dfsalt + real (kind=dbl_kind) :: rho_ice, rho_ocn, Tice, Sbr, phi, rhob, dfresh, dfsalt, sicen logical (kind=log_kind) :: formdrag, skl_bgc logical (kind=log_kind) :: tr_pond, tr_aero, tr_brine, tr_snow integer (kind=int_kind) :: ktherm integer (kind=int_kind) :: nt_sice, nt_qice, nt_qsno, nt_iage, nt_FY, nt_Tsfc, & nt_alvl, nt_vlvl + character (len=char_len) :: saltflux_option type (block) :: & this_block ! 
block information for current block @@ -2179,6 +2200,7 @@ subroutine accum_hist (dt) call icepack_query_parameters(Tffresh_out=Tffresh, rhoi_out=rhoi, rhos_out=rhos, & rhow_out=rhow, ice_ref_salinity_out=ice_ref_salinity) call icepack_query_parameters(formdrag_out=formdrag, skl_bgc_out=skl_bgc, ktherm_out=ktherm) + call icepack_query_parameters(saltflux_option_out=saltflux_option) call icepack_query_tracer_flags(tr_pond_out=tr_pond, tr_aero_out=tr_aero, & tr_brine_out=tr_brine, tr_snow_out=tr_snow) call icepack_query_tracer_indices(nt_sice_out=nt_sice, nt_qice_out=nt_qice, & @@ -2263,7 +2285,7 @@ subroutine accum_hist (dt) !--------------------------------------------------------------- !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block, & - !$OMP k,n,qn,ns,sn,rho_ocn,rho_ice,Tice,Sbr,phi,rhob,dfresh,dfsalt, & + !$OMP k,n,qn,ns,sn,rho_ocn,rho_ice,Tice,Sbr,phi,rhob,dfresh,dfsalt,sicen, & !$OMP worka,workb,worka3,Tinz4d,Sinz4d,Tsnz4d) do iblk = 1, nblocks @@ -3222,7 +3244,16 @@ subroutine accum_hist (dt) dfresh = -rhoi*frazil(i,j,iblk)/dt endif endif - dfsalt = ice_ref_salinity*p001*dfresh + if (saltflux_option == 'prognostic') then + sicen = c0 + do k = 1, nzilyr + sicen = sicen + trcr(i,j,nt_sice+k-1,iblk)*vice(i,j,iblk) & + / real(nzilyr,kind=dbl_kind) + enddo + dfsalt = sicen*p001*dfresh + else + dfsalt = ice_ref_salinity*p001*dfresh + endif worka(i,j) = aice(i,j,iblk)*(fsalt(i,j,iblk)+dfsalt) endif enddo diff --git a/cicecore/cicedynB/analysis/ice_history_bgc.F90 b/cicecore/cicedyn/analysis/ice_history_bgc.F90 similarity index 99% rename from cicecore/cicedynB/analysis/ice_history_bgc.F90 rename to cicecore/cicedyn/analysis/ice_history_bgc.F90 index 003e76120..6974a087b 100644 --- a/cicecore/cicedynB/analysis/ice_history_bgc.F90 +++ b/cicecore/cicedyn/analysis/ice_history_bgc.F90 @@ -271,6 +271,7 @@ subroutine init_hist_bgc_2D use ice_communicate, only: my_task, master_task use ice_history_shared, only: tstr2D, tcstr, define_hist_field, & f_fsalt, f_fsalt_ai, f_sice + use ice_fileunits, only: goto_nml integer (kind=int_kind) :: n, ns integer (kind=int_kind) :: nml_error ! namelist i/o error flag @@ -283,6 +284,9 @@ subroutine init_hist_bgc_2D tr_bgc_DON, tr_bgc_Fe, tr_bgc_hum, & skl_bgc, solve_zsal, z_tracers + character(len=char_len) :: nml_name ! for namelist check + character(len=char_len_long) :: tmpstr2 ! for namelist check + character(len=*), parameter :: subname = '(init_hist_bgc_2D)' call icepack_query_parameters(skl_bgc_out=skl_bgc, & @@ -305,24 +309,39 @@ subroutine init_hist_bgc_2D !----------------------------------------------------------------- if (my_task == master_task) then - write(nu_diag,*) subname,' Reading icefields_bgc_nml' + nml_name = 'icefields_bgc_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + ! check if can open file call get_fileunit(nu_nml) open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: icefields_bgc_nml open file '// & + call abort_ice(subname//'ERROR: '//trim(nml_name)//' open file '// & trim(nml_filename), & file=__FILE__, line=__LINE__) endif + ! seek to namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & + file=__FILE__, line=__LINE__) + endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=icefields_bgc_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! 
backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: ' // trim(nml_name) // ' reading ' // & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: icefields_bgc_nml reading ', & - file=__FILE__, line=__LINE__) - endif + close(nu_nml) call release_fileunit(nu_nml) endif diff --git a/cicecore/cicedynB/analysis/ice_history_drag.F90 b/cicecore/cicedyn/analysis/ice_history_drag.F90 similarity index 92% rename from cicecore/cicedynB/analysis/ice_history_drag.F90 rename to cicecore/cicedyn/analysis/ice_history_drag.F90 index fba19b364..dd9e3cb59 100644 --- a/cicecore/cicedynB/analysis/ice_history_drag.F90 +++ b/cicecore/cicedyn/analysis/ice_history_drag.F90 @@ -64,10 +64,13 @@ subroutine init_hist_drag_2D use ice_calendar, only: nstreams use ice_communicate, only: my_task, master_task use ice_history_shared, only: tstr2D, tcstr, define_hist_field + use ice_fileunits, only: goto_nml integer (kind=int_kind) :: ns integer (kind=int_kind) :: nml_error ! namelist i/o error flag logical (kind=log_kind) :: formdrag + character(len=char_len_long) :: tmpstr2 ! for namelist check + character(len=char_len) :: nml_name ! for namelist check character(len=*), parameter :: subname = '(init_hist_drag_2D)' @@ -81,24 +84,39 @@ subroutine init_hist_drag_2D !----------------------------------------------------------------- if (my_task == master_task) then - write(nu_diag,*) subname,' Reading icefields_drag_nml' + nml_name = 'icefields_drag_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + ! open namelist file call get_fileunit(nu_nml) open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: icefields_drag_nml open file '// & + call abort_ice(subname//'ERROR: '//trim(nml_name)//' open file '// & trim(nml_filename), & file=__FILE__, line=__LINE__) endif + ! go to this namelist + call goto_nml(nu_nml,trim(nml_name),nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & + file=__FILE__, line=__LINE__) + endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=icefields_drag_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: ' // trim(nml_name) // ' reading ' // & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: icefields_drag_nml reading ', & - file=__FILE__, line=__LINE__) - endif + close(nu_nml) call release_fileunit(nu_nml) endif diff --git a/cicecore/cicedynB/analysis/ice_history_fsd.F90 b/cicecore/cicedyn/analysis/ice_history_fsd.F90 similarity index 95% rename from cicecore/cicedynB/analysis/ice_history_fsd.F90 rename to cicecore/cicedyn/analysis/ice_history_fsd.F90 index b52db4e05..610f56608 100644 --- a/cicecore/cicedynB/analysis/ice_history_fsd.F90 +++ b/cicecore/cicedyn/analysis/ice_history_fsd.F90 @@ -76,10 +76,13 @@ subroutine init_hist_fsd_2D use ice_calendar, only: nstreams use ice_communicate, only: my_task, master_task use ice_history_shared, only: tstr2D, tcstr, define_hist_field + use ice_fileunits, only: goto_nml integer (kind=int_kind) :: ns integer (kind=int_kind) :: nml_error ! 
namelist i/o error flag
      logical (kind=log_kind) :: tr_fsd, wave_spec
+     character (len=char_len_long) :: tmpstr2 ! for namelist check
+     character(len=char_len) :: nml_name ! for namelist check

      character(len=*), parameter :: subname = '(init_hist_fsd_2D)'

@@ -96,24 +99,39 @@
      !-----------------------------------------------------------------

      if (my_task == master_task) then

-        write(nu_diag,*) subname,' Reading icefields_fsd_nml'
+        nml_name = 'icefields_fsd_nml'
+        write(nu_diag,*) subname,' Reading ', trim(nml_name)

+        ! open namelist file
         call get_fileunit(nu_nml)
         open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error)
         if (nml_error /= 0) then
-           call abort_ice(subname//'ERROR: icefields_fsd_nml open file '// &
+           call abort_ice(subname//'ERROR: '//trim(nml_name)//' open file '// &
              trim(nml_filename), &
              file=__FILE__, line=__LINE__)
         endif

+        ! goto this namelist
+        call goto_nml(nu_nml,trim(nml_name),nml_error)
+        if (nml_error /= 0) then
+           call abort_ice(subname//'ERROR: searching for '// trim(nml_name), &
+             file=__FILE__, line=__LINE__)
+        endif
+
+        ! read namelist
         nml_error = 1
         do while (nml_error > 0)
            read(nu_nml, nml=icefields_fsd_nml,iostat=nml_error)
+           ! check if error
+           if (nml_error /= 0) then
+              ! backspace and re-read erroneous line
+              backspace(nu_nml)
+              read(nu_nml,fmt='(A)') tmpstr2
+              call abort_ice(subname//'ERROR: ' // trim(nml_name) // ' reading ' // &
+                trim(tmpstr2), file=__FILE__, line=__LINE__)
+           endif
         end do
-        if (nml_error /= 0) then
-           call abort_ice(subname//'ERROR: icefields_fsd_nml reading ', &
-             file=__FILE__, line=__LINE__)
-        endif
+
+        close(nu_nml)
         call release_fileunit(nu_nml)
      endif
diff --git a/cicecore/cicedynB/analysis/ice_history_mechred.F90 b/cicecore/cicedyn/analysis/ice_history_mechred.F90
similarity index 94%
rename from cicecore/cicedynB/analysis/ice_history_mechred.F90
rename to cicecore/cicedyn/analysis/ice_history_mechred.F90
index 98c58bc39..e0d15fcf2 100644
--- a/cicecore/cicedynB/analysis/ice_history_mechred.F90
+++ b/cicecore/cicedyn/analysis/ice_history_mechred.F90
@@ -84,11 +84,14 @@ subroutine init_hist_mechred_2D
      use ice_calendar, only: nstreams, histfreq
      use ice_communicate, only: my_task, master_task
      use ice_history_shared, only: tstr2D, tcstr, define_hist_field
+     use ice_fileunits, only: goto_nml

      integer (kind=int_kind) :: ns
      integer (kind=int_kind) :: nml_error ! namelist i/o error flag
      real (kind=dbl_kind) :: secday
      logical (kind=log_kind) :: tr_lvl
+     character(len=char_len_long) :: tmpstr2 ! for namelist check
+     character(len=char_len) :: nml_name ! for namelist check

      character(len=*), parameter :: subname = '(init_hist_mechred_2D)'

@@ -103,24 +106,39 @@
      !-----------------------------------------------------------------

      if (my_task == master_task) then

-        write(nu_diag,*) subname,' Reading icefields_mechred_nml'
+        nml_name = 'icefields_mechred_nml'
+        write(nu_diag,*) subname,' Reading ', trim(nml_name)

+        ! open namelist file
         call get_fileunit(nu_nml)
         open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error)
         if (nml_error /= 0) then
-           call abort_ice(subname//'ERROR: icefields_mechred_nml open file '// &
+           call abort_ice(subname//'ERROR: '//trim(nml_name)//' open file '// &
              trim(nml_filename), &
              file=__FILE__, line=__LINE__)
         endif

+        ! goto this namelist in file
+        call goto_nml(nu_nml,trim(nml_name),nml_error)
+        if (nml_error /= 0) then
+           call abort_ice(subname//'ERROR: searching for '// trim(nml_name), &
+             file=__FILE__, line=__LINE__)
+        endif
+
+        !
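!=======================================================================
! goto_nml itself (imported from ice_fileunits in each reader above) is
! not shown in this patch.  The sketch below is a plausible minimal
! implementation, assumed for orientation only; the actual routine may
! differ (e.g. case-insensitive matching is omitted here).  It scans
! for a line beginning '&' // nml_name and repositions the unit so a
! subsequent read(nml=...) finds the group.
      subroutine goto_nml_sketch(iunit, nml_name, status)
         implicit none
         integer, intent(in)  :: iunit      ! unit already open for formatted read
         character(len=*), intent(in)  :: nml_name
         integer, intent(out) :: status     ! 0 = found, nonzero = not found
         character(len=256) :: line
         rewind(iunit)
         do
            read(iunit, '(A)', iostat=status) line
            if (status /= 0) return         ! EOF or error: group not found
            line = adjustl(line)
            if (line(1:1) == '&' .and. &
                line(2:len_trim(nml_name)+1) == trim(nml_name)) then
               backspace(iunit)             ! reposition at the &group line
               status = 0
               return
            endif
         end do
      end subroutine goto_nml_sketch
!=======================================================================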
read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=icefields_mechred_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: ' // trim(nml_name) // ' reading ' // & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: icefields_mechred_nml reading ', & - file=__FILE__, line=__LINE__) - endif + close(nu_nml) call release_fileunit(nu_nml) endif diff --git a/cicecore/cicedynB/analysis/ice_history_pond.F90 b/cicecore/cicedyn/analysis/ice_history_pond.F90 similarity index 92% rename from cicecore/cicedynB/analysis/ice_history_pond.F90 rename to cicecore/cicedyn/analysis/ice_history_pond.F90 index 8818ff94e..d209e6db6 100644 --- a/cicecore/cicedynB/analysis/ice_history_pond.F90 +++ b/cicecore/cicedyn/analysis/ice_history_pond.F90 @@ -69,10 +69,13 @@ subroutine init_hist_pond_2D use ice_calendar, only: nstreams, histfreq use ice_communicate, only: my_task, master_task use ice_history_shared, only: tstr2D, tcstr, define_hist_field + use ice_fileunits, only: goto_nml integer (kind=int_kind) :: ns integer (kind=int_kind) :: nml_error ! namelist i/o error flag logical (kind=log_kind) :: tr_pond + character(len=char_len_long) :: tmpstr2 ! for namelist check + character(len=char_len) :: nml_name ! text namelist name character(len=*), parameter :: subname = '(init_hist_pond_2D)' @@ -86,24 +89,39 @@ subroutine init_hist_pond_2D !----------------------------------------------------------------- if (my_task == master_task) then - write(nu_diag,*) subname,' Reading icefields_pond_nml' + nml_name = 'icefields_pond_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + ! open namelist file call get_fileunit(nu_nml) open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: icefields_pond_nml open file '// & - trim(nml_filename), & - file=__FILE__, line=__LINE__) + call abort_ice(subname//'ERROR: '//trim(nml_name)//' open file '// & + trim(nml_filename), & + file=__FILE__, line=__LINE__) endif - + + ! goto this namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & + file=__FILE__, line=__LINE__) + endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=icefields_pond_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: ' // trim(nml_name) // ' reading ' // & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: icefields_pond_nml reading ', & - file=__FILE__, line=__LINE__) - endif + close(nu_nml) call release_fileunit(nu_nml) endif diff --git a/cicecore/cicedynB/analysis/ice_history_shared.F90 b/cicecore/cicedyn/analysis/ice_history_shared.F90 similarity index 99% rename from cicecore/cicedynB/analysis/ice_history_shared.F90 rename to cicecore/cicedyn/analysis/ice_history_shared.F90 index d9c62edde..70aa5e14c 100644 --- a/cicecore/cicedynB/analysis/ice_history_shared.F90 +++ b/cicecore/cicedyn/analysis/ice_history_shared.F90 @@ -40,7 +40,7 @@ module ice_history_shared logical (kind=log_kind), public :: & hist_avg ! 
if true, write averaged data instead of snapshots - character (len=char_len), public :: & + character (len=char_len_long), public :: & history_file , & ! output file for history incond_file ! output file for snapshot initial conditions diff --git a/cicecore/cicedynB/analysis/ice_history_snow.F90 b/cicecore/cicedyn/analysis/ice_history_snow.F90 similarity index 93% rename from cicecore/cicedynB/analysis/ice_history_snow.F90 rename to cicecore/cicedyn/analysis/ice_history_snow.F90 index 0ec4144bf..62e65b5a3 100644 --- a/cicecore/cicedynB/analysis/ice_history_snow.F90 +++ b/cicecore/cicedyn/analysis/ice_history_snow.F90 @@ -69,6 +69,7 @@ subroutine init_hist_snow_2D (dt) use ice_history_shared, only: tstr2D, tcstr, define_hist_field use ice_fileunits, only: nu_nml, nml_filename, & get_fileunit, release_fileunit + use ice_fileunits, only: goto_nml real (kind=dbl_kind), intent(in) :: & dt ! time step @@ -76,7 +77,10 @@ subroutine init_hist_snow_2D (dt) integer (kind=int_kind) :: ns integer (kind=int_kind) :: nml_error ! namelist i/o error flag real (kind=dbl_kind) :: rhofresh, secday - logical (kind=log_kind) :: tr_snow + logical (kind=log_kind) :: tr_snow + character(len=char_len_long) :: tmpstr2 ! for namelist check + character(len=char_len) :: nml_name ! for namelist check + character(len=*), parameter :: subname = '(init_hist_snow_2D)' call icepack_query_tracer_flags(tr_snow_out=tr_snow) @@ -92,26 +96,42 @@ subroutine init_hist_snow_2D (dt) !----------------------------------------------------------------- if (my_task == master_task) then - write(nu_diag,*) subname,' Reading icefields_snow_nml' + nml_name = 'icefields_snow_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + ! open namelist file call get_fileunit(nu_nml) open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: icefields_snow_nml open file '// & + call abort_ice(subname//'ERROR: '//trim(nml_name)//' open file '// & trim(nml_filename), & file=__FILE__, line=__LINE__) endif + ! goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & + file=__FILE__, line=__LINE__) + endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=icefields_snow_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: ' // trim(nml_name) // ' reading ' // & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: icefields_snow_nml reading ', & - file=__FILE__, line=__LINE__) - endif + close(nu_nml) call release_fileunit(nu_nml) + endif else ! .not. tr_snow diff --git a/cicecore/cicedynB/dynamics/ice_dyn_eap.F90 b/cicecore/cicedyn/dynamics/ice_dyn_eap.F90 similarity index 98% rename from cicecore/cicedynB/dynamics/ice_dyn_eap.F90 rename to cicecore/cicedyn/dynamics/ice_dyn_eap.F90 index 28a047c4e..e240fc8f1 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_eap.F90 +++ b/cicecore/cicedyn/dynamics/ice_dyn_eap.F90 @@ -25,6 +25,7 @@ module ice_dyn_eap p001, p027, p055, p111, p166, p222, p25, p333 use ice_fileunits, only: nu_diag, nu_dump_eap, nu_restart_eap use ice_exit, only: abort_ice + use ice_flux, only: rdg_shear ! use ice_timers, only: & ! ice_timer_start, ice_timer_stop, & ! 
timer_tmp1, timer_tmp2, timer_tmp3, timer_tmp4, & @@ -36,8 +37,7 @@ module ice_dyn_eap implicit none private - public :: eap, init_eap, write_restart_eap, read_restart_eap, & - alloc_dyn_eap + public :: eap, init_eap, write_restart_eap, read_restart_eap ! Look-up table needed for calculating structure tensor integer (int_kind), parameter :: & @@ -71,42 +71,16 @@ module ice_dyn_eap real (kind=dbl_kind) :: & puny, pi, pi2, piq, pih -!======================================================================= - - contains - -!======================================================================= -! Allocate space for all variables -! - subroutine alloc_dyn_eap + real (kind=dbl_kind), parameter :: & + kfriction = 0.45_dbl_kind - integer (int_kind) :: ierr + real (kind=dbl_kind), save :: & + invdx, invdy, invda, invsin - character(len=*), parameter :: subname = '(alloc_dyn_eap)' - allocate( a11_1 (nx_block,ny_block,max_blocks), & - a11_2 (nx_block,ny_block,max_blocks), & - a11_3 (nx_block,ny_block,max_blocks), & - a11_4 (nx_block,ny_block,max_blocks), & - a12_1 (nx_block,ny_block,max_blocks), & - a12_2 (nx_block,ny_block,max_blocks), & - a12_3 (nx_block,ny_block,max_blocks), & - a12_4 (nx_block,ny_block,max_blocks), & - e11 (nx_block,ny_block,max_blocks), & - e12 (nx_block,ny_block,max_blocks), & - e22 (nx_block,ny_block,max_blocks), & - yieldstress11(nx_block,ny_block,max_blocks), & - yieldstress12(nx_block,ny_block,max_blocks), & - yieldstress22(nx_block,ny_block,max_blocks), & - s11 (nx_block,ny_block,max_blocks), & - s12 (nx_block,ny_block,max_blocks), & - s22 (nx_block,ny_block,max_blocks), & - a11 (nx_block,ny_block,max_blocks), & - a12 (nx_block,ny_block,max_blocks), & - stat=ierr) - if (ierr/=0) call abort_ice(subname//' ERROR: Out of memory') +!======================================================================= - end subroutine alloc_dyn_eap + contains !======================================================================= ! Elastic-anisotropic-plastic dynamics driver @@ -134,7 +108,8 @@ subroutine eap (dt) dyn_prep1, dyn_prep2, stepu, dyn_finish, & seabed_stress_factor_LKD, seabed_stress_factor_prob, & seabed_stress_method, seabed_stress, & - stack_fields, unstack_fields, iceTmask, iceUmask + stack_fields, unstack_fields, iceTmask, iceUmask, & + fld2, fld3, fld4 use ice_flux, only: rdg_conv, strairxT, strairyT, & strairxU, strairyU, uocn, vocn, ss_tltx, ss_tlty, fmU, & strtltxU, strtltyU, strocnxU, strocnyU, strintxU, strintyU, taubxU, taubyU, & @@ -186,11 +161,6 @@ subroutine eap (dt) umass , & ! total mass of ice and snow (u grid) umassdti ! mass of U-cell/dte (kg/m^2 s) - real (kind=dbl_kind), allocatable :: & - fld2(:,:,:,:), & ! temporary for stacking fields for halo update - fld3(:,:,:,:), & ! temporary for stacking fields for halo update - fld4(:,:,:,:) ! temporary for stacking fields for halo update - real (kind=dbl_kind), dimension(nx_block,ny_block,8):: & strtmp ! stress combinations for momentum equation @@ -214,10 +184,6 @@ subroutine eap (dt) ! Initialize !----------------------------------------------------------------- - allocate(fld2(nx_block,ny_block,2,max_blocks)) - allocate(fld3(nx_block,ny_block,3,max_blocks)) - allocate(fld4(nx_block,ny_block,4,max_blocks)) - ! This call is needed only if dt changes during runtime. ! call set_evp_parameters (dt) @@ -226,7 +192,7 @@ subroutine eap (dt) do j = 1, ny_block do i = 1, nx_block rdg_conv (i,j,iblk) = c0 -! rdg_shear(i,j,iblk) = c0 + rdg_shear(i,j,iblk) = c0 ! always zero. 
Could be moved divu (i,j,iblk) = c0 shear(i,j,iblk) = c0 e11(i,j,iblk) = c0 @@ -554,7 +520,6 @@ subroutine eap (dt) enddo ! subcycling - deallocate(fld2,fld3,fld4) if (maskhalo_dyn) call ice_HaloDestroy(halo_info_mask) !----------------------------------------------------------------- @@ -588,6 +553,8 @@ subroutine init_eap use ice_blocks, only: nx_block, ny_block use ice_domain, only: nblocks + use ice_calendar, only: dt_dyn + use ice_dyn_shared, only: init_dyn_shared ! local variables @@ -599,7 +566,7 @@ subroutine init_eap eps6 = 1.0e-6_dbl_kind integer (kind=int_kind) :: & - ix, iy, iz, ia + ix, iy, iz, ia, ierr integer (kind=int_kind), parameter :: & nz = 100 @@ -609,6 +576,8 @@ subroutine init_eap da, dx, dy, dz, & phi + real (kind=dbl_kind) :: invstressconviso + character(len=*), parameter :: subname = '(init_eap)' call icepack_query_parameters(puny_out=puny, & @@ -619,6 +588,31 @@ subroutine init_eap phi = pi/c12 ! diamond shaped floe smaller angle (default phi = 30 deg) + call init_dyn_shared(dt_dyn) + + allocate( a11_1 (nx_block,ny_block,max_blocks), & + a11_2 (nx_block,ny_block,max_blocks), & + a11_3 (nx_block,ny_block,max_blocks), & + a11_4 (nx_block,ny_block,max_blocks), & + a12_1 (nx_block,ny_block,max_blocks), & + a12_2 (nx_block,ny_block,max_blocks), & + a12_3 (nx_block,ny_block,max_blocks), & + a12_4 (nx_block,ny_block,max_blocks), & + e11 (nx_block,ny_block,max_blocks), & + e12 (nx_block,ny_block,max_blocks), & + e22 (nx_block,ny_block,max_blocks), & + yieldstress11(nx_block,ny_block,max_blocks), & + yieldstress12(nx_block,ny_block,max_blocks), & + yieldstress22(nx_block,ny_block,max_blocks), & + s11 (nx_block,ny_block,max_blocks), & + s12 (nx_block,ny_block,max_blocks), & + s22 (nx_block,ny_block,max_blocks), & + a11 (nx_block,ny_block,max_blocks), & + a12 (nx_block,ny_block,max_blocks), & + stat=ierr) + if (ierr/=0) call abort_ice(subname//' ERROR: Out of memory') + + !$OMP PARALLEL DO PRIVATE(iblk,i,j) SCHEDULE(runtime) do iblk = 1, nblocks do j = 1, ny_block @@ -640,6 +634,7 @@ subroutine init_eap a12_2 (i,j,iblk) = c0 a12_3 (i,j,iblk) = c0 a12_4 (i,j,iblk) = c0 + rdg_shear (i,j,iblk) = c0 enddo ! i enddo ! j enddo ! iblk @@ -657,6 +652,9 @@ subroutine init_eap zinit = -pih dy = pi/real(ny_yield-1,kind=dbl_kind) yinit = -dy + invdx = c1/dx + invdy = c1/dy + invda = c1/da do ia=1,na_yield do ix=1,nx_yield @@ -712,6 +710,12 @@ subroutine init_eap enddo enddo + ! Factor to maintain the same stress as in EVP (see Section 3) + ! Can be set to 1 otherwise + + invstressconviso = c1/(c1+kfriction*kfriction) + invsin = c1/sin(pi2/c12) * invstressconviso + end subroutine init_eap !======================================================================= @@ -1590,22 +1594,12 @@ subroutine update_stress_rdg (ksub, ndte, divu, tension, & rotstemp11s, rotstemp12s, rotstemp22s, & sig11, sig12, sig22, & sgprm11, sgprm12, sgprm22, & - invstressconviso, & Angle_denom_gamma, Angle_denom_alpha, & Tany_1, Tany_2, & x, y, dx, dy, da, & dtemp1, dtemp2, atempprime, & kxw, kyw, kaw - real (kind=dbl_kind), save :: & - invdx, invdy, invda, invsin - - logical (kind=log_kind), save :: & - first_call = .true. - - real (kind=dbl_kind), parameter :: & - kfriction = 0.45_dbl_kind - ! tcraig, temporary, should be moved to namelist ! turns on interpolation in stress_rdg logical(kind=log_kind), parameter :: & @@ -1613,14 +1607,6 @@ subroutine update_stress_rdg (ksub, ndte, divu, tension, & character(len=*), parameter :: subname = '(update_stress_rdg)' - ! 
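!=======================================================================
! With invdx, invdy and invda now computed once in init_eap (above),
! a yield-surface table index in update_stress_rdg reduces to one
! multiply instead of a division per call.  Hypothetical nearest-node
! variant; the bounds clamping is an assumption, and the routine also
! supports interpolated lookup via interpolate_stress_rdg.
      pure function table_index(v, vinit, invdv, nmax) result(iv)
         integer, parameter :: dbl_kind = selected_real_kind(13)
         real (kind=dbl_kind), intent(in) :: v      ! lookup coordinate (e.g. x, y or atempprime)
         real (kind=dbl_kind), intent(in) :: vinit  ! coordinate of the first table node
         real (kind=dbl_kind), intent(in) :: invdv  ! precomputed 1/spacing (invdx, invdy or invda)
         integer, intent(in) :: nmax                ! table extent (nx_yield, ny_yield or na_yield)
         integer :: iv
         iv = int((v - vinit) * invdv) + 1
         iv = max(1, min(iv, nmax))                 ! clamp to table bounds (assumption)
      end function table_index
!=======================================================================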
Factor to maintain the same stress as in EVP (see Section 3) - ! Can be set to 1 otherwise - - if (first_call) then - invstressconviso = c1/(c1+kfriction*kfriction) - invsin = c1/sin(pi2/c12) * invstressconviso - endif - ! compute eigenvalues, eigenvectors and angles for structure tensor, strain rates ! 1) structure tensor @@ -1717,17 +1703,6 @@ subroutine update_stress_rdg (ksub, ndte, divu, tension, & if (y > pi) y = y - pi if (y < 0) y = y + pi - ! Now calculate updated stress tensor - - if (first_call) then - dx = pi/real(nx_yield-1,kind=dbl_kind) - dy = pi/real(ny_yield-1,kind=dbl_kind) - da = p5/real(na_yield-1,kind=dbl_kind) - invdx = c1/dx - invdy = c1/dy - invda = c1/da - endif - if (interpolate_stress_rdg) then ! Interpolated lookup @@ -1869,8 +1844,6 @@ subroutine update_stress_rdg (ksub, ndte, divu, tension, & + rotstemp22s*dtemp22 endif - first_call = .false. - end subroutine update_stress_rdg !======================================================================= diff --git a/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 b/cicecore/cicedyn/dynamics/ice_dyn_evp.F90 similarity index 83% rename from cicecore/cicedynB/dynamics/ice_dyn_evp.F90 rename to cicecore/cicedyn/dynamics/ice_dyn_evp.F90 index 8eab5e260..cf111cccf 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 +++ b/cicecore/cicedyn/dynamics/ice_dyn_evp.F90 @@ -55,7 +55,61 @@ module ice_dyn_evp implicit none private - public :: evp +! all c or cd + real (kind=dbl_kind), allocatable :: & + uocnN (:,:,:) , & ! i ocean current (m/s) + vocnN (:,:,:) , & ! j ocean current (m/s) + ss_tltxN (:,:,:) , & ! sea surface slope, x-direction (m/m) + ss_tltyN (:,:,:) , & ! sea surface slope, y-direction (m/m) + cdn_ocnN (:,:,:) , & ! ocn drag coefficient + waterxN (:,:,:) , & ! for ocean stress calculation, x (m/s) + wateryN (:,:,:) , & ! for ocean stress calculation, y (m/s) + forcexN (:,:,:) , & ! work array: combined atm stress and ocn tilt, x + forceyN (:,:,:) , & ! work array: combined atm stress and ocn tilt, y + aiN (:,:,:) , & ! ice fraction on N-grid + nmass (:,:,:) , & ! total mass of ice and snow (N grid) + nmassdti (:,:,:) ! mass of N-cell/dte (kg/m^2 s) +! all c or d + real (kind=dbl_kind), allocatable :: & + uocnE (:,:,:) , & ! i ocean current (m/s) + vocnE (:,:,:) , & ! j ocean current (m/s) + ss_tltxE (:,:,:) , & ! sea surface slope, x-direction (m/m) + ss_tltyE (:,:,:) , & ! sea surface slope, y-direction (m/m) + cdn_ocnE (:,:,:) , & ! ocn drag coefficient + waterxE (:,:,:) , & ! for ocean stress calculation, x (m/s) + wateryE (:,:,:) , & ! for ocean stress calculation, y (m/s) + forcexE (:,:,:) , & ! work array: combined atm stress and ocn tilt, x + forceyE (:,:,:) , & ! work array: combined atm stress and ocn tilt, y + aiE (:,:,:) , & ! ice fraction on E-grid + emass (:,:,:) , & ! total mass of ice and snow (E grid) + emassdti (:,:,:) ! mass of E-cell/dte (kg/m^2 s) + + real (kind=dbl_kind), allocatable :: & + strengthU(:,:,:) , & ! strength averaged to U points + divergU (:,:,:) , & ! div array on U points, differentiate from divu + tensionU (:,:,:) , & ! tension array on U points + shearU (:,:,:) , & ! shear array on U points + deltaU (:,:,:) , & ! delta array on U points + zetax2T (:,:,:) , & ! zetax2 = 2*zeta (bulk viscosity) + zetax2U (:,:,:) , & ! zetax2T averaged to U points + etax2T (:,:,:) , & ! etax2 = 2*eta (shear viscosity) + etax2U (:,:,:) ! etax2T averaged to U points + + real (kind=dbl_kind), allocatable :: & + uocnU (:,:,:) , & ! i ocean current (m/s) + vocnU (:,:,:) , & ! 
j ocean current (m/s)
+     ss_tltxU (:,:,:) , & ! sea surface slope, x-direction (m/m)
+     ss_tltyU (:,:,:) , & ! sea surface slope, y-direction (m/m)
+     cdn_ocnU (:,:,:) , & ! ocn drag coefficient
+     tmass    (:,:,:) , & ! total mass of ice and snow (kg/m^2)
+     waterxU  (:,:,:) , & ! for ocean stress calculation, x (m/s)
+     wateryU  (:,:,:) , & ! for ocean stress calculation, y (m/s)
+     forcexU  (:,:,:) , & ! work array: combined atm stress and ocn tilt, x
+     forceyU  (:,:,:) , & ! work array: combined atm stress and ocn tilt, y
+     umass    (:,:,:) , & ! total mass of ice and snow (u grid)
+     umassdti (:,:,:)     ! mass of U-cell/dte (kg/m^2 s)
+
+     public :: evp, init_evp

 !=======================================================================

@@ -64,6 +118,84 @@ module ice_dyn_evp
 !=======================================================================
 ! Elastic-viscous-plastic dynamics driver
 !
+     subroutine init_evp
+     use ice_blocks, only: nx_block, ny_block
+     use ice_domain_size, only: max_blocks
+     use ice_grid, only: grid_ice
+     use ice_calendar, only: dt_dyn
+     use ice_dyn_shared, only: init_dyn_shared
+
+     ! allocate C and CD grid variables, following the structure of eap
+     integer (int_kind) :: ierr
+
+     character(len=*), parameter :: subname = '(init_evp)'
+
+     call init_dyn_shared(dt_dyn)
+
+     allocate( uocnU    (nx_block,ny_block,max_blocks), & ! i ocean current (m/s)
+               vocnU    (nx_block,ny_block,max_blocks), & ! j ocean current (m/s)
+               ss_tltxU (nx_block,ny_block,max_blocks), & ! sea surface slope, x-direction (m/m)
+               ss_tltyU (nx_block,ny_block,max_blocks), & ! sea surface slope, y-direction (m/m)
+               cdn_ocnU (nx_block,ny_block,max_blocks), & ! ocn drag coefficient
+               tmass    (nx_block,ny_block,max_blocks), & ! total mass of ice and snow (kg/m^2)
+               waterxU  (nx_block,ny_block,max_blocks), & ! for ocean stress calculation, x (m/s)
+               wateryU  (nx_block,ny_block,max_blocks), & ! for ocean stress calculation, y (m/s)
+               forcexU  (nx_block,ny_block,max_blocks), & ! work array: combined atm stress and ocn tilt, x
+               forceyU  (nx_block,ny_block,max_blocks), & ! work array: combined atm stress and ocn tilt, y
+               umass    (nx_block,ny_block,max_blocks), & ! total mass of ice and snow (u grid)
+               umassdti (nx_block,ny_block,max_blocks), & ! mass of U-cell/dte (kg/m^2 s)
+               stat=ierr)
+     if (ierr/=0) call abort_ice(subname//' ERROR: Out of memory B-Grid evp')
+
+
+     if (grid_ice == 'CD' .or.
grid_ice == 'C') then + + allocate( strengthU(nx_block,ny_block,max_blocks), & + divergU (nx_block,ny_block,max_blocks), & + tensionU (nx_block,ny_block,max_blocks), & + shearU (nx_block,ny_block,max_blocks), & + deltaU (nx_block,ny_block,max_blocks), & + zetax2T (nx_block,ny_block,max_blocks), & + zetax2U (nx_block,ny_block,max_blocks), & + etax2T (nx_block,ny_block,max_blocks), & + etax2U (nx_block,ny_block,max_blocks), & + stat=ierr) + if (ierr/=0) call abort_ice(subname//' ERROR: Out of memory U evp') + + allocate( uocnN (nx_block,ny_block,max_blocks), & + vocnN (nx_block,ny_block,max_blocks), & + ss_tltxN (nx_block,ny_block,max_blocks), & + ss_tltyN (nx_block,ny_block,max_blocks), & + cdn_ocnN (nx_block,ny_block,max_blocks), & + waterxN (nx_block,ny_block,max_blocks), & + wateryN (nx_block,ny_block,max_blocks), & + forcexN (nx_block,ny_block,max_blocks), & + forceyN (nx_block,ny_block,max_blocks), & + aiN (nx_block,ny_block,max_blocks), & + nmass (nx_block,ny_block,max_blocks), & + nmassdti (nx_block,ny_block,max_blocks), & + stat=ierr) + if (ierr/=0) call abort_ice(subname//' ERROR: Out of memory N evp') + + allocate( uocnE (nx_block,ny_block,max_blocks), & + vocnE (nx_block,ny_block,max_blocks), & + ss_tltxE (nx_block,ny_block,max_blocks), & + ss_tltyE (nx_block,ny_block,max_blocks), & + cdn_ocnE (nx_block,ny_block,max_blocks), & + waterxE (nx_block,ny_block,max_blocks), & + wateryE (nx_block,ny_block,max_blocks), & + forcexE (nx_block,ny_block,max_blocks), & + forceyE (nx_block,ny_block,max_blocks), & + aiE (nx_block,ny_block,max_blocks), & + emass (nx_block,ny_block,max_blocks), & + emassdti (nx_block,ny_block,max_blocks), & + stat=ierr) + if (ierr/=0) call abort_ice(subname//' ERROR: Out of memory E evp') + + endif + + end subroutine init_evp + #ifdef CICE_IN_NEMO ! Wind stress is set during this routine from the values supplied ! via NEMO (unless calc_strair is true). These values are supplied @@ -116,7 +248,7 @@ subroutine evp (dt) DminTarea, visc_method, deformations, deformationsC_T, deformationsCD_T, & strain_rates_U, & iceTmask, iceUmask, iceEmask, iceNmask, & - dyn_haloUpdate + dyn_haloUpdate, fld2, fld3, fld4 real (kind=dbl_kind), intent(in) :: & dt ! time step @@ -145,64 +277,6 @@ subroutine evp (dt) indxUi , & ! compressed index in i-direction indxUj ! compressed index in j-direction - real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & - uocnU , & ! i ocean current (m/s) - vocnU , & ! j ocean current (m/s) - ss_tltxU , & ! sea surface slope, x-direction (m/m) - ss_tltyU , & ! sea surface slope, y-direction (m/m) - cdn_ocnU , & ! ocn drag coefficient - tmass , & ! total mass of ice and snow (kg/m^2) - waterxU , & ! for ocean stress calculation, x (m/s) - wateryU , & ! for ocean stress calculation, y (m/s) - forcexU , & ! work array: combined atm stress and ocn tilt, x - forceyU , & ! work array: combined atm stress and ocn tilt, y - umass , & ! total mass of ice and snow (u grid) - umassdti ! mass of U-cell/dte (kg/m^2 s) - - real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & - uocnN , & ! i ocean current (m/s) - vocnN , & ! j ocean current (m/s) - ss_tltxN , & ! sea surface slope, x-direction (m/m) - ss_tltyN , & ! sea surface slope, y-direction (m/m) - cdn_ocnN , & ! ocn drag coefficient - waterxN , & ! for ocean stress calculation, x (m/s) - wateryN , & ! for ocean stress calculation, y (m/s) - forcexN , & ! work array: combined atm stress and ocn tilt, x - forceyN , & ! work array: combined atm stress and ocn tilt, y - aiN , & ! 
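!=======================================================================
! With allocation folded into init_evp above, dynamics memory is set
! up once at initialization rather than on every call.  Schematic
! wiring, assuming the existing integer selector kdyn (1 = evp,
! 2 = eap, 3 = vp); the actual call sites live in the CICE_InitMod
! drivers, outside this patch:
!        if (kdyn == 1) call init_evp
!        if (kdyn == 2) call init_eap
!        if (kdyn == 3) call init_vp
! Each init_* calls init_dyn_shared(dt_dyn) internally, which in turn
! calls set_evp_parameters and alloc_dyn_shared, so the former
! per-timestep allocate/deallocate of work arrays is gone.
!=======================================================================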
ice fraction on N-grid - nmass , & ! total mass of ice and snow (N grid) - nmassdti ! mass of N-cell/dte (kg/m^2 s) - - real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & - uocnE , & ! i ocean current (m/s) - vocnE , & ! j ocean current (m/s) - ss_tltxE , & ! sea surface slope, x-direction (m/m) - ss_tltyE , & ! sea surface slope, y-direction (m/m) - cdn_ocnE , & ! ocn drag coefficient - waterxE , & ! for ocean stress calculation, x (m/s) - wateryE , & ! for ocean stress calculation, y (m/s) - forcexE , & ! work array: combined atm stress and ocn tilt, x - forceyE , & ! work array: combined atm stress and ocn tilt, y - aiE , & ! ice fraction on E-grid - emass , & ! total mass of ice and snow (E grid) - emassdti ! mass of E-cell/dte (kg/m^2 s) - - real (kind=dbl_kind), allocatable :: & - fld2(:,:,:,:), & ! 2 bundled fields - fld3(:,:,:,:), & ! 3 bundled fields - fld4(:,:,:,:) ! 4 bundled fields - - real (kind=dbl_kind), allocatable :: & - strengthU(:,:,:), & ! strength averaged to U points - divergU (:,:,:), & ! div array on U points, differentiate from divu - tensionU (:,:,:), & ! tension array on U points - shearU (:,:,:), & ! shear array on U points - deltaU (:,:,:), & ! delta array on U points - zetax2T (:,:,:), & ! zetax2 = 2*zeta (bulk viscosity) - zetax2U (:,:,:), & ! zetax2T averaged to U points - etax2T (:,:,:), & ! etax2 = 2*eta (shear viscosity) - etax2U (:,:,:) ! etax2T averaged to U points - real (kind=dbl_kind), dimension(nx_block,ny_block,8):: & strtmp ! stress combinations for momentum equation @@ -218,9 +292,6 @@ subroutine evp (dt) type (block) :: & this_block ! block information for current block - logical (kind=log_kind), save :: & - first_time = .true. ! first time logical - character(len=*), parameter :: subname = '(evp)' call ice_timer_start(timer_dynamics) ! dynamics @@ -229,21 +300,8 @@ subroutine evp (dt) ! Initialize !----------------------------------------------------------------- - allocate(fld2(nx_block,ny_block,2,max_blocks)) - allocate(fld3(nx_block,ny_block,3,max_blocks)) - allocate(fld4(nx_block,ny_block,4,max_blocks)) - if (grid_ice == 'CD' .or. grid_ice == 'C') then - allocate(strengthU(nx_block,ny_block,max_blocks)) - allocate(divergU (nx_block,ny_block,max_blocks)) - allocate(tensionU (nx_block,ny_block,max_blocks)) - allocate(shearU (nx_block,ny_block,max_blocks)) - allocate(deltaU (nx_block,ny_block,max_blocks)) - allocate(zetax2T (nx_block,ny_block,max_blocks)) - allocate(zetax2U (nx_block,ny_block,max_blocks)) - allocate(etax2T (nx_block,ny_block,max_blocks)) - allocate(etax2U (nx_block,ny_block,max_blocks)) strengthU(:,:,:) = c0 divergU (:,:,:) = c0 tensionU (:,:,:) = c0 @@ -383,20 +441,20 @@ subroutine evp (dt) endif endif - !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block,ij,i,j) SCHEDULE(runtime) - do iblk = 1, nblocks + if (trim(grid_ice) == 'B') then + !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block,ij,i,j) SCHEDULE(runtime) + do iblk = 1, nblocks - !----------------------------------------------------------------- - ! more preparation for dynamics - !----------------------------------------------------------------- + !----------------------------------------------------------------- + ! 
more preparation for dynamics + !----------------------------------------------------------------- - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi - if (trim(grid_ice) == 'B') then call dyn_prep2 (nx_block, ny_block, & ilo, ihi, jlo, jhi, & icellT (iblk), icellU (iblk), & @@ -409,7 +467,7 @@ subroutine evp (dt) strairxU (:,:,iblk), strairyU (:,:,iblk), & ss_tltxU (:,:,iblk), ss_tltyU (:,:,iblk), & iceTmask (:,:,iblk), iceUmask (:,:,iblk), & - fmU (:,:,iblk), dt, & + fmU (:,:,iblk), dt , & strtltxU (:,:,iblk), strtltyU (:,:,iblk), & strocnxU (:,:,iblk), strocnyU (:,:,iblk), & strintxU (:,:,iblk), strintyU (:,:,iblk), & @@ -426,7 +484,35 @@ subroutine evp (dt) uvel (:,:,iblk), vvel (:,:,iblk), & TbU (:,:,iblk)) - elseif (trim(grid_ice) == 'CD' .or. grid_ice == 'C') then + !----------------------------------------------------------------- + ! ice strength + !----------------------------------------------------------------- + + strength(:,:,iblk) = c0 ! initialize + do ij = 1, icellT(iblk) + i = indxTi(ij, iblk) + j = indxTj(ij, iblk) + call icepack_ice_strength(ncat = ncat, & + aice = aice (i,j, iblk), & + vice = vice (i,j, iblk), & + aice0 = aice0 (i,j, iblk), & + aicen = aicen (i,j,:,iblk), & + vicen = vicen (i,j,:,iblk), & + strength = strength(i,j, iblk)) + enddo ! ij + + enddo ! iblk + !$OMP END PARALLEL DO + elseif (trim(grid_ice) == 'CD' .or. grid_ice == 'C') then + !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block,ij,i,j) SCHEDULE(runtime) + do iblk = 1, nblocks + + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + call dyn_prep2 (nx_block, ny_block, & ilo, ihi, jlo, jhi, & icellT (iblk), icellU (iblk), & @@ -455,122 +541,108 @@ subroutine evp (dt) uvel_init (:,:,iblk), vvel_init (:,:,iblk), & uvel (:,:,iblk), vvel (:,:,iblk), & TbU (:,:,iblk)) - endif - !----------------------------------------------------------------- - ! ice strength - !----------------------------------------------------------------- + !----------------------------------------------------------------- + ! ice strength + !----------------------------------------------------------------- - strength(:,:,iblk) = c0 ! initialize - do ij = 1, icellT(iblk) - i = indxTi(ij, iblk) - j = indxTj(ij, iblk) - call icepack_ice_strength(ncat = ncat, & - aice = aice (i,j, iblk), & - vice = vice (i,j, iblk), & - aice0 = aice0 (i,j, iblk), & - aicen = aicen (i,j,:,iblk), & - vicen = vicen (i,j,:,iblk), & - strength = strength(i,j, iblk) ) - enddo ! ij - - enddo ! iblk - !$OMP END PARALLEL DO + strength(:,:,iblk) = c0 ! initialize + do ij = 1, icellT(iblk) + i = indxTi(ij, iblk) + j = indxTj(ij, iblk) + call icepack_ice_strength(ncat = ncat, & + aice = aice (i,j, iblk), & + vice = vice (i,j, iblk), & + aice0 = aice0 (i,j, iblk), & + aicen = aicen (i,j,:,iblk), & + vicen = vicen (i,j,:,iblk), & + strength = strength(i,j, iblk) ) + enddo ! ij - if (grid_ice == 'CD' .or. grid_ice == 'C') then - !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block,i,j) SCHEDULE(runtime) - do iblk = 1, nblocks + !----------------------------------------------------------------- + ! 
more preparation for dynamics on N grid + !----------------------------------------------------------------- - !----------------------------------------------------------------- - ! more preparation for dynamics on N grid - !----------------------------------------------------------------- + call dyn_prep2 (nx_block, ny_block, & + ilo, ihi, jlo, jhi, & + icellT (iblk), icellN (iblk), & + indxTi (:,iblk), indxTj (:,iblk), & + indxNi (:,iblk), indxNj (:,iblk), & + aiN (:,:,iblk), nmass (:,:,iblk), & + nmassdti (:,:,iblk), fcorN_blk (:,:,iblk), & + nmask (:,:,iblk), & + uocnN (:,:,iblk), vocnN (:,:,iblk), & + strairxN (:,:,iblk), strairyN (:,:,iblk), & + ss_tltxN (:,:,iblk), ss_tltyN (:,:,iblk), & + iceTmask (:,:,iblk), iceNmask (:,:,iblk), & + fmN (:,:,iblk), dt , & + strtltxN (:,:,iblk), strtltyN (:,:,iblk), & + strocnxN (:,:,iblk), strocnyN (:,:,iblk), & + strintxN (:,:,iblk), strintyN (:,:,iblk), & + taubxN (:,:,iblk), taubyN (:,:,iblk), & + waterxN (:,:,iblk), wateryN (:,:,iblk), & + forcexN (:,:,iblk), forceyN (:,:,iblk), & + stressp_1 (:,:,iblk), stressp_2 (:,:,iblk), & + stressp_3 (:,:,iblk), stressp_4 (:,:,iblk), & + stressm_1 (:,:,iblk), stressm_2 (:,:,iblk), & + stressm_3 (:,:,iblk), stressm_4 (:,:,iblk), & + stress12_1(:,:,iblk), stress12_2(:,:,iblk), & + stress12_3(:,:,iblk), stress12_4(:,:,iblk), & + uvelN_init(:,:,iblk), vvelN_init(:,:,iblk), & + uvelN (:,:,iblk), vvelN (:,:,iblk), & + TbN (:,:,iblk)) - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi + !----------------------------------------------------------------- + ! more preparation for dynamics on E grid + !----------------------------------------------------------------- - call dyn_prep2 (nx_block, ny_block, & - ilo, ihi, jlo, jhi, & - icellT (iblk), icellN (iblk), & - indxTi (:,iblk), indxTj (:,iblk), & - indxNi (:,iblk), indxNj (:,iblk), & - aiN (:,:,iblk), nmass (:,:,iblk), & - nmassdti (:,:,iblk), fcorN_blk (:,:,iblk), & - nmask (:,:,iblk), & - uocnN (:,:,iblk), vocnN (:,:,iblk), & - strairxN (:,:,iblk), strairyN (:,:,iblk), & - ss_tltxN (:,:,iblk), ss_tltyN (:,:,iblk), & - iceTmask (:,:,iblk), iceNmask (:,:,iblk), & - fmN (:,:,iblk), dt, & - strtltxN (:,:,iblk), strtltyN (:,:,iblk), & - strocnxN (:,:,iblk), strocnyN (:,:,iblk), & - strintxN (:,:,iblk), strintyN (:,:,iblk), & - taubxN (:,:,iblk), taubyN (:,:,iblk), & - waterxN (:,:,iblk), wateryN (:,:,iblk), & - forcexN (:,:,iblk), forceyN (:,:,iblk), & - stressp_1 (:,:,iblk), stressp_2 (:,:,iblk), & - stressp_3 (:,:,iblk), stressp_4 (:,:,iblk), & - stressm_1 (:,:,iblk), stressm_2 (:,:,iblk), & - stressm_3 (:,:,iblk), stressm_4 (:,:,iblk), & - stress12_1(:,:,iblk), stress12_2(:,:,iblk), & - stress12_3(:,:,iblk), stress12_4(:,:,iblk), & - uvelN_init(:,:,iblk), vvelN_init(:,:,iblk), & - uvelN (:,:,iblk), vvelN (:,:,iblk), & - TbN (:,:,iblk)) + call dyn_prep2 (nx_block, ny_block, & + ilo, ihi, jlo, jhi, & + icellT (iblk), icellE (iblk), & + indxTi (:,iblk), indxTj (:,iblk), & + indxEi (:,iblk), indxEj (:,iblk), & + aiE (:,:,iblk), emass (:,:,iblk), & + emassdti (:,:,iblk), fcorE_blk (:,:,iblk), & + emask (:,:,iblk), & + uocnE (:,:,iblk), vocnE (:,:,iblk), & + strairxE (:,:,iblk), strairyE (:,:,iblk), & + ss_tltxE (:,:,iblk), ss_tltyE (:,:,iblk), & + iceTmask (:,:,iblk), iceEmask (:,:,iblk), & + fmE (:,:,iblk), dt , & + strtltxE (:,:,iblk), strtltyE (:,:,iblk), & + strocnxE (:,:,iblk), strocnyE (:,:,iblk), & + strintxE (:,:,iblk), strintyE (:,:,iblk), & + taubxE (:,:,iblk), taubyE 
(:,:,iblk), & + waterxE (:,:,iblk), wateryE (:,:,iblk), & + forcexE (:,:,iblk), forceyE (:,:,iblk), & + stressp_1 (:,:,iblk), stressp_2 (:,:,iblk), & + stressp_3 (:,:,iblk), stressp_4 (:,:,iblk), & + stressm_1 (:,:,iblk), stressm_2 (:,:,iblk), & + stressm_3 (:,:,iblk), stressm_4 (:,:,iblk), & + stress12_1(:,:,iblk), stress12_2(:,:,iblk), & + stress12_3(:,:,iblk), stress12_4(:,:,iblk), & + uvelE_init(:,:,iblk), vvelE_init(:,:,iblk), & + uvelE (:,:,iblk), vvelE (:,:,iblk), & + TbE (:,:,iblk)) - !----------------------------------------------------------------- - ! more preparation for dynamics on E grid - !----------------------------------------------------------------- - call dyn_prep2 (nx_block, ny_block, & - ilo, ihi, jlo, jhi, & - icellT (iblk), icellE (iblk), & - indxTi (:,iblk), indxTj (:,iblk), & - indxEi (:,iblk), indxEj (:,iblk), & - aiE (:,:,iblk), emass (:,:,iblk), & - emassdti (:,:,iblk), fcorE_blk (:,:,iblk), & - emask (:,:,iblk), & - uocnE (:,:,iblk), vocnE (:,:,iblk), & - strairxE (:,:,iblk), strairyE (:,:,iblk), & - ss_tltxE (:,:,iblk), ss_tltyE (:,:,iblk), & - iceTmask (:,:,iblk), iceEmask (:,:,iblk), & - fmE (:,:,iblk), dt, & - strtltxE (:,:,iblk), strtltyE (:,:,iblk), & - strocnxE (:,:,iblk), strocnyE (:,:,iblk), & - strintxE (:,:,iblk), strintyE (:,:,iblk), & - taubxE (:,:,iblk), taubyE (:,:,iblk), & - waterxE (:,:,iblk), wateryE (:,:,iblk), & - forcexE (:,:,iblk), forceyE (:,:,iblk), & - stressp_1 (:,:,iblk), stressp_2 (:,:,iblk), & - stressp_3 (:,:,iblk), stressp_4 (:,:,iblk), & - stressm_1 (:,:,iblk), stressm_2 (:,:,iblk), & - stressm_3 (:,:,iblk), stressm_4 (:,:,iblk), & - stress12_1(:,:,iblk), stress12_2(:,:,iblk), & - stress12_3(:,:,iblk), stress12_4(:,:,iblk), & - uvelE_init(:,:,iblk), vvelE_init(:,:,iblk), & - uvelE (:,:,iblk), vvelE (:,:,iblk), & - TbE (:,:,iblk)) - - - do i=1,nx_block - do j=1,ny_block - if (.not.iceUmask(i,j,iblk)) then - stresspU (i,j,iblk) = c0 - stressmU (i,j,iblk) = c0 - stress12U(i,j,iblk) = c0 - endif - if (.not.iceTmask(i,j,iblk)) then - stresspT (i,j,iblk) = c0 - stressmT (i,j,iblk) = c0 - stress12T(i,j,iblk) = c0 - endif - enddo - enddo - enddo ! iblk - !$OMP END PARALLEL DO + do i=1,nx_block + do j=1,ny_block + if (.not.iceUmask(i,j,iblk)) then + stresspU (i,j,iblk) = c0 + stressmU (i,j,iblk) = c0 + stress12U(i,j,iblk) = c0 + endif + if (.not.iceTmask(i,j,iblk)) then + stresspT (i,j,iblk) = c0 + stressmT (i,j,iblk) = c0 + stress12T(i,j,iblk) = c0 + endif + enddo + enddo + enddo ! iblk + !$OMP END PARALLEL DO endif ! grid_ice @@ -721,10 +793,6 @@ subroutine evp (dt) if (evp_algorithm == "shared_mem_1d" ) then - if (first_time .and. my_task == master_task) then - write(nu_diag,'(3a)') subname,' Entering evp_algorithm version ',evp_algorithm - first_time = .false. - endif if (trim(grid_type) == 'tripole') then call abort_ice(trim(subname)//' & & Kernel not tested on tripole grid. Set evp_algorithm=standard_2d') @@ -877,7 +945,8 @@ subroutine evp (dt) uarea (:,:,iblk), DminTarea (:,:,iblk), & strength (:,:,iblk), shearU (:,:,iblk), & zetax2T (:,:,iblk), etax2T (:,:,iblk), & - stresspT (:,:,iblk), stressmT (:,:,iblk)) + stresspT (:,:,iblk), stressmT (:,:,iblk), & + stress12T (:,:,iblk)) enddo !$OMP END PARALLEL DO @@ -1211,12 +1280,6 @@ subroutine evp (dt) call ice_timer_stop(timer_evp_2d) endif ! evp_algorithm - deallocate(fld2,fld3,fld4) - if (grid_ice == 'CD' .or. 
grid_ice == 'C') then - deallocate(strengthU, divergU, tensionU, shearU, deltaU) - deallocate(zetax2T, zetax2U, etax2T, etax2U) - endif - if (maskhalo_dyn) then call ice_HaloDestroy(halo_info_mask) endif @@ -1658,17 +1721,18 @@ end subroutine stress ! Kimmritz, M., S. Danilov and M. Losch (2016). The adaptive EVP method ! for solving the sea ice momentum equation. Ocean Model., 101, 59-67. - subroutine stressC_T (nx_block, ny_block , & - icellT , & - indxTi , indxTj , & - uvelE , vvelE , & - uvelN , vvelN , & - dxN , dyE , & - dxT , dyT , & - uarea , DminTarea, & - strength, shearU , & - zetax2T , etax2T , & - stressp , stressm ) + subroutine stressC_T (nx_block, ny_block , & + icellT , & + indxTi , indxTj , & + uvelE , vvelE , & + uvelN , vvelN , & + dxN , dyE , & + dxT , dyT , & + uarea , DminTarea , & + strength , shearU , & + zetax2T , etax2T , & + stresspT , stressmT , & + stress12T) use ice_dyn_shared, only: strain_rates_T, capping, & visc_replpress, e_factor @@ -1691,15 +1755,16 @@ subroutine stressC_T (nx_block, ny_block , & dxT , & ! width of T-cell through the middle (m) dyT , & ! height of T-cell through the middle (m) strength , & ! ice strength (N/m) - shearU , & ! shearU + shearU , & ! shearU local for this routine uarea , & ! area of u cell DminTarea ! deltaminEVP*tarea real (kind=dbl_kind), dimension (nx_block,ny_block), intent(inout) :: & zetax2T , & ! zetax2 = 2*zeta (bulk viscosity) etax2T , & ! etax2 = 2*eta (shear viscosity) - stressp , & ! sigma11+sigma22 - stressm ! sigma11-sigma22 + stresspT , & ! sigma11+sigma22 + stressmT , & ! sigma11-sigma22 + stress12T ! sigma12 ! local variables @@ -1712,7 +1777,9 @@ subroutine stressC_T (nx_block, ny_block , & real (kind=dbl_kind) :: & shearTsqr , & ! strain rates squared at T point + shearT , & ! strain rate at T point DeltaT , & ! delt at T point + uareaavgr , & ! 1 / uarea avg rep_prsT ! replacement pressure at T point character(len=*), parameter :: subname = '(stressC_T)' @@ -1728,7 +1795,7 @@ subroutine stressC_T (nx_block, ny_block , & uvelN (:,:), vvelN (:,:), & dxN (:,:), dyE (:,:), & dxT (:,:), dyT (:,:), & - divT (:,:), tensionT(:,:) ) + divT (:,:), tensionT(:,:)) do ij = 1, icellT i = indxTi(ij) @@ -1739,11 +1806,19 @@ subroutine stressC_T (nx_block, ny_block , & ! U point values (Bouillon et al., 2013, Kimmritz et al., 2016 !----------------------------------------------------------------- + uareaavgr = c1/(uarea(i,j)+uarea(i,j-1)+uarea(i-1,j-1)+uarea(i-1,j)) + shearTsqr = (shearU(i ,j )**2 * uarea(i ,j ) & + shearU(i ,j-1)**2 * uarea(i ,j-1) & + shearU(i-1,j-1)**2 * uarea(i-1,j-1) & + shearU(i-1,j )**2 * uarea(i-1,j )) & - / (uarea(i,j)+uarea(i,j-1)+uarea(i-1,j-1)+uarea(i-1,j)) + * uareaavgr + + shearT = (shearU(i ,j ) * uarea(i ,j ) & + + shearU(i ,j-1) * uarea(i ,j-1) & + + shearU(i-1,j-1) * uarea(i-1,j-1) & + + shearU(i-1,j ) * uarea(i-1,j )) & + * uareaavgr DeltaT = sqrt(divT(i,j)**2 + e_factor*(tensionT(i,j)**2 + shearTsqr)) @@ -1752,7 +1827,7 @@ subroutine stressC_T (nx_block, ny_block , & !----------------------------------------------------------------- call visc_replpress (strength(i,j), DminTarea(i,j), DeltaT, & - zetax2T (i,j), etax2T (i,j), rep_prsT, capping) + zetax2T (i,j), etax2T(i,j), rep_prsT, capping) !----------------------------------------------------------------- ! the stresses ! kg/s^2 @@ -1760,11 +1835,14 @@ subroutine stressC_T (nx_block, ny_block , & ! NOTE: for comp. 
efficiency 2 x zeta and 2 x eta are used in the code - stressp(i,j) = (stressp(i,j)*(c1-arlx1i*revp) & - + arlx1i*(zetax2T(i,j)*divT(i,j) - rep_prsT)) * denom1 + stresspT(i,j) = (stresspT (i,j)*(c1-arlx1i*revp) & + + arlx1i*(zetax2T(i,j)*divT(i,j) - rep_prsT)) * denom1 - stressm(i,j) = (stressm(i,j)*(c1-arlx1i*revp) & - + arlx1i*etax2T(i,j)*tensionT(i,j)) * denom1 + stressmT(i,j) = (stressmT (i,j)*(c1-arlx1i*revp) & + + arlx1i*etax2T(i,j)*tensionT(i,j)) * denom1 + + stress12T(i,j) = (stress12T(i,j)*(c1-arlx1i*revp) & + + arlx1i*p5*etax2T(i,j)*shearT ) * denom1 enddo ! ij @@ -1783,13 +1861,13 @@ end subroutine stressC_T ! Kimmritz, M., S. Danilov and M. Losch (2016). The adaptive EVP method ! for solving the sea ice momentum equation. Ocean Model., 101, 59-67. - subroutine stressC_U (nx_block , ny_block, & - icellU, & - indxUi , indxUj, & + subroutine stressC_U (nx_block , ny_block ,& + icellU ,& + indxUi , indxUj ,& uarea , & - etax2U , deltaU, & - strengthU, shearU, & - stress12 ) + etax2U , deltaU ,& + strengthU, shearU ,& + stress12U) use ice_dyn_shared, only: visc_replpress, & visc_method, deltaminEVP, capping @@ -1810,7 +1888,7 @@ subroutine stressC_U (nx_block , ny_block, & strengthU ! ice strength at the U point real (kind=dbl_kind), dimension (nx_block,ny_block), intent(inout) :: & - stress12 ! sigma12 + stress12U ! sigma12 ! local variables @@ -1829,15 +1907,15 @@ subroutine stressC_U (nx_block , ny_block, & ! viscosities and replacement pressure at U point ! avg_zeta: Bouillon et al. 2013, C1 method of Kimmritz et al. 2016 ! avg_strength: C2 method of Kimmritz et al. 2016 - ! if outside do and stress12 equation repeated in each loop for performance + ! if outside do and stress12U equation repeated in each loop for performance !----------------------------------------------------------------- if (visc_method == 'avg_zeta') then do ij = 1, icellU i = indxUi(ij) j = indxUj(ij) - stress12(i,j) = (stress12(i,j)*(c1-arlx1i*revp) & - + arlx1i*p5*etax2U(i,j)*shearU(i,j)) * denom1 + stress12U(i,j) = (stress12U(i,j)*(c1-arlx1i*revp) & + + arlx1i*p5*etax2U(i,j)*shearU(i,j)) * denom1 enddo elseif (visc_method == 'avg_strength') then @@ -1847,10 +1925,10 @@ subroutine stressC_U (nx_block , ny_block, & DminUarea = deltaminEVP*uarea(i,j) ! only need etax2U here, but other terms are calculated with etax2U ! minimal extra calculations here even though it seems like there is - call visc_replpress (strengthU(i,j), DminUarea, DeltaU(i,j), & + call visc_replpress (strengthU(i,j), DminUarea, deltaU(i,j), & lzetax2U , letax2U , lrep_prsU , capping) - stress12(i,j) = (stress12(i,j)*(c1-arlx1i*revp) & - + arlx1i*p5*letax2U*shearU(i,j)) * denom1 + stress12U(i,j) = (stress12U(i,j)*(c1-arlx1i*revp) & + + arlx1i*p5*letax2U*shearU(i,j)) * denom1 enddo endif @@ -1863,18 +1941,18 @@ end subroutine stressC_U ! author: JF Lemieux, ECCC ! Nov 2021 - subroutine stressCD_T (nx_block, ny_block, & - icellT, & - indxTi, indxTj, & - uvelE, vvelE, & - uvelN, vvelN, & - dxN, dyE, & - dxT, dyT, & - DminTarea, & - strength, & - zetax2T, etax2T, & - stresspT, stressmT, & - stress12T ) + subroutine stressCD_T (nx_block, ny_block , & + icellT , & + indxTi , indxTj , & + uvelE , vvelE , & + uvelN , vvelN , & + dxN , dyE , & + dxT , dyT , & + DminTarea, & + strength, & + zetax2T , etax2T , & + stresspT, stressmT , & + stress12T) use ice_dyn_shared, only: strain_rates_T, capping, & visc_replpress @@ -1914,7 +1992,7 @@ subroutine stressCD_T (nx_block, ny_block, & real (kind=dbl_kind), dimension (nx_block,ny_block) :: & divT , & ! 
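!=======================================================================
! The shearT and shearTsqr expressions in stressC_T above are the same
! area-weighted four-point average; uareaavgr precomputes the shared
! 1/sum(uarea) factor so the divide happens once.  Hypothetical helper
! form of the same operation:
      pure function area_avg4(f, a) result(favg)
         integer, parameter :: dbl_kind = selected_real_kind(13)
         real (kind=dbl_kind), intent(in) :: f(4)   ! four surrounding U-point values
         real (kind=dbl_kind), intent(in) :: a(4)   ! the corresponding uarea weights
         real (kind=dbl_kind) :: favg
         favg = sum(f*a) / sum(a)                   ! multiply by 1/sum(a), cf. uareaavgr
      end function area_avg4
!=======================================================================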
divergence at T point tensionT , & ! tension at T point - shearT , & ! sheat at T point + shearT , & ! shear at T point DeltaT ! delt at T point real (kind=dbl_kind) :: & @@ -1935,7 +2013,7 @@ subroutine stressCD_T (nx_block, ny_block, & dxN (:,:), dyE (:,:), & dxT (:,:), dyT (:,:), & divT (:,:), tensionT(:,:), & - shearT(:,:), DeltaT (:,:) ) + shearT(:,:), DeltaT (:,:)) do ij = 1, icellT i = indxTi(ij) @@ -1946,7 +2024,7 @@ subroutine stressCD_T (nx_block, ny_block, & !----------------------------------------------------------------- call visc_replpress (strength(i,j), DminTarea(i,j), DeltaT(i,j), & - zetax2T (i,j), etax2T (i,j), rep_prsT , capping) + zetax2T (i,j), etax2T(i,j), rep_prsT , capping) !----------------------------------------------------------------- ! the stresses ! kg/s^2 @@ -1973,19 +2051,18 @@ end subroutine stressCD_T ! author: JF Lemieux, ECCC ! Nov 2021 - subroutine stressCD_U (nx_block, ny_block, & - icellU, & - indxUi, indxUj, & - uarea, & - zetax2U, etax2U, & - strengthU, & - divergU, tensionU, & - shearU, DeltaU, & - stresspU, stressmU, & - stress12U ) - - use ice_dyn_shared, only: strain_rates_U, & - visc_replpress, & + subroutine stressCD_U (nx_block, ny_block, & + icellU , & + indxUi , indxUj , & + uarea , & + zetax2U , etax2U , & + strengthU , & + divergU , tensionU, & + shearU , deltaU , & + stresspU , stressmU, & + stress12U) + + use ice_dyn_shared, only: visc_replpress, & visc_method, deltaminEVP, capping integer (kind=int_kind), intent(in) :: & @@ -2043,7 +2120,7 @@ subroutine stressCD_U (nx_block, ny_block, & DminUarea = deltaminEVP*uarea(i,j) ! only need etax2U here, but other terms are calculated with etax2U ! minimal extra calculations here even though it seems like there is - call visc_replpress (strengthU(i,j), DminUarea, DeltaU(i,j), & + call visc_replpress (strengthU(i,j), DminUarea, deltaU(i,j), & lzetax2U , letax2U , lrep_prsU , capping) endif diff --git a/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 b/cicecore/cicedyn/dynamics/ice_dyn_evp_1d.F90 similarity index 99% rename from cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 rename to cicecore/cicedyn/dynamics/ice_dyn_evp_1d.F90 index e874611bd..b7daab0a0 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 +++ b/cicecore/cicedyn/dynamics/ice_dyn_evp_1d.F90 @@ -889,7 +889,7 @@ subroutine evp1d_halo_update(NAVEL_len, lb, ub, uvel, vvel, & #ifdef _OPENACC !$acc parallel & - !$acc present(uvel, vvel) & + !$acc present(uvel, vvel) !$acc loop do iw = 1, NAVEL_len if (halo_parent(iw) == 0) cycle diff --git a/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 b/cicecore/cicedyn/dynamics/ice_dyn_shared.F90 similarity index 99% rename from cicecore/cicedynB/dynamics/ice_dyn_shared.F90 rename to cicecore/cicedyn/dynamics/ice_dyn_shared.F90 index 187ec55cc..a12e6fddd 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 +++ b/cicecore/cicedyn/dynamics/ice_dyn_shared.F90 @@ -24,9 +24,8 @@ module ice_dyn_shared implicit none private public :: set_evp_parameters, stepu, stepuv_CD, stepu_C, stepv_C, & - principal_stress, init_dyn, dyn_prep1, dyn_prep2, dyn_finish, & + principal_stress, init_dyn_shared, dyn_prep1, dyn_prep2, dyn_finish, & seabed_stress_factor_LKD, seabed_stress_factor_prob, & - alloc_dyn_shared, & deformations, deformationsC_T, deformationsCD_T, & strain_rates, strain_rates_T, strain_rates_U, & visc_replpress, & @@ -94,6 +93,11 @@ module ice_dyn_shared fcorE_blk(:,:,:), & ! Coriolis parameter at E points (1/s) fcorN_blk(:,:,:) ! 
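!=======================================================================
! On the evp1d_halo_update hunk above: in OpenACC Fortran a directive
! line ending in '&' must be continued by the next !$acc line, so the
! stray '&' after present(uvel, vvel) folded the separate '!$acc loop'
! directive into the parallel directive, yielding an invalid combined
! directive.  Corrected form, as in the '+' line:
!       !$acc parallel &
!       !$acc present(uvel, vvel)
!       !$acc loop
!       do iw = 1, NAVEL_len
!          ...
!       enddo
!=======================================================================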
Coriolis parameter at N points (1/s) + real (kind=dbl_kind), allocatable, public :: & + fld2(:,:,:,:), & ! 2 bundled fields + fld3(:,:,:,:), & ! 3 bundled fields + fld4(:,:,:,:) ! 4 bundled fields + real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & uvel_init , & ! x-component of velocity (m/s), beginning of timestep vvel_init ! y-component of velocity (m/s), beginning of timestep @@ -176,6 +180,15 @@ subroutine alloc_dyn_shared vvel_init (nx_block,ny_block,max_blocks), & ! y-component of velocity (m/s), beginning of timestep iceTmask (nx_block,ny_block,max_blocks), & ! T mask for dynamics iceUmask (nx_block,ny_block,max_blocks), & ! U mask for dynamics + fcor_blk (nx_block,ny_block,max_blocks), & ! Coriolis + DminTarea (nx_block,ny_block,max_blocks), & ! + stat=ierr) + if (ierr/=0) call abort_ice(subname//': Out of memory') + + allocate( & + fld2(nx_block,ny_block,2,max_blocks), & + fld3(nx_block,ny_block,3,max_blocks), & + fld4(nx_block,ny_block,4,max_blocks), & stat=ierr) if (ierr/=0) call abort_ice(subname//': Out of memory') @@ -187,6 +200,8 @@ subroutine alloc_dyn_shared vvelN_init (nx_block,ny_block,max_blocks), & ! y-component of velocity (m/s), beginning of timestep iceEmask (nx_block,ny_block,max_blocks), & ! T mask for dynamics iceNmask (nx_block,ny_block,max_blocks), & ! U mask for dynamics + fcorE_blk (nx_block,ny_block,max_blocks), & ! Coriolis + fcorN_blk (nx_block,ny_block,max_blocks), & ! Coriolis stat=ierr) if (ierr/=0) call abort_ice(subname//': Out of memory') endif @@ -197,18 +212,18 @@ end subroutine alloc_dyn_shared ! Initialize parameters and variables needed for the dynamics ! author: Elizabeth C. Hunke, LANL - subroutine init_dyn (dt) + subroutine init_dyn_shared (dt) use ice_blocks, only: nx_block, ny_block use ice_domain, only: nblocks, halo_dynbundle use ice_domain_size, only: max_blocks - use ice_flux, only: rdg_conv, rdg_shear, & + use ice_flux, only: & stressp_1, stressp_2, stressp_3, stressp_4, & stressm_1, stressm_2, stressm_3, stressm_4, & stress12_1, stress12_2, stress12_3, stress12_4, & stresspT, stressmT, stress12T, & stresspU, stressmU, stress12U - use ice_state, only: uvel, vvel, uvelE, vvelE, uvelN, vvelN, divu, shear + use ice_state, only: uvel, vvel, uvelE, vvelE, uvelN, vvelN use ice_grid, only: ULAT, NLAT, ELAT, tarea real (kind=dbl_kind), intent(in) :: & @@ -221,10 +236,11 @@ subroutine init_dyn (dt) nprocs, & ! number of processors iblk ! block index - character(len=*), parameter :: subname = '(init_dyn)' + character(len=*), parameter :: subname = '(init_dyn_shared)' call set_evp_parameters (dt) - + ! allocate dyn shared (init_uvel,init_vvel) + call alloc_dyn_shared ! Set halo_dynbundle, this is empirical at this point, could become namelist halo_dynbundle = .true. nprocs = get_num_procs() @@ -237,14 +253,6 @@ subroutine init_dyn (dt) write(nu_diag,*) 'halo_dynbundle =', halo_dynbundle endif - allocate(fcor_blk(nx_block,ny_block,max_blocks)) - allocate(DminTarea(nx_block,ny_block,max_blocks)) - - if (grid_ice == 'CD' .or. grid_ice == 'C') then - allocate(fcorE_blk(nx_block,ny_block,max_blocks)) - allocate(fcorN_blk(nx_block,ny_block,max_blocks)) - endif - !$OMP PARALLEL DO PRIVATE(iblk,i,j) SCHEDULE(runtime) do iblk = 1, nblocks do j = 1, ny_block @@ -260,11 +268,6 @@ subroutine init_dyn (dt) vvelN(i,j,iblk) = c0 endif - ! strain rates - divu (i,j,iblk) = c0 - shear(i,j,iblk) = c0 - rdg_conv (i,j,iblk) = c0 - rdg_shear(i,j,iblk) = c0 ! 
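!=======================================================================
! fld2/fld3/fld4 are scratch buffers for bundling 2, 3 or 4 scalar
! fields into a single halo exchange; they are now allocated once in
! alloc_dyn_shared (above) and shared by evp, eap and vp instead of
! being allocated and freed inside every dynamics call.  Schematic
! usage, following the stack_fields/unstack_fields calls visible in
! these hunks (halo arguments abbreviated):
!        call stack_fields (workspace_x, workspace_y, fld2)   ! pack
!        call ice_HaloUpdate (fld2, halo_info, ...)           ! one exchange
!        call unstack_fields (fld2, workspace_x, workspace_y) ! unpack
! One bundled exchange replaces two scalar exchanges, halving the
! number of latency-bound messages.
!=======================================================================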
Coriolis parameter if (trim(coriolis) == 'constant') then @@ -330,7 +333,7 @@ subroutine init_dyn (dt) enddo ! iblk !$OMP END PARALLEL DO - end subroutine init_dyn + end subroutine init_dyn_shared !======================================================================= ! Set parameters needed for the evp dynamics. diff --git a/cicecore/cicedynB/dynamics/ice_dyn_vp.F90 b/cicecore/cicedyn/dynamics/ice_dyn_vp.F90 similarity index 99% rename from cicecore/cicedynB/dynamics/ice_dyn_vp.F90 rename to cicecore/cicedyn/dynamics/ice_dyn_vp.F90 index 6534e7568..3915004b4 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_vp.F90 +++ b/cicecore/cicedyn/dynamics/ice_dyn_vp.F90 @@ -48,7 +48,7 @@ module ice_dyn_vp use ice_dyn_shared, only: dyn_prep1, dyn_prep2, dyn_finish, & cosw, sinw, fcor_blk, uvel_init, vvel_init, & seabed_stress_factor_LKD, seabed_stress_factor_prob, seabed_stress_method, & - seabed_stress, Ktens, stack_fields, unstack_fields + seabed_stress, Ktens, stack_fields, unstack_fields, fld2, fld3, fld4 use ice_fileunits, only: nu_diag use ice_flux, only: fmU use ice_global_reductions, only: global_sum @@ -88,7 +88,7 @@ module ice_dyn_vp reltol_andacc ! relative tolerance for Anderson acceleration character (len=char_len), public :: & - precond , & ! preconditioner for fgmres: 'ident' (identity), 'diag' (diagonal), + precond , & ! preconditioner for fgmres: 'ident' (identity), 'diag' (diagonal), ! 'pgmres' (Jacobi-preconditioned GMRES) algo_nonlin , & ! nonlinear algorithm: 'picard' (Picard iteration), 'anderson' (Anderson acceleration) ortho_type ! type of orthogonalization for FGMRES ('cgs' or 'mgs') @@ -105,11 +105,6 @@ module ice_dyn_vp indxUi(:,:) , & ! compressed index in i-direction indxUj(:,:) ! compressed index in j-direction - real (kind=dbl_kind), allocatable :: & - fld2(:,:,:,:), & ! work array for boundary updates - fld3(:,:,:,:), & ! work array for boundary updates - fld4(:,:,:,:) ! work array for boundary updates - !======================================================================= contains @@ -126,6 +121,8 @@ subroutine init_vp use ice_constants, only: c1, & field_loc_center, field_type_scalar use ice_domain, only: blocks_ice, halo_info + use ice_calendar, only: dt_dyn + use ice_dyn_shared, only: init_dyn_shared ! use ice_grid, only: tarea ! local variables @@ -137,15 +134,14 @@ subroutine init_vp type (block) :: & this_block ! block information for current block + call init_dyn_shared(dt_dyn) + ! Initialize module variables allocate(icellT(max_blocks), icellU(max_blocks)) allocate(indxTi(nx_block*ny_block, max_blocks), & indxTj(nx_block*ny_block, max_blocks), & indxUi(nx_block*ny_block, max_blocks), & indxUj(nx_block*ny_block, max_blocks)) - allocate(fld2(nx_block,ny_block,2,max_blocks)) - allocate(fld3(nx_block,ny_block,3,max_blocks)) - allocate(fld4(nx_block,ny_block,4,max_blocks)) end subroutine init_vp @@ -3348,7 +3344,7 @@ subroutine pgmres (zetax2 , etax2 , & ! Update workspace with boundary values ! NOTE: skipped for efficiency since this is just a preconditioner - ! unless bfbflag is active + ! unless bfbflag is active if (bfbflag /= 'off') then call stack_fields(workspace_x, workspace_y, fld2) call ice_timer_start(timer_bound) @@ -3569,7 +3565,7 @@ subroutine precondition(zetax2 , etax2, & type (ice_halo), intent(in) :: & halo_info_mask ! ghost cell update info for masked halo - + real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks), intent(in) :: & vx , & ! input vector (x components) vy ! 
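init_dyn_shared keeps the two Coriolis options seen above: a fixed polar value for coriolis = 'constant' and f = 2*omega*sin(latitude) otherwise. A small self-contained check of the magnitudes; the 1.46e-4 1/s constant is CICE's usual choice for this option and is quoted from memory here, not from the hunk.

    program coriolis_demo
       implicit none
       integer,  parameter :: dp    = kind(1.d0)
       real(dp), parameter :: omega = 7.292e-5_dp   ! Earth rotation rate (rad/s)
       real(dp), parameter :: pi    = 3.141592653589793_dp
       real(dp) :: fcor
       fcor = 2.0_dp*omega*sin(75.0_dp*pi/180.0_dp)
       print '(a,es10.3)', ' f at 75N        = ', fcor        ! ~1.41e-4 1/s
       print '(a,es10.3)', ' constant option = ', 1.46e-4_dp
    end program coriolis_demo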
input vector (y components) diff --git a/cicecore/cicedynB/dynamics/ice_transport_driver.F90 b/cicecore/cicedyn/dynamics/ice_transport_driver.F90 similarity index 100% rename from cicecore/cicedynB/dynamics/ice_transport_driver.F90 rename to cicecore/cicedyn/dynamics/ice_transport_driver.F90 diff --git a/cicecore/cicedynB/dynamics/ice_transport_remap.F90 b/cicecore/cicedyn/dynamics/ice_transport_remap.F90 similarity index 100% rename from cicecore/cicedynB/dynamics/ice_transport_remap.F90 rename to cicecore/cicedyn/dynamics/ice_transport_remap.F90 diff --git a/cicecore/cicedynB/general/ice_flux.F90 b/cicecore/cicedyn/general/ice_flux.F90 similarity index 100% rename from cicecore/cicedynB/general/ice_flux.F90 rename to cicecore/cicedyn/general/ice_flux.F90 diff --git a/cicecore/cicedynB/general/ice_flux_bgc.F90 b/cicecore/cicedyn/general/ice_flux_bgc.F90 similarity index 100% rename from cicecore/cicedynB/general/ice_flux_bgc.F90 rename to cicecore/cicedyn/general/ice_flux_bgc.F90 diff --git a/cicecore/cicedynB/general/ice_forcing.F90 b/cicecore/cicedyn/general/ice_forcing.F90 similarity index 99% rename from cicecore/cicedynB/general/ice_forcing.F90 rename to cicecore/cicedyn/general/ice_forcing.F90 index ff79778c5..541efb282 100755 --- a/cicecore/cicedynB/general/ice_forcing.F90 +++ b/cicecore/cicedyn/general/ice_forcing.F90 @@ -118,7 +118,7 @@ module ice_forcing real (kind=dbl_kind), dimension(:,:,:,:,:), allocatable :: & wave_spectrum_data ! field values at 2 temporal data points - + character(char_len), public :: & atm_data_format, & ! 'bin'=binary or 'nc'=netcdf ocn_data_format, & ! 'bin'=binary or 'nc'=netcdf @@ -5650,7 +5650,7 @@ subroutine get_wave_spec file=__FILE__, line=__LINE__) else #ifdef USE_NETCDF - call wave_spec_data + call wave_spec_data #else write (nu_diag,*) "wave spectrum file not available, requires cpp USE_NETCDF" write (nu_diag,*) "wave spectrum file not available, using default profile" @@ -5682,9 +5682,9 @@ subroutine wave_spec_data use ice_grid, only: hm, tlon, tlat, tmask, umask use ice_calendar, only: days_per_year, use_leap_years - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & ncid , & ! netcdf file id - i, j, freq , & + i, j, freq , & ixm,ixx,ixp , & ! record numbers for neighboring months recnum , & ! record number maxrec , & ! maximum record number @@ -5710,7 +5710,7 @@ subroutine wave_spec_data wave_spectrum_profile ! wave spectrum character(len=64) :: fieldname !netcdf field name - character(char_len_long) :: spec_file + character(char_len_long) :: spec_file character(char_len) :: wave_spec_type logical (kind=log_kind) :: wave_spec character(len=*), parameter :: subname = '(wave_spec_data)' @@ -5736,7 +5736,7 @@ subroutine wave_spec_data yr = fyear ! current year !------------------------------------------------------------------- ! 6-hourly data - ! + ! ! Assume that the 6-hourly value is located at the end of the ! 6-hour period. This is the convention for NCEP reanalysis data. ! E.g. record 1 gives conditions at 6 am GMT on 1 January. @@ -5785,9 +5785,9 @@ subroutine wave_spec_data call ice_read_nc_xyf(ncid,recnum,'efreq',wave_spectrum_data(:,:,:,2,:),debug_n_d, & field_loc=field_loc_center, & field_type=field_type_scalar) - call ice_close_nc(ncid) + call ice_close_nc(ncid) + - ! 
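The wave_spec_data comment above pins each 6-hourly record to the end of its period, so record 1 is valid at 06Z on 1 January. Under that convention a forcing time t (hours since the start of the year) falls in record ceiling(t/6). A minimal sketch with illustrative names (thours, maxrec); leap years are ignored.

    function wave_recnum(thours, maxrec) result(recnum)
       implicit none
       integer, parameter :: dp = kind(1.d0)
       real(dp), intent(in) :: thours   ! hours since start of year
       integer,  intent(in) :: maxrec   ! e.g. 4*365 for a non-leap year
       integer :: recnum
       ! end-of-period convention: t in (0,6] -> record 1, (6,12] -> 2, ...
       recnum = int(ceiling(thours/6.0_dp))
       recnum = min(max(recnum, 1), maxrec)   ! clamp at year boundaries
    end function wave_recnum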
Interpolate call interpolate_wavespec_data (wave_spectrum_data, wave_spectrum) diff --git a/cicecore/cicedynB/general/ice_forcing_bgc.F90 b/cicecore/cicedyn/general/ice_forcing_bgc.F90 similarity index 100% rename from cicecore/cicedynB/general/ice_forcing_bgc.F90 rename to cicecore/cicedyn/general/ice_forcing_bgc.F90 diff --git a/cicecore/cicedynB/general/ice_init.F90 b/cicecore/cicedyn/general/ice_init.F90 similarity index 94% rename from cicecore/cicedynB/general/ice_init.F90 rename to cicecore/cicedyn/general/ice_init.F90 index 45ae58d8b..d56ad002e 100644 --- a/cicecore/cicedynB/general/ice_init.F90 +++ b/cicecore/cicedyn/general/ice_init.F90 @@ -17,11 +17,11 @@ module ice_init use ice_constants, only: c0, c1, c2, c3, c5, c12, p2, p3, p5, p75, p166, & cm_to_m use ice_exit, only: abort_ice - use ice_fileunits, only: nu_nml, nu_diag, nu_diag_set, nml_filename, diag_type, & + use ice_fileunits, only: nu_nml, nu_diag, nml_filename, diag_type, & ice_stdout, get_fileunit, release_fileunit, bfbflag, flush_fileunit, & ice_IOUnitsMinUnit, ice_IOUnitsMaxUnit #ifdef CESMCOUPLED - use ice_fileunits, only: inst_suffix + use ice_fileunits, only: inst_suffix, nu_diag_set #endif use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc, only: icepack_aggregate @@ -124,6 +124,8 @@ subroutine input_data use ice_restoring, only: restore_ice use ice_timers, only: timer_stats use ice_memusage, only: memory_stats + use ice_fileunits, only: goto_nml + #ifdef CESMCOUPLED use shr_file_mod, only: shr_file_setIO #endif @@ -151,7 +153,7 @@ subroutine input_data kitd, kcatbound, ktransport character (len=char_len) :: shortwave, albedo_type, conduct, fbot_xfer_type, & - tfrz_option, frzpnd, atmbndy, wave_spec_type, snwredist, snw_aging_table, & + tfrz_option, saltflux_option, frzpnd, atmbndy, wave_spec_type, snwredist, snw_aging_table, & capping_method logical (kind=log_kind) :: calc_Tsfc, formdrag, highfreq, calc_strair, wave_spec, & @@ -163,9 +165,11 @@ subroutine input_data integer (kind=int_kind) :: numin, numax ! unit number limits integer (kind=int_kind) :: rplvl, rptopo - real (kind=dbl_kind) :: Cf, ksno, puny + real (kind=dbl_kind) :: Cf, ksno, puny, ice_ref_salinity + character (len=char_len) :: abort_list - character (len=128) :: tmpstr2 + character (len=char_len) :: nml_name ! namelist name + character (len=char_len_long) :: tmpstr2 character(len=*), parameter :: subname='(input_data)' @@ -260,6 +264,7 @@ subroutine input_data highfreq, natmiter, atmiter_conv, calc_dragio, & ustar_min, emissivity, iceruf, iceruf_ocn, & fbot_xfer_type, update_ocn_f, l_mpond_fresh, tfrz_option, & + saltflux_option,ice_ref_salinity, & oceanmixed_ice, restore_ice, restore_ocn, trestore, & precip_units, default_season, wave_spec_type,nfreq, & atm_data_type, ocn_data_type, bgc_data_type, fe_data_type, & @@ -497,6 +502,8 @@ subroutine input_data precip_units = 'mks' ! 'mm_per_month' or ! 'mm_per_sec' = 'mks' = kg/m^2 s tfrz_option = 'mushy' ! freezing temp formulation + saltflux_option = 'constant' ! saltflux calculation + ice_ref_salinity = 4.0_dbl_kind ! Ice reference salinity for coupling oceanmixed_ice = .false. ! if true, use internal ocean mixed layer wave_spec_type = 'none' ! type of wave spectrum forcing nfreq = 25 ! number of wave frequencies @@ -584,6 +591,7 @@ subroutine input_data if (my_task == master_task) then + ! 
open namelist file call get_fileunit(nu_nml) open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then @@ -592,141 +600,228 @@ subroutine input_data file=__FILE__, line=__LINE__) endif - write(nu_diag,*) subname,' Reading setup_nml' - rewind(unit=nu_nml, iostat=nml_error) + ! read setup_nml + nml_name = 'setup_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + ! goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: setup_nml rewind ', & + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & file=__FILE__, line=__LINE__) endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=setup_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: '//trim(nml_name)//' reading '// & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: setup_nml reading ', & - file=__FILE__, line=__LINE__) - endif - write(nu_diag,*) subname,' Reading grid_nml' - rewind(unit=nu_nml, iostat=nml_error) + ! read grid_nml + nml_name = 'grid_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + ! goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: grid_nml rewind ', & + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & file=__FILE__, line=__LINE__) endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=grid_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: ' //trim(nml_name)//' reading '// & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: grid_nml reading ', & - file=__FILE__, line=__LINE__) - endif - write(nu_diag,*) subname,' Reading tracer_nml' - rewind(unit=nu_nml, iostat=nml_error) + ! read tracer_nml + nml_name = 'tracer_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + ! goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: tracer_nml rewind ', & + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & file=__FILE__, line=__LINE__) endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=tracer_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: ' //trim(nml_name)//' reading '// & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: tracer_nml reading ', & - file=__FILE__, line=__LINE__) - endif - write(nu_diag,*) subname,' Reading thermo_nml' - rewind(unit=nu_nml, iostat=nml_error) + ! read thermo_nml + nml_name = 'thermo_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + ! goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: thermo_nml rewind ', & + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & file=__FILE__, line=__LINE__) endif + + ! 
read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=thermo_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: '//trim(nml_name)//' reading '// & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: thermo_nml reading ', & - file=__FILE__, line=__LINE__) - endif - write(nu_diag,*) subname,' Reading dynamics_nml' - rewind(unit=nu_nml, iostat=nml_error) + ! read dynamics_nml + nml_name = 'dynamics_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + + ! goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: dynamics_nml rewind ', & + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & file=__FILE__, line=__LINE__) endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=dynamics_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: '//trim(nml_name)//' reading '// & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: dynamics_nml reading ', & - file=__FILE__, line=__LINE__) - endif - write(nu_diag,*) subname,' Reading shortwave_nml' - rewind(unit=nu_nml, iostat=nml_error) + ! read shortwave_nml + nml_name = 'shortwave_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + + ! goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: shortwave_nml rewind ', & + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & file=__FILE__, line=__LINE__) endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=shortwave_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: '//trim(nml_name)//' reading '//& + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: shortwave_nml reading ', & - file=__FILE__, line=__LINE__) - endif - write(nu_diag,*) subname,' Reading ponds_nml' - rewind(unit=nu_nml, iostat=nml_error) + ! read ponds_nml + nml_name = 'ponds_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + + ! goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: ponds_nml rewind ', & + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & file=__FILE__, line=__LINE__) endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=ponds_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: '//trim(nml_name)//' reading '// & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: ponds_nml reading ', & - file=__FILE__, line=__LINE__) - endif - write(nu_diag,*) subname,' Reading snow_nml' - rewind(unit=nu_nml, iostat=nml_error) + ! 
read snow_nml + nml_name = 'snow_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + + ! goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: snow_nml rewind ', & + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & file=__FILE__, line=__LINE__) endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=snow_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: '//trim(nml_name)//' reading '// & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: snow_nml reading ', & - file=__FILE__, line=__LINE__) - endif - write(nu_diag,*) subname,' Reading forcing_nml' - rewind(unit=nu_nml, iostat=nml_error) + ! read forcing_nml + nml_name = 'forcing_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + + ! goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: forcing_nml rewind ', & + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & file=__FILE__, line=__LINE__) endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=forcing_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: '// trim(nml_name)//' reading '// & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: forcing_nml reading ', & - file=__FILE__, line=__LINE__) - endif + ! done reading namelist. close(nu_nml) call release_fileunit(nu_nml) endif @@ -758,8 +853,8 @@ subroutine input_data ! each task gets unique ice log filename when if test is true, for debugging if (1 == 0) then call get_fileUnit(nu_diag) - write(tmpstr,'(a,i4.4)') "ice.log.task_",my_task - open(nu_diag,file=tmpstr) + write(tmpstr2,'(a,i4.4)') "ice.log.task_",my_task + open(nu_diag,file=tmpstr2) endif end if if (trim(ice_ic) /= 'default' .and. & @@ -979,6 +1074,8 @@ subroutine input_data call broadcast_scalar(wave_spec_file, master_task) call broadcast_scalar(nfreq, master_task) call broadcast_scalar(tfrz_option, master_task) + call broadcast_scalar(saltflux_option, master_task) + call broadcast_scalar(ice_ref_salinity, master_task) call broadcast_scalar(ocn_data_format, master_task) call broadcast_scalar(bgc_data_type, master_task) call broadcast_scalar(fe_data_type, master_task) @@ -1414,6 +1511,12 @@ subroutine input_data write(nu_diag,*) subname//' WARNING: For consistency, set tfrz_option = mushy' endif endif + if (ktherm == 1 .and. trim(saltflux_option) /= 'constant') then + if (my_task == master_task) then + write(nu_diag,*) subname//' WARNING: ktherm = 1 and saltflux_option = ',trim(saltflux_option) + write(nu_diag,*) subname//' WARNING: For consistency, set saltflux_option = constant' + endif + endif !tcraig if (ktherm == 1 .and. .not.sw_redist) then if (my_task == master_task) then @@ -1532,7 +1635,7 @@ subroutine input_data write(nu_diag,*) subname//' WARNING: tr_fsd=T but wave_spec=F - not recommended' endif end if - + ! 
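Every namelist group above now follows the same pattern: position the unit with goto_nml, read, and on failure backspace and re-read the offending line as plain text so the abort message can name it (this relies on a failed namelist read leaving the unit just past the bad line, the common compiler behavior). goto_nml itself is not shown in this hunk; an assumed minimal implementation consistent with how it is called might look like:

    ! Assumed sketch, not the actual ice_fileunits routine: scan for the
    ! '&name' group header and leave the unit positioned to read it.
    subroutine goto_nml(iunit, nml_name, status)
       implicit none
       integer,          intent(in)  :: iunit
       character(len=*), intent(in)  :: nml_name
       integer,          intent(out) :: status
       character(len=256) :: line
       rewind(iunit)
       do
          read(iunit, '(A)', iostat=status) line
          if (status /= 0) return                       ! EOF: not found
          if (index(adjustl(line), '&'//trim(nml_name)) == 1) then
             backspace(iunit)                           ! sit on the header
             status = 0
             return
          endif
       end do
    end subroutine goto_nml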
compute grid locations for thermo, u and v fields grid_ice_thrm = 'T' @@ -1974,6 +2077,10 @@ subroutine input_data write(nu_diag,*) ' WARNING: will impact ocean forcing interaction' write(nu_diag,*) ' WARNING: coupled forcing will be modified by mixed layer routine' endif + write(nu_diag,1030) ' saltflux_option = ', trim(saltflux_option) + if (trim(saltflux_option) == 'constant') then + write(nu_diag,1002) ' ice_ref_salinity = ',ice_ref_salinity + endif if (trim(tfrz_option) == 'minus1p8') then tmpstr2 = ' : constant ocean freezing temperature (-1.8C)' elseif (trim(tfrz_option) == 'linear_salt') then @@ -2027,11 +2134,14 @@ subroutine input_data if (trim(wave_spec_type) == 'none') then tmpstr2 = ' : no wave data provided, no wave-ice interactions' elseif (trim(wave_spec_type) == 'profile') then - tmpstr2 = ' : use fixed dummy wave spectrum for testing, sea surface height generated using constant phase (1 iteration of wave fracture)' + tmpstr2 = ' : use fixed dummy wave spectrum for testing, sea surface height generated '// & + 'using constant phase (1 iteration of wave fracture)' elseif (trim(wave_spec_type) == 'constant') then - tmpstr2 = ' : wave spectrum data file provided, sea surface height generated using constant phase (1 iteration of wave fracture)' + tmpstr2 = ' : wave spectrum data file provided, sea surface height generated '// & + 'using constant phase (1 iteration of wave fracture)' elseif (trim(wave_spec_type) == 'random') then - tmpstr2 = ' : wave spectrum data file provided, sea surface height generated using random number (multiple iterations of wave fracture to convergence)' + tmpstr2 = ' : wave spectrum data file provided, sea surface height generated using '// & + 'random number (multiple iterations of wave fracture to convergence)' else tmpstr2 = ' : unknown value' endif @@ -2312,8 +2422,7 @@ subroutine input_data grid_type /= 'rectangular' .and. & grid_type /= 'cpom_grid' .and. & grid_type /= 'regional' .and. & - grid_type /= 'latlon' .and. & - grid_type /= 'setmask' ) then + grid_type /= 'latlon') then if (my_task == master_task) write(nu_diag,*) subname//' ERROR: unknown grid_type=',trim(grid_type) abort_list = trim(abort_list)//":20" endif @@ -2375,6 +2484,7 @@ subroutine input_data wave_spec_type_in = wave_spec_type, & wave_spec_in=wave_spec, nfreq_in=nfreq, & tfrz_option_in=tfrz_option, kalg_in=kalg, fbot_xfer_type_in=fbot_xfer_type, & + saltflux_option_in=saltflux_option, ice_ref_salinity_in=ice_ref_salinity, & Pstar_in=Pstar, Cstar_in=Cstar, iceruf_in=iceruf, iceruf_ocn_in=iceruf_ocn, calc_dragio_in=calc_dragio, & windmin_in=windmin, drhosdwind_in=drhosdwind, & rsnw_fall_in=rsnw_fall, rsnw_tmax_in=rsnw_tmax, rhosnew_in=rhosnew, & @@ -2790,7 +2900,7 @@ subroutine set_state_var (nx_block, ny_block, & indxi, indxj ! compressed indices for cells with aicen > puny real (kind=dbl_kind) :: & - Tsfc, sum, hbar, abar, puny, rhos, Lfresh, rad_to_deg, rsnw_fall, dist_ratio + Tsfc, sum, hbar, abar, puny, rhos, Lfresh, rad_to_deg, rsnw_fall, dist_ratio, Tffresh real (kind=dbl_kind), dimension(ncat) :: & ainit, hinit ! 
initial area, thickness @@ -2832,7 +2942,7 @@ subroutine set_state_var (nx_block, ny_block, & nt_smice_out=nt_smice, nt_smliq_out=nt_smliq, & nt_rhos_out=nt_rhos, nt_rsnw_out=nt_rsnw) call icepack_query_parameters(rhos_out=rhos, Lfresh_out=Lfresh, puny_out=puny, & - rad_to_deg_out=rad_to_deg, rsnw_fall_out=rsnw_fall) + rad_to_deg_out=rad_to_deg, rsnw_fall_out=rsnw_fall, Tffresh_out=Tffresh) call icepack_query_parameters(secday_out=secday, pi_out=pi) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -3070,7 +3180,12 @@ subroutine set_state_var (nx_block, ny_block, & do i = ilo, ihi if (tmask(i,j)) then ! place ice in high latitudes where ocean sfc is cold +#ifdef CESMCOUPLED + ! Option to use Tair instead. + if ( (Tair (i,j) <= Tffresh) .and. & +#else if ( (sst (i,j) <= Tf(i,j)+p2) .and. & +#endif (TLAT(i,j) < edge_init_sh/rad_to_deg .or. & TLAT(i,j) > edge_init_nh/rad_to_deg) ) then icells = icells + 1 diff --git a/cicecore/cicedynB/general/ice_state.F90 b/cicecore/cicedyn/general/ice_state.F90 similarity index 100% rename from cicecore/cicedynB/general/ice_state.F90 rename to cicecore/cicedyn/general/ice_state.F90 diff --git a/cicecore/cicedynB/general/ice_step_mod.F90 b/cicecore/cicedyn/general/ice_step_mod.F90 similarity index 100% rename from cicecore/cicedynB/general/ice_step_mod.F90 rename to cicecore/cicedyn/general/ice_step_mod.F90 diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90 b/cicecore/cicedyn/infrastructure/comm/mpi/ice_boundary.F90 similarity index 99% rename from cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90 rename to cicecore/cicedyn/infrastructure/comm/mpi/ice_boundary.F90 index 9fda67dad..68436cd0f 100644 --- a/cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90 +++ b/cicecore/cicedyn/infrastructure/comm/mpi/ice_boundary.F90 @@ -6915,9 +6915,6 @@ subroutine primary_grid_lengths_global_ext( & ! This subroutine adds ghost cells to global primary grid lengths array ! ARRAY_I and outputs result to array ARRAY_O -! Note duplicate implementation of this subroutine in: -! 
cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 - use ice_constants, only: c0 use ice_domain_size, only: nx_global, ny_global diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_broadcast.F90 b/cicecore/cicedyn/infrastructure/comm/mpi/ice_broadcast.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/comm/mpi/ice_broadcast.F90 rename to cicecore/cicedyn/infrastructure/comm/mpi/ice_broadcast.F90 diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_communicate.F90 b/cicecore/cicedyn/infrastructure/comm/mpi/ice_communicate.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/comm/mpi/ice_communicate.F90 rename to cicecore/cicedyn/infrastructure/comm/mpi/ice_communicate.F90 diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_exit.F90 b/cicecore/cicedyn/infrastructure/comm/mpi/ice_exit.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/comm/mpi/ice_exit.F90 rename to cicecore/cicedyn/infrastructure/comm/mpi/ice_exit.F90 diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_gather_scatter.F90 b/cicecore/cicedyn/infrastructure/comm/mpi/ice_gather_scatter.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/comm/mpi/ice_gather_scatter.F90 rename to cicecore/cicedyn/infrastructure/comm/mpi/ice_gather_scatter.F90 diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_global_reductions.F90 b/cicecore/cicedyn/infrastructure/comm/mpi/ice_global_reductions.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/comm/mpi/ice_global_reductions.F90 rename to cicecore/cicedyn/infrastructure/comm/mpi/ice_global_reductions.F90 diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_reprosum.F90 b/cicecore/cicedyn/infrastructure/comm/mpi/ice_reprosum.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/comm/mpi/ice_reprosum.F90 rename to cicecore/cicedyn/infrastructure/comm/mpi/ice_reprosum.F90 diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_timers.F90 b/cicecore/cicedyn/infrastructure/comm/mpi/ice_timers.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/comm/mpi/ice_timers.F90 rename to cicecore/cicedyn/infrastructure/comm/mpi/ice_timers.F90 diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 b/cicecore/cicedyn/infrastructure/comm/serial/ice_boundary.F90 similarity index 99% rename from cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 rename to cicecore/cicedyn/infrastructure/comm/serial/ice_boundary.F90 index f10a9f432..2b81c4441 100644 --- a/cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 +++ b/cicecore/cicedyn/infrastructure/comm/serial/ice_boundary.F90 @@ -4686,9 +4686,6 @@ subroutine primary_grid_lengths_global_ext( & ! This subroutine adds ghost cells to global primary grid lengths array ! ARRAY_I and outputs result to array ARRAY_O -! Note duplicate implementation of this subroutine in: -! 
cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90 - use ice_constants, only: c0 use ice_domain_size, only: nx_global, ny_global diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_broadcast.F90 b/cicecore/cicedyn/infrastructure/comm/serial/ice_broadcast.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/comm/serial/ice_broadcast.F90 rename to cicecore/cicedyn/infrastructure/comm/serial/ice_broadcast.F90 diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_communicate.F90 b/cicecore/cicedyn/infrastructure/comm/serial/ice_communicate.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/comm/serial/ice_communicate.F90 rename to cicecore/cicedyn/infrastructure/comm/serial/ice_communicate.F90 diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_exit.F90 b/cicecore/cicedyn/infrastructure/comm/serial/ice_exit.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/comm/serial/ice_exit.F90 rename to cicecore/cicedyn/infrastructure/comm/serial/ice_exit.F90 diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_gather_scatter.F90 b/cicecore/cicedyn/infrastructure/comm/serial/ice_gather_scatter.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/comm/serial/ice_gather_scatter.F90 rename to cicecore/cicedyn/infrastructure/comm/serial/ice_gather_scatter.F90 diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_global_reductions.F90 b/cicecore/cicedyn/infrastructure/comm/serial/ice_global_reductions.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/comm/serial/ice_global_reductions.F90 rename to cicecore/cicedyn/infrastructure/comm/serial/ice_global_reductions.F90 diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_reprosum.F90 b/cicecore/cicedyn/infrastructure/comm/serial/ice_reprosum.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/comm/serial/ice_reprosum.F90 rename to cicecore/cicedyn/infrastructure/comm/serial/ice_reprosum.F90 diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_timers.F90 b/cicecore/cicedyn/infrastructure/comm/serial/ice_timers.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/comm/serial/ice_timers.F90 rename to cicecore/cicedyn/infrastructure/comm/serial/ice_timers.F90 diff --git a/cicecore/cicedynB/infrastructure/ice_blocks.F90 b/cicecore/cicedyn/infrastructure/ice_blocks.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/ice_blocks.F90 rename to cicecore/cicedyn/infrastructure/ice_blocks.F90 diff --git a/cicecore/cicedynB/infrastructure/ice_domain.F90 b/cicecore/cicedyn/infrastructure/ice_domain.F90 similarity index 97% rename from cicecore/cicedynB/infrastructure/ice_domain.F90 rename to cicecore/cicedyn/infrastructure/ice_domain.F90 index 10254aa93..ff1fac723 100644 --- a/cicecore/cicedynB/infrastructure/ice_domain.F90 +++ b/cicecore/cicedyn/infrastructure/ice_domain.F90 @@ -104,7 +104,7 @@ subroutine init_domain_blocks use ice_distribution, only: processor_shape use ice_domain_size, only: ncat, nilyr, nslyr, max_blocks, & nx_global, ny_global, block_size_x, block_size_y - + use ice_fileunits, only: goto_nml !---------------------------------------------------------------------- ! ! local variables @@ -114,6 +114,9 @@ subroutine init_domain_blocks integer (int_kind) :: & nml_error ! namelist read error flag + character(len=char_len) :: nml_name ! text namelist name + character(len=char_len_long) :: tmpstr2 ! 
for namelist check + character(len=*), parameter :: subname = '(init_domain_blocks)' !---------------------------------------------------------------------- @@ -167,26 +170,39 @@ subroutine init_domain_blocks landblockelim = .true. ! on by default if (my_task == master_task) then - write(nu_diag,*) subname,' Reading domain_nml' - + nml_name = 'domain_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + call get_fileunit(nu_nml) open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then call abort_ice(subname//'ERROR: domain_nml open file '// & - trim(nml_filename), & - file=__FILE__, line=__LINE__) + trim(nml_filename), & + file=__FILE__, line=__LINE__) endif + call goto_nml(nu_nml,trim(nml_name),nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & + file=__FILE__, line=__LINE__) + endif + nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=domain_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: ' // trim(nml_name) // ' reading ' // & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: domain_nml reading ', & - file=__FILE__, line=__LINE__) - endif + close(nu_nml) call release_fileunit(nu_nml) + endif call broadcast_scalar(nprocs, master_task) diff --git a/cicecore/cicedynB/infrastructure/ice_grid.F90 b/cicecore/cicedyn/infrastructure/ice_grid.F90 similarity index 99% rename from cicecore/cicedynB/infrastructure/ice_grid.F90 rename to cicecore/cicedyn/infrastructure/ice_grid.F90 index dfccdd413..b775c21f2 100644 --- a/cicecore/cicedynB/infrastructure/ice_grid.F90 +++ b/cicecore/cicedyn/infrastructure/ice_grid.F90 @@ -507,23 +507,24 @@ subroutine init_grid2 ! Diagnose OpenMP thread schedule, force order in output !----------------------------------------------------------------- +! This code does not work in CESM. Needs to be investigated further. #if defined (_OPENMP) - !$OMP PARALLEL DO ORDERED PRIVATE(iblk) SCHEDULE(runtime) - do iblk = 1, nblocks - if (my_task == master_task) then - !$OMP ORDERED - if (iblk == 1) then - call omp_get_schedule(ompsk,ompcs) - write(nu_diag,*) '' - write(nu_diag,*) subname,' OpenMP runtime thread schedule:' - write(nu_diag,*) subname,' omp schedule = ',ompsk,ompcs - endif - write(nu_diag,*) subname,' block, thread = ',iblk,OMP_GET_THREAD_NUM() - call flush_fileunit(nu_diag) - !$OMP END ORDERED - endif - enddo - !$OMP END PARALLEL DO + !$OMP PARALLEL DO ORDERED PRIVATE(iblk) SCHEDULE(runtime) + do iblk = 1, nblocks + if (my_task == master_task) then + !$OMP ORDERED + if (iblk == 1) then + call omp_get_schedule(ompsk,ompcs) +! write(nu_diag,*) '' + write(nu_diag,*) subname,' OpenMP runtime thread schedule:' + write(nu_diag,*) subname,' omp schedule = ',ompsk,ompcs + endif + write(nu_diag,*) subname,' block, thread = ',iblk,OMP_GET_THREAD_NUM() + !$OMP END ORDERED + endif + enddo + !$OMP END PARALLEL DO + call flush_fileunit(nu_diag) #endif !----------------------------------------------------------------- @@ -1393,15 +1394,15 @@ subroutine rectgrid ! original rectgrid defines latlon first call rectgrid_scale_dxdy else - ! rectgrid no grid spacing. + ! rectgrid no grid spacing. ! 
original method with addition to use namelist lat/lon reference - + if (my_task == master_task) then work_g1 = c0 length = dxrect*cm_to_m/radius*rad_to_deg - + work_g1(1,:) = lonrefrect ! reference lon from namelist - + do j = 1, ny_global do i = 2, nx_global work_g1(i,j) = work_g1(i-1,j) + length ! ULON @@ -1413,13 +1414,13 @@ subroutine rectgrid field_loc_NEcorner, field_type_scalar) call ice_HaloExtrapolate(ULON, distrb_info, & ew_boundary_type, ns_boundary_type) - + if (my_task == master_task) then work_g1 = c0 length = dyrect*cm_to_m/radius*rad_to_deg - + work_g1(:,1) = latrefrect ! reference latitude from namelist - + do i = 1, nx_global do j = 2, ny_global work_g1(i,j) = work_g1(i,j-1) + length ! ULAT @@ -1532,32 +1533,32 @@ subroutine rectgrid end subroutine rectgrid !======================================================================= - + subroutine rectgrid_scale_dxdy - + ! generate a variable spaced rectangluar grid. ! extend spacing from center of grid outward. use ice_constants, only: c0, c1, c2, radius, cm_to_m, & field_loc_center, field_loc_NEcorner, field_type_scalar - + integer (kind=int_kind) :: & i, j, iblk, & imid, jmid, & center1, center2 ! array centers for expanding dx, dy - + real (kind=dbl_kind) :: & length, & rad_to_deg real (kind=dbl_kind), dimension(:,:), allocatable :: & work_g1 - + character(len=*), parameter :: subname = '(rectgrid_scale_dxdy)' - + call icepack_query_parameters(rad_to_deg_out=rad_to_deg) allocate(work_g1(nx_global,ny_global)) - + ! determine dx spacing ! strategy: initialize with dxrect. ! if want to scale the grid, work from center outwards, @@ -1565,51 +1566,51 @@ subroutine rectgrid_scale_dxdy ! this assumes dx varies in x direction only. ! (i.e, dx is the same across same y location) if (my_task == master_task) then - + ! initialize with initial dxrect work_g1(:,:) = dxrect - + ! check if nx is even or odd ! if even, middle 2 columns are center ! of odd, middle 1 column is center if (mod(nx_global,2) == 0) then ! nx_global is even - + ! with even number of x locatons, ! the center two y columns are center center1 = nx_global/2 ! integer math center2 = center1 + 1 ! integer math - + else ! nx_global = odd ! only one center index. set center2=center1 center1 = ceiling(real(nx_global/2),int_kind) center2 = center1 endif - + ! note loop over only half the x grid points (center1)-1 ! working from the center outward. do j = 1, ny_global do i = 1, center1-1 ! work from center1 to left work_g1(center1-i,j) = dxscale*work_g1(center1-i+1,j) - + ! work from center2 to right work_g1(center2+i,j) = dxscale*work_g1(center2+i-1,j) enddo ! i enddo ! j - + endif ! my_task == master_task - - + + ! note work_g1 is converted to meters in primary_grid_lengths_HTN call primary_grid_lengths_HTN(work_g1) ! dxU, dxT, dxN, dxE - + ! make ULON array if (my_task == master_task) then - + ! make first column reference lon in radians. ! the remaining work_g1 is still dx in meters work_g1(1,:) = lonrefrect/rad_to_deg ! radians - + ! loop over remaining points and add spacing to successive ! x locations do j = 1, ny_global @@ -1623,7 +1624,7 @@ subroutine rectgrid_scale_dxdy field_loc_NEcorner, field_type_scalar) call ice_HaloExtrapolate(ULON, distrb_info, & ew_boundary_type, ns_boundary_type) - + ! determine dy spacing ! strategy: initialize with dyrect. ! if want to scale the grid, work from center outwards, @@ -1631,7 +1632,7 @@ subroutine rectgrid_scale_dxdy ! this assumes dy varies in y direction only. ! 
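The center-outward scaling strategy described above is easiest to see on a tiny grid. A self-contained example for nx_global = 6 and dxscale = 1.5; note the odd-nx center index only comes out right when the halving is done in real arithmetic, i.e. ceiling(real(nx)/2.0).

    program dx_scale_demo
       implicit none
       integer, parameter :: nx = 6
       real,    parameter :: dxrect = 1.0, dxscale = 1.5
       real    :: dx(nx)
       integer :: i, center1, center2
       dx = dxrect
       if (mod(nx,2) == 0) then
          center1 = nx/2                    ! two center columns: 3 and 4
          center2 = center1 + 1
       else
          center1 = ceiling(real(nx)/2.0)   ! single center column
          center2 = center1
       endif
       do i = 1, center1-1
          dx(center1-i) = dxscale*dx(center1-i+1)   ! grow to the left
          dx(center2+i) = dxscale*dx(center2+i-1)   ! grow to the right
       end do
       print '(6f7.3)', dx   ! 2.250  1.500  1.000  1.000  1.500  2.250
    end program dx_scale_demo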
(i.e, dy is the same across same x location) if (my_task == master_task) then - + ! initialize with initial dxrect work_g1(:,:) = dyrect @@ -1639,25 +1640,25 @@ subroutine rectgrid_scale_dxdy ! if even, middle 2 rows are center ! of odd, middle 1 row is center if (mod(ny_global,2) == 0) then ! ny_global is even - + ! with even number of x locatons, ! the center two y columns are center center1 = ny_global/2 ! integer math center2 = center1 + 1 ! integer math - + else ! ny_global = odd ! only one center index. set center2=center1 center1 = ceiling(real(ny_global/2),int_kind) center2 = center1 endif - + ! note loop over only half the y grid points (center1)-1 ! working from the center outward. do i = 1, nx_global do j = 1, center1-1 ! work from center1 to bottom work_g1(i,center1-j) = dyscale*work_g1(i,center1-j+1) - + ! work from center2 to top work_g1(i,center2+j) = dyscale*work_g1(i,center2+j-1) enddo ! i @@ -1665,15 +1666,15 @@ subroutine rectgrid_scale_dxdy endif ! mytask == master_task ! note work_g1 is converted to meters primary_grid_lengths_HTE call primary_grid_lengths_HTE(work_g1) ! dyU, dyT, dyN, dyE - + ! make ULAT array if (my_task == master_task) then - + ! make first row reference lat in radians. ! the remaining work_g1 is still dy in meters work_g1(:,1) = latrefrect/rad_to_deg ! radians - - + + ! loop over remaining points and add spacing to successive ! x locations do j = 2, ny_global ! start from j=2. j=1 is latrefrect @@ -1687,10 +1688,10 @@ subroutine rectgrid_scale_dxdy field_loc_NEcorner, field_type_scalar) call ice_HaloExtrapolate(ULAT, distrb_info, & ew_boundary_type, ns_boundary_type) - + deallocate(work_g1) - + end subroutine rectgrid_scale_dxdy !======================================================================= diff --git a/cicecore/cicedynB/infrastructure/ice_memusage.F90 b/cicecore/cicedyn/infrastructure/ice_memusage.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/ice_memusage.F90 rename to cicecore/cicedyn/infrastructure/ice_memusage.F90 diff --git a/cicecore/cicedynB/infrastructure/ice_memusage_gptl.c b/cicecore/cicedyn/infrastructure/ice_memusage_gptl.c similarity index 98% rename from cicecore/cicedynB/infrastructure/ice_memusage_gptl.c rename to cicecore/cicedyn/infrastructure/ice_memusage_gptl.c index 309c8824b..32b31171d 100644 --- a/cicecore/cicedynB/infrastructure/ice_memusage_gptl.c +++ b/cicecore/cicedyn/infrastructure/ice_memusage_gptl.c @@ -196,7 +196,7 @@ int ice_memusage_gptl (int *size, int *rss, int *share, int *text, int *datastac */ ret = fscanf (fd, "%d %d %d %d %d %d %d", - size, rss, share, text, datastack, &dum, &dum); + size, rss, share, text, datastack, &dum, &dum); ret = fclose (fd); return 0; diff --git a/cicecore/cicedynB/infrastructure/ice_read_write.F90 b/cicecore/cicedyn/infrastructure/ice_read_write.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/ice_read_write.F90 rename to cicecore/cicedyn/infrastructure/ice_read_write.F90 diff --git a/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 b/cicecore/cicedyn/infrastructure/ice_restart_driver.F90 similarity index 92% rename from cicecore/cicedynB/infrastructure/ice_restart_driver.F90 rename to cicecore/cicedyn/infrastructure/ice_restart_driver.F90 index bd5a49eaf..ffe9ec587 100644 --- a/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 +++ b/cicecore/cicedyn/infrastructure/ice_restart_driver.F90 @@ -55,7 +55,7 @@ subroutine dumpfile(filename_spec) use ice_blocks, only: nx_block, ny_block use ice_domain, only: nblocks use 
ice_domain_size, only: nilyr, nslyr, ncat, max_blocks - use ice_dyn_shared, only: iceUmask, iceEmask, iceNmask + use ice_dyn_shared, only: iceUmask, iceEmask, iceNmask, kdyn use ice_flux, only: scale_factor, swvdr, swvdf, swidr, swidf, & strocnxT_iavg, strocnyT_iavg, sst, frzmlt, & stressp_1, stressp_2, stressp_3, stressp_4, & @@ -215,45 +215,52 @@ subroutine dumpfile(filename_spec) !----------------------------------------------------------------- ! ice mask for dynamics !----------------------------------------------------------------- - - !$OMP PARALLEL DO PRIVATE(iblk,i,j) - do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - work1(i,j,iblk) = c0 - if (iceUmask(i,j,iblk)) work1(i,j,iblk) = c1 - enddo - enddo - enddo - !$OMP END PARALLEL DO - call write_restart_field(nu_dump,0,work1,'ruf8','iceumask',1,diag) - - if (grid_ice == 'CD' .or. grid_ice == 'C') then - + if (kdyn > 0) then !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks do j = 1, ny_block do i = 1, nx_block work1(i,j,iblk) = c0 - if (iceNmask(i,j,iblk)) work1(i,j,iblk) = c1 + if (iceUmask(i,j,iblk)) work1(i,j,iblk) = c1 enddo enddo enddo !$OMP END PARALLEL DO - call write_restart_field(nu_dump,0,work1,'ruf8','icenmask',1,diag) + call write_restart_field(nu_dump,0,work1,'ruf8','iceumask',1,diag) - !$OMP PARALLEL DO PRIVATE(iblk,i,j) - do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - work1(i,j,iblk) = c0 - if (iceEmask(i,j,iblk)) work1(i,j,iblk) = c1 - enddo + if (grid_ice == 'CD' .or. grid_ice == 'C') then + + !$OMP PARALLEL DO PRIVATE(iblk,i,j) + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + work1(i,j,iblk) = c0 + if (iceNmask(i,j,iblk)) work1(i,j,iblk) = c1 + enddo + enddo enddo - enddo - !$OMP END PARALLEL DO - call write_restart_field(nu_dump,0,work1,'ruf8','iceemask',1,diag) + !$OMP END PARALLEL DO + call write_restart_field(nu_dump,0,work1,'ruf8','icenmask',1,diag) + !$OMP PARALLEL DO PRIVATE(iblk,i,j) + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + work1(i,j,iblk) = c0 + if (iceEmask(i,j,iblk)) work1(i,j,iblk) = c1 + enddo + enddo + enddo + !$OMP END PARALLEL DO + call write_restart_field(nu_dump,0,work1,'ruf8','iceemask',1,diag) + endif + else + work1(:,:,:) = c0 + call write_restart_field(nu_dump,0,work1,'ruf8','iceumask',1,diag) + if (grid_ice == 'CD' .or. grid_ice == 'C') then + call write_restart_field(nu_dump,0,work1,'ruf8','icenmask',1,diag) + call write_restart_field(nu_dump,0,work1,'ruf8','iceemask',1,diag) + endif endif ! for mixed layer model @@ -277,7 +284,7 @@ subroutine restartfile (ice_ic) use ice_domain, only: nblocks, halo_info use ice_domain_size, only: nilyr, nslyr, ncat, & max_blocks - use ice_dyn_shared, only: iceUmask, iceEmask, iceNmask + use ice_dyn_shared, only: iceUmask, iceEmask, iceNmask,kdyn use ice_flux, only: scale_factor, swvdr, swvdf, swidr, swidf, & strocnxT_iavg, strocnyT_iavg, sst, frzmlt, & stressp_1, stressp_2, stressp_3, stressp_4, & @@ -524,57 +531,76 @@ subroutine restartfile (ice_ic) !----------------------------------------------------------------- ! ice mask for dynamics !----------------------------------------------------------------- - if (my_task == master_task) & - write(nu_diag,*) 'ice mask for dynamics' - - call read_restart_field(nu_restart,0,work1,'ruf8', & - 'iceumask',1,diag,field_loc_center, field_type_scalar) - - iceUmask(:,:,:) = .false. 
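The write/read pair above round-trips the logical dynamics masks through a real-valued restart field: 1.0/0.0 on write, a 0.5 (p5) threshold on read, which is robust to any representable rounding noise. The idiom in isolation:

    program mask_roundtrip
       implicit none
       integer, parameter :: n = 4
       real    :: work1(n)
       logical :: mask(n) = [.true., .false., .true., .false.]
       work1 = merge(1.0, 0.0, mask)   ! encode: 1.0 where ice present
       ! ... write_restart_field / read_restart_field would go here ...
       mask  = work1 > 0.5             ! decode with the p5 threshold
       print *, mask                   ! T F T F
    end program mask_roundtrip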
- !$OMP PARALLEL DO PRIVATE(iblk,i,j) - do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - if (work1(i,j,iblk) > p5) iceUmask(i,j,iblk) = .true. - enddo - enddo - enddo - !$OMP END PARALLEL DO + if (kdyn > 0) then - if (grid_ice == 'CD' .or. grid_ice == 'C') then - - if (query_field(nu_restart,'icenmask')) then + if (my_task == master_task) & + write(nu_diag,*) 'ice mask for dynamics' + if (query_field(nu_restart,'iceumask')) then call read_restart_field(nu_restart,0,work1,'ruf8', & - 'icenmask',1,diag,field_loc_center, field_type_scalar) + 'iceumask',1,diag,field_loc_center, field_type_scalar) - iceNmask(:,:,:) = .false. + iceUmask(:,:,:) = .false. !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks do j = 1, ny_block do i = 1, nx_block - if (work1(i,j,iblk) > p5) iceNmask(i,j,iblk) = .true. + if (work1(i,j,iblk) > p5) iceUmask(i,j,iblk) = .true. enddo enddo enddo !$OMP END PARALLEL DO endif - - if (query_field(nu_restart,'iceemask')) then - call read_restart_field(nu_restart,0,work1,'ruf8', & - 'iceemask',1,diag,field_loc_center, field_type_scalar) - - iceEmask(:,:,:) = .false. - !$OMP PARALLEL DO PRIVATE(iblk,i,j) - do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - if (work1(i,j,iblk) > p5) iceEmask(i,j,iblk) = .true. + if (grid_ice == 'CD' .or. grid_ice == 'C') then + + if (query_field(nu_restart,'icenmask')) then + call read_restart_field(nu_restart,0,work1,'ruf8', & + 'icenmask',1,diag,field_loc_center, field_type_scalar) + + iceNmask(:,:,:) = .false. + !$OMP PARALLEL DO PRIVATE(iblk,i,j) + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + if (work1(i,j,iblk) > p5) iceNmask(i,j,iblk) = .true. + enddo + enddo enddo + !$OMP END PARALLEL DO + endif + + if (query_field(nu_restart,'iceemask')) then + call read_restart_field(nu_restart,0,work1,'ruf8', & + 'iceemask',1,diag,field_loc_center, field_type_scalar) + + iceEmask(:,:,:) = .false. + !$OMP PARALLEL DO PRIVATE(iblk,i,j) + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + if (work1(i,j,iblk) > p5) iceEmask(i,j,iblk) = .true. + enddo + enddo enddo - enddo - !$OMP END PARALLEL DO + !$OMP END PARALLEL DO + endif + endif + else + if (my_task == master_task) & + write(nu_diag,*) 'ice mask for dynamics - not used, however mandatory to read in binary files' + if (query_field(nu_restart,'iceumask')) then + call read_restart_field(nu_restart,0,work1,'ruf8', & + 'iceumask',1,diag,field_loc_center, field_type_scalar) + endif + if (grid_ice == 'CD' .or. grid_ice == 'C') then + if (query_field(nu_restart,'icenmask')) then + call read_restart_field(nu_restart,0,work1,'ruf8', & + 'icenmask',1,diag,field_loc_center, field_type_scalar) + endif + if (query_field(nu_restart,'iceemask')) then + call read_restart_field(nu_restart,0,work1,'ruf8', & + 'iceemask',1,diag,field_loc_center, field_type_scalar) + endif endif - endif ! 
set Tsfcn to c0 on land diff --git a/cicecore/cicedynB/infrastructure/ice_restoring.F90 b/cicecore/cicedyn/infrastructure/ice_restoring.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/ice_restoring.F90 rename to cicecore/cicedyn/infrastructure/ice_restoring.F90 diff --git a/cicecore/cicedynB/infrastructure/ice_shr_reprosum86.c b/cicecore/cicedyn/infrastructure/ice_shr_reprosum86.c similarity index 100% rename from cicecore/cicedynB/infrastructure/ice_shr_reprosum86.c rename to cicecore/cicedyn/infrastructure/ice_shr_reprosum86.c diff --git a/cicecore/cicedynB/infrastructure/io/io_binary/ice_history_write.F90 b/cicecore/cicedyn/infrastructure/io/io_binary/ice_history_write.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/io/io_binary/ice_history_write.F90 rename to cicecore/cicedyn/infrastructure/io/io_binary/ice_history_write.F90 diff --git a/cicecore/cicedynB/infrastructure/io/io_binary/ice_restart.F90 b/cicecore/cicedyn/infrastructure/io/io_binary/ice_restart.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/io/io_binary/ice_restart.F90 rename to cicecore/cicedyn/infrastructure/io/io_binary/ice_restart.F90 diff --git a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 b/cicecore/cicedyn/infrastructure/io/io_netcdf/ice_history_write.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 rename to cicecore/cicedyn/infrastructure/io/io_netcdf/ice_history_write.F90 diff --git a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 b/cicecore/cicedyn/infrastructure/io/io_netcdf/ice_restart.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 rename to cicecore/cicedyn/infrastructure/io/io_netcdf/ice_restart.F90 diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 b/cicecore/cicedyn/infrastructure/io/io_pio2/ice_history_write.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 rename to cicecore/cicedyn/infrastructure/io/io_pio2/ice_history_write.F90 diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_pio.F90 b/cicecore/cicedyn/infrastructure/io/io_pio2/ice_pio.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/io/io_pio2/ice_pio.F90 rename to cicecore/cicedyn/infrastructure/io/io_pio2/ice_pio.F90 diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 b/cicecore/cicedyn/infrastructure/io/io_pio2/ice_restart.F90 similarity index 98% rename from cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 rename to cicecore/cicedyn/infrastructure/io/io_pio2/ice_restart.F90 index 679a2b6e6..7019f7128 100644 --- a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 +++ b/cicecore/cicedyn/infrastructure/io/io_pio2/ice_restart.F90 @@ -749,10 +749,6 @@ subroutine read_restart_field(nu,nrec,work,atype,vname,ndim3,diag, & ! if (ndim3 == ncat .and. ncat>1) then if (ndim3 == ncat .and. ndims == 3) then call pio_read_darray(File, vardesc, iodesc3d_ncat, work, status) -!#ifndef CESM1_PIO -!! This only works for PIO2 -! where (work == PIO_FILL_DOUBLE) work = c0 -!#endif if (present(field_loc)) then do n=1,ndim3 call ice_HaloUpdate (work(:,:,n,:), halo_info, & @@ -762,10 +758,6 @@ subroutine read_restart_field(nu,nrec,work,atype,vname,ndim3,diag, & ! elseif (ndim3 == 1) then elseif (ndim3 == 1 .and. 
ndims == 2) then call pio_read_darray(File, vardesc, iodesc2d, work, status) -!#ifndef CESM1_PIO -!! This only works for PIO2 -! where (work == PIO_FILL_DOUBLE) work = c0 -!#endif if (present(field_loc)) then call ice_HaloUpdate (work(:,:,1,:), halo_info, & field_loc, field_type) @@ -942,11 +934,8 @@ logical function query_field(nu,vname) query_field = .false. - if (my_task == master_task) then - status = pio_inq_varid(File,trim(vname),vardesc) - if (status == PIO_noerr) query_field = .true. - endif - call broadcast_scalar(query_field,master_task) + status = pio_inq_varid(File,trim(vname),vardesc) + if (status == PIO_noerr) query_field = .true. end function query_field diff --git a/cicecore/cicedynB b/cicecore/cicedynB new file mode 120000 index 000000000..70695ca4b --- /dev/null +++ b/cicecore/cicedynB @@ -0,0 +1 @@ +cicedyn \ No newline at end of file diff --git a/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 b/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 index 0b8ed689e..85050d8c9 100644 --- a/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 @@ -70,9 +70,10 @@ subroutine cice_init use ice_diagnostics, only: init_diags use ice_domain, only: init_domain_blocks use ice_domain_size, only: ncat, nfsd - use ice_dyn_eap, only: init_eap, alloc_dyn_eap - use ice_dyn_shared, only: kdyn, init_dyn, alloc_dyn_shared + use ice_dyn_eap, only: init_eap + use ice_dyn_evp, only: init_evp use ice_dyn_vp, only: init_vp + use ice_dyn_shared, only: kdyn use ice_flux, only: init_coupler_flux, init_history_therm, & init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & @@ -111,7 +112,6 @@ subroutine cice_init call alloc_grid ! allocate grid call alloc_arrays_column ! allocate column arrays call alloc_state ! allocate state - call alloc_dyn_shared ! allocate dyn shared (init_uvel,init_vvel) call alloc_flux_bgc ! allocate flux_bgc call alloc_flux ! allocate flux call init_ice_timers ! initialize all timers @@ -122,9 +122,9 @@ subroutine cice_init call init_calendar ! initialize some calendar stuff call init_hist (dt) ! initialize output history file - call init_dyn (dt_dyn) ! define dynamics parameters, variables - if (kdyn == 2) then - call alloc_dyn_eap ! allocate dyn_eap arrays + if (kdyn == 1) then + call init_evp + else if (kdyn == 2) then call init_eap ! define eap dynamics parameters, variables else if (kdyn == 3) then call init_vp ! 
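With init_dyn folded into the per-scheme initializers, the driver hunks above all reduce to the same dispatch on kdyn; each init_* is now expected to call init_dyn_shared itself, which is why the drivers drop their alloc_dyn_shared calls. An equivalent select-case restatement of the hunks, not new behavior:

    select case (kdyn)
    case (1)
       call init_evp    ! elastic-viscous-plastic
    case (2)
       call init_eap    ! elastic-anisotropic-plastic
    case (3)
       call init_vp     ! implicit viscous-plastic
    end select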
define vp dynamics parameters, variables @@ -262,7 +262,7 @@ subroutine init_restart nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, & nt_iage, nt_FY, nt_aero, nt_fsd - character(len=*),parameter :: subname = '(init_restart)' + character(len=*), parameter :: subname = '(init_restart)' call icepack_query_tracer_sizes(ntrcr_out=ntrcr) call icepack_warnings_flush(nu_diag) diff --git a/cicecore/drivers/direct/nemo_concepts/CICE_InitMod.F90 b/cicecore/drivers/direct/nemo_concepts/CICE_InitMod.F90 index 0b8ed689e..85050d8c9 100644 --- a/cicecore/drivers/direct/nemo_concepts/CICE_InitMod.F90 +++ b/cicecore/drivers/direct/nemo_concepts/CICE_InitMod.F90 @@ -70,9 +70,10 @@ subroutine cice_init use ice_diagnostics, only: init_diags use ice_domain, only: init_domain_blocks use ice_domain_size, only: ncat, nfsd - use ice_dyn_eap, only: init_eap, alloc_dyn_eap - use ice_dyn_shared, only: kdyn, init_dyn, alloc_dyn_shared + use ice_dyn_eap, only: init_eap + use ice_dyn_evp, only: init_evp use ice_dyn_vp, only: init_vp + use ice_dyn_shared, only: kdyn use ice_flux, only: init_coupler_flux, init_history_therm, & init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & @@ -111,7 +112,6 @@ subroutine cice_init call alloc_grid ! allocate grid call alloc_arrays_column ! allocate column arrays call alloc_state ! allocate state - call alloc_dyn_shared ! allocate dyn shared (init_uvel,init_vvel) call alloc_flux_bgc ! allocate flux_bgc call alloc_flux ! allocate flux call init_ice_timers ! initialize all timers @@ -122,9 +122,9 @@ subroutine cice_init call init_calendar ! initialize some calendar stuff call init_hist (dt) ! initialize output history file - call init_dyn (dt_dyn) ! define dynamics parameters, variables - if (kdyn == 2) then - call alloc_dyn_eap ! allocate dyn_eap arrays + if (kdyn == 1) then + call init_evp + else if (kdyn == 2) then call init_eap ! define eap dynamics parameters, variables else if (kdyn == 3) then call init_vp ! define vp dynamics parameters, variables @@ -262,7 +262,7 @@ subroutine init_restart nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, & nt_iage, nt_FY, nt_aero, nt_fsd - character(len=*),parameter :: subname = '(init_restart)' + character(len=*), parameter :: subname = '(init_restart)' call icepack_query_tracer_sizes(ntrcr_out=ntrcr) call icepack_warnings_flush(nu_diag) diff --git a/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 b/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 index a8bf96ad2..5efa18a28 100644 --- a/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 +++ b/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 @@ -72,9 +72,10 @@ subroutine cice_init(mpicom_ice) use ice_diagnostics, only: init_diags use ice_domain, only: init_domain_blocks use ice_domain_size, only: ncat, nfsd - use ice_dyn_eap, only: init_eap, alloc_dyn_eap - use ice_dyn_shared, only: kdyn, init_dyn, alloc_dyn_shared + use ice_dyn_eap, only: init_eap + use ice_dyn_evp, only: init_evp use ice_dyn_vp, only: init_vp + use ice_dyn_shared, only: kdyn use ice_flux, only: init_coupler_flux, init_history_therm, & init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & @@ -125,7 +126,6 @@ subroutine cice_init(mpicom_ice) call alloc_grid ! allocate grid arrays call alloc_arrays_column ! allocate column arrays call alloc_state ! allocate state arrays - call alloc_dyn_shared ! allocate dyn shared arrays call alloc_flux_bgc ! allocate flux_bgc arrays call alloc_flux ! 
allocate flux arrays call init_ice_timers ! initialize all timers @@ -135,9 +135,9 @@ subroutine cice_init(mpicom_ice) call init_calendar ! initialize some calendar stuff call init_hist (dt) ! initialize output history file - call init_dyn (dt_dyn) ! define dynamics parameters, variables - if (kdyn == 2) then - call alloc_dyn_eap ! allocate dyn_eap arrays + if (kdyn == 1) then + call init_evp + else if (kdyn == 2) then call init_eap ! define eap dynamics parameters, variables else if (kdyn == 3) then call init_vp ! define vp dynamics parameters, variables diff --git a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 index 5fbde9cce..0ba672f3d 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 @@ -36,7 +36,6 @@ subroutine cice_init1() use ice_domain , only: init_domain_blocks use ice_arrays_column , only: alloc_arrays_column use ice_state , only: alloc_state - use ice_dyn_shared , only: alloc_dyn_shared use ice_flux_bgc , only: alloc_flux_bgc use ice_flux , only: alloc_flux use ice_timers , only: timer_total, init_ice_timers, ice_timer_start @@ -59,7 +58,6 @@ subroutine cice_init1() call alloc_grid ! allocate grid arrays call alloc_arrays_column ! allocate column arrays call alloc_state ! allocate state arrays - call alloc_dyn_shared ! allocate dyn shared arrays call alloc_flux_bgc ! allocate flux_bgc arrays call alloc_flux ! allocate flux arrays call init_ice_timers ! initialize all timers @@ -79,9 +77,10 @@ subroutine cice_init2() use ice_communicate , only: my_task, master_task use ice_diagnostics , only: init_diags use ice_domain_size , only: ncat, nfsd, nfreq - use ice_dyn_eap , only: init_eap, alloc_dyn_eap - use ice_dyn_shared , only: kdyn, init_dyn + use ice_dyn_eap , only: init_eap + use ice_dyn_evp , only: init_evp use ice_dyn_vp , only: init_vp + use ice_dyn_shared , only: kdyn use ice_flux , only: init_coupler_flux, init_history_therm use ice_flux , only: init_history_dyn, init_flux_atm, init_flux_ocn use ice_forcing , only: init_snowtable @@ -107,9 +106,9 @@ subroutine cice_init2() call init_calendar ! initialize some calendar stuff call init_hist (dt) ! initialize output history file - call init_dyn (dt_dyn) ! define dynamics parameters, variables - if (kdyn == 2) then - call alloc_dyn_eap ! allocate dyn_eap arrays + if (kdyn == 1) then + call init_evp ! define evp dynamics parameters, variables + elseif (kdyn == 2) then call init_eap ! define eap dynamics parameters, variables else if (kdyn == 3) then call init_vp ! 
define vp dynamics parameters, variables diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index afdee5590..b94fcff05 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -21,7 +21,7 @@ module ice_comp_nuopc use ice_import_export , only : ice_import, ice_export, ice_advertise_fields, ice_realize_fields use ice_domain_size , only : nx_global, ny_global - use ice_grid , only : grid_type, init_grid2 + use ice_grid , only : grid_format, init_grid2 use ice_communicate , only : init_communicate, my_task, master_task, mpi_comm_ice use ice_calendar , only : force_restart_now, write_ic, init_calendar use ice_calendar , only : idate, mday, mmonth, myear, year_init @@ -576,7 +576,7 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) call t_startf ('cice_init1') call cice_init1 call t_stopf ('cice_init1') - + !----------------------------------------------------------------- ! Advertise fields !----------------------------------------------------------------- @@ -684,7 +684,7 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! Initialize the cice mesh and the cice mask - if (trim(grid_type) == 'setmask') then + if (trim(grid_format) == 'meshnc') then ! In this case cap code determines the mask file call ice_mesh_setmask_from_maskfile(ice_maskfile, ice_mesh, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return diff --git a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 index e4db010de..60059e39a 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 @@ -27,7 +27,7 @@ module ice_import_export use ice_arrays_column , only : floe_rad_c, wave_spectrum use ice_state , only : vice, vsno, aice, aicen_init, trcr, trcrn use ice_grid , only : tlon, tlat, tarea, tmask, anglet, hm - use ice_grid , only : grid_type + use ice_grid , only : grid_format use ice_mesh_mod , only : ocn_gridcell_frac use ice_boundary , only : ice_HaloUpdate use ice_fileunits , only : nu_diag, flush_fileunit @@ -1059,7 +1059,7 @@ subroutine ice_export( exportState, rc ) call state_setexport(exportState, 'ice_fraction', input=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (trim(grid_type) == 'setmask') then + if (trim(grid_format) == 'meshnc') then call state_setexport(exportState, 'ice_mask', input=ocn_gridcell_frac, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return else diff --git a/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 b/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 index 22596429d..dc83c7703 100644 --- a/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 @@ -15,6 +15,7 @@ module CICE_InitMod use ice_kinds_mod use ice_exit, only: abort_ice use ice_fileunits, only: init_fileunits, nu_diag + use ice_memusage, only: ice_memusage_init, ice_memusage_print use icepack_intfc, only: icepack_aggregate use icepack_intfc, only: icepack_init_itd, icepack_init_itd_hist use icepack_intfc, only: icepack_init_fsd_bounds, icepack_init_wave @@ -47,9 +48,9 @@ subroutine CICE_Initialize(mpi_comm) integer (kind=int_kind), optional, intent(in) :: mpi_comm ! communicator from nuopc character(len=*), parameter :: subname='(CICE_Initialize)' - !-------------------------------------------------------------------- - ! 
model initialization - !-------------------------------------------------------------------- + !-------------------------------------------------------------------- + ! model initialization + !-------------------------------------------------------------------- if (present(mpi_comm)) then call cice_init(mpi_comm) @@ -70,15 +71,16 @@ subroutine cice_init(mpi_comm) floe_binwidth, c_fsd_range use ice_state, only: alloc_state use ice_flux_bgc, only: alloc_flux_bgc - use ice_calendar, only: dt, dt_dyn, istep, istep1, write_ic, & + use ice_calendar, only: dt, dt_dyn, write_ic, & init_calendar, advance_timestep, calc_timesteps use ice_communicate, only: init_communicate, my_task, master_task use ice_diagnostics, only: init_diags use ice_domain, only: init_domain_blocks use ice_domain_size, only: ncat, nfsd - use ice_dyn_eap, only: init_eap, alloc_dyn_eap - use ice_dyn_shared, only: kdyn, init_dyn, alloc_dyn_shared + use ice_dyn_eap, only: init_eap + use ice_dyn_evp, only: init_evp use ice_dyn_vp, only: init_vp + use ice_dyn_shared, only: kdyn use ice_flux, only: init_coupler_flux, init_history_therm, & init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & @@ -122,12 +124,17 @@ subroutine cice_init(mpi_comm) call input_zbgc ! vertical biogeochemistry namelist call count_tracers ! count tracers + ! Call this as early as possible, must be after memory_stats is read + if (my_task == master_task) then + call ice_memusage_init(nu_diag) + call ice_memusage_print(nu_diag,subname//':start') + endif + call init_domain_blocks ! set up block decomposition call init_grid1 ! domain distribution call alloc_grid ! allocate grid arrays call alloc_arrays_column ! allocate column arrays call alloc_state ! allocate state arrays - call alloc_dyn_shared ! allocate dyn shared arrays call alloc_flux_bgc ! allocate flux_bgc arrays call alloc_flux ! allocate flux arrays call init_ice_timers ! initialize all timers @@ -137,9 +144,9 @@ subroutine cice_init(mpi_comm) call init_calendar ! initialize some calendar stuff call init_hist (dt) ! initialize output history file - call init_dyn (dt_dyn) ! define dynamics parameters, variables - if (kdyn == 2) then - call alloc_dyn_eap ! allocate dyn_eap arrays + if (kdyn == 1) then + call init_evp + else if (kdyn == 2) then call init_eap ! define eap dynamics parameters, variables else if (kdyn == 3) then call init_vp ! define vp dynamics parameters, variables @@ -254,6 +261,10 @@ subroutine cice_init(mpi_comm) if (write_ic) call accum_hist(dt) ! 
write initial conditions + if (my_task == master_task) then + call ice_memusage_print(nu_diag,subname//':end') + endif + end subroutine cice_init !======================================================================= diff --git a/cicecore/drivers/standalone/cice/CICE_InitMod.F90 b/cicecore/drivers/standalone/cice/CICE_InitMod.F90 index 9c30b15a3..8de05a121 100644 --- a/cicecore/drivers/standalone/cice/CICE_InitMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_InitMod.F90 @@ -72,9 +72,10 @@ subroutine cice_init use ice_diagnostics, only: init_diags use ice_domain, only: init_domain_blocks use ice_domain_size, only: ncat, nfsd - use ice_dyn_eap, only: init_eap, alloc_dyn_eap - use ice_dyn_shared, only: kdyn, init_dyn, alloc_dyn_shared + use ice_dyn_eap, only: init_eap + use ice_dyn_evp, only: init_evp use ice_dyn_vp, only: init_vp + use ice_dyn_shared, only: kdyn use ice_flux, only: init_coupler_flux, init_history_therm, & init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & @@ -122,7 +123,6 @@ subroutine cice_init call alloc_grid ! allocate grid arrays call alloc_arrays_column ! allocate column arrays call alloc_state ! allocate state arrays - call alloc_dyn_shared ! allocate dyn shared arrays call alloc_flux_bgc ! allocate flux_bgc arrays call alloc_flux ! allocate flux arrays call init_ice_timers ! initialize all timers @@ -132,9 +132,9 @@ subroutine cice_init call init_calendar ! initialize some calendar stuff call init_hist (dt) ! initialize output history file - call init_dyn (dt_dyn) ! define dynamics parameters, variables - if (kdyn == 2) then - call alloc_dyn_eap ! allocate dyn_eap arrays + if (kdyn == 1) then + call init_evp + else if (kdyn == 2) then call init_eap ! define eap dynamics parameters, variables else if (kdyn == 3) then call init_vp ! define vp dynamics parameters, variables diff --git a/cicecore/drivers/unittest/gridavgchk/CICE_InitMod.F90 b/cicecore/drivers/unittest/gridavgchk/CICE_InitMod.F90 index 84d1a3a60..9ed1c5cbc 100644 --- a/cicecore/drivers/unittest/gridavgchk/CICE_InitMod.F90 +++ b/cicecore/drivers/unittest/gridavgchk/CICE_InitMod.F90 @@ -70,9 +70,10 @@ subroutine cice_init use ice_diagnostics, only: init_diags use ice_domain, only: init_domain_blocks use ice_domain_size, only: ncat, nfsd - use ice_dyn_eap, only: init_eap, alloc_dyn_eap - use ice_dyn_shared, only: kdyn, init_dyn, alloc_dyn_shared + use ice_dyn_eap, only: init_eap + use ice_dyn_evp, only: init_evp use ice_dyn_vp, only: init_vp + use ice_dyn_shared, only: kdyn use ice_flux, only: init_coupler_flux, init_history_therm, & init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & @@ -113,7 +114,6 @@ subroutine cice_init call alloc_grid ! allocate grid arrays call alloc_arrays_column ! allocate column arrays call alloc_state ! allocate state arrays - call alloc_dyn_shared ! allocate dyn shared arrays call alloc_flux_bgc ! allocate flux_bgc arrays call alloc_flux ! allocate flux arrays call init_ice_timers ! initialize all timers @@ -123,9 +123,9 @@ subroutine cice_init call init_calendar ! initialize some calendar stuff call init_hist (dt) ! initialize output history file - call init_dyn (dt_dyn) ! define dynamics parameters, variables - if (kdyn == 2) then - call alloc_dyn_eap ! allocate dyn_eap arrays + if (kdyn == 1) then + call init_evp + else if (kdyn == 2) then call init_eap ! define eap dynamics parameters, variables else if (kdyn == 3) then call init_vp ! 
define vp dynamics parameters, variables
diff --git a/cicecore/drivers/unittest/sumchk/CICE_InitMod.F90 b/cicecore/drivers/unittest/sumchk/CICE_InitMod.F90
index 84d1a3a60..8a5070d25 100644
--- a/cicecore/drivers/unittest/sumchk/CICE_InitMod.F90
+++ b/cicecore/drivers/unittest/sumchk/CICE_InitMod.F90
@@ -70,8 +70,10 @@ subroutine cice_init
 use ice_diagnostics, only: init_diags
 use ice_domain, only: init_domain_blocks
 use ice_domain_size, only: ncat, nfsd
-use ice_dyn_eap, only: init_eap, alloc_dyn_eap
-use ice_dyn_shared, only: kdyn, init_dyn, alloc_dyn_shared
+use ice_dyn_eap, only: init_eap
+use ice_dyn_evp, only: init_evp
+use ice_dyn_vp, only: init_vp
+use ice_dyn_shared, only: kdyn
 use ice_dyn_vp, only: init_vp
 use ice_flux, only: init_coupler_flux, init_history_therm, &
 init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux
@@ -113,7 +115,6 @@ subroutine cice_init
 call alloc_grid ! allocate grid arrays
 call alloc_arrays_column ! allocate column arrays
 call alloc_state ! allocate state arrays
-call alloc_dyn_shared ! allocate dyn shared arrays
 call alloc_flux_bgc ! allocate flux_bgc arrays
 call alloc_flux ! allocate flux arrays
 call init_ice_timers ! initialize all timers
@@ -123,9 +124,9 @@ subroutine cice_init
 call init_calendar ! initialize some calendar stuff
 call init_hist (dt) ! initialize output history file
-call init_dyn (dt_dyn) ! define dynamics parameters, variables
-if (kdyn == 2) then
-call alloc_dyn_eap ! allocate dyn_eap arrays
+if (kdyn == 1) then
+call init_evp
+else if (kdyn == 2) then
 call init_eap ! define eap dynamics parameters, variables
 else if (kdyn == 3) then
 call init_vp ! define vp dynamics parameters, variables
diff --git a/cicecore/shared/ice_fileunits.F90 b/cicecore/shared/ice_fileunits.F90
index 1854dda64..72a40f513 100644
--- a/cicecore/shared/ice_fileunits.F90
+++ b/cicecore/shared/ice_fileunits.F90
@@ -28,7 +28,8 @@ module ice_fileunits
 implicit none
 private
 public :: init_fileunits, get_fileunit, flush_fileunit, &
-release_fileunit, release_all_fileunits
+release_fileunit, release_all_fileunits, &
+goto_nml
 character (len=char_len), public :: &
 diag_type ! 'stdout' or 'file'
@@ -116,7 +117,11 @@ subroutine init_fileunits
 ice_IOUnitsInUse(ice_stdout) = .true. ! reserve unit 6
 ice_IOUnitsInUse(ice_stderr) = .true.
 if (nu_diag >= 1 .and. nu_diag <= ice_IOUnitsMaxUnit) &
-ice_IOUnitsInUse(nu_diag) = .true. ! reserve unit nu_diag
+ice_IOUnitsInUse(nu_diag) = .true. ! reserve unit nu_diag
+#ifdef CESMCOUPLED
+ ! CESM can have negative unit numbers.
+ if (nu_diag < 0) nu_diag_set = .true.
+#endif
 call get_fileunit(nu_grid)
 call get_fileunit(nu_kmt)
@@ -239,7 +244,12 @@ subroutine release_all_fileunits
 call release_fileunit(nu_rst_pointer)
 call release_fileunit(nu_history)
 call release_fileunit(nu_hdr)
+#ifdef CESMCOUPLED
+ ! CESM can have negative unit numbers.
+ if (nu_diag > 0 .and. nu_diag /= ice_stdout) call release_fileunit(nu_diag)
+#else
 if (nu_diag /= ice_stdout) call release_fileunit(nu_diag)
+#endif
 end subroutine release_all_fileunits
@@ -311,6 +321,56 @@ subroutine flush_fileunit(iunit)
 end subroutine flush_fileunit
+!=======================================================================
+
+!=======================================================================
+
+ subroutine goto_nml(iunit, nml, status)
+ ! Search for a namelist group within the ice_in file,
+ ! for compilers that do not allow optional namelists
+
+ ! passed variables
+ integer(kind=int_kind), intent(in) :: &
+ iunit ! 
namelist file unit
+
+ character(len=*), intent(in) :: &
+ nml ! namelist to search for
+
+ integer(kind=int_kind), intent(out) :: &
+ status ! status of subroutine
+
+ ! local variables
+ character(len=char_len) :: &
+ file_str, & ! string in file
+ nml_str ! namelist string to test
+
+ integer(kind=int_kind) :: &
+ i, n ! dummy integers
+
+
+ ! rewind file
+ rewind(iunit)
+
+ ! define test string with ampersand
+ nml_str = '&' // trim(adjustl(nml))
+
+ ! search for the record containing the namelist group we're looking for
+ do
+ read(iunit, '(a)', iostat=status) file_str
+ if (status /= 0) then
+ exit ! e.g. end of file
+ else
+ if (index(adjustl(file_str), nml_str) == 1) then
+ exit ! i.e. found record we're looking for
+ end if
+ end if
+ end do
+
+ ! backspace to namelist name in file
+ backspace(iunit)
+
+ end subroutine goto_nml
+
 !=======================================================================
 end module ice_fileunits
diff --git a/cicecore/shared/ice_init_column.F90 b/cicecore/shared/ice_init_column.F90
index 06ab79cdb..0d06b0aac 100644
--- a/cicecore/shared/ice_init_column.F90
+++ b/cicecore/shared/ice_init_column.F90
@@ -1477,6 +1477,14 @@ subroutine input_zbgc
 restart_zsal = .false.
 endif
+ if (solve_zsal) then
+ if (my_task == master_task) then
+ write(nu_diag,*) subname,' ERROR: solve_zsal=T deprecated'
+ endif
+ abort_flag = 101
+ endif
+
+#ifdef UNDEPRECATE_ZSAL
 if (solve_zsal .and. nblyr < 1) then
 if (my_task == master_task) then
 write(nu_diag,*) subname,' ERROR: solve_zsal=T but 0 zsalinity tracers'
@@ -1490,6 +1498,7 @@
 endif
 abort_flag = 102
 endif
+#endif
 if (tr_brine .and. nblyr < 1 ) then
 if (my_task == master_task) then
diff --git a/cicecore/version.txt b/cicecore/version.txt
index 154cda3d7..953395fa1 100644
--- a/cicecore/version.txt
+++ b/cicecore/version.txt
@@ -1 +1 @@
-CICE 6.4.0
+CICE 6.4.1
diff --git a/configuration/scripts/cice.build b/configuration/scripts/cice.build
index d75d74253..66b7b1321 100755
--- a/configuration/scripts/cice.build
+++ b/configuration/scripts/cice.build
@@ -128,12 +128,12 @@ endif
 ### List of source code directories (in order of importance).
 cat >! Filepath << EOF
 ${ICE_SANDBOX}/cicecore/drivers/${ICE_DRVOPT}
-${ICE_SANDBOX}/cicecore/cicedynB/dynamics
-${ICE_SANDBOX}/cicecore/cicedynB/general
-${ICE_SANDBOX}/cicecore/cicedynB/analysis
-${ICE_SANDBOX}/cicecore/cicedynB/infrastructure
-${ICE_SANDBOX}/cicecore/cicedynB/infrastructure/io/$IODIR
-${ICE_SANDBOX}/cicecore/cicedynB/infrastructure/comm/${ICE_COMMDIR}
+${ICE_SANDBOX}/cicecore/cicedyn/dynamics
+${ICE_SANDBOX}/cicecore/cicedyn/general
+${ICE_SANDBOX}/cicecore/cicedyn/analysis
+${ICE_SANDBOX}/cicecore/cicedyn/infrastructure
+${ICE_SANDBOX}/cicecore/cicedyn/infrastructure/io/$IODIR
+${ICE_SANDBOX}/cicecore/cicedyn/infrastructure/comm/${ICE_COMMDIR}
 ${ICE_SANDBOX}/cicecore/shared
 ${ICE_SANDBOX}/icepack/columnphysics
 EOF
diff --git a/configuration/scripts/ice_in b/configuration/scripts/ice_in
index 8262f34ec..32db0270b 100644
--- a/configuration/scripts/ice_in
+++ b/configuration/scripts/ice_in
@@ -254,6 +254,8 @@
 update_ocn_f = .false.
 l_mpond_fresh = .false.
 tfrz_option = 'mushy'
+ saltflux_option = 'constant'
+ ice_ref_salinity = 4.0
 oceanmixed_ice = .true. 
wave_spec_type = 'none' wave_spec_file = 'unknown_wave_spec_file' diff --git a/configuration/scripts/options/set_nml.dyneap b/configuration/scripts/options/set_nml.dyneap new file mode 100644 index 000000000..6ebab625e --- /dev/null +++ b/configuration/scripts/options/set_nml.dyneap @@ -0,0 +1 @@ +kdyn = 2 diff --git a/configuration/scripts/options/set_nml.saltflux b/configuration/scripts/options/set_nml.saltflux new file mode 100644 index 000000000..d50ddc4e3 --- /dev/null +++ b/configuration/scripts/options/set_nml.saltflux @@ -0,0 +1,2 @@ + ktherm = 2 + saltflux_option = 'prognostic' diff --git a/configuration/scripts/tests/base_suite.ts b/configuration/scripts/tests/base_suite.ts index 3007380ab..8685ab9a8 100644 --- a/configuration/scripts/tests/base_suite.ts +++ b/configuration/scripts/tests/base_suite.ts @@ -22,6 +22,7 @@ restart gx3 4x4 alt04 restart gx3 4x4 alt05 restart gx3 8x2 alt06 restart gx3 8x3 alt07 +restart gx3 8x3 saltflux restart gx3 18x2 debug,maskhalo restart gx3 6x2 alt01,debug,short restart gx3 8x2 alt02,debug,short @@ -70,8 +71,6 @@ restart gx3 4x4 histall,precision8,cdf64 smoke gx3 30x1 bgcz,histall smoke gx3 14x2 fsd12,histall smoke gx3 4x1 dynpicard -smoke gx3 8x2 diag24,run5day,zsal,debug -restart gx3 8x2 zsal restart gx3 8x2 gx3ncarbulk,debug restart gx3 4x4 gx3ncarbulk,diag1 smoke gx3 4x1 calcdragio diff --git a/configuration/scripts/tests/decomp_suite.ts b/configuration/scripts/tests/decomp_suite.ts index c39c3ddfe..8d47506d6 100644 --- a/configuration/scripts/tests/decomp_suite.ts +++ b/configuration/scripts/tests/decomp_suite.ts @@ -3,6 +3,8 @@ restart gx3 4x2x25x29x4 dslenderX2 restart gx1 64x1x16x16x10 dwghtfile restart gbox180 16x1x6x6x60 dspacecurve,debugblocks decomp gx3 4x2x25x29x5 none +decomp gx3 4x2x25x29x5 dynpicard,reprosum +decomp gx3 4x2x25x29x5 dyneap restart gx3 1x1x50x58x4 droundrobin,thread restart_gx3_4x2x25x29x4_dslenderX2 restart gx3 4x1x25x116x1 dslenderX1,thread restart_gx3_4x2x25x29x4_dslenderX2 restart gx3 6x2x4x29x18 dspacecurve restart_gx3_4x2x25x29x4_dslenderX2 diff --git a/configuration/scripts/tests/first_suite.ts b/configuration/scripts/tests/first_suite.ts index b42d917ea..bef24d9eb 100644 --- a/configuration/scripts/tests/first_suite.ts +++ b/configuration/scripts/tests/first_suite.ts @@ -1,6 +1,19 @@ # Test Grid PEs Sets BFB-compare smoke gx3 8x2 diag1,run5day +# decomp_suite restart gx3 4x2x25x29x4 dslenderX2 smoke gx3 4x2x25x29x4 debug,run2day,dslenderX2 -smoke gx3 4x2x25x29x4 dslenderX2,diag1,reprosum,cmplog +# reprosum_suite +smoke gx3 4x2x25x29x4 dslenderX2,diag1,reprosum +# travis_suite smoke gx3 1x2 run2day +# gridsys_suite +smoke gx3 1x1x100x116x1 reprosum,run10day +smoke gx1 32x1x16x16x32 reprosum,run10day +smoke gx3 1x1x100x116x1 reprosum,run10day,gridcd +smoke gx1 32x1x16x16x32 reprosum,run10day,gridcd +smoke gx3 1x1x100x116x1 reprosum,run10day,gridc +smoke gx1 32x1x16x16x32 reprosum,run10day,gridc +# perf_suite +smoke gx1 32x1x16x16x15 run2day,droundrobin +smoke gx1 64x1x16x16x8 run2day,droundrobin,thread diff --git a/configuration/scripts/tests/gridsys_suite.ts b/configuration/scripts/tests/gridsys_suite.ts index d9752073f..faf01344a 100644 --- a/configuration/scripts/tests/gridsys_suite.ts +++ b/configuration/scripts/tests/gridsys_suite.ts @@ -1,4 +1,11 @@ # Test Grid PEs Sets BFB-compare +smoke gx3 1x1x100x116x1 reprosum,run10day +smoke gx1 32x1x16x16x32 reprosum,run10day +smoke gx3 1x1x100x116x1 reprosum,run10day,gridcd +smoke gx1 32x1x16x16x32 reprosum,run10day,gridcd +smoke gx3 1x1x100x116x1 
reprosum,run10day,gridc +smoke gx1 32x1x16x16x32 reprosum,run10day,gridc + smoke gx3 8x2 diag1,run5day smoke gx3 8x4 diag1,run5day,debug restart gx3 4x2 debug,diag1 @@ -12,11 +19,9 @@ smoke gbox80 4x2 boxclosed,boxforcee,run1day smoke gbox80 4x1 boxclosed,boxforcene,run1day,kmtislands smoke gbox80 4x2 boxopen,kmtislands,boxforcee,run1day smoke gbox80 2x2 boxclosed,boxforcen,run1day,vargrid -smoke gx3 1x1x100x116x1 reprosum,run10day smoke gx3 1x1x25x29x16 reprosum,run10day,dwblockall smoke_gx3_1x1x100x116x1_reprosum_run10day smoke gx3 1x1x5x4x580 reprosum,run10day,dwblockall smoke_gx3_1x1x100x116x1_reprosum_run10day smoke gx3 1x1x5x4x580 reprosum,run10day smoke_gx3_1x1x100x116x1_reprosum_run10day -smoke gx1 32x1x16x16x32 reprosum,run10day smoke gx1 32x1x16x16x32 reprosum,run10day,cmplogrest,dwblockall smoke_gx1_32x1x16x16x32_reprosum_run10day smoke gx1 32x1x16x12x40 reprosum,run10day,cmplogrest,dwblockall smoke_gx1_32x1x16x16x32_reprosum_run10day smoke gx1 32x1x16x12x40 reprosum,run10day,cmplogrest smoke_gx1_32x1x16x16x32_reprosum_run10day @@ -34,11 +39,9 @@ smoke gbox80 4x2 boxclosed,boxforcee,run1day,gridcd smoke gbox80 4x1 boxclosed,boxforcene,run1day,kmtislands,gridcd smoke gbox80 4x2 boxopen,kmtislands,boxforcee,run1day,gridcd smoke gbox80 2x2 boxclosed,boxforcen,run1day,vargrid,gridcd -smoke gx3 1x1x100x116x1 reprosum,run10day,gridcd smoke gx3 1x1x25x29x16 reprosum,run10day,dwblockall,gridcd smoke_gx3_1x1x100x116x1_gridcd_reprosum_run10day smoke gx3 1x1x5x4x580 reprosum,run10day,dwblockall,gridcd smoke_gx3_1x1x100x116x1_gridcd_reprosum_run10day smoke gx3 1x1x5x4x580 reprosum,run10day,gridcd smoke_gx3_1x1x100x116x1_gridcd_reprosum_run10day -smoke gx1 32x1x16x16x32 reprosum,run10day,gridcd smoke gx1 32x1x16x16x32 reprosum,run10day,cmplogrest,dwblockall,gridcd smoke_gx1_32x1x16x16x32_gridcd_reprosum_run10day smoke gx1 32x1x16x12x40 reprosum,run10day,cmplogrest,dwblockall,gridcd smoke_gx1_32x1x16x16x32_gridcd_reprosum_run10day smoke gx1 32x1x16x12x40 reprosum,run10day,cmplogrest,gridcd smoke_gx1_32x1x16x16x32_gridcd_reprosum_run10day @@ -56,11 +59,9 @@ smoke gbox80 4x2 boxclosed,boxforcee,run1day,gridc smoke gbox80 4x1 boxclosed,boxforcene,run1day,kmtislands,gridc smoke gbox80 4x2 boxopen,kmtislands,boxforcee,run1day,gridc smoke gbox80 2x2 boxclosed,boxforcen,run1day,vargrid,gridc -smoke gx3 1x1x100x116x1 reprosum,run10day,gridc smoke gx3 1x1x25x29x16 reprosum,run10day,dwblockall,gridc smoke_gx3_1x1x100x116x1_gridc_reprosum_run10day smoke gx3 1x1x5x4x580 reprosum,run10day,dwblockall,gridc smoke_gx3_1x1x100x116x1_gridc_reprosum_run10day smoke gx3 1x1x5x4x580 reprosum,run10day,gridc smoke_gx3_1x1x100x116x1_gridc_reprosum_run10day -smoke gx1 32x1x16x16x32 reprosum,run10day,gridc smoke gx1 32x1x16x16x32 reprosum,run10day,cmplogrest,dwblockall,gridc smoke_gx1_32x1x16x16x32_gridc_reprosum_run10day smoke gx1 32x1x16x12x40 reprosum,run10day,cmplogrest,dwblockall,gridc smoke_gx1_32x1x16x16x32_gridc_reprosum_run10day smoke gx1 32x1x16x12x40 reprosum,run10day,cmplogrest,gridc smoke_gx1_32x1x16x16x32_gridc_reprosum_run10day diff --git a/configuration/scripts/tests/omp_suite.ts b/configuration/scripts/tests/omp_suite.ts index 686fa72db..62630e874 100644 --- a/configuration/scripts/tests/omp_suite.ts +++ b/configuration/scripts/tests/omp_suite.ts @@ -1,5 +1,7 @@ # Test Grid PEs Sets BFB-compare +#gridB + smoke gx3 8x4 diag1,reprosum,run10day smoke gx3 6x2 alt01,reprosum,run10day smoke gx3 8x2 alt02,reprosum,run10day @@ -15,7 +17,6 @@ smoke gx3 14x2 fsd12,reprosum,run10day smoke gx3 11x2 
isotope,reprosum,run10day smoke gx3 8x4 snwitdrdg,snwgrain,icdefault,reprosum,run10day smoke gx3 6x4 dynpicard,reprosum,run10day -smoke gx3 8x3 zsal,reprosum,run10day smoke gx3 1x1x100x116x1 reprosum,run10day,thread smoke gbox128 8x2 reprosum,run10day @@ -25,6 +26,62 @@ smoke gbox128 14x2 boxrestore,reprosum,run10day smoke gbox80 4x5 box2001,reprosum,run10day smoke gbox80 11x3 boxslotcyl,reprosum,run10day +#gridC + +smoke gx3 8x4 diag1,reprosum,run10day,gridc +smoke gx3 6x2 alt01,reprosum,run10day,gridc +smoke gx3 8x2 alt02,reprosum,run10day,gridc +#smoke gx3 12x2 alt03,droundrobin,reprosum,run10day,gridc +smoke gx3 4x4 alt04,reprosum,run10day,gridc +smoke gx3 4x4 alt05,reprosum,run10day,gridc +smoke gx3 8x2 alt06,reprosum,run10day,gridc +smoke gx3 7x2 alt07,reprosum,run10day,gridc +smoke gx3 8x2 bgczm,reprosum,run10day,gridc +smoke gx1 15x2 reprosum,run10day,gridc +smoke gx1 15x2 seabedprob,reprosum,run10day,gridc +smoke gx3 14x2 fsd12,reprosum,run10day,gridc +smoke gx3 11x2 isotope,reprosum,run10day,gridc +smoke gx3 8x4 snwitdrdg,snwgrain,icdefault,reprosum,run10day,gridc +#smoke gx3 6x4 dynpicard,reprosum,run10day,gridc +smoke gx3 1x1x100x116x1 reprosum,run10day,gridc,thread + +smoke gbox128 8x2 reprosum,run10day,gridc +smoke gbox128 12x2 boxnodyn,reprosum,run10day,gridc +#smoke gbox128 9x2 boxadv,reprosum,run10day,gridc +smoke gbox128 14x2 boxrestore,reprosum,run10day,gridc +smoke gbox80 4x5 box2001,reprosum,run10day,gridc +smoke gbox80 11x3 boxslotcyl,reprosum,run10day,gridc + +#gridCD + +smoke gx3 8x4 diag1,reprosum,run10day,gridcd +smoke gx3 6x2 alt01,reprosum,run10day,gridcd +smoke gx3 8x2 alt02,reprosum,run10day,gridcd +#smoke gx3 12x2 alt03,droundrobin,reprosum,run10day,gridcd +smoke gx3 4x4 alt04,reprosum,run10day,gridcd +smoke gx3 4x4 alt05,reprosum,run10day,gridcd +smoke gx3 8x2 alt06,reprosum,run10day,gridcd +smoke gx3 7x2 alt07,reprosum,run10day,gridcd +smoke gx3 8x2 bgczm,reprosum,run10day,gridcd +smoke gx1 15x2 reprosum,run10day,gridcd +smoke gx1 15x2 seabedprob,reprosum,run10day,gridcd +smoke gx3 14x2 fsd12,reprosum,run10day,gridcd +smoke gx3 11x2 isotope,reprosum,run10day,gridcd +smoke gx3 8x4 snwitdrdg,snwgrain,icdefault,reprosum,run10day,gridcd +#smoke gx3 6x4 dynpicard,reprosum,run10day,gridcd +smoke gx3 1x1x100x116x1 reprosum,run10day,gridcd,thread + +smoke gbox128 8x2 reprosum,run10day,gridcd +smoke gbox128 12x2 boxnodyn,reprosum,run10day,gridcd +#smoke gbox128 9x2 boxadv,reprosum,run10day,gridcd +smoke gbox128 14x2 boxrestore,reprosum,run10day,gridcd +smoke gbox80 4x5 box2001,reprosum,run10day,gridcd +smoke gbox80 11x3 boxslotcyl,reprosum,run10day,gridcd + +sleep 180 + +#gridB + smoke gx3 4x2 diag1,reprosum,run10day,cmplogrest smoke_gx3_8x4_diag1_reprosum_run10day smoke gx3 4x1 diag1,reprosum,run10day,cmplogrest,thread smoke_gx3_8x4_diag1_reprosum_run10day smoke gx3 8x1 alt01,reprosum,run10day,cmplogrest,thread smoke_gx3_6x2_alt01_reprosum_run10day @@ -41,7 +98,6 @@ smoke gx3 8x1 fsd12,reprosum,run10day,cmplogrest,thread smoke gx3 8x1 isotope,reprosum,run10day,cmplogrest,thread smoke_gx3_11x2_isotope_reprosum_run10day smoke gx3 8x1 snwitdrdg,snwgrain,icdefault,reprosum,run10day,cmplogrest,thread smoke_gx3_8x4_icdefault_reprosum_run10day_snwgrain_snwitdrdg smoke gx3 8x1 dynpicard,reprosum,run10day,cmplogrest,thread smoke_gx3_6x4_dynpicard_reprosum_run10day -smoke gx3 8x1 zsal,reprosum,run10day,cmplogrest,thread smoke_gx3_8x3_reprosum_run10day_zsal smoke gx3 4x2x25x29x4 reprosum,run10day smoke_gx3_1x1x100x116x1_reprosum_run10day_thread smoke gx3 8x4x5x4x80 
reprosum,run10day smoke_gx3_1x1x100x116x1_reprosum_run10day_thread @@ -54,31 +110,6 @@ smoke gbox80 8x1 boxslotcyl,reprosum,run10day,cmplogrest,thread #gridC -smoke gx3 8x4 diag1,reprosum,run10day,gridc -smoke gx3 6x2 alt01,reprosum,run10day,gridc -smoke gx3 8x2 alt02,reprosum,run10day,gridc -#smoke gx3 12x2 alt03,droundrobin,reprosum,run10day,gridc -smoke gx3 4x4 alt04,reprosum,run10day,gridc -smoke gx3 4x4 alt05,reprosum,run10day,gridc -smoke gx3 8x2 alt06,reprosum,run10day,gridc -smoke gx3 7x2 alt07,reprosum,run10day,gridc -smoke gx3 8x2 bgczm,reprosum,run10day,gridc -smoke gx1 15x2 reprosum,run10day,gridc -smoke gx1 15x2 seabedprob,reprosum,run10day,gridc -smoke gx3 14x2 fsd12,reprosum,run10day,gridc -smoke gx3 11x2 isotope,reprosum,run10day,gridc -smoke gx3 8x4 snwitdrdg,snwgrain,icdefault,reprosum,run10day,gridc -#smoke gx3 6x4 dynpicard,reprosum,run10day,gridc -smoke gx3 8x3 zsal,reprosum,run10day,gridc -smoke gx3 1x1x100x116x1 reprosum,run10day,gridc,thread - -smoke gbox128 8x2 reprosum,run10day,gridc -smoke gbox128 12x2 boxnodyn,reprosum,run10day,gridc -#smoke gbox128 9x2 boxadv,reprosum,run10day,gridc -smoke gbox128 14x2 boxrestore,reprosum,run10day,gridc -smoke gbox80 4x5 box2001,reprosum,run10day,gridc -smoke gbox80 11x3 boxslotcyl,reprosum,run10day,gridc - smoke gx3 4x2 diag1,reprosum,run10day,cmplogrest,gridc smoke_gx3_8x4_diag1_gridc_reprosum_run10day smoke gx3 4x1 diag1,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_8x4_diag1_gridc_reprosum_run10day smoke gx3 8x1 alt01,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_6x2_alt01_gridc_reprosum_run10day @@ -95,7 +126,6 @@ smoke gx3 8x1 fsd12,reprosum,run10day,cmplogrest,thread,grid smoke gx3 8x1 isotope,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_11x2_gridc_isotope_reprosum_run10day smoke gx3 8x1 snwitdrdg,snwgrain,icdefault,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_8x4_gridc_icdefault_reprosum_run10day_snwgrain_snwitdrdg #smoke gx3 8x1 dynpicard,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_6x4_dynpicard_gridc_reprosum_run10day -smoke gx3 8x1 zsal,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_8x3_gridc_reprosum_run10day_zsal smoke gx3 4x2x25x29x4 reprosum,run10day,gridc smoke_gx3_1x1x100x116x1_gridc_reprosum_run10day_thread smoke gx3 8x4x5x4x80 reprosum,run10day,gridc smoke_gx3_1x1x100x116x1_gridc_reprosum_run10day_thread @@ -108,31 +138,6 @@ smoke gbox80 8x1 boxslotcyl,reprosum,run10day,cmplogrest,thread #gridCD -smoke gx3 8x4 diag1,reprosum,run10day,gridcd -smoke gx3 6x2 alt01,reprosum,run10day,gridcd -smoke gx3 8x2 alt02,reprosum,run10day,gridcd -#smoke gx3 12x2 alt03,droundrobin,reprosum,run10day,gridcd -smoke gx3 4x4 alt04,reprosum,run10day,gridcd -smoke gx3 4x4 alt05,reprosum,run10day,gridcd -smoke gx3 8x2 alt06,reprosum,run10day,gridcd -smoke gx3 7x2 alt07,reprosum,run10day,gridcd -smoke gx3 8x2 bgczm,reprosum,run10day,gridcd -smoke gx1 15x2 reprosum,run10day,gridcd -smoke gx1 15x2 seabedprob,reprosum,run10day,gridcd -smoke gx3 14x2 fsd12,reprosum,run10day,gridcd -smoke gx3 11x2 isotope,reprosum,run10day,gridcd -smoke gx3 8x4 snwitdrdg,snwgrain,icdefault,reprosum,run10day,gridcd -#smoke gx3 6x4 dynpicard,reprosum,run10day,gridcd -smoke gx3 8x3 zsal,reprosum,run10day,gridcd -smoke gx3 1x1x100x116x1 reprosum,run10day,gridcd,thread - -smoke gbox128 8x2 reprosum,run10day,gridcd -smoke gbox128 12x2 boxnodyn,reprosum,run10day,gridcd -#smoke gbox128 9x2 boxadv,reprosum,run10day,gridcd -smoke gbox128 14x2 boxrestore,reprosum,run10day,gridcd -smoke gbox80 4x5 
box2001,reprosum,run10day,gridcd -smoke gbox80 11x3 boxslotcyl,reprosum,run10day,gridcd - smoke gx3 4x2 diag1,reprosum,run10day,cmplogrest,gridcd smoke_gx3_8x4_diag1_gridcd_reprosum_run10day smoke gx3 4x1 diag1,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx3_8x4_diag1_gridcd_reprosum_run10day smoke gx3 8x1 alt01,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx3_6x2_alt01_gridcd_reprosum_run10day @@ -149,7 +154,6 @@ smoke gx3 8x1 fsd12,reprosum,run10day,cmplogrest,thread,grid smoke gx3 8x1 isotope,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx3_11x2_gridcd_isotope_reprosum_run10day smoke gx3 8x1 snwitdrdg,snwgrain,icdefault,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx3_8x4_gridcd_icdefault_reprosum_run10day_snwgrain_snwitdrdg #smoke gx3 8x1 dynpicard,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx3_6x4_dynpicard_gridcd_reprosum_run10day -smoke gx3 8x1 zsal,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx3_8x3_gridcd_reprosum_run10day_zsal smoke gx3 4x2x25x29x4 reprosum,run10day,gridcd smoke_gx3_1x1x100x116x1_gridcd_reprosum_run10day_thread smoke gx3 8x4x5x4x80 reprosum,run10day,gridcd smoke_gx3_1x1x100x116x1_gridcd_reprosum_run10day_thread diff --git a/configuration/scripts/tests/perf_suite.ts b/configuration/scripts/tests/perf_suite.ts index 9a17d8a55..a4d8ef588 100644 --- a/configuration/scripts/tests/perf_suite.ts +++ b/configuration/scripts/tests/perf_suite.ts @@ -1,25 +1,24 @@ # Test Grid PEs Sets BFB-compare -smoke gx1 1x1x320x384x1 run2day,droundrobin +smoke gx1 32x1x16x16x15 run2day,droundrobin smoke gx1 64x1x16x16x8 run2day,droundrobin,thread -sleep 180 # -smoke gx1 1x1x320x384x1 run2day,droundrobin -smoke gx1 1x1x160x192x4 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day -smoke gx1 1x1x80x96x16 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day -smoke gx1 1x1x40x48x64 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day -smoke gx1 1x1x20x24x256 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day +smoke gx1 1x1x320x384x1 run2day,droundrobin smoke_gx1_32x1x16x16x15_droundrobin_run2day +smoke gx1 1x1x160x192x4 run2day,droundrobin smoke_gx1_32x1x16x16x15_droundrobin_run2day +smoke gx1 1x1x80x96x16 run2day,droundrobin smoke_gx1_32x1x16x16x15_droundrobin_run2day +smoke gx1 1x1x40x48x64 run2day,droundrobin smoke_gx1_32x1x16x16x15_droundrobin_run2day +smoke gx1 1x1x20x24x256 run2day,droundrobin smoke_gx1_32x1x16x16x15_droundrobin_run2day # -smoke gx1 1x1x16x16x480 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day -smoke gx1 2x1x16x16x240 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day -smoke gx1 4x1x16x16x120 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day -smoke gx1 8x1x16x16x60 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day -smoke gx1 16x1x16x16x30 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day -smoke gx1 32x1x16x16x15 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day -smoke gx1 64x1x16x16x8 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day -smoke gx1 128x1x16x16x4 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day +smoke gx1 1x1x16x16x480 run2day,droundrobin smoke_gx1_32x1x16x16x15_droundrobin_run2day +smoke gx1 2x1x16x16x240 run2day,droundrobin smoke_gx1_32x1x16x16x15_droundrobin_run2day +smoke gx1 4x1x16x16x120 run2day,droundrobin smoke_gx1_32x1x16x16x15_droundrobin_run2day +smoke gx1 8x1x16x16x60 run2day,droundrobin smoke_gx1_32x1x16x16x15_droundrobin_run2day +smoke gx1 16x1x16x16x30 
run2day,droundrobin smoke_gx1_32x1x16x16x15_droundrobin_run2day +#smoke gx1 32x1x16x16x15 run2day,droundrobin +smoke gx1 64x1x16x16x8 run2day,droundrobin smoke_gx1_32x1x16x16x15_droundrobin_run2day +smoke gx1 128x1x16x16x4 run2day,droundrobin smoke_gx1_32x1x16x16x15_droundrobin_run2day # -smoke gx1 64x1x16x16x8 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day -smoke gx1 64x1x16x16x8 run2day,droundrobin,thread +smoke gx1 64x1x16x16x8 run2day,droundrobin smoke_gx1_32x1x16x16x15_droundrobin_run2day +#smoke gx1 64x1x16x16x8 run2day,droundrobin,thread smoke gx1 32x2x16x16x16 run2day,droundrobin smoke_gx1_64x1x16x16x8_droundrobin_run2day_thread smoke gx1 16x4x16x16x32 run2day,droundrobin smoke_gx1_64x1x16x16x8_droundrobin_run2day_thread smoke gx1 8x8x16x16x64 run2day,droundrobin smoke_gx1_64x1x16x16x8_droundrobin_run2day_thread diff --git a/doc/source/cice_index.rst b/doc/source/cice_index.rst index a8b9d08f1..0e9d21517 100644 --- a/doc/source/cice_index.rst +++ b/doc/source/cice_index.rst @@ -350,7 +350,7 @@ either Celsius or Kelvin units). Deprecated parameters are listed at the end. "ice_ic", "choice of initial conditions (see :ref:`tab-ic`)", "" "ice_stdout", "unit number for standard output", "" "ice_stderr", "unit number for standard error output", "" - "ice_ref_salinity", "reference salinity for ice–ocean exchanges", "4. ppt" + "ice_ref_salinity", "reference salinity for ice–ocean exchanges", "" "icells", "number of grid cells with specified property (for vectorization)", "" "iceruf", "ice surface roughness at atmosphere interface", "5.\ :math:`\times`\ 10\ :math:`^{-4}` m" "iceruf_ocn", "under-ice roughness (at ocean interface)", "0.03 m" @@ -677,6 +677,7 @@ either Celsius or Kelvin units). Deprecated parameters are listed at the end. "Tf", "freezing temperature", "C" "Tffresh", "freezing temp of fresh ice", "273.15 K" "tfrz_option", "form of ocean freezing temperature", "" + "saltflux_option", "form of coupled salt flux ", "" "thinS", "minimum ice thickness for brine tracer", "" "timer_stats", "logical to turn on extra timer statistics", ".false." "timesecs", "total elapsed time in seconds", "s" diff --git a/doc/source/conf.py b/doc/source/conf.py index a1b2871ae..88b98bc09 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -62,9 +62,9 @@ # built documents. # # The short X.Y version. -version = u'6.4.0' +version = u'6.4.1' # The full version, including alpha/beta/rc tags. -version = u'6.4.0' +version = u'6.4.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/doc/source/developer_guide/dg_dynamics.rst b/doc/source/developer_guide/dg_dynamics.rst index 48dead1cb..1f1430e71 100644 --- a/doc/source/developer_guide/dg_dynamics.rst +++ b/doc/source/developer_guide/dg_dynamics.rst @@ -9,14 +9,14 @@ Dynamics The CICE **cicecore/** directory consists of the non icepack source code. Within that directory there are the following subdirectories -**cicecore/cicedynB/analysis** contains higher level history and diagnostic routines. +**cicecore/cicedyn/analysis** contains higher level history and diagnostic routines. -**cicecore/cicedynB/dynamics** contains all the dynamical evp, eap, and transport routines. +**cicecore/cicedyn/dynamics** contains all the dynamical evp, eap, and transport routines. 
-**cicecore/cicedynB/general** contains routines associated with forcing, flux calculation,
+**cicecore/cicedyn/general** contains routines associated with forcing, flux calculation,
 initialization, and model timestepping.
-**cicecore/cicedynB/infrastructure** contains most of the low-level infrastructure associated
+**cicecore/cicedyn/infrastructure** contains most of the low-level infrastructure associated
 with communication (halo updates, gather, scatter, global sums, etc) and I/O reading and writing
 binary and netcdf files.
@@ -29,7 +29,7 @@ coupling layers.
 Dynamical Solvers
 --------------------
-The dynamics solvers are found in **cicecore/cicedynB/dynamics/**. A couple of different solvers are
+The dynamics solvers are found in **cicecore/cicedyn/dynamics/**. A couple of different solvers are
 available including EVP, EAP and VP. The dynamics solver is specified in namelist with the
 ``kdyn`` variable. ``kdyn=1`` is evp, ``kdyn=2`` is eap, ``kdyn=3`` is VP.
@@ -41,7 +41,7 @@ with the tripole grid.
 Transport
 -----------------
-The transport (advection) methods are found in **cicecore/cicedynB/dynamics/**. Two methods are supported,
+The transport (advection) methods are found in **cicecore/cicedyn/dynamics/**. Two methods are supported,
 upwind and remap. These are set in namelist via the ``advection`` variable. Transport can be disabled
 with the ``ktransport`` namelist variable.
@@ -94,11 +94,11 @@ Two low-level communications packages, mpi and serial, are provided as part of C
 provides a middle layer between the model and the underlying libraries. Only the CICE mpi or
 serial directories are compiled with CICE, not both.
-**cicedynB/infrastructure/comm/mpi/**
+**cicedyn/infrastructure/comm/mpi/**
 is based on MPI and provides various methods to do halo updates, global sums, gather/scatter, broadcasts
 and similar using some fairly generic interfaces to isolate the MPI calls in the code.
-**cicedynB/infrastructure/comm/serial/** support the same interfaces, but operates
+**cicedyn/infrastructure/comm/serial/** supports the same interfaces, but operates
 in shared memory mode with no MPI. The serial library will be used, by default in the CICE scripts,
 if the number of MPI tasks is set to 1. The serial library allows the model to be run on a single
 core or with OpenMP parallelism only without requiring an MPI library.
@@ -112,15 +112,15 @@ Only one of the three IO directories can be built with CICE. The CICE scripts will
 by default, but other options can be selected by setting ``ICE_IOTYPE`` in **cice.settings** in the
 case. This has to be set before CICE is built.
-**cicedynB/infrastructure/io/io_netcdf/** is the
+**cicedyn/infrastructure/io/io_netcdf/** is the
 default for the standalone CICE model, and it supports writing history and restart files in netcdf
 format using standard netcdf calls. It does this by writing from and reading to the root task and
 gathering and scattering fields from the root task to support model parallelism.
-**cicedynB/infrastructure/io/io_binary/** supports files in binary format using a gather/scatter
+**cicedyn/infrastructure/io/io_binary/** supports files in binary format using a gather/scatter
 approach and reading to and writing from the root task.
-**cicedynB/infrastructure/io/io_pio/** support reading and writing through the pio interface. pio
+**cicedyn/infrastructure/io/io_pio/** supports reading and writing through the pio interface. 
pio is a parallel io library (https://github.com/NCAR/ParallelIO) that supports
 reading and writing of binary and netcdf files through various interfaces
 including netcdf and pnetcdf. pio is generally more parallel in memory even when using serial netcdf than the standard gather/scatter methods,
diff --git a/doc/source/developer_guide/dg_forcing.rst b/doc/source/developer_guide/dg_forcing.rst
index d3c406b43..0b90a9b2e 100644
--- a/doc/source/developer_guide/dg_forcing.rst
+++ b/doc/source/developer_guide/dg_forcing.rst
@@ -15,7 +15,7 @@ generally not been maintained by the
 Consortium and only a subset of the code is tested by the Consortium.
 The forcing implementation can be found in the file
-**cicecore/cicedynB/general/ice_forcing.F90**. As noted above, only a subset of the
+**cicecore/cicedyn/general/ice_forcing.F90**. As noted above, only a subset of the
 forcing modes are tested and supported. In many ways, the implementation is fairly
 primitive, in part due to historical reasons and in part because standalone runs
 are discouraged for evaluating complex science. In general, most implementations
diff --git a/doc/source/science_guide/sg_horiztrans.rst b/doc/source/science_guide/sg_horiztrans.rst
index f85f13ee5..d66046465 100644
--- a/doc/source/science_guide/sg_horiztrans.rst
+++ b/doc/source/science_guide/sg_horiztrans.rst
@@ -35,8 +35,11 @@ versions but have not yet been implemented.
 Two transport schemes are available: upwind and the incremental
 remapping scheme of :cite:`Dukowicz00` as modified for sea ice by
-:cite:`Lipscomb04`. The upwind scheme is naturally suited for a C grid discretization. As such, the C grid velocity components (i.e. :math:`uvelE=u` at the E point and :math:`vvelN=v` at the N point) are directly passed to the upwind transport scheme. On the other hand, if the B grid is used, :math:`uvel` and :math:`vvel` (respectively :math:`u` and :math:`v` at the U point) are interpolated to the E and N points such that the upwind advection can be performed. Conversely, as the remapping scheme was originally developed for B grid applications, :math:`uvel` and :math:`vvel` are directly used for the advection. If the remapping scheme is used for the C grid, :math:`uvelE` and :math:`vvelN` are first interpolated to the U points before performing the advection.
+:cite:`Lipscomb04`.
+
+- The upwind scheme uses velocity points at the East and North faces (i.e. :math:`uvelE=u` at the E point and :math:`vvelN=v` at the N point) of a T gridcell. As such, the prognostic C grid velocity components (:math:`uvelE` and :math:`vvelN`) can be passed directly to the upwind transport scheme. If the upwind scheme is used with the B grid, the B grid velocities, :math:`uvelU` and :math:`vvelU` (respectively :math:`u` and :math:`v` at the U point), are interpolated to the E and N points first. (Note however that the upwind scheme does not transport all potentially available tracers.)
+
+- The remapping scheme uses :math:`uvelU` and :math:`vvelU` if l_fixed_area is false and :math:`uvelE` and :math:`vvelN` if l_fixed_area is true. l_fixed_area is hardcoded to false by default and further described below. As such, the B grid velocities (:math:`uvelU` and :math:`vvelU`) are used directly in the remapping scheme, while the C grid velocities (:math:`uvelE` and :math:`vvelN`) are interpolated to U points first. If l_fixed_area is changed to true, the reverse holds: the C grid velocities are used directly and the B grid velocities are interpolated. 
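
To make the staggering, solver, and transport choices described above concrete, a minimal namelist sketch follows. The ``kdyn``, ``advection``, and ``grid_ice`` settings come from the surrounding documentation and test options; the ``grid_nml`` and ``dynamics_nml`` group names are assumptions not confirmed by this patch::

    &grid_nml
      grid_ice  = 'C'        ! 'B', 'C', or 'CD' velocity staggering
    /
    &dynamics_nml
      kdyn      = 1          ! 1 = EVP, 2 = EAP, 3 = VP
      advection = 'remap'    ! 'remap' or 'upwind' transport
    /
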
The remapping scheme has several desirable features: @@ -464,14 +467,14 @@ In general, the fluxes in this expression are not equal to those implied by the above scheme for locating departure regions. For some applications it may be desirable to prescribe the divergence by prescribing the area of the departure region for each edge. This can be -done in CICE 4.0 by setting `l\_fixed\_area` = true in +done by setting `l\_fixed\_area` = true in **ice\_transport\_driver.F90** and passing the prescribed departure areas (`edgearea\_e` and `edgearea\_n`) into the remapping routine. An extra triangle is then constructed for each departure region to ensure that the total area is equal to the prescribed value. This idea was suggested and first implemented by Mats Bentsen of the Nansen Environmental and Remote Sensing Center (Norway), who applied an earlier version of the -CICE remapping scheme to an ocean model. The implementation in CICE v4.0 +CICE remapping scheme to an ocean model. The implementation in CICE is somewhat more general, allowing for departure regions lying on both sides of a cell edge. The extra triangle is constrained to lie in one but not both of the grid cells that share the edge. Since this option diff --git a/doc/source/science_guide/sg_tracers.rst b/doc/source/science_guide/sg_tracers.rst index b75edfb00..cbecb9310 100644 --- a/doc/source/science_guide/sg_tracers.rst +++ b/doc/source/science_guide/sg_tracers.rst @@ -92,7 +92,6 @@ is not in use. " ","nslyr","vsno","nt_rhos"," " " ","nslyr","vsno","nt_smice"," " " ","nslyr","vsno","nt_smliq"," " - "solve_zsal", "n_trzs", "fbri or (a,v)ice", "nt_bgc_S", " " "tr_bgc_N", "n_algae", "fbri or (a,v)ice", "nt_bgc_N", "nlt_bgc_N" "tr_bgc_Nit", " ", "fbri or (a,v)ice", "nt_bgc_Nit", "nlt_bgc_Nit" "tr_bgc_C", "n_doc", "fbri or (a,v)ice", "nt_bgc_DOC", "nlt_bgc_DOC" @@ -112,6 +111,7 @@ is not in use. " ", "1", "fbri", "nt_zbgc_frac", " " .. 
+ "solve_zsal", "n_trzs", "fbri or (a,v)ice", "nt_bgc_S", " " "tr_pond_cesm", "2", "aice", "nt_apnd", " " " ", " ", "apnd", "nt_vpnd", " " diff --git a/doc/source/user_guide/ug_case_settings.rst b/doc/source/user_guide/ug_case_settings.rst index a34c69822..587adcd56 100644 --- a/doc/source/user_guide/ug_case_settings.rst +++ b/doc/source/user_guide/ug_case_settings.rst @@ -644,6 +644,7 @@ forcing_nml "", "``eastblock``", "ice block covering about 25 percent of domain at the east edge of the domain", "" "", "``latsst``", "ice dependent on latitude and ocean temperature", "" "", "``uniform``", "ice defined at all grid points", "" + "``ice_ref_salinity``", "real", "sea ice salinity for coupling fluxes (ppt)", "4.0" "``iceruf``", "real", "ice surface roughness at atmosphere interface in meters", "0.0005" "``l_mpond_fresh``", "``.false.``", "release pond water immediately to ocean", "``.false.``" "", "``true``", "retain (topo) pond water until ponds drain", "" @@ -666,6 +667,8 @@ forcing_nml "``restore_ocn``", "logical", "restore sst to data", "``.false.``" "``restore_ice``", "logical", "restore ice state along lateral boundaries", "``.false.``" "``rotate_wind``", "logical", "rotate wind from east/north to computation grid", "``.true.``" + "``saltflux_option``", "``constant``", "computed using ice_ref_salinity", "``constant``" + "", "``prognostic``", "computed using prognostic salinity", "" "``tfrz_option``", "``linear_salt``", "linear function of salinity (ktherm=1)", "``mushy``" "", "``minus1p8``", "constant ocean freezing temperature (:math:`-1.8^{\circ} C`)", "" "", "``mushy``", "matches mushy-layer thermo (ktherm=2)", "" @@ -804,14 +807,14 @@ zbgc_nml "``ratio_S2N_sp``", "real", "algal S to N in mol/mol small plankton", "0.03" "``restart_bgc``", "logical", "restart tracer values from file", "``.false.``" "``restart_hbrine``", "logical", "", "``.false.``" - "``restart_zsal``", "logical", "", "``.false.``" + "``restart_zsal``", "logical", "zsalinity DEPRECATED", "``.false.``" "``restore_bgc``", "logical", "restore bgc to data", "``.false.``" "``R_dFe2dust``", "real", "g/g :cite:`Tagliabue09`", "0.035" "``scale_bgc``", "logical", "", "``.false.``" "``silicatetype``", "real", "mobility type between stationary and mobile silicate", "-1.0" "``skl_bgc``", "logical", "biogeochemistry", "``.false.``" "``solve_zbgc``", "logical", "", "``.false.``" - "``solve_zsal``", "logical", "update salinity tracer profile", "``.false.``" + "``solve_zsal``", "logical", "zsalinity DEPRECATED, update salinity tracer profile", "``.false.``" "``tau_max``", "real", "long time mobile to stationary exchanges", "1.73e-5" "``tau_min``", "real", "rapid module to stationary exchanges", "5200." "``tr_bgc_Am``", "logical", "ammonium tracer", "``.false.``" @@ -847,13 +850,13 @@ icefields_nml There are several icefield namelist groups to control model history output. See the source code for a full list of supported output fields. 
-* ``icefields_nml`` is in **cicecore/cicedynB/analysis/ice_history_shared.F90**
-* ``icefields_bgc_nml`` is in **cicecore/cicedynB/analysis/ice_history_bgc.F90**
-* ``icefields_drag_nml`` is in **cicecore/cicedynB/analysis/ice_history_drag.F90**
-* ``icefields_fsd_nml`` is in **cicecore/cicedynB/analysis/ice_history_fsd.F90**
-* ``icefields_mechred_nml`` is in **cicecore/cicedynB/analysis/ice_history_mechred.F90**
-* ``icefields_pond_nml`` is in **cicecore/cicedynB/analysis/ice_history_pond.F90**
-* ``icefields_snow_nml`` is in **cicecore/cicedynB/analysis/ice_history_snow.F90**
+* ``icefields_nml`` is in **cicecore/cicedyn/analysis/ice_history_shared.F90**
+* ``icefields_bgc_nml`` is in **cicecore/cicedyn/analysis/ice_history_bgc.F90**
+* ``icefields_drag_nml`` is in **cicecore/cicedyn/analysis/ice_history_drag.F90**
+* ``icefields_fsd_nml`` is in **cicecore/cicedyn/analysis/ice_history_fsd.F90**
+* ``icefields_mechred_nml`` is in **cicecore/cicedyn/analysis/ice_history_mechred.F90**
+* ``icefields_pond_nml`` is in **cicecore/cicedyn/analysis/ice_history_pond.F90**
+* ``icefields_snow_nml`` is in **cicecore/cicedyn/analysis/ice_history_snow.F90**
 .. csv-table:: **icefields_nml namelist options**
 :header: "variable", "options/format", "description", "default value"
diff --git a/doc/source/user_guide/ug_implementation.rst b/doc/source/user_guide/ug_implementation.rst
index a7cc66948..5ed2092c0 100644
--- a/doc/source/user_guide/ug_implementation.rst
+++ b/doc/source/user_guide/ug_implementation.rst
@@ -47,7 +47,7 @@ as follows
 **cicecore/**
 CICE source code
-**cicecore/cicedynB/**
+**cicecore/cicedyn/**
 routines associated with the dynamics core
 **cicecore/drivers/**
diff --git a/doc/source/user_guide/ug_testing.rst b/doc/source/user_guide/ug_testing.rst
index 284de72f1..289f626a9 100644
--- a/doc/source/user_guide/ug_testing.rst
+++ b/doc/source/user_guide/ug_testing.rst
@@ -23,7 +23,8 @@ The testing scripts support several features
 - Ability to compare results to prior baselines to verify bit-for-bit (``--bcmp``)
 - Ability to define where baseline tests are stored (``--bdir``)
 - Ability to compare tests against each other (``--diff``)
- - Ability to set account number (``--acct``), which is otherwise not set and may result in tests not being submitted
+ - Ability to set or override the batch account number (``--acct``) and queue name (``--queue``)
+ - Ability to control how test suites execute (``--setup-only``, ``--setup-build``, ``--setup-build-run``, ``--setup-build-submit``)
 .. _indtests:
@@ -301,22 +302,6 @@
 results.csh script in the testsuite.[testid]::
The tdir directory will be -created by the script and it will be populated by all tests as well as scripts that support the -test suite:: - - ./cice.setup --suite base_suite --mach wolf --env gnu --testid myid --tdir /scratch/$user/testsuite.myid - Multiple suites are supported on the command line as comma separated arguments:: ./cice.setup --suite base_suite,decomp_suite --mach wolf --env gnu --testid myid @@ -329,9 +314,48 @@ The option settings defined at the command line have precedence over the test su values if there are conflicts. The predefined test suites are defined under **configuration/scripts/tests** and -the files defining the suites -have a suffix of .ts in that directory. The format for the test suite file -is relatively simple. +the files defining the suites have a suffix of .ts in that directory. Some of the +available tests suites are + +``quick_suite`` + consists of a handful of basic CICE tests + +``base_suite`` + consists of a much large suite of tests covering much of the CICE functionality + +``decomp_suite`` + checks that different decompositions and pe counts produce bit-for-bit results + +``omp_suite`` + checks that OpenMP single thread and multi-thread cases are bit-for-bit identical + +``io_suite`` + tests the various IO options including binary, netcdf, and pio. PIO should be installed locally and accessible to the CICE build system to make full use of this suite. + +``perf_suite`` + runs a series of tests to evaluate model scaling and performance + +``reprosum_suite`` + verifies that CICE log files are bit-for-bit with different decompositions and pe counts when the bfbflag is set to reprosum + +``gridsys_suite`` + tests B, C, and CD grid_ice configurations + +``prod_suite`` + consists of a handful of tests running 5 to 10 model years and includes some QC testing. These tests will be relatively expensive and take more time compared to other suites. + +``unittest_suite`` + runs unit tests in the CICE repository + +``travis_suite`` + consists of a small suite of tests suitable for running on low pe counts. This is the suite used with Github Actions for CI in the workflow. + +``first_suite`` + this small suite of tests is redundant with tests in other suites. It runs several of the critical baseline tests that other test compare to. It can improve testing turnaround if listed first in a series of test suites. + +When running multiple suites on the command line (i.e. ``--suite first_suite,base_suite,omp_suite``) the suites will be run in the order defined by the user and redundant tests across multiple suites will be created and executed only once. + +The format for the test suite file is relatively simple. It is a text file with white space delimited columns that define a handful of values in a specific order. The first column is the test name, the second the grid, the third the pe count, @@ -423,6 +447,22 @@ which means by default the test suite builds and submits the jobs. By defining By leveraging the **cice.setup** command line arguments ``--setup-only``, ``--setup-build``, and ``--setup-build-run`` as well as the environment variables SUITE_BUILD, SUITE_RUN, and SUITE_SUBMIT, users can run **cice.setup** and **suite.submit** in various combinations to quickly setup, setup and build, submit, resubmit, run interactively, or rebuild and resubmit full testsuites quickly and easily. See :ref:`examplesuites` for an example. 
+The script **create_fails.csh** will process the output from results.csh and generate a new
+test suite file, **fails.ts**, from the failed tests.
+**fails.ts** can then be edited and passed into ``cice.setup --suite fails.ts ...`` to rerun
+subsets of failed tests to more efficiently move through the development, testing, and
+validation process. However, a full test suite should be run on the final development
+version of the code.
+
+To report the test results, as is required for Pull Requests to be accepted into
+the master branch of the CICE Consortium code, see :ref:`testreporting`.
+
+If using the ``--tdir`` option, that directory must not exist before the script is run. The tdir directory will be
+created by the script and it will be populated by all tests as well as scripts that support the
+test suite::
+
+  ./cice.setup --suite base_suite --mach wolf --env gnu --testid myid --tdir /scratch/$user/testsuite.myid
+

 .. _examplesuites:

@@ -695,9 +735,12 @@ The following are brief descriptions of some of the current unit tests,
   both sets of software are tested independently and correctness is verified.
 - **calchk** is a unit test that exercises the CICE calendar over 100,000 years and verifies correctness. This test does not depend on the CICE initialization.
+ - **gridavgchk** is a unit test that exercises the CICE grid_average_X2Y methods and verifies results.
 - **helloworld** is a simple test that writes out helloworld and uses no CICE infrastructure. This test exists to demonstrate how to build a unit test by specifying the object files directly in the Makefile
+ - **optargs** is a unit test that passes optional arguments down a calling tree and verifies
+   that the optional attribute is preserved correctly.
 - **sumchk** is a unit test that exercises the methods in ice_global_reductions.F90. This test requires that a CICE grid and decomposition be initialized, so CICE_InitMod.F90 is leveraged to initialize the model prior to running a suite of unit validation tests to verify correctness.

diff --git a/doc/source/user_guide/ug_troubleshooting.rst b/doc/source/user_guide/ug_troubleshooting.rst
index d20b14ffc..315b2f869 100644
--- a/doc/source/user_guide/ug_troubleshooting.rst
+++ b/doc/source/user_guide/ug_troubleshooting.rst
@@ -7,6 +7,16 @@ Troubleshooting
 Check the FAQ: https://github.com/CICE-Consortium/CICE/wiki

+.. _dirsetup:
+
+Directory Structure
+---------------------
+
+In November 2022, the cicedynB directory was renamed to cicedyn.
+A soft link was temporarily added to preserve the ability to use
+cicedynB as a path when compiling CICE in other build systems. This
+soft link will be removed in the future.
+
 .. _setup:

 Initial setup
@@ -221,6 +231,16 @@
 be found in the `Icepack documentation `_.

+VP dynamics results
+----------------------------------------
+
+The VP dynamics solver (`kdyn=3`) requires a global sum. This global sum
+is computed by default via an efficient implementation that is not bit-for-bit
+for different decompositions or pe counts. Bit-for-bit identical results
+can be recovered for the VP dynamics solver by setting the namelist
+`bfbflag = reprosum` or using the `-s reprosum` option when setting up a case.
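For instance (the case name ``vpcase`` is a placeholder), the option can be applied when the case is created::

  ./cice.setup --case vpcase --mach wolf --env gnu -s reprosum

or ``bfbflag`` can be edited directly in **ice_in**, shown here as a sketch of its ``setup_nml`` entry::

  &setup_nml
    bfbflag = 'reprosum'
  /
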
+ + Proliferating subprocess parameterizations ------------------------------------------------------- diff --git a/icepack b/icepack index 18fc1c9b7..8f96707a9 160000 --- a/icepack +++ b/icepack @@ -1 +1 @@ -Subproject commit 18fc1c9b79d81604eafdb1fac4ddd039b78ad390 +Subproject commit 8f96707a90132ca119d81ed84e5a62ca0ff3ed96 From bd0ee2c14c848f3f0a01ab697603d2f03d396f73 Mon Sep 17 00:00:00 2001 From: "Jun.Wang" Date: Tue, 27 Jun 2023 20:30:34 +0000 Subject: [PATCH 75/90] add run time info --- .../drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 21 +++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index b94fcff05..2488b1c66 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -105,6 +105,7 @@ module ice_comp_nuopc character(*), parameter :: modName = "(ice_comp_nuopc)" character(*), parameter :: u_FILE_u = & __FILE__ + real(8) :: timers, timere !======================================================================= contains @@ -243,8 +244,11 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) character(len=char_len) :: tfrz_option ! tfrz_option from cice namelist character(len=char_len) :: tfrz_option_driver ! tfrz_option from cice namelist character(len=*), parameter :: subname=trim(modName)//':(InitializeAdvertise) ' + real(8) :: MPI_Wtime(), timeiads !-------------------------------- + timeiads = MPI_Wtime() + call NUOPC_CompAttributeGet(gcomp, name="ScalarFieldName", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return if (isPresent .and. isSet) then @@ -699,6 +703,7 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) end if call t_stopf ('cice_init_total') + if(mastertask) write(stdout,*) 'In ', trim(subname),' time', MPI_Wtime()-timeiads end subroutine InitializeAdvertise @@ -730,9 +735,11 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) character(len=char_len_long) :: single_column_lnd_domainfile character(len=char_len_long) , pointer :: lfieldnamelist(:) => null() character(len=*), parameter :: subname=trim(modName)//':(InitializeRealize) ' + real(8) :: MPI_Wtime(), timeirls !-------------------------------- rc = ESMF_SUCCESS + timeirls = MPI_Wtime() if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) !---------------------------------------------------------------------------- @@ -912,6 +919,10 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) call flush_fileunit(nu_diag) + timers = 0. + timere = 0. 
+ if(mastertask) write(stdout,*) 'In ', trim(subname),' time', MPI_Wtime()-timeirls + end subroutine InitializeRealize !=============================================================================== @@ -954,9 +965,12 @@ subroutine ModelAdvance(gcomp, rc) logical :: isPresent, isSet character(len=*),parameter :: subname=trim(modName)//':(ModelAdvance) ' character(char_len_long) :: msgString + real(8) :: MPI_Wtime !-------------------------------- rc = ESMF_SUCCESS + timers = MPI_Wtime() + if(mastertask) write(stdout,*) 'In CICE, time since last step ', timers - timere call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) @@ -1177,6 +1191,9 @@ subroutine ModelAdvance(gcomp, rc) if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) + timere = MPI_Wtime() + if(mastertask) write(stdout,*) 'In ', trim(subname),' time ', timere-timers + end subroutine ModelAdvance !=============================================================================== @@ -1318,9 +1335,11 @@ subroutine ModelFinalize(gcomp, rc) ! local variables character(*), parameter :: F91 = "('(ice_comp_nuopc) ',73('-'))" character(len=*),parameter :: subname=trim(modName)//':(ModelFinalize) ' + real(8) :: MPI_Wtime, timefs !-------------------------------- rc = ESMF_SUCCESS + timefs = MPI_Wtime() if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) if (my_task == master_task) then write(nu_diag,F91) @@ -1329,6 +1348,8 @@ subroutine ModelFinalize(gcomp, rc) end if if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) + if(mastertask) write(stdout,*) 'In ', trim(subname),' time ', MPI_Wtime()-timefs + end subroutine ModelFinalize !=============================================================================== From e01660bfc872cb6489ccccc93a8b0e0bbd7dcb80 Mon Sep 17 00:00:00 2001 From: "Jun.Wang" Date: Thu, 6 Jul 2023 02:53:50 +0000 Subject: [PATCH 76/90] change real(8) to real(dbl)kind) --- cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index 2488b1c66..1686dde92 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -105,7 +105,7 @@ module ice_comp_nuopc character(*), parameter :: modName = "(ice_comp_nuopc)" character(*), parameter :: u_FILE_u = & __FILE__ - real(8) :: timers, timere + real(dbl_kind) :: timers, timere !======================================================================= contains From b475ad468215f9922086517feaf82375fe6e219a Mon Sep 17 00:00:00 2001 From: "Jun.Wang" Date: Thu, 6 Jul 2023 03:11:02 +0000 Subject: [PATCH 77/90] fix syntax --- cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index 1686dde92..caa3d9987 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -244,7 +244,7 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) character(len=char_len) :: tfrz_option ! tfrz_option from cice namelist character(len=char_len) :: tfrz_option_driver ! 
tfrz_option from cice namelist character(len=*), parameter :: subname=trim(modName)//':(InitializeAdvertise) ' - real(8) :: MPI_Wtime(), timeiads + real(dbl_kind) :: MPI_Wtime, timeiads !-------------------------------- timeiads = MPI_Wtime() @@ -735,7 +735,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) character(len=char_len_long) :: single_column_lnd_domainfile character(len=char_len_long) , pointer :: lfieldnamelist(:) => null() character(len=*), parameter :: subname=trim(modName)//':(InitializeRealize) ' - real(8) :: MPI_Wtime(), timeirls + real(dbl_kind) :: MPI_Wtime, timeirls !-------------------------------- rc = ESMF_SUCCESS @@ -965,7 +965,7 @@ subroutine ModelAdvance(gcomp, rc) logical :: isPresent, isSet character(len=*),parameter :: subname=trim(modName)//':(ModelAdvance) ' character(char_len_long) :: msgString - real(8) :: MPI_Wtime + real(dbl_kind) :: MPI_Wtime !-------------------------------- rc = ESMF_SUCCESS @@ -1335,7 +1335,7 @@ subroutine ModelFinalize(gcomp, rc) ! local variables character(*), parameter :: F91 = "('(ice_comp_nuopc) ',73('-'))" character(len=*),parameter :: subname=trim(modName)//':(ModelFinalize) ' - real(8) :: MPI_Wtime, timefs + real(dbl_kind) :: MPI_Wtime, timefs !-------------------------------- rc = ESMF_SUCCESS From dc73c669ff76c548d0195233b7e022be6666b5ed Mon Sep 17 00:00:00 2001 From: Jun Wang Date: Thu, 6 Jul 2023 09:43:22 -0500 Subject: [PATCH 78/90] fix write unit --- cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index caa3d9987..b556ec408 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -703,7 +703,7 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) end if call t_stopf ('cice_init_total') - if(mastertask) write(stdout,*) 'In ', trim(subname),' time', MPI_Wtime()-timeiads + if(mastertask) write(nu_diag,*) 'In ', trim(subname),' time', MPI_Wtime()-timeiads end subroutine InitializeAdvertise @@ -921,7 +921,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) timers = 0. timere = 0. 
- if(mastertask) write(stdout,*) 'In ', trim(subname),' time', MPI_Wtime()-timeirls + if(mastertask) write(nu_diag,*) 'In ', trim(subname),' time', MPI_Wtime()-timeirls end subroutine InitializeRealize @@ -970,7 +970,7 @@ subroutine ModelAdvance(gcomp, rc) rc = ESMF_SUCCESS timers = MPI_Wtime() - if(mastertask) write(stdout,*) 'In CICE, time since last step ', timers - timere + if(mastertask) write(nu_diag,*) 'In CICE, time since last step ', timers - timere call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) @@ -1192,7 +1192,7 @@ subroutine ModelAdvance(gcomp, rc) if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) timere = MPI_Wtime() - if(mastertask) write(stdout,*) 'In ', trim(subname),' time ', timere-timers + if(mastertask) write(nu_diag,*) 'In ', trim(subname),' time ', timere-timers end subroutine ModelAdvance @@ -1348,7 +1348,7 @@ subroutine ModelFinalize(gcomp, rc) end if if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) - if(mastertask) write(stdout,*) 'In ', trim(subname),' time ', MPI_Wtime()-timefs + if(mastertask) write(nu_diag,*) 'In ', trim(subname),' time ', MPI_Wtime()-timefs end subroutine ModelFinalize From 1e65452a186de7371685e2b45cd593267a4285a1 Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Thu, 6 Jul 2023 18:33:30 -0500 Subject: [PATCH 79/90] use cice_wrapper for ufs timer functionality --- .../drivers/nuopc/cmeps/cice_wrapper_mod.F90 | 37 ++++++++++++++++--- .../drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 30 +++++---------- 2 files changed, 41 insertions(+), 26 deletions(-) diff --git a/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 b/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 index 0da2ed491..9098715dc 100644 --- a/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 +++ b/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 @@ -5,19 +5,35 @@ module cice_wrapper_mod use shr_file_mod , only : shr_file_getlogunit, shr_file_setlogunit #else -contains + use ice_kinds_mod , only : dbl_kind, int_kind, char_len, char_len_long - ! These are just stub routines put in place to remove + implicit none + + real(dbl_kind) :: timeiads = 0.0, timeirls = 0.0, timeadv = 0.0, timefs = 0.0 +#endif +contains + ! Define stub routines that do nothing - they are just here to avoid + ! having cppdefs in the main program +#ifndef CESMCOUPLED + subroutine ufs_settimer(timevalue) + real(dbl_kind), intent(inout) :: timevalue + real(dbl_kind) :: MPI_Wtime + timevalue = MPI_Wtime() + end subroutine ufs_settimer + subroutine ufs_logtimer(nunit,string,time0) + integer, intent(in) :: nunit + character(len=*), intent(in) :: string + real(dbl_kind), intent(in) :: time0 + real(dbl_kind) :: MPI_Wtime, timevalue + timevalue = MPI_Wtime()-time0 + write(nunit,*)'CICE '//trim(string),timevalue + end subroutine ufs_logtimer subroutine shr_file_setLogUnit(nunit) integer, intent(in) :: nunit - ! do nothing for this stub - its just here to replace - ! having cppdefs in the main program end subroutine shr_file_setLogUnit subroutine shr_file_getLogUnit(nunit) integer, intent(in) :: nunit - ! do nothing for this stub - its just here to replace - ! 
having cppdefs in the main program end subroutine shr_file_getLogUnit subroutine t_startf(string) @@ -30,6 +46,15 @@ subroutine t_barrierf(string, comm) character(len=*) :: string integer:: comm end subroutine t_barrierf +#else + subroutine ufs_settimer(timevalue) + real(dbl_kind), intent(out) :: timevalue + end subroutine ufs_settimer + subroutine ufs_logtimer(nunit,string,time0) + character(len=*), intent(in) :: string + real(dbl_kind), intent(in) :: time0 + integer, intent(in) :: nunit + end subroutine ufs_logtimer #endif end module cice_wrapper_mod diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index b556ec408..b43920b40 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -38,6 +38,7 @@ module ice_comp_nuopc use icepack_intfc , only : icepack_query_tracer_flags, icepack_query_parameters use cice_wrapper_mod , only : t_startf, t_stopf, t_barrierf use cice_wrapper_mod , only : shr_file_getlogunit, shr_file_setlogunit + use cice_wrapper_mod , only : ufs_settimer, ufs_logtimer, timeiads, timeirls, timeadv, timefs #ifdef CESMCOUPLED use shr_const_mod use shr_orb_mod , only : shr_orb_decl, shr_orb_params, SHR_ORB_UNDEF_REAL, SHR_ORB_UNDEF_INT @@ -105,7 +106,6 @@ module ice_comp_nuopc character(*), parameter :: modName = "(ice_comp_nuopc)" character(*), parameter :: u_FILE_u = & __FILE__ - real(dbl_kind) :: timers, timere !======================================================================= contains @@ -244,10 +244,9 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) character(len=char_len) :: tfrz_option ! tfrz_option from cice namelist character(len=char_len) :: tfrz_option_driver ! tfrz_option from cice namelist character(len=*), parameter :: subname=trim(modName)//':(InitializeAdvertise) ' - real(dbl_kind) :: MPI_Wtime, timeiads !-------------------------------- - timeiads = MPI_Wtime() + if (mastertask) call ufs_settimer(timeiads) call NUOPC_CompAttributeGet(gcomp, name="ScalarFieldName", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return @@ -703,8 +702,7 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) end if call t_stopf ('cice_init_total') - if(mastertask) write(nu_diag,*) 'In ', trim(subname),' time', MPI_Wtime()-timeiads - + if (mastertask) call ufs_logtimer(nu_diag,'InitializeAdvertise time: ',timeiads) end subroutine InitializeAdvertise !=============================================================================== @@ -735,13 +733,12 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) character(len=char_len_long) :: single_column_lnd_domainfile character(len=char_len_long) , pointer :: lfieldnamelist(:) => null() character(len=*), parameter :: subname=trim(modName)//':(InitializeRealize) ' - real(dbl_kind) :: MPI_Wtime, timeirls !-------------------------------- rc = ESMF_SUCCESS - timeirls = MPI_Wtime() if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) + if (mastertask) call ufs_settimer(timeirls) !---------------------------------------------------------------------------- ! Second cice initialization phase -after initializing grid info !---------------------------------------------------------------------------- @@ -919,10 +916,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) call flush_fileunit(nu_diag) - timers = 0. - timere = 0. 
- if(mastertask) write(nu_diag,*) 'In ', trim(subname),' time', MPI_Wtime()-timeirls - + if (mastertask) call ufs_logtimer(nu_diag,'InitializeRealize time: ',timeirls) end subroutine InitializeRealize !=============================================================================== @@ -965,12 +959,11 @@ subroutine ModelAdvance(gcomp, rc) logical :: isPresent, isSet character(len=*),parameter :: subname=trim(modName)//':(ModelAdvance) ' character(char_len_long) :: msgString - real(dbl_kind) :: MPI_Wtime !-------------------------------- rc = ESMF_SUCCESS - timers = MPI_Wtime() - if(mastertask) write(nu_diag,*) 'In CICE, time since last step ', timers - timere + if (mastertask) call ufs_logtimer(nu_diag,'ModelAdvance time since last step: ',timeadv) + if (mastertask) call ufs_settimer(timeadv) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) @@ -1191,9 +1184,7 @@ subroutine ModelAdvance(gcomp, rc) if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) - timere = MPI_Wtime() - if(mastertask) write(nu_diag,*) 'In ', trim(subname),' time ', timere-timers - + if (mastertask) call ufs_logtimer(nu_diag, 'ModelAdvance time: ', timeadv) end subroutine ModelAdvance !=============================================================================== @@ -1335,11 +1326,10 @@ subroutine ModelFinalize(gcomp, rc) ! local variables character(*), parameter :: F91 = "('(ice_comp_nuopc) ',73('-'))" character(len=*),parameter :: subname=trim(modName)//':(ModelFinalize) ' - real(dbl_kind) :: MPI_Wtime, timefs !-------------------------------- rc = ESMF_SUCCESS - timefs = MPI_Wtime() + if (mastertask) call ufs_settimer(timefs) if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) if (my_task == master_task) then write(nu_diag,F91) @@ -1348,7 +1338,7 @@ subroutine ModelFinalize(gcomp, rc) end if if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) - if(mastertask) write(nu_diag,*) 'In ', trim(subname),' time ', MPI_Wtime()-timefs + if(mastertask) call ufs_logtimer(nu_diag,'ModelFinalize time: ', timefs) end subroutine ModelFinalize From c6049c59b68753fea7edc44d3e35a1b64a300202 Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Thu, 6 Jul 2023 18:51:11 -0500 Subject: [PATCH 80/90] add elapsed model time for logtime --- .../drivers/nuopc/cmeps/cice_wrapper_mod.F90 | 14 ++++++++------ .../drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 18 +++++++++--------- 2 files changed, 17 insertions(+), 15 deletions(-) diff --git a/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 b/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 index 9098715dc..c42125127 100644 --- a/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 +++ b/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 @@ -20,13 +20,14 @@ subroutine ufs_settimer(timevalue) real(dbl_kind) :: MPI_Wtime timevalue = MPI_Wtime() end subroutine ufs_settimer - subroutine ufs_logtimer(nunit,string,time0) + subroutine ufs_logtimer(nunit,etime,string,time0) integer, intent(in) :: nunit + integer(int_kind), intent(in) :: etime character(len=*), intent(in) :: string real(dbl_kind), intent(in) :: time0 real(dbl_kind) :: MPI_Wtime, timevalue timevalue = MPI_Wtime()-time0 - write(nunit,*)'CICE '//trim(string),timevalue + write(nunit,*)etime,' CICE '//trim(string),timevalue end subroutine ufs_logtimer subroutine shr_file_setLogUnit(nunit) @@ -50,10 +51,11 @@ end subroutine t_barrierf subroutine ufs_settimer(timevalue) real(dbl_kind), intent(out) :: timevalue end subroutine ufs_settimer - subroutine ufs_logtimer(nunit,string,time0) - 
character(len=*), intent(in) :: string - real(dbl_kind), intent(in) :: time0 - integer, intent(in) :: nunit + subroutine ufs_logtimer(nunit,etime,string,time0) + character(len=*), intent(in) :: string + integer(int_kind), intent(in) :: etime + real(dbl_kind), intent(in) :: time0 + integer, intent(in) :: nunit end subroutine ufs_logtimer #endif diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index b43920b40..035fedf81 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -246,7 +246,7 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) character(len=*), parameter :: subname=trim(modName)//':(InitializeAdvertise) ' !-------------------------------- - if (mastertask) call ufs_settimer(timeiads) + call ufs_settimer(timeiads) call NUOPC_CompAttributeGet(gcomp, name="ScalarFieldName", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return @@ -702,7 +702,7 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) end if call t_stopf ('cice_init_total') - if (mastertask) call ufs_logtimer(nu_diag,'InitializeAdvertise time: ',timeiads) + if (mastertask) call ufs_logtimer(nu_diag,msec,'InitializeAdvertise time: ',timeiads) end subroutine InitializeAdvertise !=============================================================================== @@ -738,7 +738,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) rc = ESMF_SUCCESS if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) - if (mastertask) call ufs_settimer(timeirls) + call ufs_settimer(timeirls) !---------------------------------------------------------------------------- ! 
Second cice initialization phase -after initializing grid info !---------------------------------------------------------------------------- @@ -916,7 +916,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) call flush_fileunit(nu_diag) - if (mastertask) call ufs_logtimer(nu_diag,'InitializeRealize time: ',timeirls) + if (mastertask) call ufs_logtimer(nu_diag,msec,'InitializeRealize time: ',timeirls) end subroutine InitializeRealize !=============================================================================== @@ -962,8 +962,8 @@ subroutine ModelAdvance(gcomp, rc) !-------------------------------- rc = ESMF_SUCCESS - if (mastertask) call ufs_logtimer(nu_diag,'ModelAdvance time since last step: ',timeadv) - if (mastertask) call ufs_settimer(timeadv) + if (mastertask) call ufs_logtimer(nu_diag,msec,'ModelAdvance time since last step: ',timeadv) + call ufs_settimer(timeadv) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) @@ -1184,7 +1184,7 @@ subroutine ModelAdvance(gcomp, rc) if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) - if (mastertask) call ufs_logtimer(nu_diag, 'ModelAdvance time: ', timeadv) + if (mastertask) call ufs_logtimer(nu_diag,msec,'ModelAdvance time: ', timeadv) end subroutine ModelAdvance !=============================================================================== @@ -1329,7 +1329,7 @@ subroutine ModelFinalize(gcomp, rc) !-------------------------------- rc = ESMF_SUCCESS - if (mastertask) call ufs_settimer(timefs) + call ufs_settimer(timefs) if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) if (my_task == master_task) then write(nu_diag,F91) @@ -1338,7 +1338,7 @@ subroutine ModelFinalize(gcomp, rc) end if if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) - if(mastertask) call ufs_logtimer(nu_diag,'ModelFinalize time: ', timefs) + if(mastertask) call ufs_logtimer(nu_diag,msec,'ModelFinalize time: ', timefs) end subroutine ModelFinalize From b7d88d957345ff7e7506c157a1811c6f887fb19d Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Fri, 7 Jul 2023 07:38:31 -0500 Subject: [PATCH 81/90] tidy up the wrapper * fix case for 'time since' at the first advance --- .../drivers/nuopc/cmeps/cice_wrapper_mod.F90 | 57 ++++++++++--------- 1 file changed, 31 insertions(+), 26 deletions(-) diff --git a/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 b/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 index c42125127..6343ae6e9 100644 --- a/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 +++ b/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 @@ -1,35 +1,50 @@ module cice_wrapper_mod #ifdef CESMCOUPLED - use perf_mod , only : t_startf, t_stopf, t_barrierf - use shr_file_mod , only : shr_file_getlogunit, shr_file_setlogunit + use perf_mod , only : t_startf, t_stopf, t_barrierf + use shr_file_mod , only : shr_file_getlogunit, shr_file_setlogunit -#else use ice_kinds_mod , only : dbl_kind, int_kind, char_len, char_len_long implicit none - - real(dbl_kind) :: timeiads = 0.0, timeirls = 0.0, timeadv = 0.0, timefs = 0.0 -#endif contains ! Define stub routines that do nothing - they are just here to avoid ! 
having cppdefs in the main program -#ifndef CESMCOUPLED subroutine ufs_settimer(timevalue) - real(dbl_kind), intent(inout) :: timevalue - real(dbl_kind) :: MPI_Wtime + real(dbl_kind), intent(out) :: timevalue + end subroutine ufs_settimer + subroutine ufs_logtimer(nunit,etime,string,time0) + integer, intent(in) :: nunit + integer(int_kind), intent(in) :: etime + character(len=*), intent(in) :: string + real(dbl_kind), intent(in) :: time0 + end subroutine ufs_logtimer +#else + use ice_kinds_mod , only : dbl_kind, int_kind, char_len, char_len_long + + implicit none + + real(dbl_kind) :: timeiads = 0.0, timeirls = 0.0, timeadv = 0.0, timefs = 0.0 +contains + subroutine ufs_settimer(timevalue) + real(dbl_kind), intent(inout) :: timevalue + real(dbl_kind) :: MPI_Wtime timevalue = MPI_Wtime() end subroutine ufs_settimer subroutine ufs_logtimer(nunit,etime,string,time0) - integer, intent(in) :: nunit - integer(int_kind), intent(in) :: etime - character(len=*), intent(in) :: string - real(dbl_kind), intent(in) :: time0 - real(dbl_kind) :: MPI_Wtime, timevalue - timevalue = MPI_Wtime()-time0 - write(nunit,*)etime,' CICE '//trim(string),timevalue + integer, intent(in) :: nunit + integer(int_kind), intent(in) :: etime + character(len=*), intent(in) :: string + real(dbl_kind), intent(in) :: time0 + real(dbl_kind) :: MPI_Wtime, timevalue + if (time0 > 0.) then + timevalue = MPI_Wtime()-time0 + write(nunit,*)etime,' CICE '//trim(string),timevalue + end if end subroutine ufs_logtimer + ! Define stub routines that do nothing - they are just here to avoid + ! having cppdefs in the main program subroutine shr_file_setLogUnit(nunit) integer, intent(in) :: nunit end subroutine shr_file_setLogUnit @@ -47,16 +62,6 @@ subroutine t_barrierf(string, comm) character(len=*) :: string integer:: comm end subroutine t_barrierf -#else - subroutine ufs_settimer(timevalue) - real(dbl_kind), intent(out) :: timevalue - end subroutine ufs_settimer - subroutine ufs_logtimer(nunit,etime,string,time0) - character(len=*), intent(in) :: string - integer(int_kind), intent(in) :: etime - real(dbl_kind), intent(in) :: time0 - integer, intent(in) :: nunit - end subroutine ufs_logtimer #endif end module cice_wrapper_mod From fead17fbf234cddd15455d1bb466652d48956982 Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Fri, 7 Jul 2023 14:51:07 -0500 Subject: [PATCH 82/90] add timer and forecast log * write timer values to timer log, not nu_diag * write log.ice.fXXX --- cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 | 16 ++++++-- .../drivers/nuopc/cmeps/cice_wrapper_mod.F90 | 40 ++++++++++++++++--- .../drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 16 ++++---- 3 files changed, 56 insertions(+), 16 deletions(-) diff --git a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 index e908f509f..db28c79de 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 @@ -16,6 +16,7 @@ module CICE_RunMod use ice_kinds_mod use cice_wrapper_mod, only : t_startf, t_stopf, t_barrierf + use cice_wrapper_mod, only : ufs_logfhour use ice_fileunits, only: nu_diag use ice_arrays_column, only: oceanmixed_ice use ice_constants, only: c0, c1 @@ -107,9 +108,11 @@ end subroutine CICE_Run subroutine ice_step + use ice_constants, only: c3600 use ice_boundary, only: ice_HaloUpdate use ice_calendar, only: dt, dt_dyn, ndtd, diagfreq, write_restart, istep - use ice_calendar, only: idate, msec + use ice_calendar, only: idate, msec, timesecs + use ice_calendar, only: write_history, 
nstreams, histfreq use ice_diagnostics, only: init_mass_diags, runtime_diags, debug_model, debug_ice use ice_diagnostics_bgc, only: hbrine_diags, zsal_diags, bgc_diags use ice_domain, only: halo_info, nblocks @@ -133,7 +136,7 @@ subroutine ice_step use ice_timers, only: ice_timer_start, ice_timer_stop, & timer_diags, timer_column, timer_thermo, timer_bound, & timer_hist, timer_readwrite - use ice_communicate, only: MPI_COMM_ICE + use ice_communicate, only: MPI_COMM_ICE, my_task, master_task use ice_prescribed_mod integer (kind=int_kind) :: & @@ -152,6 +155,7 @@ subroutine ice_step character(len=*), parameter :: subname = '(ice_step)' character (len=char_len) :: plabeld + integer (kind=int_kind) :: ns if (debug_model) then plabeld = 'beginning time step' @@ -384,7 +388,13 @@ subroutine ice_step endif call ice_timer_stop(timer_readwrite) ! reading/writing - + if (my_task == master_task) then + do ns = 1,nstreams + if (write_history(ns) .and. histfreq(ns) .eq. 'h') then + call ufs_logfhour(idate,msec,timesecs/c3600) + end if + end do + end if end subroutine ice_step !======================================================================= diff --git a/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 b/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 index 6343ae6e9..b2e5e5315 100644 --- a/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 +++ b/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 @@ -10,16 +10,26 @@ module cice_wrapper_mod contains ! Define stub routines that do nothing - they are just here to avoid ! having cppdefs in the main program + subroutine ufs_file_setLogUnit(filename,nunit) + character(len=*), intent(in) :: filename + integer, intent(out) :: nunit + end subroutine ufs_file_setLogUnit subroutine ufs_settimer(timevalue) real(dbl_kind), intent(out) :: timevalue end subroutine ufs_settimer - subroutine ufs_logtimer(nunit,etime,string,time0) + subroutine ufs_logtimer(nunit,esecs,string,time0) integer, intent(in) :: nunit - integer(int_kind), intent(in) :: etime + integer(int_kind), intent(in) :: esecs character(len=*), intent(in) :: string real(dbl_kind), intent(in) :: time0 end subroutine ufs_logtimer + subroutine ufs_logfhour(date,esecs,hour) + integer(int_kind), intent(in) :: date + integer(int_kind), intent(in) :: esecs + real(dbl_kind), intent(in) :: hour + end subroutine ufs_logfhour #else + use ice_kinds_mod , only : dbl_kind, int_kind, char_len, char_len_long implicit none @@ -31,18 +41,37 @@ subroutine ufs_settimer(timevalue) real(dbl_kind) :: MPI_Wtime timevalue = MPI_Wtime() end subroutine ufs_settimer - subroutine ufs_logtimer(nunit,etime,string,time0) + + subroutine ufs_logtimer(nunit,esecs,string,time0) integer, intent(in) :: nunit - integer(int_kind), intent(in) :: etime + integer(int_kind), intent(in) :: esecs character(len=*), intent(in) :: string real(dbl_kind), intent(in) :: time0 real(dbl_kind) :: MPI_Wtime, timevalue if (time0 > 0.) 
then timevalue = MPI_Wtime()-time0 - write(nunit,*)etime,' CICE '//trim(string),timevalue + write(nunit,*)esecs,' CICE '//trim(string),timevalue end if end subroutine ufs_logtimer + subroutine ufs_file_setLogUnit(filename,nunit) + character(len=*), intent(in) :: filename + integer, intent(out) :: nunit + open (newunit=nunit, file=trim(filename)) + end subroutine ufs_file_setLogUnit + + subroutine ufs_logfhour(date,esecs,hour) + integer(int_kind), intent(in) :: date + integer(int_kind), intent(in) :: esecs + real(dbl_kind), intent(in) :: hour + character(len=char_len) :: filename + integer(int_kind) :: nunit + write(filename,'(a,i3.3)')'log.ice.f',int(hour) + open(newunit=nunit,file=trim(filename)) + write(nunit,'(a,f8.2,a,2i10)')'completed CICE fhour = ',hour,' complete ',date,esecs + close(nunit) + end subroutine ufs_logfhour + ! Define stub routines that do nothing - they are just here to avoid ! having cppdefs in the main program subroutine shr_file_setLogUnit(nunit) @@ -51,7 +80,6 @@ end subroutine shr_file_setLogUnit subroutine shr_file_getLogUnit(nunit) integer, intent(in) :: nunit end subroutine shr_file_getLogUnit - subroutine t_startf(string) character(len=*) :: string end subroutine t_startf diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index 035fedf81..87c7f1684 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -38,7 +38,8 @@ module ice_comp_nuopc use icepack_intfc , only : icepack_query_tracer_flags, icepack_query_parameters use cice_wrapper_mod , only : t_startf, t_stopf, t_barrierf use cice_wrapper_mod , only : shr_file_getlogunit, shr_file_setlogunit - use cice_wrapper_mod , only : ufs_settimer, ufs_logtimer, timeiads, timeirls, timeadv, timefs + use cice_wrapper_mod , only : ufs_settimer, ufs_logtimer, ufs_file_setlogunit + use cice_wrapper_mod , only : timeiads, timeirls, timeadv, timefs #ifdef CESMCOUPLED use shr_const_mod use shr_orb_mod , only : shr_orb_decl, shr_orb_params, SHR_ORB_UNDEF_REAL, SHR_ORB_UNDEF_INT @@ -89,7 +90,7 @@ module ice_comp_nuopc type(ESMF_Mesh) :: ice_mesh integer :: nthrds ! Number of threads to use in this component - + integer :: nu_timer ! Simple timer log integer :: dbug = 0 logical :: profile_memory = .false. logical :: mastertask @@ -490,6 +491,7 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) ! 
Set the nu_diag_set flag so it's not reset later call shr_file_setLogUnit (shrlogunit) + call ufs_file_setLogUnit('./log.ice.timer', nu_timer) call NUOPC_CompAttributeGet(gcomp, name="diro", value=cvalue, & isPresent=isPresent, isSet=isSet, rc=rc) @@ -702,7 +704,7 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) end if call t_stopf ('cice_init_total') - if (mastertask) call ufs_logtimer(nu_diag,msec,'InitializeAdvertise time: ',timeiads) + if (mastertask) call ufs_logtimer(nu_timer,msec,'InitializeAdvertise time: ',timeiads) end subroutine InitializeAdvertise !=============================================================================== @@ -916,7 +918,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) call flush_fileunit(nu_diag) - if (mastertask) call ufs_logtimer(nu_diag,msec,'InitializeRealize time: ',timeirls) + if (mastertask) call ufs_logtimer(nu_timer,msec,'InitializeRealize time: ',timeirls) end subroutine InitializeRealize !=============================================================================== @@ -962,7 +964,7 @@ subroutine ModelAdvance(gcomp, rc) !-------------------------------- rc = ESMF_SUCCESS - if (mastertask) call ufs_logtimer(nu_diag,msec,'ModelAdvance time since last step: ',timeadv) + if (mastertask) call ufs_logtimer(nu_timer,msec,'ModelAdvance time since last step: ',timeadv) call ufs_settimer(timeadv) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) @@ -1184,7 +1186,7 @@ subroutine ModelAdvance(gcomp, rc) if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) - if (mastertask) call ufs_logtimer(nu_diag,msec,'ModelAdvance time: ', timeadv) + if (mastertask) call ufs_logtimer(nu_timer,msec,'ModelAdvance time: ', timeadv) end subroutine ModelAdvance !=============================================================================== @@ -1338,7 +1340,7 @@ subroutine ModelFinalize(gcomp, rc) end if if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) - if(mastertask) call ufs_logtimer(nu_diag,msec,'ModelFinalize time: ', timefs) + if(mastertask) call ufs_logtimer(nu_timer,msec,'ModelFinalize time: ', timefs) end subroutine ModelFinalize From a744734cdd20fcde439ef9585002f34acb13a95a Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Sat, 8 Jul 2023 10:19:13 -0500 Subject: [PATCH 83/90] only one time is needed --- .../drivers/nuopc/cmeps/cice_wrapper_mod.F90 | 2 +- .../drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 21 +++++++++---------- 2 files changed, 11 insertions(+), 12 deletions(-) diff --git a/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 b/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 index b2e5e5315..3c4ce6e42 100644 --- a/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 +++ b/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 @@ -34,7 +34,7 @@ end subroutine ufs_logfhour implicit none - real(dbl_kind) :: timeiads = 0.0, timeirls = 0.0, timeadv = 0.0, timefs = 0.0 + real(dbl_kind) :: wtime = 0.0 contains subroutine ufs_settimer(timevalue) real(dbl_kind), intent(inout) :: timevalue diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index 87c7f1684..e85a32a0b 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -38,8 +38,7 @@ module ice_comp_nuopc use icepack_intfc , only : icepack_query_tracer_flags, icepack_query_parameters use cice_wrapper_mod , only : t_startf, t_stopf, t_barrierf use cice_wrapper_mod , only : shr_file_getlogunit, 
shr_file_setlogunit - use cice_wrapper_mod , only : ufs_settimer, ufs_logtimer, ufs_file_setlogunit - use cice_wrapper_mod , only : timeiads, timeirls, timeadv, timefs + use cice_wrapper_mod , only : ufs_settimer, ufs_logtimer, ufs_file_setlogunit, wtime #ifdef CESMCOUPLED use shr_const_mod use shr_orb_mod , only : shr_orb_decl, shr_orb_params, SHR_ORB_UNDEF_REAL, SHR_ORB_UNDEF_INT @@ -247,7 +246,7 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) character(len=*), parameter :: subname=trim(modName)//':(InitializeAdvertise) ' !-------------------------------- - call ufs_settimer(timeiads) + call ufs_settimer(wtime) call NUOPC_CompAttributeGet(gcomp, name="ScalarFieldName", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return @@ -704,7 +703,7 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) end if call t_stopf ('cice_init_total') - if (mastertask) call ufs_logtimer(nu_timer,msec,'InitializeAdvertise time: ',timeiads) + if (mastertask) call ufs_logtimer(nu_timer,msec,'InitializeAdvertise time: ',wtime) end subroutine InitializeAdvertise !=============================================================================== @@ -740,7 +739,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) rc = ESMF_SUCCESS if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) - call ufs_settimer(timeirls) + call ufs_settimer(wtime) !---------------------------------------------------------------------------- ! Second cice initialization phase -after initializing grid info !---------------------------------------------------------------------------- @@ -918,7 +917,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) call flush_fileunit(nu_diag) - if (mastertask) call ufs_logtimer(nu_timer,msec,'InitializeRealize time: ',timeirls) + if (mastertask) call ufs_logtimer(nu_timer,msec,'InitializeRealize time: ',wtime) end subroutine InitializeRealize !=============================================================================== @@ -964,8 +963,8 @@ subroutine ModelAdvance(gcomp, rc) !-------------------------------- rc = ESMF_SUCCESS - if (mastertask) call ufs_logtimer(nu_timer,msec,'ModelAdvance time since last step: ',timeadv) - call ufs_settimer(timeadv) + if (mastertask) call ufs_logtimer(nu_timer,msec,'ModelAdvance time since last step: ',wtime) + call ufs_settimer(wtime) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) @@ -1186,7 +1185,7 @@ subroutine ModelAdvance(gcomp, rc) if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) - if (mastertask) call ufs_logtimer(nu_timer,msec,'ModelAdvance time: ', timeadv) + if (mastertask) call ufs_logtimer(nu_timer,msec,'ModelAdvance time: ', wtime) end subroutine ModelAdvance !=============================================================================== @@ -1331,7 +1330,7 @@ subroutine ModelFinalize(gcomp, rc) !-------------------------------- rc = ESMF_SUCCESS - call ufs_settimer(timefs) + call ufs_settimer(wtime) if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) if (my_task == master_task) then write(nu_diag,F91) @@ -1340,7 +1339,7 @@ subroutine ModelFinalize(gcomp, rc) end if if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) - if(mastertask) call ufs_logtimer(nu_timer,msec,'ModelFinalize time: ', timefs) + if(mastertask) call ufs_logtimer(nu_timer,msec,'ModelFinalize time: ', wtime) end subroutine ModelFinalize From 
1c98b436f8d506cd3e466636b162ca8f1b1e9087 Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Mon, 10 Jul 2023 12:18:21 -0500 Subject: [PATCH 84/90] modify message written for log.ice.fXXX --- cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 | 11 +++-- .../drivers/nuopc/cmeps/cice_wrapper_mod.F90 | 44 +++++++++---------- 2 files changed, 28 insertions(+), 27 deletions(-) diff --git a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 index db28c79de..301ada1ea 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 @@ -111,8 +111,8 @@ subroutine ice_step use ice_constants, only: c3600 use ice_boundary, only: ice_HaloUpdate use ice_calendar, only: dt, dt_dyn, ndtd, diagfreq, write_restart, istep - use ice_calendar, only: idate, msec, timesecs - use ice_calendar, only: write_history, nstreams, histfreq + use ice_calendar, only: idate, myear, mmonth, mday, msec, timesecs + use ice_calendar, only: calendar_sec2hms, write_history, nstreams, histfreq use ice_diagnostics, only: init_mass_diags, runtime_diags, debug_model, debug_ice use ice_diagnostics_bgc, only: hbrine_diags, zsal_diags, bgc_diags use ice_domain, only: halo_info, nblocks @@ -155,7 +155,8 @@ subroutine ice_step character(len=*), parameter :: subname = '(ice_step)' character (len=char_len) :: plabeld - integer (kind=int_kind) :: ns + integer (kind=int_kind) :: hh,mm,ss,ns + character (len=char_len) :: logmsg if (debug_model) then plabeld = 'beginning time step' @@ -391,7 +392,9 @@ subroutine ice_step if (my_task == master_task) then do ns = 1,nstreams if (write_history(ns) .and. histfreq(ns) .eq. 'h') then - call ufs_logfhour(idate,msec,timesecs/c3600) + call calendar_sec2hms(msec,hh,mm,ss) + write(logmsg,'(6(i4,2x))')myear,mmonth,mday,hh,mm,ss + call ufs_logfhour(trim(logmsg),timesecs/c3600) end if end do end if diff --git a/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 b/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 index 3c4ce6e42..659507373 100644 --- a/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 +++ b/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 @@ -10,23 +10,22 @@ module cice_wrapper_mod contains ! Define stub routines that do nothing - they are just here to avoid ! 
having cppdefs in the main program - subroutine ufs_file_setLogUnit(filename,nunit) - character(len=*), intent(in) :: filename - integer, intent(out) :: nunit - end subroutine ufs_file_setLogUnit subroutine ufs_settimer(timevalue) - real(dbl_kind), intent(out) :: timevalue + real(dbl_kind), intent(out) :: timevalue end subroutine ufs_settimer - subroutine ufs_logtimer(nunit,esecs,string,time0) - integer, intent(in) :: nunit - integer(int_kind), intent(in) :: esecs - character(len=*), intent(in) :: string - real(dbl_kind), intent(in) :: time0 + subroutine ufs_logtimer(nunit,elapsedsecs,string,time0) + integer, intent(in) :: nunit + integer(int_kind), intent(in) :: elapsedsecs + character(len=*), intent(in) :: string + real(dbl_kind), intent(in) :: time0 end subroutine ufs_logtimer - subroutine ufs_logfhour(date,esecs,hour) - integer(int_kind), intent(in) :: date - integer(int_kind), intent(in) :: esecs - real(dbl_kind), intent(in) :: hour + subroutine ufs_file_setLogUnit(filename,nunit) + character(len=*), intent(in) :: filename + integer, intent(out) :: nunit + end subroutine ufs_file_setLogUnit + subroutine ufs_logfhour(msg,hour) + character(len=*), intent(in) :: msg + real(dbl_kind), intent(in) :: hour end subroutine ufs_logfhour #else @@ -42,33 +41,32 @@ subroutine ufs_settimer(timevalue) timevalue = MPI_Wtime() end subroutine ufs_settimer - subroutine ufs_logtimer(nunit,esecs,string,time0) + subroutine ufs_logtimer(nunit,elapsedsecs,string,time0) integer, intent(in) :: nunit - integer(int_kind), intent(in) :: esecs + integer(int_kind), intent(in) :: elapsedsecs character(len=*), intent(in) :: string real(dbl_kind), intent(in) :: time0 real(dbl_kind) :: MPI_Wtime, timevalue if (time0 > 0.) then timevalue = MPI_Wtime()-time0 - write(nunit,*)esecs,' CICE '//trim(string),timevalue + write(nunit,*)elapsedsecs,' CICE '//trim(string),timevalue end if end subroutine ufs_logtimer subroutine ufs_file_setLogUnit(filename,nunit) - character(len=*), intent(in) :: filename - integer, intent(out) :: nunit + character(len=*), intent(in) :: filename + integer, intent(out) :: nunit open (newunit=nunit, file=trim(filename)) end subroutine ufs_file_setLogUnit - subroutine ufs_logfhour(date,esecs,hour) - integer(int_kind), intent(in) :: date - integer(int_kind), intent(in) :: esecs + subroutine ufs_logfhour(msg,hour) + character(len=*), intent(in) :: msg real(dbl_kind), intent(in) :: hour character(len=char_len) :: filename integer(int_kind) :: nunit write(filename,'(a,i3.3)')'log.ice.f',int(hour) open(newunit=nunit,file=trim(filename)) - write(nunit,'(a,f8.2,a,2i10)')'completed CICE fhour = ',hour,' complete ',date,esecs + write(nunit,'(a,f10.3,a)')'completed CICE fhour = ',hour,' validation time: '//trim(msg) close(nunit) end subroutine ufs_logfhour From dd13d6f7c3ce161283cf228d2b5eb3764f656bab Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Mon, 10 Jul 2023 16:06:52 -0500 Subject: [PATCH 85/90] change info in fXXX log file --- cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 b/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 index 659507373..2e0324e22 100644 --- a/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 +++ b/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 @@ -66,7 +66,9 @@ subroutine ufs_logfhour(msg,hour) integer(int_kind) :: nunit write(filename,'(a,i3.3)')'log.ice.f',int(hour) open(newunit=nunit,file=trim(filename)) - write(nunit,'(a,f10.3,a)')'completed CICE 
fhour = ',hour,' validation time: '//trim(msg) + write(nunit,'(a)')'completed: cice' + write(nunit,'(a,f10.3)')'forecast hour:',hour + write(nunit,'(a)')'valid time: '//trim(msg) close(nunit) end subroutine ufs_logfhour From d41c61dd332d5803d31408be1b9e8ef611856049 Mon Sep 17 00:00:00 2001 From: Denise Worthen Date: Wed, 2 Aug 2023 17:09:26 -0400 Subject: [PATCH 86/90] Update CICE from Consortium/main (#62) * Fix CESMCOUPLED compile issue in icepack. (#823) * Update global reduction implementation to improve performance, fix VP bug (#824) * Update VP global sum to exclude local implementation with tripole grids * Add functionality to change hist_avg for each stream (#827) * Update Icepack to #6703bc533c968 May 22, 2023 (#829) * Fix for mesh check in CESM driver (#830) * Namelist option for time axis position. (#839) --- .readthedocs.yaml | 29 + cicecore/cicedyn/analysis/ice_diagnostics.F90 | 162 +- cicecore/cicedyn/analysis/ice_history.F90 | 4 +- .../cicedyn/analysis/ice_history_pond.F90 | 6 +- .../cicedyn/analysis/ice_history_shared.F90 | 8 +- .../cicedyn/analysis/ice_history_snow.F90 | 2 +- cicecore/cicedyn/dynamics/ice_dyn_vp.F90 | 41 +- .../cicedyn/dynamics/ice_transport_driver.F90 | 12 +- .../cicedyn/dynamics/ice_transport_remap.F90 | 113 +- cicecore/cicedyn/general/ice_forcing.F90 | 539 +-- cicecore/cicedyn/general/ice_init.F90 | 77 +- .../infrastructure/comm/mpi/ice_exit.F90 | 60 +- .../comm/mpi/ice_global_reductions.F90 | 232 +- .../infrastructure/comm/mpi/ice_reprosum.F90 | 34 +- .../comm/serial/ice_boundary.F90 | 2 +- .../infrastructure/comm/serial/ice_exit.F90 | 69 +- .../comm/serial/ice_global_reductions.F90 | 232 +- .../cicedyn/infrastructure/ice_domain.F90 | 4 +- .../io/io_binary/ice_history_write.F90 | 18 +- .../io/io_netcdf/ice_history_write.F90 | 57 +- .../io/io_pio2/ice_history_write.F90 | 47 +- cicecore/drivers/nuopc/cmeps/ice_mesh_mod.F90 | 20 +- cicecore/drivers/unittest/optargs/optargs.F90 | 160 +- .../drivers/unittest/optargs/optargs_subs.F90 | 116 +- cicecore/drivers/unittest/opticep/CICE.F90 | 59 + .../unittest/opticep/CICE_FinalMod.F90 | 71 + .../drivers/unittest/opticep/CICE_InitMod.F90 | 517 +++ .../drivers/unittest/opticep/CICE_RunMod.F90 | 741 ++++ cicecore/drivers/unittest/opticep/README | 30 + .../unittest/opticep/ice_init_column.F90 | 3135 +++++++++++++++++ .../drivers/unittest/opticep/ice_step_mod.F90 | 1784 ++++++++++ cicecore/drivers/unittest/sumchk/sumchk.F90 | 59 +- cicecore/shared/ice_fileunits.F90 | 22 +- configuration/scripts/Makefile | 6 +- configuration/scripts/cice.batch.csh | 15 + configuration/scripts/cice.launch.csh | 12 + configuration/scripts/ice_in | 3 +- .../scripts/machines/Macros.derecho_intel | 69 + .../scripts/machines/Macros.onyx_intel | 4 +- .../scripts/machines/env.derecho_intel | 70 + configuration/scripts/machines/env.nrlssc_gnu | 10 +- configuration/scripts/machines/env.onyx_cray | 12 +- configuration/scripts/machines/env.onyx_gnu | 12 +- configuration/scripts/machines/env.onyx_intel | 12 +- configuration/scripts/options/set_env.opticep | 2 + configuration/scripts/options/set_nml.gx1 | 4 +- configuration/scripts/options/set_nml.gx3 | 4 +- .../scripts/options/set_nml.gx3ncarbulk | 2 +- .../scripts/options/set_nml.histinst | 2 +- configuration/scripts/options/set_nml.jra55 | 2 + configuration/scripts/options/set_nml.jra55do | 2 + configuration/scripts/options/set_nml.qc | 2 +- configuration/scripts/options/set_nml.run3dt | 2 +- configuration/scripts/options/set_nml.tx1 | 4 +- configuration/scripts/tests/base_suite.ts | 3 + 
configuration/scripts/tests/baseline.script | 27 +- configuration/scripts/tests/comparelog.csh | 12 +- configuration/scripts/tests/unittest_suite.ts | 8 +- doc/source/cice_index.rst | 7 +- doc/source/developer_guide/dg_forcing.rst | 17 +- doc/source/developer_guide/dg_other.rst | 6 +- doc/source/developer_guide/dg_tools.rst | 4 +- doc/source/intro/about.rst | 2 +- doc/source/intro/citing.rst | 2 +- doc/source/master_list.bib | 10 +- doc/source/science_guide/sg_coupling.rst | 4 +- doc/source/science_guide/sg_dynamics.rst | 4 +- doc/source/science_guide/sg_fundvars.rst | 4 +- doc/source/science_guide/sg_horiztrans.rst | 6 +- doc/source/science_guide/sg_tracers.rst | 2 +- doc/source/user_guide/ug_case_settings.rst | 10 +- doc/source/user_guide/ug_implementation.rst | 14 +- doc/source/user_guide/ug_testing.rst | 14 +- doc/source/user_guide/ug_troubleshooting.rst | 4 +- icepack | 2 +- 75 files changed, 7707 insertions(+), 1168 deletions(-) create mode 100644 .readthedocs.yaml create mode 100644 cicecore/drivers/unittest/opticep/CICE.F90 create mode 100644 cicecore/drivers/unittest/opticep/CICE_FinalMod.F90 create mode 100644 cicecore/drivers/unittest/opticep/CICE_InitMod.F90 create mode 100644 cicecore/drivers/unittest/opticep/CICE_RunMod.F90 create mode 100644 cicecore/drivers/unittest/opticep/README create mode 100644 cicecore/drivers/unittest/opticep/ice_init_column.F90 create mode 100644 cicecore/drivers/unittest/opticep/ice_step_mod.F90 create mode 100644 configuration/scripts/machines/Macros.derecho_intel create mode 100644 configuration/scripts/machines/env.derecho_intel create mode 100644 configuration/scripts/options/set_env.opticep create mode 100644 configuration/scripts/options/set_nml.jra55 create mode 100644 configuration/scripts/options/set_nml.jra55do diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 000000000..cc2b2817b --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,29 @@ +# .readthedocs.yaml +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Set the version of Python and other tools you might need +build: + os: ubuntu-22.04 + tools: + python: "3.7" + # You can also specify other tool versions: + # nodejs: "19" + # rust: "1.64" + # golang: "1.19" + +# Build documentation in the docs/ directory with Sphinx +sphinx: + configuration: doc/source/conf.py + +# If using Sphinx, optionally build your docs in additional formats such as PDF +formats: + - pdf + +# Optionally declare the Python requirements required to build your docs +python: + install: + - requirements: doc/requirements.txt diff --git a/cicecore/cicedyn/analysis/ice_diagnostics.F90 b/cicecore/cicedyn/analysis/ice_diagnostics.F90 index 53631b2d4..395cca98d 100644 --- a/cicecore/cicedyn/analysis/ice_diagnostics.F90 +++ b/cicecore/cicedyn/analysis/ice_diagnostics.F90 @@ -261,10 +261,8 @@ subroutine runtime_diags (dt) !$OMP END PARALLEL DO extentn = c0 extents = c0 - extentn = global_sum(work1, distrb_info, field_loc_center, & - tarean) - extents = global_sum(work1, distrb_info, field_loc_center, & - tareas) + extentn = global_sum(work1, distrb_info, field_loc_center, tarean) + extents = global_sum(work1, distrb_info, field_loc_center, tareas) extentn = extentn * m2_to_km2 extents = extents * m2_to_km2 @@ -1945,162 +1943,6 @@ subroutine print_state(plabel,i,j,iblk) end subroutine print_state !======================================================================= -#ifdef UNDEPRECATE_print_points_state - 
-! This routine is useful for debugging. - - subroutine print_points_state(plabel,ilabel) - - use ice_grid, only: grid_ice - use ice_blocks, only: block, get_block - use ice_domain, only: blocks_ice - use ice_domain_size, only: ncat, nilyr, nslyr - use ice_state, only: aice0, aicen, vicen, vsnon, uvel, vvel, & - uvelE, vvelE, uvelE, vvelE, trcrn - use ice_flux, only: uatm, vatm, potT, Tair, Qa, flw, frain, fsnow, & - fsens, flat, evap, flwout, swvdr, swvdf, swidr, swidf, rhoa, & - frzmlt, sst, sss, Tf, Tref, Qref, Uref, uocn, vocn, strtltxU, strtltyU - - character (len=*), intent(in),optional :: plabel - integer , intent(in),optional :: ilabel - - ! local variables - - real (kind=dbl_kind) :: & - eidebug, esdebug, & - qi, qs, & - puny - - integer (kind=int_kind) :: m, n, k, i, j, iblk, nt_Tsfc, nt_qice, nt_qsno - character(len=256) :: llabel - - type (block) :: & - this_block ! block information for current block - - character(len=*), parameter :: subname = '(print_points_state)' - ! ---------------------- - - call icepack_query_tracer_indices(nt_Tsfc_out=nt_Tsfc, nt_qice_out=nt_qice, & - nt_qsno_out=nt_qsno) - call icepack_query_parameters(puny_out=puny) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - - do m = 1, npnt - if (my_task == pmloc(m)) then - i = piloc(m) - j = pjloc(m) - iblk = pbloc(m) - this_block = get_block(blocks_ice(iblk),iblk) - - if (present(ilabel)) then - write(llabel,'(i6,a1,i3,a1)') ilabel,':',m,':' - else - write(llabel,'(i3,a1)') m,':' - endif - if (present(plabel)) then - write(llabel,'(a)') 'pps:'//trim(plabel)//':'//trim(llabel) - else - write(llabel,'(a)') 'pps:'//trim(llabel) - endif - - write(nu_diag,*) subname - write(nu_diag,*) trim(llabel),'istep1, my_task, i, j, iblk=', & - istep1, my_task, i, j, iblk - write(nu_diag,*) trim(llabel),'Global i and j=', & - this_block%i_glob(i), & - this_block%j_glob(j) - write(nu_diag,*) trim(llabel),'aice0=', aice0(i,j,iblk) - - do n = 1, ncat - write(nu_diag,*) trim(llabel),'aicen=', n,aicen(i,j,n,iblk) - write(nu_diag,*) trim(llabel),'vicen=', n,vicen(i,j,n,iblk) - write(nu_diag,*) trim(llabel),'vsnon=', n,vsnon(i,j,n,iblk) - if (aicen(i,j,n,iblk) > puny) then - write(nu_diag,*) trim(llabel),'hin=', n,vicen(i,j,n,iblk)/aicen(i,j,n,iblk) - write(nu_diag,*) trim(llabel),'hsn=', n,vsnon(i,j,n,iblk)/aicen(i,j,n,iblk) - endif - write(nu_diag,*) trim(llabel),'Tsfcn=',n,trcrn(i,j,nt_Tsfc,n,iblk) - enddo - - eidebug = c0 - do n = 1,ncat - do k = 1,nilyr - qi = trcrn(i,j,nt_qice+k-1,n,iblk) - write(nu_diag,*) trim(llabel),'qice= ',n,k, qi - eidebug = eidebug + qi - enddo - enddo - write(nu_diag,*) trim(llabel),'qice=',eidebug - - esdebug = c0 - do n = 1,ncat - if (vsnon(i,j,n,iblk) > puny) then - do k = 1,nslyr - qs = trcrn(i,j,nt_qsno+k-1,n,iblk) - write(nu_diag,*) trim(llabel),'qsnow=',n,k, qs - esdebug = esdebug + qs - enddo - endif - enddo - write(nu_diag,*) trim(llabel),'qsnow=',esdebug - - write(nu_diag,*) trim(llabel),'uvel=',uvel(i,j,iblk) - write(nu_diag,*) trim(llabel),'vvel=',vvel(i,j,iblk) - if (grid_ice == 'C') then - write(nu_diag,*) trim(llabel),'uvelE=',uvelE(i,j,iblk) - write(nu_diag,*) trim(llabel),'vvelN=',vvelN(i,j,iblk) - elseif (grid_ice == 'CD') then - write(nu_diag,*) trim(llabel),'uvelE=',uvelE(i,j,iblk) - write(nu_diag,*) trim(llabel),'vvelE=',vvelE(i,j,iblk) - write(nu_diag,*) trim(llabel),'uvelN=',uvelN(i,j,iblk) - write(nu_diag,*) trim(llabel),'vvelN=',vvelN(i,j,iblk) - endif - - write(nu_diag,*) ' ' - 
write(nu_diag,*) 'atm states and fluxes' - write(nu_diag,*) ' uatm = ',uatm (i,j,iblk) - write(nu_diag,*) ' vatm = ',vatm (i,j,iblk) - write(nu_diag,*) ' potT = ',potT (i,j,iblk) - write(nu_diag,*) ' Tair = ',Tair (i,j,iblk) - write(nu_diag,*) ' Qa = ',Qa (i,j,iblk) - write(nu_diag,*) ' rhoa = ',rhoa (i,j,iblk) - write(nu_diag,*) ' swvdr = ',swvdr(i,j,iblk) - write(nu_diag,*) ' swvdf = ',swvdf(i,j,iblk) - write(nu_diag,*) ' swidr = ',swidr(i,j,iblk) - write(nu_diag,*) ' swidf = ',swidf(i,j,iblk) - write(nu_diag,*) ' flw = ',flw (i,j,iblk) - write(nu_diag,*) ' frain = ',frain(i,j,iblk) - write(nu_diag,*) ' fsnow = ',fsnow(i,j,iblk) - write(nu_diag,*) ' ' - write(nu_diag,*) 'ocn states and fluxes' - write(nu_diag,*) ' frzmlt = ',frzmlt (i,j,iblk) - write(nu_diag,*) ' sst = ',sst (i,j,iblk) - write(nu_diag,*) ' sss = ',sss (i,j,iblk) - write(nu_diag,*) ' Tf = ',Tf (i,j,iblk) - write(nu_diag,*) ' uocn = ',uocn (i,j,iblk) - write(nu_diag,*) ' vocn = ',vocn (i,j,iblk) - write(nu_diag,*) ' strtltxU= ',strtltxU(i,j,iblk) - write(nu_diag,*) ' strtltyU= ',strtltyU(i,j,iblk) - write(nu_diag,*) ' ' - write(nu_diag,*) 'srf states and fluxes' - write(nu_diag,*) ' Tref = ',Tref (i,j,iblk) - write(nu_diag,*) ' Qref = ',Qref (i,j,iblk) - write(nu_diag,*) ' Uref = ',Uref (i,j,iblk) - write(nu_diag,*) ' fsens = ',fsens (i,j,iblk) - write(nu_diag,*) ' flat = ',flat (i,j,iblk) - write(nu_diag,*) ' evap = ',evap (i,j,iblk) - write(nu_diag,*) ' flwout = ',flwout(i,j,iblk) - write(nu_diag,*) ' ' - call flush_fileunit(nu_diag) - - endif ! my_task - enddo ! ncnt - - end subroutine print_points_state -#endif -!======================================================================= ! prints error information prior to aborting diff --git a/cicecore/cicedyn/analysis/ice_history.F90 b/cicecore/cicedyn/analysis/ice_history.F90 index 54b6ce934..3eda456ec 100644 --- a/cicecore/cicedyn/analysis/ice_history.F90 +++ b/cicecore/cicedyn/analysis/ice_history.F90 @@ -263,7 +263,7 @@ subroutine init_hist (dt) trim(tmpstr2), file=__FILE__, line=__LINE__) endif end do - + close(nu_nml) call release_fileunit(nu_nml) endif @@ -2225,7 +2225,7 @@ subroutine accum_hist (dt) n4Dfcum = n4Dscum + num_avail_hist_fields_4Df ! should equal num_avail_hist_fields_tot do ns = 1,nstreams - if (.not. hist_avg) then ! write snapshots + if (.not. hist_avg(ns)) then ! write snapshots do n = 1,n2D if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) & a2D(:,:,n,:) = c0 diff --git a/cicecore/cicedyn/analysis/ice_history_pond.F90 b/cicecore/cicedyn/analysis/ice_history_pond.F90 index d209e6db6..976a87d40 100644 --- a/cicecore/cicedyn/analysis/ice_history_pond.F90 +++ b/cicecore/cicedyn/analysis/ice_history_pond.F90 @@ -100,14 +100,14 @@ subroutine init_hist_pond_2D trim(nml_filename), & file=__FILE__, line=__LINE__) endif - + ! goto this namelist in file call goto_nml(nu_nml,trim(nml_name),nml_error) if (nml_error /= 0) then call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & file=__FILE__, line=__LINE__) endif - + ! 
read namelist nml_error = 1 do while (nml_error > 0) @@ -121,7 +121,7 @@ subroutine init_hist_pond_2D trim(tmpstr2), file=__FILE__, line=__LINE__) endif end do - + close(nu_nml) call release_fileunit(nu_nml) endif diff --git a/cicecore/cicedyn/analysis/ice_history_shared.F90 b/cicecore/cicedyn/analysis/ice_history_shared.F90 index 70aa5e14c..3c31f23ca 100644 --- a/cicecore/cicedyn/analysis/ice_history_shared.F90 +++ b/cicecore/cicedyn/analysis/ice_history_shared.F90 @@ -38,7 +38,7 @@ module ice_history_shared integer (kind=int_kind), public :: history_precision logical (kind=log_kind), public :: & - hist_avg ! if true, write averaged data instead of snapshots + hist_avg(max_nstrm) ! if true, write averaged data instead of snapshots character (len=char_len_long), public :: & history_file , & ! output file for history @@ -132,6 +132,8 @@ module ice_history_shared time_end(max_nstrm), & time_bounds(2) + character (len=char_len), public :: hist_time_axis + real (kind=dbl_kind), allocatable, public :: & a2D (:,:,:,:) , & ! field accumulations/averages, 2D a3Dz(:,:,:,:,:) , & ! field accumulations/averages, 3D vertical @@ -743,7 +745,7 @@ subroutine construct_filename(ncfile,suffix,ns) imonth,'-',iday,'-',isec,'.',trim(suffix) else - if (hist_avg) then + if (hist_avg(ns)) then if (histfreq(ns) == '1' .or. histfreq(ns) == 'h'.or.histfreq(ns) == 'H') then ! do nothing elseif (new_year) then @@ -763,7 +765,7 @@ subroutine construct_filename(ncfile,suffix,ns) !echmod ! of other groups (including RASM which uses CESMCOUPLED) !echmod if (ns > 1) write(cstream,'(i1.1)') ns-1 - if (hist_avg) then ! write averaged data + if (hist_avg(ns)) then ! write averaged data if (histfreq(ns) == '1' .and. histfreq_n(ns) == 1) then ! timestep write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & history_file(1:lenstr(history_file))//trim(cstream),'_inst.', & diff --git a/cicecore/cicedyn/analysis/ice_history_snow.F90 b/cicecore/cicedyn/analysis/ice_history_snow.F90 index 62e65b5a3..19722b014 100644 --- a/cicecore/cicedyn/analysis/ice_history_snow.F90 +++ b/cicecore/cicedyn/analysis/ice_history_snow.F90 @@ -77,7 +77,7 @@ subroutine init_hist_snow_2D (dt) integer (kind=int_kind) :: ns integer (kind=int_kind) :: nml_error ! namelist i/o error flag real (kind=dbl_kind) :: rhofresh, secday - logical (kind=log_kind) :: tr_snow + logical (kind=log_kind) :: tr_snow character(len=char_len_long) :: tmpstr2 ! for namelist check character(len=char_len) :: nml_name ! for namelist check diff --git a/cicecore/cicedyn/dynamics/ice_dyn_vp.F90 b/cicecore/cicedyn/dynamics/ice_dyn_vp.F90 index 3915004b4..32971c5b6 100644 --- a/cicecore/cicedyn/dynamics/ice_dyn_vp.F90 +++ b/cicecore/cicedyn/dynamics/ice_dyn_vp.F90 @@ -2502,7 +2502,7 @@ function global_dot_product (nx_block , ny_block , & vector2_x , vector2_y) & result(dot_product) - use ice_domain, only: distrb_info + use ice_domain, only: distrb_info, ns_boundary_type use ice_domain_size, only: max_blocks use ice_fileunits, only: bfbflag @@ -2552,8 +2552,14 @@ function global_dot_product (nx_block , ny_block , & enddo !$OMP END PARALLEL DO - ! Use local summation result unless bfbflag is active - if (bfbflag == 'off') then + ! Use faster local summation result for several bfbflag settings. + ! The local implementation sums over each block, sums over local + ! blocks, and calls global_sum on a scalar and should be just as accurate as + ! bfbflag = 'off', 'lsum8', and 'lsum4' without the extra copies and overhead + ! in the more general array global_sum. But use the array global_sum + ! 
if bfbflag is more strict or for tripole grids (requires special masking) + if (ns_boundary_type /= 'tripole' .and. ns_boundary_type /= 'tripoleT' .and. & + (bfbflag == 'off' .or. bfbflag == 'lsum8' .or. bfbflag == 'lsum4')) then dot_product = global_sum(sum(dot), distrb_info) else dot_product = global_sum(prod, distrb_info, field_loc_NEcorner) @@ -3120,7 +3126,7 @@ subroutine fgmres (zetax2 , etax2 , & j = indxUj(ij, iblk) workspace_x(i, j, iblk) = workspace_x(i, j, iblk) + rhs_hess(it) * arnoldi_basis_x(i, j, iblk, it) - workspace_y(i, j, iblk) = workspace_x(i, j, iblk) + rhs_hess(it) * arnoldi_basis_y(i, j, iblk, it) + workspace_y(i, j, iblk) = workspace_y(i, j, iblk) + rhs_hess(it) * arnoldi_basis_y(i, j, iblk, it) enddo ! ij enddo !$OMP END PARALLEL DO @@ -3151,7 +3157,6 @@ subroutine pgmres (zetax2 , etax2 , & use ice_boundary, only: ice_HaloUpdate use ice_domain, only: maskhalo_dyn, halo_info - use ice_fileunits, only: bfbflag use ice_timers, only: ice_timer_start, ice_timer_stop, timer_bound real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks,4), intent(in) :: & @@ -3343,21 +3348,17 @@ subroutine pgmres (zetax2 , etax2 , & workspace_x , workspace_y) ! Update workspace with boundary values - ! NOTE: skipped for efficiency since this is just a preconditioner - ! unless bfbflag is active - if (bfbflag /= 'off') then - call stack_fields(workspace_x, workspace_y, fld2) - call ice_timer_start(timer_bound) - if (maskhalo_dyn) then - call ice_HaloUpdate (fld2, halo_info_mask, & - field_loc_NEcorner, field_type_vector) - else - call ice_HaloUpdate (fld2, halo_info, & - field_loc_NEcorner, field_type_vector) - endif - call ice_timer_stop(timer_bound) - call unstack_fields(fld2, workspace_x, workspace_y) + call stack_fields(workspace_x, workspace_y, fld2) + call ice_timer_start(timer_bound) + if (maskhalo_dyn) then + call ice_HaloUpdate (fld2, halo_info_mask, & + field_loc_NEcorner, field_type_vector) + else + call ice_HaloUpdate (fld2, halo_info, & + field_loc_NEcorner, field_type_vector) endif + call ice_timer_stop(timer_bound) + call unstack_fields(fld2, workspace_x, workspace_y) !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks @@ -3528,7 +3529,7 @@ subroutine pgmres (zetax2 , etax2 , & j = indxUj(ij, iblk) workspace_x(i, j, iblk) = workspace_x(i, j, iblk) + rhs_hess(it) * arnoldi_basis_x(i, j, iblk, it) - workspace_y(i, j, iblk) = workspace_x(i, j, iblk) + rhs_hess(it) * arnoldi_basis_y(i, j, iblk, it) + workspace_y(i, j, iblk) = workspace_y(i, j, iblk) + rhs_hess(it) * arnoldi_basis_y(i, j, iblk, it) enddo ! ij enddo !$OMP END PARALLEL DO diff --git a/cicecore/cicedyn/dynamics/ice_transport_driver.F90 b/cicecore/cicedyn/dynamics/ice_transport_driver.F90 index 30fe546e0..4f9d84d98 100644 --- a/cicecore/cicedyn/dynamics/ice_transport_driver.F90 +++ b/cicecore/cicedyn/dynamics/ice_transport_driver.F90 @@ -37,9 +37,6 @@ module ice_transport_driver ! 'upwind' => 1st order donor cell scheme ! 'remap' => remapping scheme - logical, parameter :: & - l_fixed_area = .false. ! if true, prescribe area flux across each edge - ! NOTE: For remapping, hice and hsno are considered tracers. ! ntrace is not equal to ntrcr! @@ -81,6 +78,7 @@ subroutine init_transport use ice_state, only: trcr_depend use ice_timers, only: ice_timer_start, ice_timer_stop, timer_advect use ice_transport_remap, only: init_remap + use ice_grid, only: grid_ice integer (kind=int_kind) :: & k, nt, nt1 ! tracer indices @@ -236,7 +234,7 @@ subroutine init_transport endif ! 
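! A minimal, self-contained sketch of the summation pattern described in the global_dot_product change above; all names here are illustrative stand-ins, not CICE identifiers. The fast path reduces each block to one scalar and then reduces the per-block scalars, which matches a plain sum over the full array; the array global_sum is only needed when bfbflag requests a stricter, reproducible summation order or a tripole grid requires masking.
      program dot_product_sketch
      implicit none
      integer, parameter :: dbl = selected_real_kind(13)
      real (kind=dbl) :: prod(4,4,2), dot(2)
      integer :: iblk
      call random_number(prod)            ! stand-in for the v1x*v2x + v1y*v2y terms
      do iblk = 1, 2
         dot(iblk) = sum(prod(:,:,iblk))  ! one scalar per block, as in the fast path
      enddo
      print *, 'fast path:', sum(dot), ' full array:', sum(prod)
      end program dot_product_sketch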
master_task 1000 format (1x,a,2x,i6,2x,i6,2x,i4,4x,l4) - if (trim(advection)=='remap') call init_remap ! grid quantities + if (trim(advection)=='remap') call init_remap ! grid quantities call ice_timer_stop(timer_advect) ! advection @@ -545,19 +543,17 @@ subroutine transport_remap (dt) call horizontal_remap (dt, ntrace, & uvel (:,:,:), vvel (:,:,:), & aim (:,:,:,:), trm(:,:,:,:,:), & - l_fixed_area, & tracer_type, depend, & has_dependents, integral_order, & - l_dp_midpt, grid_ice, & + l_dp_midpt, & uvelE (:,:,:), vvelN (:,:,:)) else call horizontal_remap (dt, ntrace, & uvel (:,:,:), vvel (:,:,:), & aim (:,:,:,:), trm(:,:,:,:,:), & - l_fixed_area, & tracer_type, depend, & has_dependents, integral_order, & - l_dp_midpt, grid_ice) + l_dp_midpt) endif !------------------------------------------------------------------- diff --git a/cicecore/cicedyn/dynamics/ice_transport_remap.F90 b/cicecore/cicedyn/dynamics/ice_transport_remap.F90 index 286a51711..eb0dd17cf 100644 --- a/cicecore/cicedyn/dynamics/ice_transport_remap.F90 +++ b/cicecore/cicedyn/dynamics/ice_transport_remap.F90 @@ -42,6 +42,7 @@ module ice_transport_remap use ice_exit, only: abort_ice use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc, only: icepack_query_parameters + use ice_grid, only : grid_ice implicit none private @@ -57,6 +58,11 @@ module ice_transport_remap p5625m = -9._dbl_kind/16._dbl_kind ,& p52083 = 25._dbl_kind/48._dbl_kind + logical :: & + l_fixed_area ! if true, prescribe area flux across each edge ! if false, area flux is determined internally ! and is passed out + logical (kind=log_kind), parameter :: bugcheck = .false. !======================================================================= @@ -293,6 +299,29 @@ subroutine init_remap enddo !$OMP END PARALLEL DO + !------------------------------------------------------------------- + ! Set logical l_fixed_area depending on the grid type. + ! + ! If l_fixed_area is true, the area of each departure region is + ! computed in advance (e.g., by taking the divergence of the + ! velocity field) and passed to locate_triangles. The departure + ! regions are adjusted to obtain the desired area. + ! If false, edgearea is computed in locate_triangles and passed out. + ! + ! l_fixed_area = .false. has been the default approach in CICE. It is + ! used like this for the B-grid. However, idealized tests with the + ! C-grid have shown that l_fixed_area = .false. leads to a checkerboard + ! pattern in prognostic fields (e.g. aice). Using l_fixed_area = .true. + ! eliminates the checkerboard pattern in C-grid simulations. + ! + !------------------------------------------------------------------- + + if (grid_ice == 'CD' .or. grid_ice == 'C') then + l_fixed_area = .false. !jlem temporary + else + l_fixed_area = .false. + endif + end subroutine init_remap !======================================================================= @@ -316,11 +345,10 @@ end subroutine init_remap subroutine horizontal_remap (dt, ntrace, & uvel, vvel, & mm, tm, & - l_fixed_area, & tracer_type, depend, & has_dependents, & integral_order, & - l_dp_midpt, grid_ice, & + l_dp_midpt, & uvelE, vvelN) use ice_boundary, only: ice_halo, ice_HaloMask, ice_HaloUpdate, & @@ -353,21 +381,6 @@ subroutine horizontal_remap (dt, ntrace, & real (kind=dbl_kind), intent(inout), dimension (nx_block,ny_block,ntrace,ncat,max_blocks) :: & tm ! mean tracer values in each grid cell - character (len=char_len_long), intent(in) :: & - grid_ice ! 
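! A hedged illustration of the two l_fixed_area modes documented in init_remap above; the variable names, values, and the edgearea formula are assumptions for this sketch, not CICE code. When l_fixed_area is true, the edge area flux would be prescribed up front from the velocity and edge length and locate_triangles adjusts the departure triangles to match it; when false, locate_triangles computes edgearea itself and passes it back out.
      program fixed_area_sketch
      implicit none
      integer, parameter :: dbl = selected_real_kind(13)
      logical :: l_fixed_area
      real (kind=dbl) :: uvelE, dyE, dt, edgearea
      l_fixed_area = .true.
      uvelE = 0.1_dbl                  ! m/s, assumed edge-normal velocity
      dyE   = 5.0e4_dbl                ! m, assumed edge length
      dt    = 3600.0_dbl               ! s
      if (l_fixed_area) then
         edgearea = uvelE * dyE * dt   ! prescribed area flux across the edge
      else
         edgearea = 0.0_dbl            ! left for locate_triangles to fill in
      endif
      print *, 'edgearea =', edgearea
      end program fixed_area_sketch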
ice grid, B, C, etc - - !------------------------------------------------------------------- - ! If l_fixed_area is true, the area of each departure region is - ! computed in advance (e.g., by taking the divergence of the - ! velocity field and passed to locate_triangles. The departure - ! regions are adjusted to obtain the desired area. - ! If false, edgearea is computed in locate_triangles and passed out. - !------------------------------------------------------------------- - - logical, intent(in) :: & - l_fixed_area ! if true, edgearea_e and edgearea_n are prescribed - ! if false, edgearea is computed here and passed out - integer (kind=int_kind), dimension (ntrace), intent(in) :: & tracer_type , & ! = 1, 2, or 3 (see comments above) depend ! tracer dependencies (see above) @@ -716,8 +729,7 @@ subroutine horizontal_remap (dt, ntrace, & dxu (:,:,iblk), dyu(:,:,iblk), & xp (:,:,:,:), yp (:,:,:,:), & iflux, jflux, & - triarea, & - l_fixed_area, edgearea_e(:,:)) + triarea, edgearea_e(:,:)) !------------------------------------------------------------------- ! Given triangle vertices, compute coordinates of triangle points @@ -776,8 +788,7 @@ subroutine horizontal_remap (dt, ntrace, & dxu (:,:,iblk), dyu (:,:,iblk), & xp (:,:,:,:), yp(:,:,:,:), & iflux, jflux, & - triarea, & - l_fixed_area, edgearea_n(:,:)) + triarea, edgearea_n(:,:)) call triangle_coordinates (nx_block, ny_block, & integral_order, icellsng(:,iblk), & @@ -1696,8 +1707,7 @@ subroutine locate_triangles (nx_block, ny_block, & dxu, dyu, & xp, yp, & iflux, jflux, & - triarea, & - l_fixed_area, edgearea) + triarea, edgearea) integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions @@ -1730,12 +1740,6 @@ subroutine locate_triangles (nx_block, ny_block, & indxi , & ! compressed index in i-direction indxj ! compressed index in j-direction - logical, intent(in) :: & - l_fixed_area ! if true, the area of each departure region is - ! passed in as edgearea - ! if false, edgearea if determined internally - ! and is passed out - real (kind=dbl_kind), dimension(nx_block,ny_block), intent(inout) :: & edgearea ! area of departure region for each edge ! edgearea > 0 for eastward/northward flow @@ -1838,7 +1842,7 @@ subroutine locate_triangles (nx_block, ny_block, & ! BL | BC | BR (bottom left, center, right) ! | | ! - ! and the transport is across the edge between cells TC and TB. + ! and the transport is across the edge between cells TC and BC. ! ! Departure points are scaled to a local coordinate system ! whose origin is at the midpoint of the edge. @@ -1951,45 +1955,32 @@ subroutine locate_triangles (nx_block, ny_block, & ! Compute mask for edges with nonzero departure areas !------------------------------------------------------------------- - if (l_fixed_area) then - icellsd = 0 + icellsd = 0 + if (trim(edge) == 'north') then do j = jb, je do i = ib, ie - if (edgearea(i,j) /= c0) then + if (dpx(i-1,j)/=c0 .or. dpy(i-1,j)/=c0 & + .or. & + dpx(i,j)/=c0 .or. dpy(i,j)/=c0) then icellsd = icellsd + 1 indxid(icellsd) = i indxjd(icellsd) = j endif enddo enddo - else - icellsd = 0 - if (trim(edge) == 'north') then - do j = jb, je - do i = ib, ie - if (dpx(i-1,j)/=c0 .or. dpy(i-1,j)/=c0 & - .or. & - dpx(i,j)/=c0 .or. dpy(i,j)/=c0) then - icellsd = icellsd + 1 - indxid(icellsd) = i - indxjd(icellsd) = j - endif - enddo - enddo - else ! east edge - do j = jb, je - do i = ib, ie - if (dpx(i,j-1)/=c0 .or. dpy(i,j-1)/=c0 & - .or. & - dpx(i,j)/=c0 .or. 
dpy(i,j)/=c0) then + icellsd = icellsd + 1 + indxid(icellsd) = i + indxjd(icellsd) = j + endif + enddo + enddo + endif ! edge = north/east !------------------------------------------------------------------- ! Scale the departure points diff --git a/cicecore/cicedyn/general/ice_forcing.F90 b/cicecore/cicedyn/general/ice_forcing.F90 index 541efb282..9002d0448 100755 --- a/cicecore/cicedyn/general/ice_forcing.F90 +++ b/cicecore/cicedyn/general/ice_forcing.F90 @@ -62,7 +62,7 @@ module ice_forcing fyear_final ! last year in cycle, computed at init character (char_len_long) :: & ! input data file names - uwind_file, & + uwind_file, & ! this is also used as a generic file containing all fields for JRA55 vwind_file, & wind_file, & strax_file, & @@ -124,7 +124,7 @@ module ice_forcing ocn_data_format, & ! 'bin'=binary or 'nc'=netcdf atm_data_type, & ! 'default', 'monthly', 'ncar', 'box2001' ! 'hadgem', 'oned', 'calm', 'uniform' - ! 'JRA55_gx1' or 'JRA55_gx3' or 'JRA55_tx1' + ! 'JRA55' or 'JRA55do' bgc_data_type, & ! 'default', 'clim' ocn_data_type, & ! 'default', 'clim', 'ncar', 'oned', 'calm', 'box2001' ! 'hadgem_sst' or 'hadgem_sst_uvocn', 'uniform' @@ -281,13 +281,11 @@ subroutine init_forcing_atmo file=__FILE__, line=__LINE__) endif - if (use_leap_years .and. (trim(atm_data_type) /= 'JRA55_gx1' .and. & - trim(atm_data_type) /= 'JRA55_gx3' .and. & - trim(atm_data_type) /= 'JRA55_tx1' .and. & - trim(atm_data_type) /= 'hycom' .and. & - trim(atm_data_type) /= 'box2001')) then + if (use_leap_years .and. (index(trim(atm_data_type),'JRA55') == 0 .and. & + trim(atm_data_type) /= 'hycom' .and. & + trim(atm_data_type) /= 'box2001')) then write(nu_diag,*) 'use_leap_years option is currently only supported for' - write(nu_diag,*) 'JRA55, default , and box2001 atmospheric data' + write(nu_diag,*) 'JRA55, JRA55do, default, and box2001 atmospheric data' call abort_ice(error_message=subname, file=__FILE__, line=__LINE__) endif @@ -298,16 +296,8 @@ subroutine init_forcing_atmo ! 
default forcing values from init_flux_atm if (trim(atm_data_type) == 'ncar') then call NCAR_files(fyear) -#ifdef UNDEPRECATE_LYq - elseif (trim(atm_data_type) == 'LYq') then - call LY_files(fyear) -#endif - elseif (trim(atm_data_type) == 'JRA55_gx1') then - call JRA55_gx1_files(fyear) - elseif (trim(atm_data_type) == 'JRA55_gx3') then - call JRA55_gx3_files(fyear) - elseif (trim(atm_data_type) == 'JRA55_tx1') then - call JRA55_tx1_files(fyear) + elseif (index(trim(atm_data_type),'JRA55') > 0) then + call JRA55_files(fyear) elseif (trim(atm_data_type) == 'hadgem') then call hadgem_files(fyear) elseif (trim(atm_data_type) == 'monthly') then @@ -644,15 +634,7 @@ subroutine get_forcing_atmo if (trim(atm_data_type) == 'ncar') then call ncar_data -#ifdef UNDEPRECATE_LYq - elseif (trim(atm_data_type) == 'LYq') then - call LY_data -#endif - elseif (trim(atm_data_type) == 'JRA55_gx1') then - call JRA55_data - elseif (trim(atm_data_type) == 'JRA55_gx3') then - call JRA55_data - elseif (trim(atm_data_type) == 'JRA55_tx1') then + elseif (index(trim(atm_data_type),'JRA55') > 0) then call JRA55_data elseif (trim(atm_data_type) == 'hadgem') then call hadgem_data @@ -1593,15 +1575,7 @@ subroutine file_year (data_file, yr) i = index(data_file,'.nc') - 5 tmpname = data_file write(data_file,'(a,i4.4,a)') tmpname(1:i), yr, '.nc' - elseif (trim(atm_data_type) == 'JRA55_gx1') then ! netcdf - i = index(data_file,'.nc') - 5 - tmpname = data_file - write(data_file,'(a,i4.4,a)') tmpname(1:i), yr, '.nc' - elseif (trim(atm_data_type) == 'JRA55_gx3') then ! netcdf - i = index(data_file,'.nc') - 5 - tmpname = data_file - write(data_file,'(a,i4.4,a)') tmpname(1:i), yr, '.nc' - elseif (trim(atm_data_type) == 'JRA55_tx1') then ! netcdf + elseif (index(trim(atm_data_type),'JRA55') > 0) then ! netcdf i = index(data_file,'.nc') - 5 tmpname = data_file write(data_file,'(a,i4.4,a)') tmpname(1:i), yr, '.nc' @@ -1726,23 +1700,6 @@ subroutine prepare_forcing (nx_block, ny_block, & enddo enddo -#ifdef UNDEPRECATE_LYq - elseif (trim(atm_data_type) == 'LYq') then - - ! precip is in mm/s - - zlvl0 = c10 - - do j = jlo, jhi - do i = ilo, ihi - ! longwave based on Rosati and Miyakoda, JPO 18, p. 1607 (1988) - call longwave_rosati_miyakoda(cldf(i,j), Tsfc(i,j), & - aice(i,j), sst(i,j), & - Qa(i,j), Tair(i,j), & - hm(i,j), flw(i,j)) - enddo - enddo -#endif elseif (trim(atm_data_type) == 'oned') then ! rectangular grid ! precip is in kg/m^2/s @@ -1977,62 +1934,75 @@ end subroutine longwave_rosati_miyakoda subroutine ncar_files (yr) -! Construct filenames based on the LANL naming conventions for NCAR data. -! Edit for other directory structures or filenames. -! Note: The year number in these filenames does not matter, because -! subroutine file_year will insert the correct year. + ! Construct filenames based on the LANL naming conventions for NCAR data. + ! Edit for other directory structures or filenames. + ! Note: The year number in these filenames does not matter, because + ! subroutine file_year will insert the correct year. + ! Note: atm_data_dir may have NCAR_bulk or not + ! + ! atm_data_type should be 'ncar' + ! atm_data_dir should be set to ${CICE_DATA_root}/forcing/$grid/[NCAR_bulk,''] + ! NCAR_bulk at the end of the atm_data_dir is optional to provide backwards + ! compatibility and if not included, will be appended automatically. + ! The grid is typically gx1, gx3, tx1, or similar. integer (kind=int_kind), intent(in) :: & yr ! 
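! A hedged worked example of the NCAR_bulk path handling described above; the directory string is hypothetical. index() returns 0 when the substring is absent, so the '/NCAR_bulk' suffix is appended only when atm_data_dir does not already contain it, and both spellings yield the same final path.
      program ncar_path_sketch
      implicit none
      character(len=256) :: atm_data_dir, extra, fsw_file
      atm_data_dir = '/data/CICE_data/forcing/gx1'   ! hypothetical, no suffix
      extra = '/NCAR_bulk'
      if (index(trim(atm_data_dir),'NCAR_bulk') > 0) extra = ''
      fsw_file = trim(atm_data_dir)//trim(extra)//'/MONTHLY/swdn.1996.dat'
      print *, trim(fsw_file)                        ! ends in /NCAR_bulk/MONTHLY/...
      end program ncar_path_sketch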
current forcing year + character (char_len_long) :: & + atm_data_dir_extra ! atm_data_dir extra if needed + + integer (kind=int_kind) :: & + strind ! string index + character(len=*), parameter :: subname = '(ncar_files)' if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - fsw_file = & - trim(atm_data_dir)//'/MONTHLY/swdn.1996.dat' + ! decide whether NCAR_bulk is part of atm_data_dir and set atm_data_dir_extra + atm_data_dir_extra = '/NCAR_bulk' + strind = index(trim(atm_data_dir),'NCAR_bulk') + if (strind > 0) then + atm_data_dir_extra = '' + endif + + fsw_file = trim(atm_data_dir)//trim(atm_data_dir_extra)//'/MONTHLY/swdn.1996.dat' call file_year(fsw_file,yr) - flw_file = & - trim(atm_data_dir)//'/MONTHLY/cldf.1996.dat' + flw_file = trim(atm_data_dir)//trim(atm_data_dir_extra)//'/MONTHLY/cldf.1996.dat' call file_year(flw_file,yr) - rain_file = & - trim(atm_data_dir)//'/MONTHLY/prec.1996.dat' + rain_file = trim(atm_data_dir)//trim(atm_data_dir_extra)//'/MONTHLY/prec.1996.dat' call file_year(rain_file,yr) - uwind_file = & - trim(atm_data_dir)//'/4XDAILY/u_10.1996.dat' + uwind_file = trim(atm_data_dir)//trim(atm_data_dir_extra)//'/4XDAILY/u_10.1996.dat' call file_year(uwind_file,yr) - vwind_file = & - trim(atm_data_dir)//'/4XDAILY/v_10.1996.dat' + vwind_file = trim(atm_data_dir)//trim(atm_data_dir_extra)//'/4XDAILY/v_10.1996.dat' call file_year(vwind_file,yr) - tair_file = & - trim(atm_data_dir)//'/4XDAILY/t_10.1996.dat' + tair_file = trim(atm_data_dir)//trim(atm_data_dir_extra)//'/4XDAILY/t_10.1996.dat' call file_year(tair_file,yr) - humid_file = & - trim(atm_data_dir)//'/4XDAILY/q_10.1996.dat' + humid_file = trim(atm_data_dir)//trim(atm_data_dir_extra)//'/4XDAILY/q_10.1996.dat' call file_year(humid_file,yr) - rhoa_file = & - trim(atm_data_dir)//'/4XDAILY/dn10.1996.dat' + rhoa_file = trim(atm_data_dir)//trim(atm_data_dir_extra)//'/4XDAILY/dn10.1996.dat' call file_year(rhoa_file,yr) if (my_task == master_task) then write (nu_diag,*) ' ' write (nu_diag,*) 'Forcing data year =', fyear write (nu_diag,*) 'Atmospheric data files:' - write (nu_diag,*) trim(fsw_file) - write (nu_diag,*) trim(flw_file) - write (nu_diag,*) trim(rain_file) - write (nu_diag,*) trim(uwind_file) - write (nu_diag,*) trim(vwind_file) - write (nu_diag,*) trim(tair_file) - write (nu_diag,*) trim(humid_file) - write (nu_diag,*) trim(rhoa_file) + write (nu_diag,'(3a)') trim(fsw_file) + write (nu_diag,'(3a)') trim(flw_file) + write (nu_diag,'(3a)') trim(rain_file) + write (nu_diag,'(3a)') trim(uwind_file) + write (nu_diag,'(3a)') trim(vwind_file) + write (nu_diag,'(3a)') trim(tair_file) + write (nu_diag,'(3a)') trim(humid_file) + write (nu_diag,'(3a)') trim(rhoa_file) endif ! master_task end subroutine ncar_files @@ -2195,352 +2165,117 @@ subroutine ncar_data end subroutine ncar_data -#ifdef UNDEPRECATE_LYq -!======================================================================= -! Large and Yeager forcing (AOMIP style) -!======================================================================= - - subroutine LY_files (yr) - -! Construct filenames based on the LANL naming conventions for CORE -! (Large and Yeager) data. -! Edit for other directory structures or filenames. -! Note: The year number in these filenames does not matter, because -! subroutine file_year will insert the correct year. - -! author: Elizabeth C. Hunke, LANL - - integer (kind=int_kind), intent(in) :: & - yr ! current forcing year - - character(len=*), parameter :: subname = '(LY_files)' - - if (local_debug .and. 
my_task == master_task) write(nu_diag,*) subname,'fdbg start' - - flw_file = & - trim(atm_data_dir)//'/MONTHLY/cldf.omip.dat' - - rain_file = & - trim(atm_data_dir)//'/MONTHLY/prec.nmyr.dat' - - uwind_file = & - trim(atm_data_dir)//'/4XDAILY/u_10.1996.dat' - call file_year(uwind_file,yr) - - vwind_file = & - trim(atm_data_dir)//'/4XDAILY/v_10.1996.dat' - call file_year(vwind_file,yr) - - tair_file = & - trim(atm_data_dir)//'/4XDAILY/t_10.1996.dat' - call file_year(tair_file,yr) - - humid_file = & - trim(atm_data_dir)//'/4XDAILY/q_10.1996.dat' - call file_year(humid_file,yr) - - if (my_task == master_task) then - write (nu_diag,*) ' ' - write (nu_diag,*) 'Forcing data year = ', fyear - write (nu_diag,*) 'Atmospheric data files:' - write (nu_diag,*) trim(flw_file) - write (nu_diag,*) trim(rain_file) - write (nu_diag,*) trim(uwind_file) - write (nu_diag,*) trim(vwind_file) - write (nu_diag,*) trim(tair_file) - write (nu_diag,*) trim(humid_file) - endif ! master_task - - end subroutine LY_files -#endif !======================================================================= - subroutine JRA55_gx1_files(yr) -! - integer (kind=int_kind), intent(in) :: & - yr ! current forcing year - - character(len=*), parameter :: subname = '(JRA55_gx1_files)' - - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - - uwind_file = & - trim(atm_data_dir)//'/8XDAILY/JRA55_03hr_forcing_2005.nc' - call file_year(uwind_file,yr) - if (my_task == master_task) then - write (nu_diag,*) ' ' - write (nu_diag,*) 'Atmospheric data files:' - write (nu_diag,*) trim(uwind_file) - endif - end subroutine JRA55_gx1_files - -!======================================================================= - - subroutine JRA55_tx1_files(yr) -! - integer (kind=int_kind), intent(in) :: & - yr ! current forcing year - - character(len=*), parameter :: subname = '(JRA55_tx1_files)' - - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - - uwind_file = & - trim(atm_data_dir)//'/8XDAILY/JRA55_03hr_forcing_tx1_2005.nc' - call file_year(uwind_file,yr) - if (my_task == master_task) then - write (nu_diag,*) ' ' - write (nu_diag,*) 'Atmospheric data files:' - write (nu_diag,*) trim(uwind_file) - endif - end subroutine JRA55_tx1_files + subroutine JRA55_files(yr) -!======================================================================= + ! find the JRA55 files: + ! This subroutine finds the JRA55 atm forcing files based on settings + ! in atm_data_type and atm_data_dir. Because the filenames are not + ! entirely consistent, we need a flexible method. + ! + ! atm_data_type could be JRA55 or JRA55do with/without _grid appended + ! atm_data_dir could contain JRA55 or JRA55do or not + ! actual files could have grid in name in two locations or not at all + ! + ! The files will generally be of the format + ! $atm_data_type/[JRA55,JRA55do,'']/8XDAILY/[JRA55,JRA55do][_$grid,'']_03hr_forcing[_$grid,'']_$year.nc + ! The options defined by cnt try several versions of paths/filenames + ! As a user, + ! atm_data_type should be set to JRA55, JRA55do, JRA55_xxx, or JRA55do_xxx + ! where xxx can be any set of characters. The _xxx if included will be ignored. + ! Historically, these were set to JRA55_gx1 and so forth but the _gx1 is no longer needed + ! but this is still allowed for backwards compatibility. atm_data_type_prefix + ! is atm_data_type with _ and everything after _ removed. + ! atm_data_dir should be set to ${CICE_DATA_root}/forcing/$grid/[JRA55,JRA55do,''] + ! 
The [JRA55,JRA55do] at the end of the atm_data_dir is optional to provide backwards + ! compatibility and if not included, will be appended automatically using + ! the atm_data_type_prefix value. The grid is typically gx1, gx3, tx1, or similar. + ! In general, we recommend using the following format + ! atm_data_type = [JRA55,JRA55do] + ! atm_data_dir = ${CICE_DATA_root}/forcing/$grid - subroutine JRA55_gx3_files(yr) -! integer (kind=int_kind), intent(in) :: & - yr ! current forcing year - - character(len=*), parameter :: subname = '(JRA55_gx3_files)' + yr ! current forcing year - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - - uwind_file = & - trim(atm_data_dir)//'/8XDAILY/JRA55_gx3_03hr_forcing_2005.nc' - call file_year(uwind_file,yr) - if (my_task == master_task) then - write (nu_diag,*) ' ' - write (nu_diag,*) 'Atmospheric data files:' - write (nu_diag,*) trim(uwind_file) - endif - end subroutine JRA55_gx3_files - -#ifdef UNDEPRECATE_LYq !======================================================================= ! ! read Large and Yeager atmospheric data ! note: also uses AOMIP protocol, in part + ! local variables + character(len=16) :: & + grd ! gx3, gx1, tx1 - subroutine LY_data - - use ice_blocks, only: block, get_block - use ice_global_reductions, only: global_minval, global_maxval - use ice_domain, only: nblocks, distrb_info, blocks_ice - use ice_flux, only: fsnow, Tair, uatm, vatm, Qa, fsw - use ice_grid, only: hm, tlon, tlat, tmask, umask - use ice_state, only: aice + character(len=64) :: & + atm_data_type_prefix ! atm_data_type prefix integer (kind=int_kind) :: & - i, j , & - ixm,ixx,ixp , & ! record numbers for neighboring months - recnum , & ! record number - maxrec , & ! maximum record number - recslot , & ! spline slot for current record - midmonth , & ! middle day of month - dataloc , & ! = 1 for data located in middle of time interval - ! = 2 for date located at end of time interval - iblk , & ! block index - ilo,ihi,jlo,jhi ! beginning and end of physical domain - - real (kind=dbl_kind) :: & - sec6hr , & ! number of seconds in 6 hours - secday , & ! number of seconds in day - Tffresh , & - vmin, vmax + cnt , & ! search for files + strind ! string index - logical (kind=log_kind) :: readm, read6 - - type (block) :: & - this_block ! block information for current block + logical :: & + exists ! file existence - character(len=*), parameter :: subname = '(LY_data)' + character(len=*), parameter :: subname = '(JRA55_files)' if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - call icepack_query_parameters(Tffresh_out=Tffresh) - call icepack_query_parameters(secday_out=secday) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - - !------------------------------------------------------------------- - ! monthly data - ! - ! Assume that monthly data values are located in the middle of the - ! month. - !------------------------------------------------------------------- - - midmonth = 15 ! data is given on 15th of every month -! midmonth = fix(p5 * real(daymo(mmonth))) ! exact middle - - ! Compute record numbers for surrounding months - maxrec = 12 - ixm = mod(mmonth+maxrec-2,maxrec) + 1 - ixp = mod(mmonth, maxrec) + 1 - if (mday >= midmonth) ixm = -99 ! other two points will be used - if (mday < midmonth) ixp = -99 - - ! Determine whether interpolation will use values 1:2 or 2:3 - ! 
recslot = 2 means we use values 1:2, with the current value (2) - ! in the second slot - ! recslot = 1 means we use values 2:3, with the current value (2) - ! in the first slot - recslot = 1 ! latter half of month - if (mday < midmonth) recslot = 2 ! first half of month - - ! Find interpolation coefficients - call interp_coeff_monthly (recslot) - - ! Read 2 monthly values - readm = .false. - if (istep==1 .or. (mday==midmonth .and. msec==0)) readm = .true. - - call read_clim_data (readm, 0, ixm, mmonth, ixp, & - flw_file, cldf_data, field_loc_center, field_type_scalar) - call read_clim_data (readm, 0, ixm, mmonth, ixp, & - rain_file, fsnow_data, field_loc_center, field_type_scalar) - - call interpolate_data (cldf_data, cldf) - call interpolate_data (fsnow_data, fsnow) ! units mm/s = kg/m^2/s - - !------------------------------------------------------------------- - ! 6-hourly data - ! - ! Assume that the 6-hourly value is located at the end of the - ! 6-hour period. This is the convention for NCEP reanalysis data. - ! E.g. record 1 gives conditions at 6 am GMT on 1 January. - !------------------------------------------------------------------- - - dataloc = 2 ! data located at end of interval - sec6hr = secday/c4 ! seconds in 6 hours - maxrec = 1460 ! 365*4 - - ! current record number - recnum = 4*int(yday) - 3 + int(real(msec,kind=dbl_kind)/sec6hr) - - ! Compute record numbers for surrounding data (2 on each side) - - ixm = mod(recnum+maxrec-2,maxrec) + 1 - ixx = mod(recnum-1, maxrec) + 1 -! ixp = mod(recnum, maxrec) + 1 - - ! Compute interpolation coefficients - ! If data is located at the end of the time interval, then the - ! data value for the current record goes in slot 2 - - recslot = 2 - ixp = -99 - call interp_coeff (recnum, recslot, sec6hr, dataloc) - - ! Read - read6 = .false. - if (istep==1 .or. oldrecnum .ne. recnum) read6 = .true. + ! this could be JRA55[do] or JRA55[do]_grid, drop the _grid if set + atm_data_type_prefix = trim(atm_data_type) + strind = index(trim(atm_data_type),'_') + if (strind > 0) then + atm_data_type_prefix = atm_data_type(1:strind-1) + endif - if (trim(atm_data_format) == 'bin') then - call read_data (read6, 0, fyear, ixm, ixx, ixp, maxrec, & - tair_file, Tair_data, & - field_loc_center, field_type_scalar) - call read_data (read6, 0, fyear, ixm, ixx, ixp, maxrec, & - uwind_file, uatm_data, & - field_loc_center, field_type_vector) - call read_data (read6, 0, fyear, ixm, ixx, ixp, maxrec, & - vwind_file, vatm_data, & - field_loc_center, field_type_vector) - call read_data (read6, 0, fyear, ixm, ixx, ixp, maxrec, & - humid_file, Qa_data, & - field_loc_center, field_type_scalar) + ! check for grid version using fortran INDEX intrinsic + if (index(trim(atm_data_dir),'gx1') > 0) then + grd = 'gx1' + else if (index(trim(atm_data_dir),'gx3') > 0) then + grd = 'gx3' + else if (index(trim(atm_data_dir),'tx1') > 0) then + grd = 'tx1' else - call abort_ice (error_message=subname//'nonbinary atm_data_format unavailable', & - file=__FILE__, line=__LINE__) + call abort_ice(error_message=subname//' unknown grid type') endif - ! Interpolate - call interpolate_data (Tair_data, Tair) - call interpolate_data (uatm_data, uatm) - call interpolate_data (vatm_data, vatm) - call interpolate_data (Qa_data, Qa) + ! cnt represents the possible file format options and steps through them until one is found + exists = .false. + cnt = 1 + do while (.not.exists .and. 
cnt <= 6) + if (cnt == 1) uwind_file = trim(atm_data_dir)//'/'//trim(atm_data_type_prefix)// & + '/8XDAILY/'//trim(atm_data_type_prefix)//'_'//trim(grd)//'_03hr_forcing_2005.nc' - !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) - do iblk = 1, nblocks - ! limit summer Tair values where ice is present - do j = 1, ny_block - do i = 1, nx_block - if (aice(i,j,iblk) > p1) Tair(i,j,iblk) = min(Tair(i,j,iblk), Tffresh+p1) - enddo - enddo + if (cnt == 2) uwind_file = trim(atm_data_dir)//'/'//trim(atm_data_type_prefix)// & + '/8XDAILY/'//trim(atm_data_type_prefix)//'_03hr_forcing_'//trim(grd)//'_2005.nc' - call Qa_fixLY(nx_block, ny_block, & - Tair (:,:,iblk), & - Qa (:,:,iblk)) - - do j = 1, ny_block - do i = 1, nx_block - Qa (i,j,iblk) = Qa (i,j,iblk) * hm(i,j,iblk) - Tair(i,j,iblk) = Tair(i,j,iblk) * hm(i,j,iblk) - uatm(i,j,iblk) = uatm(i,j,iblk) * hm(i,j,iblk) - vatm(i,j,iblk) = vatm(i,j,iblk) * hm(i,j,iblk) - enddo - enddo + if (cnt == 3) uwind_file = trim(atm_data_dir)//'/'//trim(atm_data_type_prefix)// & + '/8XDAILY/'//trim(atm_data_type_prefix)// '_03hr_forcing_2005.nc' - ! AOMIP - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi + if (cnt == 4) uwind_file = trim(atm_data_dir)// & + '/8XDAILY/'//trim(atm_data_type_prefix)//'_'//trim(grd)//'_03hr_forcing_2005.nc' - call compute_shortwave(nx_block, ny_block, & - ilo, ihi, jlo, jhi, & - TLON (:,:,iblk), & - TLAT (:,:,iblk), & - hm (:,:,iblk), & - Qa (:,:,iblk), & - cldf (:,:,iblk), & - fsw (:,:,iblk)) + if (cnt == 5) uwind_file = trim(atm_data_dir)// & + '/8XDAILY/'//trim(atm_data_type_prefix)//'_03hr_forcing_'//trim(grd)//'_2005.nc' - enddo ! iblk - !$OMP END PARALLEL DO + if (cnt == 6) uwind_file = trim(atm_data_dir)// & + '/8XDAILY/'//trim(atm_data_type_prefix)// '_03hr_forcing_2005.nc' - ! Save record number - oldrecnum = recnum + call file_year(uwind_file,yr) + INQUIRE(FILE=uwind_file,EXIST=exists) +! if (my_task == master_task) then +! write(nu_diag,*) subname,cnt,exists,trim(uwind_file) +! 
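! A hedged worked example of the candidate search above; the paths and namelist values are hypothetical. With atm_data_type = 'JRA55do_gx1' the prefix logic strips '_gx1', the grid is recovered from atm_data_dir, and the loop probes each layout in turn until INQUIRE reports an existing file.
      program jra55_probe_sketch
      implicit none
      character(len=256) :: fname
      character(len=64)  :: atm_data_type, prefix
      character(len=16)  :: grd
      integer :: strind, cnt
      logical :: exists
      atm_data_type = 'JRA55do_gx1'    ! hypothetical namelist value
      prefix = atm_data_type
      strind = index(trim(atm_data_type),'_')
      if (strind > 0) prefix = atm_data_type(1:strind-1)   ! -> 'JRA55do'
      grd = 'gx1'                      ! as detected from atm_data_dir
      exists = .false.
      cnt = 1
      do while (.not.exists .and. cnt <= 2)
         if (cnt == 1) fname = '/data/forcing/gx1/'//trim(prefix)//'/8XDAILY/'// &
            trim(prefix)//'_'//trim(grd)//'_03hr_forcing_2005.nc'
         if (cnt == 2) fname = '/data/forcing/gx1/8XDAILY/'// &
            trim(prefix)//'_03hr_forcing_2005.nc'
         inquire(file=fname, exist=exists)
         cnt = cnt + 1
      enddo
      print *, 'found:', exists, ' last tried: ', trim(fname)
      end program jra55_probe_sketch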
endif + cnt = cnt + 1 + enddo - if (debug_forcing) then - if (my_task == master_task) write (nu_diag,*) 'LY_bulk_data' - vmin = global_minval(fsw,distrb_info,tmask) + if (.not.exists) then + call abort_ice(error_message=subname//' could not find forcing file') + endif - vmax = global_maxval(fsw,distrb_info,tmask) - if (my_task.eq.master_task) & - write (nu_diag,*) 'fsw',vmin,vmax - vmin = global_minval(cldf,distrb_info,tmask) - vmax = global_maxval(cldf,distrb_info,tmask) - if (my_task.eq.master_task) & - write (nu_diag,*) 'cldf',vmin,vmax - vmin =global_minval(fsnow,distrb_info,tmask) - vmax =global_maxval(fsnow,distrb_info,tmask) - if (my_task.eq.master_task) & - write (nu_diag,*) 'fsnow',vmin,vmax - vmin = global_minval(Tair,distrb_info,tmask) - vmax = global_maxval(Tair,distrb_info,tmask) - if (my_task.eq.master_task) & - write (nu_diag,*) 'Tair',vmin,vmax - vmin = global_minval(uatm,distrb_info,umask) - vmax = global_maxval(uatm,distrb_info,umask) - if (my_task.eq.master_task) & - write (nu_diag,*) 'uatm',vmin,vmax - vmin = global_minval(vatm,distrb_info,umask) - vmax = global_maxval(vatm,distrb_info,umask) - if (my_task.eq.master_task) & - write (nu_diag,*) 'vatm',vmin,vmax - vmin = global_minval(Qa,distrb_info,tmask) - vmax = global_maxval(Qa,distrb_info,tmask) - if (my_task.eq.master_task) & - write (nu_diag,*) 'Qa',vmin,vmax + if (my_task == master_task) then + write (nu_diag,'(2a)') ' ' + write (nu_diag,'(2a)') subname,'Atmospheric data files:' + write (nu_diag,'(2a)') subname,trim(uwind_file) + endif - endif ! debug_forcing + end subroutine JRA55_files - end subroutine LY_data -#endif !======================================================================= subroutine JRA55_data @@ -2611,7 +2346,7 @@ subroutine JRA55_data uwind_file_old = uwind_file if (uwind_file /= uwind_file_old .and. my_task == master_task) then - write(nu_diag,*) subname,' reading forcing file = ',trim(uwind_file) + write(nu_diag,'(2a)') subname,' reading forcing file = ',trim(uwind_file) endif call ice_open_nc(uwind_file,ncid) @@ -2623,7 +2358,7 @@ subroutine JRA55_data if (n1 == 1) then recnum = 8*int(yday) - 7 + int(real(msec,kind=dbl_kind)/sec3hr) if (my_task == master_task .and. (recnum <= 2 .or. 
recnum >= maxrec-1)) then - write(nu_diag,*) subname,' reading forcing file 1st ts = ',trim(uwind_file) + write(nu_diag,'(3a)') subname,' reading forcing file 1st ts = ',trim(uwind_file) endif elseif (n1 == 2) then recnum = 8*int(yday) - 7 + int(real(msec,kind=dbl_kind)/sec3hr) + 1 @@ -2633,7 +2368,7 @@ subroutine JRA55_data recnum = 1 call file_year(uwind_file,lfyear) if (my_task == master_task) then - write(nu_diag,*) subname,' reading forcing file 2nd ts = ',trim(uwind_file) + write(nu_diag,'(3a)') subname,' reading forcing file 2nd ts = ',trim(uwind_file) endif call ice_close_nc(ncid) call ice_open_nc(uwind_file,ncid) diff --git a/cicecore/cicedyn/general/ice_init.F90 b/cicecore/cicedyn/general/ice_init.F90 index 4c8fb1fee..3b8d83d1f 100644 --- a/cicecore/cicedyn/general/ice_init.F90 +++ b/cicecore/cicedyn/general/ice_init.F90 @@ -81,7 +81,7 @@ subroutine input_data runid, runtype, use_restart_time, restart_format, lcdf64 use ice_history_shared, only: hist_avg, history_dir, history_file, & incond_dir, incond_file, version_name, & - history_precision, history_format + history_precision, history_format, hist_time_axis use ice_flux, only: update_ocn_f, l_mpond_fresh use ice_flux, only: default_season use ice_flux_bgc, only: cpl_bgc @@ -125,7 +125,7 @@ subroutine input_data use ice_timers, only: timer_stats use ice_memusage, only: memory_stats use ice_fileunits, only: goto_nml - + #ifdef CESMCOUPLED use shr_file_mod, only: shr_file_setIO #endif @@ -169,7 +169,7 @@ subroutine input_data character (len=char_len) :: abort_list character (len=char_len) :: nml_name ! namelist name - character (len=char_len_long) :: tmpstr2 + character (len=char_len_long) :: tmpstr2 character(len=*), parameter :: subname='(input_data)' @@ -185,6 +185,7 @@ subroutine input_data restart_ext, use_restart_time, restart_format, lcdf64, & pointer_file, dumpfreq, dumpfreq_n, dump_last, & diagfreq, diag_type, diag_file, history_format,& + hist_time_axis, & print_global, print_points, latpnt, lonpnt, & debug_forcing, histfreq, histfreq_n, hist_avg, & history_dir, history_file, history_precision, cpl_bgc, & @@ -322,8 +323,10 @@ subroutine input_data histfreq(5) = 'y' ! output frequency option for different streams histfreq_n(:) = 1 ! output frequency histfreq_base = 'zero' ! output frequency reference date - hist_avg = .true. ! if true, write time-averages (not snapshots) + hist_avg(:) = .true. ! if true, write time-averages (not snapshots) history_format = 'default' ! history file format + hist_time_axis = 'end' ! History file time axis averaging interval position + history_dir = './' ! write to executable dir for default history_file = 'iceh' ! history file name prefix history_precision = 4 ! precision of history files @@ -609,7 +612,7 @@ subroutine input_data call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & file=__FILE__, line=__LINE__) endif - + ! read namelist nml_error = 1 do while (nml_error > 0) @@ -657,7 +660,7 @@ subroutine input_data call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & file=__FILE__, line=__LINE__) endif - + ! read namelist nml_error = 1 do while (nml_error > 0) @@ -681,7 +684,7 @@ subroutine input_data call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & file=__FILE__, line=__LINE__) endif - + ! read namelist nml_error = 1 do while (nml_error > 0) @@ -699,7 +702,7 @@ subroutine input_data ! read dynamics_nml nml_name = 'dynamics_nml' write(nu_diag,*) subname,' Reading ', trim(nml_name) - + ! 
goto namelist in file call goto_nml(nu_nml,trim(nml_name),nml_error) if (nml_error /= 0) then @@ -724,7 +727,7 @@ subroutine input_data ! read shortwave_nml nml_name = 'shortwave_nml' write(nu_diag,*) subname,' Reading ', trim(nml_name) - + ! goto namelist in file call goto_nml(nu_nml,trim(nml_name),nml_error) if (nml_error /= 0) then @@ -749,14 +752,14 @@ subroutine input_data ! read ponds_nml nml_name = 'ponds_nml' write(nu_diag,*) subname,' Reading ', trim(nml_name) - + ! goto namelist in file call goto_nml(nu_nml,trim(nml_name),nml_error) if (nml_error /= 0) then call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & file=__FILE__, line=__LINE__) endif - + ! read namelist nml_error = 1 do while (nml_error > 0) @@ -774,14 +777,14 @@ subroutine input_data ! read snow_nml nml_name = 'snow_nml' write(nu_diag,*) subname,' Reading ', trim(nml_name) - + ! goto namelist in file call goto_nml(nu_nml,trim(nml_name),nml_error) if (nml_error /= 0) then call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & file=__FILE__, line=__LINE__) endif - + ! read namelist nml_error = 1 do while (nml_error > 0) @@ -821,7 +824,7 @@ subroutine input_data endif end do - ! done reading namelist. + ! done reading namelist. close(nu_nml) call release_fileunit(nu_nml) endif @@ -901,11 +904,12 @@ subroutine input_data enddo call broadcast_array(histfreq_n, master_task) call broadcast_scalar(histfreq_base, master_task) - call broadcast_scalar(hist_avg, master_task) + call broadcast_array(hist_avg, master_task) call broadcast_scalar(history_dir, master_task) call broadcast_scalar(history_file, master_task) call broadcast_scalar(history_precision, master_task) call broadcast_scalar(history_format, master_task) + call broadcast_scalar(hist_time_axis, master_task) call broadcast_scalar(write_ic, master_task) call broadcast_scalar(cpl_bgc, master_task) call broadcast_scalar(incond_dir, master_task) @@ -1570,6 +1574,11 @@ subroutine input_data abort_list = trim(abort_list)//":24" endif + if(trim(hist_time_axis) /= 'begin' .and. trim(hist_time_axis) /= 'middle' .and. trim(hist_time_axis) /= 'end') then + write (nu_diag,*) subname//' ERROR: hist_time_axis value not valid = '//trim(hist_time_axis) + abort_list = trim(abort_list)//":29" + endif + if(dumpfreq_base /= 'init' .and. dumpfreq_base /= 'zero') then write (nu_diag,*) subname//' ERROR: bad value for dumpfreq_base, allowed values: init, zero' abort_list = trim(abort_list)//":25" @@ -2311,12 +2320,12 @@ subroutine input_data write(nu_diag,1033) ' histfreq = ', histfreq(:) write(nu_diag,1023) ' histfreq_n = ', histfreq_n(:) write(nu_diag,1031) ' histfreq_base = ', trim(histfreq_base) - write(nu_diag,1011) ' hist_avg = ', hist_avg - if (.not. 
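! A hedged usage sketch for the per-stream hist_avg introduced above; the stream setup is illustrative, not a recommended configuration. Because hist_avg is now an array over history streams, averages and snapshots can be mixed in one run, e.g.
!   &setup_nml
!     histfreq   = 'm','d','x','x','x'
!     histfreq_n = 1, 1, 1, 1, 1
!     hist_avg   = .true., .false., .true., .true., .true.
!   /
! which writes monthly means on stream 1 and daily snapshots on stream 2.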
hist_avg) write(nu_diag,1039) ' History data will be snapshots' + write(nu_diag,*) ' hist_avg = ', hist_avg(:) write(nu_diag,1031) ' history_dir = ', trim(history_dir) write(nu_diag,1031) ' history_file = ', trim(history_file) write(nu_diag,1021) ' history_precision= ', history_precision write(nu_diag,1031) ' history_format = ', trim(history_format) + write(nu_diag,1031) ' hist_time_axis = ', trim(hist_time_axis) if (write_ic) then write(nu_diag,1039) ' Initial condition will be written in ', & trim(incond_dir) @@ -2533,7 +2542,7 @@ subroutine init_state use ice_domain, only: nblocks, blocks_ice, halo_info use ice_domain_size, only: ncat, nilyr, nslyr, n_iso, n_aero, nfsd use ice_flux, only: sst, Tf, Tair, salinz, Tmltz - use ice_grid, only: tmask, ULON, TLAT, grid_ice, grid_average_X2Y + use ice_grid, only: tmask, umask, ULON, TLAT, grid_ice, grid_average_X2Y use ice_boundary, only: ice_HaloUpdate use ice_constants, only: field_loc_Nface, field_loc_Eface, field_type_scalar use ice_state, only: trcr_depend, aicen, trcrn, vicen, vsnon, & @@ -2721,6 +2730,7 @@ subroutine init_state ilo, ihi, jlo, jhi, & iglob, jglob, & ice_ic, tmask(:,:, iblk), & + umask(:,:, iblk), & ULON (:,:, iblk), & TLAT (:,:, iblk), & Tair (:,:, iblk), sst (:,:, iblk), & @@ -2743,10 +2753,10 @@ subroutine init_state if (grid_ice == 'CD' .or. grid_ice == 'C') then - call grid_average_X2Y('S',uvel,'U',uvelN,'N') - call grid_average_X2Y('S',vvel,'U',vvelN,'N') - call grid_average_X2Y('S',uvel,'U',uvelE,'E') - call grid_average_X2Y('S',vvel,'U',vvelE,'E') + call grid_average_X2Y('A',uvel,'U',uvelN,'N') + call grid_average_X2Y('A',vvel,'U',vvelN,'N') + call grid_average_X2Y('A',uvel,'U',uvelE,'E') + call grid_average_X2Y('A',vvel,'U',vvelE,'E') ! Halo update on North, East faces call ice_HaloUpdate(uvelN, halo_info, & @@ -2761,7 +2771,6 @@ subroutine init_state endif - !----------------------------------------------------------------- ! compute aggregate ice state and open water area !----------------------------------------------------------------- @@ -2820,8 +2829,9 @@ subroutine set_state_var (nx_block, ny_block, & ilo, ihi, jlo, jhi, & iglob, jglob, & ice_ic, tmask, & - ULON, & - TLAT, & + umask, & + ULON, & + TLAT, & Tair, sst, & Tf, & salinz, Tmltz, & @@ -2846,7 +2856,8 @@ subroutine set_state_var (nx_block, ny_block, & ice_ic ! method of ice cover initialization logical (kind=log_kind), dimension (nx_block,ny_block), intent(in) :: & - tmask ! true for ice/ocean cells + tmask , & ! true for ice/ocean cells + umask ! for U points real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & ULON , & ! longitude of velocity pts (radians) @@ -3294,13 +3305,19 @@ subroutine set_state_var (nx_block, ny_block, & domain_length = dxrect*cm_to_m*nx_global period = c12*secday ! 12 days rotational period max_vel = pi*domain_length/period + do j = 1, ny_block do i = 1, nx_block - uvel(i,j) = c2*max_vel*(real(jglob(j), kind=dbl_kind) - p5) & - / real(ny_global - 1, kind=dbl_kind) - max_vel - vvel(i,j) = -c2*max_vel*(real(iglob(i), kind=dbl_kind) - p5) & - / real(nx_global - 1, kind=dbl_kind) + max_vel + if (umask(i,j)) then + uvel(i,j) = c2*max_vel*(real(jglob(j), kind=dbl_kind) - p5) & + / real(ny_global - 1, kind=dbl_kind) - max_vel + vvel(i,j) = -c2*max_vel*(real(iglob(i), kind=dbl_kind) - p5) & + / real(nx_global - 1, kind=dbl_kind) + max_vel + else + uvel(i,j) = c0 + vvel(i,j) = c0 + endif enddo ! j enddo ! 
i else diff --git a/cicecore/cicedyn/infrastructure/comm/mpi/ice_exit.F90 b/cicecore/cicedyn/infrastructure/comm/mpi/ice_exit.F90 index eafb3228f..5351a5336 100644 --- a/cicecore/cicedyn/infrastructure/comm/mpi/ice_exit.F90 +++ b/cicecore/cicedyn/infrastructure/comm/mpi/ice_exit.F90 @@ -1,3 +1,4 @@ + !======================================================================= ! ! Exit the model. @@ -8,7 +9,15 @@ module ice_exit use ice_kinds_mod + use ice_fileunits, only: nu_diag, ice_stderr, flush_fileunit use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted +#if (defined CESMCOUPLED) + use shr_sys_mod +#else +#ifndef SERIAL_REMOVE_MPI + use mpi ! MPI Fortran module +#endif +#endif implicit none public @@ -23,14 +32,6 @@ subroutine abort_ice(error_message, file, line, doabort) ! This routine aborts the ice model and prints an error message. -#if (defined CESMCOUPLED) - use ice_fileunits, only: nu_diag, flush_fileunit - use shr_sys_mod -#else - use ice_fileunits, only: nu_diag, ice_stderr, flush_fileunit - use mpi ! MPI Fortran module -#endif - character (len=*), intent(in),optional :: error_message ! error message character (len=*), intent(in),optional :: file ! file integer (kind=int_kind), intent(in), optional :: line ! line number @@ -38,11 +39,10 @@ subroutine abort_ice(error_message, file, line, doabort) ! local variables -#ifndef CESMCOUPLED integer (int_kind) :: & ierr, & ! MPI error flag + outunit, & ! output unit error_code ! return code -#endif logical (log_kind) :: ldoabort ! local doabort flag character(len=*), parameter :: subname='(abort_ice)' @@ -50,30 +50,31 @@ subroutine abort_ice(error_message, file, line, doabort) if (present(doabort)) ldoabort = doabort #if (defined CESMCOUPLED) - call flush_fileunit(nu_diag) - call icepack_warnings_flush(nu_diag) - write(nu_diag,*) ' ' - write(nu_diag,*) subname, 'ABORTED: ' - if (present(file)) write (nu_diag,*) subname,' called from ',trim(file) - if (present(line)) write (nu_diag,*) subname,' line number ',line - if (present(error_message)) write (nu_diag,*) subname,' error = ',trim(error_message) - call flush_fileunit(nu_diag) - if (ldoabort) call shr_sys_abort(subname//trim(error_message)) + outunit = nu_diag #else + outunit = ice_stderr +#endif + call flush_fileunit(nu_diag) call icepack_warnings_flush(nu_diag) - write(ice_stderr,*) ' ' - write(ice_stderr,*) subname, 'ABORTED: ' - if (present(file)) write (ice_stderr,*) subname,' called from ',trim(file) - if (present(line)) write (ice_stderr,*) subname,' line number ',line - if (present(error_message)) write (ice_stderr,*) subname,' error = ',trim(error_message) - call flush_fileunit(ice_stderr) - error_code = 128 + write(outunit,*) ' ' + write(outunit,*) subname, 'ABORTED: ' + if (present(file)) write (outunit,*) subname,' called from ',trim(file) + if (present(line)) write (outunit,*) subname,' line number ',line + if (present(error_message)) write (outunit,*) subname,' error = ',trim(error_message) + call flush_fileunit(outunit) + if (ldoabort) then +#if (defined CESMCOUPLED) + call shr_sys_abort(subname//trim(error_message)) +#else +#ifndef SERIAL_REMOVE_MPI + error_code = 128 call MPI_ABORT(MPI_COMM_WORLD, error_code, ierr) +#endif stop - endif #endif + endif end subroutine abort_ice @@ -81,12 +82,15 @@ end subroutine abort_ice subroutine end_run -! Ends run by calling MPI_FINALIZE. +! Ends run by calling MPI_FINALIZE +! Does nothing in serial runs integer (int_kind) :: ierr ! 
MPI error flag character(len=*), parameter :: subname = '(end_run)' +#ifndef SERIAL_REMOVE_MPI call MPI_FINALIZE(ierr) +#endif end subroutine end_run diff --git a/cicecore/cicedyn/infrastructure/comm/mpi/ice_global_reductions.F90 b/cicecore/cicedyn/infrastructure/comm/mpi/ice_global_reductions.F90 index 4b94389f7..91daf53a8 100644 --- a/cicecore/cicedyn/infrastructure/comm/mpi/ice_global_reductions.F90 +++ b/cicecore/cicedyn/infrastructure/comm/mpi/ice_global_reductions.F90 @@ -181,7 +181,7 @@ function global_sum_dbl(array, dist, field_loc, mMask, lMask) & if (Nrow .and. this_block%tripoleTFlag) then maxiglob = 0 ! entire u-row on T-fold grid elseif (Nrow .or. this_block%tripoleTFlag) then - maxiglob = nx_global/2 ! half T-row on T-fold and u-row on u-fold + maxiglob = nx_global/2 ! half T-row on T-fold or half u-row on u-fold else maxiglob = -1 ! nothing to do for T-row on u-fold endif @@ -189,25 +189,45 @@ function global_sum_dbl(array, dist, field_loc, mMask, lMask) & n = (iblock-1)*nx_block*ny_block - do j=jb,je - do i=ib,ie - n = n + 1 - ! eliminate redundant points - if (maxiglob > 0 .and. j == je .and. this_block%i_glob(i) > maxiglob) then - work(n,1) = 0._dbl_kind - else - if (present(mMask)) then - work(n,1) = array(i,j,iblock)*mMask(i,j,iblock) - else if (present(lMask)) then - if (lMask(i,j,iblock)) then - work(n,1) = array(i,j,iblock) - endif - else + if (present(mMask)) then + do j=jb,je + do i=ib,ie + n = n + 1 + work(n,1) = array(i,j,iblock)*mMask(i,j,iblock) + end do + end do + elseif (present(lMask)) then + do j=jb,je + do i=ib,ie + n = n + 1 + if (lMask(i,j,iblock)) then work(n,1) = array(i,j,iblock) endif - endif - end do - end do + end do + end do + else + do j=jb,je + do i=ib,ie + n = n + 1 + work(n,1) = array(i,j,iblock) + enddo + enddo + endif + + if (maxiglob >= 0) then + ! eliminate redundant points at je + ! set n to (ib,je) index + n = (iblock-1)*nx_block*ny_block + n = n + (je-1-jb+1)*(ie-ib+1) + j=je + do i=ib,ie + n = n + 1 + if (this_block%i_glob(i) > maxiglob) then + work(n,1) = 0._dbl_kind + endif + end do + endif + end do call compute_sums_dbl(work,sums,communicator,numProcs) @@ -317,7 +337,7 @@ function global_sum_real(array, dist, field_loc, mMask, lMask) & if (Nrow .and. this_block%tripoleTFlag) then maxiglob = 0 ! entire u-row on T-fold grid elseif (Nrow .or. this_block%tripoleTFlag) then - maxiglob = nx_global/2 ! half T-row on T-fold and u-row on u-fold + maxiglob = nx_global/2 ! half T-row on T-fold or half u-row on u-fold else maxiglob = -1 ! nothing to do for T-row on u-fold endif @@ -325,25 +345,45 @@ function global_sum_real(array, dist, field_loc, mMask, lMask) & n = (iblock-1)*nx_block*ny_block - do j=jb,je - do i=ib,ie - n = n + 1 - ! eliminate redundant points - if (maxiglob > 0 .and. j == je .and. 
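! A hedged, self-contained check of the index reset used in the restructured global sums above; the loop bounds are arbitrary sample values, not CICE settings. In the fill loops the work index of point (i,j) is base + (j-jb)*(ie-ib+1) + (i-ib+1), so advancing by (je-1-jb+1)*(ie-ib+1) from the block base lands just before (ib,je), and the j=je pass revisits only the top row where redundant tripole points are zeroed.
      program tripole_index_sketch
      implicit none
      integer :: ib, ie, jb, je, i, n
      ib = 2 ; ie = 9 ; jb = 2 ; je = 5
      n = (je-1-jb+1)*(ie-ib+1)        ! reset: skip rows jb..je-1 (24 points here)
      do i = ib, ie                    ! revisit only the j = je row
         n = n + 1
      enddo
      print *, 'covers all points:', n == (je-jb+1)*(ie-ib+1)   ! 32 == 32
      end program tripole_index_sketch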
this_block%i_glob(i) > maxiglob) then - work(n,1) = 0._dbl_kind - else - if (present(mMask)) then - work(n,1) = real(array(i,j,iblock)*mMask(i,j,iblock),dbl_kind) - else if (present(lMask)) then - if (lMask(i,j,iblock)) then - work(n,1) = real(array(i,j,iblock),dbl_kind) - endif - else + if (present(mMask)) then + do j=jb,je + do i=ib,ie + n = n + 1 + work(n,1) = real(array(i,j,iblock)*mMask(i,j,iblock),dbl_kind) + end do + end do + elseif (present(lMask)) then + do j=jb,je + do i=ib,ie + n = n + 1 + if (lMask(i,j,iblock)) then work(n,1) = real(array(i,j,iblock),dbl_kind) endif - endif - end do - end do + end do + end do + else + do j=jb,je + do i=ib,ie + n = n + 1 + work(n,1) = real(array(i,j,iblock),dbl_kind) + enddo + enddo + endif + + if (maxiglob >= 0) then + ! eliminate redundant points at je + ! set n to (ib,je) index + n = (iblock-1)*nx_block*ny_block + n = n + (je-1-jb+1)*(ie-ib+1) + j=je + do i=ib,ie + n = n + 1 + if (this_block%i_glob(i) > maxiglob) then + work(n,1) = 0._dbl_kind + endif + end do + endif + end do call compute_sums_dbl(work,sums,communicator,numProcs) @@ -445,7 +485,7 @@ function global_sum_int(array, dist, field_loc, mMask, lMask) & if (Nrow .and. this_block%tripoleTFlag) then maxiglob = 0 ! entire u-row on T-fold grid elseif (Nrow .or. this_block%tripoleTFlag) then - maxiglob = nx_global/2 ! half T-row on T-fold and u-row on u-fold + maxiglob = nx_global/2 ! half T-row on T-fold or half u-row on u-fold else maxiglob = -1 ! nothing to do for T-row on u-fold endif @@ -456,7 +496,7 @@ function global_sum_int(array, dist, field_loc, mMask, lMask) & do j=jb,je do i=ib,ie ! eliminate redundant points - if (maxiglob > 0 .and. j == je .and. this_block%i_glob(i) > maxiglob) then + if (maxiglob >= 0 .and. j == je .and. this_block%i_glob(i) > maxiglob) then ! blockSum = blockSum + 0_int_kind else if (present(mMask)) then @@ -798,7 +838,7 @@ function global_sum_prod_dbl (array1, array2, dist, field_loc, & if (Nrow .and. this_block%tripoleTFlag) then maxiglob = 0 ! entire u-row on T-fold grid elseif (Nrow .or. this_block%tripoleTFlag) then - maxiglob = nx_global/2 ! half T-row on T-fold and u-row on u-fold + maxiglob = nx_global/2 ! half T-row on T-fold or half u-row on u-fold else maxiglob = -1 ! nothing to do for T-row on u-fold endif @@ -806,25 +846,45 @@ function global_sum_prod_dbl (array1, array2, dist, field_loc, & n = (iblock-1)*nx_block*ny_block - do j=jb,je - do i=ib,ie - n = n + 1 - ! eliminate redundant points - if (maxiglob > 0 .and. j == je .and. this_block%i_glob(i) > maxiglob) then - work(n,1) = 0._dbl_kind - else - if (present(mMask)) then - work(n,1) = array1(i,j,iblock)*array2(i,j,iblock)*mMask(i,j,iblock) - else if (present(lMask)) then - if (lMask(i,j,iblock)) then - work(n,1) = array1(i,j,iblock)*array2(i,j,iblock) - endif - else + if (present(mMask)) then + do j=jb,je + do i=ib,ie + n = n + 1 + work(n,1) = array1(i,j,iblock)*array2(i,j,iblock)*mMask(i,j,iblock) + end do + end do + elseif (present(lMask)) then + do j=jb,je + do i=ib,ie + n = n + 1 + if (lMask(i,j,iblock)) then work(n,1) = array1(i,j,iblock)*array2(i,j,iblock) endif - endif - end do - end do + end do + end do + else + do j=jb,je + do i=ib,ie + n = n + 1 + work(n,1) = array1(i,j,iblock)*array2(i,j,iblock) + enddo + enddo + endif + + if (maxiglob >= 0) then + ! eliminate redundant points at je + ! 
set n to (ib,je) index + n = (iblock-1)*nx_block*ny_block + n = n + (je-1-jb+1)*(ie-ib+1) + j=je + do i=ib,ie + n = n + 1 + if (this_block%i_glob(i) > maxiglob) then + work(n,1) = 0._dbl_kind + endif + end do + endif + end do call compute_sums_dbl(work,sums,communicator,numProcs) @@ -936,7 +996,7 @@ function global_sum_prod_real (array1, array2, dist, field_loc, & if (Nrow .and. this_block%tripoleTFlag) then maxiglob = 0 ! entire u-row on T-fold grid elseif (Nrow .or. this_block%tripoleTFlag) then - maxiglob = nx_global/2 ! half T-row on T-fold and u-row on u-fold + maxiglob = nx_global/2 ! half T-row on T-fold or half u-row on u-fold else maxiglob = -1 ! nothing to do for T-row on u-fold endif @@ -944,25 +1004,45 @@ function global_sum_prod_real (array1, array2, dist, field_loc, & n = (iblock-1)*nx_block*ny_block - do j=jb,je - do i=ib,ie - n = n + 1 - ! eliminate redundant points - if (maxiglob > 0 .and. j == je .and. this_block%i_glob(i) > maxiglob) then - work(n,1) = 0._dbl_kind - else - if (present(mMask)) then - work(n,1) = real(array1(i,j,iblock)*array2(i,j,iblock)*mMask(i,j,iblock),dbl_kind) - else if (present(lMask)) then - if (lMask(i,j,iblock)) then - work(n,1) = real(array1(i,j,iblock)*array2(i,j,iblock),dbl_kind) - endif - else + if (present(mMask)) then + do j=jb,je + do i=ib,ie + n = n + 1 + work(n,1) = real(array1(i,j,iblock)*array2(i,j,iblock)*mMask(i,j,iblock),dbl_kind) + end do + end do + elseif (present(lMask)) then + do j=jb,je + do i=ib,ie + n = n + 1 + if (lMask(i,j,iblock)) then work(n,1) = real(array1(i,j,iblock)*array2(i,j,iblock),dbl_kind) endif - endif - end do - end do + end do + end do + else + do j=jb,je + do i=ib,ie + n = n + 1 + work(n,1) = real(array1(i,j,iblock)*array2(i,j,iblock),dbl_kind) + enddo + enddo + endif + + if (maxiglob >= 0) then + ! eliminate redundant points at je + ! set n to (ib,je) index + n = (iblock-1)*nx_block*ny_block + n = n + (je-1-jb+1)*(ie-ib+1) + j=je + do i=ib,ie + n = n + 1 + if (this_block%i_glob(i) > maxiglob) then + work(n,1) = 0._dbl_kind + endif + end do + endif + end do call compute_sums_dbl(work,sums,communicator,numProcs) @@ -1066,7 +1146,7 @@ function global_sum_prod_int (array1, array2, dist, field_loc, & if (Nrow .and. this_block%tripoleTFlag) then maxiglob = 0 ! entire u-row on T-fold grid elseif (Nrow .or. this_block%tripoleTFlag) then - maxiglob = nx_global/2 ! half T-row on T-fold and u-row on u-fold + maxiglob = nx_global/2 ! half T-row on T-fold or half u-row on u-fold else maxiglob = -1 ! nothing to do for T-row on u-fold endif @@ -1077,7 +1157,7 @@ function global_sum_prod_int (array1, array2, dist, field_loc, & do j=jb,je do i=ib,ie ! eliminate redundant points - if (maxiglob > 0 .and. j == je .and. this_block%i_glob(i) > maxiglob) then + if (maxiglob >= 0 .and. j == je .and. this_block%i_glob(i) > maxiglob) then ! blockSum = blockSum + 0_int_kind else if (present(mMask)) then diff --git a/cicecore/cicedyn/infrastructure/comm/mpi/ice_reprosum.F90 b/cicecore/cicedyn/infrastructure/comm/mpi/ice_reprosum.F90 index 8c6f90363..7c6c0eb77 100644 --- a/cicecore/cicedyn/infrastructure/comm/mpi/ice_reprosum.F90 +++ b/cicecore/cicedyn/infrastructure/comm/mpi/ice_reprosum.F90 @@ -87,7 +87,7 @@ MODULE ice_reprosum !----------------------------------------------------------------------- logical :: repro_sum_use_ddpdd = .false. -! logical :: detailed_timing = .false. + logical :: detailed_timing = .false. 
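! Note on the global_sum refactors above: the mMask/lMask tests are
! hoisted out of the i/j loops, and the duplicated tripole points are
! now zeroed in a separate pass over row je only. The offset arithmetic
! assumes the work array is packed one block slot at a time, row by
! row; a minimal sketch using the patch's names:
!
!    n = (iblock-1)*nx_block*ny_block   ! start of this block's slot
!    n = n + (je-jb)*(ie-ib+1)          ! skip rows jb..je-1
!    do i = ib, ie
!       n = n + 1                       ! n now indexes (i,je)
!       if (this_block%i_glob(i) > maxiglob) work(n,1) = 0._dbl_kind
!    end do
!
! The patch writes the skipped-row offset as (je-1-jb+1)*(ie-ib+1),
! i.e. the number of rows below je times the row length, which is the
! same quantity.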
character(len=char_len_long) :: tmpstr CONTAINS @@ -100,10 +100,10 @@ MODULE ice_reprosum !----------------------------------------------------------------------- subroutine ice_reprosum_setopts(repro_sum_use_ddpdd_in, & - repro_sum_rel_diff_max_in, & - repro_sum_recompute_in, & - repro_sum_master, & - repro_sum_logunit ) + repro_sum_rel_diff_max_in, & + repro_sum_recompute_in, & + repro_sum_master, & + repro_sum_logunit ) !------------------------------Arguments-------------------------------- logical, intent(in), optional :: repro_sum_use_ddpdd_in @@ -260,12 +260,12 @@ end subroutine ice_reprosum_setopts !---------------------------------------------------------------------- subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & - nflds, ddpdd_sum, & - arr_gbl_max, arr_gbl_max_out, & - arr_max_levels, arr_max_levels_out, & - gbl_max_nsummands, gbl_max_nsummands_out,& - gbl_count, repro_sum_validate, & - repro_sum_stats, rel_diff, commid ) + nflds, ddpdd_sum, & + arr_gbl_max, arr_gbl_max_out, & + arr_max_levels, arr_max_levels_out, & + gbl_max_nsummands, gbl_max_nsummands_out,& + gbl_count, repro_sum_validate, & + repro_sum_stats, rel_diff, commid ) !---------------------------------------------------------------------- ! Arguments @@ -434,7 +434,7 @@ subroutine ice_reprosum_calc (arr, arr_gsum, nsummands, dsummands, & ! if (detailed_timing) call xicex_timer_start('ice_reprosum_ddpdd') call ice_reprosum_ddpdd(arr, arr_gsum, nsummands, dsummands, & - nflds, mpi_comm) + nflds, mpi_comm) repro_sum_fast = 1 ! if (detailed_timing) call xicex_timer_stop('ice_reprosum_ddpdd') @@ -774,9 +774,9 @@ end subroutine ice_reprosum_calc !---------------------------------------------------------------------- subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & - arr_max_shift, arr_gmax_exp, max_levels, & - max_level, validate, recompute, & - omp_nthreads, mpi_comm ) + arr_max_shift, arr_gmax_exp, max_levels, & + max_level, validate, recompute, & + omp_nthreads, mpi_comm ) !---------------------------------------------------------------------- @@ -1224,7 +1224,7 @@ end subroutine ice_reprosum_int !---------------------------------------------------------------------- logical function ice_reprosum_tolExceeded (name, nflds, master, & - logunit, rel_diff ) + logunit, rel_diff ) !---------------------------------------------------------------------- ! Arguments @@ -1310,7 +1310,7 @@ end function ice_reprosum_tolExceeded !---------------------------------------------------------------------- subroutine ice_reprosum_ddpdd (arr, arr_gsum, nsummands, dsummands, & - nflds, mpi_comm ) + nflds, mpi_comm ) !---------------------------------------------------------------------- ! Arguments diff --git a/cicecore/cicedyn/infrastructure/comm/serial/ice_boundary.F90 b/cicecore/cicedyn/infrastructure/comm/serial/ice_boundary.F90 index aaebcfaad..faeaf3227 100644 --- a/cicecore/cicedyn/infrastructure/comm/serial/ice_boundary.F90 +++ b/cicecore/cicedyn/infrastructure/comm/serial/ice_boundary.F90 @@ -3749,7 +3749,7 @@ end subroutine ice_HaloUpdate4DI4 !*********************************************************************** ! This routine updates ghost cells for an input array using ! a second array as needed by the stress fields. -! This is just like 2DR8 except no averaging and only on tripole +! 
This is just like 2DR8 except no averaging and only on tripole subroutine ice_HaloUpdate_stress(array1, array2, halo, & fieldLoc, fieldKind, & diff --git a/cicecore/cicedyn/infrastructure/comm/serial/ice_exit.F90 b/cicecore/cicedyn/infrastructure/comm/serial/ice_exit.F90 index 2daadc0e6..39f2b6702 100644 --- a/cicecore/cicedyn/infrastructure/comm/serial/ice_exit.F90 +++ b/cicecore/cicedyn/infrastructure/comm/serial/ice_exit.F90 @@ -1,7 +1,9 @@ + +#define SERIAL_REMOVE_MPI + !======================================================================= ! ! Exit the model. -! ! authors William H. Lipscomb (LANL) ! Elizabeth C. Hunke (LANL) ! 2006 ECH: separated serial and mpi functionality @@ -9,10 +11,14 @@ module ice_exit use ice_kinds_mod - use ice_fileunits, only: nu_diag, flush_fileunit + use ice_fileunits, only: nu_diag, ice_stderr, flush_fileunit use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted -#ifdef CESMCOUPLED +#if (defined CESMCOUPLED) use shr_sys_mod +#else +#ifndef SERIAL_REMOVE_MPI + use mpi ! MPI Fortran module +#endif #endif implicit none @@ -24,7 +30,7 @@ module ice_exit !======================================================================= - subroutine abort_ice(error_message,file,line,doabort) + subroutine abort_ice(error_message, file, line, doabort) ! This routine aborts the ice model and prints an error message. @@ -33,30 +39,44 @@ subroutine abort_ice(error_message,file,line,doabort) integer (kind=int_kind), intent(in), optional :: line ! line number logical (kind=log_kind), intent(in), optional :: doabort ! abort flag - logical (kind=log_kind) :: ldoabort ! local doabort + ! local variables + + integer (int_kind) :: & + ierr, & ! MPI error flag + outunit, & ! output unit + error_code ! return code + logical (log_kind) :: ldoabort ! local doabort flag character(len=*), parameter :: subname='(abort_ice)' ldoabort = .true. if (present(doabort)) ldoabort = doabort -#ifdef CESMCOUPLED - call icepack_warnings_flush(nu_diag) - write(nu_diag,*) ' ' - write(nu_diag,*) subname, 'ABORTED: ' - if (present(file)) write (nu_diag,*) subname,' called from ',trim(file) - if (present(line)) write (nu_diag,*) subname,' line number ',line - if (present(error_message)) write (nu_diag,*) subname,' error = ',trim(error_message) - if (ldoabort) call shr_sys_abort(subname//trim(error_message)) +#if (defined CESMCOUPLED) + outunit = nu_diag #else - call icepack_warnings_flush(nu_diag) - write(nu_diag,*) ' ' - write(nu_diag,*) subname, 'ABORTED: ' - if (present(file)) write (nu_diag,*) subname,' called from ',trim(file) - if (present(line)) write (nu_diag,*) subname,' line number ',line - if (present(error_message)) write (nu_diag,*) subname,' error = ',trim(error_message) + outunit = ice_stderr +#endif + call flush_fileunit(nu_diag) - if (ldoabort) stop + call icepack_warnings_flush(nu_diag) + write(outunit,*) ' ' + write(outunit,*) subname, 'ABORTED: ' + if (present(file)) write (outunit,*) subname,' called from ',trim(file) + if (present(line)) write (outunit,*) subname,' line number ',line + if (present(error_message)) write (outunit,*) subname,' error = ',trim(error_message) + call flush_fileunit(outunit) + + if (ldoabort) then +#if (defined CESMCOUPLED) + call shr_sys_abort(subname//trim(error_message)) +#else +#ifndef SERIAL_REMOVE_MPI + error_code = 128 + call MPI_ABORT(MPI_COMM_WORLD, error_code, ierr) #endif + stop +#endif + endif end subroutine abort_ice @@ -64,10 +84,15 @@ end subroutine abort_ice subroutine end_run +! Ends run by calling MPI_FINALIZE +! 
Does nothing in serial runs + + integer (int_kind) :: ierr ! MPI error flag character(len=*), parameter :: subname = '(end_run)' -! Ends parallel run by calling MPI_FINALIZE. -! Does nothing in serial runs. +#ifndef SERIAL_REMOVE_MPI + call MPI_FINALIZE(ierr) +#endif end subroutine end_run diff --git a/cicecore/cicedyn/infrastructure/comm/serial/ice_global_reductions.F90 b/cicecore/cicedyn/infrastructure/comm/serial/ice_global_reductions.F90 index 5fcd45876..ed36cc6c0 100644 --- a/cicecore/cicedyn/infrastructure/comm/serial/ice_global_reductions.F90 +++ b/cicecore/cicedyn/infrastructure/comm/serial/ice_global_reductions.F90 @@ -182,7 +182,7 @@ function global_sum_dbl(array, dist, field_loc, mMask, lMask) & if (Nrow .and. this_block%tripoleTFlag) then maxiglob = 0 ! entire u-row on T-fold grid elseif (Nrow .or. this_block%tripoleTFlag) then - maxiglob = nx_global/2 ! half T-row on T-fold and u-row on u-fold + maxiglob = nx_global/2 ! half T-row on T-fold or half u-row on u-fold else maxiglob = -1 ! nothing to do for T-row on u-fold endif @@ -190,25 +190,45 @@ function global_sum_dbl(array, dist, field_loc, mMask, lMask) & n = (iblock-1)*nx_block*ny_block - do j=jb,je - do i=ib,ie - n = n + 1 - ! eliminate redundant points - if (maxiglob > 0 .and. j == je .and. this_block%i_glob(i) > maxiglob) then - work(n,1) = 0._dbl_kind - else - if (present(mMask)) then - work(n,1) = array(i,j,iblock)*mMask(i,j,iblock) - else if (present(lMask)) then - if (lMask(i,j,iblock)) then - work(n,1) = array(i,j,iblock) - endif - else + if (present(mMask)) then + do j=jb,je + do i=ib,ie + n = n + 1 + work(n,1) = array(i,j,iblock)*mMask(i,j,iblock) + end do + end do + elseif (present(lMask)) then + do j=jb,je + do i=ib,ie + n = n + 1 + if (lMask(i,j,iblock)) then work(n,1) = array(i,j,iblock) endif - endif - end do - end do + end do + end do + else + do j=jb,je + do i=ib,ie + n = n + 1 + work(n,1) = array(i,j,iblock) + enddo + enddo + endif + + if (maxiglob >= 0) then + ! eliminate redundant points at je + ! set n to (ib,je) index + n = (iblock-1)*nx_block*ny_block + n = n + (je-1-jb+1)*(ie-ib+1) + j=je + do i=ib,ie + n = n + 1 + if (this_block%i_glob(i) > maxiglob) then + work(n,1) = 0._dbl_kind + endif + end do + endif + end do call compute_sums_dbl(work,sums,communicator,numProcs) @@ -318,7 +338,7 @@ function global_sum_real(array, dist, field_loc, mMask, lMask) & if (Nrow .and. this_block%tripoleTFlag) then maxiglob = 0 ! entire u-row on T-fold grid elseif (Nrow .or. this_block%tripoleTFlag) then - maxiglob = nx_global/2 ! half T-row on T-fold and u-row on u-fold + maxiglob = nx_global/2 ! half T-row on T-fold or half u-row on u-fold else maxiglob = -1 ! nothing to do for T-row on u-fold endif @@ -326,25 +346,45 @@ function global_sum_real(array, dist, field_loc, mMask, lMask) & n = (iblock-1)*nx_block*ny_block - do j=jb,je - do i=ib,ie - n = n + 1 - ! eliminate redundant points - if (maxiglob > 0 .and. j == je .and. 
this_block%i_glob(i) > maxiglob) then - work(n,1) = 0._dbl_kind - else - if (present(mMask)) then - work(n,1) = real(array(i,j,iblock)*mMask(i,j,iblock),dbl_kind) - else if (present(lMask)) then - if (lMask(i,j,iblock)) then - work(n,1) = real(array(i,j,iblock),dbl_kind) - endif - else + if (present(mMask)) then + do j=jb,je + do i=ib,ie + n = n + 1 + work(n,1) = real(array(i,j,iblock)*mMask(i,j,iblock),dbl_kind) + end do + end do + elseif (present(lMask)) then + do j=jb,je + do i=ib,ie + n = n + 1 + if (lMask(i,j,iblock)) then work(n,1) = real(array(i,j,iblock),dbl_kind) endif - endif - end do - end do + end do + end do + else + do j=jb,je + do i=ib,ie + n = n + 1 + work(n,1) = real(array(i,j,iblock),dbl_kind) + enddo + enddo + endif + + if (maxiglob >= 0) then + ! eliminate redundant points at je + ! set n to (ib,je) index + n = (iblock-1)*nx_block*ny_block + n = n + (je-1-jb+1)*(ie-ib+1) + j=je + do i=ib,ie + n = n + 1 + if (this_block%i_glob(i) > maxiglob) then + work(n,1) = 0._dbl_kind + endif + end do + endif + end do call compute_sums_dbl(work,sums,communicator,numProcs) @@ -446,7 +486,7 @@ function global_sum_int(array, dist, field_loc, mMask, lMask) & if (Nrow .and. this_block%tripoleTFlag) then maxiglob = 0 ! entire u-row on T-fold grid elseif (Nrow .or. this_block%tripoleTFlag) then - maxiglob = nx_global/2 ! half T-row on T-fold and u-row on u-fold + maxiglob = nx_global/2 ! half T-row on T-fold or half u-row on u-fold else maxiglob = -1 ! nothing to do for T-row on u-fold endif @@ -457,7 +497,7 @@ function global_sum_int(array, dist, field_loc, mMask, lMask) & do j=jb,je do i=ib,ie ! eliminate redundant points - if (maxiglob > 0 .and. j == je .and. this_block%i_glob(i) > maxiglob) then + if (maxiglob >= 0 .and. j == je .and. this_block%i_glob(i) > maxiglob) then ! blockSum = blockSum + 0_int_kind else if (present(mMask)) then @@ -799,7 +839,7 @@ function global_sum_prod_dbl (array1, array2, dist, field_loc, & if (Nrow .and. this_block%tripoleTFlag) then maxiglob = 0 ! entire u-row on T-fold grid elseif (Nrow .or. this_block%tripoleTFlag) then - maxiglob = nx_global/2 ! half T-row on T-fold and u-row on u-fold + maxiglob = nx_global/2 ! half T-row on T-fold or half u-row on u-fold else maxiglob = -1 ! nothing to do for T-row on u-fold endif @@ -807,25 +847,45 @@ function global_sum_prod_dbl (array1, array2, dist, field_loc, & n = (iblock-1)*nx_block*ny_block - do j=jb,je - do i=ib,ie - n = n + 1 - ! eliminate redundant points - if (maxiglob > 0 .and. j == je .and. this_block%i_glob(i) > maxiglob) then - work(n,1) = 0._dbl_kind - else - if (present(mMask)) then - work(n,1) = array1(i,j,iblock)*array2(i,j,iblock)*mMask(i,j,iblock) - else if (present(lMask)) then - if (lMask(i,j,iblock)) then - work(n,1) = array1(i,j,iblock)*array2(i,j,iblock) - endif - else + if (present(mMask)) then + do j=jb,je + do i=ib,ie + n = n + 1 + work(n,1) = array1(i,j,iblock)*array2(i,j,iblock)*mMask(i,j,iblock) + end do + end do + elseif (present(lMask)) then + do j=jb,je + do i=ib,ie + n = n + 1 + if (lMask(i,j,iblock)) then work(n,1) = array1(i,j,iblock)*array2(i,j,iblock) endif - endif - end do - end do + end do + end do + else + do j=jb,je + do i=ib,ie + n = n + 1 + work(n,1) = array1(i,j,iblock)*array2(i,j,iblock) + enddo + enddo + endif + + if (maxiglob >= 0) then + ! eliminate redundant points at je + ! 
set n to (ib,je) index + n = (iblock-1)*nx_block*ny_block + n = n + (je-1-jb+1)*(ie-ib+1) + j=je + do i=ib,ie + n = n + 1 + if (this_block%i_glob(i) > maxiglob) then + work(n,1) = 0._dbl_kind + endif + end do + endif + end do call compute_sums_dbl(work,sums,communicator,numProcs) @@ -937,7 +997,7 @@ function global_sum_prod_real (array1, array2, dist, field_loc, & if (Nrow .and. this_block%tripoleTFlag) then maxiglob = 0 ! entire u-row on T-fold grid elseif (Nrow .or. this_block%tripoleTFlag) then - maxiglob = nx_global/2 ! half T-row on T-fold and u-row on u-fold + maxiglob = nx_global/2 ! half T-row on T-fold or half u-row on u-fold else maxiglob = -1 ! nothing to do for T-row on u-fold endif @@ -945,25 +1005,45 @@ function global_sum_prod_real (array1, array2, dist, field_loc, & n = (iblock-1)*nx_block*ny_block - do j=jb,je - do i=ib,ie - n = n + 1 - ! eliminate redundant points - if (maxiglob > 0 .and. j == je .and. this_block%i_glob(i) > maxiglob) then - work(n,1) = 0._dbl_kind - else - if (present(mMask)) then - work(n,1) = real(array1(i,j,iblock)*array2(i,j,iblock)*mMask(i,j,iblock),dbl_kind) - else if (present(lMask)) then - if (lMask(i,j,iblock)) then - work(n,1) = real(array1(i,j,iblock)*array2(i,j,iblock),dbl_kind) - endif - else + if (present(mMask)) then + do j=jb,je + do i=ib,ie + n = n + 1 + work(n,1) = real(array1(i,j,iblock)*array2(i,j,iblock)*mMask(i,j,iblock),dbl_kind) + end do + end do + elseif (present(lMask)) then + do j=jb,je + do i=ib,ie + n = n + 1 + if (lMask(i,j,iblock)) then work(n,1) = real(array1(i,j,iblock)*array2(i,j,iblock),dbl_kind) endif - endif - end do - end do + end do + end do + else + do j=jb,je + do i=ib,ie + n = n + 1 + work(n,1) = real(array1(i,j,iblock)*array2(i,j,iblock),dbl_kind) + enddo + enddo + endif + + if (maxiglob >= 0) then + ! eliminate redundant points at je + ! set n to (ib,je) index + n = (iblock-1)*nx_block*ny_block + n = n + (je-1-jb+1)*(ie-ib+1) + j=je + do i=ib,ie + n = n + 1 + if (this_block%i_glob(i) > maxiglob) then + work(n,1) = 0._dbl_kind + endif + end do + endif + end do call compute_sums_dbl(work,sums,communicator,numProcs) @@ -1067,7 +1147,7 @@ function global_sum_prod_int (array1, array2, dist, field_loc, & if (Nrow .and. this_block%tripoleTFlag) then maxiglob = 0 ! entire u-row on T-fold grid elseif (Nrow .or. this_block%tripoleTFlag) then - maxiglob = nx_global/2 ! half T-row on T-fold and u-row on u-fold + maxiglob = nx_global/2 ! half T-row on T-fold or half u-row on u-fold else maxiglob = -1 ! nothing to do for T-row on u-fold endif @@ -1078,7 +1158,7 @@ function global_sum_prod_int (array1, array2, dist, field_loc, & do j=jb,je do i=ib,ie ! eliminate redundant points - if (maxiglob > 0 .and. j == je .and. this_block%i_glob(i) > maxiglob) then + if (maxiglob >= 0 .and. j == je .and. this_block%i_glob(i) > maxiglob) then ! 
blockSum = blockSum + 0_int_kind else if (present(mMask)) then diff --git a/cicecore/cicedyn/infrastructure/ice_domain.F90 b/cicecore/cicedyn/infrastructure/ice_domain.F90 index ff1fac723..06d0d8ae1 100644 --- a/cicecore/cicedyn/infrastructure/ice_domain.F90 +++ b/cicecore/cicedyn/infrastructure/ice_domain.F90 @@ -172,7 +172,7 @@ subroutine init_domain_blocks if (my_task == master_task) then nml_name = 'domain_nml' write(nu_diag,*) subname,' Reading ', trim(nml_name) - + call get_fileunit(nu_nml) open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then @@ -186,7 +186,7 @@ subroutine init_domain_blocks call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & file=__FILE__, line=__LINE__) endif - + nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=domain_nml,iostat=nml_error) diff --git a/cicecore/cicedyn/infrastructure/io/io_binary/ice_history_write.F90 b/cicecore/cicedyn/infrastructure/io/io_binary/ice_history_write.F90 index 9df51635d..526d0d96d 100644 --- a/cicecore/cicedyn/infrastructure/io/io_binary/ice_history_write.F90 +++ b/cicecore/cicedyn/infrastructure/io/io_binary/ice_history_write.F90 @@ -157,7 +157,7 @@ subroutine ice_write_hist(ns) write (nu_hdr, 995) nrec,trim(avail_hist_fields(n)%vname), & trim(avail_hist_fields(n)%vcomment) - if (histfreq(ns) == '1' .or. .not. hist_avg & + if (histfreq(ns) == '1' .or. .not. hist_avg(ns) & .or. write_ic & .or. n==n_divu(ns) .or. n==n_shear(ns) & ! snapshots .or. n==n_sig1(ns) .or. n==n_sig2(ns) & @@ -187,7 +187,7 @@ subroutine ice_write_hist(ns) write (nu_hdr, 994) nrec,trim(avail_hist_fields(n)%vname), & trim(avail_hist_fields(n)%vdesc),trim(avail_hist_fields(n)%vunit),nn - if (histfreq(ns) == '1' .or. .not. hist_avg .or. write_ic) then + if (histfreq(ns) == '1' .or. .not. hist_avg(ns) .or. write_ic) then write (nu_hdr, 996) nrec,trim(avail_hist_fields(n)%vname), & 'time_rep','instantaneous' else @@ -211,7 +211,7 @@ subroutine ice_write_hist(ns) write (nu_hdr, 993) nrec,trim(avail_hist_fields(n)%vname), & trim(avail_hist_fields(n)%vdesc),trim(avail_hist_fields(n)%vunit),k - if (histfreq(ns) == '1' .or. .not. hist_avg .or. write_ic) then + if (histfreq(ns) == '1' .or. .not. hist_avg(ns) .or. write_ic) then write (nu_hdr, 996) nrec,trim(avail_hist_fields(n)%vname), & 'time_rep','instantaneous' else @@ -235,7 +235,7 @@ subroutine ice_write_hist(ns) write (nu_hdr, 993) nrec,trim(avail_hist_fields(n)%vname), & trim(avail_hist_fields(n)%vdesc),trim(avail_hist_fields(n)%vunit),nn,k - if (histfreq(ns) == '1' .or. .not. hist_avg .or. write_ic) then + if (histfreq(ns) == '1' .or. .not. hist_avg(ns) .or. write_ic) then write (nu_hdr, 996) nrec,trim(avail_hist_fields(n)%vname), & 'time_rep','instantaneous' else @@ -259,7 +259,7 @@ subroutine ice_write_hist(ns) write (nu_hdr, 993) nrec,trim(avail_hist_fields(n)%vname), & trim(avail_hist_fields(n)%vdesc),trim(avail_hist_fields(n)%vunit),nn,k - if (histfreq(ns) == '1' .or. .not. hist_avg .or. write_ic) then + if (histfreq(ns) == '1' .or. .not. hist_avg(ns) .or. write_ic) then write (nu_hdr, 996) nrec,trim(avail_hist_fields(n)%vname), & 'time_rep','instantaneous' else @@ -283,7 +283,7 @@ subroutine ice_write_hist(ns) write (nu_hdr, 993) nrec,trim(avail_hist_fields(n)%vname), & trim(avail_hist_fields(n)%vdesc),trim(avail_hist_fields(n)%vunit),nn,k - if (histfreq(ns) == '1' .or. .not. hist_avg .or. write_ic) then + if (histfreq(ns) == '1' .or. .not. hist_avg(ns) .or. 
write_ic) then write (nu_hdr, 996) nrec,trim(avail_hist_fields(n)%vname), & 'time_rep','instantaneous' else @@ -308,7 +308,7 @@ subroutine ice_write_hist(ns) write (nu_hdr, 993) nrec,trim(avail_hist_fields(n)%vname), & trim(avail_hist_fields(n)%vdesc),trim(avail_hist_fields(n)%vunit),nn,k - if (histfreq(ns) == '1' .or. .not. hist_avg .or. write_ic) then + if (histfreq(ns) == '1' .or. .not. hist_avg(ns) .or. write_ic) then write (nu_hdr, 996) nrec,trim(avail_hist_fields(n)%vname), & 'time_rep','instantaneous' else @@ -334,7 +334,7 @@ subroutine ice_write_hist(ns) write (nu_hdr, 993) nrec,trim(avail_hist_fields(n)%vname), & trim(avail_hist_fields(n)%vdesc),trim(avail_hist_fields(n)%vunit),nn,k - if (histfreq(ns) == '1' .or. .not. hist_avg .or. write_ic) then + if (histfreq(ns) == '1' .or. .not. hist_avg(ns) .or. write_ic) then write (nu_hdr, 996) nrec,trim(avail_hist_fields(n)%vname), & 'time_rep','instantaneous' else @@ -360,7 +360,7 @@ subroutine ice_write_hist(ns) write (nu_hdr, 993) nrec,trim(avail_hist_fields(n)%vname), & trim(avail_hist_fields(n)%vdesc),trim(avail_hist_fields(n)%vunit),nn,k - if (histfreq(ns) == '1' .or. .not. hist_avg .or. write_ic) then + if (histfreq(ns) == '1' .or. .not. hist_avg(ns) .or. write_ic) then write (nu_hdr, 996) nrec,trim(avail_hist_fields(n)%vname), & 'time_rep','instantaneous' else diff --git a/cicecore/cicedyn/infrastructure/io/io_netcdf/ice_history_write.F90 b/cicecore/cicedyn/infrastructure/io/io_netcdf/ice_history_write.F90 index 10d750300..51d76a6f4 100644 --- a/cicecore/cicedyn/infrastructure/io/io_netcdf/ice_history_write.F90 +++ b/cicecore/cicedyn/infrastructure/io/io_netcdf/ice_history_write.F90 @@ -21,7 +21,7 @@ module ice_history_write - use ice_constants, only: c0, c360, spval, spval_dbl + use ice_constants, only: c0, c360, p5, spval, spval_dbl use ice_fileunits, only: nu_diag use ice_exit, only: abort_ice use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted @@ -137,8 +137,6 @@ subroutine ice_write_hist (ns) if (my_task == master_task) then - ltime2 = timesecs/secday - call construct_filename(ncfile(ns),'nc',ns) ! add local directory path name to ncfile @@ -159,10 +157,10 @@ subroutine ice_write_hist (ns) ! define dimensions !----------------------------------------------------------------- - if (hist_avg .and. .not. write_ic) then - status = nf90_def_dim(ncid,'d2',2,boundid) + if (hist_avg(ns) .and. .not. write_ic) then + status = nf90_def_dim(ncid,'nbnd',2,boundid) if (status /= nf90_noerr) call abort_ice(subname// & - 'ERROR: defining dim d2') + 'ERROR: defining dim nbnd') endif status = nf90_def_dim(ncid,'ni',nx_global,imtid) @@ -213,7 +211,7 @@ subroutine ice_write_hist (ns) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: defining var time') - status = nf90_put_att(ncid,varid,'long_name','model time') + status = nf90_put_att(ncid,varid,'long_name','time') if (status /= nf90_noerr) call abort_ice(subname// & 'ice Error: time long_name') @@ -230,7 +228,7 @@ subroutine ice_write_hist (ns) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: time calendar') elseif (days_per_year == 365 .and. .not.use_leap_years ) then - status = nf90_put_att(ncid,varid,'calendar','NoLeap') + status = nf90_put_att(ncid,varid,'calendar','noleap') if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: time calendar') elseif (use_leap_years) then @@ -241,7 +239,7 @@ subroutine ice_write_hist (ns) call abort_ice(subname//'ERROR: invalid calendar settings') endif - if (hist_avg .and. .not. 
write_ic) then + if (hist_avg(ns) .and. .not. write_ic) then status = nf90_put_att(ncid,varid,'bounds','time_bounds') if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: time bounds') @@ -251,14 +249,14 @@ subroutine ice_write_hist (ns) ! Define attributes for time bounds if hist_avg is true !----------------------------------------------------------------- - if (hist_avg .and. .not. write_ic) then + if (hist_avg(ns) .and. .not. write_ic) then dimid(1) = boundid dimid(2) = timid status = nf90_def_var(ncid,'time_bounds',lprecision,dimid(1:2),varid) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: defining var time_bounds') status = nf90_put_att(ncid,varid,'long_name', & - 'boundaries for time-averaging interval') + 'time interval endpoints') if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: time_bounds long_name') write(cdate,'(i8.8)') idate0 @@ -268,6 +266,22 @@ subroutine ice_write_hist (ns) status = nf90_put_att(ncid,varid,'units',title) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: time_bounds units') + if (days_per_year == 360) then + status = nf90_put_att(ncid,varid,'calendar','360_day') + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: time calendar') + elseif (days_per_year == 365 .and. .not.use_leap_years ) then + status = nf90_put_att(ncid,varid,'calendar','noleap') + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: time calendar') + elseif (use_leap_years) then + status = nf90_put_att(ncid,varid,'calendar','Gregorian') + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: time calendar') + else + call abort_ice(subname//'ERROR: invalid calendar settings') + endif + endif !----------------------------------------------------------------- @@ -702,6 +716,12 @@ subroutine ice_write_hist (ns) 'ERROR: global attribute time_period_freq') endif + if (hist_avg(ns)) then + status = nf90_put_att(ncid,nf90_global,'time_axis_position',trim(hist_time_axis)) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: global attribute time axis position') + endif + title = 'CF-1.0' status = & nf90_put_att(ncid,nf90_global,'conventions',title) @@ -734,6 +754,15 @@ subroutine ice_write_hist (ns) ! write time variable !----------------------------------------------------------------- + ltime2 = timesecs/secday ! hist_time_axis = 'end' (default) + + ! Some coupled models require the time axis "stamp" to be in the middle + ! or even beginning of averaging interval. + if (hist_avg(ns)) then + if (trim(hist_time_axis) == "begin" ) ltime2 = time_beg(ns) + if (trim(hist_time_axis) == "middle") ltime2 = p5*(time_beg(ns)+time_end(ns)) + endif + status = nf90_inq_varid(ncid,'time',varid) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: getting time varid') @@ -745,7 +774,7 @@ subroutine ice_write_hist (ns) ! write time_bounds info !----------------------------------------------------------------- - if (hist_avg .and. .not. write_ic) then + if (hist_avg(ns) .and. .not. write_ic) then status = nf90_inq_varid(ncid,'time_bounds',varid) if (status /= nf90_noerr) call abort_ice(subname// & 'ERROR: getting time_bounds id') @@ -1279,7 +1308,7 @@ subroutine ice_write_hist_attrs(ncid, varid, hfield, ns) call ice_write_hist_fill(ncid,varid,hfield%vname,history_precision) ! Add cell_methods attribute to variables if averaged - if (hist_avg .and. .not. write_ic) then + if (hist_avg(ns) .and. .not. 
write_ic) then if (TRIM(hfield%vname(1:4))/='sig1' & .and.TRIM(hfield%vname(1:4))/='sig2' & .and.TRIM(hfield%vname(1:9))/='sistreave' & @@ -1292,7 +1321,7 @@ subroutine ice_write_hist_attrs(ncid, varid, hfield, ns) endif if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & - .or..not. hist_avg & + .or..not. hist_avg(ns) & .or. write_ic & .or.TRIM(hfield%vname(1:4))=='divu' & .or.TRIM(hfield%vname(1:5))=='shear' & diff --git a/cicecore/cicedyn/infrastructure/io/io_pio2/ice_history_write.F90 b/cicecore/cicedyn/infrastructure/io/io_pio2/ice_history_write.F90 index 25f9850ce..cf2f40521 100644 --- a/cicecore/cicedyn/infrastructure/io/io_pio2/ice_history_write.F90 +++ b/cicecore/cicedyn/infrastructure/io/io_pio2/ice_history_write.F90 @@ -18,7 +18,7 @@ module ice_history_write use ice_kinds_mod - use ice_constants, only: c0, c360, spval, spval_dbl + use ice_constants, only: c0, c360, p5, spval, spval_dbl use ice_fileunits, only: nu_diag use ice_exit, only: abort_ice use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted @@ -185,8 +185,6 @@ subroutine ice_write_hist (ns) call ice_pio_initdecomp(ndim3=nzslyr, ndim4=ncat_hist, iodesc=iodesc4ds, precision=history_precision) call ice_pio_initdecomp(ndim3=nfsd_hist, ndim4=ncat_hist, iodesc=iodesc4df, precision=history_precision) - ltime2 = timesecs/secday - ! option of turning on double precision history files lprecision = pio_real if (history_precision == 8) lprecision = pio_double @@ -195,8 +193,8 @@ subroutine ice_write_hist (ns) ! define dimensions !----------------------------------------------------------------- - if (hist_avg .and. .not. write_ic) then - status = pio_def_dim(File,'d2',2,boundid) + if (hist_avg(ns) .and. .not. write_ic) then + status = pio_def_dim(File,'nbnd',2,boundid) endif status = pio_def_dim(File,'ni',nx_global,imtid) @@ -215,7 +213,7 @@ subroutine ice_write_hist (ns) !----------------------------------------------------------------- status = pio_def_var(File,'time',pio_double,(/timid/),varid) - status = pio_put_att(File,varid,'long_name','model time') + status = pio_put_att(File,varid,'long_name','time') write(cdate,'(i8.8)') idate0 write(title,'(a,a4,a1,a2,a1,a2,a1,i2.2,a1,i2.2,a1,i2.2)') 'days since ', & @@ -226,24 +224,35 @@ subroutine ice_write_hist (ns) if (days_per_year == 360) then status = pio_put_att(File,varid,'calendar','360_day') elseif (days_per_year == 365 .and. .not.use_leap_years ) then - status = pio_put_att(File,varid,'calendar','NoLeap') + status = pio_put_att(File,varid,'calendar','noleap') elseif (use_leap_years) then status = pio_put_att(File,varid,'calendar','Gregorian') else call abort_ice(subname//'ERROR: invalid calendar settings') endif - if (hist_avg .and. .not. write_ic) then + if (hist_avg(ns) .and. .not. write_ic) then status = pio_put_att(File,varid,'bounds','time_bounds') endif ! Define attributes for time_bounds if hist_avg is true - if (hist_avg .and. .not. write_ic) then + if (hist_avg(ns) .and. .not. write_ic) then dimid2(1) = boundid dimid2(2) = timid status = pio_def_var(File,'time_bounds',pio_double,dimid2,varid) status = pio_put_att(File,varid,'long_name', & - 'boundaries for time-averaging interval') + 'time interval endpoints') + + if (days_per_year == 360) then + status = pio_put_att(File,varid,'calendar','360_day') + elseif (days_per_year == 365 .and. 
.not.use_leap_years ) then + status = pio_put_att(File,varid,'calendar','noleap') + elseif (use_leap_years) then + status = pio_put_att(File,varid,'calendar','Gregorian') + else + call abort_ice(subname//'ERROR: invalid calendar settings') + endif + write(cdate,'(i8.8)') idate0 write(title,'(a,a4,a1,a2,a1,a2,a1,i2.2,a1,i2.2,a1,i2.2)') 'days since ', & cdate(1:4),'-',cdate(5:6),'-',cdate(7:8),' ', & @@ -667,6 +676,9 @@ subroutine ice_write_hist (ns) status = pio_put_att(File,pio_global,'time_period_freq',trim(time_period_freq)) endif + if (hist_avg(ns)) & + status = pio_put_att(File,pio_global,'time_axis_position',trim(hist_time_axis)) + title = 'CF-1.0' status = & pio_put_att(File,pio_global,'conventions',trim(title)) @@ -695,6 +707,15 @@ subroutine ice_write_hist (ns) ! write time variable !----------------------------------------------------------------- + ltime2 = timesecs/secday ! hist_time_axis = 'end' (default) + + ! Some coupled models require the time axis "stamp" to be in the middle + ! or even beginning of averaging interval. + if (hist_avg(ns)) then + if (trim(hist_time_axis) == "begin" ) ltime2 = time_beg(ns) + if (trim(hist_time_axis) == "middle") ltime2 = p5*(time_beg(ns)+time_end(ns)) + endif + status = pio_inq_varid(File,'time',varid) status = pio_put_var(File,varid,(/1/),ltime2) @@ -702,7 +723,7 @@ subroutine ice_write_hist (ns) ! write time_bounds info !----------------------------------------------------------------- - if (hist_avg .and. .not. write_ic) then + if (hist_avg(ns) .and. .not. write_ic) then status = pio_inq_varid(File,'time_bounds',varid) time_bounds=(/time_beg(ns),time_end(ns)/) bnd_start = (/1,1/) @@ -1250,7 +1271,7 @@ subroutine ice_write_hist_attrs(File, varid, hfield, ns) call ice_write_hist_fill(File,varid,hfield%vname,history_precision) ! Add cell_methods attribute to variables if averaged - if (hist_avg .and. .not. write_ic) then + if (hist_avg(ns) .and. .not. write_ic) then if (TRIM(hfield%vname(1:4))/='sig1' & .and.TRIM(hfield%vname(1:4))/='sig2' & .and.TRIM(hfield%vname(1:9))/='sistreave' & @@ -1261,7 +1282,7 @@ subroutine ice_write_hist_attrs(File, varid, hfield, ns) endif if ((histfreq(ns) == '1' .and. histfreq_n(ns) == 1) & - .or..not. hist_avg & + .or..not. hist_avg(ns) & .or. write_ic & .or.TRIM(hfield%vname(1:4))=='divu' & .or.TRIM(hfield%vname(1:5))=='shear' & diff --git a/cicecore/drivers/nuopc/cmeps/ice_mesh_mod.F90 b/cicecore/drivers/nuopc/cmeps/ice_mesh_mod.F90 index a9b19df6b..601e59c7c 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_mesh_mod.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_mesh_mod.F90 @@ -559,7 +559,7 @@ subroutine ice_mesh_check(gcomp, ice_mesh, rc) ! Check CICE mesh - use ice_constants, only : c1,c0,c360 + use ice_constants, only : c1,c0,c180,c360 use ice_grid , only : tlon, tlat, hm ! input/output parameters @@ -583,7 +583,7 @@ subroutine ice_mesh_check(gcomp, ice_mesh, rc) real(dbl_kind) :: diff_lon real(dbl_kind) :: diff_lat real(dbl_kind) :: rad_to_deg - real(dbl_kind) :: tmplon, eps_imesh + real(dbl_kind) :: eps_imesh logical :: isPresent, isSet logical :: mask_error integer :: mask_internal @@ -637,19 +637,19 @@ subroutine ice_mesh_check(gcomp, ice_mesh, rc) lon(n) = tlon(i,j,iblk)*rad_to_deg lat(n) = tlat(i,j,iblk)*rad_to_deg - tmplon = lon(n) - if(tmplon < c0)tmplon = tmplon + c360 - ! 
error check differences between internally generated lons and those read in - diff_lon = abs(mod(lonMesh(n) - tmplon,360.0)) - if (diff_lon > eps_imesh ) then - write(6,100)n,lonMesh(n),tmplon, diff_lon - !call abort_ice(error_message=subname, file=__FILE__, line=__LINE__) + diff_lon = mod(abs(lonMesh(n) - lon(n)),360.0) + if (diff_lon > c180) then + diff_lon = diff_lon - c360 + endif + if (abs(diff_lon) > eps_imesh ) then + write(6,100)n,lonMesh(n),lon(n), diff_lon + call abort_ice(error_message=subname, file=__FILE__, line=__LINE__) end if diff_lat = abs(latMesh(n) - lat(n)) if (diff_lat > eps_imesh) then write(6,101)n,latMesh(n),lat(n), diff_lat - !call abort_ice(error_message=subname, file=__FILE__, line=__LINE__) + call abort_ice(error_message=subname, file=__FILE__, line=__LINE__) end if enddo enddo diff --git a/cicecore/drivers/unittest/optargs/optargs.F90 b/cicecore/drivers/unittest/optargs/optargs.F90 index 4acf7ac9f..5d66539b9 100644 --- a/cicecore/drivers/unittest/optargs/optargs.F90 +++ b/cicecore/drivers/unittest/optargs/optargs.F90 @@ -1,45 +1,52 @@ program optargs - use optargs_subs, only: computeA, computeB, computeC, computeD - use optargs_subs, only: oa_error, oa_OK, oa_A, oa_B, oa_C, oa_D + use optargs_subs, only: dp + use optargs_subs, only: computeA, computeB, computeC, computeD, computeE + use optargs_subs, only: oa_error, oa_OK, oa_A, oa_B, oa_C, oa_D, oa_E use optargs_subs, only: oa_layer1, oa_count1 implicit none - real*8 :: Ai1, Ao - real*8 :: B - real*8 :: Ci1, Co - real*8 :: Di1, Di2, Do + real(dp):: Ai1, Ao + real(dp):: B + real(dp):: Ci1, Co + real(dp):: Di1, Di2, Do + real(dp), allocatable :: Ei(:),Eo(:) integer :: ierr, ierrV integer :: n integer, parameter :: ntests = 100 integer :: iresult - real*8 :: result, resultV - real*8, parameter :: errtol = 1.0e-12 + real(dp):: result, resultV + real(dp), parameter :: dpic = -99._dp + real(dp), parameter :: errtol = 1.0e-12 !---------------------- write(6,*) 'RunningUnitTest optargs' write(6,*) ' ' + allocate(Ei(3),Eo(3)) + iresult = 0 do n = 1,ntests - Ai1 = -99.; Ao = -99. - B = -99. - Ci1 = -99.; Co = -99. - Di1 = -99.; Di2 = -99.; Do = -99. + Ai1 = dpic; Ao = dpic + B = dpic + Ci1 = dpic; Co = dpic + Di1 = dpic; Di2 = dpic; Do = dpic + Ei = dpic; Eo = dpic ierr = oa_error - result = -888. - resultV = -999. + result = -888._dp + resultV = -999._dp computeA = .false. computeB = .false. computeC = .false. computeD = .false. + computeE = .false. select case (n) @@ -56,8 +63,8 @@ program optargs call oa_count1(Ai1=Ai1,Ao=Ao,B=B,Ci1=Ci1,Co=Co,Di1=Di1,Di2=Di2,Do=Do,ierr=ierr) case(2) result = -777.; resultV = -777. - ierrV = 9 - call oa_count1(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,ierr) + ierrV = 11 + call oa_count1(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,Ei,Eo,ierr) case(3) result = -777.; resultV = -777. ierrV = 3 @@ -66,6 +73,10 @@ program optargs result = -777.; resultV = -777. ierrV = 5 call oa_count1(Ci1=Ci1,Co=Co,ierr=ierr,Ao=Ao,Di1=Di1) + case(5) + result = -777.; resultV = -777. + ierrV = 8 + call oa_count1(Ai1,Ao,B,Ci1,Co,Di1,Di2,ierr=ierr) ! test optional order case(11) @@ -80,6 +91,10 @@ program optargs result = -777.; resultV = -777. ierrV = oa_OK call oa_layer1(Ci1=Ci1,Co=Co,ierr=ierr,Ao=Ao,Di1=Di1) + case(14) + result = -777.; resultV = -777. + ierrV = oa_OK + call oa_layer1(Eo=Eo,Ei=Ei,Ci1=Ci1,Co=Co,ierr=ierr,Ao=Ao,Di1=Di1) ! test optional argument checking case(21) @@ -87,15 +102,17 @@ program optargs computeB = .true. computeC = .true. computeD = .true. + computeE = .true. result = -777.; resultV = -777. ierrV = oa_error ! 
B missing - call oa_layer1(Ai1=Ai1,Ao=Ao,Ci1=Ci1,Co=Co,Di1=Di1,Di2=Di2,Do=Do,ierr=ierr) + call oa_layer1(Ai1=Ai1,Ao=Ao,Ci1=Ci1,Co=Co,Di1=Di1,Di2=Di2,Do=Do,Ei=Ei,Eo=Eo,ierr=ierr) case(22) computeA = .true. computeB = .true. computeC = .true. computeD = .true. + computeE = .true. result = -777.; resultV = -777. ierrV = oa_error ! all optional missing @@ -105,61 +122,117 @@ program optargs computeB = .true. computeC = .true. computeD = .true. + computeE = .true. result = -777.; resultV = -777. ierrV = oa_error ! some optional missing - call oa_layer1(Ci1=Ci1,Co=Co,ierr=ierr,B=B,Ao=Ao,Di1=Di1) + call oa_layer1(Ci1=Ci1,Co=Co,Eo=Eo,ierr=ierr,B=B,Ao=Ao,Di1=Di1) case(24) computeA = .true. computeB = .true. computeC = .true. computeD = .true. + computeE = .true. result = -777.; resultV = -777. ierrV = oa_error ! one optional missing - call oa_layer1(Ai1=Ai1,Ao=Ao,B=B,Ci1=Ci1,Co=Co,Di1=Di1,Do=Do,ierr=ierr) + call oa_layer1(Ai1=Ai1,Ao=Ao,B=B,Ci1=Ci1,Co=Co,Di1=Di1,Do=Do,Ei=Ei,Eo=Eo,ierr=ierr) + case(25) + computeA = .true. + computeB = .true. + computeC = .true. + computeD = .true. + computeE = .true. + result = -777.; resultV = -777. + ierrV = oa_error + ! Ei missing + call oa_layer1(Ai1=Ai1,Ao=Ao,B=B,Ci1=Ci1,Co=Co,Di1=Di1,Di2=Di2,Do=Do,Eo=Eo,ierr=ierr) - ! test computations individually + ! test computations individually, all args case(31) computeA = .true. ierrV = oa_A Ai1 = 5. resultV = 4. - call oa_layer1(Ai1=Ai1,Ao=Ao,B=B,Ci1=Ci1,Co=Co,Di1=Di1,Di2=Di2,Do=Do,ierr=ierr) + call oa_layer1(Ai1=Ai1,Ao=Ao,B=B,Ci1=Ci1,Co=Co,Di1=Di1,Di2=Di2,Do=Do,Ei=Ei,Eo=Eo,ierr=ierr) result = Ao case(32) computeB = .true. ierrV = oa_B B = 15. resultV = 20. - call oa_layer1(ierr=ierr,Ai1=Ai1,Ao=Ao,B=B,Ci1=Ci1,Co=Co,Di1=Di1,Di2=Di2,Do=Do) + call oa_layer1(ierr=ierr,Ai1=Ai1,Ao=Ao,B=B,Ci1=Ci1,Co=Co,Di1=Di1,Di2=Di2,Do=Do,Ei=Ei,Eo=Eo) result = B case(33) computeC = .true. ierrV = oa_C Ci1 = 7. resultV = 14. - call oa_layer1(B=B,Ci1=Ci1,Co=Co,Di1=Di1,Ai1=Ai1,Ao=Ao,Di2=Di2,Do=Do,ierr=ierr) + call oa_layer1(B=B,Ci1=Ci1,Co=Co,Di1=Di1,Ai1=Ai1,Ao=Ao,Di2=Di2,Do=Do,ierr=ierr,Ei=Ei,Eo=Eo) result = Co case(34) computeD = .true. ierrV = oa_D Di1 = 19; Di2=11. resultV = 30. - call oa_layer1(Ai1=Ai1,Ao=Ao,Ci1=Ci1,Co=Co,Di1=Di1,Di2=Di2,Do=Do,B=B,ierr=ierr) + call oa_layer1(Ai1=Ai1,Ao=Ao,Ci1=Ci1,Co=Co,Di1=Di1,Ei=Ei,Eo=Eo,Di2=Di2,Do=Do,B=B,ierr=ierr) result = Do + case(35) + computeE = .true. + ierrV = oa_E + Ei = 25. + resultV = 81. + call oa_layer1(Ai1=Ai1,Ao=Ao,Ci1=Ci1,Co=Co,Ei=Ei,Eo=Eo,Di1=Di1,Di2=Di2,Do=Do,B=B,ierr=ierr) + result = sum(Eo) - ! test computations individually + ! test computations individually, min args case(41) + computeA = .true. + ierrV = oa_A + Ai1 = 5. + resultV = 4. + call oa_layer1(Ao=Ao,Co=Co,Ai1=Ai1,Ci1=Ci1,ierr=ierr) + result = Ao + case(42) + computeB = .true. + ierrV = oa_B + B = 15. + resultV = 20. + call oa_layer1(ierr=ierr,Ci1=Ci1,Co=Co,B=B) + result = B + case(43) + computeC = .true. + ierrV = oa_C + Ci1 = 7. + resultV = 14. + call oa_layer1(Ci1=Ci1,Co=Co,ierr=ierr) + result = Co + case(44) + computeD = .true. + ierrV = oa_D + Di1 = 19; Di2=11. + resultV = 30. + call oa_layer1(Ci1=Ci1,Di1=Di1,Di2=Di2,Co=Co,Do=Do,ierr=ierr) + result = Do + case(45) + computeE = .true. + ierrV = oa_E + Ei = 25. + resultV = 81. + call oa_layer1(Ci1=Ci1,Co=Co,Ei=Ei,Eo=Eo,ierr=ierr) + result = sum(Eo) + + ! test computations in groups, mix of passed arguments + case(51) computeA = .true. computeC = .true. ierrV = oa_A + oa_C Ai1 = 6. Ci1 = 8. resultV = 21. 
- call oa_layer1(Ai1=Ai1,Ao=Ao,B=B,Ci1=Ci1,Co=Co,Di1=Di1,Di2=Di2,Do=Do,ierr=ierr) + call oa_layer1(Ai1=Ai1,Ao=Ao,B=B,Ci1=Ci1,Co=Co,Eo=Eo,ierr=ierr) result = Ao + Co - case(42) + case(52) computeB = .true. computeC = .true. ierrV = oa_B + oa_C @@ -168,7 +241,7 @@ program optargs resultV = -11. call oa_layer1(ierr=ierr,Ai1=Ai1,Ao=Ao,B=B,Ci1=Ci1,Co=Co,Di1=Di1,Di2=Di2,Do=Do) result = B + Co - case(43) + case(53) computeB = .true. computeD = .true. ierrV = oa_B + oa_D @@ -177,7 +250,7 @@ program optargs resultV = 31. call oa_layer1(B=B,Ci1=Ci1,Co=Co,Di1=Di1,Ai1=Ai1,Ao=Ao,Di2=Di2,Do=Do,ierr=ierr) result = B + Do - case(44) + case(54) computeC = .true. computeD = .true. ierrV = oa_C + oa_D @@ -186,20 +259,22 @@ program optargs resultV = 27. call oa_layer1(Ai1=Ai1,Ao=Ao,Ci1=Ci1,Co=Co,Di1=Di1,Di2=Di2,Do=Do,B=B,ierr=ierr) result = Co + Do - case(45) + case(55) computeA = .true. computeB = .true. computeC = .true. computeD = .true. - ierrV = oa_A + oa_B + oa_C + oa_D + computeE = .true. + ierrV = oa_A + oa_B + oa_C + oa_D + oa_E Ai1 = 7. B = 9. Ci1 = 7. Di1 = 12; Di2=3. - resultV = 49. - call oa_layer1(Ao=Ao,B=B,Co=Co,Do=Do,Ai1=Ai1,Ci1=Ci1,Di1=Di1,Di2=Di2,ierr=ierr) - result = Ao + B + Co + Do - case(46) + Ei = 5 + resultV = 70. + call oa_layer1(Ao=Ao,B=B,Co=Co,Do=Do,Ai1=Ai1,Ci1=Ci1,Di1=Di1,Di2=Di2,Ei=Ei,Eo=Eo,ierr=ierr) + result = Ao + B + Co + Do + sum(Eo) + case(56) computeA = .true. computeB = .true. computeD = .true. @@ -210,6 +285,15 @@ program optargs resultV = 40. call oa_layer1(Ao=Ao,B=B,Co=Co,Do=Do,Ai1=Ai1,Ci1=Ci1,Di1=Di1,Di2=Di2,ierr=ierr) result = Ao + B + Do + case(57) + computeB = .true. + computeE = .true. + ierrV = oa_B + oa_E + B = 4. + Ei = 8. + resultV = 39. + call oa_layer1(B=B,Ci1=Ci1,Co=Co,Di1=Di1,Ai1=Ai1,Ao=Ao,Di2=Di2,Do=Do,Ei=Ei,Eo=Eo,ierr=ierr) + result = B + sum(Eo) case DEFAULT ierr = -1234 @@ -219,10 +303,10 @@ program optargs ! skip -1234 if (ierr /= -1234) then if (ierr == ierrV .and. abs(result-resultV) < errtol ) then - write(6,101) 'PASS','optarg test',n,ierr,ierrV,result,resultV,Ao,B,Co,Do + write(6,101) 'PASS','optarg test',n,ierr,ierrV,result,resultV,Ao,B,Co,Do,sum(Eo) ! write(6,101) 'PASS','optarg test',n,ierr,ierrV,result,resultV else - write(6,101) 'FAIL','optarg test',n,ierr,ierrV,result,resultV,Ao,B,Co,Do + write(6,101) 'FAIL','optarg test',n,ierr,ierrV,result,resultV,Ao,B,Co,Do,sum(Eo) ! write(6,101) 'FAIL','optarg test',n,ierr,ierrV,result,resultV iresult = 1 endif @@ -230,7 +314,7 @@ program optargs enddo - 101 format(1x,a,1x,a,1x,i2.2,2i6,3x,6g11.4) + 101 format(1x,a,1x,a,1x,i2.2,2i6,3x,8g11.4) write(6,*) ' ' write(6,*) 'optargs COMPLETED SUCCESSFULLY' diff --git a/cicecore/drivers/unittest/optargs/optargs_subs.F90 b/cicecore/drivers/unittest/optargs/optargs_subs.F90 index 7469d6800..4269ed23b 100644 --- a/cicecore/drivers/unittest/optargs/optargs_subs.F90 +++ b/cicecore/drivers/unittest/optargs/optargs_subs.F90 @@ -4,17 +4,21 @@ module optargs_subs implicit none private + integer, public, parameter :: dp = kind(1.d0) + logical, public :: computeA = .false., & computeB = .false., & computeC = .false., & - computeD = .false. + computeD = .false., & + computeE = .false. 
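! Note on the error codes declared below: they are powers of two, so
! each enabled computation contributes a distinct bit to ierr and the
! combined tests in the driver can validate a single sum, e.g. case(55)
! expects
!    ierrV = oa_A + oa_B + oa_C + oa_D + oa_E = 1 + 2 + 4 + 8 + 16 = 31
! and any computation that fails to run is identifiable from its
! missing bit.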
integer, public :: oa_error = -99, & oa_OK = 0, & oa_A = 1, & oa_B = 2, & oa_C = 4, & - oa_D = 8 + oa_D = 8, & + oa_E = 16 public :: oa_layer1, oa_count1 @@ -22,16 +26,18 @@ module optargs_subs CONTAINS !----------------------------------- - subroutine oa_count1(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,ierr) + subroutine oa_count1(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,Ei,Eo,ierr) - real*8 , intent(in) , optional :: Ai1, Di1, Di2 - real*8 , intent(out) , optional :: Ao, Do - real*8 , intent(inout), optional :: B - real*8 , intent(in) :: Ci1 - real*8 , intent(out) :: Co - integer, intent(inout) :: ierr + real(dp), intent(in) , optional :: Ai1, Di1, Di2 + real(dp), intent(out) , optional :: Ao, Do + real(dp), intent(inout), optional :: B + real(dp), intent(in) :: Ci1 + real(dp), intent(out) :: Co + real(dp), intent(in) , optional, dimension(:) :: Ei + real(dp), intent(out) , optional, dimension(:) :: Eo + integer , intent(inout) :: ierr - call oa_count2(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,ierr) + call oa_count2(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,Ei,Eo,ierr) ! write(6,*) 'debug oa_count1 ',ierr @@ -39,14 +45,16 @@ end subroutine oa_count1 !----------------------------------- - subroutine oa_count2(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,ierr) + subroutine oa_count2(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,Ei,Eo,ierr) - real*8 , intent(in) , optional :: Ai1, Di1, Di2 - real*8 , intent(out) , optional :: Ao, Do - real*8 , intent(inout), optional :: B - real*8 , intent(in) :: Ci1 - real*8 , intent(out) :: Co - integer, intent(inout) :: ierr + real(dp), intent(in) , optional :: Ai1, Di1, Di2 + real(dp), intent(out) , optional :: Ao, Do + real(dp), intent(inout), optional :: B + real(dp), intent(in) :: Ci1 + real(dp), intent(out) :: Co + real(dp), intent(in) , optional, dimension(:) :: Ei + real(dp), intent(out) , optional, dimension(:) :: Eo + integer , intent(inout) :: ierr ierr = 3 ! Ci1, Co, ierr have to be passed if (present(Ai1)) ierr = ierr + 1 @@ -55,6 +63,8 @@ subroutine oa_count2(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,ierr) if (present(Di1)) ierr = ierr + 1 if (present(Di2)) ierr = ierr + 1 if (present(Do) ) ierr = ierr + 1 + if (present(Ei) ) ierr = ierr + 1 + if (present(Eo) ) ierr = ierr + 1 ! write(6,*) 'debug oa_count2 ',ierr @@ -62,14 +72,16 @@ end subroutine oa_count2 !----------------------------------- - subroutine oa_layer1(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,ierr) + subroutine oa_layer1(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,Ei,Eo,ierr) - real*8 , intent(in) , optional :: Ai1, Di1, Di2 - real*8 , intent(out) , optional :: Ao, Do - real*8 , intent(inout), optional :: B - real*8 , intent(in) :: Ci1 - real*8 , intent(out) :: Co - integer, intent(inout) :: ierr + real(dp), intent(in) , optional :: Ai1, Di1, Di2 + real(dp), intent(out) , optional :: Ao, Do + real(dp), intent(inout), optional :: B + real(dp), intent(in) :: Ci1 + real(dp), intent(out) :: Co + real(dp), intent(in) , optional, dimension(:) :: Ei + real(dp), intent(out) , optional, dimension(:) :: Eo + integer , intent(inout) :: ierr ierr = oa_OK if (computeA) then @@ -87,38 +99,55 @@ subroutine oa_layer1(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,ierr) ierr = oa_error endif endif + if (computeE) then + if (.not.(present(Ei).and.present(Eo))) then + ierr = oa_error + endif + endif if (ierr == oa_OK) then - call oa_layer2(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,ierr) + call oa_layer2(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,Ei,Eo,ierr) endif end subroutine oa_layer1 !----------------------------------- - subroutine oa_layer2(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,ierr) + subroutine oa_layer2(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,Ei,Eo,ierr) + +! 
Note: optional arrays must have an optional attribute, otherwise they seg fault +! Scalars do not seem to have this problem - real*8 , intent(in) , optional :: Ai1, Di1, Di2 - real*8 , intent(out) , optional :: Ao, Do - real*8 , intent(inout), optional :: B - real*8 , intent(in) :: Ci1 - real*8 , intent(out) :: Co - integer, intent(inout) :: ierr + real(dp), intent(in) :: Ai1, Di1, Di2 + real(dp), intent(out) :: Ao, Do + real(dp), intent(inout) :: B + real(dp), intent(in) :: Ci1 + real(dp), intent(out) :: Co + real(dp), intent(in) , optional, dimension(:) :: Ei + real(dp), intent(out) , optional, dimension(:) :: Eo + integer , intent(inout) :: ierr - call oa_compute(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,ierr) + call oa_compute(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,Ei,Eo,ierr) end subroutine oa_layer2 !----------------------------------- - subroutine oa_compute(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,ierr) + subroutine oa_compute(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,Ei,Eo,ierr) - real*8 , intent(in) , optional :: Ai1, Di1, Di2 - real*8 , intent(out) , optional :: Ao, Do - real*8 , intent(inout), optional :: B - real*8 , intent(in) :: Ci1 - real*8 , intent(out) :: Co - integer, intent(inout) :: ierr +! Note: optional arrays must have an optional attribute, otherwise they seg fault +! Scalars do not seem to have this problem + + real(dp), intent(in) :: Ai1, Di1, Di2 + real(dp), intent(out) :: Ao, Do + real(dp), intent(inout) :: B + real(dp), intent(in) :: Ci1 + real(dp), intent(out) :: Co + real(dp), intent(in) , optional, dimension(:) :: Ei + real(dp), intent(out) , optional, dimension(:) :: Eo + integer , intent(inout) :: ierr + + integer :: n if (computeA) then Ao = Ai1 - 1. @@ -140,6 +169,13 @@ subroutine oa_compute(Ai1,Ao,B,Ci1,Co,Di1,Di2,Do,ierr) ierr = ierr + oa_D endif + if (computeE) then + ierr = ierr + oa_E + do n = 1,size(Eo) + Eo(n) = Ei(n) + n + enddo + endif + return end subroutine oa_compute diff --git a/cicecore/drivers/unittest/opticep/CICE.F90 b/cicecore/drivers/unittest/opticep/CICE.F90 new file mode 100644 index 000000000..79dd06fca --- /dev/null +++ b/cicecore/drivers/unittest/opticep/CICE.F90 @@ -0,0 +1,59 @@ +!======================================================================= +! Copyright (c) 2022, Triad National Security, LLC +! All rights reserved. +! +! Copyright 2022. Triad National Security, LLC. This software was +! produced under U.S. Government contract DE-AC52-06NA25396 for Los +! Alamos National Laboratory (LANL), which is operated by Triad +! National Security, LLC for the U.S. Department of Energy. The U.S. +! Government has rights to use, reproduce, and distribute this software. +! NEITHER THE GOVERNMENT NOR TRIAD NATIONAL SECURITY, LLC MAKES ANY +! WARRANTY, EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF +! THIS SOFTWARE. If software is modified to produce derivative works, +! such modified software should be clearly marked, so as not to confuse +! it with the version available from LANL. +! +! The full license and distribution policy are available from +! https://github.com/CICE-Consortium +! +!======================================================================= +! +! Main driver routine for CICE. Initializes and steps through the model. +! This program should be compiled if CICE is run as a separate executable, +! but not if CICE subroutines are called from another program (e.g., CAM). +! +! authors Elizabeth C. Hunke and William H. Lipscomb, LANL +! +! 2006: Converted to free form source (F90) by Elizabeth Hunke +! 2008: E. Hunke moved ESMF code to its own driver +! 
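! Note on the optargs changes above: oa_layer2 and oa_compute now
! declare the scalar arguments as non-optional dummies while the array
! arguments Ei/Eo keep the optional attribute. As we read the Fortran
! standard (12.5.2.12 in F2008), an absent optional actual argument may
! only be associated with an optional dummy, so the scalar cases rely
! on compilers tolerating the association as long as the dummy is never
! referenced; per the comment in the patch, the optional arrays do
! segfault without the attribute while the scalars do not.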
+ program icemodel
+
+ use CICE_InitMod
+ use CICE_RunMod
+ use CICE_FinalMod
+
+ implicit none
+ character(len=*), parameter :: subname='(icemodel)'
+
+ !-----------------------------------------------------------------
+ ! Initialize CICE
+ !-----------------------------------------------------------------
+
+ call CICE_Initialize
+
+ !-----------------------------------------------------------------
+ ! Run CICE
+ !-----------------------------------------------------------------
+
+ call CICE_Run
+
+ !-----------------------------------------------------------------
+ ! Finalize CICE
+ !-----------------------------------------------------------------
+
+ call CICE_Finalize
+
+ end program icemodel
+
+!=======================================================================
diff --git a/cicecore/drivers/unittest/opticep/CICE_FinalMod.F90 b/cicecore/drivers/unittest/opticep/CICE_FinalMod.F90
new file mode 100644
index 000000000..02494bd9c
--- /dev/null
+++ b/cicecore/drivers/unittest/opticep/CICE_FinalMod.F90
@@ -0,0 +1,71 @@
+!=======================================================================
+!
+! This module contains routines for the final exit of the CICE model,
+! including final output and clean exit from any message passing
+! environments and frameworks.
+!
+! authors: Philip W. Jones, LANL
+! 2006: Converted to free source form (F90) by Elizabeth Hunke
+! 2008: E. Hunke moved ESMF code to its own driver
+
+ module CICE_FinalMod
+
+ use ice_kinds_mod
+ use ice_communicate, only: my_task, master_task
+ use ice_exit, only: end_run, abort_ice
+ use ice_fileunits, only: nu_diag, release_all_fileunits
+ use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted
+
+ implicit none
+ private
+ public :: CICE_Finalize
+
+!=======================================================================
+
+ contains
+
+!=======================================================================
+!
+! This routine shuts down CICE by exiting all relevant environments.
+
+ subroutine CICE_Finalize
+
+ use ice_timers, only: ice_timer_stop, ice_timer_print_all, timer_total, &
+ timer_stats
+
+ character(len=*), parameter :: subname = '(CICE_Finalize)'
+
+ !-------------------------------------------------------------------
+ ! stop timers and print timer info
+ !-------------------------------------------------------------------
+
+ call ice_timer_stop(timer_total) ! stop timing entire run
+ call ice_timer_print_all(stats=timer_stats) ! print timing information
+
+ call icepack_warnings_flush(nu_diag)
+ if (icepack_warnings_aborted()) call abort_ice(error_message=subname, &
+ file=__FILE__,line= __LINE__)
+
+ if (my_task == master_task) then
+ write(nu_diag, *) " "
+ write(nu_diag, *) "CICE COMPLETED SUCCESSFULLY "
+ write(nu_diag, *) "OPTICEP TEST COMPLETED SUCCESSFULLY "
+ write(nu_diag, *) " "
+ endif
+
+!echmod if (nu_diag /= 6) close (nu_diag) ! diagnostic output
+ call release_all_fileunits
+
+ !-------------------------------------------------------------------
+ ! quit MPI
+ !-------------------------------------------------------------------
+
+ call end_run !
quit MPI + + end subroutine CICE_Finalize + +!======================================================================= + + end module CICE_FinalMod + +!======================================================================= diff --git a/cicecore/drivers/unittest/opticep/CICE_InitMod.F90 b/cicecore/drivers/unittest/opticep/CICE_InitMod.F90 new file mode 100644 index 000000000..0371c7f38 --- /dev/null +++ b/cicecore/drivers/unittest/opticep/CICE_InitMod.F90 @@ -0,0 +1,517 @@ +!======================================================================= +! +! This module contains the CICE initialization routine that sets model +! parameters and initializes the grid and CICE state variables. +! +! authors Elizabeth C. Hunke, LANL +! William H. Lipscomb, LANL +! Philip W. Jones, LANL +! +! 2006: Converted to free form source (F90) by Elizabeth Hunke +! 2008: E. Hunke moved ESMF code to its own driver + + module CICE_InitMod + + use ice_kinds_mod + use ice_exit, only: abort_ice + use ice_fileunits, only: init_fileunits, nu_diag + use ice_memusage, only: ice_memusage_init, ice_memusage_print + use icepack_intfc, only: icepack_aggregate + use icepack_intfc, only: icepack_init_itd, icepack_init_itd_hist + use icepack_intfc, only: icepack_init_fsd_bounds, icepack_init_wave + use icepack_intfc, only: icepack_init_snow + use icepack_intfc, only: icepack_configure + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc, only: icepack_query_parameters, icepack_query_tracer_flags, & + icepack_query_tracer_indices, icepack_query_tracer_sizes + + implicit none + private + public :: CICE_Initialize, cice_init + +!======================================================================= + + contains + +!======================================================================= + +! Initialize the basic state, grid and all necessary parameters for +! running the CICE model. Return the initial state in routine +! export state. +! Note: This initialization driver is designed for standalone and +! CESM-coupled applications. For other +! applications (e.g., standalone CAM), this driver would be +! replaced by a different driver that calls subroutine cice_init, +! where most of the work is done. + + subroutine CICE_Initialize + + character(len=*), parameter :: subname='(CICE_Initialize)' + !-------------------------------------------------------------------- + ! model initialization + !-------------------------------------------------------------------- + + call cice_init + + end subroutine CICE_Initialize + +!======================================================================= +! +! Initialize CICE model. 
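To make the note above concrete: CICE_Initialize is deliberately a thin wrapper, so a host model that supplies its own driver can call cice_init directly. A toy sketch of that wrapper arrangement (the module and routine names below are invented for illustration, not CICE interfaces):

   module toy_ice_init
      implicit none
      private
      public :: toy_initialize, toy_init_worker
   contains
      subroutine toy_initialize      ! analogue of CICE_Initialize
         call toy_init_worker        ! thin wrapper, nothing else
      end subroutine toy_initialize
      subroutine toy_init_worker     ! analogue of cice_init
         write(*,*) 'most of the initialization work happens here'
      end subroutine toy_init_worker
   end module toy_ice_init

   program toy_host
      use toy_ice_init, only: toy_init_worker
      implicit none
      call toy_init_worker           ! a host driver (e.g. CAM) skips the wrapper
   end program toy_host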
+ + subroutine cice_init + + use ice_arrays_column, only: hin_max, c_hi_range, alloc_arrays_column + use ice_arrays_column, only: floe_rad_l, floe_rad_c, & + floe_binwidth, c_fsd_range + use ice_state, only: alloc_state + use ice_flux_bgc, only: alloc_flux_bgc + use ice_calendar, only: dt, dt_dyn, write_ic, & + init_calendar, advance_timestep, calc_timesteps + use ice_communicate, only: init_communicate, my_task, master_task + use ice_diagnostics, only: init_diags + use ice_domain, only: init_domain_blocks + use ice_domain_size, only: ncat, nfsd + use ice_dyn_eap, only: init_eap + use ice_dyn_evp, only: init_evp + use ice_dyn_vp, only: init_vp + use ice_dyn_shared, only: kdyn + use ice_flux, only: init_coupler_flux, init_history_therm, & + init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux + use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & + get_forcing_atmo, get_forcing_ocn, get_wave_spec, init_snowtable + use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & + faero_default, faero_optics, alloc_forcing_bgc, fiso_default + use ice_grid, only: init_grid1, init_grid2, alloc_grid + use ice_history, only: init_hist, accum_hist + use ice_restart_shared, only: restart, runtype + use ice_init, only: input_data, init_state + use ice_init_column, only: init_thermo_vertical, init_shortwave, init_zbgc, input_zbgc, count_tracers + use ice_kinds_mod + use ice_restoring, only: ice_HaloRestore_init + use ice_timers, only: timer_total, init_ice_timers, ice_timer_start + use ice_transport_driver, only: init_transport + + logical(kind=log_kind) :: tr_aero, tr_zaero, skl_bgc, z_tracers, & + tr_iso, tr_fsd, wave_spec, tr_snow + character(len=char_len) :: snw_aging_table + character(len=*), parameter :: subname = '(cice_init)' + + call init_communicate ! initial setup for message passing + call init_fileunits ! unit numbers + + ! tcx debug, this will create a different logfile for each pe + ! if (my_task /= master_task) nu_diag = 100+my_task + + call icepack_configure() ! initialize icepack + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(trim(subname), & + file=__FILE__,line= __LINE__) + + call input_data ! namelist variables + call input_zbgc ! vertical biogeochemistry namelist + call count_tracers ! count tracers + + ! Call this as early as possible, must be after memory_stats is read + if (my_task == master_task) then + call ice_memusage_init(nu_diag) + call ice_memusage_print(nu_diag,subname//':start') + endif + + call init_domain_blocks ! set up block decomposition + call init_grid1 ! domain distribution + call alloc_grid ! allocate grid arrays + call alloc_arrays_column ! allocate column arrays + call alloc_state ! allocate state arrays + call alloc_flux_bgc ! allocate flux_bgc arrays + call alloc_flux ! allocate flux arrays + call init_ice_timers ! initialize all timers + call ice_timer_start(timer_total) ! start timing entire run + call init_grid2 ! grid variables + call init_zbgc ! vertical biogeochemistry initialization + call init_calendar ! initialize some calendar stuff + call init_hist (dt) ! initialize output history file + + if (kdyn == 1) then + call init_evp + else if (kdyn == 2) then + call init_eap ! define eap dynamics parameters, variables + else if (kdyn == 3) then + call init_vp ! define vp dynamics parameters, variables + endif + + call init_coupler_flux ! initialize fluxes exchanged with coupler + + call init_thermo_vertical ! initialize vertical thermodynamics + + call icepack_init_itd(ncat=ncat, hin_max=hin_max) ! 
ice thickness distribution + if (my_task == master_task) then + call icepack_init_itd_hist(ncat=ncat, hin_max=hin_max, c_hi_range=c_hi_range) ! output + endif + + call icepack_query_tracer_flags(tr_fsd_out=tr_fsd) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(trim(subname), & + file=__FILE__,line= __LINE__) + + if (tr_fsd) call icepack_init_fsd_bounds (nfsd, & ! floe size distribution + floe_rad_l, & ! fsd size lower bound in m (radius) + floe_rad_c, & ! fsd size bin centre in m (radius) + floe_binwidth, & ! fsd size bin width in m (radius) + c_fsd_range, & ! string for history output + write_diags=(my_task == master_task)) ! write diag on master only + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + call init_forcing_ocn(dt) ! initialize sss and sst from data + call init_state ! initialize the ice state + call init_transport ! initialize horizontal transport + call ice_HaloRestore_init ! restored boundary conditions + + call icepack_query_parameters(skl_bgc_out=skl_bgc, z_tracers_out=z_tracers, & + wave_spec_out=wave_spec, snw_aging_table_out=snw_aging_table) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(trim(subname), & + file=__FILE__,line= __LINE__) + + if (skl_bgc .or. z_tracers) call alloc_forcing_bgc ! allocate biogeochemistry arrays + + call init_restart ! initialize restart variables + call init_diags ! initialize diagnostic output points + call init_history_therm ! initialize thermo history variables + call init_history_dyn ! initialize dynamic history variables + call calc_timesteps ! update timestep counter if not using npt_unit="1" + + call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_zaero_out=tr_zaero) + call icepack_query_tracer_flags(tr_iso_out=tr_iso, tr_snow_out=tr_snow) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(trim(subname), & + file=__FILE__,line= __LINE__) + + if (tr_aero .or. tr_zaero) call faero_optics !initialize aerosol optical + !property tables + + ! Initialize shortwave components using swdn from previous timestep + ! if restarting. These components will be scaled to current forcing + ! in prep_radiation. + if (trim(runtype) == 'continue' .or. restart) & + call init_shortwave ! initialize radiative transfer + + if (write_ic) call accum_hist(dt) ! write initial conditions + +! tcraig, use advance_timestep here +! istep = istep + 1 ! update time step counters +! istep1 = istep1 + 1 +! time = time + dt ! determine the time and date +! call calendar(time) ! at the end of the first timestep + call advance_timestep() + + !-------------------------------------------------------------------- + ! coupler communication or forcing data initialization + !-------------------------------------------------------------------- + + call init_forcing_atmo ! initialize atmospheric forcing (standalone) + + if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice + call get_forcing_atmo ! atmospheric forcing from data + call get_forcing_ocn(dt) ! ocean forcing from data + + ! snow aging lookup table initialization + if (tr_snow) then ! advanced snow physics + call icepack_init_snow() + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + if (snw_aging_table(1:4) /= 'test') then + call init_snowtable() + endif + endif + + ! 
isotopes + if (tr_iso) call fiso_default ! default values + + ! aerosols + ! if (tr_aero) call faero_data ! data file + ! if (tr_zaero) call fzaero_data ! data file (gx1) + if (tr_aero .or. tr_zaero) call faero_default ! default values + if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry + if (z_tracers) call get_atm_bgc ! biogeochemistry + + if (runtype == 'initial' .and. .not. restart) & + call init_shortwave ! initialize radiative transfer using current swdn + + call init_flux_atm ! initialize atmosphere fluxes sent to coupler + call init_flux_ocn ! initialize ocean fluxes sent to coupler + + if (my_task == master_task) then + call ice_memusage_print(nu_diag,subname//':end') + endif + + end subroutine cice_init + +!======================================================================= + + subroutine init_restart + + use ice_arrays_column, only: dhsn + use ice_blocks, only: nx_block, ny_block + use ice_calendar, only: calendar + use ice_constants, only: c0 + use ice_domain, only: nblocks + use ice_domain_size, only: ncat, n_iso, n_aero, nfsd, nslyr + use ice_dyn_eap, only: read_restart_eap + use ice_dyn_shared, only: kdyn + use ice_grid, only: tmask + use ice_init, only: ice_ic + use ice_init_column, only: init_age, init_FY, init_lvl, init_snowtracers, & + init_meltponds_lvl, init_meltponds_topo, & + init_isotope, init_aerosol, init_hbrine, init_bgc, init_fsd + use ice_restart_column, only: restart_age, read_restart_age, & + restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & + restart_pond_lvl, read_restart_pond_lvl, & + restart_pond_topo, read_restart_pond_topo, & + restart_snow, read_restart_snow, & + restart_fsd, read_restart_fsd, & + restart_iso, read_restart_iso, & + restart_aero, read_restart_aero, & + restart_hbrine, read_restart_hbrine, & + restart_zsal, restart_bgc + use ice_restart_driver, only: restartfile + use ice_restart_shared, only: runtype, restart + use ice_state ! almost everything + + integer(kind=int_kind) :: & + i, j , & ! horizontal indices + iblk ! 
block index + logical(kind=log_kind) :: & + tr_iage, tr_FY, tr_lvl, tr_pond_lvl, & + tr_pond_topo, tr_snow, tr_fsd, tr_iso, tr_aero, tr_brine, & + skl_bgc, z_tracers, solve_zsal + integer(kind=int_kind) :: & + ntrcr + integer(kind=int_kind) :: & + nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, & + nt_smice, nt_smliq, nt_rhos, nt_rsnw, & + nt_iage, nt_FY, nt_aero, nt_fsd, nt_isosno, nt_isoice + + character(len=*), parameter :: subname = '(init_restart)' + + call icepack_query_tracer_sizes(ntrcr_out=ntrcr) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + call icepack_query_parameters(skl_bgc_out=skl_bgc, & + z_tracers_out=z_tracers, solve_zsal_out=solve_zsal) + call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & + tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & + tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & + tr_snow_out=tr_snow, tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) + call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & + nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, nt_ipnd_out=nt_ipnd, & + nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_aero_out=nt_aero, nt_fsd_out=nt_fsd, & + nt_smice_out=nt_smice, nt_smliq_out=nt_smliq, & + nt_rhos_out=nt_rhos, nt_rsnw_out=nt_rsnw, & + nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + if (trim(runtype) == 'continue') then + ! start from core restart file + call restartfile() ! given by pointer in ice_in + call calendar() ! update time parameters + if (kdyn == 2) call read_restart_eap ! EAP + else if (restart) then ! ice_ic = core restart file + call restartfile (ice_ic) ! or 'default' or 'none' + !!! uncomment to create netcdf + ! call restartfile_v4 (ice_ic) ! CICE v4.1 binary restart file + !!! uncomment if EAP restart data exists + ! if (kdyn == 2) call read_restart_eap + endif + + ! tracers + ! ice age tracer + if (tr_iage) then + if (trim(runtype) == 'continue') & + restart_age = .true. + if (restart_age) then + call read_restart_age + else + do iblk = 1, nblocks + call init_age(trcrn(:,:,nt_iage,:,iblk)) + enddo ! iblk + endif + endif + ! first-year area tracer + if (tr_FY) then + if (trim(runtype) == 'continue') restart_FY = .true. + if (restart_FY) then + call read_restart_FY + else + do iblk = 1, nblocks + call init_FY(trcrn(:,:,nt_FY,:,iblk)) + enddo ! iblk + endif + endif + ! level ice tracer + if (tr_lvl) then + if (trim(runtype) == 'continue') restart_lvl = .true. + if (restart_lvl) then + call read_restart_lvl + else + do iblk = 1, nblocks + call init_lvl(iblk,trcrn(:,:,nt_alvl,:,iblk), & + trcrn(:,:,nt_vlvl,:,iblk)) + enddo ! iblk + endif + endif + ! level-ice melt ponds + if (tr_pond_lvl) then + if (trim(runtype) == 'continue') & + restart_pond_lvl = .true. + if (restart_pond_lvl) then + call read_restart_pond_lvl + else + do iblk = 1, nblocks + call init_meltponds_lvl(trcrn(:,:,nt_apnd,:,iblk), & + trcrn(:,:,nt_hpnd,:,iblk), & + trcrn(:,:,nt_ipnd,:,iblk), & + dhsn(:,:,:,iblk)) + enddo ! iblk + endif + endif + ! topographic melt ponds + if (tr_pond_topo) then + if (trim(runtype) == 'continue') & + restart_pond_topo = .true. 
+ if (restart_pond_topo) then + call read_restart_pond_topo + else + do iblk = 1, nblocks + call init_meltponds_topo(trcrn(:,:,nt_apnd,:,iblk), & + trcrn(:,:,nt_hpnd,:,iblk), & + trcrn(:,:,nt_ipnd,:,iblk)) + enddo ! iblk + endif ! .not. restart_pond + endif + + ! snow redistribution/metamorphism + if (tr_snow) then + if (trim(runtype) == 'continue') restart_snow = .true. + if (restart_snow) then + call read_restart_snow + else + do iblk = 1, nblocks + call init_snowtracers(trcrn(:,:,nt_smice:nt_smice+nslyr-1,:,iblk), & + trcrn(:,:,nt_smliq:nt_smliq+nslyr-1,:,iblk), & + trcrn(:,:,nt_rhos :nt_rhos +nslyr-1,:,iblk), & + trcrn(:,:,nt_rsnw :nt_rsnw +nslyr-1,:,iblk)) + enddo ! iblk + endif + endif + + ! floe size distribution + if (tr_fsd) then + if (trim(runtype) == 'continue') restart_fsd = .true. + if (restart_fsd) then + call read_restart_fsd + else + call init_fsd(trcrn(:,:,nt_fsd:nt_fsd+nfsd-1,:,:)) + endif + endif + + ! isotopes + if (tr_iso) then + if (trim(runtype) == 'continue') restart_iso = .true. + if (restart_iso) then + call read_restart_iso + else + do iblk = 1, nblocks + call init_isotope(trcrn(:,:,nt_isosno:nt_isosno+n_iso-1,:,iblk), & + trcrn(:,:,nt_isoice:nt_isoice+n_iso-1,:,iblk)) + enddo ! iblk + endif + endif + + if (tr_aero) then ! ice aerosol + if (trim(runtype) == 'continue') restart_aero = .true. + if (restart_aero) then + call read_restart_aero + else + do iblk = 1, nblocks + call init_aerosol(trcrn(:,:,nt_aero:nt_aero+4*n_aero-1,:,iblk)) + enddo ! iblk + endif ! .not. restart_aero + endif + + if (trim(runtype) == 'continue') then + if (tr_brine) & + restart_hbrine = .true. + if (solve_zsal) & + restart_zsal = .true. + if (skl_bgc .or. z_tracers) & + restart_bgc = .true. + endif + + if (tr_brine .or. skl_bgc) then ! brine height tracer + call init_hbrine + if (tr_brine .and. restart_hbrine) call read_restart_hbrine + endif + + if (solve_zsal .or. skl_bgc .or. z_tracers) then ! biogeochemistry + if (tr_fsd) then + write (nu_diag,*) 'FSD implementation incomplete for use with BGC' + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + endif + call init_bgc + endif + + !----------------------------------------------------------------- + ! aggregate tracers + !----------------------------------------------------------------- + + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + if (tmask(i,j,iblk)) then + call icepack_aggregate(ncat = ncat, & + aicen = aicen(i,j,:,iblk), & + trcrn = trcrn(i,j,:,:,iblk), & + vicen = vicen(i,j,:,iblk), & + vsnon = vsnon(i,j,:,iblk), & + aice = aice (i,j, iblk), & + trcr = trcr (i,j,:,iblk), & + vice = vice (i,j, iblk), & + vsno = vsno (i,j, iblk), & + aice0 = aice0(i,j, iblk), & + ntrcr = ntrcr, & + trcr_depend = trcr_depend, & + trcr_base = trcr_base, & + n_trcr_strata = n_trcr_strata, & + nt_strata = nt_strata) + else + ! 
tcraig, reset all tracer values on land to zero + trcrn(i,j,:,:,iblk) = c0 + endif + enddo + enddo + enddo + !$OMP END PARALLEL DO + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + end subroutine init_restart + +!======================================================================= + + end module CICE_InitMod + +!======================================================================= diff --git a/cicecore/drivers/unittest/opticep/CICE_RunMod.F90 b/cicecore/drivers/unittest/opticep/CICE_RunMod.F90 new file mode 100644 index 000000000..ae7f7ab1f --- /dev/null +++ b/cicecore/drivers/unittest/opticep/CICE_RunMod.F90 @@ -0,0 +1,741 @@ +!======================================================================= +! +! Main driver for time stepping of CICE. +! +! authors Elizabeth C. Hunke, LANL +! Philip W. Jones, LANL +! William H. Lipscomb, LANL +! +! 2006 ECH: moved exit timeLoop to prevent execution of unnecessary timestep +! 2006 ECH: Streamlined for efficiency +! 2006 ECH: Converted to free source form (F90) +! 2007 BPB: Modified Delta-Eddington shortwave interface +! 2008 ECH: moved ESMF code to its own driver + + module CICE_RunMod + + use ice_kinds_mod + use ice_communicate, only: my_task, master_task + use ice_fileunits, only: nu_diag + use ice_arrays_column, only: oceanmixed_ice + use ice_constants, only: c0, c1 + use ice_constants, only: field_loc_center, field_type_scalar + use ice_exit, only: abort_ice + use ice_memusage, only: ice_memusage_print + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc, only: icepack_max_iso, icepack_max_aero + use icepack_intfc, only: icepack_query_parameters + use icepack_intfc, only: icepack_query_tracer_flags, icepack_query_tracer_sizes + + implicit none + private + public :: CICE_Run, ice_step + +!======================================================================= + + contains + +!======================================================================= +! +! This is the main driver routine for advancing CICE forward in time. +! +! author Elizabeth C. Hunke, LANL +! Philip W. Jones, LANL +! William H. Lipscomb, LANL + + subroutine CICE_Run + + use ice_calendar, only: dt, stop_now, advance_timestep + use ice_forcing, only: get_forcing_atmo, get_forcing_ocn, & + get_wave_spec + use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & + fiso_default, faero_default + use ice_flux, only: init_flux_atm, init_flux_ocn + use ice_timers, only: ice_timer_start, ice_timer_stop, & + timer_couple, timer_step + logical (kind=log_kind) :: & + tr_iso, tr_aero, tr_zaero, skl_bgc, z_tracers, wave_spec, tr_fsd + character(len=*), parameter :: subname = '(CICE_Run)' + + !-------------------------------------------------------------------- + ! initialize error code and step timer + !-------------------------------------------------------------------- + + call ice_timer_start(timer_step) ! start timing entire run + + call icepack_query_parameters(skl_bgc_out=skl_bgc, & + z_tracers_out=z_tracers, & + wave_spec_out=wave_spec) + call icepack_query_tracer_flags(tr_iso_out=tr_iso, & + tr_aero_out=tr_aero, & + tr_zaero_out=tr_zaero, & + tr_fsd_out=tr_fsd) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + +#ifndef CICE_IN_NEMO + !-------------------------------------------------------------------- + ! 
timestep loop + !-------------------------------------------------------------------- + + timeLoop: do +#endif + + call ice_step + +! tcraig, use advance_timestep now +! istep = istep + 1 ! update time step counters +! istep1 = istep1 + 1 +! time = time + dt ! determine the time and date +! call calendar(time) ! at the end of the timestep + call advance_timestep() ! advance time + +#ifndef CICE_IN_NEMO + if (stop_now >= 1) exit timeLoop +#endif + + call ice_timer_start(timer_couple) ! atm/ocn coupling + +! for now, wave_spectrum is constant in time +! if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice + call get_forcing_atmo ! atmospheric forcing from data + call get_forcing_ocn(dt) ! ocean forcing from data + + ! isotopes + if (tr_iso) call fiso_default ! default values + ! aerosols + ! if (tr_aero) call faero_data ! data file + ! if (tr_zaero) call fzaero_data ! data file (gx1) + if (tr_aero .or. tr_zaero) call faero_default ! default values + + if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry + if (z_tracers) call get_atm_bgc ! biogeochemistry + + call init_flux_atm ! Initialize atmosphere fluxes sent to coupler + call init_flux_ocn ! initialize ocean fluxes sent to coupler + + call ice_timer_stop(timer_couple) ! atm/ocn coupling + +#ifndef CICE_IN_NEMO + enddo timeLoop +#endif + + !-------------------------------------------------------------------- + ! end of timestep loop + !-------------------------------------------------------------------- + + call ice_timer_stop(timer_step) ! end timestepping loop timer + + end subroutine CICE_Run + +!======================================================================= +! +! Calls drivers for physics components, some initialization, and output +! +! author Elizabeth C. Hunke, LANL +! William H. Lipscomb, LANL + + subroutine ice_step + + use ice_boundary, only: ice_HaloUpdate + use ice_calendar, only: dt, dt_dyn, ndtd, diagfreq, write_restart, istep + use ice_diagnostics, only: init_mass_diags, runtime_diags, debug_model, debug_ice + use ice_diagnostics_bgc, only: hbrine_diags, zsal_diags, bgc_diags + use ice_domain, only: halo_info, nblocks + use ice_dyn_eap, only: write_restart_eap + use ice_dyn_shared, only: kdyn, kridge + use ice_flux, only: scale_factor, init_history_therm, & + daidtt, daidtd, dvidtt, dvidtd, dagedtt, dagedtd + use ice_history, only: accum_hist + use ice_history_bgc, only: init_history_bgc + use ice_restart, only: final_restart + use ice_restart_column, only: write_restart_age, write_restart_FY, & + write_restart_lvl, write_restart_pond_lvl, & + write_restart_pond_topo, write_restart_aero, write_restart_fsd, & + write_restart_iso, write_restart_bgc, write_restart_hbrine, & + write_restart_snow + use ice_restart_driver, only: dumpfile + use ice_restoring, only: restore_ice, ice_HaloRestore + use ice_step_mod, only: prep_radiation, step_therm1, step_therm2, & + update_state, step_dyn_horiz, step_dyn_ridge, step_radiation, & + biogeochemistry, step_prep, step_dyn_wave, step_snow + use ice_timers, only: ice_timer_start, ice_timer_stop, & + timer_diags, timer_column, timer_thermo, timer_bound, & + timer_hist, timer_readwrite + + integer (kind=int_kind) :: & + iblk , & ! block index + k , & ! dynamics supercycling index + ktherm ! thermodynamics is off when ktherm = -1 + + real (kind=dbl_kind) :: & + offset ! 
d(age)/dt time offset + + logical (kind=log_kind) :: & + tr_iage, tr_FY, tr_lvl, tr_fsd, tr_snow, & + tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & + calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec + + character(len=*), parameter :: subname = '(ice_step)' + + character (len=char_len) :: plabeld + + if (debug_model) then + plabeld = 'beginning time step' + do iblk = 1, nblocks + call debug_ice (iblk, plabeld) + enddo + endif + + call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc, skl_bgc_out=skl_bgc, & + solve_zsal_out=solve_zsal, z_tracers_out=z_tracers, ktherm_out=ktherm, & + wave_spec_out=wave_spec) + call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & + tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & + tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & + tr_iso_out=tr_iso, tr_fsd_out=tr_fsd, tr_snow_out=tr_snow) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + !----------------------------------------------------------------- + ! restoring on grid boundaries + !----------------------------------------------------------------- + + if (restore_ice) call ice_HaloRestore + + !----------------------------------------------------------------- + ! initialize diagnostics and save initial state values + !----------------------------------------------------------------- + + call ice_timer_start(timer_diags) ! diagnostics/history + call init_mass_diags ! diagnostics per timestep + call init_history_therm + call init_history_bgc + call ice_timer_stop(timer_diags) ! diagnostics/history + + call ice_timer_start(timer_column) ! column physics + call ice_timer_start(timer_thermo) ! thermodynamics + + call step_prep + + if (ktherm >= 0) then + !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) + do iblk = 1, nblocks + + !----------------------------------------------------------------- + ! scale radiation fields + !----------------------------------------------------------------- + + if (calc_Tsfc) call prep_radiation (iblk) + + if (debug_model) then + plabeld = 'post prep_radiation' + call debug_ice (iblk, plabeld) + endif + + !----------------------------------------------------------------- + ! thermodynamics and biogeochemistry + !----------------------------------------------------------------- + + call step_therm1 (dt, iblk) ! vertical thermodynamics + + if (debug_model) then + plabeld = 'post step_therm1' + call debug_ice (iblk, plabeld) + endif + + call biogeochemistry (dt, iblk) ! biogeochemistry + + if (debug_model) then + plabeld = 'post biogeochemistry' + call debug_ice (iblk, plabeld) + endif + + call step_therm2 (dt, iblk) ! ice thickness distribution thermo + + if (debug_model) then + plabeld = 'post step_therm2' + call debug_ice (iblk, plabeld) + endif + + enddo + !$OMP END PARALLEL DO + endif ! ktherm > 0 + + ! clean up, update tendency diagnostics + offset = dt + call update_state (dt, daidtt, dvidtt, dagedtt, offset) + + call ice_timer_stop(timer_thermo) ! thermodynamics + call ice_timer_stop(timer_column) ! column physics + + !----------------------------------------------------------------- + ! dynamics, transport, ridging + !----------------------------------------------------------------- + + ! wave fracture of the floe size distribution + ! note this is called outside of the dynamics subcycling loop + if (tr_fsd .and. wave_spec) call step_dyn_wave(dt) + + do k = 1, ndtd + + ! 
momentum, stress, transport + call step_dyn_horiz (dt_dyn) + + if (debug_model) then + plabeld = 'post step_dyn_horiz' + do iblk = 1, nblocks + call debug_ice (iblk, plabeld) + enddo ! iblk + endif + + ! ridging + !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) + do iblk = 1, nblocks + if (kridge > 0) call step_dyn_ridge (dt_dyn, ndtd, iblk) + enddo + !$OMP END PARALLEL DO + + if (debug_model) then + plabeld = 'post step_dyn_ridge' + do iblk = 1, nblocks + call debug_ice (iblk, plabeld) + enddo ! iblk + endif + + ! clean up, update tendency diagnostics + offset = c0 + call update_state (dt_dyn, daidtd, dvidtd, dagedtd, offset) + + enddo + + if (debug_model) then + plabeld = 'post dynamics' + do iblk = 1, nblocks + call debug_ice (iblk, plabeld) + enddo + endif + + call ice_timer_start(timer_column) ! column physics + call ice_timer_start(timer_thermo) ! thermodynamics + + !----------------------------------------------------------------- + ! snow redistribution and metamorphosis + !----------------------------------------------------------------- + + if (tr_snow) then ! advanced snow physics + !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) + do iblk = 1, nblocks + call step_snow (dt, iblk) + enddo + !$OMP END PARALLEL DO + call update_state (dt) ! clean up + endif + + !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) + do iblk = 1, nblocks + + !----------------------------------------------------------------- + ! albedo, shortwave radiation + !----------------------------------------------------------------- + + if (ktherm >= 0) call step_radiation (dt, iblk) + + if (debug_model) then + plabeld = 'post step_radiation' + call debug_ice (iblk, plabeld) + endif + + !----------------------------------------------------------------- + ! get ready for coupling and the next time step + !----------------------------------------------------------------- + + call coupling_prep (iblk) + + if (debug_model) then + plabeld = 'post coupling_prep' + call debug_ice (iblk, plabeld) + endif + + enddo ! iblk + !$OMP END PARALLEL DO + + call ice_timer_start(timer_bound) + call ice_HaloUpdate (scale_factor, halo_info, & + field_loc_center, field_type_scalar) + call ice_timer_stop(timer_bound) + + call ice_timer_stop(timer_thermo) ! thermodynamics + call ice_timer_stop(timer_column) ! column physics + + !----------------------------------------------------------------- + ! write data + !----------------------------------------------------------------- + + call ice_timer_start(timer_diags) ! diagnostics + if (mod(istep,diagfreq) == 0) then + call runtime_diags(dt) ! log file + if (solve_zsal) call zsal_diags + if (skl_bgc .or. z_tracers) call bgc_diags + if (tr_brine) call hbrine_diags + if (my_task == master_task) then + call ice_memusage_print(nu_diag,subname) + endif + endif + call ice_timer_stop(timer_diags) ! diagnostics + + call ice_timer_start(timer_hist) ! history + call accum_hist (dt) ! history file + call ice_timer_stop(timer_hist) ! history + + call ice_timer_start(timer_readwrite) ! reading/writing + if (write_restart == 1) then + call dumpfile ! core variables for restarting + if (tr_iage) call write_restart_age + if (tr_FY) call write_restart_FY + if (tr_lvl) call write_restart_lvl + if (tr_pond_lvl) call write_restart_pond_lvl + if (tr_pond_topo) call write_restart_pond_topo + if (tr_snow) call write_restart_snow + if (tr_fsd) call write_restart_fsd + if (tr_iso) call write_restart_iso + if (tr_aero) call write_restart_aero + if (solve_zsal .or. skl_bgc .or. 
z_tracers) & + call write_restart_bgc + if (tr_brine) call write_restart_hbrine + if (kdyn == 2) call write_restart_eap + call final_restart + endif + call ice_timer_stop(timer_readwrite) ! reading/writing + + end subroutine ice_step + +!======================================================================= +! +! Prepare for coupling +! +! authors: Elizabeth C. Hunke, LANL + + subroutine coupling_prep (iblk) + + use ice_arrays_column, only: alvdfn, alidfn, alvdrn, alidrn, & + albicen, albsnon, albpndn, apeffn, fzsal_g, fzsal, snowfracn + use ice_blocks, only: nx_block, ny_block, get_block, block + use ice_domain, only: blocks_ice + use ice_calendar, only: dt, nstreams + use ice_domain_size, only: ncat + use ice_flux, only: alvdf, alidf, alvdr, alidr, albice, albsno, & + albpnd, albcnt, apeff_ai, fpond, fresh, l_mpond_fresh, & + alvdf_ai, alidf_ai, alvdr_ai, alidr_ai, fhocn_ai, & + fresh_ai, fsalt_ai, fsalt, & + fswthru_ai, fhocn, scale_factor, snowfrac, & + fswthru, fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & + swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & + fsens, flat, fswabs, flwout, evap, Tref, Qref, & + scale_fluxes, frzmlt_init, frzmlt + use ice_flux_bgc, only: faero_ocn, fiso_ocn, Qref_iso, fiso_evap, & + fzsal_ai, fzsal_g_ai, flux_bio, flux_bio_ai + use ice_grid, only: tmask + use ice_state, only: aicen, aice +#ifdef CICE_IN_NEMO + use ice_state, only: aice_init + use ice_flux, only: flatn_f, fsurfn_f +#endif + use ice_step_mod, only: ocean_mixed_layer + use ice_timers, only: timer_couple, ice_timer_start, ice_timer_stop + + integer (kind=int_kind), intent(in) :: & + iblk ! block index + + ! local variables + + integer (kind=int_kind) :: & + ilo,ihi,jlo,jhi, & ! beginning and end of physical domain + n , & ! thickness category index + i,j , & ! horizontal indices + k , & ! tracer index + nbtrcr ! + + type (block) :: & + this_block ! block information for current block + + logical (kind=log_kind) :: & + calc_Tsfc ! + + real (kind=dbl_kind) :: & + cszn , & ! counter for history averaging + puny , & ! + rhofresh , & ! + netsw ! flag for shortwave radiation presence + + character(len=*), parameter :: subname = '(coupling_prep)' + + call icepack_query_parameters(puny_out=puny, rhofresh_out=rhofresh) + call icepack_query_tracer_sizes(nbtrcr_out=nbtrcr) + call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + !----------------------------------------------------------------- + ! Save current value of frzmlt for diagnostics. + ! Update mixed layer with heat and radiation from ice. + !----------------------------------------------------------------- + + do j = 1, ny_block + do i = 1, nx_block + frzmlt_init (i,j,iblk) = frzmlt(i,j,iblk) + enddo + enddo + + call ice_timer_start(timer_couple,iblk) ! atm/ocn coupling + + if (oceanmixed_ice) & + call ocean_mixed_layer (dt,iblk) ! ocean surface fluxes and sst + + !----------------------------------------------------------------- + ! Aggregate albedos + !----------------------------------------------------------------- + + do j = 1, ny_block + do i = 1, nx_block + alvdf(i,j,iblk) = c0 + alidf(i,j,iblk) = c0 + alvdr(i,j,iblk) = c0 + alidr(i,j,iblk) = c0 + + albice(i,j,iblk) = c0 + albsno(i,j,iblk) = c0 + albpnd(i,j,iblk) = c0 + apeff_ai(i,j,iblk) = c0 + snowfrac(i,j,iblk) = c0 + + ! 
for history averaging + cszn = c0 + netsw = swvdr(i,j,iblk)+swidr(i,j,iblk)+swvdf(i,j,iblk)+swidf(i,j,iblk) + if (netsw > puny) cszn = c1 + do n = 1, nstreams + albcnt(i,j,iblk,n) = albcnt(i,j,iblk,n) + cszn + enddo + enddo + enddo + + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do n = 1, ncat + do j = jlo, jhi + do i = ilo, ihi + if (aicen(i,j,n,iblk) > puny) then + + alvdf(i,j,iblk) = alvdf(i,j,iblk) & + + alvdfn(i,j,n,iblk)*aicen(i,j,n,iblk) + alidf(i,j,iblk) = alidf(i,j,iblk) & + + alidfn(i,j,n,iblk)*aicen(i,j,n,iblk) + alvdr(i,j,iblk) = alvdr(i,j,iblk) & + + alvdrn(i,j,n,iblk)*aicen(i,j,n,iblk) + alidr(i,j,iblk) = alidr(i,j,iblk) & + + alidrn(i,j,n,iblk)*aicen(i,j,n,iblk) + + netsw = swvdr(i,j,iblk) + swidr(i,j,iblk) & + + swvdf(i,j,iblk) + swidf(i,j,iblk) + if (netsw > puny) then ! sun above horizon + albice(i,j,iblk) = albice(i,j,iblk) & + + albicen(i,j,n,iblk)*aicen(i,j,n,iblk) + albsno(i,j,iblk) = albsno(i,j,iblk) & + + albsnon(i,j,n,iblk)*aicen(i,j,n,iblk) + albpnd(i,j,iblk) = albpnd(i,j,iblk) & + + albpndn(i,j,n,iblk)*aicen(i,j,n,iblk) + endif + + apeff_ai(i,j,iblk) = apeff_ai(i,j,iblk) & ! for history + + apeffn(i,j,n,iblk)*aicen(i,j,n,iblk) + snowfrac(i,j,iblk) = snowfrac(i,j,iblk) & ! for history + + snowfracn(i,j,n,iblk)*aicen(i,j,n,iblk) + + endif ! aicen > puny + enddo + enddo + enddo + + do j = 1, ny_block + do i = 1, nx_block + + !----------------------------------------------------------------- + ! reduce fresh by fpond for coupling + !----------------------------------------------------------------- + + if (l_mpond_fresh) then + fpond(i,j,iblk) = fpond(i,j,iblk) * rhofresh/dt + fresh(i,j,iblk) = fresh(i,j,iblk) - fpond(i,j,iblk) + endif + + !---------------------------------------------------------------- + ! Store grid box mean albedos and fluxes before scaling by aice + !---------------------------------------------------------------- + + alvdf_ai (i,j,iblk) = alvdf (i,j,iblk) + alidf_ai (i,j,iblk) = alidf (i,j,iblk) + alvdr_ai (i,j,iblk) = alvdr (i,j,iblk) + alidr_ai (i,j,iblk) = alidr (i,j,iblk) + fresh_ai (i,j,iblk) = fresh (i,j,iblk) + fsalt_ai (i,j,iblk) = fsalt (i,j,iblk) + fhocn_ai (i,j,iblk) = fhocn (i,j,iblk) + fswthru_ai(i,j,iblk) = fswthru(i,j,iblk) + fzsal_ai (i,j,iblk) = fzsal (i,j,iblk) + fzsal_g_ai(i,j,iblk) = fzsal_g(i,j,iblk) + + if (nbtrcr > 0) then + do k = 1, nbtrcr + flux_bio_ai (i,j,k,iblk) = flux_bio (i,j,k,iblk) + enddo + endif + + !----------------------------------------------------------------- + ! Save net shortwave for scaling factor in scale_factor + !----------------------------------------------------------------- + scale_factor(i,j,iblk) = & + swvdr(i,j,iblk)*(c1 - alvdr_ai(i,j,iblk)) & + + swvdf(i,j,iblk)*(c1 - alvdf_ai(i,j,iblk)) & + + swidr(i,j,iblk)*(c1 - alidr_ai(i,j,iblk)) & + + swidf(i,j,iblk)*(c1 - alidf_ai(i,j,iblk)) + + enddo + enddo + + !----------------------------------------------------------------- + ! Divide fluxes by ice area + ! - the CESM coupler assumes fluxes are per unit ice area + ! 
- also needed for global budget in diagnostics
+ !-----------------------------------------------------------------
+
+ call scale_fluxes (nx_block, ny_block, &
+ tmask (:,:,iblk), nbtrcr, &
+ icepack_max_aero, &
+ aice (:,:,iblk), Tf (:,:,iblk), &
+ Tair (:,:,iblk), Qa (:,:,iblk), &
+ strairxT (:,:,iblk), strairyT(:,:,iblk), &
+ fsens (:,:,iblk), flat (:,:,iblk), &
+ fswabs (:,:,iblk), flwout (:,:,iblk), &
+ evap (:,:,iblk), &
+ Tref (:,:,iblk), Qref (:,:,iblk), &
+ fresh (:,:,iblk), fsalt (:,:,iblk), &
+ fhocn (:,:,iblk), &
+ fswthru (:,:,iblk), &
+ fswthru_vdr (:,:,iblk), &
+ fswthru_vdf (:,:,iblk), &
+ fswthru_idr (:,:,iblk), &
+ fswthru_idf (:,:,iblk), &
+ faero_ocn(:,:,:,iblk), &
+ alvdr (:,:,iblk), alidr (:,:,iblk), &
+ alvdf (:,:,iblk), alidf (:,:,iblk), &
+ fzsal (:,:,iblk), fzsal_g (:,:,iblk), &
+ flux_bio (:,:,1:nbtrcr,iblk), &
+ Qref_iso =Qref_iso (:,:,:,iblk), &
+ fiso_evap=fiso_evap(:,:,:,iblk), &
+ fiso_ocn =fiso_ocn (:,:,:,iblk))
+
+#ifdef CICE_IN_NEMO
+!echmod - comment this out for efficiency, if .not. calc_Tsfc
+ if (.not. calc_Tsfc) then
+
+ !---------------------------------------------------------------
+ ! If surface fluxes were provided, conserve these fluxes at ice
+ ! free points by passing to ocean.
+ !---------------------------------------------------------------
+
+ call sfcflux_to_ocn &
+ (nx_block, ny_block, &
+ tmask (:,:,iblk), aice_init(:,:,iblk), &
+ fsurfn_f (:,:,:,iblk), flatn_f(:,:,:,iblk), &
+ fresh (:,:,iblk), fhocn (:,:,iblk))
+ endif
+!echmod
+#endif
+ call ice_timer_stop(timer_couple,iblk) ! atm/ocn coupling
+
+ end subroutine coupling_prep
+
+#ifdef CICE_IN_NEMO
+
+!=======================================================================
+!
+! If surface heat fluxes are provided to CICE instead of CICE calculating
+! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can
+! be provided at points which do not have ice. (This could be due to
+! the heat fluxes being calculated on a lower resolution grid or the
+! heat fluxes not being recalculated at every CICE timestep.) At ice-free
+! points, conserve energy and water by passing these fluxes to the ocean.
+!
+! author: A. McLaren, Met Office
+
+ subroutine sfcflux_to_ocn(nx_block, ny_block, &
+ tmask, aice, &
+ fsurfn_f, flatn_f, &
+ fresh, fhocn)
+
+ use ice_domain_size, only: ncat
+
+ integer (kind=int_kind), intent(in) :: &
+ nx_block, ny_block ! block dimensions
+
+ logical (kind=log_kind), dimension (nx_block,ny_block), intent(in) :: &
+ tmask ! land/boundary mask, thickness (T-cell)
+
+ real (kind=dbl_kind), dimension(nx_block,ny_block), intent(in):: &
+ aice ! initial ice concentration
+
+ real (kind=dbl_kind), dimension(nx_block,ny_block,ncat), intent(in) :: &
+ fsurfn_f, & ! net surface heat flux (provided as forcing)
+ flatn_f ! latent heat flux (provided as forcing)
+
+ real (kind=dbl_kind), dimension(nx_block,ny_block), intent(inout):: &
+ fresh , & ! fresh water flux to ocean (kg/m2/s)
+ fhocn ! actual ocn/ice heat flx (W/m**2)
+
+
+ ! local variables
+ integer (kind=int_kind) :: &
+ i, j, n ! horizontal indices
+
+ real (kind=dbl_kind) :: &
+ puny, & !
+ Lsub, & !
+ rLsub ! 1/Lsub
+
+ character(len=*), parameter :: subname = '(sfcflux_to_ocn)'
+
+ call icepack_query_parameters(puny_out=puny, Lsub_out=Lsub)
+ call icepack_warnings_flush(nu_diag)
+ if (icepack_warnings_aborted()) call abort_ice(error_message=subname, &
+ file=__FILE__, line=__LINE__)
+ rLsub = c1 / Lsub
+
+ do n = 1, ncat
+ do j = 1, ny_block
+ do i = 1, nx_block
+ if (tmask(i,j) .and.
aice(i,j) <= puny) then
+ fhocn(i,j) = fhocn(i,j) &
+ + fsurfn_f(i,j,n) + flatn_f(i,j,n)
+ fresh(i,j) = fresh(i,j) &
+ + flatn_f(i,j,n) * rLsub
+ endif
+ enddo ! i
+ enddo ! j
+ enddo ! n
+
+
+ end subroutine sfcflux_to_ocn
+
+#endif
+
+!=======================================================================
+
+ end module CICE_RunMod
+
+!=======================================================================
diff --git a/cicecore/drivers/unittest/opticep/README b/cicecore/drivers/unittest/opticep/README
new file mode 100644
index 000000000..b5f1bdf9c
--- /dev/null
+++ b/cicecore/drivers/unittest/opticep/README
@@ -0,0 +1,30 @@
+
+This unittest tests Icepack optional arguments. The idea is to have source code that is
+identical to the standard CICE source code except that the significant optional arguments
+passed into Icepack are removed from the CICE calls. A standard CICE case is then run with
+the optional features (fsd, bgc, isotopes, etc.) turned off in namelist. The results should
+be bit-for-bit identical with an equivalent run from the standard source code.
+
+This unittest will need to be maintained manually. As the CICE code changes, the modified
+files in the unittest also need to be updated manually. In general, it should be as easy as
+copying the standard files into this directory and then commenting out the optional arguments.
+
+NOTES:
+
+All files from cicecore/drivers/standalone/cice need to be copied to this directory. As of
+today, that includes
+ CICE.F90
+ CICE_FinalMod.F90
+ CICE_InitMod.F90
+ CICE_RunMod.F90
+
+Add
+ write(nu_diag, *) "OPTICEP TEST COMPLETED SUCCESSFULLY "
+to CICE_FinalMod.F90
+
+Do not worry about the parameter/tracer query/init/write methods.
+
+Interfaces to modify include
+ ice_init_column.F90 (icepack_step_radiation, icepack_init_zbgc)
+ ice_step_mod.F90 (icepack_step_therm1, icepack_step_therm2, icepack_prep_radiation,
+ icepack_step_radiation, icepack_step_ridge)
diff --git a/cicecore/drivers/unittest/opticep/ice_init_column.F90 b/cicecore/drivers/unittest/opticep/ice_init_column.F90
new file mode 100644
index 000000000..82f3f4a1e
--- /dev/null
+++ b/cicecore/drivers/unittest/opticep/ice_init_column.F90
@@ -0,0 +1,3135 @@
+!=========================================================================
+!
+! Initialization routines for the column package.
+!
+! author: Elizabeth C. Hunke, LANL
+!
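As a toy illustration of the README's commenting-out step (the routine and arguments below are invented for illustration; in the real files the disabled arguments carry the !opt prefix, as in the init_shortwave call later in this patch):

   program opticep_sketch
      implicit none
      integer, parameter :: dp = kind(1.d0)
      real(dp) :: albedo

      ! the standard source would also pass rsnow; the opticep copy keeps
      ! the line but disables it with the !opt prefix
      call step_radiation(swdn  =300.0_dp, &
!opt                      rsnow =120.0_dp, &
                          albedo=albedo)
      write(*,*) 'albedo = ', albedo

   contains

      ! stand-in for an Icepack interface with an optional feature argument
      subroutine step_radiation(swdn, albedo, rsnow)
         real(dp), intent(in)            :: swdn
         real(dp), intent(out)           :: albedo
         real(dp), intent(in),  optional :: rsnow
         albedo = 0.6_dp                             ! default, feature-off path
         if (swdn <= 0.0_dp) albedo = 0.0_dp         ! no sun, no albedo
         if (present(rsnow)) albedo = albedo - 1.0e-4_dp*rsnow
      end subroutine step_radiation

   end program opticep_sketch

In the real interfaces the corresponding features are also switched off in namelist, so the removed arguments are never referenced and the opticep run matches the standard run bit for bit.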
+ module ice_init_column + + use ice_kinds_mod + use ice_blocks, only: nx_block, ny_block + use ice_constants + use ice_communicate, only: my_task, master_task, ice_barrier + use ice_domain_size, only: ncat, max_blocks + use ice_domain_size, only: nblyr, nilyr, nslyr + use ice_domain_size, only: n_aero, n_zaero, n_algae + use ice_domain_size, only: n_doc, n_dic, n_don + use ice_domain_size, only: n_fed, n_fep + use ice_fileunits, only: nu_diag + use ice_fileunits, only: nu_nml, nml_filename, get_fileunit, & + release_fileunit, flush_fileunit + use ice_exit, only: abort_ice + use icepack_intfc, only: icepack_max_don, icepack_max_doc, icepack_max_dic + use icepack_intfc, only: icepack_max_algae, icepack_max_aero, icepack_max_fe + use icepack_intfc, only: icepack_max_nbtrcr + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc, only: icepack_init_tracer_sizes, icepack_init_tracer_flags + use icepack_intfc, only: icepack_init_tracer_indices + use icepack_intfc, only: icepack_init_parameters + use icepack_intfc, only: icepack_query_tracer_sizes, icepack_query_tracer_flags + use icepack_intfc, only: icepack_query_tracer_indices, icepack_query_tracer_sizes + use icepack_intfc, only: icepack_query_parameters + use icepack_intfc, only: icepack_write_tracer_sizes, icepack_write_tracer_flags + use icepack_intfc, only: icepack_write_tracer_indices, icepack_write_tracer_sizes + use icepack_intfc, only: icepack_init_fsd, icepack_cleanup_fsd + use icepack_intfc, only: icepack_init_zbgc + use icepack_intfc, only: icepack_init_thermo + use icepack_intfc, only: icepack_step_radiation, icepack_init_orbit + use icepack_intfc, only: icepack_init_bgc, icepack_init_zsalinity + use icepack_intfc, only: icepack_init_ocean_bio, icepack_load_ocean_bio_array + use icepack_intfc, only: icepack_init_hbrine + + implicit none + + private + public :: init_thermo_vertical, init_shortwave, & + init_age, init_FY, init_lvl, init_fsd, & + init_meltponds_lvl, init_meltponds_topo, & + init_aerosol, init_bgc, init_hbrine, init_zbgc, input_zbgc, & + count_tracers, init_isotope, init_snowtracers + + ! namelist parameters needed locally + + real (kind=dbl_kind) :: & + tau_min , tau_max , & + nitratetype , ammoniumtype , silicatetype, & + dmspptype , dmspdtype , humtype + + real (kind=dbl_kind), dimension(icepack_max_dic) :: & + dictype + + real (kind=dbl_kind), dimension(icepack_max_algae) :: & + algaltype ! 
tau_min for both retention and release
+
+ real (kind=dbl_kind), dimension(icepack_max_doc) :: &
+ doctype
+
+ real (kind=dbl_kind), dimension(icepack_max_don) :: &
+ dontype
+
+ real (kind=dbl_kind), dimension(icepack_max_fe) :: &
+ fedtype
+
+ real (kind=dbl_kind), dimension(icepack_max_fe) :: &
+ feptype
+
+ real (kind=dbl_kind), dimension(icepack_max_aero) :: &
+ zaerotype
+
+ real (kind=dbl_kind) :: &
+ grid_o, l_sk, grid_o_t, initbio_frac, &
+ frazil_scav, grid_oS, l_skS, &
+ phi_snow, &
+ ratio_Si2N_diatoms , ratio_Si2N_sp , ratio_Si2N_phaeo , &
+ ratio_S2N_diatoms , ratio_S2N_sp , ratio_S2N_phaeo , &
+ ratio_Fe2C_diatoms , ratio_Fe2C_sp , ratio_Fe2C_phaeo , &
+ ratio_Fe2N_diatoms , ratio_Fe2N_sp , ratio_Fe2N_phaeo , &
+ ratio_Fe2DON , ratio_Fe2DOC_s , ratio_Fe2DOC_l , &
+ fr_resp , &
+ algal_vel , R_dFe2dust , dustFe_sol , &
+ chlabs_diatoms , chlabs_sp , chlabs_phaeo , &
+ alpha2max_low_diatoms,alpha2max_low_sp , alpha2max_low_phaeo, &
+ beta2max_diatoms , beta2max_sp , beta2max_phaeo , &
+ mu_max_diatoms , mu_max_sp , mu_max_phaeo , &
+ grow_Tdep_diatoms , grow_Tdep_sp , grow_Tdep_phaeo , &
+ fr_graze_diatoms , fr_graze_sp , fr_graze_phaeo , &
+ mort_pre_diatoms , mort_pre_sp , mort_pre_phaeo , &
+ mort_Tdep_diatoms , mort_Tdep_sp , mort_Tdep_phaeo , &
+ k_exude_diatoms , k_exude_sp , k_exude_phaeo , &
+ K_Nit_diatoms , K_Nit_sp , K_Nit_phaeo , &
+ K_Am_diatoms , K_Am_sp , K_Am_phaeo , &
+ K_Sil_diatoms , K_Sil_sp , K_Sil_phaeo , &
+ K_Fe_diatoms , K_Fe_sp , K_Fe_phaeo , &
+ f_don_protein , kn_bac_protein , f_don_Am_protein , &
+ f_doc_s , f_doc_l , f_exude_s , &
+ f_exude_l , k_bac_s , k_bac_l , &
+ T_max , fsal , op_dep_min , &
+ fr_graze_s , fr_graze_e , fr_mort2min , &
+ fr_dFe , k_nitrif , t_iron_conv , &
+ max_loss , max_dfe_doc1 , fr_resp_s , &
+ y_sk_DMS , t_sk_conv , t_sk_ox , &
+ algaltype_diatoms , algaltype_sp , algaltype_phaeo , &
+ doctype_s , doctype_l , dontype_protein , &
+ fedtype_1 , feptype_1 , zaerotype_bc1 , &
+ zaerotype_bc2 , zaerotype_dust1 , zaerotype_dust2 , &
+ zaerotype_dust3 , zaerotype_dust4 , ratio_C2N_diatoms , &
+ ratio_C2N_sp , ratio_C2N_phaeo , ratio_chl2N_diatoms, &
+ ratio_chl2N_sp , ratio_chl2N_phaeo , F_abs_chl_diatoms , &
+ F_abs_chl_sp , F_abs_chl_phaeo , ratio_C2N_proteins
+
+!=======================================================================
+
+ contains
+
+!=======================================================================
+!
+! Initialize the vertical profile of ice salinity and melting temperature.
+!
+! authors: C. M. Bitz, UW
+! William H. Lipscomb, LANL
+
+ subroutine init_thermo_vertical
+
+ use ice_flux, only: salinz, Tmltz
+
+ integer (kind=int_kind) :: &
+ i, j, iblk, & ! horizontal indices
+ k ! ice layer index
+
+ real (kind=dbl_kind), dimension(nilyr+1) :: &
+ sprofile ! vertical salinity profile
+
+ real (kind=dbl_kind) :: &
+ depressT
+
+ character(len=*), parameter :: subname='(init_thermo_vertical)'
+
+ !-----------------------------------------------------------------
+ ! initialize
+ !-----------------------------------------------------------------
+
+ call icepack_query_parameters(depressT_out=depressT)
+ call icepack_init_thermo(nilyr=nilyr, sprofile=sprofile)
+ call icepack_warnings_flush(nu_diag)
+ if (icepack_warnings_aborted()) call abort_ice(error_message=subname, &
+ file=__FILE__, line=__LINE__)
+
+ !-----------------------------------------------------------------
+ ! Prescribe vertical profile of salinity and melting temperature.
+ ! Note this profile is only used for BL99 thermodynamics.
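The loops below only broadcast a one-dimensional relation over the grid: Tmltz(k) = -depressT*salinz(k). A self-contained sketch with an invented linear salinity profile and a representative depressT (in the real code both come from icepack_init_thermo and icepack_query_parameters):

   program profile_sketch
      implicit none
      integer,  parameter :: dp = kind(1.d0)
      integer,  parameter :: nilyr = 7             ! number of ice layers
      real(dp), parameter :: depressT = 0.054_dp   ! deg C per ppt, representative
      real(dp) :: sprofile(nilyr+1), Tmltz(nilyr+1)
      integer  :: k

      do k = 1, nilyr+1
         ! invented profile: fresh at the surface, 3.2 ppt at the bottom
         sprofile(k) = 3.2_dp*real(k-1,dp)/real(nilyr,dp)
         ! melting temperature follows directly from the salinity profile
         Tmltz(k) = -sprofile(k)*depressT
         write(*,'(a,i3,2f10.4)') ' k, S, Tmlt =', k, sprofile(k), Tmltz(k)
      enddo
   end program profile_sketch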
+ !----------------------------------------------------------------- + + !$OMP PARALLEL DO PRIVATE(iblk,i,j,k) + do iblk = 1,max_blocks + do j = 1, ny_block + do i = 1, nx_block + do k = 1, nilyr+1 + salinz(i,j,k,iblk) = sprofile(k) + Tmltz (i,j,k,iblk) = -salinz(i,j,k,iblk)*depressT + enddo ! k + enddo ! i + enddo ! j + enddo ! iblk + !$OMP END PARALLEL DO + + end subroutine init_thermo_vertical + +!======================================================================= +! +! Initialize shortwave + + subroutine init_shortwave + + use ice_arrays_column, only: fswpenln, Iswabsn, Sswabsn, albicen, & + albsnon, alvdrn, alidrn, alvdfn, alidfn, fswsfcn, & + fswthrun, fswthrun_vdr, fswthrun_vdf, fswthrun_idr, fswthrun_idf, & + fswintn, albpndn, apeffn, trcrn_sw, dhsn, ffracn, snowfracn, & + kaer_tab, waer_tab, gaer_tab, kaer_bc_tab, waer_bc_tab, gaer_bc_tab, bcenh, & + swgrid, igrid + use ice_blocks, only: block, get_block + use ice_calendar, only: dt, calendar_type, & + days_per_year, nextsw_cday, yday, msec + use ice_diagnostics, only: npnt, print_points, pmloc, piloc, pjloc + use ice_domain, only: nblocks, blocks_ice + use ice_flux, only: alvdf, alidf, alvdr, alidr, & + alvdr_ai, alidr_ai, alvdf_ai, alidf_ai, & + swvdr, swvdf, swidr, swidf, scale_factor, snowfrac, & + albice, albsno, albpnd, apeff_ai, coszen, fsnow + use ice_grid, only: tlat, tlon, tmask + use ice_restart_shared, only: restart, runtype + use ice_state, only: aicen, vicen, vsnon, trcrn + + integer (kind=int_kind) :: & + i, j , k , & ! horizontal indices + iblk , & ! block index + ilo,ihi,jlo,jhi, & ! beginning and end of physical domain + n ! thickness category index + + real (kind=dbl_kind) :: & + netsw ! flag for shortwave radiation presence + + type (block) :: & + this_block ! block information for current block + + logical (kind=log_kind) :: & + l_print_point, & ! flag to print designated grid point diagnostics + debug, & ! if true, print diagnostics + dEdd_algae, & ! use prognostic chla in dEdd radiation + modal_aero, & ! use modal aerosol optical treatment + snwgrain ! use variable snow radius + + character (char_len) :: shortwave + + integer (kind=int_kind) :: & + ipoint + + real (kind=dbl_kind), dimension(ncat) :: & + fbri ! brine height to ice thickness + + real(kind= dbl_kind), dimension(:,:), allocatable :: & + ztrcr_sw, & ! zaerosols (kg/m^3) and chla (mg/m^3) + rsnow ! 
snow grain radius tracer (10^-6 m) + + logical (kind=log_kind) :: tr_brine, tr_zaero, tr_bgc_n + integer (kind=int_kind) :: nt_alvl, nt_apnd, nt_hpnd, nt_ipnd, nt_aero, & + nt_fbri, nt_tsfc, ntrcr, nbtrcr, nbtrcr_sw, nt_rsnw + integer (kind=int_kind), dimension(icepack_max_algae) :: & + nt_bgc_N + integer (kind=int_kind), dimension(icepack_max_aero) :: & + nt_zaero + real (kind=dbl_kind) :: puny + + character(len=*), parameter :: subname='(init_shortwave)' + + call icepack_query_parameters(puny_out=puny) + call icepack_query_parameters(shortwave_out=shortwave) + call icepack_query_parameters(dEdd_algae_out=dEdd_algae) + call icepack_query_parameters(modal_aero_out=modal_aero) + call icepack_query_parameters(snwgrain_out=snwgrain) + call icepack_query_tracer_sizes(ntrcr_out=ntrcr, nbtrcr_out=nbtrcr, nbtrcr_sw_out=nbtrcr_sw) + call icepack_query_tracer_flags(tr_brine_out=tr_brine, tr_zaero_out=tr_zaero, & + tr_bgc_n_out=tr_bgc_n) + call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, & + nt_ipnd_out=nt_ipnd, nt_aero_out=nt_aero, nt_fbri_out=nt_fbri, nt_tsfc_out=nt_tsfc, & + nt_bgc_N_out=nt_bgc_N, nt_zaero_out=nt_zaero, nt_rsnw_out=nt_rsnw) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__,line= __LINE__) + + allocate(ztrcr_sw(nbtrcr_sw, ncat)) + allocate(rsnow(nslyr,ncat)) + + do iblk=1,nblocks + + ! Initialize + fswpenln(:,:,:,:,iblk) = c0 + Iswabsn(:,:,:,:,iblk) = c0 + Sswabsn(:,:,:,:,iblk) = c0 + + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = 1, ny_block ! can be jlo, jhi + do i = 1, nx_block ! can be ilo, ihi + + l_print_point = .false. + debug = .false. + if (debug .and. print_points) then + do ipoint = 1, npnt + if (my_task == pmloc(ipoint) .and. & + i == piloc(ipoint) .and. & + j == pjloc(ipoint)) & + l_print_point = .true. + write (nu_diag, *) 'my_task = ',my_task + enddo ! n + endif + + alvdf(i,j,iblk) = c0 + alidf(i,j,iblk) = c0 + alvdr(i,j,iblk) = c0 + alidr(i,j,iblk) = c0 + alvdr_ai(i,j,iblk) = c0 + alidr_ai(i,j,iblk) = c0 + alvdf_ai(i,j,iblk) = c0 + alidf_ai(i,j,iblk) = c0 + albice(i,j,iblk) = c0 + albsno(i,j,iblk) = c0 + albpnd(i,j,iblk) = c0 + snowfrac(i,j,iblk) = c0 + apeff_ai(i,j,iblk) = c0 + + do n = 1, ncat + alvdrn(i,j,n,iblk) = c0 + alidrn(i,j,n,iblk) = c0 + alvdfn(i,j,n,iblk) = c0 + alidfn(i,j,n,iblk) = c0 + fswsfcn(i,j,n,iblk) = c0 + fswintn(i,j,n,iblk) = c0 + fswthrun(i,j,n,iblk) = c0 + fswthrun_vdr(i,j,n,iblk) = c0 + fswthrun_vdf(i,j,n,iblk) = c0 + fswthrun_idr(i,j,n,iblk) = c0 + fswthrun_idf(i,j,n,iblk) = c0 + enddo ! ncat + + enddo + enddo + do j = jlo, jhi + do i = ilo, ihi + + if (trim(shortwave) == 'dEdd') then ! delta Eddington + +#ifndef CESMCOUPLED + ! initialize orbital parameters + ! These come from the driver in the coupled model. 
+ call icepack_init_orbit() + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(subname//' init_orbit', & + file=__FILE__, line=__LINE__) +#endif + endif + + fbri(:) = c0 + ztrcr_sw(:,:) = c0 + rsnow (:,:) = c0 + do n = 1, ncat + if (tr_brine) fbri(n) = trcrn(i,j,nt_fbri,n,iblk) + if (snwgrain) then + do k = 1, nslyr + rsnow(k,n) = trcrn(i,j,nt_rsnw+k-1,n,iblk) + enddo + endif + enddo + + if (tmask(i,j,iblk)) then + call icepack_step_radiation (dt=dt, ncat=ncat, & + nblyr=nblyr, nilyr=nilyr, nslyr=nslyr, & + dEdd_algae=dEdd_algae, & + swgrid=swgrid(:), igrid=igrid(:), & + fbri=fbri(:), & + aicen=aicen(i,j,:,iblk), & + vicen=vicen(i,j,:,iblk), & + vsnon=vsnon(i,j,:,iblk), & + Tsfcn=trcrn(i,j,nt_Tsfc,:,iblk), & + alvln=trcrn(i,j,nt_alvl,:,iblk), & + apndn=trcrn(i,j,nt_apnd,:,iblk), & + hpndn=trcrn(i,j,nt_hpnd,:,iblk), & + ipndn=trcrn(i,j,nt_ipnd,:,iblk), & + aeron=trcrn(i,j,nt_aero:nt_aero+4*n_aero-1,:,iblk), & + bgcNn=trcrn(i,j,nt_bgc_N(1):nt_bgc_N(1)+n_algae*(nblyr+3)-1,:,iblk), & + zaeron=trcrn(i,j,nt_zaero(1):nt_zaero(1)+n_zaero*(nblyr+3)-1,:,iblk), & + trcrn_bgcsw=ztrcr_sw, & + TLAT=TLAT(i,j,iblk), TLON=TLON(i,j,iblk), & + calendar_type=calendar_type, & + days_per_year=days_per_year, & + nextsw_cday=nextsw_cday, yday=yday, & + sec=msec, & + kaer_tab=kaer_tab, kaer_bc_tab=kaer_bc_tab(:,:), & + waer_tab=waer_tab, waer_bc_tab=waer_bc_tab(:,:), & + gaer_tab=gaer_tab, gaer_bc_tab=gaer_bc_tab(:,:), & + bcenh=bcenh(:,:,:), & + modal_aero=modal_aero, & + swvdr=swvdr(i,j,iblk), swvdf=swvdf(i,j,iblk),& + swidr=swidr(i,j,iblk), swidf=swidf(i,j,iblk),& + coszen=coszen(i,j,iblk), fsnow=fsnow(i,j,iblk),& + alvdrn=alvdrn(i,j,:,iblk), alvdfn=alvdfn(i,j,:,iblk), & + alidrn=alidrn(i,j,:,iblk), alidfn=alidfn(i,j,:,iblk), & + fswsfcn=fswsfcn(i,j,:,iblk), fswintn=fswintn(i,j,:,iblk), & + fswthrun=fswthrun(i,j,:,iblk), & +!opt fswthrun_vdr=fswthrun_vdr(i,j,:,iblk), & +!opt fswthrun_vdf=fswthrun_vdf(i,j,:,iblk), & +!opt fswthrun_idr=fswthrun_idr(i,j,:,iblk), & +!opt fswthrun_idf=fswthrun_idf(i,j,:,iblk), & + fswpenln=fswpenln(i,j,:,:,iblk), & + Sswabsn=Sswabsn(i,j,:,:,iblk), Iswabsn=Iswabsn(i,j,:,:,iblk), & + albicen=albicen(i,j,:,iblk), albsnon=albsnon(i,j,:,iblk), & + albpndn=albpndn(i,j,:,iblk), apeffn=apeffn(i,j,:,iblk), & + snowfracn=snowfracn(i,j,:,iblk), & + dhsn=dhsn(i,j,:,iblk), ffracn=ffracn(i,j,:,iblk), & +!opt rsnow=rsnow(:,:), & + l_print_point=l_print_point, & + initonly = .true.) + endif + + !----------------------------------------------------------------- + ! Define aerosol tracer on shortwave grid + !----------------------------------------------------------------- + + if (dEdd_algae .and. (tr_zaero .or. tr_bgc_N)) then + do n = 1, ncat + do k = 1, nbtrcr_sw + trcrn_sw(i,j,k,n,iblk) = ztrcr_sw(k,n) + enddo + enddo + endif + + enddo ! i + enddo ! j + + !----------------------------------------------------------------- + ! Aggregate albedos + ! 
Match loop order in coupling_prep for same order of operations + !----------------------------------------------------------------- + + do n = 1, ncat + do j = jlo, jhi + do i = ilo, ihi + + if (aicen(i,j,n,iblk) > puny) then + + alvdf(i,j,iblk) = alvdf(i,j,iblk) & + + alvdfn(i,j,n,iblk)*aicen(i,j,n,iblk) + alidf(i,j,iblk) = alidf(i,j,iblk) & + + alidfn(i,j,n,iblk)*aicen(i,j,n,iblk) + alvdr(i,j,iblk) = alvdr(i,j,iblk) & + + alvdrn(i,j,n,iblk)*aicen(i,j,n,iblk) + alidr(i,j,iblk) = alidr(i,j,iblk) & + + alidrn(i,j,n,iblk)*aicen(i,j,n,iblk) + + netsw = swvdr(i,j,iblk) + swidr(i,j,iblk) & + + swvdf(i,j,iblk) + swidf(i,j,iblk) + if (netsw > puny) then ! sun above horizon + albice(i,j,iblk) = albice(i,j,iblk) & + + albicen(i,j,n,iblk)*aicen(i,j,n,iblk) + albsno(i,j,iblk) = albsno(i,j,iblk) & + + albsnon(i,j,n,iblk)*aicen(i,j,n,iblk) + albpnd(i,j,iblk) = albpnd(i,j,iblk) & + + albpndn(i,j,n,iblk)*aicen(i,j,n,iblk) + endif + + apeff_ai(i,j,iblk) = apeff_ai(i,j,iblk) & + + apeffn(i,j,n,iblk)*aicen(i,j,n,iblk) + snowfrac(i,j,iblk) = snowfrac(i,j,iblk) & + + snowfracn(i,j,n,iblk)*aicen(i,j,n,iblk) + + endif ! aicen > puny + + enddo ! i + enddo ! j + enddo ! ncat + + do j = 1, ny_block + do i = 1, nx_block + + !---------------------------------------------------------------- + ! Store grid box mean albedos and fluxes before scaling by aice + !---------------------------------------------------------------- + + alvdf_ai (i,j,iblk) = alvdf (i,j,iblk) + alidf_ai (i,j,iblk) = alidf (i,j,iblk) + alvdr_ai (i,j,iblk) = alvdr (i,j,iblk) + alidr_ai (i,j,iblk) = alidr (i,j,iblk) + + ! for history averaging +!echmod? cszn = c0 +!echmod if (coszen(i,j,iblk) > puny) cszn = c1 +!echmod do n = 1, nstreams +!echmod albcnt(i,j,iblk,n) = albcnt(i,j,iblk,n) + cszn +!echmod enddo + + !---------------------------------------------------------------- + ! Save net shortwave for scaling factor in scale_factor + !---------------------------------------------------------------- + if (runtype == 'initial' .and. .not. restart) then + scale_factor(i,j,iblk) = & + swvdr(i,j,iblk)*(c1 - alvdr_ai(i,j,iblk)) & + + swvdf(i,j,iblk)*(c1 - alvdf_ai(i,j,iblk)) & + + swidr(i,j,iblk)*(c1 - alidr_ai(i,j,iblk)) & + + swidf(i,j,iblk)*(c1 - alidf_ai(i,j,iblk)) + endif + + enddo ! i + enddo ! j + enddo ! iblk + + deallocate(ztrcr_sw) + deallocate(rsnow) + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + end subroutine init_shortwave + +!======================================================================= + +! Initialize ice age tracer (call prior to reading restart data) + + subroutine init_age(iage) + + real(kind=dbl_kind), dimension(:,:,:), intent(out) :: iage + character(len=*),parameter :: subname='(init_age)' + + iage(:,:,:) = c0 + + end subroutine init_age + +!======================================================================= + +! Initialize ice FY tracer (call prior to reading restart data) + + subroutine init_FY(firstyear) + + real(kind=dbl_kind), dimension(:,:,:), intent(out) :: firstyear + character(len=*),parameter :: subname='(init_FY)' + + firstyear(:,:,:) = c0 + + end subroutine init_FY + +!======================================================================= + +! 
Initialize ice lvl tracers (call prior to reading restart data) + + subroutine init_lvl(iblk, alvl, vlvl) + + use ice_constants, only: c0, c1 + use ice_arrays_column, only: ffracn, dhsn + + integer (kind=int_kind), intent(in) :: iblk + + real (kind=dbl_kind), dimension(:,:,:), intent(out) :: & + alvl , & ! level ice area fraction + vlvl ! level ice volume + character(len=*),parameter :: subname='(init_lvl)' + + alvl(:,:,:) = c1 ! level ice area fraction + vlvl(:,:,:) = c1 ! level ice volume + ffracn(:,:,:,iblk) = c0 + dhsn(:,:,:,iblk) = c0 + + end subroutine init_lvl + +!======================================================================= + +! Initialize melt ponds. + + subroutine init_meltponds_lvl(apnd, hpnd, ipnd, dhsn) + + real(kind=dbl_kind), dimension(:,:,:), intent(out) :: & + apnd , & ! melt pond area fraction + hpnd , & ! melt pond depth + ipnd , & ! melt pond refrozen lid thickness + dhsn ! depth difference for snow on sea ice and pond ice + character(len=*),parameter :: subname='(init_meltponds_lvl)' + + apnd(:,:,:) = c0 + hpnd(:,:,:) = c0 + ipnd(:,:,:) = c0 + dhsn(:,:,:) = c0 + + end subroutine init_meltponds_lvl + +!======================================================================= + +! Initialize melt ponds. + + subroutine init_meltponds_topo(apnd, hpnd, ipnd) + + real(kind=dbl_kind), dimension(:,:,:), intent(out) :: & + apnd , & ! melt pond area fraction + hpnd , & ! melt pond depth + ipnd ! melt pond refrozen lid thickness + character(len=*),parameter :: subname='(init_meltponds_topo)' + + apnd(:,:,:) = c0 + hpnd(:,:,:) = c0 + ipnd(:,:,:) = c0 + + end subroutine init_meltponds_topo + +!======================================================================= + +! Initialize snow redistribution/metamorphosis tracers (call prior to reading restart data) + + subroutine init_snowtracers(smice, smliq, rhos_cmp, rsnw) + + real(kind=dbl_kind), dimension(:,:,:,:), intent(out) :: & + smice, smliq, rhos_cmp, rsnw + character(len=*),parameter :: subname='(init_snowtracers)' + + real (kind=dbl_kind) :: & + rsnw_fall, & ! snow grain radius of new fallen snow (10^-6 m) + rhos ! snow density (kg/m^3) + + call icepack_query_parameters(rsnw_fall_out=rsnw_fall, rhos_out=rhos) + + rsnw (:,:,:,:) = rsnw_fall + rhos_cmp(:,:,:,:) = rhos + smice (:,:,:,:) = rhos + smliq (:,:,:,:) = c0 + + end subroutine init_snowtracers + +!======================================================================= + +! Initialize floe size distribution tracer (call prior to reading restart data) + + subroutine init_fsd(floesize) + + use ice_arrays_column, only: floe_rad_c, floe_binwidth, & + wavefreq, dwavefreq, wave_sig_ht, wave_spectrum, & + d_afsd_newi, d_afsd_latg, d_afsd_latm, d_afsd_wave, d_afsd_weld + use ice_domain_size, only: ncat, max_blocks, nfsd + use ice_init, only: ice_ic + use ice_state, only: aicen + + real(kind=dbl_kind), dimension(:,:,:,:,:), intent(out) :: & + floesize ! floe size distribution tracer + + ! local variables + + real (kind=dbl_kind), dimension(nfsd) :: & + afsd ! floe size distribution "profile" + + real (kind=dbl_kind), dimension(nfsd,ncat) :: & + afsdn ! floe size distribution "profile" + + real (kind=dbl_kind) :: puny + + integer (kind=int_kind) :: & + i, j, iblk , & ! horizontal indices + n, k ! 
category index + + logical (kind=log_kind) :: tr_fsd + + character(len=*), parameter :: subname='(init_fsd)' + + call icepack_query_parameters(puny_out=puny) + + wavefreq (:) = c0 + dwavefreq (:) = c0 + wave_sig_ht (:,:,:) = c0 + wave_spectrum (:,:,:,:) = c0 + d_afsd_newi (:,:,:,:) = c0 + d_afsd_latg (:,:,:,:) = c0 + d_afsd_latm (:,:,:,:) = c0 + d_afsd_wave (:,:,:,:) = c0 + d_afsd_weld (:,:,:,:) = c0 + + ! default: floes occupy the smallest size category in all thickness categories + afsdn(:,:) = c0 + afsdn(1,:) = c1 + floesize(:,:,:,:,:) = c0 + floesize(:,:,1,:,:) = c1 + + call icepack_query_tracer_flags(tr_fsd_out=tr_fsd) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__,line= __LINE__) + + if (tr_fsd) then + + ! initialize floe size distribution the same in every column and category + call icepack_init_fsd(nfsd, ice_ic, & + floe_rad_c, & ! fsd size bin centre in m (radius) + floe_binwidth, & ! fsd size bin width in m (radius) + afsd) ! floe size distribution + + do iblk = 1, max_blocks + do j = 1, ny_block + do i = 1, nx_block + do n = 1, ncat + do k = 1, nfsd + if (aicen(i,j,n,iblk) > puny) afsdn(k,n) = afsd(k) + enddo ! k + enddo ! n + + call icepack_cleanup_fsd (ncat, nfsd, afsdn) ! renormalize + + do n = 1, ncat + do k = 1, nfsd + floesize(i,j,k,n,iblk) = afsdn(k,n) + enddo ! k + enddo ! n + enddo ! i + enddo ! j + enddo ! iblk + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + endif ! tr_fsd + + end subroutine init_fsd + +!======================================================================= + +! Initialize isotope tracers (call prior to reading restart data) + + subroutine init_isotope(isosno, isoice) + + real(kind=dbl_kind), dimension(:,:,:,:), intent(out) :: & + isosno, isoice + character(len=*),parameter :: subname='(init_isotope)' + + isosno(:,:,:,:) = c0 + isoice(:,:,:,:) = c0 + + end subroutine init_isotope + +!======================================================================= + +! Initialize ice aerosol tracer (call prior to reading restart data) + + subroutine init_aerosol(aero) + + real(kind=dbl_kind), dimension(:,:,:,:), intent(out) :: & + aero ! aerosol tracers + character(len=*),parameter :: subname='(init_aerosol)' + + aero(:,:,:,:) = c0 + + end subroutine init_aerosol + +!======================================================================= + +! Initialize vertical profile for biogeochemistry + + subroutine init_bgc() + + use ice_arrays_column, only: zfswin, trcrn_sw, & + ocean_bio_all, ice_bio_net, snow_bio_net, & + cgrid, igrid, bphi, iDi, bTiz, iki, & + Rayleigh_criteria, Rayleigh_real + use ice_blocks, only: block, get_block + use ice_domain, only: nblocks, blocks_ice + use ice_flux, only: sss + use ice_flux_bgc, only: nit, amm, sil, dmsp, dms, algalN, & + doc, don, dic, fed, fep, zaeros, hum + use ice_forcing_bgc, only: init_bgc_data, get_forcing_bgc + use ice_restart_column, only: restart_zsal, & + read_restart_bgc, restart_bgc + use ice_state, only: trcrn + + ! local variables + + integer (kind=int_kind) :: & + i, j, iblk , & ! horizontal indices + ilo,ihi,jlo,jhi , & ! beginning and end of physical domain + k , & ! vertical index + n ! category index + + integer (kind=int_kind) :: & + max_nbtrcr, max_algae, max_don, max_doc, max_dic, max_aero, max_fe + + logical (kind=log_kind) :: & + RayleighC , & + solve_zsal + + type (block) :: & + this_block ! 
block information for current block + + real(kind=dbl_kind), allocatable :: & + trcrn_bgc(:,:) + + real(kind=dbl_kind), dimension(nilyr,ncat) :: & + sicen + + real(kind=dbl_kind) :: & + RayleighR + + integer (kind=int_kind) :: & + nbtrcr, ntrcr, ntrcr_o, & + nt_sice, nt_bgc_S + + character(len=*), parameter :: subname='(init_bgc)' + + ! Initialize + + call icepack_query_parameters(solve_zsal_out=solve_zsal) + call icepack_query_tracer_sizes(nbtrcr_out=nbtrcr, ntrcr_out=ntrcr, ntrcr_o_out=ntrcr_o) + call icepack_query_tracer_indices(nt_sice_out=nt_sice, nt_bgc_S_out=nt_bgc_S) + call icepack_query_tracer_sizes(max_nbtrcr_out=max_nbtrcr, & + max_algae_out=max_algae, max_don_out=max_don, max_doc_out=max_doc, & + max_dic_out=max_dic, max_aero_out=max_aero, max_fe_out=max_fe) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__,line= __LINE__) + + allocate(trcrn_bgc(ntrcr,ncat)) + + bphi(:,:,:,:,:) = c0 ! initial porosity for no ice + iDi (:,:,:,:,:) = c0 ! interface diffusivity + bTiz(:,:,:,:,:) = c0 ! initial bio grid ice temperature + iki (:,:,:,:,:) = c0 ! permeability + + ocean_bio_all(:,:,:,:) = c0 + ice_bio_net (:,:,:,:) = c0 ! integrated ice tracer conc (mmol/m^2 or mg/m^2) + snow_bio_net (:,:,:,:) = c0 ! integrated snow tracer conc (mmol/m^2 or mg/m^2) + zfswin (:,:,:,:,:) = c0 ! shortwave flux on bio grid + trcrn_sw (:,:,:,:,:) = c0 ! tracers active in the shortwave calculation + trcrn_bgc (:,:) = c0 + RayleighR = c0 + RayleighC = .false. + + !----------------------------------------------------------------- + ! zsalinity initialization + !----------------------------------------------------------------- + + if (solve_zsal) then ! default values + + !$OMP PARALLEL DO PRIVATE(iblk,i,j,k,n,ilo,ihi,jlo,jhi,this_block,trcrn_bgc) + do iblk = 1, nblocks + + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = jlo, jhi + do i = ilo, ihi + call icepack_init_zsalinity(nblyr=nblyr, ntrcr_o=ntrcr_o, & + Rayleigh_criteria = RayleighC, & + Rayleigh_real = RayleighR, & + trcrn_bgc = trcrn_bgc, & + nt_bgc_S = nt_bgc_S, & + ncat = ncat, & + sss = sss(i,j,iblk)) + if (.not. restart_zsal) then + Rayleigh_real (i,j,iblk) = RayleighR + Rayleigh_criteria(i,j,iblk) = RayleighC + do n = 1,ncat + do k = 1, nblyr + trcrn (i,j,nt_bgc_S+k-1, n,iblk) = & + trcrn_bgc( nt_bgc_S+k-1-ntrcr_o,n) + enddo + enddo + endif + enddo ! i + enddo ! j + enddo ! iblk + !$OMP END PARALLEL DO + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + endif ! solve_zsal + + if (.not. solve_zsal) restart_zsal = .false. + + !----------------------------------------------------------------- + ! biogeochemistry initialization + !----------------------------------------------------------------- + + if (.not. restart_bgc) then + + !----------------------------------------------------------------- + ! 
Initial Ocean Values if not coupled to the ocean bgc + !----------------------------------------------------------------- + !$OMP PARALLEL DO PRIVATE(iblk,i,j,n,ilo,ihi,jlo,jhi,this_block) + do iblk = 1, nblocks + + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = jlo, jhi + do i = ilo, ihi + call icepack_init_ocean_bio ( & + amm=amm (i,j, iblk), dmsp=dmsp(i,j, iblk), dms=dms(i,j, iblk), & + algalN=algalN(i,j,:,iblk), doc=doc (i,j,:,iblk), dic=dic(i,j,:,iblk), & + don=don (i,j,:,iblk), fed=fed (i,j,:,iblk), fep=fep(i,j,:,iblk), & + hum=hum (i,j, iblk), nit=nit (i,j, iblk), sil=sil(i,j, iblk), & + zaeros=zaeros(i,j,:,iblk), & + max_dic = icepack_max_dic, max_don = icepack_max_don, & + max_fe = icepack_max_fe, max_aero = icepack_max_aero) + enddo ! i + enddo ! j + + enddo ! iblk + !$OMP END PARALLEL DO + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + call init_bgc_data(fed(:,:,1,:),fep(:,:,1,:)) ! input dFe from file + call get_forcing_bgc ! defines nit and sil + + endif ! .not. restart + + !$OMP PARALLEL DO PRIVATE(iblk,i,j,n,ilo,ihi,jlo,jhi,this_block) + do iblk = 1, nblocks + + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = jlo, jhi + do i = ilo, ihi + + call icepack_load_ocean_bio_array(max_nbtrcr=icepack_max_nbtrcr, & + max_algae=icepack_max_algae, max_don=icepack_max_don, & + max_doc=icepack_max_doc, max_fe=icepack_max_fe, & + max_dic=icepack_max_dic, max_aero=icepack_max_aero, & + nit =nit (i,j, iblk), amm=amm(i,j, iblk), sil =sil (i,j, iblk), & + dmsp=dmsp(i,j, iblk), dms=dms(i,j, iblk), algalN=algalN(i,j,:,iblk), & + doc =doc (i,j,:,iblk), don=don(i,j,:,iblk), dic =dic (i,j,:,iblk), & + fed =fed (i,j,:,iblk), fep=fep(i,j,:,iblk), zaeros=zaeros(i,j,:,iblk), & + hum=hum (i,j, iblk), ocean_bio_all=ocean_bio_all(i,j,:,iblk)) + + enddo ! i + enddo ! j + + enddo ! iblk + !$OMP END PARALLEL DO + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + if (.not. restart_bgc) then + !$OMP PARALLEL DO PRIVATE(iblk,i,j,k,n,ilo,ihi,jlo,jhi,this_block,sicen,trcrn_bgc) + do iblk = 1, nblocks + + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = jlo, jhi + do i = ilo, ihi + do n = 1, ncat + do k = 1, nilyr + sicen(k,n) = trcrn(i,j,nt_sice+k-1,n,iblk) + enddo + do k = ntrcr_o+1, ntrcr + trcrn_bgc(k-ntrcr_o,n) = trcrn(i,j,k,n,iblk) + enddo + enddo + call icepack_init_bgc(ncat=ncat, nblyr=nblyr, nilyr=nilyr, ntrcr_o=ntrcr_o, & + cgrid=cgrid, igrid=igrid, ntrcr=ntrcr, nbtrcr=nbtrcr, & + sicen=sicen(:,:), trcrn=trcrn_bgc(:,:), sss=sss(i,j, iblk), & + ocean_bio_all=ocean_bio_all(i,j,:,iblk)) + enddo ! i + enddo ! j + enddo ! iblk + !$OMP END PARALLEL DO + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + endif ! .not. restart + + !----------------------------------------------------------------- + ! read restart to complete BGC initialization + !----------------------------------------------------------------- + + if (restart_zsal .or. 
restart_bgc) call read_restart_bgc + + deallocate(trcrn_bgc) + + end subroutine init_bgc + +!======================================================================= + +! Initialize brine height tracer + + subroutine init_hbrine() + + use ice_arrays_column, only: first_ice, bgrid, igrid, cgrid, & + icgrid, swgrid + use ice_state, only: trcrn + + real (kind=dbl_kind) :: phi_snow + integer (kind=int_kind) :: nt_fbri + logical (kind=log_kind) :: tr_brine + character(len=*), parameter :: subname='(init_hbrine)' + + call icepack_query_parameters(phi_snow_out=phi_snow) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__,line= __LINE__) + + call icepack_init_hbrine(bgrid=bgrid, igrid=igrid, cgrid=cgrid, icgrid=icgrid, & + swgrid=swgrid, nblyr=nblyr, nilyr=nilyr, phi_snow=phi_snow) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + call icepack_init_parameters(phi_snow_in=phi_snow) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__,line= __LINE__) + + call icepack_query_tracer_flags(tr_brine_out=tr_brine) + call icepack_query_tracer_indices(nt_fbri_out=nt_fbri) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__,line= __LINE__) + + first_ice(:,:,:,:) = .true. + if (tr_brine) trcrn(:,:,nt_fbri,:,:) = c1 + + end subroutine init_hbrine + +!======================================================================= + +! Namelist variables, set to default values; may be altered at run time +! +! author Elizabeth C. Hunke, LANL +! Nicole Jeffery, LANL + + subroutine input_zbgc + + use ice_arrays_column, only: restore_bgc, optics_file, optics_file_fieldname + use ice_broadcast, only: broadcast_scalar + use ice_restart_column, only: restart_bgc, restart_zsal, & + restart_hbrine + use ice_restart_shared, only: restart + + character (len=char_len) :: & + shortwave ! from icepack + + logical (kind=log_kind) :: & + tr_brine, & + tr_bgc_Nit, tr_bgc_Am, tr_bgc_Sil, & + tr_bgc_DMS, tr_bgc_PON, & + tr_bgc_N, tr_bgc_C, tr_bgc_chl, & + tr_bgc_DON, tr_bgc_Fe, tr_zaero, & + tr_bgc_hum, tr_aero + + integer (kind=int_kind) :: & + ktherm + + logical (kind=log_kind) :: & + solve_zsal, skl_bgc, z_tracers, scale_bgc, solve_zbgc, dEdd_algae, & + modal_aero + + character (char_len) :: & + bgc_flux_type + + integer (kind=int_kind) :: & + nml_error, & ! namelist i/o error flag + abort_flag + + character(len=*), parameter :: subname='(input_zbgc)' + + !----------------------------------------------------------------- + ! 
namelist variables + !----------------------------------------------------------------- + + namelist /zbgc_nml/ & + tr_brine, restart_hbrine, tr_zaero, modal_aero, skl_bgc, & + z_tracers, dEdd_algae, solve_zbgc, bgc_flux_type, & + restore_bgc, restart_bgc, scale_bgc, solve_zsal, restart_zsal, & + tr_bgc_Nit, tr_bgc_C, tr_bgc_chl, tr_bgc_Am, tr_bgc_Sil, & + tr_bgc_DMS, tr_bgc_PON, tr_bgc_hum, tr_bgc_DON, tr_bgc_Fe, & + grid_o, grid_o_t, l_sk, grid_oS, optics_file, optics_file_fieldname, & + l_skS, phi_snow, initbio_frac, frazil_scav, & + ratio_Si2N_diatoms , ratio_Si2N_sp , ratio_Si2N_phaeo , & + ratio_S2N_diatoms , ratio_S2N_sp , ratio_S2N_phaeo , & + ratio_Fe2C_diatoms , ratio_Fe2C_sp , ratio_Fe2C_phaeo , & + ratio_Fe2N_diatoms , ratio_Fe2N_sp , ratio_Fe2N_phaeo , & + ratio_Fe2DON , ratio_Fe2DOC_s , ratio_Fe2DOC_l , & + fr_resp , tau_min , tau_max , & + algal_vel , R_dFe2dust , dustFe_sol , & + chlabs_diatoms , chlabs_sp , chlabs_phaeo , & + alpha2max_low_diatoms,alpha2max_low_sp , alpha2max_low_phaeo, & + beta2max_diatoms , beta2max_sp , beta2max_phaeo , & + mu_max_diatoms , mu_max_sp , mu_max_phaeo , & + grow_Tdep_diatoms , grow_Tdep_sp , grow_Tdep_phaeo , & + fr_graze_diatoms , fr_graze_sp , fr_graze_phaeo , & + mort_pre_diatoms , mort_pre_sp , mort_pre_phaeo , & + mort_Tdep_diatoms , mort_Tdep_sp , mort_Tdep_phaeo , & + k_exude_diatoms , k_exude_sp , k_exude_phaeo , & + K_Nit_diatoms , K_Nit_sp , K_Nit_phaeo , & + K_Am_diatoms , K_Am_sp , K_Am_phaeo , & + K_Sil_diatoms , K_Sil_sp , K_Sil_phaeo , & + K_Fe_diatoms , K_Fe_sp , K_Fe_phaeo , & + f_don_protein , kn_bac_protein , f_don_Am_protein , & + f_doc_s , f_doc_l , f_exude_s , & + f_exude_l , k_bac_s , k_bac_l , & + T_max , fsal , op_dep_min , & + fr_graze_s , fr_graze_e , fr_mort2min , & + fr_dFe , k_nitrif , t_iron_conv , & + max_loss , max_dfe_doc1 , fr_resp_s , & + y_sk_DMS , t_sk_conv , t_sk_ox , & + algaltype_diatoms , algaltype_sp , algaltype_phaeo , & + nitratetype , ammoniumtype , silicatetype , & + dmspptype , dmspdtype , humtype , & + doctype_s , doctype_l , dontype_protein , & + fedtype_1 , feptype_1 , zaerotype_bc1 , & + zaerotype_bc2 , zaerotype_dust1 , zaerotype_dust2 , & + zaerotype_dust3 , zaerotype_dust4 , ratio_C2N_diatoms , & + ratio_C2N_sp , ratio_C2N_phaeo , ratio_chl2N_diatoms, & + ratio_chl2N_sp , ratio_chl2N_phaeo , F_abs_chl_diatoms , & + F_abs_chl_sp , F_abs_chl_phaeo , ratio_C2N_proteins + + !----------------------------------------------------------------- + + abort_flag = 0 + + call icepack_query_tracer_flags(tr_aero_out=tr_aero) + call icepack_query_parameters(ktherm_out=ktherm, shortwave_out=shortwave) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + !----------------------------------------------------------------- + ! default values + !----------------------------------------------------------------- + tr_brine = .false. ! brine height differs from ice height + tr_zaero = .false. ! z aerosol tracers + modal_aero = .false. ! use modal aerosol treatment of aerosols + optics_file = 'unknown_optics_file' ! modal aerosol optics file + optics_file_fieldname = 'unknown_optics_fieldname' ! modal aerosol optics file fieldname + restore_bgc = .false. ! restore bgc if true + solve_zsal = .false. ! update salinity tracer profile from solve_S_dt + restart_bgc = .false. ! biogeochemistry restart + restart_zsal = .false. ! salinity restart + restart_hbrine = .false. ! hbrine restart + scale_bgc = .false. ! 
initial bgc tracers proportional to S
+ skl_bgc = .false. ! solve skeletal biochemistry
+ z_tracers = .false. ! solve vertically resolved tracers
+ dEdd_algae = .false. ! dynamic algae contributes to shortwave absorption
+ ! in delta-Eddington calculation
+ solve_zbgc = .false. ! turn on z layer biochemistry
+ tr_bgc_PON = .false. !---------------------------------------------
+ tr_bgc_Nit = .false. ! biogeochemistry (skl or zbgc)
+ tr_bgc_C = .false. ! if skl_bgc = .true. then skl
+ tr_bgc_chl = .false. ! if z_tracers = .true. then vertically resolved
+ tr_bgc_Sil = .false. ! if z_tracers + solve_zbgc = .true. then
+ tr_bgc_Am = .false. ! vertically resolved with reactions
+ tr_bgc_DMS = .false. !------------------------------------------------
+ tr_bgc_DON = .false. !
+ tr_bgc_hum = .false. !
+ tr_bgc_Fe = .false. !
+ tr_bgc_N = .true. !
+
+ ! brine height parameter
+ phi_snow = p5 ! snow porosity
+
+ ! skl biology parameters
+ bgc_flux_type = 'Jin2006' ! type of ocean-ice piston velocity ('constant')
+
+ ! z biology parameters
+ grid_o = c5 ! for bottom flux
+ grid_o_t = c5 ! for top flux
+ l_sk = 7.0_dbl_kind ! characteristic diffusive scale (m)
+ initbio_frac = c1 ! fraction of ocean trcr concentration in bio trcrs
+ frazil_scav = c1 ! increase in initial bio tracer from ocean scavenging
+ ratio_Si2N_diatoms = 1.8_dbl_kind ! algal Si to N (mol/mol)
+ ratio_Si2N_sp = c0 ! diatoms, small plankton, phaeocystis
+ ratio_Si2N_phaeo = c0
+ ratio_S2N_diatoms = 0.03_dbl_kind ! algal S to N (mol/mol)
+ ratio_S2N_sp = 0.03_dbl_kind
+ ratio_S2N_phaeo = 0.03_dbl_kind
+ ratio_Fe2C_diatoms = 0.0033_dbl_kind ! algal Fe to C (umol/mol)
+ ratio_Fe2C_sp = 0.0033_dbl_kind
+ ratio_Fe2C_phaeo = p1
+ ratio_Fe2N_diatoms = 0.023_dbl_kind ! algal Fe to N (umol/mol)
+ ratio_Fe2N_sp = 0.023_dbl_kind
+ ratio_Fe2N_phaeo = 0.7_dbl_kind
+ ratio_Fe2DON = 0.023_dbl_kind ! Fe to N of DON (nmol/umol)
+ ratio_Fe2DOC_s = p1 ! Fe to C of DOC (nmol/umol) saccharides
+ ratio_Fe2DOC_l = 0.033_dbl_kind ! Fe to C of DOC (nmol/umol) lipids
+ fr_resp = 0.05_dbl_kind ! fraction of algal growth lost to respiration
+ tau_min = 5200.0_dbl_kind ! rapid mobile to stationary exchanges (s)
+ tau_max = 1.73e5_dbl_kind ! long time mobile to stationary exchanges (s)
+ algal_vel = 1.11e-8_dbl_kind ! 0.5 cm/d in m/s (Lavoie 2005: 1.5 cm/day)
+ R_dFe2dust = 0.035_dbl_kind ! g/g (3.5% content) Tagliabue 2009
+ dustFe_sol = 0.005_dbl_kind ! solubility fraction
+ chlabs_diatoms = 0.03_dbl_kind ! chl absorption (1/m/(mg/m^3))
+ chlabs_sp = 0.01_dbl_kind
+ chlabs_phaeo = 0.05_dbl_kind
+ alpha2max_low_diatoms = 0.8_dbl_kind ! light limitation (1/(W/m^2))
+ alpha2max_low_sp = 0.67_dbl_kind
+ alpha2max_low_phaeo = 0.67_dbl_kind
+ beta2max_diatoms = 0.018_dbl_kind ! light inhibition (1/(W/m^2))
+ beta2max_sp = 0.0025_dbl_kind
+ beta2max_phaeo = 0.01_dbl_kind
+ mu_max_diatoms = 1.2_dbl_kind ! maximum growth rate (1/day)
+ mu_max_sp = 0.851_dbl_kind
+ mu_max_phaeo = 0.851_dbl_kind
+ grow_Tdep_diatoms = 0.06_dbl_kind ! temperature dependence of growth (1/C)
+ grow_Tdep_sp = 0.06_dbl_kind
+ grow_Tdep_phaeo = 0.06_dbl_kind
+ fr_graze_diatoms = 0.01_dbl_kind ! fraction grazed
+ fr_graze_sp = p1
+ fr_graze_phaeo = p1
+ mort_pre_diatoms = 0.007_dbl_kind ! mortality (1/day)
+ mort_pre_sp = 0.007_dbl_kind
+ mort_pre_phaeo = 0.007_dbl_kind
+ mort_Tdep_diatoms = 0.03_dbl_kind ! T dependence of mortality (1/C)
+ mort_Tdep_sp = 0.03_dbl_kind
+ mort_Tdep_phaeo = 0.03_dbl_kind
+ k_exude_diatoms = c0 ! 
algal exudation (1/d)
+ k_exude_sp = c0
+ k_exude_phaeo = c0
+ K_Nit_diatoms = c1 ! nitrate half saturation (mmol/m^3)
+ K_Nit_sp = c1
+ K_Nit_phaeo = c1
+ K_Am_diatoms = 0.3_dbl_kind ! ammonium half saturation (mmol/m^3)
+ K_Am_sp = 0.3_dbl_kind
+ K_Am_phaeo = 0.3_dbl_kind
+ K_Sil_diatoms = 4.0_dbl_kind ! silicate half saturation (mmol/m^3)
+ K_Sil_sp = c0
+ K_Sil_phaeo = c0
+ K_Fe_diatoms = c1 ! iron half saturation (nM)
+ K_Fe_sp = 0.2_dbl_kind
+ K_Fe_phaeo = p1
+ f_don_protein = 0.6_dbl_kind ! fraction of spilled grazing to proteins
+ kn_bac_protein = 0.03_dbl_kind ! bacterial degradation of DON (1/d)
+ f_don_Am_protein = 0.25_dbl_kind ! fraction of remineralized DON to ammonium
+ f_doc_s = 0.4_dbl_kind ! fraction of mortality to DOC
+ f_doc_l = 0.4_dbl_kind
+ f_exude_s = c1 ! fraction of exudation to DOC
+ f_exude_l = c1
+ k_bac_s = 0.03_dbl_kind ! bacterial degradation of DOC (1/d)
+ k_bac_l = 0.03_dbl_kind
+ T_max = c0 ! maximum temperature (C)
+ fsal = c1 ! salinity limitation (ppt)
+ op_dep_min = p1 ! light attenuates for optical depths exceeding min
+ fr_graze_s = p5 ! fraction of grazing spilled or slopped
+ fr_graze_e = p5 ! fraction of assimilation excreted
+ fr_mort2min = p5 ! fraction of mortality to Am
+ fr_dFe = 0.3_dbl_kind ! fraction of remineralized nitrogen
+ ! (in units of algal iron)
+ k_nitrif = c0 ! nitrification rate (1/day)
+ t_iron_conv = 3065.0_dbl_kind ! desorption loss pFe to dFe (day)
+ max_loss = 0.9_dbl_kind ! restrict uptake to % of remaining value
+ max_dfe_doc1 = 0.2_dbl_kind ! max ratio of dFe to saccharides in the ice
+ ! (nM Fe/muM C)
+ fr_resp_s = 0.75_dbl_kind ! DMSPd fraction of respiration loss as DMSPd
+ y_sk_DMS = p5 ! fraction conversion given high yield
+ t_sk_conv = 3.0_dbl_kind ! Stefels conversion time (d)
+ t_sk_ox = 10.0_dbl_kind ! DMS oxidation time (d)
+ algaltype_diatoms = c0 ! ------------------
+ algaltype_sp = p5 !
+ algaltype_phaeo = p5 !
+ nitratetype = -c1 ! mobility type between
+ ammoniumtype = c1 ! stationary <--> mobile
+ silicatetype = -c1 !
+ dmspptype = p5 !
+ dmspdtype = -c1 !
+ humtype = c1 !
+ doctype_s = p5 !
+ doctype_l = p5 !
+ dontype_protein = p5 !
+ fedtype_1 = p5 !
+ feptype_1 = p5 !
+ zaerotype_bc1 = c1 !
+ zaerotype_bc2 = c1 !
+ zaerotype_dust1 = c1 !
+ zaerotype_dust2 = c1 !
+ zaerotype_dust3 = c1 !
+ zaerotype_dust4 = c1 !--------------------
+ ratio_C2N_diatoms = 7.0_dbl_kind ! algal C to N ratio (mol/mol)
+ ratio_C2N_sp = 7.0_dbl_kind
+ ratio_C2N_phaeo = 7.0_dbl_kind
+ ratio_chl2N_diatoms = 2.1_dbl_kind ! algal chlorophyll to N ratio (mg/mmol)
+ ratio_chl2N_sp = 1.1_dbl_kind
+ ratio_chl2N_phaeo = 0.84_dbl_kind
+ F_abs_chl_diatoms = 2.0_dbl_kind ! scales absorbed radiation for dEdd
+ F_abs_chl_sp = 4.0_dbl_kind
+ F_abs_chl_phaeo = 5.0_dbl_kind
+ ratio_C2N_proteins = 7.0_dbl_kind ! ratio of C to N in proteins (mol/mol)
+
+ ! z salinity parameters
+ grid_oS = c5 ! for bottom flux
+ l_skS = 7.0_dbl_kind ! characteristic diffusive scale (m)
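+
+ ! The defaults above can be overridden through the zbgc_nml namelist
+ ! read below from nml_filename. A minimal sketch (illustrative values
+ ! only, not recommendations) enabling skeletal bgc might look like:
+ !
+ !   &zbgc_nml
+ !     skl_bgc    = .true.
+ !     tr_bgc_N   = .true.
+ !     tr_bgc_Nit = .true.
+ !   /
+
+ !-----------------------------------------------------------------
+ ! 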
read from input file + !----------------------------------------------------------------- + + if (my_task == master_task) then + write(nu_diag,*) subname,' Reading zbgc_nml' + + call get_fileunit(nu_nml) + open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: zbgc_nml open file '// & + trim(nml_filename), & + file=__FILE__, line=__LINE__) + endif + + nml_error = 1 + do while (nml_error > 0) + read(nu_nml, nml=zbgc_nml,iostat=nml_error) + end do + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: zbgc_nml reading ', & + file=__FILE__, line=__LINE__) + endif + close(nu_nml) + call release_fileunit(nu_nml) + endif + + !----------------------------------------------------------------- + ! broadcast + !----------------------------------------------------------------- + + call broadcast_scalar(solve_zsal, master_task) + call broadcast_scalar(restart_zsal, master_task) + call broadcast_scalar(tr_brine, master_task) + call broadcast_scalar(restart_hbrine, master_task) + + call broadcast_scalar(phi_snow, master_task) + call broadcast_scalar(grid_oS, master_task) + call broadcast_scalar(l_skS, master_task) + + call broadcast_scalar(solve_zbgc, master_task) + call broadcast_scalar(skl_bgc, master_task) + call broadcast_scalar(restart_bgc, master_task) + call broadcast_scalar(bgc_flux_type, master_task) + call broadcast_scalar(restore_bgc, master_task) + call broadcast_scalar(tr_bgc_N, master_task) + call broadcast_scalar(tr_bgc_C, master_task) + call broadcast_scalar(tr_bgc_chl, master_task) + call broadcast_scalar(tr_bgc_Nit, master_task) + call broadcast_scalar(tr_bgc_Am, master_task) + call broadcast_scalar(tr_bgc_Sil, master_task) + call broadcast_scalar(tr_bgc_hum, master_task) + call broadcast_scalar(tr_bgc_DMS, master_task) + call broadcast_scalar(tr_bgc_PON, master_task) + call broadcast_scalar(tr_bgc_DON, master_task) + call broadcast_scalar(tr_bgc_Fe, master_task) + + call broadcast_scalar(z_tracers, master_task) + call broadcast_scalar(tr_zaero, master_task) + call broadcast_scalar(dEdd_algae, master_task) + call broadcast_scalar(modal_aero, master_task) + call broadcast_scalar(optics_file, master_task) + call broadcast_scalar(optics_file_fieldname, master_task) + call broadcast_scalar(grid_o, master_task) + call broadcast_scalar(grid_o_t, master_task) + call broadcast_scalar(l_sk, master_task) + call broadcast_scalar(scale_bgc, master_task) + call broadcast_scalar(initbio_frac, master_task) + call broadcast_scalar(frazil_scav, master_task) + call broadcast_scalar(ratio_Si2N_diatoms, master_task) + call broadcast_scalar(ratio_Si2N_sp, master_task) + call broadcast_scalar(ratio_Si2N_phaeo, master_task) + call broadcast_scalar(ratio_S2N_diatoms, master_task) + call broadcast_scalar(ratio_S2N_sp, master_task) + call broadcast_scalar(ratio_S2N_phaeo, master_task) + call broadcast_scalar(ratio_Fe2C_diatoms, master_task) + call broadcast_scalar(ratio_Fe2C_sp, master_task) + call broadcast_scalar(ratio_Fe2C_phaeo, master_task) + call broadcast_scalar(ratio_Fe2N_diatoms, master_task) + call broadcast_scalar(ratio_Fe2N_sp, master_task) + call broadcast_scalar(ratio_Fe2N_phaeo, master_task) + call broadcast_scalar(ratio_Fe2DON , master_task) + call broadcast_scalar(ratio_Fe2DOC_s , master_task) + call broadcast_scalar(ratio_Fe2DOC_l , master_task) + call broadcast_scalar(fr_resp , master_task) + call broadcast_scalar(tau_min , master_task) + call broadcast_scalar(tau_max , master_task) + call 
broadcast_scalar(algal_vel , master_task) + call broadcast_scalar(R_dFe2dust , master_task) + call broadcast_scalar(dustFe_sol , master_task) + call broadcast_scalar(chlabs_diatoms , master_task) + call broadcast_scalar(chlabs_sp , master_task) + call broadcast_scalar(chlabs_phaeo , master_task) + call broadcast_scalar(alpha2max_low_diatoms , master_task) + call broadcast_scalar(alpha2max_low_sp , master_task) + call broadcast_scalar(alpha2max_low_phaeo , master_task) + call broadcast_scalar(beta2max_diatoms , master_task) + call broadcast_scalar(beta2max_sp , master_task) + call broadcast_scalar(beta2max_phaeo , master_task) + call broadcast_scalar(mu_max_diatoms , master_task) + call broadcast_scalar(mu_max_sp , master_task) + call broadcast_scalar(mu_max_phaeo , master_task) + call broadcast_scalar(grow_Tdep_diatoms, master_task) + call broadcast_scalar(grow_Tdep_sp , master_task) + call broadcast_scalar(grow_Tdep_phaeo , master_task) + call broadcast_scalar(fr_graze_diatoms , master_task) + call broadcast_scalar(fr_graze_sp , master_task) + call broadcast_scalar(fr_graze_phaeo , master_task) + call broadcast_scalar(mort_pre_diatoms , master_task) + call broadcast_scalar(mort_pre_sp , master_task) + call broadcast_scalar(mort_pre_phaeo , master_task) + call broadcast_scalar(mort_Tdep_diatoms, master_task) + call broadcast_scalar(mort_Tdep_sp , master_task) + call broadcast_scalar(mort_Tdep_phaeo , master_task) + call broadcast_scalar(k_exude_diatoms , master_task) + call broadcast_scalar(k_exude_sp , master_task) + call broadcast_scalar(k_exude_phaeo , master_task) + call broadcast_scalar(K_Nit_diatoms , master_task) + call broadcast_scalar(K_Nit_sp , master_task) + call broadcast_scalar(K_Nit_phaeo , master_task) + call broadcast_scalar(K_Am_diatoms , master_task) + call broadcast_scalar(K_Am_sp , master_task) + call broadcast_scalar(K_Am_phaeo , master_task) + call broadcast_scalar(K_Sil_diatoms , master_task) + call broadcast_scalar(K_Sil_sp , master_task) + call broadcast_scalar(K_Sil_phaeo , master_task) + call broadcast_scalar(K_Fe_diatoms , master_task) + call broadcast_scalar(K_Fe_sp , master_task) + call broadcast_scalar(K_Fe_phaeo , master_task) + call broadcast_scalar(f_don_protein , master_task) + call broadcast_scalar(kn_bac_protein , master_task) + call broadcast_scalar(f_don_Am_protein , master_task) + call broadcast_scalar(f_doc_s , master_task) + call broadcast_scalar(f_doc_l , master_task) + call broadcast_scalar(f_exude_s , master_task) + call broadcast_scalar(f_exude_l , master_task) + call broadcast_scalar(k_bac_s , master_task) + call broadcast_scalar(k_bac_l , master_task) + call broadcast_scalar(T_max , master_task) + call broadcast_scalar(fsal , master_task) + call broadcast_scalar(op_dep_min , master_task) + call broadcast_scalar(fr_graze_s , master_task) + call broadcast_scalar(fr_graze_e , master_task) + call broadcast_scalar(fr_mort2min , master_task) + call broadcast_scalar(fr_dFe , master_task) + call broadcast_scalar(k_nitrif , master_task) + call broadcast_scalar(t_iron_conv , master_task) + call broadcast_scalar(max_loss , master_task) + call broadcast_scalar(max_dfe_doc1 , master_task) + call broadcast_scalar(fr_resp_s , master_task) + call broadcast_scalar(y_sk_DMS , master_task) + call broadcast_scalar(t_sk_conv , master_task) + call broadcast_scalar(t_sk_ox , master_task) + call broadcast_scalar(algaltype_diatoms, master_task) + call broadcast_scalar(algaltype_sp , master_task) + call broadcast_scalar(algaltype_phaeo , master_task) + call 
broadcast_scalar(nitratetype , master_task) + call broadcast_scalar(ammoniumtype , master_task) + call broadcast_scalar(silicatetype , master_task) + call broadcast_scalar(dmspptype , master_task) + call broadcast_scalar(dmspdtype , master_task) + call broadcast_scalar(humtype , master_task) + call broadcast_scalar(doctype_s , master_task) + call broadcast_scalar(doctype_l , master_task) + call broadcast_scalar(dontype_protein , master_task) + call broadcast_scalar(fedtype_1 , master_task) + call broadcast_scalar(feptype_1 , master_task) + call broadcast_scalar(zaerotype_bc1 , master_task) + call broadcast_scalar(zaerotype_bc2 , master_task) + call broadcast_scalar(zaerotype_dust1 , master_task) + call broadcast_scalar(zaerotype_dust2 , master_task) + call broadcast_scalar(zaerotype_dust3 , master_task) + call broadcast_scalar(zaerotype_dust4 , master_task) + call broadcast_scalar(ratio_C2N_diatoms , master_task) + call broadcast_scalar(ratio_C2N_sp , master_task) + call broadcast_scalar(ratio_C2N_phaeo , master_task) + call broadcast_scalar(ratio_chl2N_diatoms, master_task) + call broadcast_scalar(ratio_chl2N_sp , master_task) + call broadcast_scalar(ratio_chl2N_phaeo , master_task) + call broadcast_scalar(F_abs_chl_diatoms , master_task) + call broadcast_scalar(F_abs_chl_sp , master_task) + call broadcast_scalar(F_abs_chl_phaeo , master_task) + call broadcast_scalar(ratio_C2N_proteins , master_task) + + !----------------------------------------------------------------- + ! zsalinity and brine + !----------------------------------------------------------------- + + if (.not.restart) then + if (my_task == master_task) & + write(nu_diag,*) subname//' WARNING: restart = false, setting bgc restart flags to false' + restart_bgc = .false. + restart_hbrine = .false. + restart_zsal = .false. + endif + + if (solve_zsal) then + if (my_task == master_task) then + write(nu_diag,*) subname,' ERROR: solve_zsal=T deprecated' + endif + abort_flag = 101 + endif + +#ifdef UNDEPRECATE_ZSAL + if (solve_zsal .and. nblyr < 1) then + if (my_task == master_task) then + write(nu_diag,*) subname,' ERROR: solve_zsal=T but 0 zsalinity tracers' + endif + abort_flag = 101 + endif + + if (solve_zsal .and. ((.not. tr_brine) .or. (ktherm /= 1))) then + if (my_task == master_task) then + write(nu_diag,*) subname,' ERROR: solve_zsal needs tr_brine=T and ktherm=1' + endif + abort_flag = 102 + endif +#endif + + if (tr_brine .and. nblyr < 1 ) then + if (my_task == master_task) then + write(nu_diag,*) subname,' ERROR: tr_brine=T but no biology layers compiled' + endif + abort_flag = 103 + endif + + !----------------------------------------------------------------- + ! biogeochemistry + !----------------------------------------------------------------- + + if (.not. tr_brine) then + if (solve_zbgc) then + if (my_task == master_task) then + write(nu_diag,*) subname,' ERROR: tr_brine = F and solve_zbgc = T' + endif + abort_flag = 104 + endif + if (tr_zaero) then + if (my_task == master_task) then + write(nu_diag,*) subname,' ERROR: tr_brine = F and tr_zaero = T' + endif + abort_flag = 105 + endif + endif + + if ((skl_bgc .AND. solve_zbgc) .or. (skl_bgc .AND. z_tracers)) then + if (my_task == master_task) then + write(nu_diag,*) subname,' ERROR: skl_bgc and solve_zbgc or z_tracers are both true' + endif + abort_flag = 106 + endif + + if (skl_bgc .AND. 
tr_zaero) then
+ if (my_task == master_task) then
+ write(nu_diag,*) subname,' ERROR: skl_bgc does not use vertical tracers'
+ endif
+ abort_flag = 107
+ endif
+
+ if (dEdd_algae .AND. trim(shortwave) /= 'dEdd') then
+ if (my_task == master_task) then
+ write(nu_diag,*) subname,' ERROR: dEdd_algae = T but shortwave /= dEdd'
+ endif
+ abort_flag = 108
+ endif
+
+ if (dEdd_algae .AND. (.NOT. tr_bgc_N) .AND. (.NOT. tr_zaero)) then
+ if (my_task == master_task) then
+ write(nu_diag,*) subname,' ERROR: need tr_bgc_N or tr_zaero for dEdd_algae'
+ endif
+ abort_flag = 109
+ endif
+
+ if (modal_aero .AND. (.NOT. tr_zaero) .AND. (.NOT. tr_aero)) then
+ if (my_task == master_task) then
+ write(nu_diag,*) subname,' ERROR: modal_aero = T requires tr_zaero or tr_aero'
+ endif
+ abort_flag = 110
+ endif
+
+ if (modal_aero .AND. trim(shortwave) /= 'dEdd') then
+ if (my_task == master_task) then
+ write(nu_diag,*) subname,' ERROR: modal_aero = T but shortwave /= dEdd'
+ endif
+ abort_flag = 111
+ endif
+ if (n_algae > icepack_max_algae) then
+ if (my_task == master_task) then
+ write(nu_diag,*) subname//'ERROR: number of algal types exceeds icepack_max_algae'
+ endif
+ abort_flag = 112
+ endif
+ if (n_doc > icepack_max_doc) then
+ if (my_task == master_task) then
+ write(nu_diag,*) subname//'ERROR: number of doc types exceeds icepack_max_doc'
+ endif
+ abort_flag = 113
+ endif
+ if (n_dic > icepack_max_dic) then
+ if (my_task == master_task) then
+ write(nu_diag,*) subname//'ERROR: number of dic types exceeds icepack_max_dic'
+ endif
+ abort_flag = 114
+ endif
+ if (n_don > icepack_max_don) then
+ if (my_task == master_task) then
+ write(nu_diag,*) subname//'ERROR: number of don types exceeds icepack_max_don'
+ endif
+ abort_flag = 115
+ endif
+ if (n_fed > icepack_max_fe ) then
+ if (my_task == master_task) then
+ write(nu_diag,*) subname//'ERROR: number of dissolved fe types exceeds icepack_max_fe '
+ endif
+ abort_flag = 116
+ endif
+ if (n_fep > icepack_max_fe ) then
+ if (my_task == master_task) then
+ write(nu_diag,*) subname//'ERROR: number of particulate fe types exceeds icepack_max_fe '
+ endif
+ abort_flag = 117
+ endif
+
+ if (n_algae == 0 .and. skl_bgc) then
+ if (my_task == master_task) then
+ write(nu_diag,*) subname//'ERROR: skl_bgc=T but 0 bgc or algal tracers compiled'
+ endif
+ abort_flag = 118
+ endif
+
+ if (n_algae == 0 .and. solve_zbgc) then
+ if (my_task == master_task) then
+ write(nu_diag,*) subname//'ERROR: solve_zbgc=T but 0 zbgc or algal tracers compiled'
+ endif
+ abort_flag = 119
+ endif
+
+ if (solve_zbgc .and. .not. z_tracers) then
+ if (my_task == master_task) then
+ write(nu_diag,*) subname//'ERROR: solve_zbgc=T but not z_tracers'
+ endif
+ abort_flag = 120
+ endif
+
+ if (skl_bgc .or. solve_zbgc) then
+ if (.not. tr_bgc_N) then
+ if (my_task == master_task) then
+ write(nu_diag,*) subname//'ERROR: tr_bgc_N must be on for bgc'
+ endif
+ abort_flag = 121
+ endif
+ if (.not. tr_bgc_Nit) then
+ if (my_task == master_task) then
+ write(nu_diag,*) subname//'ERROR: tr_bgc_Nit must be on for bgc'
+ endif
+ abort_flag = 122
+ endif
+ else
+ ! tcraig, allow bgc to be turned off in this case?
+ tr_bgc_N = .false.
+ tr_bgc_C = .false.
+ tr_bgc_chl = .false.
+ tr_bgc_Nit = .false.
+ tr_bgc_Am = .false.
+ tr_bgc_Sil = .false.
+ tr_bgc_hum = .false.
+ tr_bgc_DMS = .false.
+ tr_bgc_PON = .false.
+ tr_bgc_DON = .false.
+ tr_bgc_Fe = .false.
+ endif
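+
+ ! When neither skl_bgc nor solve_zbgc is active, all bgc tracer
+ ! flags are forced to .false. above, so count_tracers allocates
+ ! no tracer indices for the disabled schemes.
+
+ !-----------------------------------------------------------------
+ ! 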
z layer aerosols + !----------------------------------------------------------------- + if (tr_zaero .and. .not. z_tracers) then + if (my_task == master_task) then + write(nu_diag,*) subname//'ERROR: tr_zaero and not z_tracers' + endif + abort_flag = 123 + endif + + if (n_zaero > icepack_max_aero) then + if (my_task == master_task) then + write(nu_diag,*) subname//'ERROR: number of z aerosols exceeds icepack_max_aero' + endif + abort_flag = 124 + endif + + !----------------------------------------------------------------- + ! output + !----------------------------------------------------------------- + + if (my_task == master_task) then + write(nu_diag,1010) ' tr_brine = ', tr_brine + if (tr_brine) then + write(nu_diag,1010) ' restart_hbrine = ', restart_hbrine + write(nu_diag,1005) ' phi_snow = ', phi_snow + endif + write(nu_diag,1010) ' solve_zsal = ', solve_zsal + if (solve_zsal) then + write(nu_diag,1010) ' restart_zsal = ', restart_zsal + write(nu_diag,1000) ' grid_oS = ', grid_oS + write(nu_diag,1005) ' l_skS = ', l_skS + endif + + write(nu_diag,1010) ' skl_bgc = ', skl_bgc + write(nu_diag,1010) ' restart_bgc = ', restart_bgc + write(nu_diag,1010) ' tr_bgc_N = ', tr_bgc_N + write(nu_diag,1010) ' tr_bgc_C = ', tr_bgc_C + write(nu_diag,1010) ' tr_bgc_chl = ', tr_bgc_chl + write(nu_diag,1010) ' tr_bgc_Nit = ', tr_bgc_Nit + write(nu_diag,1010) ' tr_bgc_Am = ', tr_bgc_Am + write(nu_diag,1010) ' tr_bgc_Sil = ', tr_bgc_Sil + write(nu_diag,1010) ' tr_bgc_hum = ', tr_bgc_hum + write(nu_diag,1010) ' tr_bgc_DMS = ', tr_bgc_DMS + write(nu_diag,1010) ' tr_bgc_PON = ', tr_bgc_PON + write(nu_diag,1010) ' tr_bgc_DON = ', tr_bgc_DON + write(nu_diag,1010) ' tr_bgc_Fe = ', tr_bgc_Fe + write(nu_diag,1020) ' n_aero = ', n_aero + write(nu_diag,1020) ' n_zaero = ', n_zaero + write(nu_diag,1020) ' n_algae = ', n_algae + write(nu_diag,1020) ' n_doc = ', n_doc + write(nu_diag,1020) ' n_dic = ', n_dic + write(nu_diag,1020) ' n_don = ', n_don + write(nu_diag,1020) ' n_fed = ', n_fed + write(nu_diag,1020) ' n_fep = ', n_fep + + if (skl_bgc) then + + write(nu_diag,1030) ' bgc_flux_type = ', bgc_flux_type + write(nu_diag,1010) ' restore_bgc = ', restore_bgc + + elseif (z_tracers) then + + write(nu_diag,1010) ' dEdd_algae = ', dEdd_algae + write(nu_diag,1010) ' modal_aero = ', modal_aero + write(nu_diag,1010) ' scale_bgc = ', scale_bgc + write(nu_diag,1010) ' solve_zbgc = ', solve_zbgc + write(nu_diag,1010) ' tr_zaero = ', tr_zaero + write(nu_diag,1020) ' number of aerosols = ', n_zaero + write(nu_diag,1031) ' optics_file = ', trim(optics_file) + write(nu_diag,1031) ' optics_file_fieldname = ', trim(optics_file_fieldname) + ! bio parameters + write(nu_diag,1000) ' grid_o = ', grid_o + write(nu_diag,1000) ' grid_o_t = ', grid_o_t + write(nu_diag,1005) ' l_sk = ', l_sk + write(nu_diag,1000) ' initbio_frac = ', initbio_frac + write(nu_diag,1000) ' frazil_scav = ', frazil_scav + + endif ! skl_bgc or solve_bgc + endif + + !----------------------------------------------------------------- + ! abort if abort flag is set + !----------------------------------------------------------------- + + if (abort_flag /= 0) then + call flush_fileunit(nu_diag) + endif + call ice_barrier() + if (abort_flag /= 0) then + write(nu_diag,*) subname,' ERROR: abort_flag=',abort_flag + call abort_ice (subname//' ABORTING on input ERRORS', & + file=__FILE__, line=__LINE__) + endif + + !----------------------------------------------------------------- + ! 
set values in icepack
+ !-----------------------------------------------------------------
+
+ call icepack_init_parameters( &
+ ktherm_in=ktherm, shortwave_in=shortwave, solve_zsal_in=solve_zsal, &
+ skl_bgc_in=skl_bgc, z_tracers_in=z_tracers, scale_bgc_in=scale_bgc, &
+ dEdd_algae_in=dEdd_algae, &
+ solve_zbgc_in=solve_zbgc, &
+ bgc_flux_type_in=bgc_flux_type, grid_o_in=grid_o, l_sk_in=l_sk, &
+ initbio_frac_in=initbio_frac, &
+ grid_oS_in=grid_oS, l_skS_in=l_skS, &
+ phi_snow_in=phi_snow, frazil_scav_in = frazil_scav, &
+ modal_aero_in=modal_aero)
+ call icepack_warnings_flush(nu_diag)
+ if (icepack_warnings_aborted()) call abort_ice(error_message=subname, &
+ file=__FILE__, line=__LINE__)
+
+ call icepack_init_tracer_flags(tr_brine_in=tr_brine, &
+ tr_bgc_Nit_in=tr_bgc_Nit, tr_bgc_Am_in =tr_bgc_Am, tr_bgc_Sil_in=tr_bgc_Sil, &
+ tr_bgc_DMS_in=tr_bgc_DMS, tr_bgc_PON_in=tr_bgc_PON, &
+ tr_bgc_N_in =tr_bgc_N, tr_bgc_C_in =tr_bgc_C, tr_bgc_chl_in=tr_bgc_chl, &
+ tr_bgc_DON_in=tr_bgc_DON, tr_bgc_Fe_in =tr_bgc_Fe, tr_zaero_in =tr_zaero, &
+ tr_bgc_hum_in=tr_bgc_hum)
+ call icepack_warnings_flush(nu_diag)
+ if (icepack_warnings_aborted()) call abort_ice(error_message=subname, &
+ file=__FILE__, line=__LINE__)
+
+ 1000 format (a30,2x,f9.2) ! a30 to align formatted, unformatted statements
+ 1005 format (a30,2x,f9.6) ! float
+ 1010 format (a30,2x,l6) ! logical
+ 1020 format (a30,2x,i6) ! integer
+ 1030 format (a30, a8) ! character
+ 1031 format (a30, a ) ! character
+
+ end subroutine input_zbgc
+
+!=======================================================================
+
+! Count and index tracers
+!
+! author Elizabeth C. Hunke, LANL
+
+ subroutine count_tracers
+
+ use ice_domain_size, only: nilyr, nslyr, nblyr, nfsd, n_iso, &
+ n_aero, n_zaero, n_algae, n_doc, n_dic, n_don, n_fed, n_fep
+
+ ! local variables
+
+ integer (kind=int_kind) :: &
+ k, mm , & ! loop index
+ nk , & ! layer index
+ nk_bgc ! layer index
+
+ integer (kind=int_kind) :: ntrcr
+ logical (kind=log_kind) :: tr_iage, tr_FY, tr_lvl, tr_pond, tr_aero, tr_fsd
+ logical (kind=log_kind) :: tr_snow
+ logical (kind=log_kind) :: tr_iso, tr_pond_lvl, tr_pond_topo
+ integer (kind=int_kind) :: nt_Tsfc, nt_sice, nt_qice, nt_qsno, nt_iage, nt_FY
+ integer (kind=int_kind) :: nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, nt_aero
+ integer (kind=int_kind) :: nt_fsd, nt_isosno, nt_isoice
+ integer (kind=int_kind) :: nt_smice, nt_smliq, nt_rhos, nt_rsnw
+
+ integer (kind=int_kind) :: &
+ nbtrcr, nbtrcr_sw, &
+ ntrcr_o, nt_fbri, &
+ nt_bgc_Nit, nt_bgc_Am, nt_bgc_Sil, &
+ nt_bgc_DMS, nt_bgc_PON, nt_bgc_S, &
+ nt_bgc_DMSPp, nt_bgc_DMSPd, &
+ nt_zbgc_frac, nlt_chl_sw, &
+ nlt_bgc_Nit, nlt_bgc_Am, nlt_bgc_Sil, &
+ nlt_bgc_DMS, nlt_bgc_DMSPp, nlt_bgc_DMSPd, &
+ nlt_bgc_PON, nt_bgc_hum, nlt_bgc_hum
+
+ integer (kind=int_kind), dimension(icepack_max_aero) :: &
+ nlt_zaero_sw ! points to aerosol in trcrn_sw
+
+ integer (kind=int_kind), dimension(icepack_max_algae) :: &
+ nlt_bgc_N , & ! algae
+ nlt_bgc_chl
+
+ integer (kind=int_kind), dimension(icepack_max_doc) :: &
+ nlt_bgc_DOC ! dissolved organic carbon
+
+ integer (kind=int_kind), dimension(icepack_max_don) :: &
+ nlt_bgc_DON ! dissolved organic nitrogen
+
+ integer (kind=int_kind), dimension(icepack_max_dic) :: &
+ nlt_bgc_DIC ! dissolved inorganic carbon
+
+ integer (kind=int_kind), dimension(icepack_max_fe) :: &
+ nlt_bgc_Fed , & ! dissolved iron
+ nlt_bgc_Fep ! particulate iron
+
+ integer (kind=int_kind), dimension(icepack_max_aero) :: &
+ nlt_zaero ! 
non-reacting layer aerosols + + integer (kind=int_kind), dimension(icepack_max_algae) :: & + nt_bgc_N , & ! diatoms, phaeocystis, pico/small + nt_bgc_chl ! diatoms, phaeocystis, pico/small + + integer (kind=int_kind), dimension(icepack_max_doc) :: & + nt_bgc_DOC ! dissolved organic carbon + + integer (kind=int_kind), dimension(icepack_max_don) :: & + nt_bgc_DON ! dissolved organic nitrogen + + integer (kind=int_kind), dimension(icepack_max_dic) :: & + nt_bgc_DIC ! dissolved inorganic carbon + + integer (kind=int_kind), dimension(icepack_max_fe) :: & + nt_bgc_Fed, & ! dissolved iron + nt_bgc_Fep ! particulate iron + + integer (kind=int_kind), dimension(icepack_max_aero) :: & + nt_zaero ! black carbon and other aerosols + + logical (kind=log_kind) :: & + tr_brine, & + tr_bgc_Nit, tr_bgc_Am, tr_bgc_Sil, & + tr_bgc_DMS, tr_bgc_PON, & + tr_bgc_N, tr_bgc_C, tr_bgc_chl, & + tr_bgc_DON, tr_bgc_Fe, tr_zaero, & + tr_bgc_hum + + logical (kind=log_kind) :: & + solve_zsal, skl_bgc, z_tracers + + character(len=*), parameter :: subname='(count_tracers)' + + !----------------------------------------------------------------- + + call icepack_query_parameters( & + solve_zsal_out=solve_zsal, & + skl_bgc_out=skl_bgc, z_tracers_out=z_tracers) + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & + tr_lvl_out=tr_lvl, tr_aero_out=tr_aero, tr_pond_out=tr_pond, & + tr_pond_lvl_out=tr_pond_lvl, & + tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_fsd_out=tr_fsd, & + tr_snow_out=tr_snow, tr_iso_out=tr_iso, & + tr_bgc_Nit_out=tr_bgc_Nit, tr_bgc_Am_out =tr_bgc_Am, tr_bgc_Sil_out=tr_bgc_Sil, & + tr_bgc_DMS_out=tr_bgc_DMS, tr_bgc_PON_out=tr_bgc_PON, & + tr_bgc_N_out =tr_bgc_N, tr_bgc_C_out =tr_bgc_C, tr_bgc_chl_out=tr_bgc_chl, & + tr_bgc_DON_out=tr_bgc_DON, tr_bgc_Fe_out =tr_bgc_Fe, tr_zaero_out =tr_zaero, & + tr_bgc_hum_out=tr_bgc_hum) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + ntrcr = 0 + + ntrcr = ntrcr + 1 ! count tracers, starting with Tsfc = 1 + nt_Tsfc = ntrcr ! index tracers, starting with Tsfc = 1 + + nt_qice = ntrcr + 1 + ntrcr = ntrcr + nilyr ! qice in nilyr layers + + nt_qsno = ntrcr + 1 + ntrcr = ntrcr + nslyr ! qsno in nslyr layers + + nt_sice = ntrcr + 1 + ntrcr = ntrcr + nilyr ! sice in nilyr layers + + nt_iage = 0 + if (tr_iage) then + ntrcr = ntrcr + 1 + nt_iage = ntrcr ! chronological ice age + endif + + nt_FY = 0 + if (tr_FY) then + ntrcr = ntrcr + 1 + nt_FY = ntrcr ! area of first year ice + endif + + nt_alvl = 0 + nt_vlvl = 0 + if (tr_lvl) then + ntrcr = ntrcr + 1 + nt_alvl = ntrcr + ntrcr = ntrcr + 1 + nt_vlvl = ntrcr + endif + + nt_apnd = 0 + nt_hpnd = 0 + nt_ipnd = 0 + if (tr_pond) then ! all explicit melt pond schemes + ntrcr = ntrcr + 1 + nt_apnd = ntrcr + ntrcr = ntrcr + 1 + nt_hpnd = ntrcr + if (tr_pond_lvl) then + ntrcr = ntrcr + 1 ! refrozen pond ice lid thickness + nt_ipnd = ntrcr ! on level-ice ponds (if frzpnd='hlid') + endif + if (tr_pond_topo) then + ntrcr = ntrcr + 1 ! + nt_ipnd = ntrcr ! refrozen pond ice lid thickness + endif + endif + + nt_smice = 0 + nt_smliq = 0 + nt_rhos = 0 + nt_rsnw = 0 + if (tr_snow) then + nt_smice = ntrcr + 1 + ntrcr = ntrcr + nslyr ! mass of ice in nslyr snow layers + nt_smliq = ntrcr + 1 + ntrcr = ntrcr + nslyr ! 
mass of liquid in nslyr snow layers + nt_rhos = ntrcr + 1 + ntrcr = ntrcr + nslyr ! snow density in nslyr layers + nt_rsnw = ntrcr + 1 + ntrcr = ntrcr + nslyr ! snow grain radius in nslyr layers + endif + + nt_fsd = 0 + if (tr_fsd) then + nt_fsd = ntrcr + 1 ! floe size distribution + ntrcr = ntrcr + nfsd + endif + + nt_isosno = 0 + nt_isoice = 0 + if (tr_iso) then + nt_isosno = ntrcr + 1 ! isotopes in snow + ntrcr = ntrcr + n_iso + nt_isoice = ntrcr + 1 ! isotopes in ice + ntrcr = ntrcr + n_iso + endif + + nt_aero = 0 + if (tr_aero) then + nt_aero = ntrcr + 1 + ntrcr = ntrcr + 4*n_aero ! 4 dEdd layers, n_aero species + else +!tcx, modify code so we don't have to reset n_aero here + n_aero = 0 !echmod - this is not getting set correctly (overwritten later?) + endif + + !----------------------------------------------------------------- + ! initialize zbgc tracer indices + !----------------------------------------------------------------- + + nbtrcr = 0 + nbtrcr_sw = 0 + nt_zbgc_frac = 0 + + ! vectors of size icepack_max_algae + nlt_bgc_N(:) = 0 + nlt_bgc_chl(:) = 0 + nt_bgc_N(:) = 0 + nt_bgc_chl(:) = 0 + + ! vectors of size icepack_max_dic + nlt_bgc_DIC(:) = 0 + nt_bgc_DIC(:) = 0 + + ! vectors of size icepack_max_doc + nlt_bgc_DOC(:) = 0 + nt_bgc_DOC(:) = 0 + + ! vectors of size icepack_max_don + nlt_bgc_DON(:) = 0 + nt_bgc_DON(:) = 0 + + ! vectors of size icepack_max_fe + nlt_bgc_Fed(:) = 0 + nlt_bgc_Fep(:) = 0 + nt_bgc_Fed(:) = 0 + nt_bgc_Fep(:) = 0 + + ! vectors of size icepack_max_aero + nlt_zaero(:) = 0 + nlt_zaero_sw(:) = 0 + nt_zaero(:) = 0 + + nlt_bgc_Nit = 0 + nlt_bgc_Am = 0 + nlt_bgc_Sil = 0 + nlt_bgc_DMSPp = 0 + nlt_bgc_DMSPd = 0 + nlt_bgc_DMS = 0 + nlt_bgc_PON = 0 + nlt_bgc_hum = 0 +! nlt_bgc_C = 0 + nlt_chl_sw = 0 + + nt_bgc_Nit = 0 + nt_bgc_Am = 0 + nt_bgc_Sil = 0 + nt_bgc_DMSPp = 0 + nt_bgc_DMSPd = 0 + nt_bgc_DMS = 0 + nt_bgc_PON = 0 + nt_bgc_hum = 0 +! nt_bgc_C = 0 + + ntrcr_o = ntrcr + nt_fbri = 0 + if (tr_brine) then + nt_fbri = ntrcr + 1 ! ice volume fraction with salt + ntrcr = ntrcr + 1 + endif + + nt_bgc_S = 0 + if (solve_zsal) then ! .true. only if tr_brine = .true. + nt_bgc_S = ntrcr + 1 + ntrcr = ntrcr + nblyr + endif + + if (skl_bgc .or. z_tracers) then + + if (skl_bgc) then + nk = 1 + elseif (z_tracers) then ! defined on nblyr+1 in ice + ! and 2 snow layers (snow surface + interior) + nk = nblyr + 1 + endif ! skl_bgc or z_tracers + nk_bgc = nk ! number of bgc layers in ice + if (nk > 1) nk_bgc = nk + 2 ! number of bgc layers in ice and snow + + !----------------------------------------------------------------- + ! count tracers and assign tracer indices + !----------------------------------------------------------------- + + if (tr_bgc_N) then + do mm = 1, n_algae + nt_bgc_N(mm) = ntrcr + 1 + do k = 1, nk_bgc + ntrcr = ntrcr + 1 + enddo + nbtrcr = nbtrcr + 1 + nlt_bgc_N(mm) = nbtrcr + enddo ! mm + endif ! tr_bgc_N + + if (tr_bgc_Nit) then + nt_bgc_Nit = ntrcr + 1 + do k = 1, nk_bgc + ntrcr = ntrcr + 1 + enddo + nbtrcr = nbtrcr + 1 + nlt_bgc_Nit = nbtrcr + endif ! tr_bgc_Nit + + if (tr_bgc_C) then + ! + ! Algal C is not yet distinct from algal N + ! * Reqires exudation and/or changing C:N ratios + ! for implementation + ! + ! do mm = 1,n_algae + ! nt_bgc_C(mm) = ntrcr + 1 + ! do k = 1, nk_bgc + ! ntrcr = ntrcr + 1 + ! enddo + ! nbtrcr = nbtrcr + 1 + ! nlt_bgc_C(mm) = nbtrcr + ! enddo ! mm + + do mm = 1, n_doc + nt_bgc_DOC(mm) = ntrcr + 1 + do k = 1, nk_bgc + ntrcr = ntrcr + 1 + enddo + nbtrcr = nbtrcr + 1 + nlt_bgc_DOC(mm) = nbtrcr + enddo ! 
mm + do mm = 1, n_dic + nt_bgc_DIC(mm) = ntrcr + 1 + do k = 1, nk_bgc + ntrcr = ntrcr + 1 + enddo + nbtrcr = nbtrcr + 1 + nlt_bgc_DIC(mm) = nbtrcr + enddo ! mm + endif ! tr_bgc_C + + if (tr_bgc_chl) then + do mm = 1, n_algae + nt_bgc_chl(mm) = ntrcr + 1 + do k = 1, nk_bgc + ntrcr = ntrcr + 1 + enddo + nbtrcr = nbtrcr + 1 + nlt_bgc_chl(mm) = nbtrcr + enddo ! mm + endif ! tr_bgc_chl + + if (tr_bgc_Am) then + nt_bgc_Am = ntrcr + 1 + do k = 1, nk_bgc + ntrcr = ntrcr + 1 + enddo + nbtrcr = nbtrcr + 1 + nlt_bgc_Am = nbtrcr + endif + if (tr_bgc_Sil) then + nt_bgc_Sil = ntrcr + 1 + do k = 1, nk_bgc + ntrcr = ntrcr + 1 + enddo + nbtrcr = nbtrcr + 1 + nlt_bgc_Sil = nbtrcr + endif + + if (tr_bgc_DMS) then ! all together + nt_bgc_DMSPp = ntrcr + 1 + do k = 1, nk_bgc + ntrcr = ntrcr + 1 + enddo + nbtrcr = nbtrcr + 1 + nlt_bgc_DMSPp = nbtrcr + + nt_bgc_DMSPd = ntrcr + 1 + do k = 1, nk_bgc + ntrcr = ntrcr + 1 + enddo + nbtrcr = nbtrcr + 1 + nlt_bgc_DMSPd = nbtrcr + + nt_bgc_DMS = ntrcr + 1 + do k = 1, nk_bgc + ntrcr = ntrcr + 1 + enddo + nbtrcr = nbtrcr + 1 + nlt_bgc_DMS = nbtrcr + endif + + if (tr_bgc_PON) then + nt_bgc_PON = ntrcr + 1 + do k = 1, nk_bgc + ntrcr = ntrcr + 1 + enddo + nbtrcr = nbtrcr + 1 + nlt_bgc_PON = nbtrcr + endif + + if (tr_bgc_DON) then + do mm = 1, n_don + nt_bgc_DON(mm) = ntrcr + 1 + do k = 1, nk_bgc + ntrcr = ntrcr + 1 + enddo + nbtrcr = nbtrcr + 1 + nlt_bgc_DON(mm) = nbtrcr + enddo ! mm + endif ! tr_bgc_DON + + if (tr_bgc_Fe) then + do mm = 1, n_fed + nt_bgc_Fed(mm) = ntrcr + 1 + do k = 1, nk_bgc + ntrcr = ntrcr + 1 + enddo + nbtrcr = nbtrcr + 1 + nlt_bgc_Fed(mm) = nbtrcr + enddo ! mm + do mm = 1, n_fep + nt_bgc_Fep(mm) = ntrcr + 1 + do k = 1, nk_bgc + ntrcr = ntrcr + 1 + enddo + nbtrcr = nbtrcr + 1 + nlt_bgc_Fep(mm) = nbtrcr + enddo ! mm + endif ! tr_bgc_Fe + + if (tr_bgc_hum) then + nt_bgc_hum = ntrcr + 1 + do k = 1, nk_bgc + ntrcr = ntrcr + 1 + enddo + nbtrcr = nbtrcr + 1 + nlt_bgc_hum = nbtrcr + endif + + endif ! skl_bgc .or. z_tracers + + if (z_tracers) then ! defined on nblyr+1 in ice + ! and 2 snow layers (snow surface + interior) + ! z layer aerosols + if (tr_zaero) then + do mm = 1, n_zaero + nt_zaero(mm) = ntrcr + 1 + do k = 1, nk_bgc + ntrcr = ntrcr + 1 + enddo + nbtrcr = nbtrcr + 1 + nlt_zaero(mm) = nbtrcr + enddo ! mm + endif ! tr_zaero + + if (nbtrcr > 0) then + nt_zbgc_frac = ntrcr + 1 + ntrcr = ntrcr + nbtrcr + endif + endif ! z_tracers + +!tcx, +1 here is the unused tracer, want to get rid of it + ntrcr = ntrcr + 1 + +!tcx, reset unused tracer index, eventually get rid of it. 
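+! Illustrative example (assumes the default nilyr=7, nslyr=1 with all
+! optional tracers turned off): the counting above gives nt_Tsfc=1,
+! nt_qice=2:8, nt_qsno=9, nt_sice=10:16, and the extra unused tracer
+! brings ntrcr to 17; the resets below then point every inactive
+! tracer index at that unused slot.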
+ if (nt_iage <= 0) nt_iage = ntrcr + if (nt_FY <= 0) nt_FY = ntrcr + if (nt_alvl <= 0) nt_alvl = ntrcr + if (nt_vlvl <= 0) nt_vlvl = ntrcr + if (nt_apnd <= 0) nt_apnd = ntrcr + if (nt_hpnd <= 0) nt_hpnd = ntrcr + if (nt_ipnd <= 0) nt_ipnd = ntrcr + if (nt_smice <= 0) nt_smice = ntrcr + if (nt_smliq <= 0) nt_smliq = ntrcr + if (nt_rhos <= 0) nt_rhos = ntrcr + if (nt_rsnw <= 0) nt_rsnw = ntrcr + if (nt_fsd <= 0) nt_fsd = ntrcr + if (nt_isosno<= 0) nt_isosno= ntrcr + if (nt_isoice<= 0) nt_isoice= ntrcr + if (nt_aero <= 0) nt_aero = ntrcr + if (nt_fbri <= 0) nt_fbri = ntrcr + if (nt_bgc_S <= 0) nt_bgc_S = ntrcr + + if (my_task == master_task) then + write(nu_diag,*) ' ' + write(nu_diag,1020) ' ntrcr = ', ntrcr + write(nu_diag,1020) ' nbtrcr = ', nbtrcr + write(nu_diag,1020) ' nbtrcr_sw = ', nbtrcr_sw + write(nu_diag,*) ' ' + write(nu_diag,1020) ' nt_sice = ', nt_sice + write(nu_diag,1020) ' nt_qice = ', nt_qice + write(nu_diag,1020) ' nt_qsno = ', nt_qsno + write(nu_diag,*)' ' + 1020 format (a30,2x,i6) ! integer + call flush_fileunit(nu_diag) + endif ! my_task = master_task + call icepack_init_tracer_sizes(ntrcr_in=ntrcr, & + ntrcr_o_in=ntrcr_o, nbtrcr_in=nbtrcr, nbtrcr_sw_in=nbtrcr_sw) + call icepack_init_tracer_indices(nt_Tsfc_in=nt_Tsfc, nt_sice_in=nt_sice, & + nt_qice_in=nt_qice, nt_qsno_in=nt_qsno, nt_iage_in=nt_iage, nt_fy_in=nt_fy, & + nt_alvl_in=nt_alvl, nt_vlvl_in=nt_vlvl, nt_apnd_in=nt_apnd, nt_hpnd_in=nt_hpnd, & + nt_ipnd_in=nt_ipnd, nt_fsd_in=nt_fsd, nt_aero_in=nt_aero, & + nt_smice_in=nt_smice, nt_smliq_in=nt_smliq, nt_rhos_in=nt_rhos, nt_rsnw_in=nt_rsnw, & + nt_isosno_in=nt_isosno, nt_isoice_in=nt_isoice, nt_fbri_in=nt_fbri, & + nt_bgc_Nit_in=nt_bgc_Nit, nt_bgc_Am_in=nt_bgc_Am, nt_bgc_Sil_in=nt_bgc_Sil, & + nt_bgc_DMS_in=nt_bgc_DMS, nt_bgc_PON_in=nt_bgc_PON, nt_bgc_S_in=nt_bgc_S, & + nt_bgc_N_in=nt_bgc_N, nt_bgc_chl_in=nt_bgc_chl, & + nt_bgc_DOC_in=nt_bgc_DOC, nt_bgc_DON_in=nt_bgc_DON, nt_bgc_DIC_in=nt_bgc_DIC, & + nt_zaero_in=nt_zaero, nt_bgc_DMSPp_in=nt_bgc_DMSPp, nt_bgc_DMSPd_in=nt_bgc_DMSPd, & + nt_bgc_Fed_in=nt_bgc_Fed, nt_bgc_Fep_in=nt_bgc_Fep, nt_zbgc_frac_in=nt_zbgc_frac, & + nlt_zaero_sw_in=nlt_zaero_sw, nlt_chl_sw_in=nlt_chl_sw, nlt_bgc_Sil_in=nlt_bgc_Sil, & + nlt_bgc_N_in=nlt_bgc_N, nlt_bgc_Nit_in=nlt_bgc_Nit, nlt_bgc_Am_in=nlt_bgc_Am, & + nlt_bgc_DMS_in=nlt_bgc_DMS, nlt_bgc_DMSPp_in=nlt_bgc_DMSPp, nlt_bgc_DMSPd_in=nlt_bgc_DMSPd, & + nlt_zaero_in=nlt_zaero, nlt_bgc_chl_in=nlt_bgc_chl, & + nlt_bgc_DIC_in=nlt_bgc_DIC, nlt_bgc_DOC_in=nlt_bgc_DOC, nlt_bgc_PON_in=nlt_bgc_PON, & + nlt_bgc_DON_in=nlt_bgc_DON, nlt_bgc_Fed_in=nlt_bgc_Fed, nlt_bgc_Fep_in=nlt_bgc_Fep, & + nt_bgc_hum_in=nt_bgc_hum, nlt_bgc_hum_in=nlt_bgc_hum) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname//' Icepack Abort2', & + file=__FILE__, line=__LINE__) + + if (my_task == master_task) then + call icepack_write_tracer_flags(nu_diag) + call icepack_write_tracer_sizes(nu_diag) + call icepack_write_tracer_indices(nu_diag) + endif + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname//' Icepack Abort3', & + file=__FILE__, line=__LINE__) + + end subroutine count_tracers + +!======================================================================= + +! Initialize vertical biogeochemistry +! +! author Elizabeth C. Hunke, LANL +! 
Nicole Jeffery, LANL + + subroutine init_zbgc + + use ice_state, only: trcr_base, trcr_depend, n_trcr_strata, & + nt_strata + use ice_arrays_column, only: R_C2N, R_chl2N, R_C2N_DON, R_Si2N, trcrn_sw + + integer (kind=int_kind) :: & + nbtrcr, nbtrcr_sw, nt_fbri, & + nt_bgc_Nit, nt_bgc_Am, nt_bgc_Sil, & + nt_bgc_DMS, nt_bgc_PON, nt_bgc_S, & + nt_bgc_DMSPp, nt_bgc_DMSPd, & + nt_zbgc_frac, nlt_chl_sw, & + nlt_bgc_Nit, nlt_bgc_Am, nlt_bgc_Sil, & + nlt_bgc_DMS, nlt_bgc_DMSPp, nlt_bgc_DMSPd, & + nlt_bgc_PON, nt_bgc_hum, nlt_bgc_hum + + integer (kind=int_kind), dimension(icepack_max_aero) :: & + nlt_zaero_sw ! points to aerosol in trcrn_sw + + integer (kind=int_kind), dimension(icepack_max_algae) :: & + nlt_bgc_N , & ! algae + nlt_bgc_chl + + integer (kind=int_kind), dimension(icepack_max_doc) :: & + nlt_bgc_DOC ! dissolved organic carbon + + integer (kind=int_kind), dimension(icepack_max_don) :: & + nlt_bgc_DON ! dissolved organic nitrogen + + integer (kind=int_kind), dimension(icepack_max_dic) :: & + nlt_bgc_DIC ! dissolved inorganic carbon + + integer (kind=int_kind), dimension(icepack_max_fe) :: & + nlt_bgc_Fed , & ! dissolved iron + nlt_bgc_Fep ! particulate iron + + integer (kind=int_kind), dimension(icepack_max_aero) :: & + nlt_zaero ! non-reacting layer aerosols + + integer (kind=int_kind), dimension(icepack_max_algae) :: & + nt_bgc_N , & ! diatoms, phaeocystis, pico/small + nt_bgc_chl ! diatoms, phaeocystis, pico/small + + integer (kind=int_kind), dimension(icepack_max_doc) :: & + nt_bgc_DOC ! dissolved organic carbon + + integer (kind=int_kind), dimension(icepack_max_don) :: & + nt_bgc_DON ! dissolved organic nitrogen + + integer (kind=int_kind), dimension(icepack_max_dic) :: & + nt_bgc_DIC ! dissolved inorganic carbon + + integer (kind=int_kind), dimension(icepack_max_fe) :: & + nt_bgc_Fed, & ! dissolved iron + nt_bgc_Fep ! particulate iron + + integer (kind=int_kind), dimension(icepack_max_aero) :: & + nt_zaero ! black carbon and other aerosols + + integer (kind=int_kind), dimension(icepack_max_nbtrcr) :: & + bio_index_o ! relates nlt_bgc_NO to ocean concentration index + + integer (kind=int_kind), dimension(icepack_max_nbtrcr) :: & + bio_index ! relates bio indices, i.e. nlt_bgc_N to nt_bgc_N + + logical (kind=log_kind) :: & + tr_brine, & + tr_bgc_Nit, tr_bgc_Am, tr_bgc_Sil, & + tr_bgc_DMS, tr_bgc_PON, & + tr_bgc_N, tr_bgc_C, tr_bgc_chl, & + tr_bgc_DON, tr_bgc_Fe, tr_zaero, & + tr_bgc_hum + + real (kind=dbl_kind) :: & + initbio_frac, & + frazil_scav + + real (kind=dbl_kind), dimension(icepack_max_nbtrcr) :: & + zbgc_frac_init, & ! initializes mobile fraction + bgc_tracer_type ! describes tracer in mobile or stationary phases + ! < 0 is purely mobile (e.g. nitrate) + ! > 0 has timescales for transitions between + ! phases based on whether the ice is melting or growing + + real (kind=dbl_kind), dimension(icepack_max_nbtrcr) :: & + zbgc_init_frac, & ! fraction of ocean tracer concentration in new ice + tau_ret, & ! retention timescale (s), mobile to stationary phase + tau_rel ! release timescale (s), stationary to mobile phase + + logical (kind=log_kind) :: & + skl_bgc, z_tracers, dEdd_algae, solve_zsal + + real (kind=dbl_kind), dimension(icepack_max_algae) :: & + F_abs_chl ! to scale absorption in dEdd + + real (kind=dbl_kind), dimension(icepack_max_algae) :: & + R_S2N , & ! algal S to N (mole/mole) + ! Marchetti et al 2006, 3 umol Fe/mol C for iron limited Pseudo-nitzschia + R_Fe2C , & ! algal Fe to carbon (umol/mmol) + R_Fe2N ! algal Fe to N (umol/mmol) + + real (kind=dbl_kind), dimension(icepack_max_don) :: & + R_Fe2DON ! 
Fe to N of DON (nmol/umol) + + real (kind=dbl_kind), dimension(icepack_max_doc) :: & + R_Fe2DOC ! Fe to C of DOC (nmol/umol) + + real (kind=dbl_kind), dimension(icepack_max_algae) :: & + chlabs , & ! chla absorption 1/m/(mg/m^3) + alpha2max_low , & ! light limitation (1/(W/m^2)) + beta2max , & ! light inhibition (1/(W/m^2)) + mu_max , & ! maximum growth rate (1/d) + grow_Tdep , & ! T dependence of growth (1/C) + fr_graze , & ! fraction of algae grazed + mort_pre , & ! mortality (1/day) + mort_Tdep , & ! T dependence of mortality (1/C) + k_exude , & ! algal carbon exudation rate (1/d) + K_Nit , & ! nitrate half saturation (mmol/m^3) + K_Am , & ! ammonium half saturation (mmol/m^3) + K_Sil , & ! silicon half saturation (mmol/m^3) + K_Fe ! iron half saturation (micromol/m^3) + + real (kind=dbl_kind), dimension(icepack_max_DON) :: & + f_don , & ! fraction of spilled grazing to DON + kn_bac , & ! Bacterial degradation of DON (1/d) + f_don_Am ! fraction of remineralized DON to Am + + real (kind=dbl_kind), dimension(icepack_max_DOC) :: & + f_doc , & ! fraction of mort_N that goes to each doc pool + f_exude , & ! fraction of exuded carbon to each DOC pool + k_bac ! Bacterial degradation of DOC (1/d) + + integer (kind=int_kind) :: & + k, mm , & ! loop index + nk , & ! layer index + ierr + + integer (kind=int_kind) :: & + ntd , & ! for tracer dependency calculation + nt_depend + + character(len=*), parameter :: subname='(init_zbgc)' + + !------------------------------------------------------------ + ! Tracers have mobile and stationary phases. + ! Ice growth allows for retention, ice melt facilitates mobility. + ! bgc_tracer_type defines the exchange timescales between these phases: + ! -1 : entirely in the mobile phase, no exchange (this is the default) + ! 0 : retention time scale is tau_min, release time scale is tau_max + ! 1 : retention time scale is tau_max, release time scale is tau_min + ! 0.5: retention time scale is tau_min, release time scale is tau_min + ! 2 : retention time scale is tau_max, release time scale is tau_max + ! tau_min and tau_max are defined in icepack_intfc.f90 + !------------------------------------------------------------ + + !----------------------------------------------------------------- + ! 
get values from icepack + !----------------------------------------------------------------- + + call icepack_query_parameters( & + solve_zsal_out=solve_zsal, & + skl_bgc_out=skl_bgc, z_tracers_out=z_tracers, & + dEdd_algae_out=dEdd_algae, & + grid_o_out=grid_o, l_sk_out=l_sk, & + initbio_frac_out=initbio_frac, & + grid_oS_out=grid_oS, l_skS_out=l_skS, & + phi_snow_out=phi_snow, frazil_scav_out = frazil_scav) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + call icepack_query_tracer_sizes( & + nbtrcr_out=nbtrcr, nbtrcr_sw_out=nbtrcr_sw) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + call icepack_query_tracer_flags( & + tr_brine_out =tr_brine, & + tr_bgc_Nit_out=tr_bgc_Nit, tr_bgc_Am_out=tr_bgc_Am, tr_bgc_Sil_out=tr_bgc_Sil, & + tr_bgc_DMS_out=tr_bgc_DMS, tr_bgc_PON_out=tr_bgc_PON, & + tr_bgc_N_out =tr_bgc_N, tr_bgc_C_out =tr_bgc_C, tr_bgc_chl_out=tr_bgc_chl, & + tr_bgc_DON_out=tr_bgc_DON, tr_bgc_Fe_out=tr_bgc_Fe, tr_zaero_out =tr_zaero, & + tr_bgc_hum_out=tr_bgc_hum) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + call icepack_query_tracer_indices( & + nt_fbri_out=nt_fbri, & + nt_bgc_Nit_out=nt_bgc_Nit, nt_bgc_Am_out=nt_bgc_Am, nt_bgc_Sil_out=nt_bgc_Sil, & + nt_bgc_DMS_out=nt_bgc_DMS, nt_bgc_PON_out=nt_bgc_PON, nt_bgc_S_out=nt_bgc_S, & + nt_bgc_N_out=nt_bgc_N, nt_bgc_chl_out=nt_bgc_chl, & + nt_bgc_DOC_out=nt_bgc_DOC, nt_bgc_DON_out=nt_bgc_DON, nt_bgc_DIC_out=nt_bgc_DIC, & + nt_zaero_out=nt_zaero, nt_bgc_DMSPp_out=nt_bgc_DMSPp, nt_bgc_DMSPd_out=nt_bgc_DMSPd, & + nt_bgc_Fed_out=nt_bgc_Fed, nt_bgc_Fep_out=nt_bgc_Fep, nt_zbgc_frac_out=nt_zbgc_frac, & + nlt_zaero_sw_out=nlt_zaero_sw, nlt_chl_sw_out=nlt_chl_sw, nlt_bgc_Sil_out=nlt_bgc_Sil, & + nlt_bgc_N_out=nlt_bgc_N, nlt_bgc_Nit_out=nlt_bgc_Nit, nlt_bgc_Am_out=nlt_bgc_Am, & + nlt_bgc_DMS_out=nlt_bgc_DMS, nlt_bgc_DMSPp_out=nlt_bgc_DMSPp, nlt_bgc_DMSPd_out=nlt_bgc_DMSPd, & + nlt_zaero_out=nlt_zaero, nlt_bgc_chl_out=nlt_bgc_chl, & + nlt_bgc_DIC_out=nlt_bgc_DIC, nlt_bgc_DOC_out=nlt_bgc_DOC, nlt_bgc_PON_out=nlt_bgc_PON, & + nlt_bgc_DON_out=nlt_bgc_DON, nlt_bgc_Fed_out=nlt_bgc_Fed, nlt_bgc_Fep_out=nlt_bgc_Fep, & + nt_bgc_hum_out=nt_bgc_hum, nlt_bgc_hum_out=nlt_bgc_hum) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + !----------------------------------------------------------------- + ! Define array parameters + !----------------------------------------------------------------- + + allocate( & + R_C2N_DON(icepack_max_don), & ! carbon to nitrogen mole ratio of DON pool + R_C2N(icepack_max_algae), & ! algal C to N (mole/mole) + R_chl2N(icepack_max_algae), & ! 3 algal chlorophyll to N (mg/mmol) + R_Si2N(icepack_max_algae), & ! 
silica to nitrogen mole ratio for algal groups + stat=ierr) + if (ierr/=0) call abort_ice(subname//' Out of Memory') + + R_Si2N(1) = ratio_Si2N_diatoms + R_Si2N(2) = ratio_Si2N_sp + R_Si2N(3) = ratio_Si2N_phaeo + + R_S2N(1) = ratio_S2N_diatoms + R_S2N(2) = ratio_S2N_sp + R_S2N(3) = ratio_S2N_phaeo + + R_Fe2C(1) = ratio_Fe2C_diatoms + R_Fe2C(2) = ratio_Fe2C_sp + R_Fe2C(3) = ratio_Fe2C_phaeo + + R_Fe2N(1) = ratio_Fe2N_diatoms + R_Fe2N(2) = ratio_Fe2N_sp + R_Fe2N(3) = ratio_Fe2N_phaeo + + R_C2N(1) = ratio_C2N_diatoms + R_C2N(2) = ratio_C2N_sp + R_C2N(3) = ratio_C2N_phaeo + + R_chl2N(1) = ratio_chl2N_diatoms + R_chl2N(2) = ratio_chl2N_sp + R_chl2N(3) = ratio_chl2N_phaeo + + F_abs_chl(1) = F_abs_chl_diatoms + F_abs_chl(2) = F_abs_chl_sp + F_abs_chl(3) = F_abs_chl_phaeo + + R_Fe2DON(1) = ratio_Fe2DON + R_C2N_DON(1) = ratio_C2N_proteins + + R_Fe2DOC(1) = ratio_Fe2DOC_s + R_Fe2DOC(2) = ratio_Fe2DOC_l + R_Fe2DOC(3) = c0 + + chlabs(1) = chlabs_diatoms + chlabs(2) = chlabs_sp + chlabs(3) = chlabs_phaeo + + alpha2max_low(1) = alpha2max_low_diatoms + alpha2max_low(2) = alpha2max_low_sp + alpha2max_low(3) = alpha2max_low_phaeo + + beta2max(1) = beta2max_diatoms + beta2max(2) = beta2max_sp + beta2max(3) = beta2max_phaeo + + mu_max(1) = mu_max_diatoms + mu_max(2) = mu_max_sp + mu_max(3) = mu_max_phaeo + + grow_Tdep(1) = grow_Tdep_diatoms + grow_Tdep(2) = grow_Tdep_sp + grow_Tdep(3) = grow_Tdep_phaeo + + fr_graze(1) = fr_graze_diatoms + fr_graze(2) = fr_graze_sp + fr_graze(3) = fr_graze_phaeo + + mort_pre(1) = mort_pre_diatoms + mort_pre(2) = mort_pre_sp + mort_pre(3) = mort_pre_phaeo + + mort_Tdep(1) = mort_Tdep_diatoms + mort_Tdep(2) = mort_Tdep_sp + mort_Tdep(3) = mort_Tdep_phaeo + + k_exude(1) = k_exude_diatoms + k_exude(2) = k_exude_sp + k_exude(3) = k_exude_phaeo + + K_Nit(1) = K_Nit_diatoms + K_Nit(2) = K_Nit_sp + K_Nit(3) = K_Nit_phaeo + + K_Am(1) = K_Am_diatoms + K_Am(2) = K_Am_sp + K_Am(3) = K_Am_phaeo + + K_Sil(1) = K_Sil_diatoms + K_Sil(2) = K_Sil_sp + K_Sil(3) = K_Sil_phaeo + + K_Fe(1) = K_Fe_diatoms + K_Fe(2) = K_Fe_sp + K_Fe(3) = K_Fe_phaeo + + f_don(1) = f_don_protein + kn_bac(1) = kn_bac_protein + f_don_Am(1) = f_don_Am_protein + + f_doc(1) = f_doc_s + f_doc(2) = f_doc_l + + f_exude(1) = f_exude_s + f_exude(2) = f_exude_l + k_bac(1) = k_bac_s + k_bac(2) = k_bac_l + + dictype(:) = -c1 + + algaltype(1) = algaltype_diatoms + algaltype(2) = algaltype_sp + algaltype(3) = algaltype_phaeo + + doctype(1) = doctype_s + doctype(2) = doctype_l + + dontype(1) = dontype_protein + + fedtype(1) = fedtype_1 + feptype(1) = feptype_1 + + zaerotype(1) = zaerotype_bc1 + zaerotype(2) = zaerotype_bc2 + zaerotype(3) = zaerotype_dust1 + zaerotype(4) = zaerotype_dust2 + zaerotype(5) = zaerotype_dust3 + zaerotype(6) = zaerotype_dust4 + + call icepack_init_zbgc ( & +!opt R_S2N_in=R_S2N, R_Fe2C_in=R_Fe2C, R_Fe2N_in=R_Fe2N, R_C2N_in=R_C2N, & +!opt R_chl2N_in=R_chl2N, F_abs_chl_in=F_abs_chl, R_Fe2DON_in=R_Fe2DON, R_Fe2DOC_in=R_Fe2DOC, & +!opt mort_Tdep_in=mort_Tdep, k_exude_in=k_exude, & +!opt K_Nit_in=K_Nit, K_Am_in=K_Am, K_sil_in=K_Sil, K_Fe_in=K_Fe, & +!opt f_don_in=f_don, kn_bac_in=kn_bac, f_don_Am_in=f_don_Am, f_exude_in=f_exude, k_bac_in=k_bac, & +!opt fr_resp_in=fr_resp, algal_vel_in=algal_vel, R_dFe2dust_in=R_dFe2dust, & +!opt dustFe_sol_in=dustFe_sol, T_max_in=T_max, fr_mort2min_in=fr_mort2min, fr_dFe_in=fr_dFe, & +!opt op_dep_min_in=op_dep_min, fr_graze_s_in=fr_graze_s, fr_graze_e_in=fr_graze_e, & +!opt k_nitrif_in=k_nitrif, t_iron_conv_in=t_iron_conv, max_loss_in=max_loss, max_dfe_doc1_in=max_dfe_doc1, & 
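+! (note, illustrative: the "!opt" lines in this call are optional
+! arguments kept commented out; to pass one, remove the "!opt" prefix
+! and keep the continuation "&", dropping the trailing comma on the
+! last active argument so the continuation to the closing ")" remains
+! valid Fortran)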
+!opt fr_resp_s_in=fr_resp_s, y_sk_DMS_in=y_sk_DMS, t_sk_conv_in=t_sk_conv, t_sk_ox_in=t_sk_ox, & +!opt mu_max_in=mu_max, R_Si2N_in=R_Si2N, R_C2N_DON_in=R_C2N_DON, chlabs_in=chlabs, & +!opt alpha2max_low_in=alpha2max_low, beta2max_in=beta2max, grow_Tdep_in=grow_Tdep, & +!opt fr_graze_in=fr_graze, mort_pre_in=mort_pre, f_doc_in=f_doc, fsal_in=fsal, & + ) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + !----------------------------------------------------------------- + ! assign tracer dependencies + ! bgc_tracer_type: < 0 purely mobile, >= 0 stationary + !----------------------------------------------------------------- + + if (tr_brine) then + trcr_depend(nt_fbri) = 1 ! volume-weighted + trcr_base (nt_fbri,1) = c0 ! volume-weighted + trcr_base (nt_fbri,2) = c1 ! volume-weighted + trcr_base (nt_fbri,3) = c0 ! volume-weighted + n_trcr_strata(nt_fbri) = 0 + nt_strata (nt_fbri,1) = 0 + nt_strata (nt_fbri,2) = 0 + endif + + ntd = 0 ! if nt_fbri /= 0 then use fbri dependency + if (nt_fbri == 0) ntd = -1 ! otherwise make tracers depend on ice volume + + if (solve_zsal) then ! .true. only if tr_brine = .true. + do k = 1,nblyr + trcr_depend(nt_bgc_S + k - 1) = 2 + nt_fbri + ntd + trcr_base (nt_bgc_S,1) = c0 ! default: ice area + trcr_base (nt_bgc_S,2) = c1 + trcr_base (nt_bgc_S,3) = c0 + n_trcr_strata(nt_bgc_S) = 1 + nt_strata(nt_bgc_S,1) = nt_fbri + nt_strata(nt_bgc_S,2) = 0 + enddo + endif + + bio_index(:) = 0 + bio_index_o(:) = 0 + + if (skl_bgc) then + nk = 1 + nt_depend = 0 + elseif (z_tracers) then ! defined on nblyr+1 in ice + ! and 2 snow layers (snow surface + interior) + nk = nblyr + 1 + nt_depend = 2 + nt_fbri + ntd + endif ! skl_bgc or z_tracers + + if (skl_bgc .or. z_tracers) then + + if (tr_bgc_N) then + do mm = 1, n_algae + call init_bgc_trcr(nk, nt_fbri, & + nt_bgc_N(mm), nlt_bgc_N(mm), & + algaltype(mm), nt_depend, & + bgc_tracer_type, trcr_depend, & + trcr_base, n_trcr_strata, & + nt_strata, bio_index) + bio_index_o(nlt_bgc_N(mm)) = mm + enddo ! mm + endif ! tr_bgc_N + + if (tr_bgc_Nit) then + call init_bgc_trcr(nk, nt_fbri, & + nt_bgc_Nit, nlt_bgc_Nit, & + nitratetype, nt_depend, & + bgc_tracer_type, trcr_depend, & + trcr_base, n_trcr_strata, & + nt_strata, bio_index) + bio_index_o(nlt_bgc_Nit) = icepack_max_algae + 1 + endif ! tr_bgc_Nit + + if (tr_bgc_C) then + ! + ! Algal C is not yet distinct from algal N + ! * Requires exudation and/or changing C:N ratios + ! for implementation + ! + ! do mm = 1,n_algae + ! call init_bgc_trcr(nk, nt_fbri, & + ! nt_bgc_C(mm), nlt_bgc_C(mm), & + ! algaltype(mm), nt_depend, & + ! bgc_tracer_type, trcr_depend, & + ! trcr_base, n_trcr_strata, & + ! nt_strata, bio_index) + ! bio_index_o(nlt_bgc_C(mm)) = icepack_max_algae + 1 + mm + ! enddo ! mm + + do mm = 1, n_doc + call init_bgc_trcr(nk, nt_fbri, & + nt_bgc_DOC(mm), nlt_bgc_DOC(mm), & + doctype(mm), nt_depend, & + bgc_tracer_type, trcr_depend, & + trcr_base, n_trcr_strata, & + nt_strata, bio_index) + bio_index_o(nlt_bgc_DOC(mm)) = icepack_max_algae + 1 + mm + enddo ! mm + do mm = 1, n_dic + call init_bgc_trcr(nk, nt_fbri, & + nt_bgc_DIC(mm), nlt_bgc_DIC(mm), & + dictype(mm), nt_depend, & + bgc_tracer_type, trcr_depend, & + trcr_base, n_trcr_strata, & + nt_strata, bio_index) + bio_index_o(nlt_bgc_DIC(mm)) = icepack_max_algae + icepack_max_doc + 1 + mm + enddo ! mm + endif ! 
tr_bgc_C + + if (tr_bgc_chl) then + do mm = 1, n_algae + call init_bgc_trcr(nk, nt_fbri, & + nt_bgc_chl(mm), nlt_bgc_chl(mm), & + algaltype(mm), nt_depend, & + bgc_tracer_type, trcr_depend, & + trcr_base, n_trcr_strata, & + nt_strata, bio_index) + bio_index_o(nlt_bgc_chl(mm)) = icepack_max_algae + 1 + icepack_max_doc + icepack_max_dic + mm + enddo ! mm + endif ! tr_bgc_chl + + if (tr_bgc_Am) then + call init_bgc_trcr(nk, nt_fbri, & + nt_bgc_Am, nlt_bgc_Am, & + ammoniumtype, nt_depend, & + bgc_tracer_type, trcr_depend, & + trcr_base, n_trcr_strata, & + nt_strata, bio_index) + bio_index_o(nlt_bgc_Am) = 2*icepack_max_algae + icepack_max_doc + icepack_max_dic + 2 + endif + if (tr_bgc_Sil) then + call init_bgc_trcr(nk, nt_fbri, & + nt_bgc_Sil, nlt_bgc_Sil, & + silicatetype, nt_depend, & + bgc_tracer_type, trcr_depend, & + trcr_base, n_trcr_strata, & + nt_strata, bio_index) + bio_index_o(nlt_bgc_Sil) = 2*icepack_max_algae + icepack_max_doc + icepack_max_dic + 3 + endif + if (tr_bgc_DMS) then ! all together + call init_bgc_trcr(nk, nt_fbri, & + nt_bgc_DMSPp, nlt_bgc_DMSPp, & + dmspptype, nt_depend, & + bgc_tracer_type, trcr_depend, & + trcr_base, n_trcr_strata, & + nt_strata, bio_index) + bio_index_o(nlt_bgc_DMSPp) = 2*icepack_max_algae + icepack_max_doc + icepack_max_dic + 4 + + call init_bgc_trcr(nk, nt_fbri, & + nt_bgc_DMSPd, nlt_bgc_DMSPd, & + dmspdtype, nt_depend, & + bgc_tracer_type, trcr_depend, & + trcr_base, n_trcr_strata, & + nt_strata, bio_index) + bio_index_o(nlt_bgc_DMSPd) = 2*icepack_max_algae + icepack_max_doc + icepack_max_dic + 5 + + call init_bgc_trcr(nk, nt_fbri, & + nt_bgc_DMS, nlt_bgc_DMS, & + dmspdtype, nt_depend, & + bgc_tracer_type, trcr_depend, & + trcr_base, n_trcr_strata, & + nt_strata, bio_index) + bio_index_o(nlt_bgc_DMS) = 2*icepack_max_algae + icepack_max_doc + icepack_max_dic + 6 + endif + if (tr_bgc_PON) then + call init_bgc_trcr(nk, nt_fbri, & + nt_bgc_PON, nlt_bgc_PON, & + nitratetype, nt_depend, & + bgc_tracer_type, trcr_depend, & + trcr_base, n_trcr_strata, & + nt_strata, bio_index) + bio_index_o(nlt_bgc_PON) = 2*icepack_max_algae + icepack_max_doc + icepack_max_dic + 7 + endif + if (tr_bgc_DON) then + do mm = 1, n_don + call init_bgc_trcr(nk, nt_fbri, & + nt_bgc_DON(mm), nlt_bgc_DON(mm), & + dontype(mm), nt_depend, & + bgc_tracer_type, trcr_depend, & + trcr_base, n_trcr_strata, & + nt_strata, bio_index) + bio_index_o(nlt_bgc_DON(mm)) = 2*icepack_max_algae + icepack_max_doc + icepack_max_dic + 7 + mm + enddo ! mm + endif ! tr_bgc_DON + if (tr_bgc_Fe) then + do mm = 1, n_fed + call init_bgc_trcr(nk, nt_fbri, & + nt_bgc_Fed(mm), nlt_bgc_Fed(mm), & + fedtype(mm), nt_depend, & + bgc_tracer_type, trcr_depend, & + trcr_base, n_trcr_strata, & + nt_strata, bio_index) + bio_index_o(nlt_bgc_Fed(mm)) = 2*icepack_max_algae + icepack_max_doc + icepack_max_dic & + + icepack_max_don + 7 + mm + enddo ! mm + do mm = 1, n_fep + call init_bgc_trcr(nk, nt_fbri, & + nt_bgc_Fep(mm), nlt_bgc_Fep(mm), & + feptype(mm), nt_depend, & + bgc_tracer_type, trcr_depend, & + trcr_base, n_trcr_strata, & + nt_strata, bio_index) + bio_index_o(nlt_bgc_Fep(mm)) = 2*icepack_max_algae + icepack_max_doc + icepack_max_dic & + + icepack_max_don + icepack_max_fe + 7 + mm + enddo ! mm + endif ! 
tr_bgc_Fe + + if (tr_bgc_hum) then + call init_bgc_trcr(nk, nt_fbri, & + nt_bgc_hum, nlt_bgc_hum, & + humtype, nt_depend, & + bgc_tracer_type, trcr_depend, & + trcr_base, n_trcr_strata, & + nt_strata, bio_index) + bio_index_o(nlt_bgc_hum) = 2*icepack_max_algae + icepack_max_doc + 8 + icepack_max_dic & + + icepack_max_don + 2*icepack_max_fe + icepack_max_aero + endif + endif ! skl_bgc or z_tracers + + if (skl_bgc) then + if (dEdd_algae) then + nlt_chl_sw = 1 + nbtrcr_sw = nilyr+nslyr+2 ! only the bottom layer will be nonzero + endif + + elseif (z_tracers) then ! defined on nblyr+1 in ice + ! and 2 snow layers (snow surface + interior) + if (tr_bgc_N) then + if (dEdd_algae) then + nlt_chl_sw = 1 + nbtrcr_sw = nilyr+nslyr+2 + endif + endif ! tr_bgc_N + endif ! skl_bgc or z_tracers + + if (z_tracers) then ! defined on nblyr+1 in ice + ! and 2 snow layers (snow surface + interior) + + nk = nblyr + 1 + nt_depend = 2 + nt_fbri + ntd + + ! z layer aerosols + if (tr_zaero) then + do mm = 1, n_zaero + if (dEdd_algae) then + nlt_zaero_sw(mm) = nbtrcr_sw + 1 + nbtrcr_sw = nbtrcr_sw + nilyr + nslyr+2 + endif + call init_bgc_trcr(nk, nt_fbri, & + nt_zaero(mm), nlt_zaero(mm), & + zaerotype(mm), nt_depend, & + bgc_tracer_type, trcr_depend, & + trcr_base, n_trcr_strata, & + nt_strata, bio_index) + bio_index_o(nlt_zaero(mm)) = 2*icepack_max_algae + icepack_max_doc + icepack_max_dic & + + icepack_max_don + 2*icepack_max_fe + 7 + mm + enddo ! mm + endif ! tr_zaero + + if (nbtrcr > 0) then + do k = 1,nbtrcr + zbgc_frac_init(k) = c1 + trcr_depend(nt_zbgc_frac+k-1) = 2+nt_fbri + trcr_base(nt_zbgc_frac+ k - 1,1) = c0 + trcr_base(nt_zbgc_frac+ k - 1,2) = c1 + trcr_base(nt_zbgc_frac+ k - 1,3) = c0 + n_trcr_strata(nt_zbgc_frac+ k - 1)= 1 + nt_strata(nt_zbgc_frac+ k - 1,1) = nt_fbri + nt_strata(nt_zbgc_frac+ k - 1,2) = 0 + tau_ret(k) = c1 + tau_rel(k) = c1 + if (bgc_tracer_type(k) >= c0 .and. bgc_tracer_type(k) < p5) then + tau_ret(k) = tau_min + tau_rel(k) = tau_max + zbgc_frac_init(k) = c1 + elseif (bgc_tracer_type(k) >= p5 .and. bgc_tracer_type(k) < c1) then + tau_ret(k) = tau_min + tau_rel(k) = tau_min + zbgc_frac_init(k) = c1 + elseif (bgc_tracer_type(k) >= c1 .and. bgc_tracer_type(k) < c2) then + tau_ret(k) = tau_max + tau_rel(k) = tau_min + zbgc_frac_init(k) = c1 + elseif (bgc_tracer_type(k) >= c2 ) then + tau_ret(k) = tau_max + tau_rel(k) = tau_max + zbgc_frac_init(k) = c1 + endif + enddo + endif + + endif ! z_tracers + + do k = 1, nbtrcr + zbgc_init_frac(k) = frazil_scav + if (bgc_tracer_type(k) < c0) zbgc_init_frac(k) = initbio_frac + enddo + + !----------------------------------------------------------------- + ! set values in icepack + !----------------------------------------------------------------- + + call icepack_init_zbgc( & +!opt zbgc_init_frac_in=zbgc_init_frac, tau_ret_in=tau_ret, tau_rel_in=tau_rel, & +!opt zbgc_frac_init_in=zbgc_frac_init, bgc_tracer_type_in=bgc_tracer_type, & + ) + call icepack_init_tracer_indices( & + bio_index_o_in=bio_index_o, bio_index_in=bio_index) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + !----------------------------------------------------------------- + ! 
final consistency checks + !----------------------------------------------------------------- + if (nbtrcr > icepack_max_nbtrcr) then + write (nu_diag,*) subname,' ' + write (nu_diag,*) subname,'nbtrcr > icepack_max_nbtrcr' + write (nu_diag,*) subname,'nbtrcr, icepack_max_nbtrcr:',nbtrcr, icepack_max_nbtrcr + call abort_ice (subname//' ERROR: nbtrcr > icepack_max_nbtrcr') + endif + if (.NOT. dEdd_algae) nbtrcr_sw = 1 + + ! tcraig, added 6/1/21, why is nbtrcr_sw set here? + call icepack_init_tracer_sizes(nbtrcr_sw_in=nbtrcr_sw) + allocate(trcrn_sw(nx_block,ny_block,nbtrcr_sw,ncat,max_blocks)) ! bgc tracers active in the delta-Eddington shortwave + + !----------------------------------------------------------------- + ! spew + !----------------------------------------------------------------- + if (my_task == master_task) then + if (skl_bgc) then + + write(nu_diag,1020) ' number of bio tracers = ', nbtrcr + write(nu_diag,1020) ' number of Isw tracers = ', nbtrcr_sw + + elseif (z_tracers) then + + write(nu_diag,1020) ' number of ztracers = ', nbtrcr + write(nu_diag,1020) ' number of Isw tracers = ', nbtrcr_sw + write(nu_diag,1000) ' initbio_frac = ', initbio_frac + write(nu_diag,1000) ' frazil_scav = ', frazil_scav + + endif ! skl_bgc or z_tracers + endif ! master_task + + 1000 format (a30,2x,f9.2) ! a30 to align formatted, unformatted statements + 1020 format (a30,2x,i6) ! integer + + end subroutine init_zbgc + +!======================================================================= + + subroutine init_bgc_trcr(nk, nt_fbri, & + nt_bgc, nlt_bgc, & + bgctype, nt_depend, & + bgc_tracer_type, trcr_depend, & + trcr_base, n_trcr_strata, & + nt_strata, bio_index) + + integer (kind=int_kind), intent(in) :: & + nk , & ! number of bgc layers in ice + nt_depend , & ! tracer dependency index + nt_bgc , & ! tracer index + nlt_bgc , & ! bio tracer index + nt_fbri + + integer (kind=int_kind), dimension(:), intent(inout) :: & + trcr_depend , & ! tracer dependencies + n_trcr_strata, & ! number of underlying tracer layers + bio_index ! relates bio indices, i.e. nlt_bgc_N to nt_bgc_N + + integer (kind=int_kind), dimension(:,:), intent(inout) :: & + nt_strata ! indices of underlying tracer layers + + real (kind=dbl_kind), dimension(:,:), intent(inout) :: & + trcr_base ! = 0 or 1 depending on tracer dependency + ! argument 2: (1) aice, (2) vice, (3) vsno + + real (kind=dbl_kind), intent(in) :: & + bgctype ! bio tracer transport type (mobile vs stationary) + + real (kind=dbl_kind), dimension(:), intent(inout) :: & + bgc_tracer_type ! bio tracer transport type array + + ! local variables + + integer (kind=int_kind) :: & + k , & ! loop index + n_strata , & ! temporary values + nt_strata1, & ! + nt_strata2 + + real (kind=dbl_kind) :: & + trcr_base1, & ! temporary values + trcr_base2, & + trcr_base3 + + character(len=*), parameter :: subname='(init_bgc_trcr)' + + !-------- + + bgc_tracer_type(nlt_bgc) = bgctype + + if (nk > 1) then ! include vertical bgc in snow + do k = nk, nk+1 + trcr_depend (nt_bgc + k ) = 2 ! snow volume + trcr_base (nt_bgc + k,1) = c0 + trcr_base (nt_bgc + k,2) = c0 + trcr_base (nt_bgc + k,3) = c1 + n_trcr_strata(nt_bgc + k ) = 0 + nt_strata (nt_bgc + k,1) = 0 + nt_strata (nt_bgc + k,2) = 0 + enddo + + trcr_base1 = c0 + trcr_base2 = c1 + trcr_base3 = c0 + n_strata = 1 + nt_strata1 = nt_fbri + nt_strata2 = 0 + else ! nk = 1 + trcr_base1 = c1 + trcr_base2 = c0 + trcr_base3 = c0 + n_strata = 0 + nt_strata1 = 0 + nt_strata2 = 0 + endif ! nk + + do k = 1, nk ! 
in ice + trcr_depend (nt_bgc + k - 1 ) = nt_depend + trcr_base (nt_bgc + k - 1,1) = trcr_base1 + trcr_base (nt_bgc + k - 1,2) = trcr_base2 + trcr_base (nt_bgc + k - 1,3) = trcr_base3 + n_trcr_strata(nt_bgc + k - 1 ) = n_strata + nt_strata (nt_bgc + k - 1,1) = nt_strata1 + nt_strata (nt_bgc + k - 1,2) = nt_strata2 + enddo + + bio_index (nlt_bgc) = nt_bgc + + end subroutine init_bgc_trcr + +!======================================================================= + + end module ice_init_column + +!======================================================================= diff --git a/cicecore/drivers/unittest/opticep/ice_step_mod.F90 b/cicecore/drivers/unittest/opticep/ice_step_mod.F90 new file mode 100644 index 000000000..ac66255a4 --- /dev/null +++ b/cicecore/drivers/unittest/opticep/ice_step_mod.F90 @@ -0,0 +1,1784 @@ +!======================================================================= +! +! Contains CICE component driver routines common to all drivers. +! +! authors Elizabeth C. Hunke, LANL +! Philip W. Jones, LANL +! William H. Lipscomb, LANL +! +! 2008 ECH: created module by moving subroutines from drivers/cice4/ +! 2014 ECH: created column package + + module ice_step_mod + + use ice_kinds_mod + use ice_blocks, only: block, get_block + use ice_blocks, only: nx_block, ny_block + use ice_constants, only: c0, c1, c1000, c4, p25 + use ice_constants, only: field_loc_center, field_loc_NEcorner, & + field_loc_Nface, field_loc_Eface, & + field_type_scalar, field_type_vector + use ice_domain, only: halo_info, nblocks, blocks_ice + use ice_domain_size, only: max_blocks + use ice_exit, only: abort_ice + use ice_fileunits, only: nu_diag + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc, only: icepack_prep_radiation + use icepack_intfc, only: icepack_step_therm1 + use icepack_intfc, only: icepack_step_therm2 + use icepack_intfc, only: icepack_aggregate + use icepack_intfc, only: icepack_step_ridge + use icepack_intfc, only: icepack_step_wavefracture + use icepack_intfc, only: icepack_step_radiation + use icepack_intfc, only: icepack_ocn_mixed_layer, icepack_atm_boundary + use icepack_intfc, only: icepack_biogeochemistry, icepack_load_ocean_bio_array + use icepack_intfc, only: icepack_max_algae, icepack_max_nbtrcr, icepack_max_don + use icepack_intfc, only: icepack_max_doc, icepack_max_dic, icepack_max_aero + use icepack_intfc, only: icepack_max_fe, icepack_max_iso + use icepack_intfc, only: icepack_query_parameters + use icepack_intfc, only: icepack_query_tracer_flags, icepack_query_tracer_sizes + use icepack_intfc, only: icepack_query_tracer_indices + + implicit none + private + + public :: step_therm1, step_therm2, step_dyn_horiz, step_dyn_ridge, & + step_snow, prep_radiation, step_radiation, ocean_mixed_layer, & + update_state, biogeochemistry, step_dyn_wave, step_prep + + real (kind=dbl_kind), dimension (:,:,:), allocatable :: & + uvelT_icep, & ! uvel for wind stress computation in icepack + vvelT_icep ! vvel for wind stress computation in icepack + +!======================================================================= + + contains + +!======================================================================= + + subroutine save_init +! saves initial values for aice, aicen, vicen, vsnon + + use ice_state, only: aice, aicen, aice_init, aicen_init, & + vicen, vicen_init, vsnon, vsnon_init + + character(len=*), parameter :: subname = '(save_init)' + + !----------------------------------------------------------------- + ! 
Save the ice area passed to the coupler (so that history fields + ! can be made consistent with coupler fields). + ! Save the initial ice area and volume in each category. + !----------------------------------------------------------------- + + aice_init = aice + aicen_init = aicen + vicen_init = vicen + vsnon_init = vsnon + + end subroutine save_init + +!======================================================================= + + subroutine step_prep +! prep for step, called outside nblock loop + + use ice_flux, only: uatm, vatm, uatmT, vatmT + use ice_grid, only: grid_atm_dynu, grid_atm_dynv, grid_average_X2Y + use ice_state, only: uvel, vvel + + logical (kind=log_kind) :: & + highfreq ! highfreq flag + + logical (kind=log_kind), save :: & + first_call = .true. ! first call flag + + character(len=*), parameter :: subname = '(step_prep)' + + ! Save initial state + + call save_init + + ! Compute uatmT, vatmT + + call grid_average_X2Y('S',uatm,grid_atm_dynu,uatmT,'T') + call grid_average_X2Y('S',vatm,grid_atm_dynv,vatmT,'T') + + !----------------------------------------------------------------- + ! Compute uvelT_icep, vvelT_icep + !----------------------------------------------------------------- + + if (first_call) then + allocate(uvelT_icep(nx_block,ny_block,max_blocks)) + allocate(vvelT_icep(nx_block,ny_block,max_blocks)) + uvelT_icep = c0 + vvelT_icep = c0 + endif + + call icepack_query_parameters(highfreq_out=highfreq) + + if (highfreq) then + call grid_average_X2Y('A', uvel, 'U', uvelT_icep, 'T') + call grid_average_X2Y('A', vvel, 'U', vvelT_icep, 'T') + endif + + first_call = .false. + + end subroutine step_prep + +!======================================================================= +! +! Scales radiation fields computed on the previous time step. +! +! authors: Elizabeth Hunke, LANL + + subroutine prep_radiation (iblk) + + use ice_domain_size, only: ncat, nilyr, nslyr + use ice_flux, only: scale_factor, swvdr, swvdf, swidr, swidf, & + alvdr_ai, alvdf_ai, alidr_ai, alidf_ai, & + alvdr_init, alvdf_init, alidr_init, alidf_init + use ice_arrays_column, only: fswsfcn, fswintn, & + fswthrun, fswthrun_vdr, fswthrun_vdf, fswthrun_idr, fswthrun_idf, & + fswpenln, Sswabsn, Iswabsn + use ice_state, only: aice, aicen + use ice_timers, only: ice_timer_start, ice_timer_stop, timer_sw + + integer (kind=int_kind), intent(in) :: & + iblk ! block index + + ! local variables + + integer (kind=int_kind) :: & + ilo,ihi,jlo,jhi, & ! beginning and end of physical domain + i, j ! horizontal indices + + type (block) :: & + this_block ! block information for current block + + character(len=*), parameter :: subname = '(prep_radiation)' + + call ice_timer_start(timer_sw,iblk) ! shortwave + + alvdr_init(:,:,iblk) = c0 + alvdf_init(:,:,iblk) = c0 + alidr_init(:,:,iblk) = c0 + alidf_init(:,:,iblk) = c0 + + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + !----------------------------------------------------------------- + ! 
Compute netsw scaling factor (new netsw / old netsw) + !----------------------------------------------------------------- + + do j = jlo, jhi + do i = ilo, ihi + + alvdr_init(i,j,iblk) = alvdr_ai(i,j,iblk) + alvdf_init(i,j,iblk) = alvdf_ai(i,j,iblk) + alidr_init(i,j,iblk) = alidr_ai(i,j,iblk) + alidf_init(i,j,iblk) = alidf_ai(i,j,iblk) + + call icepack_prep_radiation (ncat=ncat, nilyr=nilyr, nslyr=nslyr, & + scale_factor=scale_factor(i,j,iblk), & + aice = aice (i,j, iblk), aicen = aicen (i,j, :,iblk), & + swvdr = swvdr (i,j, iblk), swvdf = swvdf (i,j, iblk), & + swidr = swidr (i,j, iblk), swidf = swidf (i,j, iblk), & + alvdr_ai = alvdr_ai(i,j, iblk), alvdf_ai = alvdf_ai(i,j, iblk), & + alidr_ai = alidr_ai(i,j, iblk), alidf_ai = alidf_ai(i,j, iblk), & + fswsfcn = fswsfcn (i,j, :,iblk), fswintn = fswintn (i,j, :,iblk), & + fswthrun = fswthrun(i,j, :,iblk), & +!opt fswthrun_vdr = fswthrun_vdr(i,j, :,iblk), & +!opt fswthrun_vdf = fswthrun_vdf(i,j, :,iblk), & +!opt fswthrun_idr = fswthrun_idr(i,j, :,iblk), & +!opt fswthrun_idf = fswthrun_idf(i,j, :,iblk), & + fswpenln = fswpenln(i,j,:,:,iblk), & + Sswabsn = Sswabsn (i,j,:,:,iblk), Iswabsn = Iswabsn (i,j,:,:,iblk)) + + enddo ! i + enddo ! j + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + call ice_timer_stop(timer_sw,iblk) ! shortwave + + end subroutine prep_radiation + +!======================================================================= +! +! Driver for updating ice and snow internal temperatures and +! computing thermodynamic growth rates and coupler fluxes. +! +! authors: William H. Lipscomb, LANL + + subroutine step_therm1 (dt, iblk) + + use ice_arrays_column, only: ffracn, dhsn, & + Cdn_ocn, Cdn_ocn_skin, Cdn_ocn_floe, Cdn_ocn_keel, Cdn_atm_ratio, & + Cdn_atm, Cdn_atm_skin, Cdn_atm_floe, Cdn_atm_rdg, Cdn_atm_pond, & + hfreebd, hdraft, hridge, distrdg, hkeel, dkeel, lfloe, dfloe, & + fswsfcn, fswintn, Sswabsn, Iswabsn, meltsliqn, meltsliq, & + fswthrun, fswthrun_vdr, fswthrun_vdf, fswthrun_idr, fswthrun_idf + use ice_calendar, only: yday + use ice_domain_size, only: ncat, nilyr, nslyr, n_iso, n_aero + use ice_flux, only: frzmlt, sst, Tf, strocnxT_iavg, strocnyT_iavg, rside, fbot, Tbot, Tsnice, & + meltsn, melttn, meltbn, congeln, snoicen, uatmT, vatmT, fside, wlat, & + wind, rhoa, potT, Qa, zlvl, zlvs, strax, stray, flatn, fsensn, fsurfn, fcondtopn, & + flw, fsnow, fpond, sss, mlt_onset, frz_onset, fcondbotn, fcondbot, fsloss, & + frain, Tair, strairxT, strairyT, fsurf, fcondtop, fsens, & + flat, fswabs, flwout, evap, evaps, evapi, Tref, Qref, Uref, fresh, fsalt, fhocn, & + fswthru, fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & + meltt, melts, meltb, congel, snoice, & + flatn_f, fsensn_f, fsurfn_f, fcondtopn_f, & + send_i2x_per_cat, fswthrun_ai, dsnow + use ice_flux_bgc, only: dsnown, faero_atm, faero_ocn, fiso_atm, fiso_ocn, & + Qa_iso, Qref_iso, fiso_evap, HDO_ocn, H2_16O_ocn, H2_18O_ocn + use ice_grid, only: lmask_n, lmask_s, tmask + use ice_state, only: aice, aicen, aicen_init, vicen_init, & + vice, vicen, vsno, vsnon, trcrn, vsnon_init +#ifdef CICE_IN_NEMO + use ice_state, only: aice_init +#endif + +#ifdef CESMCOUPLED + use ice_prescribed_mod, only: prescribed_ice +#else + logical (kind=log_kind) :: & + prescribed_ice ! if .true., use prescribed ice instead of computed +#endif + real (kind=dbl_kind), intent(in) :: & + dt ! time step (s) + + integer (kind=int_kind), intent(in) :: & + iblk ! block index + + ! 
local variables +#ifdef CICE_IN_NEMO + real (kind=dbl_kind) :: & + raice ! reciprocal of ice concentration +#endif + integer (kind=int_kind) :: & + ilo,ihi,jlo,jhi, & ! beginning and end of physical domain + i, j , & ! horizontal indices + n , & ! thickness category index + k, kk ! indices for aerosols + + integer (kind=int_kind) :: & + ntrcr, nt_apnd, nt_hpnd, nt_ipnd, nt_alvl, nt_vlvl, nt_Tsfc, & + nt_iage, nt_FY, nt_qice, nt_sice, nt_aero, nt_qsno, & + nt_isosno, nt_isoice, nt_rsnw, nt_smice, nt_smliq + + logical (kind=log_kind) :: & + tr_iage, tr_FY, tr_iso, tr_aero, tr_pond, & + tr_pond_lvl, tr_pond_topo, calc_Tsfc, highfreq, tr_snow + + real (kind=dbl_kind) :: & + puny ! a very small number + + real (kind=dbl_kind), dimension(n_aero,2,ncat) :: & + aerosno, aeroice ! kg/m^2 + + real (kind=dbl_kind), dimension(n_iso,ncat) :: & + isosno, isoice ! kg/m^2 + + real (kind=dbl_kind), dimension(nslyr,ncat) :: & + rsnwn, smicen, smliqn + + type (block) :: & + this_block ! block information for current block + + character(len=*), parameter :: subname = '(step_therm1)' + + call icepack_query_parameters(puny_out=puny) + call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc) + call icepack_query_parameters(highfreq_out=highfreq) + call icepack_query_tracer_sizes(ntrcr_out=ntrcr) + call icepack_query_tracer_flags( & + tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_iso_out=tr_iso, & + tr_aero_out=tr_aero, tr_pond_out=tr_pond, & + tr_pond_lvl_out=tr_pond_lvl, tr_pond_topo_out=tr_pond_topo, & + tr_snow_out=tr_snow) + call icepack_query_tracer_indices( & + nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, nt_ipnd_out=nt_ipnd, & + nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, nt_Tsfc_out=nt_Tsfc, & + nt_iage_out=nt_iage, nt_FY_out=nt_FY, & + nt_qice_out=nt_qice, nt_sice_out=nt_sice, & + nt_aero_out=nt_aero, nt_qsno_out=nt_qsno, & + nt_rsnw_out=nt_rsnw, nt_smice_out=nt_smice, nt_smliq_out=nt_smliq, & + nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + +#ifndef CESMCOUPLED + prescribed_ice = .false. +#endif + + rsnwn (:,:) = c0 + smicen (:,:) = c0 + smliqn (:,:) = c0 + isoice (:,:) = c0 + aerosno(:,:,:) = c0 + aeroice(:,:,:) = c0 + +#ifdef CICE_IN_NEMO + do j = 1, ny_block + do i = 1, nx_block + + !--------------------------------------------------------------- + ! Scale frain and fsnow by ice concentration as these fields + ! are supplied by NEMO multiplied by ice concentration + !--------------------------------------------------------------- + + if (aice_init(i,j,iblk) > puny) then + raice = c1 / aice_init(i,j,iblk) + frain(i,j,iblk) = frain(i,j,iblk)*raice + fsnow(i,j,iblk) = fsnow(i,j,iblk)*raice + else + frain(i,j,iblk) = c0 + fsnow(i,j,iblk) = c0 + endif + + enddo ! i + enddo ! j +#endif + + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = jlo, jhi + do i = ilo, ihi + + if (tr_snow) then + do n = 1, ncat + do k = 1, nslyr + rsnwn (k,n) = trcrn(i,j,nt_rsnw +k-1,n,iblk) + smicen(k,n) = trcrn(i,j,nt_smice+k-1,n,iblk) + smliqn(k,n) = trcrn(i,j,nt_smliq+k-1,n,iblk) + enddo + enddo + endif ! tr_snow + + if (tr_iso) then ! trcrn(nt_iso*) has units kg/m^3 + do n=1,ncat + do k=1,n_iso + isosno(k,n) = trcrn(i,j,nt_isosno+k-1,n,iblk) * vsnon_init(i,j,n,iblk) + isoice(k,n) = trcrn(i,j,nt_isoice+k-1,n,iblk) * vicen_init(i,j,n,iblk) + enddo + enddo + endif ! 
tr_iso + + if (tr_aero) then ! trcrn(nt_aero) has units kg/m^3 + do n=1,ncat + do k=1,n_aero + aerosno (k,:,n) = & + trcrn(i,j,nt_aero+(k-1)*4 :nt_aero+(k-1)*4+1,n,iblk) & + * vsnon_init(i,j,n,iblk) + aeroice (k,:,n) = & + trcrn(i,j,nt_aero+(k-1)*4+2:nt_aero+(k-1)*4+3,n,iblk) & + * vicen_init(i,j,n,iblk) + enddo + enddo + endif ! tr_aero + + if (tmask(i,j,iblk)) then + + call icepack_step_therm1(dt=dt, ncat=ncat, & + nilyr=nilyr, nslyr=nslyr, & + aicen_init = aicen_init (i,j,:,iblk), & + vicen_init = vicen_init (i,j,:,iblk), & + vsnon_init = vsnon_init (i,j,:,iblk), & + aice = aice (i,j, iblk), & + aicen = aicen (i,j,:,iblk), & + vice = vice (i,j, iblk), & + vicen = vicen (i,j,:,iblk), & + vsno = vsno (i,j, iblk), & + vsnon = vsnon (i,j,:,iblk), & + uvel = uvelT_icep (i,j, iblk), & + vvel = vvelT_icep (i,j, iblk), & + Tsfc = trcrn (i,j,nt_Tsfc,:,iblk), & + zqsn = trcrn (i,j,nt_qsno:nt_qsno+nslyr-1,:,iblk), & + zqin = trcrn (i,j,nt_qice:nt_qice+nilyr-1,:,iblk), & + zSin = trcrn (i,j,nt_sice:nt_sice+nilyr-1,:,iblk), & + alvl = trcrn (i,j,nt_alvl,:,iblk), & + vlvl = trcrn (i,j,nt_vlvl,:,iblk), & + apnd = trcrn (i,j,nt_apnd,:,iblk), & + hpnd = trcrn (i,j,nt_hpnd,:,iblk), & + ipnd = trcrn (i,j,nt_ipnd,:,iblk), & + iage = trcrn (i,j,nt_iage,:,iblk), & + FY = trcrn (i,j,nt_FY ,:,iblk), & +!opt rsnwn = rsnwn (:,:), & +!opt smicen = smicen (:,:), & +!opt smliqn = smliqn (:,:), & + aerosno = aerosno (:,:,:), & + aeroice = aeroice (:,:,:), & +!opt isosno = isosno (:,:), & +!opt isoice = isoice (:,:), & + uatm = uatmT (i,j, iblk), & + vatm = vatmT (i,j, iblk), & + wind = wind (i,j, iblk), & + zlvl = zlvl (i,j, iblk), & +!opt zlvs = zlvs (i,j, iblk), & + Qa = Qa (i,j, iblk), & +!opt Qa_iso = Qa_iso (i,j,:,iblk), & + rhoa = rhoa (i,j, iblk), & + Tair = Tair (i,j, iblk), & + Tref = Tref (i,j, iblk), & + Qref = Qref (i,j, iblk), & +!opt Qref_iso = Qref_iso (i,j,:,iblk), & + Uref = Uref (i,j, iblk), & + Cdn_atm_ratio= Cdn_atm_ratio(i,j, iblk), & + Cdn_ocn = Cdn_ocn (i,j, iblk), & + Cdn_ocn_skin = Cdn_ocn_skin(i,j, iblk), & + Cdn_ocn_floe = Cdn_ocn_floe(i,j, iblk), & + Cdn_ocn_keel = Cdn_ocn_keel(i,j, iblk), & + Cdn_atm = Cdn_atm (i,j, iblk), & + Cdn_atm_skin = Cdn_atm_skin(i,j, iblk), & + Cdn_atm_floe = Cdn_atm_floe(i,j, iblk), & + Cdn_atm_pond = Cdn_atm_pond(i,j, iblk), & + Cdn_atm_rdg = Cdn_atm_rdg (i,j, iblk), & + hfreebd = hfreebd (i,j, iblk), & + hdraft = hdraft (i,j, iblk), & + hridge = hridge (i,j, iblk), & + distrdg = distrdg (i,j, iblk), & + hkeel = hkeel (i,j, iblk), & + dkeel = dkeel (i,j, iblk), & + lfloe = lfloe (i,j, iblk), & + dfloe = dfloe (i,j, iblk), & + strax = strax (i,j, iblk), & + stray = stray (i,j, iblk), & + strairxT = strairxT (i,j, iblk), & + strairyT = strairyT (i,j, iblk), & + potT = potT (i,j, iblk), & + sst = sst (i,j, iblk), & + sss = sss (i,j, iblk), & + Tf = Tf (i,j, iblk), & + strocnxT = strocnxT_iavg(i,j, iblk), & + strocnyT = strocnyT_iavg(i,j, iblk), & + fbot = fbot (i,j, iblk), & + Tbot = Tbot (i,j, iblk), & + Tsnice = Tsnice (i,j, iblk), & + frzmlt = frzmlt (i,j, iblk), & + rside = rside (i,j, iblk), & + fside = fside (i,j, iblk), & +!opt wlat = wlat (i,j, iblk), & + fsnow = fsnow (i,j, iblk), & + frain = frain (i,j, iblk), & + fpond = fpond (i,j, iblk), & +!opt fsloss = fsloss (i,j, iblk), & + fsurf = fsurf (i,j, iblk), & + fsurfn = fsurfn (i,j,:,iblk), & + fcondtop = fcondtop (i,j, iblk), & + fcondtopn = fcondtopn (i,j,:,iblk), & + fcondbot = fcondbot (i,j, iblk), & + fcondbotn = fcondbotn (i,j,:,iblk), & + fswsfcn = fswsfcn (i,j,:,iblk), & + fswintn = fswintn 
(i,j,:,iblk), & + fswthrun = fswthrun (i,j,:,iblk), & +!opt fswthrun_vdr = fswthrun_vdr (i,j,:,iblk),& +!opt fswthrun_vdf = fswthrun_vdf (i,j,:,iblk),& +!opt fswthrun_idr = fswthrun_idr (i,j,:,iblk),& +!opt fswthrun_idf = fswthrun_idf (i,j,:,iblk),& + fswabs = fswabs (i,j, iblk), & + flwout = flwout (i,j, iblk), & + Sswabsn = Sswabsn (i,j,:,:,iblk), & + Iswabsn = Iswabsn (i,j,:,:,iblk), & + flw = flw (i,j, iblk), & + fsens = fsens (i,j, iblk), & + fsensn = fsensn (i,j,:,iblk), & + flat = flat (i,j, iblk), & + flatn = flatn (i,j,:,iblk), & + evap = evap (i,j, iblk), & + evaps = evaps (i,j, iblk), & + evapi = evapi (i,j, iblk), & + fresh = fresh (i,j, iblk), & + fsalt = fsalt (i,j, iblk), & + fhocn = fhocn (i,j, iblk), & + fswthru = fswthru (i,j, iblk), & +!opt fswthru_vdr = fswthru_vdr (i,j, iblk), & +!opt fswthru_vdf = fswthru_vdf (i,j, iblk), & +!opt fswthru_idr = fswthru_idr (i,j, iblk), & +!opt fswthru_idf = fswthru_idf (i,j, iblk), & + flatn_f = flatn_f (i,j,:,iblk), & + fsensn_f = fsensn_f (i,j,:,iblk), & + fsurfn_f = fsurfn_f (i,j,:,iblk), & + fcondtopn_f = fcondtopn_f (i,j,:,iblk), & + faero_atm = faero_atm (i,j,1:n_aero,iblk), & + faero_ocn = faero_ocn (i,j,1:n_aero,iblk), & +!opt fiso_atm = fiso_atm (i,j,:,iblk), & +!opt fiso_ocn = fiso_ocn (i,j,:,iblk), & +!opt fiso_evap = fiso_evap (i,j,:,iblk), & +!opt HDO_ocn = HDO_ocn (i,j, iblk), & +!opt H2_16O_ocn = H2_16O_ocn (i,j, iblk), & +!opt H2_18O_ocn = H2_18O_ocn (i,j, iblk), & + dhsn = dhsn (i,j,:,iblk), & + ffracn = ffracn (i,j,:,iblk), & + meltt = meltt (i,j, iblk), & + melttn = melttn (i,j,:,iblk), & + meltb = meltb (i,j, iblk), & + meltbn = meltbn (i,j,:,iblk), & + melts = melts (i,j, iblk), & + meltsn = meltsn (i,j,:,iblk), & + congel = congel (i,j, iblk), & + congeln = congeln (i,j,:,iblk), & + snoice = snoice (i,j, iblk), & + snoicen = snoicen (i,j,:,iblk), & +!opt dsnow = dsnow (i,j, iblk), & + dsnown = dsnown (i,j,:,iblk), & +!opt meltsliq = meltsliq (i,j, iblk), & +!opt meltsliqn = meltsliqn (i,j,:,iblk), & + lmask_n = lmask_n (i,j, iblk), & + lmask_s = lmask_s (i,j, iblk), & + mlt_onset = mlt_onset (i,j, iblk), & + frz_onset = frz_onset (i,j, iblk), & + yday=yday & +!opt prescribed_ice=prescribed_ice, & + ) + + !----------------------------------------------------------------- + ! handle per-category i2x fields, no merging + !----------------------------------------------------------------- + + if (send_i2x_per_cat) then + do n = 1, ncat + ! TODO (mvertens, 2018-12-22): do we need to add the band separated quantities + ! for MOM6 here also? + + fswthrun_ai(i,j,n,iblk) = fswthrun(i,j,n,iblk)*aicen_init(i,j,n,iblk) + enddo ! ncat + endif + + endif + + if (tr_snow) then + do n = 1, ncat + do k = 1, nslyr + trcrn(i,j,nt_rsnw +k-1,n,iblk) = rsnwn (k,n) + trcrn(i,j,nt_smice+k-1,n,iblk) = smicen(k,n) + trcrn(i,j,nt_smliq+k-1,n,iblk) = smliqn(k,n) + enddo + enddo + endif ! tr_snow + + if (tr_iso) then + do n = 1, ncat + if (vicen(i,j,n,iblk) > puny) & + isoice(:,n) = isoice(:,n)/vicen(i,j,n,iblk) + if (vsnon(i,j,n,iblk) > puny) & + isosno(:,n) = isosno(:,n)/vsnon(i,j,n,iblk) + do k = 1, n_iso + trcrn(i,j,nt_isosno+k-1,n,iblk) = isosno(k,n) + trcrn(i,j,nt_isoice+k-1,n,iblk) = isoice(k,n) + enddo + enddo + endif ! 
tr_iso + + if (tr_aero) then + do n = 1, ncat + if (vicen(i,j,n,iblk) > puny) & + aeroice(:,:,n) = aeroice(:,:,n)/vicen(i,j,n,iblk) + if (vsnon(i,j,n,iblk) > puny) & + aerosno(:,:,n) = aerosno(:,:,n)/vsnon(i,j,n,iblk) + do k = 1, n_aero + do kk = 1, 2 + trcrn(i,j,nt_aero+(k-1)*4+kk-1,n,iblk)=aerosno(k,kk,n) + trcrn(i,j,nt_aero+(k-1)*4+kk+1,n,iblk)=aeroice(k,kk,n) + enddo + enddo + enddo + endif ! tr_aero + + enddo ! i + enddo ! j + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + end subroutine step_therm1 + +!======================================================================= +! Driver for thermodynamic changes not needed for coupling: +! transport in thickness space, lateral growth and melting. +! +! authors: William H. Lipscomb, LANL +! Elizabeth C. Hunke, LANL + + subroutine step_therm2 (dt, iblk) + + use ice_arrays_column, only: hin_max, fzsal, ocean_bio, wave_sig_ht, & + wave_spectrum, wavefreq, dwavefreq, & + first_ice, bgrid, cgrid, igrid, floe_rad_c, floe_binwidth, & + d_afsd_latg, d_afsd_newi, d_afsd_latm, d_afsd_weld + use ice_calendar, only: yday + use ice_domain_size, only: ncat, nilyr, nslyr, nblyr, nfsd + use ice_flux, only: fresh, frain, fpond, frzmlt, frazil, frz_onset, & + update_ocn_f, fsalt, Tf, sss, salinz, fhocn, rside, fside, wlat, & + meltl, frazil_diag + use ice_flux_bgc, only: flux_bio, faero_ocn, & + fiso_ocn, HDO_ocn, H2_16O_ocn, H2_18O_ocn + use ice_grid, only: tmask + use ice_state, only: aice, aicen, aice0, trcr_depend, & + aicen_init, vicen_init, trcrn, vicen, vsnon, & + trcr_base, n_trcr_strata, nt_strata + + real (kind=dbl_kind), intent(in) :: & + dt ! time step + + integer (kind=int_kind), intent(in) :: & + iblk ! block index + + ! local variables + + integer (kind=int_kind) :: & + ilo,ihi,jlo,jhi, & ! beginning and end of physical domain + i, j ! horizontal indices + + integer (kind=int_kind) :: & + ntrcr, nbtrcr, nltrcr + + logical (kind=log_kind) :: & + tr_fsd, & ! floe size distribution tracers + z_tracers, & ! vertical biogeochemistry + solve_zsal ! zsalinity + + type (block) :: & + this_block ! block information for current block + + character(len=*), parameter :: subname = '(step_therm2)' + + call icepack_query_parameters(z_tracers_out=z_tracers,solve_zsal_out=solve_zsal) + call icepack_query_tracer_sizes(ntrcr_out=ntrcr, nbtrcr_out=nbtrcr) + call icepack_query_tracer_flags(tr_fsd_out=tr_fsd) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + ! nltrcr is only used as a zbgc flag in icepack (number of zbgc tracers > 0) + if (z_tracers .or. solve_zsal) then + nltrcr = 1 + else + nltrcr = 0 + endif + + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = jlo, jhi + do i = ilo, ihi + + if (tmask(i,j,iblk)) then + + ! 
significant wave height for FSD + if (tr_fsd) & + wave_sig_ht(i,j,iblk) = c4*SQRT(SUM(wave_spectrum(i,j,:,iblk)*dwavefreq(:))) + + call icepack_step_therm2(dt=dt, ncat=ncat, & + nltrcr=nltrcr, nilyr=nilyr, nslyr=nslyr, nblyr=nblyr, & + hin_max = hin_max (:), & + aicen = aicen (i,j,:,iblk), & + vicen = vicen (i,j,:,iblk), & + vsnon = vsnon (i,j,:,iblk), & + aicen_init = aicen_init(i,j,:,iblk), & + vicen_init = vicen_init(i,j,:,iblk), & + trcrn = trcrn (i,j,:,:,iblk), & + aice0 = aice0 (i,j, iblk), & + aice = aice (i,j, iblk), & + trcr_depend= trcr_depend(:), & + trcr_base = trcr_base(:,:), & + n_trcr_strata = n_trcr_strata(:), & + nt_strata = nt_strata(:,:), & + Tf = Tf (i,j, iblk), & + sss = sss (i,j, iblk), & + salinz = salinz (i,j,:,iblk), & + rside = rside (i,j, iblk), & + meltl = meltl (i,j, iblk), & + fside = fside (i,j, iblk), & +!opt wlat = wlat (i,j, iblk), & + frzmlt = frzmlt (i,j, iblk), & + frazil = frazil (i,j, iblk), & + frain = frain (i,j, iblk), & + fpond = fpond (i,j, iblk), & + fresh = fresh (i,j, iblk), & + fsalt = fsalt (i,j, iblk), & + fhocn = fhocn (i,j, iblk), & + update_ocn_f = update_ocn_f, & + bgrid = bgrid, & + cgrid = cgrid, & + igrid = igrid, & + faero_ocn = faero_ocn (i,j,:,iblk), & + first_ice = first_ice (i,j,:,iblk), & + fzsal = fzsal (i,j, iblk), & + flux_bio = flux_bio (i,j,1:nbtrcr,iblk), & + ocean_bio = ocean_bio (i,j,1:nbtrcr,iblk), & + frazil_diag= frazil_diag(i,j,iblk) & +!opt frz_onset = frz_onset (i,j, iblk), & +!opt yday = yday, & +!opt fiso_ocn = fiso_ocn (i,j,:,iblk), & +!opt HDO_ocn = HDO_ocn (i,j, iblk), & +!opt H2_16O_ocn = H2_16O_ocn(i,j, iblk), & +!opt H2_18O_ocn = H2_18O_ocn(i,j, iblk), & +!opt nfsd = nfsd, & +!opt wave_sig_ht= wave_sig_ht(i,j,iblk), & +!opt wave_spectrum = wave_spectrum(i,j,:,iblk), & +!opt wavefreq = wavefreq(:), & +!opt dwavefreq = dwavefreq(:), & +!opt d_afsd_latg= d_afsd_latg(i,j,:,iblk),& +!opt d_afsd_newi= d_afsd_newi(i,j,:,iblk),& +!opt d_afsd_latm= d_afsd_latm(i,j,:,iblk),& +!opt d_afsd_weld= d_afsd_weld(i,j,:,iblk),& +!opt floe_rad_c = floe_rad_c(:), & +!opt floe_binwidth = floe_binwidth(:) & + ) + endif ! tmask + + enddo ! i + enddo ! j + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + end subroutine step_therm2 + +!======================================================================= +! +! finalize thermo updates +! +! authors: Elizabeth Hunke, LANL + + subroutine update_state (dt, daidt, dvidt, dagedt, offset) + + use ice_domain_size, only: ncat +! use ice_grid, only: tmask + use ice_state, only: aicen, trcrn, vicen, vsnon, & + aice, trcr, vice, vsno, aice0, trcr_depend, & + bound_state, trcr_base, nt_strata, n_trcr_strata + use ice_timers, only: ice_timer_start, ice_timer_stop, timer_bound, timer_updstate + + real (kind=dbl_kind), intent(in) :: & + dt ! time step + + real (kind=dbl_kind), dimension(:,:,:), intent(inout), optional :: & + daidt, & ! change in ice area per time step + dvidt, & ! change in ice volume per time step + dagedt ! change in ice age per time step + + real (kind=dbl_kind), intent(in), optional :: & + offset ! d(age)/dt time offset = dt for thermo, 0 for dyn + + integer (kind=int_kind) :: & + iblk, & ! block index + i,j, & ! horizontal indices + ntrcr, & ! + nt_iage ! + + logical (kind=log_kind) :: & + tr_iage ! 
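+! Illustrative example (values assumed): the thermodynamics call passes
+! offset=dt, so an ice age tracer that aged by exactly dt over the step
+! gives dagedt = (age_new - age_old - dt)/dt = 0, while the dynamics
+! call passes offset=0 and reports the full transport-induced change.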
+ + character(len=*), parameter :: subname='(update_state)' + + call ice_timer_start(timer_updstate) + call icepack_query_tracer_flags(tr_iage_out=tr_iage) + call icepack_query_tracer_sizes(ntrcr_out=ntrcr) + call icepack_query_tracer_indices(nt_iage_out=nt_iage) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + !------------------------------------------------------------------- + ! Ghost cell updates for state variables. + !------------------------------------------------------------------- + + call ice_timer_start(timer_bound) + call bound_state (aicen, & + vicen, vsnon, & + ntrcr, trcrn) + call ice_timer_stop(timer_bound) + + !$OMP PARALLEL DO PRIVATE(iblk,i,j) SCHEDULE(runtime) + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + + !----------------------------------------------------------------- + ! Aggregate the updated state variables (includes ghost cells). + !----------------------------------------------------------------- + +! if (tmask(i,j,iblk)) & + call icepack_aggregate(ncat = ncat, & + aicen = aicen(i,j,:,iblk), & + trcrn = trcrn(i,j,:,:,iblk), & + vicen = vicen(i,j,:,iblk), & + vsnon = vsnon(i,j,:,iblk), & + aice = aice (i,j, iblk), & + trcr = trcr (i,j,:,iblk), & + vice = vice (i,j, iblk), & + vsno = vsno (i,j, iblk), & + aice0 = aice0(i,j, iblk), & + ntrcr = ntrcr, & + trcr_depend = trcr_depend(:), & + trcr_base = trcr_base(:,:), & + n_trcr_strata = n_trcr_strata(:), & + nt_strata = nt_strata(:,:)) + + if (present(offset)) then + + !----------------------------------------------------------------- + ! Compute thermodynamic area and volume tendencies. + !----------------------------------------------------------------- + + daidt(i,j,iblk) = (aice(i,j,iblk) - daidt(i,j,iblk)) / dt + dvidt(i,j,iblk) = (vice(i,j,iblk) - dvidt(i,j,iblk)) / dt + if (tr_iage) then + if (offset > c0) then ! thermo + if (trcr(i,j,nt_iage,iblk) > c0) & + dagedt(i,j,iblk) = (trcr(i,j,nt_iage,iblk) & + - dagedt(i,j,iblk) - offset) / dt + else ! dynamics + dagedt(i,j,iblk) = (trcr(i,j,nt_iage,iblk) & + - dagedt(i,j,iblk)) / dt + endif + endif ! tr_iage + endif ! present(offset) + + enddo ! i + enddo ! j + enddo ! iblk + !$OMP END PARALLEL DO + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + call ice_timer_stop(timer_updstate) + + end subroutine update_state + +!======================================================================= +! +! Run one time step of wave-fracturing the floe size distribution +! +! authors: Lettie Roach, NIWA +! Elizabeth C. Hunke, LANL + + subroutine step_dyn_wave (dt) + + use ice_arrays_column, only: wave_spectrum, & + d_afsd_wave, floe_rad_l, floe_rad_c, wavefreq, dwavefreq + use ice_domain_size, only: ncat, nfsd, nfreq + use ice_state, only: trcrn, aicen, aice, vice + use ice_timers, only: ice_timer_start, ice_timer_stop, timer_column, & + timer_fsd + + real (kind=dbl_kind), intent(in) :: & + dt ! time step + + ! local variables + + type (block) :: & + this_block ! block information for current block + + integer (kind=int_kind) :: & + ilo,ihi,jlo,jhi, & ! beginning and end of physical domain + iblk, & ! block index + i, j ! 
horizontal indices + + character (len=char_len) :: wave_spec_type + + character(len=*), parameter :: subname = '(step_dyn_wave)' + + call ice_timer_start(timer_column) + call ice_timer_start(timer_fsd) + + call icepack_query_parameters(wave_spec_type_out=wave_spec_type) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) + do iblk = 1, nblocks + + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = jlo, jhi + do i = ilo, ihi + d_afsd_wave(i,j,:,iblk) = c0 + call icepack_step_wavefracture (wave_spec_type, & + dt, ncat, nfsd, nfreq, & + aice (i,j, iblk), & + vice (i,j, iblk), & + aicen (i,j,:, iblk), & + floe_rad_l(:), floe_rad_c(:), & + wave_spectrum (i,j,:, iblk), & + wavefreq(:), dwavefreq(:), & + trcrn (i,j,:,:,iblk), & + d_afsd_wave (i,j,:, iblk)) + end do ! i + end do ! j + end do ! iblk + !$OMP END PARALLEL DO + + call ice_timer_stop(timer_fsd) + call ice_timer_stop(timer_column) + + end subroutine step_dyn_wave + +!======================================================================= +! +! Run one time step of dynamics and horizontal transport. +! NOTE: The evp and transport modules include boundary updates, so +! they cannot be done inside a single block loop. +! +! authors: William H. Lipscomb, LANL +! Elizabeth C. Hunke, LANL + + subroutine step_dyn_horiz (dt) + + use ice_boundary, only: ice_HaloUpdate + use ice_dyn_evp, only: evp + use ice_dyn_eap, only: eap + use ice_dyn_vp, only: implicit_solver + use ice_dyn_shared, only: kdyn + use ice_flux, only: strocnxU, strocnyU, strocnxT_iavg, strocnyT_iavg + use ice_flux, only: init_history_dyn + use ice_grid, only: grid_average_X2Y + use ice_state, only: aiU + use ice_transport_driver, only: advection, transport_upwind, transport_remap + + real (kind=dbl_kind), intent(in) :: & + dt ! dynamics time step + + ! local variables + + type (block) :: & + this_block ! block information for current block + + integer (kind=int_kind) :: & + ilo,ihi,jlo,jhi, & ! beginning and end of physical domain + iblk, & ! block index + i, j ! horizontal indices + + real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & + work1, & ! temporary + work2 ! temporary + + character(len=*), parameter :: subname = '(step_dyn_horiz)' + + call init_history_dyn ! initialize dynamic history variables + + !----------------------------------------------------------------- + ! Ice dynamics (momentum equation) + !----------------------------------------------------------------- + + if (kdyn == 1) call evp (dt) + if (kdyn == 2) call eap (dt) + if (kdyn == 3) call implicit_solver (dt) + + !----------------------------------------------------------------- + ! Compute strocnxT_iavg, strocnyT_iavg for thermo and coupling + !----------------------------------------------------------------- + + ! strocn computed on U, N, E as needed. Map strocn U divided by aiU to T + ! 
conservation requires aiU be divided before averaging + work1 = c0 + work2 = c0 + !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk), iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + if (aiU(i,j,iblk) /= c0) then + work1(i,j,iblk) = strocnxU(i,j,iblk)/aiU(i,j,iblk) + work2(i,j,iblk) = strocnyU(i,j,iblk)/aiU(i,j,iblk) + endif + enddo + enddo + enddo + !$OMP END PARALLEL DO + call ice_HaloUpdate (work1, halo_info, & + field_loc_NEcorner, field_type_vector) + call ice_HaloUpdate (work2, halo_info, & + field_loc_NEcorner, field_type_vector) + call grid_average_X2Y('F', work1, 'U', strocnxT_iavg, 'T') ! shift + call grid_average_X2Y('F', work2, 'U', strocnyT_iavg, 'T') + + !----------------------------------------------------------------- + ! Horizontal ice transport + !----------------------------------------------------------------- + + if (advection == 'upwind') then + call transport_upwind (dt) ! upwind + elseif (advection == 'remap') then + call transport_remap (dt) ! incremental remapping + endif + + end subroutine step_dyn_horiz + +!======================================================================= +! +! Run one time step of ridging. +! +! authors: William H. Lipscomb, LANL +! Elizabeth C. Hunke, LANL + + subroutine step_dyn_ridge (dt, ndtd, iblk) + + use ice_arrays_column, only: hin_max, fzsal, first_ice + use ice_domain_size, only: ncat, nilyr, nslyr, n_aero, nblyr + use ice_flux, only: & + rdg_conv, rdg_shear, dardg1dt, dardg2dt, & + dvirdgdt, opening, fpond, fresh, fhocn, & + aparticn, krdgn, aredistn, vredistn, dardg1ndt, dardg2ndt, & + dvirdgndt, araftn, vraftn, fsalt + use ice_flux_bgc, only: flux_bio, faero_ocn, fiso_ocn + use ice_grid, only: tmask + use ice_state, only: trcrn, vsnon, aicen, vicen, & + aice, aice0, trcr_depend, n_trcr_strata, & + trcr_base, nt_strata + use ice_timers, only: ice_timer_start, ice_timer_stop, timer_column, & + timer_ridge + + real (kind=dbl_kind), intent(in) :: & + dt ! time step + + integer (kind=int_kind), intent(in) :: & + ndtd, & ! number of dynamics subcycles + iblk ! block index + + ! local variables + + type (block) :: & + this_block ! block information for current block + + integer (kind=int_kind) :: & + ilo,ihi,jlo,jhi, & ! beginning and end of physical domain + i, j, & ! horizontal indices + ntrcr, & ! + nbtrcr ! + + character(len=*), parameter :: subname = '(step_dyn_ridge)' + + !----------------------------------------------------------------- + ! Ridging + !----------------------------------------------------------------- + + call ice_timer_start(timer_column,iblk) + call ice_timer_start(timer_ridge,iblk) + + call icepack_query_tracer_sizes(ntrcr_out=ntrcr, nbtrcr_out=nbtrcr) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + this_block = get_block(blocks_ice(iblk), iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = jlo, jhi + do i = ilo, ihi + +!echmod: this changes the answers, continue using tmask for now +! call aggregate_area (ncat, aicen(i,j,:,iblk), atmp, atmp0) +! 
if (atmp > c0) then + + if (tmask(i,j,iblk)) then + + call icepack_step_ridge (dt=dt, ndtd=ndtd, & + nilyr=nilyr, nslyr=nslyr, nblyr=nblyr, & + ncat=ncat, n_aero=n_aero, hin_max=hin_max(:), & + trcr_depend = trcr_depend (:), & + trcr_base = trcr_base (:,:), & + n_trcr_strata = n_trcr_strata(:), & + nt_strata = nt_strata (:,:), & + trcrn = trcrn (i,j,:,:,iblk), & + rdg_conv = rdg_conv (i,j, iblk), & + rdg_shear = rdg_shear(i,j, iblk), & + aicen = aicen (i,j,:,iblk), & + vicen = vicen (i,j,:,iblk), & + vsnon = vsnon (i,j,:,iblk), & + aice0 = aice0 (i,j, iblk), & + dardg1dt = dardg1dt (i,j, iblk), & + dardg2dt = dardg2dt (i,j, iblk), & + dvirdgdt = dvirdgdt (i,j, iblk), & + opening = opening (i,j, iblk), & + fpond = fpond (i,j, iblk), & + fresh = fresh (i,j, iblk), & + fhocn = fhocn (i,j, iblk), & + faero_ocn = faero_ocn(i,j,:,iblk), & +!opt fiso_ocn = fiso_ocn (i,j,:,iblk), & + aparticn = aparticn (i,j,:,iblk), & + krdgn = krdgn (i,j,:,iblk), & + aredistn = aredistn (i,j,:,iblk), & + vredistn = vredistn (i,j,:,iblk), & + dardg1ndt = dardg1ndt(i,j,:,iblk), & + dardg2ndt = dardg2ndt(i,j,:,iblk), & + dvirdgndt = dvirdgndt(i,j,:,iblk), & + araftn = araftn (i,j,:,iblk), & + vraftn = vraftn (i,j,:,iblk), & + aice = aice (i,j, iblk), & + fsalt = fsalt (i,j, iblk), & + first_ice = first_ice(i,j,:,iblk), & + fzsal = fzsal (i,j, iblk), & + flux_bio = flux_bio (i,j,1:nbtrcr,iblk)) + + endif ! tmask + + enddo ! i + enddo ! j + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + call ice_timer_stop(timer_ridge,iblk) + call ice_timer_stop(timer_column,iblk) + + end subroutine step_dyn_ridge + +!======================================================================= +! +! Updates snow tracers +! +! authors: Elizabeth C. Hunke, LANL +! Nicole Jeffery, LANL + + subroutine step_snow (dt, iblk) + + use ice_calendar, only: nstreams + use ice_domain_size, only: ncat, nslyr, nilyr + use ice_flux, only: snwcnt, wind, fresh, fhocn, fsloss, fsnow + use ice_state, only: trcrn, vsno, vsnon, vicen, aicen, aice + use icepack_intfc, only: icepack_step_snow + + real (kind=dbl_kind), intent(in) :: & + dt ! time step + + integer (kind=int_kind), intent(in) :: & + iblk ! block index + + ! local variables + + integer (kind=int_kind) :: & + nt_smice, nt_smliq, nt_rsnw, & + nt_Tsfc, nt_qice, nt_sice, nt_qsno, & + nt_alvl, nt_vlvl, nt_rhos + + integer (kind=int_kind) :: & + ilo,ihi,jlo,jhi, & ! beginning and end of physical domain + i, j, & ! horizontal indices + ns ! history streams index + + real (kind=dbl_kind) :: & + puny + + real (kind=dbl_kind) :: & + fhs ! flag for presence of snow + + character(len=*), parameter :: subname = '(step_snow)' + + type (block) :: & + this_block ! block information for current block + + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + !----------------------------------------------------------------- + ! 
query icepack values + !----------------------------------------------------------------- + + call icepack_query_parameters(puny_out=puny) + call icepack_query_tracer_indices( & + nt_smice_out=nt_smice, nt_smliq_out=nt_smliq, & + nt_rsnw_out=nt_rsnw, nt_Tsfc_out=nt_Tsfc, & + nt_qice_out=nt_qice, nt_sice_out=nt_sice, nt_qsno_out=nt_qsno, & + nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, nt_rhos_out=nt_rhos) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + !----------------------------------------------------------------- + ! Snow redistribution and metamorphosis + !----------------------------------------------------------------- + + do j = jlo, jhi + do i = ilo, ihi + + call icepack_step_snow (dt, nilyr, & + nslyr, ncat, & + wind (i,j, iblk), & + aice (i,j, iblk), & + aicen(i,j,:,iblk), & + vicen(i,j,:,iblk), & + vsnon(i,j,:,iblk), & + trcrn(i,j,nt_Tsfc,:,iblk), & + trcrn(i,j,nt_qice,:,iblk), & ! top layer only + trcrn(i,j,nt_sice,:,iblk), & ! top layer only + trcrn(i,j,nt_qsno:nt_qsno+nslyr-1,:,iblk), & + trcrn(i,j,nt_alvl,:,iblk), & + trcrn(i,j,nt_vlvl,:,iblk), & + trcrn(i,j,nt_smice:nt_smice+nslyr-1,:,iblk), & + trcrn(i,j,nt_smliq:nt_smliq+nslyr-1,:,iblk), & + trcrn(i,j,nt_rsnw:nt_rsnw+nslyr-1,:,iblk), & + trcrn(i,j,nt_rhos:nt_rhos+nslyr-1,:,iblk), & + fresh (i,j,iblk), & + fhocn (i,j,iblk), & + fsloss (i,j,iblk), & + fsnow (i,j,iblk)) + enddo + enddo + + ! increment counter for history averaging + do j = jlo, jhi + do i = ilo, ihi + fhs = c0 + if (vsno(i,j,iblk) > puny) fhs = c1 + do ns = 1, nstreams + snwcnt(i,j,iblk,ns) = snwcnt(i,j,iblk,ns) + fhs + enddo + enddo + enddo + + end subroutine step_snow + +!======================================================================= +! +! Computes radiation fields +! +! authors: William H. Lipscomb, LANL +! David Bailey, NCAR +! Elizabeth C. Hunke, LANL + + subroutine step_radiation (dt, iblk) + + use ice_arrays_column, only: ffracn, dhsn, & + fswsfcn, fswintn, fswpenln, Sswabsn, Iswabsn, & + fswthrun, fswthrun_vdr, fswthrun_vdf, fswthrun_idr, fswthrun_idf, & + albicen, albsnon, albpndn, & + alvdrn, alidrn, alvdfn, alidfn, apeffn, trcrn_sw, snowfracn, & + kaer_tab, waer_tab, gaer_tab, kaer_bc_tab, waer_bc_tab, & + gaer_bc_tab, bcenh, swgrid, igrid + use ice_calendar, only: calendar_type, days_per_year, nextsw_cday, yday, msec + use ice_domain_size, only: ncat, n_aero, nilyr, nslyr, n_zaero, n_algae, nblyr + use ice_flux, only: swvdr, swvdf, swidr, swidf, coszen, fsnow + use ice_grid, only: TLAT, TLON, tmask + use ice_state, only: aicen, vicen, vsnon, trcrn + use ice_timers, only: ice_timer_start, ice_timer_stop, timer_sw + use ice_communicate, only: my_task + use ice_diagnostics, only: npnt, print_points, pmloc, piloc, pjloc + + real (kind=dbl_kind), intent(in) :: & + dt ! time step + + integer (kind=int_kind), intent(in) :: & + iblk ! block index + + ! local variables + + integer (kind=int_kind) :: & + ilo,ihi,jlo,jhi, & ! beginning and end of physical domain + i, j, n, k, & ! horizontal indices + ipoint ! index for print diagnostic + + type (block) :: & + this_block ! 
block information for current block + + integer (kind=int_kind) :: & + nt_Tsfc, nt_alvl, nt_rsnw, & + nt_apnd, nt_hpnd, nt_ipnd, nt_aero, nlt_chl_sw, & + ntrcr, nbtrcr, nbtrcr_sw, nt_fbri + + integer (kind=int_kind), dimension(icepack_max_algae) :: & + nt_bgc_N + + integer (kind=int_kind), dimension(icepack_max_aero) :: & + nlt_zaero_sw, nt_zaero + + logical (kind=log_kind) :: & + tr_bgc_N, tr_zaero, tr_brine, dEdd_algae, modal_aero, snwgrain + + real (kind=dbl_kind), dimension(ncat) :: & + fbri ! brine height to ice thickness + + real(kind= dbl_kind), dimension(:,:), allocatable :: & + ztrcr_sw, & ! zaerosols (kg/m^3) and chla (mg/m^3) + rsnow ! snow grain radius tracer (10^-6 m) + + logical (kind=log_kind) :: & + debug, & ! flag for printing debugging information + l_print_point ! flag for printing debugging information + + character(len=*), parameter :: subname = '(step_radiation)' + + call ice_timer_start(timer_sw,iblk) ! shortwave + + call icepack_query_tracer_sizes(ntrcr_out=ntrcr, & + nbtrcr_out=nbtrcr, nbtrcr_sw_out=nbtrcr_sw) + call icepack_query_tracer_flags( & + tr_brine_out=tr_brine, tr_bgc_N_out=tr_bgc_N, tr_zaero_out=tr_zaero) + call icepack_query_tracer_indices( & + nt_Tsfc_out=nt_Tsfc, nt_alvl_out=nt_alvl, nt_rsnw_out=nt_rsnw, & + nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, nt_ipnd_out=nt_ipnd, nt_aero_out=nt_aero, & + nlt_chl_sw_out=nlt_chl_sw, nlt_zaero_sw_out=nlt_zaero_sw, & + nt_fbri_out=nt_fbri, nt_zaero_out=nt_zaero, nt_bgc_N_out=nt_bgc_N) + call icepack_query_parameters(dEdd_algae_out=dEdd_algae, modal_aero_out=modal_aero, & + snwgrain_out=snwgrain) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + allocate(ztrcr_sw(nbtrcr_sw,ncat)) + allocate(rsnow(nslyr,ncat)) + + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = jlo, jhi + do i = ilo, ihi + + l_print_point = .false. + debug = .false. + if (debug .and. print_points) then + do ipoint = 1, npnt + if (my_task == pmloc(ipoint) .and. & + i == piloc(ipoint) .and. & + j == pjloc(ipoint)) & + l_print_point = .true. + write (nu_diag, *) 'my_task = ',my_task + enddo ! 
ipoint + endif + fbri (:) = c0 + ztrcr_sw(:,:) = c0 + rsnow (:,:) = c0 + do n = 1, ncat + if (tr_brine) fbri(n) = trcrn(i,j,nt_fbri,n,iblk) + if (snwgrain) then + do k = 1, nslyr + rsnow(k,n) = trcrn(i,j,nt_rsnw+k-1,n,iblk) + enddo + endif + enddo + + if (tmask(i,j,iblk)) then + + call icepack_step_radiation (dt=dt, ncat=ncat, & + nblyr=nblyr, nilyr=nilyr, nslyr=nslyr, & + dEdd_algae=dEdd_algae, & + swgrid=swgrid(:), igrid=igrid(:), & + fbri=fbri(:), & + aicen=aicen(i,j, :,iblk), & + vicen=vicen(i,j, :,iblk), & + vsnon=vsnon(i,j, :,iblk), & + Tsfcn=trcrn(i,j,nt_Tsfc,:,iblk), & + alvln=trcrn(i,j,nt_alvl,:,iblk), & + apndn=trcrn(i,j,nt_apnd,:,iblk), & + hpndn=trcrn(i,j,nt_hpnd,:,iblk), & + ipndn=trcrn(i,j,nt_ipnd,:,iblk), & + aeron=trcrn(i,j,nt_aero:nt_aero+4*n_aero-1,:,iblk), & + bgcNn=trcrn(i,j,nt_bgc_N(1):nt_bgc_N(1)+n_algae*(nblyr+3)-1,:,iblk), & + zaeron=trcrn(i,j,nt_zaero(1):nt_zaero(1)+n_zaero*(nblyr+3)-1,:,iblk), & + trcrn_bgcsw=ztrcr_sw, & + TLAT=TLAT(i,j,iblk), TLON=TLON(i,j,iblk), & + calendar_type=calendar_type, & + days_per_year=days_per_year, & + nextsw_cday=nextsw_cday, yday=yday, & + sec=msec, & + kaer_tab=kaer_tab, kaer_bc_tab=kaer_bc_tab(:,:), & + waer_tab=waer_tab, waer_bc_tab=waer_bc_tab(:,:), & + gaer_tab=gaer_tab, gaer_bc_tab=gaer_bc_tab(:,:), & + bcenh=bcenh(:,:,:), & + modal_aero=modal_aero, & + swvdr =swvdr (i,j ,iblk), swvdf =swvdf (i,j ,iblk), & + swidr =swidr (i,j ,iblk), swidf =swidf (i,j ,iblk), & + coszen =coszen (i,j ,iblk), fsnow =fsnow (i,j ,iblk), & + alvdrn =alvdrn (i,j,: ,iblk), alvdfn =alvdfn (i,j,: ,iblk), & + alidrn =alidrn (i,j,: ,iblk), alidfn =alidfn (i,j,: ,iblk), & + fswsfcn =fswsfcn (i,j,: ,iblk), fswintn =fswintn (i,j,: ,iblk), & + fswthrun =fswthrun (i,j,: ,iblk), & +!opt fswthrun_vdr =fswthrun_vdr (i,j,: ,iblk), & +!opt fswthrun_vdf =fswthrun_vdf (i,j,: ,iblk), & +!opt fswthrun_idr =fswthrun_idr (i,j,: ,iblk), & +!opt fswthrun_idf =fswthrun_idf (i,j,: ,iblk), & + fswpenln=fswpenln(i,j,:,:,iblk), & + Sswabsn =Sswabsn (i,j,:,:,iblk), Iswabsn =Iswabsn (i,j,:,:,iblk), & + albicen =albicen (i,j,: ,iblk), albsnon =albsnon (i,j,: ,iblk), & + albpndn =albpndn (i,j,: ,iblk), apeffn =apeffn (i,j,: ,iblk), & + snowfracn=snowfracn(i,j,: ,iblk), & + dhsn =dhsn (i,j,: ,iblk), ffracn =ffracn(i,j,:,iblk), & +!opt rsnow =rsnow (:,:), & + l_print_point=l_print_point) + endif + + if (dEdd_algae .and. (tr_zaero .or. tr_bgc_N)) then + do n = 1, ncat + do k = 1, nbtrcr_sw + trcrn_sw(i,j,k,n,iblk) = ztrcr_sw(k,n) + enddo + enddo + endif + + enddo ! i + enddo ! j + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + deallocate(ztrcr_sw) + deallocate(rsnow) + + call ice_timer_stop(timer_sw,iblk) ! shortwave + + end subroutine step_radiation + +!======================================================================= +! Ocean mixed layer calculation (internal to sea ice model). +! Allows heat storage in ocean for uncoupled runs. +! +! authors: John Weatherly, CRREL +! C.M. Bitz, UW +! Elizabeth C. Hunke, LANL +! Bruce P. Briegleb, NCAR +! William H. 
Lipscomb, LANL + + subroutine ocean_mixed_layer (dt, iblk) + + use ice_arrays_column, only: Cdn_atm, Cdn_atm_ratio + use ice_flux, only: sst, Tf, Qa, uatmT, vatmT, wind, potT, rhoa, zlvl, & + frzmlt, fhocn, fswthru, flw, flwout_ocn, fsens_ocn, flat_ocn, evap_ocn, & + alvdr_ocn, alidr_ocn, alvdf_ocn, alidf_ocn, swidf, swvdf, swidr, swvdr, & + qdp, hmix, strairx_ocn, strairy_ocn, Tref_ocn, Qref_ocn + use ice_grid, only: tmask + use ice_state, only: aice + + real (kind=dbl_kind), intent(in) :: & + dt ! time step + + integer (kind=int_kind), intent(in) :: & + iblk ! block index + + ! local variables + + real (kind=dbl_kind) :: albocn + + real (kind=dbl_kind), parameter :: & + frzmlt_max = c1000 ! max magnitude of frzmlt (W/m^2) + + integer (kind=int_kind) :: & + i, j , & ! horizontal indices + ij ! combined ij index + + real (kind=dbl_kind), dimension(nx_block,ny_block) :: & + delt , & ! potential temperature difference (K) + delq , & ! specific humidity difference (kg/kg) + shcoef, & ! transfer coefficient for sensible heat + lhcoef ! transfer coefficient for latent heat + + integer (kind=int_kind) :: & + icells ! number of ocean cells + + integer (kind=int_kind), dimension(nx_block*ny_block) :: & + indxi, indxj ! compressed indices for ocean cells + + character(len=*), parameter :: subname = '(ocn_mixed_layer)' + + !----------------------------------------------------------------- + + call icepack_query_parameters(albocn_out=albocn) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + !----------------------------------------------------------------- + ! Identify ocean cells. + ! Set fluxes to zero in land cells. + !----------------------------------------------------------------- + + icells = 0 + indxi(:) = 0 + indxj(:) = 0 + + do j = 1, ny_block + do i = 1, nx_block + + if (tmask(i,j,iblk)) then + icells = icells + 1 + indxi(icells) = i + indxj(icells) = j + else + sst (i,j,iblk) = c0 + frzmlt (i,j,iblk) = c0 + flwout_ocn(i,j,iblk) = c0 + fsens_ocn (i,j,iblk) = c0 + flat_ocn (i,j,iblk) = c0 + evap_ocn (i,j,iblk) = c0 + endif + enddo ! i + enddo ! j + + !----------------------------------------------------------------- + ! Compute boundary layer quantities + !----------------------------------------------------------------- + + do ij = 1, icells + i = indxi(ij) + j = indxj(ij) + + call icepack_atm_boundary(sfctype = 'ocn', & + Tsf = sst (i,j,iblk), & + potT = potT (i,j,iblk), & + uatm = uatmT (i,j,iblk), & + vatm = vatmT (i,j,iblk), & + wind = wind (i,j,iblk), & + zlvl = zlvl (i,j,iblk), & + Qa = Qa (i,j,iblk), & + rhoa = rhoa (i,j,iblk), & + strx = strairx_ocn(i,j,iblk), & + stry = strairy_ocn(i,j,iblk), & + Tref = Tref_ocn (i,j,iblk), & + Qref = Qref_ocn (i,j,iblk), & + delt = delt (i,j), & + delq = delq (i,j), & + lhcoef = lhcoef (i,j), & + shcoef = shcoef (i,j), & + Cdn_atm = Cdn_atm (i,j,iblk), & + Cdn_atm_ratio_n = Cdn_atm_ratio(i,j,iblk)) + enddo ! ij + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + !----------------------------------------------------------------- + ! Ocean albedo + ! For now, assume albedo = albocn in each spectral band. 
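+ ! (alvdr/alidr/alvdf/alidf are the visible direct, near-IR direct,
+ ! visible diffuse, and near-IR diffuse bands, respectively.)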
+ !----------------------------------------------------------------- + + alvdr_ocn(:,:,iblk) = albocn + alidr_ocn(:,:,iblk) = albocn + alvdf_ocn(:,:,iblk) = albocn + alidf_ocn(:,:,iblk) = albocn + + !----------------------------------------------------------------- + ! Compute ocean fluxes and update SST + !----------------------------------------------------------------- + + do ij = 1, icells + i = indxi(ij) + j = indxj(ij) + + call icepack_ocn_mixed_layer(alvdr_ocn=alvdr_ocn(i,j,iblk), swvdr =swvdr (i,j,iblk), & + alidr_ocn=alidr_ocn(i,j,iblk), swidr =swidr (i,j,iblk), & + alvdf_ocn=alvdf_ocn(i,j,iblk), swvdf =swvdf (i,j,iblk), & + alidf_ocn=alidf_ocn(i,j,iblk), swidf =swidf (i,j,iblk), & + sst =sst (i,j,iblk), flwout_ocn=flwout_ocn(i,j,iblk), & + fsens_ocn=fsens_ocn(i,j,iblk), shcoef=shcoef(i,j), & + flat_ocn =flat_ocn (i,j,iblk), lhcoef=lhcoef(i,j), & + evap_ocn =evap_ocn (i,j,iblk), flw =flw (i,j,iblk), & + delt =delt (i,j), delq =delq (i,j), & + aice =aice (i,j,iblk), fhocn =fhocn (i,j,iblk), & + fswthru =fswthru (i,j,iblk), hmix =hmix (i,j,iblk), & + Tf =Tf (i,j,iblk), qdp =qdp (i,j,iblk), & + frzmlt =frzmlt (i,j,iblk), dt =dt) + enddo ! ij + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + end subroutine ocean_mixed_layer + +!======================================================================= + + subroutine biogeochemistry (dt, iblk) + + use ice_arrays_column, only: upNO, upNH, iDi, iki, zfswin, & + zsal_tot, darcy_V, grow_net, & + PP_net, hbri,dhbr_bot, dhbr_top, Zoo,& + fbio_snoice, fbio_atmice, ocean_bio, & + first_ice, fswpenln, bphi, bTiz, ice_bio_net, & + snow_bio_net, fswthrun, Rayleigh_criteria, & + ocean_bio_all, sice_rho, fzsal, fzsal_g, & + bgrid, igrid, icgrid, cgrid + use ice_domain_size, only: nblyr, nilyr, nslyr, n_algae, n_zaero, ncat, & + n_doc, n_dic, n_don, n_fed, n_fep + use ice_flux, only: meltbn, melttn, congeln, snoicen, & + sst, sss, fsnow, meltsn + use ice_flux_bgc, only: hin_old, flux_bio, flux_bio_atm, faero_atm, & + nit, amm, sil, dmsp, dms, algalN, doc, don, dic, fed, fep, zaeros, hum + use ice_state, only: aicen_init, vicen_init, aicen, vicen, vsnon, & + trcrn, vsnon_init, aice0 + use ice_timers, only: timer_bgc, ice_timer_start, ice_timer_stop + + real (kind=dbl_kind), intent(in) :: & + dt ! time step + + integer (kind=int_kind), intent(in) :: & + iblk ! block index + + ! local variables + + integer (kind=int_kind) :: & + i, j , & ! horizontal indices + ilo,ihi,jlo,jhi, & ! beginning and end of physical domain + mm ! tracer index + + type (block) :: & + this_block ! block information for current block + + integer (kind=int_kind) :: & + nbtrcr, ntrcr + + integer (kind=int_kind), dimension(icepack_max_aero) :: & + nlt_zaero + + integer (kind=int_kind), dimension(icepack_max_nbtrcr) :: & + bio_index_o + + logical (kind=log_kind) :: & + skl_bgc, tr_brine, tr_zaero + + character(len=*), parameter :: subname='(biogeochemistry)' + + call icepack_query_tracer_flags(tr_brine_out=tr_brine) + call icepack_query_parameters(skl_bgc_out=skl_bgc) + call icepack_query_tracer_sizes(ntrcr_out=ntrcr, nbtrcr_out=nbtrcr) + call icepack_query_tracer_flags(tr_zaero_out=tr_zaero) + call icepack_query_tracer_indices(nlt_zaero_out=nlt_zaero) + call icepack_query_tracer_indices(bio_index_o_out=bio_index_o) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + if (tr_brine .or. 
skl_bgc) then + + call ice_timer_start(timer_bgc,iblk) ! biogeochemistry + + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + ! Define ocean concentrations for tracers used in simulation + do j = jlo, jhi + do i = ilo, ihi + + call icepack_load_ocean_bio_array(max_nbtrcr = icepack_max_nbtrcr, & + max_algae = icepack_max_algae, max_don = icepack_max_don, & + max_doc = icepack_max_doc, max_dic = icepack_max_dic, & + max_aero = icepack_max_aero, max_fe = icepack_max_fe, & + nit = nit(i,j, iblk), amm = amm (i,j, iblk), & + sil = sil(i,j, iblk), dmsp = dmsp (i,j, iblk), & + dms = dms(i,j, iblk), algalN = algalN(i,j,:,iblk), & + doc = doc(i,j,:,iblk), don = don (i,j,:,iblk), & + dic = dic(i,j,:,iblk), fed = fed (i,j,:,iblk), & + fep = fep(i,j,:,iblk), zaeros = zaeros(i,j,:,iblk), & + hum = hum(i,j, iblk), & + ocean_bio_all = ocean_bio_all(i,j,:,iblk)) + + do mm = 1,nbtrcr + ocean_bio(i,j,mm,iblk) = ocean_bio_all(i,j,bio_index_o(mm),iblk) + enddo ! mm + if (tr_zaero) then + do mm = 1, n_zaero ! update aerosols + flux_bio_atm(i,j,nlt_zaero(mm),iblk) = faero_atm(i,j,mm,iblk) + enddo ! mm + endif + + call icepack_biogeochemistry(dt=dt, ntrcr=ntrcr, nbtrcr=nbtrcr,& + bgrid=bgrid, igrid=igrid, icgrid=icgrid, cgrid=cgrid, & + nblyr=nblyr, nilyr=nilyr, nslyr=nslyr, n_algae=n_algae, n_zaero=n_zaero, & + ncat=ncat, n_doc=n_doc, n_dic=n_dic, n_don=n_don, n_fed=n_fed, n_fep=n_fep, & + upNO = upNO (i,j, iblk), & + upNH = upNH (i,j, iblk), & + iDi = iDi (i,j,:,:, iblk), & + iki = iki (i,j,:,:, iblk), & + zfswin = zfswin (i,j,:,:, iblk), & + zsal_tot = zsal_tot (i,j, iblk), & + darcy_V = darcy_V (i,j,:, iblk), & + grow_net = grow_net (i,j, iblk), & + PP_net = PP_net (i,j, iblk), & + hbri = hbri (i,j, iblk), & + dhbr_bot = dhbr_bot (i,j,:, iblk), & + dhbr_top = dhbr_top (i,j,:, iblk), & + Zoo = Zoo (i,j,:,:, iblk), & + fbio_snoice = fbio_snoice (i,j,:, iblk), & + fbio_atmice = fbio_atmice (i,j,:, iblk), & + ocean_bio = ocean_bio (i,j,1:nbtrcr, iblk), & + first_ice = first_ice (i,j,:, iblk), & + fswpenln = fswpenln (i,j,:,:, iblk), & + bphi = bphi (i,j,:,:, iblk), & + bTiz = bTiz (i,j,:,:, iblk), & + ice_bio_net = ice_bio_net (i,j,1:nbtrcr, iblk), & + snow_bio_net = snow_bio_net(i,j,1:nbtrcr, iblk), & + fswthrun = fswthrun (i,j,:, iblk), & + sice_rho = sice_rho (i,j,:, iblk), & + fzsal = fzsal (i,j, iblk), & + fzsal_g = fzsal_g (i,j, iblk), & + meltbn = meltbn (i,j,:, iblk), & + melttn = melttn (i,j,:, iblk), & + congeln = congeln (i,j,:, iblk), & + snoicen = snoicen (i,j,:, iblk), & + sst = sst (i,j, iblk), & + sss = sss (i,j, iblk), & + fsnow = fsnow (i,j, iblk), & + meltsn = meltsn (i,j,:, iblk), & + hin_old = hin_old (i,j,:, iblk), & + flux_bio = flux_bio (i,j,1:nbtrcr, iblk), & + flux_bio_atm = flux_bio_atm(i,j,1:nbtrcr, iblk), & + aicen_init = aicen_init (i,j,:, iblk), & + vicen_init = vicen_init (i,j,:, iblk), & + aicen = aicen (i,j,:, iblk), & + vicen = vicen (i,j,:, iblk), & + vsnon = vsnon (i,j,:, iblk), & + aice0 = aice0 (i,j, iblk), & + trcrn = trcrn (i,j,:,:, iblk), & + vsnon_init = vsnon_init (i,j,:, iblk), & + Rayleigh_criteria = Rayleigh_criteria(i,j,iblk), & + skl_bgc = skl_bgc) + + enddo ! i + enddo ! j + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + call ice_timer_stop(timer_bgc,iblk) ! biogeochemistry + + endif ! tr_brine .or. 
skl_bgc
+
+ end subroutine biogeochemistry
+
+!=======================================================================
+
+ end module ice_step_mod
+
+!=======================================================================
diff --git a/cicecore/drivers/unittest/sumchk/sumchk.F90 b/cicecore/drivers/unittest/sumchk/sumchk.F90
index d9ea72d8c..1a2745aea 100644
--- a/cicecore/drivers/unittest/sumchk/sumchk.F90
+++ b/cicecore/drivers/unittest/sumchk/sumchk.F90
@@ -16,10 +16,10 @@ program sumchk
 use ice_communicate, only: my_task, master_task, get_num_procs
 use ice_domain_size, only: nx_global, ny_global
 use ice_domain_size, only: block_size_x, block_size_y, max_blocks
- use ice_domain, only: distrb_info
+ use ice_domain, only: distrb_info, ns_boundary_type
 use ice_blocks, only: block, get_block, nx_block, ny_block, nblocks_tot
 use ice_distribution, only: ice_distributionGetBlockID, ice_distributionGet
- use ice_constants, only: field_loc_center, field_loc_Nface
+ use ice_constants, only: field_loc_center, field_loc_Nface, field_loc_Eface, field_loc_NEcorner
 use ice_fileunits, only: bfbflag
 use ice_global_reductions
 use ice_exit, only: abort_ice, end_run
@@ -113,6 +113,13 @@ program sumchk
 write(6,*) ' block_size_y = ',block_size_y
 write(6,*) ' nblocks_tot = ',nblocks_tot
 write(6,*) ' '
+ write(6,*) ' Values are generally O(1.); lscale is the relative size of'
+ write(6,*) ' values set in the array to test precision. A pair of equal'
+ write(6,*) ' and opposite values of O(lscale) are placed in the array.'
+ write(6,*) ' "easy" sets the lscaled values at the start of the array so'
+ write(6,*) ' they are added to the sum first. Otherwise, the lscaled values'
+ write(6,*) ' are set near the end of the array to create precision'
+ write(6,*) ' challenges in the global sums.'
 endif
 ! ---------------------------
@@ -165,7 +172,7 @@ program sumchk
 reldigchk(4,4) = 0.
 reldigchk(5,4) = 15.
 if (nx_global == 360 .and. ny_global == 240) then
- reldigchk(1:3,1) = 13.
+ reldigchk(1:3,1) = 12.5
 reldigchk(5,4) = 14.
 endif
 #else
@@ -181,7 +188,7 @@ program sumchk
 reldigchk(4,4) = 0.
 reldigchk(5,4) = 15.
 if (nx_global == 360 .and. ny_global == 240) then
- reldigchk(1:2,1) = 13.
+ reldigchk(1:2,1) = 12.5
 reldigchk(5,4) = 14.
 endif
 #endif
@@ -212,20 +219,22 @@ program sumchk
 ! set corval to something a little interesting (not 1.0 for instance which gives atypical results)
 corval = 4.0_dbl_kind/3.0_dbl_kind
 iocval = 8
- ! tuned for gx3 and tx1 only
- if ((nx_global == 100 .and. ny_global == 116) .or. &
- (nx_global == 360 .and. ny_global == 240)) then
- if (field_loc(m) == field_loc_Nface .and. nx_global == 360 .and. ny_global == 240) then
- ! tx1 tripole face, need to adjust local value to remove half of row at ny_global
- ! or modify corval to account for different sum
- locval = corval / real((nblocks_tot*(block_size_x*block_size_y-2)-nx_global/2),dbl_kind)
- corvali = (nblocks_tot*(block_size_x*block_size_y-2)-nx_global/2)*iocval
- else
- locval = corval / real(nblocks_tot*(block_size_x*block_size_y-2),dbl_kind)
- corvali = nblocks_tot*(block_size_x*block_size_y-2)*iocval
- endif
+ if ((ns_boundary_type == 'tripoleT' .and. field_loc(m) == field_loc_Nface ) .or. &
+ (ns_boundary_type == 'tripoleT' .and. field_loc(m) == field_loc_NEcorner)) then
+ ! remove full row at ny_global
+ locval = corval / real((nblocks_tot*(block_size_x*block_size_y-2)-nx_global),dbl_kind)
+ corvali = (nblocks_tot*(block_size_x*block_size_y-2)-nx_global)*iocval
+ elseif ((ns_boundary_type == 'tripoleT' .and. field_loc(m) == field_loc_center ) .or. &
+ (ns_boundary_type == 'tripoleT' .and. field_loc(m) == field_loc_Eface ) .or. &
+ (ns_boundary_type == 'tripole' .and. field_loc(m) == field_loc_NEcorner) .or. &
+ (ns_boundary_type == 'tripole' .and. field_loc(m) == field_loc_Nface )) then
+ ! remove half of row at ny_global
+ locval = corval / real((nblocks_tot*(block_size_x*block_size_y-2)-nx_global/2),dbl_kind)
+ corvali = (nblocks_tot*(block_size_x*block_size_y-2)-nx_global/2)*iocval
 else
- call abort_ice(subname//' ERROR not set for this grid ')
+ ! all gridcells
+ locval = corval / real(nblocks_tot*(block_size_x*block_size_y-2),dbl_kind)
+ corvali = nblocks_tot*(block_size_x*block_size_y-2)*iocval
 endif
 do l = 1, nscale
@@ -253,18 +262,18 @@ program sumchk
 jb = this_block%jlo
 je = this_block%jhi
- lmask(ie,je-1,iblock) = .false.
- lmask(ie,je-2,iblock) = .false.
- arrayA(ie,je-1,iblock) = locval * lscale(l)
+ lmask(ie,je-1,iblock) = .false.
+ lmask(ie,je-2,iblock) = .false.
+ arrayA(ie,je-1,iblock) = locval * lscale(l)
 arrayA(ie,je-2,iblock) = -arrayA(ie,je-1,iblock)
- arrayB(ie,je-1,iblock) = locval * lscale(l)
+ arrayB(ie,je-1,iblock) = locval * lscale(l)
 arrayB(ie,je-2,iblock) = arrayB(ie,je-1,iblock)
 arrayC(ib,jb,iblock) = locval * lscale(l)
 arrayC(ib+1,jb,iblock) = -arrayC(ib,jb,iblock)
- arrayiA(:,:,iblock) = iocval
- arrayiB(:,:,iblock) = iocval
- arrayiA(ie,je-1,iblock) = 13 * iocval
- arrayiA(ie,je-2,iblock) = -arrayiA(ie,je-1,iblock)
+ arrayiA(:,:,iblock) = iocval
+ arrayiB(:,:,iblock) = iocval
+ arrayiA(ie,je-1,iblock)= 13 * iocval
+ arrayiA(ie,je-2,iblock)= -arrayiA(ie,je-1,iblock)
 enddo
 do k = 1,ntests1
diff --git a/cicecore/shared/ice_fileunits.F90 b/cicecore/shared/ice_fileunits.F90
index 72a40f513..d4823d175 100644
--- a/cicecore/shared/ice_fileunits.F90
+++ b/cicecore/shared/ice_fileunits.F90
@@ -328,32 +328,32 @@ end subroutine flush_fileunit
 subroutine goto_nml(iunit, nml, status)
 ! Search for the namelist group within the ice_in file,
 ! for compilers that do not allow optional namelists
- 
+
 ! passed variables
 integer(kind=int_kind), intent(in) :: &
 iunit ! namelist file unit
- 
+
 character(len=*), intent(in) :: &
 nml ! namelist to search for
- 
+
 integer(kind=int_kind), intent(out) :: &
 status ! status of subroutine
- 
+
 ! local variables
 character(len=char_len) :: &
 file_str, & ! string in file
 nml_str ! namelist string to test
- 
+
 integer(kind=int_kind) :: &
 i, n ! dummy integers
- 
- 
+
+
 ! rewind file
 rewind(iunit)
- 
+
 ! define test string with ampersand
 nml_str = '&' // trim(adjustl(nml))
- 
+
 ! search for the record containing the namelist group we're looking for
 do
 read(iunit, '(a)', iostat=status) file_str
@@ -365,10 +365,10 @@ subroutine goto_nml(iunit, nml, status)
 end if
 end if
 end do
- 
+
 ! 
backspace to namelist name in file backspace(iunit) - + end subroutine goto_nml !======================================================================= diff --git a/configuration/scripts/Makefile b/configuration/scripts/Makefile index 872f426ad..d109472b0 100644 --- a/configuration/scripts/Makefile +++ b/configuration/scripts/Makefile @@ -74,7 +74,7 @@ AR := ar .SUFFIXES: -.PHONY: all cice libcice targets target db_files db_flags clean realclean helloworld calchk sumchk bcstchk gridavgchk halochk optargs +.PHONY: all cice libcice targets target db_files db_flags clean realclean helloworld calchk sumchk bcstchk gridavgchk halochk optargs opticep all: $(EXEC) cice: $(EXEC) @@ -93,7 +93,7 @@ targets: @echo " " @echo "Supported Makefile Targets are: cice, libcice, makdep, depends, clean, realclean" @echo " Diagnostics: targets, db_files, db_flags" - @echo " Unit Tests : helloworld, calchk, sumchk, bcstchk, gridavgchk, halochk, optargs" + @echo " Unit Tests : helloworld, calchk, sumchk, bcstchk, gridavgchk, halochk, optargs, opticep" target: targets db_files: @@ -153,6 +153,8 @@ gridavgchk: $(EXEC) halochk: $(EXEC) +opticep: $(EXEC) + # this builds just a subset of source code specified explicitly and requires a separate target HWOBJS := helloworld.o diff --git a/configuration/scripts/cice.batch.csh b/configuration/scripts/cice.batch.csh index 1cf23da45..5a47decf1 100755 --- a/configuration/scripts/cice.batch.csh +++ b/configuration/scripts/cice.batch.csh @@ -33,6 +33,21 @@ cat >> ${jobfile} << EOFB #PBS -l walltime=${batchtime} EOFB +else if (${ICE_MACHINE} =~ derecho*) then +cat >> ${jobfile} << EOFB +#PBS -q ${queue} +#PBS -l job_priority=regular +#PBS -N ${ICE_CASENAME} +#PBS -A ${acct} +#PBS -l select=${nnodes}:ncpus=${corespernode}:mpiprocs=${taskpernodelimit}:ompthreads=${nthrds} +#PBS -l walltime=${batchtime} +#PBS -j oe +#PBS -W umask=022 +#PBS -o ${ICE_CASEDIR} +###PBS -M username@domain.com +###PBS -m be +EOFB + else if (${ICE_MACHINE} =~ gust*) then cat >> ${jobfile} << EOFB #PBS -q ${queue} diff --git a/configuration/scripts/cice.launch.csh b/configuration/scripts/cice.launch.csh index b13da1813..971bc0075 100755 --- a/configuration/scripts/cice.launch.csh +++ b/configuration/scripts/cice.launch.csh @@ -22,6 +22,18 @@ mpiexec_mpt -np ${ntasks} omplace ./cice >&! \$ICE_RUNLOG_FILE EOFR endif +#======= +else if (${ICE_MACHCOMP} =~ derecho*) then +if (${ICE_COMMDIR} =~ serial*) then +cat >> ${jobfile} << EOFR +./cice >&! \$ICE_RUNLOG_FILE +EOFR +else +cat >> ${jobfile} << EOFR +mpiexec --cpu-bind depth -n ${ntasks} -ppn ${taskpernodelimit} -d ${nthrds} ./cice >&! \$ICE_RUNLOG_FILE +EOFR +endif + #======= else if (${ICE_MACHCOMP} =~ gust*) then if (${ICE_COMMDIR} =~ serial*) then diff --git a/configuration/scripts/ice_in b/configuration/scripts/ice_in index 32db0270b..e0e317e40 100644 --- a/configuration/scripts/ice_in +++ b/configuration/scripts/ice_in @@ -48,11 +48,12 @@ histfreq = 'm','x','x','x','x' histfreq_n = 1 , 1 , 1 , 1 , 1 histfreq_base = 'zero' - hist_avg = .true. + hist_avg = .true.,.true.,.true.,.true.,.true. history_dir = './history/' history_file = 'iceh' history_precision = 4 history_format = 'default' + hist_time_axis = 'end' write_ic = .true. 
 incond_dir = './history/'
 incond_file = 'iceh_ic'
diff --git a/configuration/scripts/machines/Macros.derecho_intel b/configuration/scripts/machines/Macros.derecho_intel
new file mode 100644
index 000000000..df0d2320e
--- /dev/null
+++ b/configuration/scripts/machines/Macros.derecho_intel
@@ -0,0 +1,69 @@
+#==============================================================================
+# Makefile macros for NCAR derecho, intel compiler
+#==============================================================================
+
+CPP := fpp
+CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS}
+CFLAGS := -c -O2 -fp-model precise -march=core-avx2
+
+FIXEDFLAGS := -fixed -132
+FREEFLAGS := -free
+FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -traceback -march=core-avx2
+FFLAGS_NOOPT:= -O0
+
+ifeq ($(ICE_BLDDEBUG), true)
+ FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created -link_mpi=dbg
+# FFLAGS += -O0 -g -check all -fpe0 -ftrapuv -fp-model except -check noarg_temp_created -link_mpi=dbg -stand f08
+# FFLAGS += -O0 -g -check all -fpe0 -ftrapuv -fp-model except -check noarg_temp_created -init=snan,arrays -link_mpi=dbg
+else
+ FFLAGS += -O2
+endif
+
+SCC := icx
+SFC := ifort
+MPICC := mpicc
+MPIFC := mpif90
+
+ifeq ($(ICE_COMMDIR), mpi)
+ FC := $(MPIFC)
+ CC := $(MPICC)
+else
+ FC := $(SFC)
+ CC := $(SCC)
+endif
+LD:= $(FC)
+
+NETCDF_PATH := $(NETCDF)
+
+#PIO_CONFIG_OPTS:= --enable-filesystem-hints=gpfs
+
+PNETCDF_PATH := $(PNETCDF)
+#PNETCDF_PATH := /glade/u/apps/ch/opt/pio/2.2/mpt/2.15f/intel/17.0.1/lib
+
+INCLDIR := $(INCLDIR)
+
+LIB_NETCDF := $(NETCDF)/lib
+#LIB_PNETCDF := $(PNETCDF_PATH)/lib
+#LIB_MPI := $(IMPILIBDIR)
+
+#SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff -L$(LIB_PNETCDF) -lpnetcdf -lgptl
+SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff
+
+ifeq ($(ICE_THREADED), true)
+ LDFLAGS += -qopenmp
+ CFLAGS += -qopenmp
+ FFLAGS += -qopenmp
+endif
+
+#ifeq ($(ICE_IOTYPE), pio1)
+# LIB_PIO := $(PIO_LIBDIR)
+# SLIBS := $(SLIBS) -L$(LIB_PIO) -lpio
+#endif
+
+ifeq ($(ICE_IOTYPE), pio2)
+# CPPDEFS := $(CPPDEFS) -DGPTL
+# LIB_PIO := $(PIO_LIBDIR)
+# SLIBS := $(SLIBS) -L$(LIB_PIO) -lpiof -lpioc -lgptl
+ SLIBS := $(SLIBS) -lpiof -lpioc
+endif
+
diff --git a/configuration/scripts/machines/Macros.onyx_intel b/configuration/scripts/machines/Macros.onyx_intel
index 92879ee82..17cec8c74 100644
--- a/configuration/scripts/machines/Macros.onyx_intel
+++ b/configuration/scripts/machines/Macros.onyx_intel
@@ -4,11 +4,11 @@
 CPP := fpp
 CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS}
-CFLAGS := -c -O2 -fp-model precise -xHost
+CFLAGS := -c -O2 -fp-model precise
 FIXEDFLAGS := -132
 FREEFLAGS := -FR
-FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -traceback -xHost
+FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -traceback
 FFLAGS_NOOPT:= -O0
 ifeq ($(ICE_BLDDEBUG), true)
diff --git a/configuration/scripts/machines/env.derecho_intel b/configuration/scripts/machines/env.derecho_intel
new file mode 100644
index 000000000..baa053e75
--- /dev/null
+++ b/configuration/scripts/machines/env.derecho_intel
@@ -0,0 +1,70 @@
+#!/bin/csh -f
+
+set inp = "undefined"
+if ($#argv == 1) then
+ set inp = $1
+endif
+
+if ("$inp" != "-nomodules") then
+
+source ${MODULESHOME}/init/csh
+
+module --force purge
+module load ncarenv/23.06
+module load craype
+module load intel/2023.0.0
+module load ncarcompilers
+module load cray-mpich/8.1.25
+#module load hdf5/1.12.2
+module load netcdf-mpi/4.9.2
+module load cray-libsci/23.02.1.1
+
+if ($?ICE_IOTYPE) then
+if ($ICE_IOTYPE =~ pio*) then
+ module load parallel-netcdf/1.12.3
+ if ($ICE_IOTYPE == "pio1") then
+ module load parallelio/1.10.1
+ else
+ module load parallelio/2.6.0
+ endif
+endif
+endif
+
+if ($?ICE_BFBTYPE) then
+if ($ICE_BFBTYPE =~ qcchk*) then
+ module load conda
+# conda env create -f ../../configuration/scripts/tests/qctest.yml
+ conda activate qctest
+endif
+endif
+
+# For perftools with mpiexec
+# module load perftools-base
+# module load perftools
+#setenv PALS_TRANSFER FALSE
+
+endif
+
+limit coredumpsize unlimited
+limit stacksize unlimited
+setenv PALS_QUIET TRUE
+
+# May be needed for OpenMP memory
+setenv OMP_STACKSIZE 64M
+# OMP runtime diagnostics
+#setenv OMP_DISPLAY_ENV TRUE
+
+setenv ICE_MACHINE_MACHNAME derecho
+setenv ICE_MACHINE_MACHINFO "HPE Cray EX Milan Slingshot 11"
+setenv ICE_MACHINE_ENVNAME intel
+setenv ICE_MACHINE_ENVINFO "ifort 2021.8.0 20221119, cray-mpich 8.1.25, netcdf-mpi 4.9.2, pnetcdf 1.12.3, pio 2.6.0"
+setenv ICE_MACHINE_MAKE gmake
+setenv ICE_MACHINE_WKDIR /glade/derecho/scratch/$user/CICE_RUNS
+setenv ICE_MACHINE_INPUTDATA /glade/p/cesm/pcwg_dev
+setenv ICE_MACHINE_BASELINE /glade/derecho/scratch/$user/CICE_BASELINE
+setenv ICE_MACHINE_SUBMIT "qsub"
+setenv ICE_MACHINE_ACCT P00000000
+setenv ICE_MACHINE_QUEUE "main"
+setenv ICE_MACHINE_TPNODE 128
+setenv ICE_MACHINE_BLDTHRDS 8
+setenv ICE_MACHINE_QSTAT "qstat "
diff --git a/configuration/scripts/machines/env.nrlssc_gnu b/configuration/scripts/machines/env.nrlssc_gnu
index 1f8dd4441..94025ddf9 100644
--- a/configuration/scripts/machines/env.nrlssc_gnu
+++ b/configuration/scripts/machines/env.nrlssc_gnu
@@ -5,12 +5,12 @@
 setenv ICE_MACHINE_MACHINFO "nrlssc"
 setenv ICE_MACHINE_ENVNAME gnu
 setenv ICE_MACHINE_ENVINFO "gnu"
 setenv ICE_MACHINE_MAKE gmake
-setenv ICE_MACHINE_WKDIR /u/data/hebert/CICE_RUNS
-setenv ICE_MACHINE_INPUTDATA /u/data/hebert/CICE_RUNS
-setenv ICE_MACHINE_BASELINE /u/data/hebert/CICE_BASELINE
+setenv ICE_MACHINE_WKDIR /u/hebert/data/CICE_RUNS
+setenv ICE_MACHINE_INPUTDATA /u/hebert/data/
+setenv ICE_MACHINE_BASELINE /u/hebert/data/CICE_BASELINE
 setenv ICE_MACHINE_SUBMIT "qsub "
 setenv ICE_MACHINE_ACCT P00000000
 setenv ICE_MACHINE_QUEUE "standard"
-setenv ICE_MACHINE_TPNODE 20 # tasks per node
-setenv ICE_MACHINE_BLDTHRDS 1
+setenv ICE_MACHINE_TPNODE 28 # tasks per node
+setenv ICE_MACHINE_BLDTHRDS 8
 setenv ICE_MACHINE_QSTAT "qstat "
diff --git a/configuration/scripts/machines/env.onyx_cray b/configuration/scripts/machines/env.onyx_cray
index e696d1b98..e879cdf03 100644
--- a/configuration/scripts/machines/env.onyx_cray
+++ b/configuration/scripts/machines/env.onyx_cray
@@ -13,14 +13,14 @@
 module unload PrgEnv-cray
 module unload PrgEnv-gnu
 module unload PrgEnv-intel
 module unload PrgEnv-pgi
-module load PrgEnv-cray/6.0.9
+module load PrgEnv-cray/6.0.10
 module unload cce
-module load cce/11.0.2
+module load cce/14.0.3
 module unload cray-mpich
 module unload cray-mpich2
-module load cray-mpich/7.7.16
+module load cray-mpich/7.7.20
 module unload netcdf
 module unload cray-netcdf
@@ -28,8 +28,8 @@
 module unload cray-hdf5
 module unload cray-hdf5-parallel
 module unload cray-netcdf-hdf5parallel
 module unload cray-parallel-netcdf
-module load cray-netcdf/4.7.4.0
-module load cray-hdf5/1.12.0.0
+module load cray-netcdf/4.8.1.3
+module load cray-hdf5/1.12.1.3
 module unload cray-libsci
 module unload craype-hugepages2M
@@ -46,7 +46,7 @@ endif
 setenv ICE_MACHINE_MACHNAME onyx
 setenv ICE_MACHINE_MACHINFO "Cray XC40/50 Xeon E5-2699v4 Broadwell"
 setenv ICE_MACHINE_ENVNAME cray
-setenv 
ICE_MACHINE_ENVINFO "Cray cce/11.0.2, cray-mpich/7.7.16, netcdf/4.7.4.0" +setenv ICE_MACHINE_ENVINFO "Cray cce/14.0.3, cray-mpich/7.7.20, netcdf/4.8.1.3" setenv ICE_MACHINE_MAKE gmake setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS setenv ICE_MACHINE_INPUTDATA /p/app/unsupported/RASM/cice_consortium diff --git a/configuration/scripts/machines/env.onyx_gnu b/configuration/scripts/machines/env.onyx_gnu index 80ebb8e43..19a4eb701 100644 --- a/configuration/scripts/machines/env.onyx_gnu +++ b/configuration/scripts/machines/env.onyx_gnu @@ -13,14 +13,14 @@ module unload PrgEnv-cray module unload PrgEnv-gnu module unload PrgEnv-intel module unload PrgEnv-pgi -module load PrgEnv-gnu/6.0.9 +module load PrgEnv-gnu/6.0.10 module unload gcc -module load gcc/10.2.0 +module load gcc/12.1.0 module unload cray-mpich module unload cray-mpich2 -module load cray-mpich/7.7.16 +module load cray-mpich/7.7.20 module unload netcdf module unload cray-netcdf @@ -28,8 +28,8 @@ module unload cray-hdf5 module unload cray-hdf5-parallel module unload cray-netcdf-hdf5parallel module unload cray-parallel-netcdf -module load cray-netcdf/4.7.4.0 -module load cray-hdf5/1.12.0.0 +module load cray-netcdf/4.8.1.3 +module load cray-hdf5/1.12.1.3 module unload cray-libsci module unload craype-hugepages2M @@ -46,7 +46,7 @@ endif setenv ICE_MACHINE_MACHNAME onyx setenv ICE_MACHINE_MACHINFO "Cray XC40/50 Xeon E5-2699v4 Broadwell" setenv ICE_MACHINE_ENVNAME gnu -setenv ICE_MACHINE_ENVINFO "GNU Fortran (GCC) 10.2.0, cray-mpich/7.7.16, netcdf/4.7.4.0" +setenv ICE_MACHINE_ENVINFO "GNU Fortran (GCC) 12.1.0, cray-mpich/7.7.20, netcdf/4.8.1.3" setenv ICE_MACHINE_MAKE gmake setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS setenv ICE_MACHINE_INPUTDATA /p/app/unsupported/RASM/cice_consortium diff --git a/configuration/scripts/machines/env.onyx_intel b/configuration/scripts/machines/env.onyx_intel index 362454dd4..999d5a2bd 100644 --- a/configuration/scripts/machines/env.onyx_intel +++ b/configuration/scripts/machines/env.onyx_intel @@ -13,14 +13,14 @@ module unload PrgEnv-cray module unload PrgEnv-gnu module unload PrgEnv-intel module unload PrgEnv-pgi -module load PrgEnv-intel/6.0.9 +module load PrgEnv-intel/6.0.10 module unload intel -module load intel/19.1.3.304 +module load intel/2021.4.0 module unload cray-mpich module unload cray-mpich2 -module load cray-mpich/7.7.16 +module load cray-mpich/7.7.20 module unload netcdf module unload cray-netcdf @@ -28,8 +28,8 @@ module unload cray-hdf5 module unload cray-hdf5-parallel module unload cray-netcdf-hdf5parallel module unload cray-parallel-netcdf -module load cray-netcdf/4.7.4.0 -module load cray-hdf5/1.12.0.0 +module load cray-netcdf/4.8.1.3 +module load cray-hdf5/1.12.1.3 module unload cray-libsci module unload craype-hugepages2M @@ -46,7 +46,7 @@ endif setenv ICE_MACHINE_MACHNAME onyx setenv ICE_MACHINE_MACHINFO "Cray XC40/50 Xeon E5-2699v4 Broadwell" setenv ICE_MACHINE_ENVNAME intel -setenv ICE_MACHINE_ENVINFO "ifort 19.1.3.304, cray-mpich/7.7.16, netcdf/4.7.4.0" +setenv ICE_MACHINE_ENVINFO "ifort 2021.4.0, cray-mpich/7.7.20, netcdf/4.8.1.3" setenv ICE_MACHINE_MAKE gmake setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS setenv ICE_MACHINE_INPUTDATA /p/app/unsupported/RASM/cice_consortium diff --git a/configuration/scripts/options/set_env.opticep b/configuration/scripts/options/set_env.opticep new file mode 100644 index 000000000..81339ea42 --- /dev/null +++ b/configuration/scripts/options/set_env.opticep @@ -0,0 +1,2 @@ +setenv ICE_DRVOPT unittest/opticep +setenv ICE_TARGET opticep diff --git 
a/configuration/scripts/options/set_nml.gx1 b/configuration/scripts/options/set_nml.gx1 index 50615e81e..781da3389 100644 --- a/configuration/scripts/options/set_nml.gx1 +++ b/configuration/scripts/options/set_nml.gx1 @@ -14,8 +14,8 @@ maskhalo_remap = .true. maskhalo_bound = .true. fyear_init = 2005 atm_data_format = 'nc' -atm_data_type = 'JRA55_gx1' -atm_data_dir = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/gx1/JRA55' +atm_data_type = 'JRA55' +atm_data_dir = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/gx1' precip_units = 'mks' ocn_data_dir = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/gx1/CESM/MONTHLY' bgc_data_dir = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/gx1/WOA/MONTHLY' diff --git a/configuration/scripts/options/set_nml.gx3 b/configuration/scripts/options/set_nml.gx3 index 1a2fe62a5..3492509c6 100644 --- a/configuration/scripts/options/set_nml.gx3 +++ b/configuration/scripts/options/set_nml.gx3 @@ -11,8 +11,8 @@ kmt_file = 'ICE_MACHINE_INPUTDATA/CICE_data/grid/gx3/kmt_gx3.bin' bathymetry_file = 'ICE_MACHINE_INPUTDATA/CICE_data/grid/gx3/global_gx3.bathy.nc' fyear_init = 2005 atm_data_format = 'nc' -atm_data_type = 'JRA55_gx3' -atm_data_dir = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/gx3/JRA55' +atm_data_type = 'JRA55' +atm_data_dir = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/gx3' precip_units = 'mks' ocn_data_format = 'bin' ocn_data_dir = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/gx3/' diff --git a/configuration/scripts/options/set_nml.gx3ncarbulk b/configuration/scripts/options/set_nml.gx3ncarbulk index fbe0f7ae7..044c77a54 100644 --- a/configuration/scripts/options/set_nml.gx3ncarbulk +++ b/configuration/scripts/options/set_nml.gx3ncarbulk @@ -4,6 +4,6 @@ use_restart_time = .true. fyear_init = 1997 atm_data_format = 'bin' atm_data_type = 'ncar' -atm_data_dir = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/gx3/NCAR_bulk' +atm_data_dir = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/gx3' precip_units = 'mm_per_month' diff --git a/configuration/scripts/options/set_nml.histinst b/configuration/scripts/options/set_nml.histinst index f2f0995c8..31d566d76 100644 --- a/configuration/scripts/options/set_nml.histinst +++ b/configuration/scripts/options/set_nml.histinst @@ -1 +1 @@ -hist_avg = .false. +hist_avg = .false.,.false.,.false.,.false.,.false. diff --git a/configuration/scripts/options/set_nml.jra55 b/configuration/scripts/options/set_nml.jra55 new file mode 100644 index 000000000..465152498 --- /dev/null +++ b/configuration/scripts/options/set_nml.jra55 @@ -0,0 +1,2 @@ +atm_data_format = 'nc' +atm_data_type = 'JRA55' diff --git a/configuration/scripts/options/set_nml.jra55do b/configuration/scripts/options/set_nml.jra55do new file mode 100644 index 000000000..5ca4cb397 --- /dev/null +++ b/configuration/scripts/options/set_nml.jra55do @@ -0,0 +1,2 @@ +atm_data_format = 'nc' +atm_data_type = 'JRA55do' diff --git a/configuration/scripts/options/set_nml.qc b/configuration/scripts/options/set_nml.qc index feefb376d..5de4dd28e 100644 --- a/configuration/scripts/options/set_nml.qc +++ b/configuration/scripts/options/set_nml.qc @@ -12,5 +12,5 @@ dumpfreq_n = 12 diagfreq = 24 histfreq = 'd','x','x','x','x' f_hi = 'd' -hist_avg = .false. +hist_avg = .false.,.false.,.false.,.false.,.false. distribution_wght = 'blockall' diff --git a/configuration/scripts/options/set_nml.run3dt b/configuration/scripts/options/set_nml.run3dt index 4ff27ce22..11a8c0f85 100644 --- a/configuration/scripts/options/set_nml.run3dt +++ b/configuration/scripts/options/set_nml.run3dt @@ -2,6 +2,6 @@ npt_unit = '1' npt = 3 dump_last = .true. 
histfreq = '1','x','x','x','x' -hist_avg = .false. +hist_avg = .false.,.false.,.false.,.false.,.false. f_uvel = '1' f_vvel = '1' diff --git a/configuration/scripts/options/set_nml.tx1 b/configuration/scripts/options/set_nml.tx1 index 5e66db871..c21231a0f 100644 --- a/configuration/scripts/options/set_nml.tx1 +++ b/configuration/scripts/options/set_nml.tx1 @@ -6,8 +6,8 @@ grid_type = 'tripole' ns_boundary_type = 'tripole' grid_file = 'ICE_MACHINE_INPUTDATA/CICE_data/grid/tx1/grid_tx1.bin' kmt_file = 'ICE_MACHINE_INPUTDATA/CICE_data/grid/tx1/kmt_tx1.bin' -atm_data_dir = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/tx1/JRA55' +atm_data_dir = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/tx1' atm_data_format = 'nc' -atm_data_type = 'JRA55_tx1' +atm_data_type = 'JRA55' year_init = 2005 fyear_init = 2005 diff --git a/configuration/scripts/tests/base_suite.ts b/configuration/scripts/tests/base_suite.ts index 8685ab9a8..906aae08d 100644 --- a/configuration/scripts/tests/base_suite.ts +++ b/configuration/scripts/tests/base_suite.ts @@ -12,6 +12,7 @@ smoke gx3 4x1 diag1,run5day,thread smoke_gx3_8x2_diag1_ smoke gx3 1x8 diag1,run5day,evp1d restart gx1 40x4 droundrobin,medium restart tx1 40x4 dsectrobin,medium +restart tx1 40x4 dsectrobin,medium,jra55do restart gx3 4x4 none restart gx3 10x4 maskhalo restart gx3 6x2 alt01 @@ -46,12 +47,14 @@ smoke gbox80 1x1 boxslotcyl smoke gbox12 1x1x12x12x1 boxchan,diag1,debug restart gx3 8x2 modal smoke gx3 8x2 bgcz +smoke gx3 8x2 jra55do smoke gx3 8x2 bgczm,debug smoke gx3 8x1 bgcskl,debug #smoke gx3 4x1 bgcz,thread smoke_gx3_8x2_bgcz restart gx1 4x2 bgcsklclim,medium restart gx1 8x1 bgczclim,medium smoke gx1 24x1 medium,run90day,yi2008 +smoke gx1 24x1 medium,run90day,yi2008,jra55do smoke gx3 8x1 medium,run90day,yi2008 restart gx1 24x1 short restart gx1 16x2 seabedLKD,gx1apr,short,debug diff --git a/configuration/scripts/tests/baseline.script b/configuration/scripts/tests/baseline.script index 2700fe71f..a24236c9e 100644 --- a/configuration/scripts/tests/baseline.script +++ b/configuration/scripts/tests/baseline.script @@ -33,8 +33,13 @@ if (${ICE_BASECOM} != ${ICE_SPVAL}) then echo "base_file: ${base_file}" echo "test_file: ${test_file}" - ${ICE_CASEDIR}/casescripts/comparelog.csh ${base_file} ${test_file} notcicefile - set bfbstatus = $status + if (${ICE_TARGET} == "opticep") then + ${ICE_CASEDIR}/casescripts/comparelog.csh ${base_file} ${test_file} modcicefile + set bfbstatus = $status + else + ${ICE_CASEDIR}/casescripts/comparelog.csh ${base_file} ${test_file} notcicefile + set bfbstatus = $status + endif else if (${ICE_BFBTYPE} =~ qcchk*) then set test_dir = ${ICE_RUNDIR} @@ -160,8 +165,13 @@ if (${ICE_BFBCOMP} != ${ICE_SPVAL}) then echo "base_file: ${base_file}" echo "test_file: ${test_file}" - ${ICE_CASEDIR}/casescripts/comparelog.csh ${base_file} ${test_file} - set bfbstatus = $status + if (${ICE_TARGET} == "opticep") then + ${ICE_CASEDIR}/casescripts/comparelog.csh ${base_file} ${test_file} modcicefile + set bfbstatus = $status + else + ${ICE_CASEDIR}/casescripts/comparelog.csh ${base_file} ${test_file} + set bfbstatus = $status + endif else if (${ICE_BFBTYPE} == "logrest") then set test_file = `ls -1t ${ICE_RUNDIR}/cice.runlog* | head -1` @@ -172,8 +182,13 @@ if (${ICE_BFBCOMP} != ${ICE_SPVAL}) then echo "base_file: ${base_file}" echo "test_file: ${test_file}" - ${ICE_CASEDIR}/casescripts/comparelog.csh ${base_file} ${test_file} - set bfbstatusl = $status + if (${ICE_TARGET} == "opticep") then + ${ICE_CASEDIR}/casescripts/comparelog.csh ${base_file} ${test_file} 
modcicefile
+        set bfbstatusl = $status
+      else
+        ${ICE_CASEDIR}/casescripts/comparelog.csh ${base_file} ${test_file}
+        set bfbstatusl = $status
+      endif

     set test_dir = ${ICE_RUNDIR}/restart
     set base_dir = ${ICE_RUNDIR}/../${ICE_BFBCOMP}.${ICE_TESTID}/restart
diff --git a/configuration/scripts/tests/comparelog.csh b/configuration/scripts/tests/comparelog.csh
index 576289cd7..6659906b8 100755
--- a/configuration/scripts/tests/comparelog.csh
+++ b/configuration/scripts/tests/comparelog.csh
@@ -16,6 +16,7 @@ set filearg = 0
 set cicefile = 0
 set notcicefile = "notcicefile"
+set modcicefile = "modcicefile"
 if ( $#argv == 2 ) then
   set cicefile = 1
   set filearg = 1
@@ -23,12 +24,18 @@ if ( $#argv == 2 ) then
   set test_data = $argv[2]
   if ("$argv[1]" == "${notcicefile}") set filearg = 0
   if ("$argv[2]" == "${notcicefile}") set filearg = 0
+  if ("$argv[1]" == "${modcicefile}") set filearg = 0
+  if ("$argv[2]" == "${modcicefile}") set filearg = 0
 else if ( $#argv == 3 ) then
   set cicefile = 0
   set filearg = 1
   set base_data = $argv[1]
   set test_data = $argv[2]
-  if ("$argv[3]" != "${notcicefile}") set filearg = 0
+  if ("$argv[3]" == "${modcicefile}") then
+    set cicefile = 2
+  else if ("$argv[3]" != "${notcicefile}") then
+    set filearg = 0
+  endif
 endif

 if (${filearg} == 0) then
@@ -57,6 +64,9 @@ if (${filearg} == 1) then
     if (${cicefile} == 1) then
       cat ${base_data} | grep -A 99999999 "total ice area (km^2)" | grep -e istep1: -e = | grep -iv "min, max, sum" | grep -iv "init_vert" | grep -iv "ridge_ice" >&! ${base_out}
       cat ${test_data} | grep -A 99999999 "total ice area (km^2)" | grep -e istep1: -e = | grep -iv "min, max, sum" | grep -iv "init_vert" | grep -iv "ridge_ice" >&! ${test_out}
+    else if (${cicefile} == 2) then
+      cat ${base_data} | grep -A 99999999 "total ice area (km^2)" | grep -e "total " -e "arwt " -e "max " -e "kinetic" >&! ${base_out}
+      cat ${test_data} | grep -A 99999999 "total ice area (km^2)" | grep -e "total " -e "arwt " -e "max " -e "kinetic" >&! ${test_out}
     else
       sed -n '/RunningUnitTest/,$p' ${base_data} >! ${base_out}
       sed -n '/RunningUnitTest/,$p' ${test_data} >! ${test_out}
diff --git a/configuration/scripts/tests/unittest_suite.ts b/configuration/scripts/tests/unittest_suite.ts
index 7486e87aa..840fc822e 100644
--- a/configuration/scripts/tests/unittest_suite.ts
+++ b/configuration/scripts/tests/unittest_suite.ts
@@ -1,10 +1,12 @@
 # Test Grid PEs Sets BFB-compare
+smoke gx3 8x2 diag1,run5day
+smoke gx3 4x2x25x29x4 debug,run2day,dslenderX2
 unittest gx3 1x1 helloworld
-unittest gx3 1x1 optargs
 unittest gx3 1x1 calchk,short
 unittest gx3 4x1x25x29x4 sumchk
 unittest gx3 1x1x25x29x16 sumchk
 unittest tx1 8x1 sumchk
+unittest tx1 8x1 sumchk,tripolet
 unittest gx3 4x1 bcstchk
 unittest gx3 1x1 bcstchk
 unittest gx3 8x2 gridavgchk,dwblockall
@@ -27,3 +29,7 @@ unittest tx1 4x2 halochk,dwblockall
 unittest tx1 4x2 halochk,dwblockall,tripolet
 unittest tx1 4x2x65x45x10 halochk,dwblockall
 unittest tx1 4x2x57x43x12 halochk,dwblockall,tripolet
+unittest gx3 1x1 optargs
+unittest gx3 1x1 opticep
+unittest gx3 4x2x25x29x4 debug,run2day,dslenderX2,opticep,cmplog smoke_gx3_4x2x25x29x4_debug_dslenderX2_run2day
+unittest gx3 8x2 diag1,run5day,opticep,cmplog smoke_gx3_8x2_diag1_run5day
diff --git a/doc/source/cice_index.rst b/doc/source/cice_index.rst
index 000004bb9..36c772eff 100644
--- a/doc/source/cice_index.rst
+++ b/doc/source/cice_index.rst
@@ -314,14 +314,15 @@ either Celsius or Kelvin units). Deprecated parameters are listed at the end.
"highfreq", "high-frequency atmo coupling", "F" "hin_old", "ice thickness prior to growth/melt", "m" "hin_max", "category thickness limits", "m" - "hist_avg", "if true, write averaged data instead of snapshots", "T" - "histfreq", "units of history output frequency: y, m, w, d or 1", "" + "hist_avg", "if true, write averaged data instead of snapshots", "T,T,T,T,T" + "histfreq", "units of history output frequency: y, m, w, d or 1", "m,x,x,x,x" "histfreq_base", "reference date for history output", "" - "histfreq_n", "integer output frequency in histfreq units", "" + "histfreq_n", "integer output frequency in histfreq units", "1,1,1,1,1" "history_dir", "path to history output files", "" "history_file", "history output file prefix", "" "history_format", "history file format", "" "history_precision", "history output precision: 4 or 8 byte", "4" + "hist_time_axis", "history file time axis interval location: begin, middle, end", "end" "hm", "land/boundary mask, thickness (T-cell)", "" "hmix", "ocean mixed layer depth", "20. m" "hour", "hour of the year", "" diff --git a/doc/source/developer_guide/dg_forcing.rst b/doc/source/developer_guide/dg_forcing.rst index 0b90a9b2e..8cf293843 100644 --- a/doc/source/developer_guide/dg_forcing.rst +++ b/doc/source/developer_guide/dg_forcing.rst @@ -21,12 +21,12 @@ primitive, in part due to historical reasons and in part because standalone runs are discouraged for evaluating complex science. In general, most implementations use aspects of the following approach, -- Input files are organized by year. +- Input files are organized by year. The underlying implementation provides for some flexibility and extensibility in filenames. For instance, JRA55 and JRA55do filenames can have syntax like ``[JRA55,JRA55do][_$grid,'']_03hr_forcing[_$grid,'']_$year.nc`` where $grid is optional or may be present at one of two locations within the filename. This implementation exists to support the current naming conventions within the gx1, gx3, and tx1 JRA55 and JRA55do CICE_DATA directory structure automatically. See **JRA55_files** in **ice_forcing.F90** for more details. - Namelist inputs ``fyear`` and ``ycycle`` specify the forcing year dataset. - The forcing year is computed on the fly and is assumed to be cyclical over the forcing dataset length defined by ``ycycle``. -- The namelist ``atm_dat_dir`` specifies the directory of the atmosphere input data files and the namelist ``atm_data_type`` defines the atmospheric forcing mode. -- The namelist ``ocn_dat_dir`` specifies the directory of the ocean input data files and the namelist ``ocn_data_type`` defines the ocean forcing mode. -- The filenames follow a particular naming convention that is defined in the source code (ie. subroutine **JRA55_gx1_files**). The forcing year is typically found just before the **.nc** part of the filename and there are tools (subroutine **file_year**) to update the filename based on the model year and appropriate forcing year. +- The namelist ``atm_data_dir`` specifies the path or partial path for the atmosphere input data files and the namelist ``atm_data_type`` defines the atmospheric forcing mode. ``atm_data_type`` values of ``JRA55``, ``JRA55do``, or ``ncar`` provide some flexibility for directory paths and filenames. Many details can be gleaned from the CICE_data directory structure and file names as well as from the implementation in **ice_forcing.F90**. 
But the main point is that atm_data_dir should be set to ${CICE_DATA_root}/forcing/$grid/[JRA55,JRA55do,NCAR_bulk,''] where [JRA55,JRA55do,NCAR_bulk] are optional but provided for backwards compatibility. $grid is typically gx1, gx3, tx1, or similar.
+- The namelist ``ocn_data_dir`` specifies the directory of the ocean input data files and the namelist ``ocn_data_type`` defines the ocean forcing mode.
+- The filenames follow a particular naming convention that is defined in the source code (i.e., subroutine **JRA55_files**). The forcing year is typically found just before the **.nc** part of the filename and there are tools (subroutine **file_year**) to update the filename based on the model year and appropriate forcing year.
 - The input data time axis is generally NOT read by the forcing subroutine. The forcing frequency is hardwired into the model and the file record number is computed based on the forcing frequency and model time. Mixing leap year input data and noleap model calendars (and vice versa) is not handled particularly gracefully. The CICE model does not read or check against the input data time axis.
 - Data is read on the model grid, no spatial interpolation exists.
 - Data is often time interpolated linearly between two input timestamps to the model time each model timestep.
@@ -79,8 +79,8 @@ input data fields to model forcing fields.

 .. _JRA55forcing:

-JRA55 Atmosphere Forcing
--------------------------
+JRA55 and JRA55do Atmosphere Forcing
+------------------------------------

 The current default atmosphere forcing for gx3, gx1, and tx1
 standalone grids for Consortium testing is the JRA55 forcing
@@ -136,6 +136,11 @@ March 1, and all data after March 1 will be shifted one day. December 31 in
 leap years will be skipped when running with a CICE calendar
 with no leap days.

+JRA55do forcing is also provided by the Consortium in the same format and scheme. The JRA55do
+dataset is more focused on forcing for ocean and ice models, but provides a very similar climate
+to the JRA55 forcing. To switch to JRA55do, set the namelist ``atm_data_type`` to ``JRA55do``
+and populate the input data directory with the JRA55do dataset provided by the Consortium.
+
 .. _NCARforcing:

diff --git a/doc/source/developer_guide/dg_other.rst b/doc/source/developer_guide/dg_other.rst
index 308c2629c..a8f6e8b15 100644
--- a/doc/source/developer_guide/dg_other.rst
+++ b/doc/source/developer_guide/dg_other.rst
@@ -177,14 +177,14 @@ the tracer dependencies (weights), which are tracked using the
 arrays ``trcr_base`` (a dependency mask), ``n_trcr_strata`` (the number of
 underlying tracer layers), and ``nt_strata`` (indices of underlying layers).
 Additional information about tracers can be found in the
-`Icepack documentation `__.
+`Icepack documentation `__.

 To add a tracer, follow these steps using one of the existing tracers as
 a pattern.

 1) **icepack\_tracers.F90** and **icepack\_[tracer].F90**: declare tracers,
    add flags and indices, and create physics routines as described in the
-   `Icepack documentation `__
+   `Icepack documentation `__

 2) **ice_arrays_column.F90**: declare arrays

@@ -233,6 +233,6 @@ a pattern.
     configuration in **configuration/scripts/options**.

 12) If strict conservation is necessary, add diagnostics as noted for
-    topo ponds in the `Icepack documentation `__.
+    topo ponds in the `Icepack documentation `__.
13) Update documentation, including **cice_index.rst** and **ug_case_settings.rst** diff --git a/doc/source/developer_guide/dg_tools.rst b/doc/source/developer_guide/dg_tools.rst index ba29e0184..74df2343b 100644 --- a/doc/source/developer_guide/dg_tools.rst +++ b/doc/source/developer_guide/dg_tools.rst @@ -27,10 +27,10 @@ JRA55 forcing datasets ------------------------ This section describes how to generate JRA55 forcing data for the CICE model. -Raw JRA55 files have to be interpolated and processed into input files specifically +Raw JRA55 or JRA55do files have to be interpolated and processed into input files specifically for the CICE model. A tool exists in **configuration/tools/jra55_datasets** to support that process. -The raw JRA55 data is obtained from the NCAR/UCAR Research Data Archive and +The raw JRA55 or JRA55do data is obtained from the NCAR/UCAR Research Data Archive and the conversion tools are written in python. Requirements diff --git a/doc/source/intro/about.rst b/doc/source/intro/about.rst index b249a8dfb..3845cfbc0 100644 --- a/doc/source/intro/about.rst +++ b/doc/source/intro/about.rst @@ -23,7 +23,7 @@ coupled with other earth system model components, routines external to the CICE model prepare and execute data exchanges with an external “flux coupler”. Icepack is implemented in CICE as a git submodule, and it is documented at -https://cice-consortium-icepack.readthedocs.io/en/master/index.html. +https://cice-consortium-icepack.readthedocs.io/en/main/index.html. Development and testing of CICE and Icepack may be done together, but the repositories are independent. This document describes the remainder of the CICE model. The CICE code is diff --git a/doc/source/intro/citing.rst b/doc/source/intro/citing.rst index c128bc4e6..593041b21 100644 --- a/doc/source/intro/citing.rst +++ b/doc/source/intro/citing.rst @@ -15,7 +15,7 @@ More information can be found by following the DOI link to zenodo. If you use CICE, please cite the version number of the code you are using or modifying. -If using code from the CICE-Consortium repository ``master`` branch +If using code from the CICE-Consortium repository ``main`` branch that includes modifications that have not yet been released with a version number, then in addition to the most recent version number, the hash at time of diff --git a/doc/source/master_list.bib b/doc/source/master_list.bib index 9e387efb9..a7c3a1174 100644 --- a/doc/source/master_list.bib +++ b/doc/source/master_list.bib @@ -331,7 +331,7 @@ @Manual{Smith95 title = "{Curvilinear coordinates for global ocean models}", organization = "Technical Report LA-UR-95-1146, Los Alamos National Laboratory", year = {1995}, - url = {https://github.com/CICE-Consortium/CICE/blob/master/doc/PDF/LAUR-95-1146.pdf} + url = {https://github.com/CICE-Consortium/CICE/blob/main/doc/PDF/LAUR-95-1146.pdf} } @Article{Zwiers95, author = "F.W. Zwiers and H. von Storch", @@ -523,14 +523,14 @@ @Manual{Kauffman02 title = "{The CCSM coupler, version 5.0.1}", journal = NTN, year = {2002}, - url = {https://github.com/CICE-Consortium/CICE/blob/master/doc/PDF/KL_NCAR2002.pdf} + url = {https://github.com/CICE-Consortium/CICE/blob/main/doc/PDF/KL_NCAR2002.pdf} } @Manual{Hunke03, author = "E.C. Hunke and J.K. 
Dukowicz", title = "{The sea ice momentum equation in the free drift regime}", organization = "Technical Report LA-UR-03-2219, Los Alamos National Laboratory", year = {2003}, - url = {https://github.com/CICE-Consortium/CICE/blob/master/doc/PDF/LAUR-03-2219.pdf} + url = {https://github.com/CICE-Consortium/CICE/blob/main/doc/PDF/LAUR-03-2219.pdf} } @Article{Amundrud04, author = "T.L. Amundrud and H. Malling and R.G. Ingram", @@ -636,7 +636,7 @@ @Article{Jin06 year = {2006}, volume = {44}, pages = {63-72}, - url = {https://github.com/CICE-Consortium/CICE/blob/master/doc/PDF/JDWSTWLG06.pdf} + url = {https://github.com/CICE-Consortium/CICE/blob/main/doc/PDF/JDWSTWLG06.pdf} } @Article{Wilchinsky06, author = "A.V. Wilchinsky and D.L. Feltham", @@ -660,7 +660,7 @@ @Manual{Briegleb07 title = "{A Delta-Eddington multiple scattering parameterization for solar radiation in the sea ice component of the Community Climate System Model}", organization = "NCAR Technical Note NCAR/TN-472+STR, National Center for Atmospheric Research", year = {2007}, - url = {https://github.com/CICE-Consortium/CICE/blob/master/doc/PDF/BL_NCAR2007.pdf} + url = {https://github.com/CICE-Consortium/CICE/blob/main/doc/PDF/BL_NCAR2007.pdf} } @Article{Flocco07, author = "D. Flocco and D.L. Feltham", diff --git a/doc/source/science_guide/sg_coupling.rst b/doc/source/science_guide/sg_coupling.rst index c01e2bea5..666c13ed4 100644 --- a/doc/source/science_guide/sg_coupling.rst +++ b/doc/source/science_guide/sg_coupling.rst @@ -27,7 +27,7 @@ variables for each cell. These considerations are explained in more detail below. The fluxes and state variables passed between the sea ice model and the -CESM flux coupler are listed in the `Icepack documentation `_. +CESM flux coupler are listed in the `Icepack documentation `_. By convention, directional fluxes are positive downward. In CESM, the sea ice model may exchange coupling fluxes using a different grid than the computational @@ -135,6 +135,6 @@ thin compared to the typical depth of the Ekman spiral, then :math:`\theta=0` is a good approximation. Here we assume that the top layer is thin enough. -Please see the `Icepack documentation `_ for additional information about +Please see the `Icepack documentation `_ for additional information about atmospheric and oceanic forcing and other data exchanged between the flux coupler and the sea ice model. diff --git a/doc/source/science_guide/sg_dynamics.rst b/doc/source/science_guide/sg_dynamics.rst index 585c18616..1ddf94472 100644 --- a/doc/source/science_guide/sg_dynamics.rst +++ b/doc/source/science_guide/sg_dynamics.rst @@ -284,7 +284,7 @@ Parameters for the FGMRES linear solver and the preconditioner can be controlled Surface stress terms ******************** -The formulation for the wind stress is described in `Icepack Documentation `_. Below, some details about the ice-ocean stress and the seabed stress are given. +The formulation for the wind stress is described in `Icepack Documentation `_. Below, some details about the ice-ocean stress and the seabed stress are given. Ice-Ocean stress ~~~~~~~~~~~~~~~~ @@ -515,7 +515,7 @@ where When the deformation :math:`\Delta` tends toward zero, the viscosities tend toward infinity. To avoid this issue, :math:`\Delta` needs to be limited and is replaced by :math:`\Delta^*` in equation :eq:`zeta`. Two methods for limiting :math:`\Delta` (or for capping the viscosities) are available in the code. 
If the namelist parameter ``capping_method`` is set to ``max``, :math:`\Delta^*=max(\Delta, \Delta_{min})` :cite:`Hibler79` while with ``capping_method`` set to ``sum``, the smoother formulation :math:`\Delta^*=(\Delta + \Delta_{min})` of :cite:`Kreyscher00` is used.

 The ice strength :math:`P` is a function of the ice thickness distribution as
-described in the `Icepack Documentation `_.
+described in the `Icepack Documentation `_.

 Two other modifications to the standard VP rheology of :cite:`Hibler79` are available. First, following the approach of :cite:`Konig10` (see also :cite:`Lemieux16`), the
diff --git a/doc/source/science_guide/sg_fundvars.rst b/doc/source/science_guide/sg_fundvars.rst
index 5b5703266..2d6f50328 100644
--- a/doc/source/science_guide/sg_fundvars.rst
+++ b/doc/source/science_guide/sg_fundvars.rst
@@ -17,7 +17,7 @@ In addition to an ice thickness distribution, CICE includes an optional capabili
 Ice floe horizontal size may change through vertical and lateral growth and melting of existing floes, freezing of new ice, wave breaking, and welding of floes in freezing conditions. The floe size distribution (FSD) is a probability function that characterizes this variability. The scheme is based on the theoretical framework described in :cite:`Horvat15` for a joint floe size and thickness distribution (FSTD), and was implemented by :cite:`Roach18` and :cite:`Roach19`. The joint floe size distribution is carried as an area-weighted tracer, defined as the fraction of ice belonging to a given thickness category with lateral floe size belonging to a given floe size class. This development includes interactions between sea ice and ocean surface waves. Input data on ocean surface wave spectra at a single time is provided for testing, but as with the other CICE datasets, it should not be used for production runs or publications. It is not recommended to use the FSD without ocean surface waves.
 Additional information about the ITD and joint FSTD for CICE can be found in the
-`Icepack documentation `_.
+`Icepack documentation `_.

 The fundamental equation solved by CICE is :cite:`Thorndike75`:

@@ -87,7 +87,7 @@ Section :ref:`horiz-trans`. Ice is transported in thickness space
 using the remapping scheme of :cite:`Lipscomb01`. The mechanical
 redistribution scheme, based on :cite:`Thorndike75`, :cite:`Rothrock75`, :cite:`Hibler80`, :cite:`Flato95`, and :cite:`Lipscomb07` is outlined
-in the `Icepack Documentation `_.
+in the `Icepack Documentation `_.

 To solve the horizontal transport and ridging equations, we need the ice
 velocity :math:`{\bf u}`, and to compute transport in thickness space,
 we must know the ice growth
diff --git a/doc/source/science_guide/sg_horiztrans.rst b/doc/source/science_guide/sg_horiztrans.rst
index d66046465..10b668755 100644
--- a/doc/source/science_guide/sg_horiztrans.rst
+++ b/doc/source/science_guide/sg_horiztrans.rst
@@ -11,7 +11,7 @@ thickness category :math:`n`. Equation :eq:`transport-ai` describes the
 conservation of ice area under horizontal transport. It is obtained
 from Equation :eq:`transport-g` by discretizing :math:`g` and
 neglecting the second and third terms on the right-hand side, which are treated
-separately (As described in the `Icepack Documentation `_).
+separately (as described in the `Icepack Documentation `_).
There are similar conservation equations for ice volume (Equation :eq:`transport-vi`), snow volume (Equation :eq:`transport-vs`), ice @@ -39,7 +39,7 @@ remapping scheme of :cite:`Dukowicz00` as modified for sea ice by - The upwind scheme uses velocity points at the East and North face (i.e. :math:`uvelE=u` at the E point and :math:`vvelN=v` at the N point) of a T gridcell. As such, the prognostic C grid velocity components (:math:`uvelE` and :math:`vvelN`) can be passed directly to the upwind transport scheme. If the upwind scheme is used with the B grid, the B grid velocities, :math:`uvelU` and :math:`vvelU` (respectively :math:`u` and :math:`v` at the U point) are interpolated to the E and N points first. (Note however that the upwind scheme does not transport all potentially available tracers.) -- The remapping scheme uses :math:`uvelU` and :math:`vvelU` if l_fixed_area is false and :math:`uvelE` and :math:`vvelN` if l_fixed_area is true. l_fixed_area is hardcoded to false by default and further described below. As such, the B grid velocities (:math:`uvelU` and :math:`vvelU`) are used directly in the remapping scheme, while the C grid velocities (:math:`uvelE` and :math:`vvelN`) are interpolated to U points first. If l_fixed_area is changed to true, then the reverse is true. The C grid velocities are used directly and the B grid velocities are interpolated. +- Remapping is naturally a B-grid transport scheme as the corner (U point) velocity components :math:`uvelU` and :math:`vvelU` are used to calculate departure points. Nevertheless, the remapping scheme can also be used with the C grid by first interpolating :math:`uvelE` and :math:`vvelN` to the U points. The remapping scheme has several desirable features: @@ -98,7 +98,7 @@ below. After the transport calculation, the sum of ice and open water areas within a grid cell may not add up to 1. The mechanical deformation parameterization in -`Icepack `_ +`Icepack `_ corrects this issue by ridging the ice and creating open water such that the ice and open water areas again add up to 1. diff --git a/doc/source/science_guide/sg_tracers.rst b/doc/source/science_guide/sg_tracers.rst index cbecb9310..5935fe67e 100644 --- a/doc/source/science_guide/sg_tracers.rst +++ b/doc/source/science_guide/sg_tracers.rst @@ -119,4 +119,4 @@ Users may add any number of additional tracers that are transported conservative provided that the dependency ``trcr_depend`` is defined appropriately. See Section :ref:`addtrcr` for guidance on adding tracers. -Please see the `Icepack documentation `_ for additional information about tracers that depend on other tracers, the floe size distribution, advanced snow physics, age of the ice, aerosols, water isotopes, brine height, and the sea ice ecosystem. +Please see the `Icepack documentation `_ for additional information about tracers that depend on other tracers, the floe size distribution, advanced snow physics, age of the ice, aerosols, water isotopes, brine height, and the sea ice ecosystem. 
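To see how the JRA55/JRA55do forcing description in dg_forcing.rst above and the ``forcing_nml`` options tabulated in ug_case_settings.rst below fit together, the following is a minimal, illustrative namelist sketch; the ``atm_data_dir`` value is a placeholder that assumes the standard CICE_data directory layout, not a verified path::

    &forcing_nml
      atm_data_type   = 'JRA55do'   ! select the JRA55do forcing mode
      atm_data_format = 'nc'        ! JRA55do files are netCDF
      atm_data_dir    = '/path/to/CICE_data/forcing/gx3'  ! placeholder path
      fyear_init      = 2005        ! first year of the forcing cycle
      ycycle          = 1           ! number of years in the forcing cycle
    /

With ``ycycle = 1`` the single 2005 forcing year is reused every model year; with ``ycycle > 1`` the forcing year advances cyclically from ``fyear_init`` through ``fyear_init + ycycle - 1``, as described in dg_forcing.rst.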
diff --git a/doc/source/user_guide/ug_case_settings.rst b/doc/source/user_guide/ug_case_settings.rst index 9906fba87..516f3238d 100644 --- a/doc/source/user_guide/ug_case_settings.rst +++ b/doc/source/user_guide/ug_case_settings.rst @@ -176,7 +176,7 @@ setup_nml "", "zero", "restart output frequency relative to year-month-day of 0000-01-01", "" "``dumpfreq_n``", "integer", "write restart frequency with ``dumpfreq``", "1" "``dump_last``", "logical", "write restart on last time step of simulation", "``.false.``" - "``hist_avg``", "logical", "write time-averaged data", "``.true.``" + "``hist_avg``", "logical", "write time-averaged data", "``.true.,.true.,.true.,.true.,.true.``" "``histfreq``", "``d``", "write history every ``histfreq_n`` days", "'1','h','d','m','y'" "", "``h``", "write history every ``histfreq_n`` hours", "" "", "``m``", "write history every ``histfreq_n`` months", "" @@ -191,6 +191,7 @@ setup_nml "``history_format``", "``default``", "read/write history files in default format", "``default``" "", "``pio_pnetcdf``", "read/write restart files with pnetcdf in pio", "" "``history_precision``", "integer", "history file precision: 4 or 8 byte", "4" + "``hist_time_axis``","character","history file time axis interval location: begin, middle, end","end" "``ice_ic``", "``default``", "equal to internal", "``default``" "", "``internal``", "initial conditions set based on ice\_data\_type,conc,dist inputs", "" "", "``none``", "no ice", "" @@ -595,15 +596,14 @@ forcing_nml "", "``constant``", "constant-based boundary layer", "" "", "``mixed``", "stability-based boundary layer for wind stress, constant-based for sensible+latent heat fluxes", "" "``atmiter_conv``", "real", "convergence criteria for ustar", "0.0" - "``atm_data_dir``", "string", "path to atmospheric forcing data directory", "" + "``atm_data_dir``", "string", "path or partial path to atmospheric forcing data directory", "" "``atm_data_format``", "``bin``", "read direct access binary atmo forcing file format", "``bin``" "", "``nc``", "read netcdf atmo forcing files", "" "``atm_data_type``", "``box2001``", "forcing data for :cite:`Hunke01` box problem", "``default``" "", "``default``", "constant values defined in the code", "" "", "``hycom``", "HYCOM atm forcing data in netcdf format", "" - "", "``JRA55_gx1``", "JRA55 forcing data for gx1 grid :cite:`Tsujino18`", "" - "", "``JRA55_gx3``", "JRA55 forcing data for gx3 grid :cite:`Tsujino18`", "" - "", "``JRA55_tx1``", "JRA55 forcing data for tx1 grid :cite:`Tsujino18`", "" + "", "``JRA55``", "JRA55 forcing data :cite:`Tsujino18`", "" + "", "``JRA55do``", "JRA55do forcing data :cite:`Tsujino18`", "" "", "``monthly``", "monthly forcing data", "" "", "``ncar``", "NCAR bulk forcing data", "" "", "``oned``", "column forcing data", "" diff --git a/doc/source/user_guide/ug_implementation.rst b/doc/source/user_guide/ug_implementation.rst index d9ea07a02..9bcf205b4 100644 --- a/doc/source/user_guide/ug_implementation.rst +++ b/doc/source/user_guide/ug_implementation.rst @@ -1037,7 +1037,7 @@ used in coupled models. MPI is initialized in *init\_communicate* for both coupled and stand-alone MPI runs. The ice component communicates with a flux coupler or other climate components via external routines that handle the -variables listed in the `Icepack documentation `_. +variables listed in the `Icepack documentation `_. 
For stand-alone runs, routines in **ice\_forcing.F90** read and interpolate data from files, and are intended merely to provide guidance for the user to write his or
@@ -1154,7 +1154,8 @@ io package. The namelist variable ``history_format`` further refines the format approach or style for some io packages.

 Model output data can be written as instantaneous or average data as specified
-by the ``hist_avg`` namelist flag. The data is written at the period(s) given by ``histfreq`` and
+by the ``hist_avg`` namelist array and is customizable by stream. The data is
+written at the period(s) given by ``histfreq`` and
 ``histfreq_n`` relative to a reference date specified by ``histfreq_base``.
 The files are written to binary or netCDF files prepended by ``history_file``
 in **ice_in**. These settings for history files are set in the
@@ -1196,8 +1197,11 @@ with a given ``histfreq`` value, or if an element of ``histfreq_n`` is 0, then
 no file will be written at that frequency. The output period can be discerned
 from the filenames. All history streams will be either instantaneous or averaged
 as specified by the ``hist_avg`` namelist setting and the frequency
-will be relative to a reference date specified by ``histfreq_base``. More
-information about how the frequency is computed is found in :ref:`timemanager`.
+will be relative to a reference date specified by ``histfreq_base``. Also, some
+Earth System Models require the history file time axis to be centered in the averaging
+interval. The flag ``hist_time_axis`` will allow the user to choose ``begin``, ``middle``,
+or ``end`` for the time stamp. More information about how the frequency is
+computed is found in :ref:`timemanager`.

 For example, in the namelist:
@@ -1206,7 +1210,7 @@ For example, in the namelist:
 histfreq = ’1’, ’h’, ’d’, ’m’, ’y’
 histfreq_n = 1, 6, 0, 1, 1
 histfreq_base = 'zero'
-hist_avg = .true.
+hist_avg = .true.,.true.,.true.,.true.,.true.
 f_hi = ’1’
 f_hs = ’h’
 f_Tsfc = ’d’
diff --git a/doc/source/user_guide/ug_testing.rst b/doc/source/user_guide/ug_testing.rst
index 606ae1397..e382eba17 100644
--- a/doc/source/user_guide/ug_testing.rst
+++ b/doc/source/user_guide/ug_testing.rst
@@ -8,7 +8,7 @@ Testing CICE

 This section documents primarily how to use the CICE scripts to carry
 out CICE testing. Exactly what to test is a separate question and
 depends on the kinds of code changes being made. Prior to merging
-changes to the CICE Consortium master, changes will be reviewed and
+changes to the CICE Consortium main branch, changes will be reviewed and
 developers will need to provide a summary of the tests carried out.

 There is a base suite of tests provided by default with CICE and this
@@ -455,7 +455,7 @@ validation process. However, a full test suite should be run on the final devel
 version of the code. To report the test results, as is required for Pull Requests to be accepted into
-the master the CICE Consortium code see :ref:`testreporting`.
+the main branch of the CICE Consortium code, see :ref:`testreporting`.

 If using the ``--tdir`` option, that directory must not exist before the script
 is run. The tdir directory will be created by the script and it will be
 populated by all tests as well as scripts that support the
@@ -578,7 +578,7 @@ Test Suite Examples
 the subdirectory cice.v01a. With the ``--bcmp`` option, the results will be tested
 against prior baselines to verify bit-for-bit, which is an important step prior
 to approval of many (not all, see :ref:`validation`) Pull Requests to incorporate code into
-   the CICE Consortium master code.
You can use other regression options as well.
+   the CICE Consortium main branch. You can use other regression options as well.
    (``--bdir`` and ``--bgen``)

 10) **Basic test suite, use of default string in regression testing**
@@ -603,7 +603,7 @@ Test Suite Examples
     set mydate = `date -u "+%Y%m%d"`
     git clone https://github.com/myfork/cice cice.$mydate --recursive
     cd cice.$mydate
-    ./cice.setup --suite base_suite --mach conrad --env cray,gnu,intel,pgi --testid $mydate --bcmp default --bgen default --bdir /tmp/work/user/CICE_BASELINES_MASTER
+    ./cice.setup --suite base_suite --mach conrad --env cray,gnu,intel,pgi --testid $mydate --bcmp default --bgen default --bdir /tmp/work/user/CICE_BASELINES_MAIN

 When this is invoked, a new set of baselines will be generated and compared to the prior results each time without having to change the arguments.
@@ -742,6 +742,8 @@ The following are brief descriptions of some of the current unit tests, in the Makefile

 - **optargs** is a unit test that tests passing optional arguments down a calling tree and verifying that the optional attribute is preserved correctly.
+ - **opticep** is a CICE test that turns off the optional arguments passed into Icepack. This
+ can only be run with a subset of CICE/Icepack cases to verify that the optional arguments are working correctly.
 - **sumchk** is a unit test that exercises the methods in ice_global_reductions.F90. This test requires that a CICE grid and decomposition be initialized, so CICE_InitMod.F90 is leveraged to initialize the model prior to running a suite of unit validation tests to verify correctness.
@@ -757,7 +759,7 @@ to the official CICE Consortium Test-Results
 `wiki page `_. You may need write permission on the wiki. If you are interested in using the wiki, please contact the Consortium. Note that in order for code to be
-accepted to the CICE master through a Pull Request it is necessary
+accepted to the CICE main branch through a Pull Request, it is necessary
 for the developer to provide proof that their code passes relevant tests. This can be accomplished by posting the full results to the wiki, or by copying the testing summary to the Pull Request comments.
@@ -824,7 +826,7 @@ assess test coverage.

 ..Because codecov.io does not support git submodule analysis right now, a customized
 ..repository has to be created to test CICE with Icepack integrated directly. The repository
 ..https://github.com/apcraig/Test_CICE_Icepack serves as the current default test repository.
-..In general, to setup the code coverage test in CICE, the current CICE master has
+..In general, to setup the code coverage test in CICE, the current CICE main has
 ..to be copied into the Test_CICE_Icepack repository, then the full test suite
 ..can be run with the gnu compiler with the ``--coverage`` argument.
diff --git a/doc/source/user_guide/ug_troubleshooting.rst b/doc/source/user_guide/ug_troubleshooting.rst
index 315b2f869..9d8c49a72 100644
--- a/doc/source/user_guide/ug_troubleshooting.rst
+++ b/doc/source/user_guide/ug_troubleshooting.rst
@@ -106,7 +106,7 @@ parameterizations are used, the code
 should be able to execute from these files. However if different physics
 is used (for instance, mushy thermo instead of BL99), the code may still fail.
 To convert a v4.1 restart file, consult section 5.2 in the `CICE v5 documentation
-`_.
+`_.

 If restart files are taking a long time to be written serially (i.e., not using PIO), see the next section.
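As a companion to the per-stream history documentation in ug_implementation.rst above, a short ``setup_nml`` sketch follows; the values are illustrative assumptions for a possible configuration, not recommended settings::

    &setup_nml
      histfreq       = 'm','d','x','x','x'                 ! streams 3-5 disabled
      histfreq_n     = 1, 1, 1, 1, 1                       ! frequency per stream
      histfreq_base  = 'zero'                              ! reference date
      hist_avg       = .true.,.false.,.true.,.true.,.true. ! per-stream averaging
      hist_time_axis = 'middle'                            ! center time stamp in interval
    /

Here the first stream writes monthly means with the time stamp centered in the averaging interval (the new ``hist_time_axis`` behavior), the second stream writes daily snapshots because its ``hist_avg`` entry is false, and the remaining streams are disabled by ``histfreq = 'x'``.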
@@ -228,7 +228,7 @@ Interpretation of albedos More information about interpretation of albedos can be found in the -`Icepack documentation `_. +`Icepack documentation `_. VP dynamics results diff --git a/icepack b/icepack index a4779cc71..4728746ea 160000 --- a/icepack +++ b/icepack @@ -1 +1 @@ -Subproject commit a4779cc71125b40a7db3a4da8512247cbf2b0955 +Subproject commit 4728746ea2926bf10acc5de354b3eae16d418af5 From a1f066ba4f6a738ac88ae4952ea91e2e9388c412 Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Mon, 21 Aug 2023 19:13:38 +0000 Subject: [PATCH 87/90] reset timer after Advance to retrieve "wait time" --- cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 2 ++ 1 file changed, 2 insertions(+) diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index e85a32a0b..1ae2f5d83 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -1186,6 +1186,8 @@ subroutine ModelAdvance(gcomp, rc) if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) if (mastertask) call ufs_logtimer(nu_timer,msec,'ModelAdvance time: ', wtime) + call ufs_settimer(wtime) + end subroutine ModelAdvance !=============================================================================== From 897ea65b0148f41904c8eb54597057219a381621 Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Wed, 23 Aug 2023 11:41:27 -0400 Subject: [PATCH 88/90] add logical control for enabling runtime info --- .../drivers/nuopc/cmeps/cice_wrapper_mod.F90 | 18 +++++++++++---- .../drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 23 ++++++++++++------- 2 files changed, 28 insertions(+), 13 deletions(-) diff --git a/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 b/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 index 2e0324e22..d0aafbb43 100644 --- a/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 +++ b/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 @@ -7,20 +7,24 @@ module cice_wrapper_mod use ice_kinds_mod , only : dbl_kind, int_kind, char_len, char_len_long implicit none + + real(dbl_kind) :: wtime = 0.0 contains ! Define stub routines that do nothing - they are just here to avoid ! having cppdefs in the main program subroutine ufs_settimer(timevalue) - real(dbl_kind), intent(out) :: timevalue + real(dbl_kind), intent(inout) :: timevalue end subroutine ufs_settimer - subroutine ufs_logtimer(nunit,elapsedsecs,string,time0) + subroutine ufs_logtimer(nunit,elapsedsecs,string,runtimelog,time0) integer, intent(in) :: nunit integer(int_kind), intent(in) :: elapsedsecs character(len=*), intent(in) :: string + logical, intent(in) :: runtimelog real(dbl_kind), intent(in) :: time0 end subroutine ufs_logtimer - subroutine ufs_file_setLogUnit(filename,nunit) + subroutine ufs_file_setLogUnit(filename,nunit,runtimelog) character(len=*), intent(in) :: filename + logical, intent(in) :: runtimelog integer, intent(out) :: nunit end subroutine ufs_file_setLogUnit subroutine ufs_logfhour(msg,hour) @@ -41,21 +45,25 @@ subroutine ufs_settimer(timevalue) timevalue = MPI_Wtime() end subroutine ufs_settimer - subroutine ufs_logtimer(nunit,elapsedsecs,string,time0) + subroutine ufs_logtimer(nunit,elapsedsecs,string,runtimelog,time0) integer, intent(in) :: nunit integer(int_kind), intent(in) :: elapsedsecs character(len=*), intent(in) :: string + logical, intent(in) :: runtimelog real(dbl_kind), intent(in) :: time0 real(dbl_kind) :: MPI_Wtime, timevalue + if (.not. runtimelog) return if (time0 > 0.) 
then timevalue = MPI_Wtime()-time0 write(nunit,*)elapsedsecs,' CICE '//trim(string),timevalue end if end subroutine ufs_logtimer - subroutine ufs_file_setLogUnit(filename,nunit) + subroutine ufs_file_setLogUnit(filename,nunit,runtimelog) character(len=*), intent(in) :: filename + logical, intent(in) :: runtimelog integer, intent(out) :: nunit + if (.not. runtimelog) return open (newunit=nunit, file=trim(filename)) end subroutine ufs_file_setLogUnit diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index 1ae2f5d83..4bdb7deb2 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -88,11 +88,12 @@ module ice_comp_nuopc type(ESMF_Mesh) :: ice_mesh - integer :: nthrds ! Number of threads to use in this component - integer :: nu_timer ! Simple timer log + integer :: nthrds ! Number of threads to use in this component + integer :: nu_timer = 6 ! Simple timer log, unused except by UFS integer :: dbug = 0 logical :: profile_memory = .false. logical :: mastertask + logical :: runtimelog = .false. integer :: start_ymd ! Start date (YYYYMMDD) integer :: start_tod ! start time of day (s) integer :: curr_ymd ! Current date (YYYYMMDD) @@ -308,6 +309,12 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) write(logmsg,'(i6)') dbug call ESMF_LogWrite('CICE_cap: dbug = '//trim(logmsg), ESMF_LOGMSG_INFO) + call NUOPC_CompAttributeGet(gcomp, name="RunTimeLog", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) runtimelog=(trim(cvalue)=="true") + write(logmsg,*) runtimelog + call ESMF_LogWrite('CICE_cap:RunTimeLog = '//trim(logmsg), ESMF_LOGMSG_INFO) + !---------------------------------------------------------------------------- ! generate local mpi comm !---------------------------------------------------------------------------- @@ -490,7 +497,7 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) ! 
Set the nu_diag_set flag so it's not reset later call shr_file_setLogUnit (shrlogunit) - call ufs_file_setLogUnit('./log.ice.timer', nu_timer) + call ufs_file_setLogUnit('./log.ice.timer',nu_timer,runtimelog) call NUOPC_CompAttributeGet(gcomp, name="diro", value=cvalue, & isPresent=isPresent, isSet=isSet, rc=rc) @@ -703,7 +710,7 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) end if call t_stopf ('cice_init_total') - if (mastertask) call ufs_logtimer(nu_timer,msec,'InitializeAdvertise time: ',wtime) + if (mastertask) call ufs_logtimer(nu_timer,msec,'InitializeAdvertise time: ',runtimelog,wtime) end subroutine InitializeAdvertise !=============================================================================== @@ -917,7 +924,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) call flush_fileunit(nu_diag) - if (mastertask) call ufs_logtimer(nu_timer,msec,'InitializeRealize time: ',wtime) + if (mastertask) call ufs_logtimer(nu_timer,msec,'InitializeRealize time: ',runtimelog,wtime) end subroutine InitializeRealize !=============================================================================== @@ -963,7 +970,7 @@ subroutine ModelAdvance(gcomp, rc) !-------------------------------- rc = ESMF_SUCCESS - if (mastertask) call ufs_logtimer(nu_timer,msec,'ModelAdvance time since last step: ',wtime) + if (mastertask) call ufs_logtimer(nu_timer,msec,'ModelAdvance time since last step: ',runtimelog,wtime) call ufs_settimer(wtime) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) @@ -1185,7 +1192,7 @@ subroutine ModelAdvance(gcomp, rc) if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) - if (mastertask) call ufs_logtimer(nu_timer,msec,'ModelAdvance time: ', wtime) + if (mastertask) call ufs_logtimer(nu_timer,msec,'ModelAdvance time: ',runtimelog,wtime) call ufs_settimer(wtime) end subroutine ModelAdvance @@ -1341,7 +1348,7 @@ subroutine ModelFinalize(gcomp, rc) end if if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) - if(mastertask) call ufs_logtimer(nu_timer,msec,'ModelFinalize time: ', wtime) + if(mastertask) call ufs_logtimer(nu_timer,msec,'ModelFinalize time: ',runtimelog,wtime) end subroutine ModelFinalize From 76c3f7fa802f3156f3696c41517f07f6e891d917 Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Wed, 23 Aug 2023 17:46:59 +0000 Subject: [PATCH 89/90] remove zsal items from cap --- cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 | 11 ++++------ cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 | 20 +++++++------------ 2 files changed, 11 insertions(+), 20 deletions(-) diff --git a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 index 0ba672f3d..86f3b2848 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 @@ -232,7 +232,7 @@ subroutine init_restart() restart_iso, read_restart_iso, & restart_aero, read_restart_aero, & restart_hbrine, read_restart_hbrine, & - restart_zsal, restart_bgc + restart_bgc use ice_restart_driver, only: restartfile use ice_restart_shared, only: runtype, restart use ice_state ! 
almost everything @@ -243,7 +243,7 @@ subroutine init_restart() logical(kind=log_kind) :: & tr_iage, tr_FY, tr_lvl, tr_pond_lvl, & tr_pond_topo, tr_fsd, tr_iso, tr_aero, tr_brine, tr_snow, & - skl_bgc, z_tracers, solve_zsal + skl_bgc, z_tracer integer(kind=int_kind) :: & ntrcr integer(kind=int_kind) :: & @@ -259,8 +259,7 @@ subroutine init_restart() if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - call icepack_query_parameters(skl_bgc_out=skl_bgc, & - z_tracers_out=z_tracers, solve_zsal_out=solve_zsal) + call icepack_query_parameters(skl_bgc_out=skl_bgc, z_tracers_out=z_tracers) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & @@ -404,8 +403,6 @@ subroutine init_restart() if (trim(runtype) == 'continue') then if (tr_brine) & restart_hbrine = .true. - if (solve_zsal) & - restart_zsal = .true. if (skl_bgc .or. z_tracers) & restart_bgc = .true. endif @@ -415,7 +412,7 @@ subroutine init_restart() if (tr_brine .and. restart_hbrine) call read_restart_hbrine endif - if (solve_zsal .or. skl_bgc .or. z_tracers) then ! biogeochemistry + if (skl_bgc .or. z_tracers) then ! biogeochemistry if (tr_fsd) then write (nu_diag,*) 'FSD implementation incomplete for use with BGC' call icepack_warnings_flush(nu_diag) diff --git a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 index 301ada1ea..483048051 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 @@ -114,7 +114,7 @@ subroutine ice_step use ice_calendar, only: idate, myear, mmonth, mday, msec, timesecs use ice_calendar, only: calendar_sec2hms, write_history, nstreams, histfreq use ice_diagnostics, only: init_mass_diags, runtime_diags, debug_model, debug_ice - use ice_diagnostics_bgc, only: hbrine_diags, zsal_diags, bgc_diags + use ice_diagnostics_bgc, only: hbrine_diags, bgc_diags use ice_domain, only: halo_info, nblocks use ice_dyn_eap, only: write_restart_eap use ice_dyn_shared, only: kdyn, kridge @@ -150,7 +150,7 @@ subroutine ice_step logical (kind=log_kind) :: & tr_iage, tr_FY, tr_lvl, tr_fsd, tr_snow, & tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & - calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec + calc_Tsfc, skl_bgc, z_tracers, wave_spec character(len=*), parameter :: subname = '(ice_step)' @@ -166,8 +166,7 @@ subroutine ice_step endif call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc, skl_bgc_out=skl_bgc, & - solve_zsal_out=solve_zsal, z_tracers_out=z_tracers, ktherm_out=ktherm, & - wave_spec_out=wave_spec) + z_tracers_out=z_tracers, ktherm_out=ktherm, wave_spec_out=wave_spec) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & @@ -359,7 +358,6 @@ subroutine ice_step call ice_timer_start(timer_diags) ! diagnostics if (mod(istep,diagfreq) == 0) then call runtime_diags(dt) ! log file - if (solve_zsal) call zsal_diags if (skl_bgc .or. z_tracers) call bgc_diags if (tr_brine) call hbrine_diags endif @@ -381,7 +379,7 @@ subroutine ice_step if (tr_fsd) call write_restart_fsd if (tr_iso) call write_restart_iso if (tr_aero) call write_restart_aero - if (solve_zsal .or. skl_bgc .or. z_tracers) & + if (skl_bgc .or. 
z_tracers) & call write_restart_bgc if (tr_brine) call write_restart_hbrine if (kdyn == 2) call write_restart_eap @@ -409,7 +407,7 @@ end subroutine ice_step subroutine coupling_prep (iblk) use ice_arrays_column, only: alvdfn, alidfn, alvdrn, alidrn, & - albicen, albsnon, albpndn, apeffn, fzsal_g, fzsal, snowfracn + albicen, albsnon, albpndn, apeffn, snowfracn use ice_blocks, only: nx_block, ny_block, get_block, block use ice_domain, only: blocks_ice use ice_calendar, only: dt, nstreams @@ -424,9 +422,8 @@ subroutine coupling_prep (iblk) fsens, flat, fswabs, flwout, evap, Tref, Qref, & scale_fluxes, frzmlt_init, frzmlt, Uref, wind use ice_flux_bgc, only: faero_ocn, fiso_ocn, Qref_iso, fiso_evap, & - fzsal_ai, fzsal_g_ai, flux_bio, flux_bio_ai, & - fnit, fsil, famm, fdmsp, fdms, fhum, fdust, falgalN, & - fdoc, fdic, fdon, ffep, ffed, bgcflux_ice_to_ocn + flux_bio, flux_bio_ai, fnit, fsil, famm, fdmsp, fdms, fhum, & + fdust, falgalN, fdoc, fdic, fdon, ffep, ffed, bgcflux_ice_to_ocn use ice_grid, only: tmask use ice_state, only: aicen, aice use ice_state, only: aice_init @@ -579,8 +576,6 @@ subroutine coupling_prep (iblk) fsalt_ai (i,j,iblk) = fsalt (i,j,iblk) fhocn_ai (i,j,iblk) = fhocn (i,j,iblk) fswthru_ai(i,j,iblk) = fswthru(i,j,iblk) - fzsal_ai (i,j,iblk) = fzsal (i,j,iblk) - fzsal_g_ai(i,j,iblk) = fzsal_g(i,j,iblk) if (nbtrcr > 0) then do k = 1, nbtrcr @@ -626,7 +621,6 @@ subroutine coupling_prep (iblk) faero_ocn(:,:,:,iblk), & alvdr (:,:,iblk), alidr (:,:,iblk), & alvdf (:,:,iblk), alidf (:,:,iblk), & - fzsal (:,:,iblk), fzsal_g (:,:,iblk), & flux_bio (:,:,1:nbtrcr,iblk), & Qref_iso =Qref_iso (:,:,:,iblk), & fiso_evap=fiso_evap(:,:,:,iblk), & From 84427a6d5a73b764d89d4092e9d8b56da475e7f5 Mon Sep 17 00:00:00 2001 From: "denise.worthen" Date: Wed, 23 Aug 2023 21:53:29 +0000 Subject: [PATCH 90/90] fix typo --- cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 index 86f3b2848..270e7b371 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 @@ -243,7 +243,7 @@ subroutine init_restart() logical(kind=log_kind) :: & tr_iage, tr_FY, tr_lvl, tr_pond_lvl, & tr_pond_topo, tr_fsd, tr_iso, tr_aero, tr_brine, tr_snow, & - skl_bgc, z_tracer + skl_bgc, z_tracers integer(kind=int_kind) :: & ntrcr integer(kind=int_kind) :: &