!***********************************************************************
!* GNU Lesser General Public License
!*
!* This file is part of the FV3 dynamical core.
!*
!* The FV3 dynamical core is free software: you can redistribute it
!* and/or modify it under the terms of the
!* GNU Lesser General Public License as published by the
!* Free Software Foundation, either version 3 of the License, or
!* (at your option) any later version.
!*
!* The FV3 dynamical core is distributed in the hope that it will be
!* useful, but WITHOUT ANY WARRANTY; without even the implied warranty
!* of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
!* See the GNU General Public License for more details.
!*
!* You should have received a copy of the GNU Lesser General Public
!* License along with the FV3 dynamical core.
!* If not, see <http://www.gnu.org/licenses/>.
!***********************************************************************
!>@brief The module 'fv_control_stub_mod' is for initialization and termination
!! of the model, and controls namelist parameters in FV3.
!----------------
! FV control panel
!----------------
module fv_control_stub_mod
! Modules Included:
! <table>
! <tr>
! <th>Module Name</th>
! <th>Functions Included</th>
! </tr>
! <tr>
! <td>constants_mod</td>
! <td>pi=>pi_8, kappa, radius, grav, rdgas</td>
! </tr>
! <tr>
! <td>field_manager_mod</td>
! <td>MODEL_ATMOS</td>
! </tr>
! <tr>
! <td>fms_mod</td>
! <td>write_version_number, open_namelist_file,
! check_nml_error, close_file, file_exist</td>
! </tr>
! <tr>
! <td>fv_arrays_mod</td>
! <td>fv_atmos_type, allocate_fv_atmos_type, deallocate_fv_atmos_type,
! R_GRID</td>
! </tr>
! <tr>
! <td>fv_diagnostics_mod</td>
! <td>fv_diag_init_gn</td>
! </tr>
! <tr>
! <td>fv_eta_mod</td>
! <td>set_eta</td>
! </tr>
! <tr>
! <td>fv_grid_tools_mod</td>
! <td>init_grid</td>
! </tr>
! <tr>
! <td>fv_grid_utils_mod</td>
! <td>grid_utils_init, grid_utils_end, ptop_min</td>
! </tr>
! <tr>
! <td>fv_mp_mod</td>
! <td>mp_start, mp_assign_gid, domain_decomp,ng, switch_current_Atm,
! broadcast_domains, mp_barrier, is_master, setup_master </td>
! </tr>
! <tr>
! <td>fv_io_mod</td>
! <td>fv_io_exit</td>
! </tr>
! <tr>
! <td>fv_restart_mod</td>
! <td>fv_restart_init, fv_restart_end</td>
! </tr>
! <tr>
! <td>fv_timing_mod</td>
! <td>timing_on, timing_off, timing_init, timing_prt</td>
! </tr>
! <tr>
! <td>mpp_mod</td>
! <td>mpp_send, mpp_sync, mpp_transmit, mpp_set_current_pelist, mpp_declare_pelist,
! mpp_root_pe, mpp_recv, mpp_sync_self, mpp_broadcast, read_input_nml,
! FATAL, mpp_error, mpp_pe, stdlog, mpp_npes, mpp_get_current_pelist,
! input_nml_file, get_unit, WARNING, read_ascii_file, INPUT_STR_LENGTH</td>
! </tr>
! <tr>
! <td>mpp_domains_mod</td>
! <td>mpp_get_data_domain, mpp_get_compute_domain, domain2D, mpp_define_nest_domains,
! nest_domain_type, mpp_get_global_domain, mpp_get_C2F_index, mpp_get_F2C_index,
! mpp_broadcast_domain, CENTER, CORNER, NORTH, EAST, WEST, SOUTH</td>
! </tr>
! <tr>
! <td>mpp_parameter_mod</td>
! <td>AGRID_PARAM=>AGRID</td>
! </tr>
! <tr>
! <td>test_cases_mod</td>
! <td>test_case, bubble_do, alpha, nsolitons, soliton_Umax, soliton_size</td>
! </tr>
! <tr>
! <td>tracer_manager_mod</td>
! <td>tm_get_number_tracers => get_number_tracers,tm_get_tracer_index => get_tracer_index,
! tm_get_tracer_indices => get_tracer_indices, tm_set_tracer_profile => set_tracer_profile,
! tm_get_tracer_names => get_tracer_names,tm_check_if_prognostic=> check_if_prognostic,
! tm_register_tracers => register_tracers</td>
! </tr>
! </table>
use constants_mod, only: pi=>pi_8, kappa, radius, grav, rdgas
use field_manager_mod, only: MODEL_ATMOS
use fms_mod, only: write_version_number, open_namelist_file, &
check_nml_error, close_file, file_exist
use mpp_mod, only: FATAL, mpp_error, mpp_pe, stdlog, &
mpp_npes, mpp_get_current_pelist, &
input_nml_file, get_unit, WARNING, &
read_ascii_file, INPUT_STR_LENGTH
use mpp_domains_mod, only: mpp_get_data_domain, mpp_get_compute_domain
use tracer_manager_mod, only: tm_get_number_tracers => get_number_tracers, &
tm_get_tracer_index => get_tracer_index, &
tm_get_tracer_indices => get_tracer_indices, &
tm_set_tracer_profile => set_tracer_profile, &
tm_get_tracer_names => get_tracer_names, &
tm_check_if_prognostic=> check_if_prognostic,&
tm_register_tracers => register_tracers
use fv_io_mod, only: fv_io_exit
use fv_restart_mod, only: fv_restart_init, fv_restart_end
use fv_arrays_mod, only: fv_atmos_type, allocate_fv_atmos_type, deallocate_fv_atmos_type, &
R_GRID
use fv_grid_utils_mod, only: grid_utils_init, grid_utils_end, ptop_min
use fv_eta_mod, only: set_eta
use fv_grid_tools_mod, only: init_grid
use fv_mp_mod, only: mp_start, mp_assign_gid, domain_decomp
use fv_mp_mod, only: ng, switch_current_Atm
use fv_mp_mod, only: broadcast_domains, mp_barrier, is_master, setup_master
!!! CLEANUP: should be replaced by a getter function?
use test_cases_mod, only: test_case, bubble_do, alpha, nsolitons, soliton_Umax, soliton_size
use fv_timing_mod, only: timing_on, timing_off, timing_init, timing_prt
use mpp_domains_mod, only: domain2D
use mpp_domains_mod, only: mpp_define_nest_domains, nest_domain_type, mpp_get_global_domain
use mpp_domains_mod, only: mpp_get_C2F_index, mpp_get_F2C_index, mpp_broadcast_domain
use mpp_domains_mod, only: CENTER, CORNER, NORTH, EAST, WEST, SOUTH
use mpp_mod, only: mpp_send, mpp_sync, mpp_transmit, mpp_set_current_pelist, mpp_declare_pelist, mpp_root_pe, mpp_recv, mpp_sync_self, mpp_broadcast, read_input_nml
use fv_diagnostics_mod, only: fv_diag_init_gn
#ifdef MULTI_GASES
use constants_mod, only: rvgas, cp_air
use multi_gases_mod, only: multi_gases_init, &
rilist => ri, &
cpilist => cpi
#endif
implicit none
private
public setup_pointers
!-----------------------------------------------------------------------
! Grid descriptor file setup
!-----------------------------------------------------------------------
!------------------------------------------
! Model Domain parameters
! See fv_arrays.F90 for descriptions
!------------------------------------------
!CLEANUP module pointers
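! These module-level pointers are aimed at the active grid's flag fields by
! 'setup_pointers' (typically right after 'switch_current_Atm'), so the
! namelist reads in 'run_setup' below fill whichever Atm(n) is current.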
character(len=80) , pointer :: grid_name
character(len=120), pointer :: grid_file
integer, pointer :: grid_type
integer , pointer :: hord_mt
integer , pointer :: kord_mt
integer , pointer :: kord_wz
integer , pointer :: hord_vt
integer , pointer :: hord_tm
integer , pointer :: hord_dp
integer , pointer :: kord_tm
integer , pointer :: hord_tr
integer , pointer :: kord_tr
real , pointer :: scale_z
real , pointer :: w_max
real , pointer :: z_min
real , pointer :: lim_fac
integer , pointer :: nord
integer , pointer :: nord_tr
real , pointer :: dddmp
real , pointer :: d2_bg
real , pointer :: d4_bg
real , pointer :: vtdm4
real , pointer :: trdm2
real , pointer :: d2_bg_k1
real , pointer :: d2_bg_k2
real , pointer :: d2_divg_max_k1
real , pointer :: d2_divg_max_k2
real , pointer :: damp_k_k1
real , pointer :: damp_k_k2
integer , pointer :: n_zs_filter
integer , pointer :: nord_zs_filter
logical , pointer :: full_zs_filter
logical , pointer :: RF_fast
logical , pointer :: consv_am
logical , pointer :: do_sat_adj
logical , pointer :: do_f3d
logical , pointer :: no_dycore
logical , pointer :: convert_ke
logical , pointer :: do_vort_damp
logical , pointer :: use_old_omega
! PG off centering:
real , pointer :: beta
integer , pointer :: n_sponge
real , pointer :: d_ext
integer , pointer :: nwat
logical , pointer :: warm_start
logical , pointer :: inline_q
real , pointer :: shift_fac
logical , pointer :: do_schmidt
real(kind=R_GRID) , pointer :: stretch_fac
real(kind=R_GRID) , pointer :: target_lat
real(kind=R_GRID) , pointer :: target_lon
logical , pointer :: reset_eta
real , pointer :: p_fac
real , pointer :: a_imp
integer , pointer :: n_split
real , pointer :: fac_n_spl
real , pointer :: fhouri
! Default
integer , pointer :: m_split
integer , pointer :: k_split
logical , pointer :: use_logp
integer , pointer :: q_split
integer , pointer :: print_freq
logical , pointer :: write_3d_diags
integer , pointer :: npx
integer , pointer :: npy
integer , pointer :: npz
integer , pointer :: npz_rst
integer , pointer :: ncnst
integer , pointer :: pnats
integer , pointer :: dnats
integer , pointer :: ntiles
integer , pointer :: nf_omega
integer , pointer :: fv_sg_adj
integer , pointer :: na_init
logical , pointer :: nudge_dz
real , pointer :: p_ref
real , pointer :: dry_mass
integer , pointer :: nt_prog
integer , pointer :: nt_phys
real , pointer :: tau_h2o
real , pointer :: delt_max
real , pointer :: d_con
real , pointer :: ke_bg
real , pointer :: consv_te
real , pointer :: tau
real , pointer :: rf_cutoff
logical , pointer :: filter_phys
logical , pointer :: dwind_2d
logical , pointer :: breed_vortex_inline
logical , pointer :: range_warn
logical , pointer :: fill
logical , pointer :: fill_dp
logical , pointer :: fill_wz
logical , pointer :: check_negative
logical , pointer :: non_ortho
logical , pointer :: adiabatic
logical , pointer :: moist_phys
logical , pointer :: do_Held_Suarez
logical , pointer :: do_reed_physics
logical , pointer :: reed_cond_only
logical , pointer :: reproduce_sum
logical , pointer :: adjust_dry_mass
logical , pointer :: fv_debug
logical , pointer :: srf_init
logical , pointer :: mountain
logical , pointer :: remap_t
logical , pointer :: z_tracer
logical , pointer :: old_divg_damp
logical , pointer :: fv_land
logical , pointer :: nudge
logical , pointer :: nudge_ic
logical , pointer :: ncep_ic
logical , pointer :: nggps_ic
logical , pointer :: ecmwf_ic
logical , pointer :: gfs_phil
logical , pointer :: agrid_vel_rst
logical , pointer :: use_new_ncep
logical , pointer :: use_ncep_phy
logical , pointer :: fv_diag_ic
logical , pointer :: external_ic
logical , pointer :: external_eta
logical , pointer :: read_increment
character(len=128) , pointer :: res_latlon_dynamics
character(len=128) , pointer :: res_latlon_tracers
logical , pointer :: hydrostatic
logical , pointer :: phys_hydrostatic
logical , pointer :: use_hydro_pressure
logical , pointer :: do_uni_zfull !miz
logical , pointer :: adj_mass_vmr ! f1p
logical , pointer :: hybrid_z
logical , pointer :: Make_NH
logical , pointer :: make_hybrid_z
logical , pointer :: nudge_qv
real, pointer :: add_noise
integer , pointer :: a2b_ord
integer , pointer :: c2l_ord
integer, pointer :: ndims
real(kind=R_GRID), pointer :: dx_const
real(kind=R_GRID), pointer :: dy_const
real(kind=R_GRID), pointer :: deglon_start, deglon_stop, & ! boundaries of latlon patch
deglat_start, deglat_stop
real(kind=R_GRID), pointer :: deglat
logical, pointer :: nested, twowaynest
logical, pointer :: regional
integer, pointer :: bc_update_interval
integer, pointer :: parent_tile, refinement, nestbctype, nestupdate, nsponge, ioffset, joffset
real, pointer :: s_weight, update_blend
integer, pointer :: layout(:), io_layout(:)
integer :: ntilesMe ! Number of tiles on this process =1 for now
#ifdef OVERLOAD_R4
real :: too_big = 1.E8
#else
real :: too_big = 1.E35
#endif
public :: fv_init
integer, public :: ngrids = 1
integer, public, allocatable :: pelist_all(:)
integer :: commID, max_refinement_of_global = 1
integer :: gid
real :: umax = 350. !< max wave speed for grid_type>3
integer :: parent_grid_num = -1
integer :: halo_update_type = 1 !< 1 for two-interfaces non-block
!< 2 for block
!< 3 for four-interfaces non-block
! version number of this module
! Include variable "version" to be written to log file.
#include<file_version.h>
contains
!-------------------------------------------------------------------------------
!>@brief The subroutine 'fv_init' initializes FV3.
!>@details It allocates memory, sets up MPI and processor lists,
!! sets up the grid, and controls FV3 namelist parameters.
subroutine fv_init(Atm, dt_atmos, grids_on_this_pe, p_split)
type(fv_atmos_type), allocatable, intent(inout), target :: Atm(:)
real, intent(in) :: dt_atmos
logical, allocatable, intent(INOUT) :: grids_on_this_pe(:)
integer, intent(INOUT) :: p_split
integer :: i, j, k, n, p
real :: sdt
! tracers
integer :: num_family !< output of register_tracers
integer :: isc_p, iec_p, jsc_p, jec_p, isg, ieg, jsg, jeg, upoff, jind
integer :: ic, jc
gid = mpp_pe()
call init_nesting(Atm, grids_on_this_pe, p_split)
!This call is needed to set up the pointers for fv_current_grid, even for a single-grid run
call switch_current_Atm(Atm(1), .false.)
call setup_pointers(Atm(1))
! Start up MPI
!call mp_assign_gid
! Initialize timing routines
call timing_init
call timing_on('TOTAL')
! Setup the run from namelist
ntilesMe = size(Atm(:)) !Full number of Atm arrays; one less than number of grids, if multiple grids
call run_setup(Atm,dt_atmos, grids_on_this_pe, p_split) ! initializes domain_decomp
do n=1,ntilesMe
!In a single-grid run this will still be needed to correctly set the domain
call switch_current_Atm(Atm(n))
call setup_pointers(Atm(n))
target_lon = target_lon * pi/180.
target_lat = target_lat * pi/180.
if (grids_on_this_pe(n)) then
call allocate_fv_atmos_type(Atm(n), Atm(n)%bd%isd, Atm(n)%bd%ied, Atm(n)%bd%jsd, Atm(n)%bd%jed, &
Atm(n)%bd%isc, Atm(n)%bd%iec, Atm(n)%bd%jsc, Atm(n)%bd%jec, &
npx, npy, npz, ndims, ncnst, ncnst-pnats, ng, .false., grids_on_this_pe(n), ngrids)
if (grids_on_this_pe(n)) then
call switch_current_Atm(Atm(n))
call setup_pointers(Atm(n))
if ( (Atm(n)%bd%iec-Atm(n)%bd%isc+1).lt.4 .or. (Atm(n)%bd%jec-Atm(n)%bd%jsc+1).lt.4 ) then
if (is_master()) write(*,'(6I6)') Atm(n)%bd%isc, Atm(n)%bd%iec, Atm(n)%bd%jsc, Atm(n)%bd%jec, n
call mpp_error(FATAL,'Domain Decomposition: Cubed Sphere compute domain has a &
&minimum requirement of 4 points in X and Y, respectively')
end if
endif
!!CLEANUP: Convenience pointers
Atm(n)%gridstruct%nested => Atm(n)%neststruct%nested
Atm(n)%gridstruct%grid_type => Atm(n)%flagstruct%grid_type
Atm(n)%flagstruct%grid_number => Atm(n)%grid_number
Atm(n)%gridstruct%regional => Atm(n)%flagstruct%regional
call init_grid(Atm(n), grid_name, grid_file, npx, npy, npz, ndims, ntiles, ng)
! Initialize the SW (2D) part of the model
!!!CLEANUP: this call could definitely use some cleaning up
call grid_utils_init(Atm(n), npx, npy, npz, non_ortho, grid_type, c2l_ord)
!!!CLEANUP: Are these correctly writing out on all pes?
if ( is_master() ) then
sdt = dt_atmos/real(n_split*k_split*abs(p_split))
write(*,*) ' '
write(*,*) 'Divergence damping Coefficients'
write(*,*) 'For small dt=', sdt
write(*,*) 'External mode del-2 (m**2/s)=', d_ext*Atm(n)%gridstruct%da_min_c/sdt
write(*,*) 'Internal mode del-2 SMAG dimensionless coeff=', dddmp
write(*,*) 'Internal mode del-2 background diff=', d2_bg*Atm(n)%gridstruct%da_min_c/sdt
if (nord==1) then
write(*,*) 'Internal mode del-4 background diff=', d4_bg
write(*,*) 'Vorticity del-4 (m**4/s)=', (vtdm4*Atm(n)%gridstruct%da_min)**2/sdt*1.E-6
endif
if (nord==2) write(*,*) 'Internal mode del-6 background diff=', d4_bg
if (nord==3) write(*,*) 'Internal mode del-8 background diff=', d4_bg
write(*,*) 'tracer del-2 diff=', trdm2
write(*,*) 'Vorticity del-4 (m**4/s)=', (vtdm4*Atm(n)%gridstruct%da_min)**2/sdt*1.E-6
write(*,*) 'beta=', beta
write(*,*) ' '
endif
Atm(n)%ts = 300.
Atm(n)%phis = too_big
! The following statements are to prevent the phantom corner regions from
! developing instabilities
Atm(n)%u = 0.
Atm(n)%v = 0.
Atm(n)%ua = too_big
Atm(n)%va = too_big
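! Note: 'too_big' (1.E35, or 1.E8 when compiled with OVERLOAD_R4) acts as a
! deliberate poison value, so any premature use of these fields produces
! obviously unphysical numbers rather than silently plausible ones.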
else !this grid is NOT defined on this pe
!Allocate dummy arrays
call allocate_fv_atmos_type(Atm(n), Atm(n)%bd%isd, Atm(n)%bd%ied, Atm(n)%bd%jsd, Atm(n)%bd%jed, &
Atm(n)%bd%isc, Atm(n)%bd%iec, Atm(n)%bd%jsc, Atm(n)%bd%jec, &
npx, npy, npz, ndims, ncnst, ncnst-pnats, ng, .true., .false., ngrids)
!Need to SEND grid_global to any child grids; this is received in setup_aligned_nest in fv_grid_tools
if (Atm(n)%neststruct%nested) then
call mpp_get_global_domain( Atm(n)%parent_grid%domain, &
isg, ieg, jsg, jeg)
!FIXME: Should replace this by generating the global grid (or at least one face thereof) on the
! nested PEs instead of sending it around.
if (gid == Atm(n)%parent_grid%pelist(1)) then
call mpp_send(Atm(n)%parent_grid%grid_global(isg-ng:ieg+1+ng,jsg-ng:jeg+1+ng,1:2,parent_tile), &
size(Atm(n)%parent_grid%grid_global(isg-ng:ieg+1+ng,jsg-ng:jeg+1+ng,1:2,parent_tile)), &
Atm(n)%pelist(1)) !send to p_ind in setup_aligned_nest
call mpp_sync_self()
endif
if (Atm(n)%neststruct%twowaynest) then
!This in reality should be very simple. With the
! restriction that only the compute domain data is
! sent from the coarse grid, we can compute
! exactly which coarse grid cells should use
! which nested-grid data. We then don't need to send around p_ind.
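! As a worked illustration (hypothetical numbers): with refinement = 3,
! joffset = 11, upoff = 1 and a nest with npy = 49, (npy-1)/refinement = 16,
! so coarse rows j = 12..26 receive nest data, and e.g. j = 12 maps to
! jind = (12-11)*3 + 1 = 4 on the nested grid.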
Atm(n)%neststruct%ind_update_h = -99999
if (Atm(n)%parent_grid%tile == Atm(n)%neststruct%parent_tile) then
isc_p = Atm(n)%parent_grid%bd%isc
iec_p = Atm(n)%parent_grid%bd%iec
jsc_p = Atm(n)%parent_grid%bd%jsc
jec_p = Atm(n)%parent_grid%bd%jec
upoff = Atm(n)%neststruct%upoff
Atm(n)%neststruct%jsu = jsc_p
Atm(n)%neststruct%jeu = jsc_p-1
do j=jsc_p,jec_p+1
if (j < joffset+upoff) then
do i=isc_p,iec_p+1
Atm(n)%neststruct%ind_update_h(i,j,2) = -9999
enddo
Atm(n)%neststruct%jsu = Atm(n)%neststruct%jsu + 1
elseif (j > joffset + (npy-1)/refinement - upoff) then
do i=isc_p,iec_p+1
Atm(n)%neststruct%ind_update_h(i,j,2) = -9999
enddo
else
jind = (j - joffset)*refinement + 1
do i=isc_p,iec_p+1
Atm(n)%neststruct%ind_update_h(i,j,2) = jind
enddo
if ( (j < joffset + (npy-1)/refinement - upoff) .and. j <= jec_p) Atm(n)%neststruct%jeu = j
endif
!write(mpp_pe()+4000,*) j, joffset, upoff, Atm(n)%neststruct%ind_update_h(isc_p,j,2)
enddo
Atm(n)%neststruct%isu = isc_p
Atm(n)%neststruct%ieu = isc_p-1
do i=isc_p,iec_p+1
if (i < ioffset+upoff) then
Atm(n)%neststruct%ind_update_h(i,:,1) = -9999
Atm(n)%neststruct%isu = Atm(n)%neststruct%isu + 1
elseif (i > ioffset + (npx-1)/refinement - upoff) then
Atm(n)%neststruct%ind_update_h(i,:,1) = -9999
else
Atm(n)%neststruct%ind_update_h(i,:,1) = (i-ioffset)*refinement + 1
if ( (i < ioffset + (npx-1)/refinement - upoff) .and. i <= iec_p) Atm(n)%neststruct%ieu = i
end if
!write(mpp_pe()+5000,*) i, ioffset, upoff, Atm(n)%neststruct%ind_update_h(i,jsc_p,1)
enddo
end if
end if
endif
endif
end do
if (ntilesMe > 1) call switch_current_Atm(Atm(1))
if (ntilesMe > 1) call setup_pointers(Atm(1))
end subroutine fv_init
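! A minimal driver-side calling sketch (hypothetical; the real driver lives
! outside this stub). Atm and grids_on_this_pe are allocated inside fv_init
! (via init_nesting):
!   type(fv_atmos_type), allocatable, target :: Atm(:)
!   logical, allocatable :: grids_on_this_pe(:)
!   integer :: p_split
!   p_split = 1
!   call fv_init(Atm, 1800., grids_on_this_pe, p_split)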
!-------------------------------------------------------------------------------
!>@brief The subroutine 'run_setup' initializes the run from a namelist.
subroutine run_setup(Atm, dt_atmos, grids_on_this_pe, p_split)
type(fv_atmos_type), intent(inout), target :: Atm(:)
real, intent(in) :: dt_atmos
logical, intent(INOUT) :: grids_on_this_pe(:)
integer, intent(INOUT) :: p_split
!--- local variables ---
character(len=80) :: tracerName, errString
character(len=32) :: nested_grid_filename
integer :: ios, ierr, f_unit, unit
logical :: exists
real :: dim0 = 180. !< base dimension
real :: dt0 = 1800. !< base time step
real :: ns0 = 5. !< base nsplit for base dimension
!< For cubed sphere 5 is better
!real :: umax = 350. ! max wave speed for grid_type>3 ! Now defined above
real :: dimx, dl, dp, dxmin, dymin, d_fac
integer :: n0split
integer :: n, nn, i
integer :: pe_counter
! local version of these variables to allow PGI compiler to compile
character(len=128) :: res_latlon_dynamics = ''
character(len=128) :: res_latlon_tracers = ''
character(len=80) :: grid_name = ''
character(len=120) :: grid_file = ''
namelist /fv_grid_nml/ grid_name, grid_file
namelist /fv_core_nml/npx, npy, ntiles, npz, npz_rst, layout, io_layout, ncnst, nwat, &
use_logp, p_fac, a_imp, k_split, n_split, m_split, q_split, print_freq, write_3d_diags, do_schmidt, &
hord_mt, hord_vt, hord_tm, hord_dp, hord_tr, shift_fac, stretch_fac, target_lat, target_lon, &
kord_mt, kord_wz, kord_tm, kord_tr, fv_debug, fv_land, nudge, do_sat_adj, do_f3d, &
external_ic, read_increment, ncep_ic, nggps_ic, ecmwf_ic, use_new_ncep, use_ncep_phy, fv_diag_ic, &
external_eta, res_latlon_dynamics, res_latlon_tracers, scale_z, w_max, z_min, lim_fac, &
dddmp, d2_bg, d4_bg, vtdm4, trdm2, d_ext, delt_max, beta, non_ortho, n_sponge, &
warm_start, adjust_dry_mass, mountain, d_con, ke_bg, nord, nord_tr, convert_ke, use_old_omega, &
dry_mass, grid_type, do_Held_Suarez, do_reed_physics, reed_cond_only, &
consv_te, fill, filter_phys, fill_dp, fill_wz, consv_am, RF_fast, &
range_warn, dwind_2d, inline_q, z_tracer, reproduce_sum, adiabatic, do_vort_damp, no_dycore, &
tau, tau_h2o, rf_cutoff, nf_omega, hydrostatic, fv_sg_adj, breed_vortex_inline, &
na_init, nudge_dz, hybrid_z, Make_NH, n_zs_filter, nord_zs_filter, full_zs_filter, reset_eta, &
pnats, dnats, a2b_ord, remap_t, p_ref, d2_bg_k1, d2_bg_k2, &
c2l_ord, dx_const, dy_const, umax, deglat, &
deglon_start, deglon_stop, deglat_start, deglat_stop, &
phys_hydrostatic, use_hydro_pressure, make_hybrid_z, old_divg_damp, add_noise, &
nested, twowaynest, parent_grid_num, parent_tile, nudge_qv, &
refinement, nestbctype, nestupdate, nsponge, s_weight, &
ioffset, joffset, check_negative, nudge_ic, halo_update_type, gfs_phil, agrid_vel_rst, &
do_uni_zfull, adj_mass_vmr, fac_n_spl, fhouri, regional, bc_update_interval
namelist /test_case_nml/test_case, bubble_do, alpha, nsolitons, soliton_Umax, soliton_size
#ifdef MULTI_GASES
namelist /multi_gases_nml/ rilist,cpilist
#endif
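! For orientation, an illustrative input.nml fragment consumed by the reads
! below (values hypothetical, not recommendations):
!   &fv_grid_nml
!     grid_name = 'Gnomonic'
!   /
!   &fv_core_nml
!     npx = 97, npy = 97, npz = 64, ntiles = 6
!     layout = 4,8
!     hydrostatic = .false.
!     n_split = 0      ! 0 => run_setup chooses n_split automatically
!   /
!   &test_case_nml
!     test_case = 11
!   /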
pe_counter = mpp_root_pe()
! Make alpha = 0 the default:
alpha = 0.
bubble_do = .false.
test_case = 11 ! (USGS terrain)
#ifdef INTERNAL_FILE_NML
! Read Main namelist
read (input_nml_file,fv_grid_nml,iostat=ios)
ierr = check_nml_error(ios,'fv_grid_nml')
#else
f_unit=open_namelist_file()
rewind (f_unit)
! Read Main namelist
read (f_unit,fv_grid_nml,iostat=ios)
ierr = check_nml_error(ios,'fv_grid_nml')
call close_file(f_unit)
#endif
call write_version_number ( 'FV_CONTROL_MOD', version )
unit = stdlog()
write(unit, nml=fv_grid_nml)
do n=1,size(Atm)
call switch_current_Atm(Atm(n), .false.)
call setup_pointers(Atm(n))
Atm(n)%grid_number = n
if (grids_on_this_pe(n)) then
call fv_diag_init_gn(Atm(n))
endif
#ifdef INTERNAL_FILE_NML
! Set input_file_nml for correct parent/nest initialization
if (n > 1) then
write(nested_grid_filename,'(A4, I2.2)') 'nest', n
call read_input_nml(nested_grid_filename)
endif
! Read FVCORE namelist
read (input_nml_file,fv_core_nml,iostat=ios)
ierr = check_nml_error(ios,'fv_core_nml')
#ifdef MULTI_GASES
if( is_master() ) print *,' enter multi_gases: ncnst = ',ncnst
allocate (rilist(0:ncnst))
allocate (cpilist(0:ncnst))
rilist = 0.0
cpilist = 0.0
rilist(0) = rdgas
rilist(1) = rvgas
cpilist(0) = cp_air
cpilist(1) = 4*cp_air
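! Index 0 carries dry-air properties and index 1 water vapor; the remaining
! entries 2:ncnst stay zero unless overridden by multi_gases_nml below.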
! Read multi_gases namelist
read (input_nml_file,multi_gases_nml,iostat=ios)
ierr = check_nml_error(ios,'multi_gases_nml')
#endif
! Read Test_Case namelist
read (input_nml_file,test_case_nml,iostat=ios)
ierr = check_nml_error(ios,'test_case_nml')
! Reset input_file_nml to default behavior
call read_input_nml
#else
if (size(Atm) == 1) then
f_unit = open_namelist_file()
else if (n == 1) then
f_unit = open_namelist_file('input.nml')
else
write(nested_grid_filename,'(A10, I2.2, A4)') 'input_nest', n, '.nml'
f_unit = open_namelist_file(nested_grid_filename)
endif
! Read FVCORE namelist
read (f_unit,fv_core_nml,iostat=ios)
ierr = check_nml_error(ios,'fv_core_nml')
#ifdef MULTI_GASES
if( is_master() ) print *,' enter multi_gases: ncnst = ',ncnst
allocate (rilist(0:ncnst))
allocate (cpilist(0:ncnst))
rilist = 0.0
cpilist = 0.0
rilist(0) = rdgas
rilist(1) = rvgas
cpilist(0) = cp_air
cpilist(1) = 4*cp_air
! Read multi_gases namelist
rewind (f_unit)
read (f_unit,multi_gases_nml,iostat=ios)
ierr = check_nml_error(ios,'multi_gases_nml')
#endif
! Read Test_Case namelist
rewind (f_unit)
read (f_unit,test_case_nml,iostat=ios)
ierr = check_nml_error(ios,'test_case_nml')
call close_file(f_unit)
#endif
write(unit, nml=fv_core_nml)
write(unit, nml=test_case_nml)
#ifdef MULTI_GASES
write(unit, nml=multi_gases_nml)
call multi_gases_init(ncnst,nwat)
#endif
if (len_trim(grid_file) /= 0) Atm(n)%flagstruct%grid_file = grid_file
if (len_trim(grid_name) /= 0) Atm(n)%flagstruct%grid_name = grid_name
if (len_trim(res_latlon_dynamics) /= 0) Atm(n)%flagstruct%res_latlon_dynamics = res_latlon_dynamics
if (len_trim(res_latlon_tracers) /= 0) Atm(n)%flagstruct%res_latlon_tracers = res_latlon_tracers
!*** single tile for Cartesian grids
if (grid_type>3) then
ntiles=1
non_ortho = .false.
nf_omega = 0
endif
if (.not. (nested .or. regional)) Atm(n)%neststruct%npx_global = npx
! Define n_split if not in namelist
if (ntiles == 6) then
dimx = 4.0*(npx-1)
if ( hydrostatic ) then
if ( npx >= 120 ) ns0 = 6
else
if ( npx <= 45 ) then
ns0 = 6
elseif ( npx <= 90 ) then
ns0 = 7
else
ns0 = 8
endif
endif
else
dimx = max ( npx, 2*(npy-1) )
endif
if (grid_type < 4) then
n0split = nint ( ns0*abs(dt_atmos)*dimx/(dt0*dim0) + 0.49 )
elseif (grid_type == 4 .or. grid_type == 7) then
n0split = nint ( 2.*umax*dt_atmos/sqrt(dx_const**2 + dy_const**2) + 0.49 )
elseif (grid_type == 5 .or. grid_type == 6) then
if (grid_type == 6) then
deglon_start = 0.; deglon_stop = 360.
endif
dl = (deglon_stop-deglon_start)*pi/(180.*(npx-1))
dp = (deglat_stop-deglat_start)*pi/(180.*(npy-1))
dxmin=dl*radius*min(cos(deglat_start*pi/180.-ng*dp), &
cos(deglat_stop *pi/180.+ng*dp))
dymin=dp*radius
n0split = nint ( 2.*umax*dt_atmos/sqrt(dxmin**2 + dymin**2) + 0.49 )
endif
n0split = max ( 1, n0split )
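! Worked example of the cubed-sphere branch above (illustrative only): a
! hydrostatic C96 cube has npx = npy = 97, hence dimx = 4*(npx-1) = 384 and
! ns0 = 5; with dt_atmos = 1800 s,
!   n0split = nint( 5.*1800.*384./(1800.*180.) + 0.49 ) = nint(11.16) = 11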
if ( n_split == 0 ) then
n_split = nint( real(n0split)/real(k_split*abs(p_split)) * stretch_fac + 0.5 )
if(is_master()) write(*,*) 'For k_split (remapping)=', k_split
if(is_master()) write(*,198) 'n_split is set to ', n_split, ' for resolution-dt=',npx,npy,ntiles,dt_atmos
else
if(is_master()) write(*,199) 'Using n_split from the namelist: ', n_split
endif
if (is_master() .and. n == 1 .and. abs(p_split) > 1) then
write(*,199) 'Using p_split = ', p_split
endif
if (Atm(n)%neststruct%nested) then
do i=1,n-1
if (Atm(i)%grid_number == parent_grid_num) then
Atm(n)%parent_grid => Atm(i)
exit
end if
end do
if (.not. associated(Atm(n)%parent_grid)) then
write(errstring,'(2(A,I3))') "Could not find parent grid #", parent_grid_num, ' for grid #', n
call mpp_error(FATAL, errstring)
end if
!Note that if a gnomonic grid has a parent it is a NESTED gnomonic grid and therefore only has one tile
if ( Atm(n)%parent_grid%flagstruct%grid_type < 3 .and. &
.not. associated(Atm(n)%parent_grid%parent_grid)) then
if (parent_tile > 6 .or. parent_tile < 1) then
call mpp_error(FATAL, 'parent tile must be between 1 and 6 if the parent is a cubed-sphere grid')
end if
else
if (parent_tile /= 1) then
call mpp_error(FATAL, 'parent tile must be 1 if the parent is not a cubed-sphere grid')
end if
end if
if ( refinement < 1 ) call mpp_error(FATAL, 'grid refinement must be positive')
if (nestupdate == 1 .or. nestupdate == 2) then
if (mod(npx-1,refinement) /= 0 .or. mod(npy-1,refinement) /= 0) then
call mpp_error(WARNING, 'npx-1 or npy-1 is not evenly divisible by the refinement ratio; averaging update cannot be mass-conservative.')
end if
end if
if ( consv_te > 0.) then
call mpp_error(FATAL, 'The global energy fixer cannot be used on a nested grid. consv_te must be set to 0.')
end if
Atm(n)%neststruct%refinement_of_global = Atm(n)%neststruct%refinement * Atm(n)%parent_grid%neststruct%refinement_of_global
max_refinement_of_global = max(Atm(n)%neststruct%refinement_of_global,max_refinement_of_global)
Atm(n)%neststruct%npx_global = Atm(n)%neststruct%refinement * Atm(n)%parent_grid%neststruct%npx_global
else
Atm(n)%neststruct%ioffset = -999
Atm(n)%neststruct%joffset = -999
Atm(n)%neststruct%parent_tile = -1
Atm(n)%neststruct%refinement = -1
end if
if (Atm(n)%neststruct%nested) then
if (Atm(n)%flagstruct%grid_type >= 4 .and. Atm(n)%parent_grid%flagstruct%grid_type >= 4) then
Atm(n)%flagstruct%dx_const = Atm(n)%parent_grid%flagstruct%dx_const / real(Atm(n)%neststruct%refinement)
Atm(n)%flagstruct%dy_const = Atm(n)%parent_grid%flagstruct%dy_const / real(Atm(n)%neststruct%refinement)
end if
end if
!----------------------------------------
! Adjust divergence damping coefficients:
!----------------------------------------
! d_fac = real(n0split)/real(n_split)
! dddmp = dddmp * d_fac
! d2_bg = d2_bg * d_fac
! d4_bg = d4_bg * d_fac
! d_ext = d_ext * d_fac
! vtdm4 = vtdm4 * d_fac
if (old_divg_damp) then
if (is_master()) write(*,*) " fv_control: using original values for divergence damping "
d2_bg_k1 = 6. ! factor for d2_bg (k=1) - default(4.)
d2_bg_k2 = 4. ! factor for d2_bg (k=2) - default(2.)
d2_divg_max_k1 = 0.02 ! d2_divg max value (k=1) - default(0.05)
d2_divg_max_k2 = 0.01 ! d2_divg max value (k=2) - default(0.02)
damp_k_k1 = 0. ! damp_k value (k=1) - default(0.05)
damp_k_k2 = 0. ! damp_k value (k=2) - default(0.025)
elseif (n_sponge == 0 ) then
if ( d2_bg_k1 > 1. ) d2_bg_k1 = 0.20
if ( d2_bg_k2 > 1. ) d2_bg_k2 = 0.015
endif
! if ( beta < 1.e-5 ) beta = 0. ! beta < 0 is used for non-hydrostatic "one_grad_p"
if ( .not.hydrostatic ) then
if ( m_split==0 ) then
m_split = 1. + abs(dt_atmos)/real(k_split*n_split*abs(p_split))
if (abs(a_imp) < 0.5) then
if(is_master()) write(*,199) 'm_split is set to ', m_split
endif
endif
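! Arithmetic illustration of the line above (hypothetical values): with
! dt_atmos = 1800 s, k_split = 2, n_split = 6 and |p_split| = 1, the small
! (acoustic) step is 1800/(2*6*1) = 150 s and m_split becomes 1 + 150 = 151.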
if(is_master()) then
write(*,*) 'Off center implicit scheme param=', a_imp
write(*,*) ' p_fac=', p_fac
endif
endif
if(is_master()) then
if (n_sponge >= 0) write(*,199) 'Using n_sponge : ', n_sponge
write(*,197) 'Using non_ortho : ', non_ortho
endif
197 format(A,l7)
198 format(A,i2.2,A,i4.4,'x',i4.4,'x',i1.1,'-',f9.3)
199 format(A,i3.3)
if (.not. (nested .or. regional)) alpha = alpha*pi
allocate(Atm(n)%neststruct%child_grids(size(Atm)))
Atm(N)%neststruct%child_grids = .false.
!Broadcast data
!Check layout
enddo
!Set pelists
do n=1,size(Atm)
if (ANY(Atm(n)%pelist == gid)) then
call mpp_set_current_pelist(Atm(n)%pelist)
call mpp_get_current_pelist(Atm(n)%pelist, commID=commID)
call mp_start(commID,halo_update_type)
endif
if (Atm(n)%neststruct%nested) then
Atm(n)%neststruct%parent_proc = ANY(Atm(n)%parent_grid%pelist == gid)
Atm(n)%neststruct%child_proc = ANY(Atm(n)%pelist == gid)
endif
enddo
do n=1,size(Atm)
call switch_current_Atm(Atm(n),.false.)
call setup_pointers(Atm(n))
!! CLEANUP: WARNING not sure what changes to domain_decomp may cause
call domain_decomp(npx,npy,ntiles,grid_type,nested,Atm(n),layout,io_layout)
enddo
!!! CLEANUP: This sets the pelist to ALL, which is also
!!! required for the define_nest_domains step in the next loop.
!!! Later the pelist must be reset to the 'local' pelist.
call broadcast_domains(Atm)
do n=1,size(Atm)
call switch_current_Atm(Atm(n))
call setup_pointers(Atm(n))
if (nested) then
if (mod(npx-1 , refinement) /= 0 .or. mod(npy-1, refinement) /= 0) &
call mpp_error(FATAL, 'npx or npy not an even refinement of its coarse grid.')
!Pelist needs to be set to ALL (which should have been done
!in broadcast_domains) to get this to work
call mpp_define_nest_domains(Atm(n)%neststruct%nest_domain, Atm(n)%domain, Atm(parent_grid_num)%domain, &
7, parent_tile, &
1, npx-1, 1, npy-1, & !Grid cells, not points
ioffset, ioffset + (npx-1)/refinement - 1, &
joffset, joffset + (npy-1)/refinement - 1, &
(/ (i,i=0,mpp_npes()-1) /), extra_halo = 0, name="nest_domain") !What pelist to use?
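! Note: the coarse-grid range ioffset .. ioffset+(npx-1)/refinement-1 above
! is the nest's footprint on the parent tile, measured in parent-grid cells.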
! (/ (i,i=0,mpp_npes()-1) /), extra_halo = 2, name="nest_domain_for_BC") !What pelist to use?
Atm(parent_grid_num)%neststruct%child_grids(n) = .true.
if (Atm(n)%neststruct%nestbctype > 1) then
call mpp_error(FATAL, 'nestbctype > 1 not yet implemented')
!This check is due to a bug which has not yet been identified. Beware.
! if (Atm(n)%parent_grid%flagstruct%hord_tr == 7) &
! call mpp_error(FATAL, "Flux-form nested BCs (nestbctype > 1) should not use hord_tr == 7 (on parent grid), since there is no guarantee of tracer mass conservation with this option.")
!!$ if (Atm(n)%flagstruct%q_split > 0 .and. Atm(n)%parent_grid%flagstruct%q_split > 0) then
!!$ if (mod(Atm(n)%flagstruct%q_split,Atm(n)%parent_grid%flagstruct%q_split) /= 0) call mpp_error(FATAL, &
!!$ "Flux-form nested BCs (nestbctype > 1) require q_split on the nested grid to be evenly divisible by that on the coarse grid.")
!!$ endif
!!$ if (mod((Atm(n)%npx-1),Atm(n)%neststruct%refinement) /= 0 .or. mod((Atm(n)%npy-1),Atm(n)%neststruct%refinement) /= 0) call mpp_error(FATAL, &
!!$ "Flux-form nested BCs (nestbctype > 1) requires npx and npy to be one more than a multiple of the refinement ratio.")
!!$ Atm(n)%parent_grid%neststruct%do_flux_BCs = .true.
!!$ if (Atm(n)%neststruct%nestbctype == 3 .or. Atm(n)%neststruct%nestbctype == 4) Atm(n)%parent_grid%neststruct%do_2way_flux_BCs = .true.
Atm(n)%neststruct%upoff = 0
endif
end if
do nn=1,size(Atm)
if (n == 1) allocate(Atm(nn)%neststruct%nest_domain_all(size(Atm)))
Atm(nn)%neststruct%nest_domain_all(n) = Atm(n)%neststruct%nest_domain
enddo
end do
do n=1,size(Atm)
if (ANY(Atm(n)%pelist == gid)) then
call mpp_set_current_pelist(Atm(n)%pelist)
endif
enddo