FV3DYCORE  Version 2.0.0
fv_restart.F90
1 
2 !***********************************************************************
3 !* GNU Lesser General Public License
4 !*
5 !* This file is part of the FV3 dynamical core.
6 !*
7 !* The FV3 dynamical core is free software: you can redistribute it
8 !* and/or modify it under the terms of the
9 !* GNU Lesser General Public License as published by the
10 !* Free Software Foundation, either version 3 of the License, or
11 !* (at your option) any later version.
12 !*
13 !* The FV3 dynamical core is distributed in the hope that it will be
14 !* useful, but WITHOUT ANY WARRANTY; without even the implied warranty
15 !* of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
16 !* See the GNU Lesser General Public License for more details.
17 !*
18 !* You should have received a copy of the GNU Lesser General Public
19 !* License along with the FV3 dynamical core.
20 !* If not, see <http://www.gnu.org/licenses/>.
21 !***********************************************************************
23 
28 
29 ! <table>
30 ! <tr>
31 ! <th>Module Name</th>
32 ! <th>Functions Included</th>
33 ! </tr>
34 ! <tr>
35 ! <td>boundary_mod</td>
36 ! <td>fill_nested_grid, nested_grid_BC, update_coarse_grid</td>
37 ! </tr>
38 ! <tr>
39 ! <td>constants_mod</td>
40 ! <td>kappa, pi=>pi_8, omega, rdgas, grav, rvgas, cp_air, radius</td>
41 ! </tr>
42 ! <tr>
43 ! <td>external_ic_mod</td>
44 ! <td>get_external_ic, get_cubed_sphere_terrain</td>
45 ! </tr>
46 ! <tr>
47 ! <td>field_manager_mod</td>
48 ! <td>MODEL_ATMOS</td>
49 ! </tr>
50 ! <tr>
51 ! <td>fms_mod</td>
52 ! <td>file_exist</td>
53 ! </tr>
54 ! <tr>
55 ! <td>fv_arrays_mod</td>
56 ! <td>fv_atmos_type, fv_nest_type, fv_grid_bounds_type, R_GRID</td>
57 ! </tr>
58 ! <tr>
59 ! <td>fv_control_mod</td>
60 ! <td>fv_init, fv_end, ngrids</td>
61 ! </tr>
62 ! <tr>
63 ! <td>fv_diagnostics_mod</td>
64 ! <td>prt_maxmin</td>
65 ! </tr>
66 ! <tr>
67 ! <td>fv_eta_mod</td>
68 ! <td>compute_dz_var, compute_dz_L32, set_hybrid_z</td>
69 ! </tr>
70 ! <tr>
71 ! <td>fv_grid_utils_mod</td>
72 ! <td>ptop_min, fill_ghost, g_sum,
73 ! make_eta_level, cubed_to_latlon, great_circle_dist</td>
74 ! </tr>
75 ! <tr>
76 ! <td>fv_io_mod</td>
77 ! <td>fv_io_init, fv_io_read_restart, fv_io_write_restart,
78 ! remap_restart, fv_io_register_restart, fv_io_register_nudge_restart,
79 ! fv_io_register_restart_BCs, fv_io_register_restart_BCs_NH, fv_io_write_BCs,
80 ! fv_io_read_BCs</td>
81 ! </tr>
82 ! <tr>
83 ! <td>fv_mp_mod</td>
84 ! <td>is_master, switch_current_Atm, mp_reduce_min, mp_reduce_max</td>
85 ! </tr>
86 ! <tr>
87 ! <td>fv_surf_map_mod</td>
88 ! <td>sgh_g, oro_g, del2_cubed_sphere, del4_cubed_sphere</td>
89 ! </tr>
90 ! <tr>
91 ! <td>fv_treat_da_inc_mod</td>
92 ! <td>read_da_inc</td>
93 ! </tr>
94 ! <tr>
95 ! <td>fv_timing_mod</td>
96 ! <td>timing_on, timing_off</td>
97 ! </tr>
98 ! <tr>
99 ! <td>fv_update_phys_mod</td>
100 ! <td>fv_update_phys</td>
101 ! </tr>
102 ! <tr>
103 ! <td>init_hydro_mod</td>
104 ! <td>p_var</td>
105 ! </tr>
106 ! <tr>
107 ! <td>mpp_mod</td>
108 ! <td>mpp_chksum, stdout, mpp_error, FATAL, NOTE, get_unit, mpp_sum,
109 ! mpp_get_current_pelist, mpp_set_current_pelist, mpp_send, mpp_recv,
110 ! mpp_sync_self, mpp_npes, mpp_pe, mpp_sync</td>
111 ! </tr>
112 ! <tr>
113 ! <td>mpp_domains_mod</td>
114 ! <td>mpp_get_compute_domain, mpp_get_data_domain, mpp_get_global_domain,
115 ! mpp_update_domains, domain2d, DGRID_NE, CENTER, CORNER, NORTH, EAST,
116 ! mpp_get_C2F_index, WEST, SOUTH, mpp_global_field</td>
117 ! </tr>
118 ! <tr>
119 ! <td>mpp_parameter_mod</td>
120 ! <td>EUPDATE, WUPDATE, SUPDATE, NUPDATE</td>
121 ! </tr>
122 ! <tr>
123 ! <td>time_manager_mod</td>
124 ! <td>time_type, get_time, set_time, operator(+), operator(-)</td>
125 ! </tr>
126 ! <tr>
127 ! <td>tracer_manager_mod</td>
128 ! <td>get_tracer_index, get_tracer_names</td>
129 ! </tr>
130 ! <tr>
131 ! <td>test_cases_mod</td>
132 ! <td>test_case, alpha, init_case, init_double_periodic, init_latlon</td>
133 ! </tr>
134 ! </table>
135 
136 
137  use constants_mod, only: kappa, pi=>pi_8, omega, rdgas, grav, rvgas, cp_air, radius
144  use fv_diagnostics_mod, only: prt_maxmin
145  use init_hydro_mod, only: p_var
146  use mpp_domains_mod, only: mpp_update_domains, domain2d, dgrid_ne
147  use mpp_mod, only: mpp_chksum, stdout, mpp_error, fatal, note
148  use mpp_mod, only: get_unit, mpp_sum, mpp_broadcast
149  use mpp_mod, only: mpp_get_current_pelist, mpp_npes, mpp_set_current_pelist
150  use test_cases_mod, only: alpha, init_case, init_double_periodic!, init_latlon
151  use fv_mp_mod, only: is_master, mp_reduce_min, mp_reduce_max, corners_ydir => ydir, fill_corners, tile_fine, global_nest_domain
152  use fv_surf_map_mod, only: sgh_g, oro_g
153  use tracer_manager_mod, only: get_tracer_names
154  use field_manager_mod, only: model_atmos
159  use tracer_manager_mod, only: get_tracer_index
160  use field_manager_mod, only: model_atmos
162  use mpp_domains_mod, only: mpp_get_compute_domain, mpp_get_data_domain, mpp_get_global_domain
163  use mpp_mod, only: mpp_send, mpp_recv, mpp_sync_self, mpp_set_current_pelist, mpp_get_current_pelist, mpp_npes, mpp_pe, mpp_sync
164  use mpp_domains_mod, only: center, corner, north, east, mpp_get_c2f_index, west, south
165  use mpp_domains_mod, only: mpp_global_field
166  use fms_mod, only: file_exist
169 #ifdef MULTI_GASES
170  use multi_gases_mod, only: virq
171 #endif
172 
173  implicit none
174  private
175 
177 
178  real(kind=R_GRID), parameter :: cnst_0p20=0.20d0
179  !--- private data type
180  logical :: module_is_initialized = .false.
181 
182 contains
183 
184 
185  subroutine fv_restart_init()
186  call fv_io_init()
187  module_is_initialized = .true.
188  end subroutine fv_restart_init
189 
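! Typical call order implied by this module (a hedged sketch of a host driver,
! not the actual one; fv_restart aborts with a FATAL error unless
! fv_restart_init has been called first):
!!$ call fv_restart_init()
!!$ call fv_restart(fv_domain, Atm, dt_atmos, seconds, days, cold_start, &
!!$ grid_type, this_grid)
!!$ ! ... time integration ...
!!$ call fv_write_restart(Atm(this_grid), timestamp) ! optional mid-run restart
!!$ call fv_restart_end(Atm(this_grid), restart_endfcst)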
195  subroutine fv_restart(fv_domain, Atm, dt_atmos, seconds, days, cold_start, grid_type, this_grid)
196  type(domain2d), intent(inout) :: fv_domain
197  type(fv_atmos_type), intent(inout) :: Atm(:)
198  real, intent(in) :: dt_atmos
199  integer, intent(out) :: seconds
200  integer, intent(out) :: days
201  logical, intent(inout) :: cold_start
202  integer, intent(in) :: grid_type, this_grid
203 
204  integer :: i, j, k, n, ntileMe, nt, iq
205  integer :: isc, iec, jsc, jec, ncnst, ntprog, ntdiag
206  integer :: isd, ied, jsd, jed, npz
207  integer isd_p, ied_p, jsd_p, jed_p, isc_p, iec_p, jsc_p, jec_p, isg, ieg, jsg,jeg, npx_p, npy_p
208  real, allocatable :: g_dat(:,:,:)
209 
210  integer :: unit
211  real, allocatable :: dz1(:)
212  real rgrav, f00, ztop, pertn, ph
213  logical :: hybrid
214  character(len=128):: tname, errstring, fname, tracer_name
215  character(len=120):: fname_ne, fname_sw
216  character(len=3) :: gn
217 
218  integer :: npts, sphum
219  integer, allocatable :: pelist(:), smoothed_topo(:)
220  real :: sumpertn
221  real :: zvir
222 
223  integer :: i_butterfly, j_butterfly
224  logical :: do_read_restart = .false.
225  logical :: do_read_restart_bc = .false.
226  integer, allocatable :: ideal_test_case(:), new_nest_topo(:)
227 
228  rgrav = 1. / grav
229 
230  if(.not.module_is_initialized) call mpp_error(fatal, 'You must call fv_restart_init.')
231 
232  ntileme = size(atm(:))
233  allocate(smoothed_topo(ntileme))
234  smoothed_topo(:) = 0
235  allocate(ideal_test_case(ntileme))
236  ideal_test_case(:) = 0
237  allocate(new_nest_topo(ntileme))
238  new_nest_topo(:) = 0
239 
240  do n = 1, ntileme
241 
242  isd = atm(n)%bd%isd
243  ied = atm(n)%bd%ied
244  jsd = atm(n)%bd%jsd
245  jed = atm(n)%bd%jed
246  isc = atm(n)%bd%isc
247  iec = atm(n)%bd%iec
248  jsc = atm(n)%bd%jsc
249  jec = atm(n)%bd%jec
250  ncnst = atm(n)%ncnst
251  if( is_master() ) write(*,*) 'in fv_restart ncnst=', ncnst
252  npz = atm(n)%npz
253  ntprog = size(atm(n)%q,4)
254  ntdiag = size(atm(n)%qdiag,4)
255 
256 !!$ if (is_master()) then
257 !!$ print*, 'FV_RESTART: ', n, cold_start_grids(n)
258 !!$ endif
259 
260  !1. sort out restart, external_ic, and cold-start (idealized)
261  if (atm(n)%neststruct%nested) then
262  write(fname, '(A, I2.2, A)') 'INPUT/fv_core.res.nest', atm(n)%grid_number, '.nc'
263  write(fname_ne,'(A, I2.2, A)') 'INPUT/fv_BC_ne.res.nest', atm(n)%grid_number, '.nc'
264  write(fname_sw,'(A, I2.2, A)') 'INPUT/fv_BC_sw.res.nest', atm(n)%grid_number, '.nc'
265  if (is_master()) print*, 'Searching for nested grid BC files ', trim(fname_ne), ' ', trim(fname_sw)
266  do_read_restart = file_exist(fname, atm(n)%domain)
267  do_read_restart_bc = file_exist(fname_ne, atm(n)%domain) .and. file_exist(fname_sw, atm(n)%domain)
268  if (is_master()) then
269  print*, 'FV_RESTART: ', n, do_read_restart, do_read_restart_bc
270  if (.not. do_read_restart_bc) write(*,*) 'BC files not found, re-generating nested grid boundary conditions'
271  endif
272  atm(n)%neststruct%first_step = .not. do_read_restart_bc
273  else
274  fname='INPUT/fv_core.res.nc'
275  do_read_restart = file_exist('INPUT/fv_core.res.nc') .or. file_exist('INPUT/fv_core.res.tile1.nc')
276  if (is_master()) print*, 'FV_RESTART: ', n, do_read_restart, do_read_restart_bc
277  endif
278 
279  !2. Register restarts
280  !--- call fv_io_register_restart to register restart field to be written out in fv_io_write_restart
281  if ( n==this_grid ) call fv_io_register_restart(atm(n)%domain,atm(n:n))
282  !if (Atm(n)%neststruct%nested) call fv_io_register_restart_BCs(Atm(n)) !TODO put into fv_io_register_restart
283 
284 
285  !3preN. Topography BCs for nest, including setup for blending
286 
287  if (atm(n)%neststruct%nested) then
288  if (.not. allocated(pelist)) then
289  allocate(pelist(0:mpp_npes()-1))
290  call mpp_get_current_pelist(pelist)
291  endif
292  call mpp_set_current_pelist() !global
293  call mpp_broadcast(atm(n)%flagstruct%external_ic,atm(n)%pelist(1))
294  call mpp_sync()
295  call mpp_set_current_pelist(pelist)
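 ! The widen-then-restore pattern above (switch to the global pelist, broadcast
 ! from the grid's root PE, sync, then restore the local pelist) is what lets
 ! data cross grid pelists; the same pattern recurs below when ptop/ak/bk are
 ! broadcast for the remap BCs.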
296  if ( ( smoothed_topo(atm(n)%parent_grid%grid_number) > 0 .or. &
297  .not. do_read_restart_bc .or. &
298  atm(n)%flagstruct%external_ic ) ) then
299  new_nest_topo(n) = 1
300  if (n==this_grid) then
301 
302  call fill_nested_grid_topo(atm(n), n==this_grid)
303  call fill_nested_grid_topo_halo(atm(n), n==this_grid) !TODO can we combine these?
304  call nested_grid_bc(atm(n)%ps, atm(n)%parent_grid%ps, global_nest_domain, &
305  atm(n)%neststruct%ind_h, atm(n)%neststruct%wt_h, 0, 0, &
306  atm(n)%npx, atm(n)%npy, atm(n)%bd, 1, atm(n)%npx-1, 1, atm(n)%npy-1)
307 
308  elseif (this_grid==atm(n)%parent_grid%grid_number) then !this_grid is grid n's parent
309 
310  call fill_nested_grid_topo(atm(n), n==this_grid)
311  call fill_nested_grid_topo_halo(atm(n), n==this_grid) !TODO can we combine these?
312  !call mpp_get_data_domain( Atm(n)%parent_grid%domain, isd, ied, jsd, jed)
313  call nested_grid_bc(atm(n)%parent_grid%ps, global_nest_domain, 0, 0, n-1)
314  !Atm(n)%ps, Atm(n)%parent_grid%ps, global_nest_domain, &
315  !Atm(n)%neststruct%ind_h, Atm(n)%neststruct%wt_h, 0, 0, &
316  !Atm(n)%npx, Atm(n)%npy, Atm(n)%bd, isd, ied, jsd, jed, proc_in=n==this_grid)
317 
318  endif
319 
320  endif
321  endif
322 
323  !This call still appears to be necessary to get isd, etc. correct
324  !call switch_current_Atm(Atm(n)) !TODO should NOT be necessary now that we manually set isd, etc.
325 
326  !--- call fv_io_register_restart to register restart field to be written out in fv_io_write_restart
327  !if (n==this_grid) call fv_io_register_restart(Atm(n)%domain,Atm(n:n))
328  !if (Atm(n)%neststruct%nested) call fv_io_register_restart_BCs(Atm(n)) !TODO put into fv_io_register_restart
329 
330  if (n==this_grid) then
331 
332  !3. External_ic
333  if (atm(n)%flagstruct%external_ic) then
334  if( is_master() ) write(*,*) 'Calling get_external_ic'
335  call get_external_ic(atm(n), atm(n)%domain, .not. do_read_restart, dt_atmos)
336  if( is_master() ) write(*,*) 'IC generated from the specified external source'
337 
338  !4. Restart
339  elseif (do_read_restart) then
340 
341  if ( atm(n)%flagstruct%npz_rst /= 0 .and. atm(n)%flagstruct%npz_rst /= atm(n)%npz ) then
342  !Remap vertically the prognostic variables for the chosen vertical resolution
343  if( is_master() ) then
344  write(*,*) ' '
345  write(*,*) '***** Important Note from FV core ********************'
346  write(*,*) 'Remapping dynamic IC from', atm(n)%flagstruct%npz_rst, 'levels to ', atm(n)%npz,'levels'
347  write(*,*) '***** End Note from FV core **************************'
348  write(*,*) ' '
349  endif
350  call remap_restart( atm(n)%domain, atm(n:n) )
351  if( is_master() ) write(*,*) 'Done remapping dynamical IC'
352  else
353  if( is_master() ) write(*,*) 'Warm starting, calling fv_io_read_restart'
354  call fv_io_read_restart(atm(n)%domain,atm(n:n))
355  !====== PJP added DA functionality ======
356  if (atm(n)%flagstruct%read_increment) then
357  ! print point in middle of domain for a sanity check
358  i = (atm(n)%bd%isc + atm(n)%bd%iec)/2
359  j = (atm(n)%bd%jsc + atm(n)%bd%jec)/2
360  k = atm(n)%npz/2
361  if( is_master() ) write(*,*) 'Calling read_da_inc',atm(n)%pt(i,j,k)
362  call read_da_inc(atm(n), atm(n)%domain, atm(n)%bd, atm(n)%npz, atm(n)%ncnst, &
363  atm(n)%u, atm(n)%v, atm(n)%q, atm(n)%delp, atm(n)%pt, atm(n)%delz, isd, jsd, ied, jed, &
364  isc, jsc, iec, jec )
365  if( is_master() ) write(*,*) 'Back from read_da_inc',atm(n)%pt(i,j,k)
366  endif
367  !====== end PJP added DA functionality ======
368  endif
369 
370  seconds = 0; days = 0 ! Restart needs to be modified to record seconds and days.
371 
372  if (atm(n)%neststruct%nested) then
373  if ( atm(n)%flagstruct%npz_rst /= 0 .and. atm(n)%flagstruct%npz_rst /= npz ) then
374  call mpp_error(fatal, "Remap-restart not implemented for nests.")
375  endif
376  if (do_read_restart_bc) call fv_io_read_bcs(atm(n))
377  call mpp_update_domains(atm(n)%u, atm(n)%v, atm(n)%domain, gridtype=dgrid_ne, complete=.true.)
378  endif
379 
380  if ( atm(n)%flagstruct%mountain ) then
381  ! !!! Additional terrain filter -- should not be called repeatedly !!!
382  if ( atm(n)%flagstruct%n_zs_filter > 0 ) then
383  if ( atm(n)%flagstruct%nord_zs_filter == 2 ) then
384  !!! TODO: move this block into its own routine or CLEAN UP these subroutine calls
385  call del2_cubed_sphere(atm(n)%npx, atm(n)%npy, atm(n)%phis, &
386  atm(n)%gridstruct%area_64, atm(n)%gridstruct%dx, atm(n)%gridstruct%dy, &
387  atm(n)%gridstruct%dxc, atm(n)%gridstruct%dyc, atm(n)%gridstruct%sin_sg, &
388  atm(n)%flagstruct%n_zs_filter, cnst_0p20*atm(n)%gridstruct%da_min, &
389  .false., oro_g, atm(n)%gridstruct%bounded_domain, atm(n)%domain, atm(n)%bd)
390  if ( is_master() ) write(*,*) 'Warning !!! del-2 terrain filter has been applied ', &
391  atm(n)%flagstruct%n_zs_filter, ' times'
392  else if( atm(n)%flagstruct%nord_zs_filter == 4 ) then
393  call del4_cubed_sphere(atm(n)%npx, atm(n)%npy, atm(n)%phis, atm(n)%gridstruct%area_64, &
394  atm(n)%gridstruct%dx, atm(n)%gridstruct%dy, &
395  atm(n)%gridstruct%dxc, atm(n)%gridstruct%dyc, atm(n)%gridstruct%sin_sg, &
396  atm(n)%flagstruct%n_zs_filter, .false., oro_g, atm(n)%gridstruct%bounded_domain, &
397  atm(n)%domain, atm(n)%bd)
398  if ( is_master() ) write(*,*) 'Warning !!! del-4 terrain filter has been applied ', &
399  atm(n)%flagstruct%n_zs_filter, ' times'
400  endif
401  endif
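 ! For the del-2 option, the damping coefficient handed to del2_cubed_sphere is
 ! cnst_0p20*da_min, i.e. 20% of the smallest grid-cell area -- a conservative
 ! value for an explicit 2nd-order Laplacian filter.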
402  call mpp_update_domains( atm(n)%phis, atm(n)%domain, complete=.true. )
403  else
404  atm(n)%phis = 0.
405  if( is_master() ) write(*,*) 'phis set to zero'
406  endif !mountain
407 
408 
409 
410  !5. Idealized test case
411  else
412 
413  ideal_test_case(n) = 1
414 
415  if ( atm(n)%flagstruct%make_hybrid_z ) then
416  hybrid = .false.
417  else
418  hybrid = atm(n)%flagstruct%hybrid_z
419  endif
420  if (grid_type < 4) then
421  if ( .not. atm(n)%flagstruct%external_ic ) then
422  call init_case(atm(n)%u,atm(n)%v,atm(n)%w,atm(n)%pt,atm(n)%delp,atm(n)%q, &
423  atm(n)%phis, atm(n)%ps,atm(n)%pe, atm(n)%peln,atm(n)%pk,atm(n)%pkz, &
424  atm(n)%uc,atm(n)%vc, atm(n)%ua,atm(n)%va, &
425  atm(n)%ak, atm(n)%bk, atm(n)%gridstruct, atm(n)%flagstruct,&
426  atm(n)%npx, atm(n)%npy, npz, atm(n)%ng, &
427  ncnst, atm(n)%flagstruct%nwat, &
428  atm(n)%flagstruct%ndims, atm(n)%flagstruct%ntiles, &
429  atm(n)%flagstruct%dry_mass, &
430  atm(n)%flagstruct%mountain, &
431  atm(n)%flagstruct%moist_phys, atm(n)%flagstruct%hydrostatic, &
432  hybrid, atm(n)%delz, atm(n)%ze0, &
433  atm(n)%flagstruct%adiabatic, atm(n)%ks, atm(n)%neststruct%npx_global, &
434  atm(n)%ptop, atm(n)%domain, atm(n)%tile_of_mosaic, atm(n)%bd)
435  endif
436  elseif (grid_type == 4) then
437  call init_double_periodic(atm(n)%u,atm(n)%v,atm(n)%w,atm(n)%pt, &
438  atm(n)%delp,atm(n)%q,atm(n)%phis, atm(n)%ps,atm(n)%pe, &
439  atm(n)%peln,atm(n)%pk,atm(n)%pkz, &
440  atm(n)%uc,atm(n)%vc, atm(n)%ua,atm(n)%va, &
441  atm(n)%ak, atm(n)%bk, &
442  atm(n)%gridstruct, atm(n)%flagstruct, &
443  atm(n)%npx, atm(n)%npy, npz, atm(n)%ng, &
444  ncnst, atm(n)%flagstruct%nwat, &
445  atm(n)%flagstruct%ndims, atm(n)%flagstruct%ntiles, &
446  atm(n)%flagstruct%dry_mass, atm(n)%flagstruct%mountain, &
447  atm(n)%flagstruct%moist_phys, atm(n)%flagstruct%hydrostatic, &
448  hybrid, atm(n)%delz, atm(n)%ze0, atm(n)%ks, atm(n)%ptop, &
449  atm(n)%domain, atm(n)%tile_of_mosaic, atm(n)%bd)
450  if( is_master() ) write(*,*) 'Doubly Periodic IC generated'
451  elseif (grid_type == 5 .or. grid_type == 6) then
452  call mpp_error(fatal, "Idealized test cases for grid_type == 5,6 (global lat-lon) grid not supported")
453  endif
454 
455  !Turn this off on the nested grid if you are just interpolating topography from the coarse grid!
456  !These parameters are needed in LM3/LM4, and are communicated through restart files
457  if ( atm(n)%flagstruct%fv_land ) then
458  do j=jsc,jec
459  do i=isc,iec
460  atm(n)%sgh(i,j) = sgh_g(i,j)
461  atm(n)%oro(i,j) = oro_g(i,j)
462  enddo
463  enddo
464  endif
465 
466  endif !external_ic vs. restart vs. idealized
467 
468 
469  endif !n==this_grid
470 
471 
472  !!!! NOT NEEDED??
473  !Currently even though we do fill in the nested-grid IC from
474  ! init_case or external_ic we appear to overwrite it using
475  ! coarse-grid data
476 !!$ if (Atm(n)%neststruct%nested) then
477 !!$ if (.not. Atm(n)%flagstruct%external_ic .and. .not. Atm(n)%flagstruct%nggps_ic .and. grid_type < 4 ) then
478 !!$ call fill_nested_grid_data(Atm(n:n))
479 !!$ endif
480 !!$ end if
481 
482 ! endif !end cold_start check
483 
484  !5n. Nesting setup (part I)
485 
486  !Broadcast data for nesting
487  if (ntileme > 1) then
488  if (.not. allocated(pelist)) then
489  allocate(pelist(0:mpp_npes()-1))
490  call mpp_get_current_pelist(pelist)
491  endif
492 
493  call mpp_set_current_pelist()!global
494  !for remap BCs
495  call mpp_broadcast(atm(n)%ptop,atm(n)%pelist(1))
496  call mpp_broadcast(atm(n)%ak,atm(n)%npz+1,atm(n)%pelist(1))
497  call mpp_broadcast(atm(n)%bk,atm(n)%npz+1,atm(n)%pelist(1))
498  !smoothed_topo
499  call mpp_broadcast(smoothed_topo(n),atm(n)%pelist(1))
500 
501  call mpp_sync()
502  call mpp_set_current_pelist(pelist)
503 
504 
505  if (atm(n)%neststruct%nested) then
506  atm(n)%neststruct%do_remap_BC(ntileme) = .false.
507 
508  if (atm(n)%npz /= atm(n)%parent_grid%npz) then
509  atm(n)%neststruct%do_remap_BC(n) = .true.
510  else
511  do k=1,atm(n)%npz+1
512  if (atm(n)%ak(k) /= atm(n)%parent_grid%ak(k)) then
513  atm(n)%neststruct%do_remap_BC(n) = .true.
514  exit
515  endif
516  if (atm(n)%bk(k) /= atm(n)%parent_grid%bk(k)) then
517  atm(n)%neststruct%do_remap_BC(n) = .true.
518  exit
519  endif
520  enddo
521  endif
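 ! Remapped BCs are needed whenever the nest and its parent differ in the
 ! number of levels or in any ak/bk coefficient of the hybrid vertical
 ! coordinate.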
522 
523  atm(n)%parent_grid%neststruct%do_remap_BC(n) = atm(n)%neststruct%do_remap_BC(n)
524  if (is_master() .and. n==this_grid) then
525  if (atm(n)%neststruct%do_remap_BC(n)) then
526  print*, ' Remapping BCs ENABLED on grid', n
527  else
528  print*, ' Remapping BCs DISABLED (not necessary) on grid', n
529  endif
530  write(*,'(A, I3, A, F8.2, A)') ' Nested grid ', n, ', ptop = ', atm(n)%ak(1), ' Pa'
531  write(*,'(A, I3, A, F8.2, A)') ' Parent grid ', n, ', ptop = ', atm(n)%parent_grid%ak(1), ' Pa'
532  if (atm(n)%ak(1) < atm(n)%parent_grid%ak(1)) then
533  print*, ' WARNING nested grid top above parent grid top. May have problems with remapping BCs.'
534  endif
535  endif
536  endif
537 
538  endif
539 
540  end do !break cycling loop to finish nesting setup
541 
542 
543  do n = ntileme,1,-1
544  if (new_nest_topo(n) > 0 ) then
545  call twoway_topo_update(atm(n), n==this_grid)
546  endif
547  end do
548 
549  !6. Data Setup
550  do n = 1, ntileme
551 
552  if (n/=this_grid) cycle
553 
554  isd = atm(n)%bd%isd
555  ied = atm(n)%bd%ied
556  jsd = atm(n)%bd%jsd
557  jed = atm(n)%bd%jed
558  isc = atm(n)%bd%isc
559  iec = atm(n)%bd%iec
560  jsc = atm(n)%bd%jsc
561  jec = atm(n)%bd%jec
562  ncnst = atm(n)%ncnst
563  if( is_master() ) write(*,*) 'in fv_restart ncnst=', ncnst
564  npz = atm(n)%npz
565  ntprog = size(atm(n)%q,4)
566  ntdiag = size(atm(n)%qdiag,4)
567 
568 
569  if ( ideal_test_case(n) == 0 ) then
570 #ifdef SW_DYNAMICS
571  atm(n)%pt(:,:,:)=1.
572 #else
573  if ( .not.atm(n)%flagstruct%hybrid_z ) then
574  if(atm(n)%ptop/=atm(n)%ak(1)) call mpp_error(fatal,'FV restart: ptop not equal Atm(n)%ak(1)')
575  else
576  atm(n)%ptop = atm(n)%ak(1); atm(n)%ks = 0
577  endif
578  call p_var(npz, isc, iec, jsc, jec, atm(n)%ptop, ptop_min, &
579  atm(n)%delp, atm(n)%delz, atm(n)%pt, atm(n)%ps, atm(n)%pe, atm(n)%peln, &
580  atm(n)%pk, atm(n)%pkz, kappa, atm(n)%q, atm(n)%ng, &
581  ncnst, atm(n)%gridstruct%area_64, atm(n)%flagstruct%dry_mass, &
582  atm(n)%flagstruct%adjust_dry_mass, atm(n)%flagstruct%mountain, &
583  atm(n)%flagstruct%moist_phys, atm(n)%flagstruct%hydrostatic, &
584  atm(n)%flagstruct%nwat, atm(n)%domain, atm(1)%flagstruct%adiabatic, atm(n)%flagstruct%make_nh)
585 #endif
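 ! In the full (non-SW_DYNAMICS) build, p_var rebuilds the auxiliary pressure
 ! fields (pe, peln, pk, pkz, ps) from delp and ptop so the hydrostatic state
 ! is self-consistent before the diagnostics below.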
586  if ( grid_type < 7 .and. grid_type /= 4 ) then
587  ! Fill big values in the non-existing corner regions:
588  ! call fill_ghost(Atm(n)%phis, Atm(n)%npx, Atm(n)%npy, big_number)
589  do j=jsd,jed+1
590  do i=isd,ied+1
591  atm(n)%gridstruct%fc(i,j) = 2.*omega*( -cos(atm(n)%gridstruct%grid(i,j,1))*cos(atm(n)%gridstruct%grid(i,j,2))*sin(alpha) + &
592  sin(atm(n)%gridstruct%grid(i,j,2))*cos(alpha) )
593  enddo
594  enddo
595  do j=jsd,jed
596  do i=isd,ied
597  atm(n)%gridstruct%f0(i,j) = 2.*omega*( -cos(atm(n)%gridstruct%agrid(i,j,1))*cos(atm(n)%gridstruct%agrid(i,j,2))*sin(alpha) + &
598  sin(atm(n)%gridstruct%agrid(i,j,2))*cos(alpha) )
599  enddo
600  enddo
601  else
602  f00 = 2.*omega*sin(atm(n)%flagstruct%deglat/180.*pi)
603  do j=jsd,jed+1
604  do i=isd,ied+1
605  atm(n)%gridstruct%fc(i,j) = f00
606  enddo
607  enddo
608  do j=jsd,jed
609  do i=isd,ied
610  atm(n)%gridstruct%f0(i,j) = f00
611  enddo
612  enddo
613  endif
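 ! The two branches above set the Coriolis parameter: on the sphere,
 ! f = 2*omega*(-cos(lon)*cos(lat)*sin(alpha) + sin(lat)*cos(alpha)), the
 ! rotated-pole form that reduces to f = 2*omega*sin(lat) when alpha = 0;
 ! for grid_type 4 or >= 7 (f-plane/doubly periodic), f is the constant f00
 ! evaluated at flagstruct%deglat.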
614  call mpp_update_domains( atm(n)%gridstruct%f0, atm(n)%domain )
615  if ( atm(n)%gridstruct%cubed_sphere .and. (.not. atm(n)%gridstruct%bounded_domain))then
616  call fill_corners(atm(n)%gridstruct%f0, atm(n)%npx, atm(n)%npy, corners_ydir)
617  endif
618  endif
619 
620 
621 !---------------------------------------------------------------------------------------------
622 ! Transform the (starting) Eulerian vertical coordinate from sigma-p to hybrid_z
623  if ( atm(n)%flagstruct%hybrid_z ) then
624  if ( atm(n)%flagstruct%make_hybrid_z ) then
625  allocate ( dz1(npz) )
626  if( npz==32 ) then
627  call compute_dz_l32(npz, ztop, dz1)
628  else
629  ztop = 45.e3
630  call compute_dz_var(npz, ztop, dz1)
631  endif
632  call set_hybrid_z(isc, iec, jsc, jec, atm(n)%ng, npz, ztop, dz1, rgrav, &
633  atm(n)%phis, atm(n)%ze0)
634  deallocate ( dz1 )
635 ! call prt_maxmin('ZE0', Atm(n)%ze0, isc, iec, jsc, jec, 0, npz, 1.E-3)
636 ! call prt_maxmin('DZ0', Atm(n)%delz, isc, iec, jsc, jec, 0, npz, 1. )
637  endif
638 ! call make_eta_level(npz, Atm(n)%pe, area, Atm(n)%ks, Atm(n)%ak, Atm(n)%bk, Atm(n)%ptop)
639  endif
640 !---------------------------------------------------------------------------------------------
641 
642  if (atm(n)%flagstruct%add_noise > 0.) then
643  write(errstring,'(A, E16.9)') "Adding thermal noise of amplitude ", atm(n)%flagstruct%add_noise
644  call mpp_error(note, errstring)
645  call random_seed
646  npts = 0
647  sumpertn = 0.
648  do k=1,npz
649  do j=jsc,jec
650  do i=isc,iec
651  call random_number(pertn)
652  atm(n)%pt(i,j,k) = atm(n)%pt(i,j,k) + pertn*atm(n)%flagstruct%add_noise
653  npts = npts + 1
654  sumpertn = sumpertn + (pertn*atm(n)%flagstruct%add_noise)**2 ! square of the added perturbation, for the RMS below
655  enddo
656  enddo
657  enddo
658  call mpp_update_domains(atm(n)%pt, atm(n)%domain)
659  call mpp_sum(sumpertn)
660  call mpp_sum(npts)
661  write(errstring,'(A, E16.9)') "RMS added noise: ", sqrt(sumpertn/npts)
662  call mpp_error(note, errstring)
663  endif
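 ! Sanity check: random_number draws U uniform on [0,1), so E[(a*U)**2] = a**2/3
 ! and the reported RMS should be close to add_noise/sqrt(3), i.e. about
 ! 0.577*add_noise.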
664 
665  if (atm(n)%flagstruct%butterfly_effect) then
666  if (n==1 .and. atm(n)%tile_of_mosaic == 1) then
667  i_butterfly = atm(n)%npx / 2
668  j_butterfly = atm(n)%npy / 2
669  if (isc <= i_butterfly .and. i_butterfly <= iec) then
670  if (jsc <= j_butterfly .and. j_butterfly <= jec) then
671 
672  write(*,'(A, I0, A, I0)') "Adding butterfly effect at (i,j) ", i_butterfly, ", ", j_butterfly
673  write(*,'(A, E24.17)') "pt (before) :", atm(n)%pt(i_butterfly,j_butterfly,atm(n)%npz)
674 
675  atm(n)%pt(i_butterfly,j_butterfly,atm(n)%npz) = nearest(atm(n)%pt(i_butterfly,j_butterfly,atm(n)%npz), -1.0)
676 
677  write(*,'(A, E24.17)') "pt (after) :", atm(n)%pt(i_butterfly,j_butterfly,atm(n)%npz)
678 
679  endif
680  endif
681  endif
682  endif
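 ! nearest(x, -1.0) is the Fortran intrinsic returning the machine-representable
 ! number adjacent to x in the negative direction: a one-ulp perturbation, the
 ! smallest change expressible, used to seed this butterfly-effect test.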
683  if (atm(n)%flagstruct%fv_sg_adj > 0 .and. atm(n)%flagstruct%sg_cutoff > 0) then
684  !Choose n_sponge from first reference level above sg_cutoff
685  do k=1,npz
686  ph = atm(n)%ak(k+1) + atm(n)%bk(k+1)*atm(n)%flagstruct%p_ref
687  if (ph > atm(n)%flagstruct%sg_cutoff) exit
688  enddo
689  atm(n)%flagstruct%n_sponge = min(k,npz)
690  write(errstring,'(A, I3, A)') ' Override n_sponge: applying 2dz filter to ', k , ' levels'
691  call mpp_error(note, errstring)
692  endif
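 ! ph above is the hybrid-coordinate interface pressure at the reference
 ! surface pressure, ph(k+1/2) = ak(k+1) + bk(k+1)*p_ref; n_sponge is set to
 ! the first level whose lower-interface pressure exceeds sg_cutoff.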
693 
694  if (atm(n)%grid_number > 1) then
695  write(gn,'(A2, I1)') " g", atm(n)%grid_number
696  else
697  gn = ''
698  end if
699 
700  unit = stdout()
701  !!!NOTE: Checksums not yet working in stand-alone regional model!!
702  write(unit,*)
703  write(unit,*) 'fv_restart u ', trim(gn),' = ', mpp_chksum(atm(n)%u(isc:iec,jsc:jec,:))
704  write(unit,*) 'fv_restart v ', trim(gn),' = ', mpp_chksum(atm(n)%v(isc:iec,jsc:jec,:))
705  if ( .not.atm(n)%flagstruct%hydrostatic ) &
706  write(unit,*) 'fv_restart w ', trim(gn),' = ', mpp_chksum(atm(n)%w(isc:iec,jsc:jec,:))
707  write(unit,*) 'fv_restart delp', trim(gn),' = ', mpp_chksum(atm(n)%delp(isc:iec,jsc:jec,:))
708  write(unit,*) 'fv_restart phis', trim(gn),' = ', mpp_chksum(atm(n)%phis(isc:iec,jsc:jec))
709 
710 #ifdef SW_DYNAMICS
711  call prt_maxmin('H ', atm(n)%delp, isc, iec, jsc, jec, atm(n)%ng, 1, rgrav)
712 #else
713  write(unit,*) 'fv_restart pt ', trim(gn),' = ', mpp_chksum(atm(n)%pt(isc:iec,jsc:jec,:))
714  if (ntprog>0) &
715  write(unit,*) 'fv_restart q(prog) nq ', trim(gn),' =',ntprog, mpp_chksum(atm(n)%q(isc:iec,jsc:jec,:,:))
716  if (ntdiag>0) &
717  write(unit,*) 'fv_restart q(diag) nq ', trim(gn),' =',ntdiag, mpp_chksum(atm(n)%qdiag(isc:iec,jsc:jec,:,:))
718  do iq=1,min(17, ntprog) ! Check up to 17 tracers
719  call get_tracer_names(model_atmos, iq, tracer_name)
720  write(unit,*) 'fv_restart '//trim(tracer_name)//' = ', mpp_chksum(atm(n)%q(isc:iec,jsc:jec,:,iq))
721  enddo
722 
723 !---------------
724 ! Check Min/Max:
725 !---------------
726  call pmaxmn_g('ZS', atm(n)%phis, isc, iec, jsc, jec, 1, rgrav, atm(n)%gridstruct%area_64, atm(n)%domain)
727  call pmaxmn_g('PS', atm(n)%ps, isc, iec, jsc, jec, 1, 0.01, atm(n)%gridstruct%area_64, atm(n)%domain)
728  call pmaxmn_g('T ', atm(n)%pt, isc, iec, jsc, jec, npz, 1., atm(n)%gridstruct%area_64, atm(n)%domain)
729 
730 ! Check tracers:
731  do i=1, ntprog
732  call get_tracer_names ( model_atmos, i, tname )
733  call pmaxmn_g(trim(tname), atm(n)%q(isd:ied,jsd:jed,1:npz,i:i), isc, iec, jsc, jec, npz, &
734  1., atm(n)%gridstruct%area_64, atm(n)%domain)
735  enddo
736 #endif
737  call prt_maxmin('U ', atm(n)%u(isc:iec,jsc:jec,1:npz), isc, iec, jsc, jec, 0, npz, 1.)
738  call prt_maxmin('V ', atm(n)%v(isc:iec,jsc:jec,1:npz), isc, iec, jsc, jec, 0, npz, 1.)
739 
740  if ( (.not.atm(n)%flagstruct%hydrostatic) .and. atm(n)%flagstruct%make_nh ) then
741  call mpp_error(note, " Initializing w to 0")
742  atm(n)%w = 0.
743  sphum = get_tracer_index(model_atmos, 'sphum')
744  if ( .not.atm(n)%flagstruct%hybrid_z ) then
745  if (atm(n)%flagstruct%adiabatic .or. sphum < 0) then
746  zvir = 0.
747  else
748  zvir = rvgas/rdgas - 1.
749  endif
750  do k=1,npz
751  do j=jsc,jec
752  do i=isc,iec
753  atm(n)%delz(i,j,k) = (rdgas*rgrav)*atm(n)%pt(i,j,k)*(1.+zvir*atm(n)%q(i,j,k,sphum))*(atm(n)%peln(i,k,j)-atm(n)%peln(i,k+1,j))
754  enddo
755  enddo
756  enddo
757  endif
758  endif
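 ! The delz initialization above is the layer-integrated hypsometric equation,
 ! delz(k) = (Rd/g)*Tv*(ln p(k) - ln p(k+1)) with Tv = T*(1 + zvir*q_sphum) and
 ! zvir = Rv/Rd - 1; it is negative because pressure increases downward.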
759 
760  if ( .not.atm(n)%flagstruct%hydrostatic ) &
761  call pmaxmn_g('W ', atm(n)%w, isc, iec, jsc, jec, npz, 1., atm(n)%gridstruct%area_64, atm(n)%domain)
762 
763  if (is_master()) write(unit,*)
764 
765 !--------------------------------------------
766 ! Initialize surface winds for flux coupler:
767 !--------------------------------------------
768  if ( .not. atm(n)%flagstruct%srf_init ) then
769  call cubed_to_latlon(atm(n)%u, atm(n)%v, atm(n)%ua, atm(n)%va, &
770  atm(n)%gridstruct, &
771  atm(n)%npx, atm(n)%npy, npz, 1, &
772  atm(n)%gridstruct%grid_type, atm(n)%domain, &
773  atm(n)%gridstruct%bounded_domain, atm(n)%flagstruct%c2l_ord, atm(n)%bd)
774  do j=jsc,jec
775  do i=isc,iec
776  atm(n)%u_srf(i,j) = atm(n)%ua(i,j,npz)
777  atm(n)%v_srf(i,j) = atm(n)%va(i,j,npz)
778  enddo
779  enddo
780  atm(n)%flagstruct%srf_init = .true.
781  endif
782 
783  end do ! n_tile
784 
785  end subroutine fv_restart
786  ! </SUBROUTINE> NAME="fv_restart"
787 
788 
789  subroutine fill_nested_grid_topo_halo(Atm, proc_in)
791  type(fv_atmos_type), intent(INOUT) :: Atm
792  logical, intent(IN), OPTIONAL :: proc_in
793  integer :: isd, ied, jsd, jed
794 
795  if (.not. atm%neststruct%nested) return
796 
797  call mpp_get_data_domain( atm%parent_grid%domain, &
798  isd, ied, jsd, jed)
799 
800  !This is 2D and doesn't need remapping
801  if (is_master()) print*, ' FILLING NESTED GRID HALO WITH INTERPOLATED TERRAIN'
802  call nested_grid_bc(atm%phis, atm%parent_grid%phis, global_nest_domain, &
803  atm%neststruct%ind_h, atm%neststruct%wt_h, 0, 0, &
804  atm%npx, atm%npy, atm%bd, isd, ied, jsd, jed, proc_in=proc_in, nest_level=atm%grid_number-1)
805 
806  end subroutine fill_nested_grid_topo_halo
807 
811  subroutine fill_nested_grid_topo(Atm, proc_in)
812  type(fv_atmos_type), intent(INOUT) :: Atm
813  logical, intent(IN), OPTIONAL :: proc_in
814  real, allocatable :: g_dat(:,:,:)
815  integer :: p, sending_proc
816  integer :: isd_p, ied_p, jsd_p, jed_p
817  integer :: isg, ieg, jsg,jeg
818 
819  logical :: process
820 
821  process = .true.
822  if (present(proc_in)) then
823  process = proc_in
824  else
825  process = .true.
826  endif
827 
828 !!$ if (.not. Atm%neststruct%nested) return
829 
830  call mpp_get_global_domain( atm%parent_grid%domain, &
831  isg, ieg, jsg, jeg)
832  call mpp_get_data_domain( atm%parent_grid%domain, &
833  isd_p, ied_p, jsd_p, jed_p )
834 
835  allocate(g_dat( isg:ieg, jsg:jeg, 1) )
836  call timing_on('COMM_TOTAL')
837 
838  !!! FIXME: For whatever reason this code CRASHES if the lower-left corner
839  !!! of the nested grid lies within the first PE of a grid tile.
840 
841  if (is_master() .and. .not. atm%flagstruct%external_ic ) print*, ' FILLING NESTED GRID INTERIOR WITH INTERPOLATED TERRAIN'
842 
843  sending_proc = (atm%parent_grid%pelist(1)) + &
844  (atm%neststruct%parent_tile-tile_fine(atm%parent_grid%grid_number)+atm%parent_grid%flagstruct%ntiles-1)*atm%parent_grid%npes_per_tile
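 ! sending_proc is the root PE of the parent-grid tile underlying this nest,
 ! located as an offset from the first entry of the parent grid's pelist.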
845  if (atm%neststruct%parent_tile == atm%parent_grid%global_tile) then
846  !if (Atm%neststruct%parent_proc .and. Atm%neststruct%parent_tile == Atm%parent_grid%global_tile) then
847  call mpp_global_field( &
848  atm%parent_grid%domain, &
849  atm%parent_grid%phis(isd_p:ied_p,jsd_p:jed_p), g_dat(isg:,jsg:,1), position=center)
850  if (mpp_pe() == sending_proc) then
851  do p=1,size(atm%pelist)
852  call mpp_send(g_dat,size(g_dat),atm%pelist(p))
853  enddo
854  endif
855  endif
856 
857  if (any(atm%pelist == mpp_pe())) then
858  call mpp_recv(g_dat, size(g_dat), sending_proc)
859  endif
860 
861  call timing_off('COMM_TOTAL')
862  if (process) call fill_nested_grid(atm%phis, g_dat(isg:,jsg:,1), &
863  atm%neststruct%ind_h, atm%neststruct%wt_h, &
864  0, 0, isg, ieg, jsg, jeg, atm%bd)
865 
866  call mpp_sync_self
867 
868  deallocate(g_dat)
869 
870 
871  end subroutine fill_nested_grid_topo
872 
873  !This will still probably be needed for moving nests
874  !NOTE: this has NOT been maintained and so %global_tile is now meaningless if not referring to data on the current PE
875  ! needs to be re-coded to follow the method in fill_nested_grid_topo
876  subroutine fill_nested_grid_data(Atm, proc_in)
878  type(fv_atmos_type), intent(INOUT) :: Atm(:) !Only intended to be one element; needed for cubed_sphere_terrain
879  logical, intent(IN), OPTIONAL :: proc_in
880  real, allocatable :: g_dat(:,:,:), pt_coarse(:,:,:)
881  integer :: i,j,k,nq, sphum, ncnst, istart, iend, npz
882  integer :: isc, iec, jsc, jec, isd, ied, jsd, jed
883  integer :: isd_p, ied_p, jsd_p, jed_p, isc_p, iec_p, jsc_p, jec_p
884  integer :: isg, ieg, jsg,jeg, npx_p, npy_p
885  integer :: isg_n, ieg_n, jsg_n, jeg_n, npx_n, npy_n
886  real zvir, gh0, p1(2), p2(2), r, r0
887 
888  integer :: p, sending_proc, gid, n
889  logical process
890 
891  call mpp_error(fatal, " FILL_NESTED_GRID_DATA not yet updated for remap BCs")
892 
893  if (present(proc_in)) then
894  process = proc_in
895  else
896  process = .true.
897  endif
898 
899  isd = atm(1)%bd%isd
900  ied = atm(1)%bd%ied
901  jsd = atm(1)%bd%jsd
902  jed = atm(1)%bd%jed
903  ncnst = atm(1)%ncnst
904  isc = atm(1)%bd%isc; iec = atm(1)%bd%iec; jsc = atm(1)%bd%jsc; jec = atm(1)%bd%jec
905  npz = atm(1)%npz
906 
907 
908  gid = mpp_pe()
909 
910  sending_proc = atm(1)%parent_grid%pelist(1) + (atm(1)%neststruct%parent_tile-1)*atm(1)%parent_grid%npes_per_tile
911 
912  call mpp_get_data_domain( atm(1)%parent_grid%domain, &
913  isd_p, ied_p, jsd_p, jed_p )
914  call mpp_get_compute_domain( atm(1)%parent_grid%domain, &
915  isc_p, iec_p, jsc_p, jec_p )
916  call mpp_get_global_domain( atm(1)%parent_grid%domain, &
917  isg, ieg, jsg, jeg, xsize=npx_p, ysize=npy_p)
918 
919  if (process) then
920 
921  call mpp_error(note, "FILLING NESTED GRID DATA")
922 
923  else
924 
925  call mpp_error(note, "SENDING TO FILL NESTED GRID DATA")
926 
927  endif
928 
929  !delp
930 
931  allocate(g_dat( isg:ieg, jsg:jeg, npz) )
932 
933  call timing_on('COMM_TOTAL')
934 
935  !Call mpp_global_field on the procs that have the required data.
936  !Then broadcast from the head PE to the receiving PEs
937  if (atm(1)%neststruct%parent_proc .and. atm(1)%neststruct%parent_tile == atm(1)%parent_grid%global_tile) then
938  call mpp_global_field( &
939  atm(1)%parent_grid%domain, &
940  atm(1)%parent_grid%delp(isd_p:ied_p,jsd_p:jed_p,:), g_dat, position=center)
941  if (gid == sending_proc) then !crazy logic but what we have for now
942  do p=1,size(atm(1)%pelist)
943  call mpp_send(g_dat,size(g_dat),atm(1)%pelist(p))
944  enddo
945  endif
946  endif
947  if (any(atm(1)%pelist == gid)) then
948  call mpp_recv(g_dat, size(g_dat), sending_proc)
949  endif
950 
951  call timing_off('COMM_TOTAL')
952  if (process) call fill_nested_grid(atm(1)%delp, g_dat, &
953  atm(1)%neststruct%ind_h, atm(1)%neststruct%wt_h, &
954  0, 0, isg, ieg, jsg, jeg, npz, atm(1)%bd)
955 
956  call mpp_sync_self
957 
958  !tracers
959  do nq=1,ncnst
960 
961  call timing_on('COMM_TOTAL')
962 
963  if (any(atm(1)%parent_grid%pelist == gid) .and. atm(1)%neststruct%parent_tile == atm(1)%parent_grid%global_tile) then
964  call mpp_global_field( &
965  atm(1)%parent_grid%domain, &
966  atm(1)%parent_grid%q(isd_p:ied_p,jsd_p:jed_p,:,nq), g_dat, position=center)
967  if (gid == sending_proc) then
968  do p=1,size(atm(1)%pelist)
969  call mpp_send(g_dat,size(g_dat),atm(1)%pelist(p))
970  enddo
971  endif
972  endif
973  if (any(atm(1)%pelist == gid)) then
974  call mpp_recv(g_dat, size(g_dat), sending_proc)
975  endif
976 
977  call timing_off('COMM_TOTAL')
978  if (process) call fill_nested_grid(atm(1)%q(isd:ied,jsd:jed,:,nq), g_dat, &
979  atm(1)%neststruct%ind_h, atm(1)%neststruct%wt_h, &
980  0, 0, isg, ieg, jsg, jeg, npz, atm(1)%bd)
981 
982  call mpp_sync_self
983 
984  end do
985 
986  !Note that we do NOT fill in phis (surface geopotential), which should
987  !be computed exactly instead of being interpolated.
988 
989 
990 #ifndef SW_DYNAMICS
991  !pt --- actually temperature
992 
993  call timing_on('COMM_TOTAL')
994 
995  if (any(atm(1)%parent_grid%pelist == gid) .and. atm(1)%neststruct%parent_tile == atm(1)%parent_grid%global_tile) then
996  call mpp_global_field( &
997  atm(1)%parent_grid%domain, &
998  atm(1)%parent_grid%pt(isd_p:ied_p,jsd_p:jed_p,:), g_dat, position=center)
999  if (gid == sending_proc) then
1000  do p=1,size(atm(1)%pelist)
1001  call mpp_send(g_dat,size(g_dat),atm(1)%pelist(p))
1002  enddo
1003  endif
1004  endif
1005  if (any(atm(1)%pelist == gid)) then
1006  call mpp_recv(g_dat, size(g_dat), sending_proc)
1007  endif
1008 
1009  call mpp_sync_self
1010 
1011  call timing_off('COMM_TOTAL')
1012  if (process) call fill_nested_grid(atm(1)%pt, g_dat, &
1013  atm(1)%neststruct%ind_h, atm(1)%neststruct%wt_h, &
1014  0, 0, isg, ieg, jsg, jeg, npz, atm(1)%bd)
1015 
1016 
1017  if ( atm(1)%flagstruct%nwat > 0 ) then
1018  sphum = get_tracer_index(model_atmos, 'sphum')
1019  else
1020  sphum = 1
1021  endif
1022  if ( atm(1)%parent_grid%flagstruct%adiabatic .or. atm(1)%parent_grid%flagstruct%do_Held_Suarez ) then
1023  zvir = 0. ! no virtual effect
1024  else
1025  zvir = rvgas/rdgas - 1.
1026  endif
1027 
1028  call timing_on('COMM_TOTAL')
1029 
1030  if (any(atm(1)%parent_grid%pelist == gid) .and. atm(1)%neststruct%parent_tile == atm(1)%parent_grid%global_tile) then
1031  call mpp_global_field( &
1032  atm(1)%parent_grid%domain, &
1033  atm(1)%parent_grid%pkz(isc_p:iec_p,jsc_p:jec_p,:), g_dat, position=center)
1034  if (gid == sending_proc) then
1035  do p=1,size(atm(1)%pelist)
1036  call mpp_send(g_dat,size(g_dat),atm(1)%pelist(p))
1037  enddo
1038  endif
1039  endif
1040  if (any(atm(1)%pelist == gid)) then
1041  call mpp_recv(g_dat, size(g_dat), sending_proc)
1042  endif
1043 
1044  call mpp_sync_self
1045 
1046  call timing_off('COMM_TOTAL')
1047  if (process) then
1048  allocate(pt_coarse(isd:ied,jsd:jed,npz))
1049  call fill_nested_grid(pt_coarse, g_dat, &
1050  atm(1)%neststruct%ind_h, atm(1)%neststruct%wt_h, &
1051  0, 0, isg, ieg, jsg, jeg, npz, atm(1)%bd)
1052 
1053  if (atm(1)%bd%is == 1) then
1054  do k=1,npz
1055  do j=atm(1)%bd%jsd,atm(1)%bd%jed
1056  do i=atm(1)%bd%isd,0
1057 #ifdef MULTI_GASES
1058  atm(1)%pt(i,j,k) = cp_air*atm(1)%pt(i,j,k)/pt_coarse(i,j,k)*virq(atm(1)%q(i,j,k,:))
1059 #else
1060  atm(1)%pt(i,j,k) = cp_air*atm(1)%pt(i,j,k)/pt_coarse(i,j,k)*(1.+zvir*atm(1)%q(i,j,k,sphum))
1061 #endif
1062  end do
1063  end do
1064  end do
1065  end if
1066 
1067  if (atm(1)%bd%js == 1) then
1068  if (atm(1)%bd%is == 1) then
1069  istart = atm(1)%bd%is
1070  else
1071  istart = atm(1)%bd%isd
1072  end if
1073  if (atm(1)%bd%ie == atm(1)%npx-1) then
1074  iend = atm(1)%bd%ie
1075  else
1076  iend = atm(1)%bd%ied
1077  end if
1078 
1079  do k=1,npz
1080  do j=atm(1)%bd%jsd,0
1081  do i=istart,iend
1082 #ifdef MULTI_GASES
1083  atm(1)%pt(i,j,k) = cp_air*atm(1)%pt(i,j,k)/pt_coarse(i,j,k)*virq(atm(1)%q(i,j,k,:))
1084 #else
1085  atm(1)%pt(i,j,k) = cp_air*atm(1)%pt(i,j,k)/pt_coarse(i,j,k)*(1.+zvir*atm(1)%q(i,j,k,sphum))
1086 #endif
1087  end do
1088  end do
1089  end do
1090  end if
1091 
1092  if (atm(1)%bd%ie == atm(1)%npx-1) then
1093  do k=1,npz
1094  do j=atm(1)%bd%jsd,atm(1)%bd%jed
1095  do i=atm(1)%npx,atm(1)%bd%ied
1096 #ifdef MULTI_GASES
1097  atm(1)%pt(i,j,k) = cp_air*atm(1)%pt(i,j,k)/pt_coarse(i,j,k)*virq(atm(1)%q(i,j,k,:))
1098 #else
1099  atm(1)%pt(i,j,k) = cp_air*atm(1)%pt(i,j,k)/pt_coarse(i,j,k)*(1.+zvir*atm(1)%q(i,j,k,sphum))
1100 #endif
1101  end do
1102  end do
1103  end do
1104  end if
1105 
1106  if (atm(1)%bd%je == atm(1)%npy-1) then
1107  if (atm(1)%bd%is == 1) then
1108  istart = atm(1)%bd%is
1109  else
1110  istart = atm(1)%bd%isd
1111  end if
1112  if (atm(1)%bd%ie == atm(1)%npx-1) then
1113  iend = atm(1)%bd%ie
1114  else
1115  iend = atm(1)%bd%ied
1116  end if
1117 
1118  do k=1,npz
1119  do j=atm(1)%npy,atm(1)%bd%jed
1120  do i=istart,iend
1121 #ifdef MULTI_GASES
1122  atm(1)%pt(i,j,k) = cp_air*atm(1)%pt(i,j,k)/pt_coarse(i,j,k)*virq(atm(1)%q(i,j,k,:))
1123 #else
1124  atm(1)%pt(i,j,k) = cp_air*atm(1)%pt(i,j,k)/pt_coarse(i,j,k)*(1.+zvir*atm(1)%q(i,j,k,sphum))
1125 #endif
1126  end do
1127  end do
1128  end do
1129  end if
1130 
1131  deallocate(pt_coarse)
1132 
1133  end if
1134 
1135  if (.not. atm(1)%flagstruct%hydrostatic) then
1136 
1137  !delz
1138  call timing_on('COMM_TOTAL')
1139 
1140  if (any(atm(1)%parent_grid%pelist == gid) .and. atm(1)%neststruct%parent_tile == atm(1)%parent_grid%global_tile) then
1141  call mpp_global_field( &
1142  atm(1)%parent_grid%domain, &
1143  atm(1)%parent_grid%delz(isd_p:ied_p,jsd_p:jed_p,:), g_dat, position=center)
1144  if (gid == sending_proc) then
1145  do p=1,size(atm(1)%pelist)
1146  call mpp_send(g_dat,size(g_dat),atm(1)%pelist(p))
1147  enddo
1148  endif
1149  endif
1150  if (any(atm(1)%pelist == gid)) then
1151  call mpp_recv(g_dat, size(g_dat), sending_proc)
1152  endif
1153 
1154  call mpp_sync_self
1155 
1156  call timing_off('COMM_TOTAL')
1157  if (process) call fill_nested_grid(atm(1)%delz, g_dat, &
1158  atm(1)%neststruct%ind_h, atm(1)%neststruct%wt_h, &
1159  0, 0, isg, ieg, jsg, jeg, npz, atm(1)%bd)
1160 
1161  !w
1162 
1163  call timing_on('COMM_TOTAL')
1164 
1165  if (any(atm(1)%parent_grid%pelist == gid) .and. atm(1)%neststruct%parent_tile == atm(1)%parent_grid%global_tile) then
1166  call mpp_global_field( &
1167  atm(1)%parent_grid%domain, &
1168  atm(1)%parent_grid%w(isd_p:ied_p,jsd_p:jed_p,:), g_dat, position=center)
1169  if (gid == sending_proc) then
1170  do p=1,size(atm(1)%pelist)
1171  call mpp_send(g_dat,size(g_dat),atm(1)%pelist(p))
1172  enddo
1173  endif
1174  endif
1175  if (any(atm(1)%pelist == gid)) then
1176  call mpp_recv(g_dat, size(g_dat), sending_proc)
1177  endif
1178 
1179  call mpp_sync_self
1180 
1181  call timing_off('COMM_TOTAL')
1182  if (process) call fill_nested_grid(atm(1)%w, g_dat, &
1183  atm(1)%neststruct%ind_h, atm(1)%neststruct%wt_h, &
1184  0, 0, isg, ieg, jsg, jeg, npz, atm(1)%bd)
1185  !
1186 
1187  end if
1188 
1189 #endif
1190  deallocate(g_dat)
1191 
1192  !u
1193 
1194  allocate(g_dat( isg:ieg, jsg:jeg+1, npz) )
1195  g_dat = 1.e25
1196 
1197  call timing_on('COMM_TOTAL')
1198 
1199  if (any(atm(1)%parent_grid%pelist == gid) .and. atm(1)%neststruct%parent_tile == atm(1)%parent_grid%global_tile) then
1200  call mpp_global_field( &
1201  atm(1)%parent_grid%domain, &
1202  atm(1)%parent_grid%u(isd_p:ied_p,jsd_p:jed_p+1,:), g_dat, position=north)
1203  if (gid == sending_proc) then
1204  do p=1,size(atm(1)%pelist)
1205  call mpp_send(g_dat,size(g_dat),atm(1)%pelist(p))
1206  enddo
1207  endif
1208  endif
1209  if (any(atm(1)%pelist == gid)) then
1210  call mpp_recv(g_dat, size(g_dat), sending_proc)
1211  endif
1212 
1213  call mpp_sync_self
1214 
1215  call timing_off('COMM_TOTAL')
1216  call mpp_sync_self
1217  if (process) call fill_nested_grid(atm(1)%u, g_dat, &
1218  atm(1)%neststruct%ind_u, atm(1)%neststruct%wt_u, &
1219  0, 1, isg, ieg, jsg, jeg, npz, atm(1)%bd)
1220  deallocate(g_dat)
1221 
1222  !v
1223 
1224  allocate(g_dat( isg:ieg+1, jsg:jeg, npz) )
1225  g_dat = 1.e25
1226 
1227  call timing_on('COMM_TOTAL')
1228 
1229  if (any(atm(1)%parent_grid%pelist == gid) .and. atm(1)%neststruct%parent_tile == atm(1)%parent_grid%global_tile) then
1230  call mpp_global_field( &
1231  atm(1)%parent_grid%domain, &
1232  atm(1)%parent_grid%v(isd_p:ied_p+1,jsd_p:jed_p,:), g_dat, position=east)
1233  if (gid == sending_proc) then
1234  do p=1,size(atm(1)%pelist)
1235  call mpp_send(g_dat,size(g_dat),atm(1)%pelist(p))
1236  enddo
1237  endif
1238  endif
1239  if (any(atm(1)%pelist == gid)) then
1240  call mpp_recv(g_dat, size(g_dat), sending_proc)
1241  endif
1242 
1243  call mpp_sync_self
1244  call timing_off('COMM_TOTAL')
1245 
1246  if (process) call fill_nested_grid(atm(1)%v, g_dat, &
1247  atm(1)%neststruct%ind_v, atm(1)%neststruct%wt_v, &
1248  1, 0, isg, ieg, jsg, jeg, npz, atm(1)%bd)
1249 
1250  deallocate(g_dat)
1251 
1252  end subroutine fill_nested_grid_data
1253 
1256  subroutine twoway_topo_update(Atm, proc_in)
1257  type(fv_atmos_type), intent(INOUT) :: Atm
1258  logical, intent(IN), OPTIONAL :: proc_in
1259  real, allocatable :: g_dat(:,:,:), pt_coarse(:,:,:)
1260  integer :: i,j,k,nq, sphum, ncnst, istart, iend, npz
1261  integer :: isc, iec, jsc, jec, isd, ied, jsd, jed
1262  integer :: isd_p, ied_p, jsd_p, jed_p, isc_p, iec_p, jsc_p, jec_p
1263  integer :: isg, ieg, jsg,jeg, npx_p, npy_p
1264  integer :: isg_n, ieg_n, jsg_n, jeg_n, npx_n, npy_n
1265  real zvir
1266 
1267  integer :: p , sending_proc
1268  logical :: process
1269 
1270  if (present(proc_in)) then
1271  process = proc_in
1272  else
1273  process = .true.
1274  endif
1275 
1276  isd = atm%bd%isd
1277  ied = atm%bd%ied
1278  jsd = atm%bd%jsd
1279  jed = atm%bd%jed
1280  ncnst = atm%ncnst
1281  isc = atm%bd%isc; iec = atm%bd%iec; jsc = atm%bd%jsc; jec = atm%bd%jec
1282  npz = atm%npz
1283 
1284  isd_p = atm%parent_grid%bd%isd
1285  ied_p = atm%parent_grid%bd%ied
1286  jsd_p = atm%parent_grid%bd%jsd
1287  jed_p = atm%parent_grid%bd%jed
1288  isc_p = atm%parent_grid%bd%isc
1289  iec_p = atm%parent_grid%bd%iec
1290  jsc_p = atm%parent_grid%bd%jsc
1291  jec_p = atm%parent_grid%bd%jec
1292  sending_proc = atm%parent_grid%pelist(1) + (atm%neststruct%parent_tile-1)*atm%parent_grid%npes_per_tile
1293 
1294  call mpp_get_global_domain( atm%parent_grid%domain, &
1295  isg, ieg, jsg, jeg, xsize=npx_p, ysize=npy_p)
1296 
1297 
1298  !NOW: what we do is to update the nested-grid terrain to the coarse grid,
1299  !to ensure consistency between the two grids.
1300  if ( process ) call mpp_update_domains(atm%phis, atm%domain, complete=.true.)
1301  if (atm%neststruct%twowaynest) then
1302  if (any(atm%parent_grid%pelist == mpp_pe()) .or. atm%neststruct%child_proc) then
1303  call update_coarse_grid(atm%parent_grid%phis, &
1304  atm%phis, global_nest_domain, &
1305  atm%gridstruct%dx, atm%gridstruct%dy, atm%gridstruct%area, &
1306  atm%bd, isd_p, ied_p, jsd_p, jed_p, isd, ied, jsd, jed, &
1307  atm%neststruct%isu, atm%neststruct%ieu, atm%neststruct%jsu, atm%neststruct%jeu, &
1308  atm%npx, atm%npy, 0, 0, &
1309  atm%neststruct%refinement, atm%neststruct%nestupdate, 0, 0, &
1310  atm%neststruct%parent_proc, atm%neststruct%child_proc, atm%parent_grid, atm%grid_number-1)
1311  atm%parent_grid%neststruct%parent_of_twoway = .true.
1312  !NOTE: mpp_update_nest_coarse (and by extension, update_coarse_grid) does **NOT** pass data
1313  !allowing a two-way update into the halo of the coarse grid. It only passes data so that the INTERIOR
1314  ! can have the two-way update. Thus, on the nest's cold start, if this update_domains call is not done,
1315  ! the coarse grid will have the wrong topography in the halo, which will CHANGE when a restart is done!!
1316  if (atm%neststruct%parent_proc) call mpp_update_domains(atm%parent_grid%phis, atm%parent_grid%domain)
1317  end if
1318 
1319  end if
1320 
1321 
1322 #ifdef SW_DYNAMICS
1323 !!$ !ps: first level only
1324 !!$ !This is only valid for shallow-water simulations
1325 !!$ if (process) then
1326 !!$ do j=jsd,jed
1327 !!$ do i=isd,ied
1328 !!$
1329 !!$ Atm%ps(i,j) = Atm%delp(i,j,1)/grav
1330 !!$
1331 !!$ end do
1332 !!$ end do
1333 !!$ endif
1334 #else
1335  !Reset p_var after updating topography
1336  if (process) call p_var(npz, isc, iec, jsc, jec, atm%ptop, ptop_min, atm%delp, &
1337  atm%delz, atm%pt, atm%ps, &
1338  atm%pe, atm%peln, atm%pk, atm%pkz, kappa, atm%q, &
1339  atm%ng, ncnst, atm%gridstruct%area_64, atm%flagstruct%dry_mass, .false., atm%flagstruct%mountain, &
1340  atm%flagstruct%moist_phys, .true., atm%flagstruct%nwat, atm%domain, atm%flagstruct%adiabatic)
1341 #endif
1342 
1343 
1344 
1345  end subroutine twoway_topo_update
1346 
1350  subroutine fv_write_restart(Atm, timestamp)
1351  type(fv_atmos_type), intent(inout) :: Atm
1352  character(len=*), intent(in) :: timestamp
1353 
1354  call fv_io_write_restart(atm, timestamp)
1355  if (atm%neststruct%nested) then
1356  call fv_io_write_bcs(atm)
1357  endif
1358 
1359  end subroutine fv_write_restart
1360 
1364  subroutine fv_restart_end(Atm, restart_endfcst)
1365  type(fv_atmos_type), intent(inout) :: Atm
1366  logical, intent(in) :: restart_endfcst
1367 
1368  integer :: isc, iec, jsc, jec
1369  integer :: iq, ncnst, ntprog, ntdiag
1370  integer :: isd, ied, jsd, jed, npz
1371  integer :: unit
1372  integer :: file_unit
1373  integer, allocatable :: pelist(:)
1374  character(len=128):: tracer_name
1375  character(len=3):: gn
1376 
1377  call mpp_set_current_pelist(atm%pelist)
1378 
1379  isc = atm%bd%isc; iec = atm%bd%iec; jsc = atm%bd%jsc; jec = atm%bd%jec
1380 
1381  isd = atm%bd%isd
1382  ied = atm%bd%ied
1383  jsd = atm%bd%jsd
1384  jed = atm%bd%jed
1385  npz = atm%npz
1386  ncnst = atm%ncnst
1387  ntprog = size(atm%q,4)
1388  ntdiag = size(atm%qdiag,4)
1389 
1390  if (atm%grid_number > 1) then
1391  write(gn,'(A2, I1)') " g", atm%grid_number
1392  else
1393  gn = ''
1394  end if
1395 
1396  unit = stdout()
1397  write(unit,*)
1398  write(unit,*) 'fv_restart_end u ', trim(gn),' = ', mpp_chksum(atm%u(isc:iec,jsc:jec,:))
1399  write(unit,*) 'fv_restart_end v ', trim(gn),' = ', mpp_chksum(atm%v(isc:iec,jsc:jec,:))
1400  if ( .not. atm%flagstruct%hydrostatic ) &
1401  write(unit,*) 'fv_restart_end w ', trim(gn),' = ', mpp_chksum(atm%w(isc:iec,jsc:jec,:))
1402  write(unit,*) 'fv_restart_end delp', trim(gn),' = ', mpp_chksum(atm%delp(isc:iec,jsc:jec,:))
1403  write(unit,*) 'fv_restart_end phis', trim(gn),' = ', mpp_chksum(atm%phis(isc:iec,jsc:jec))
1404 #ifndef SW_DYNAMICS
1405  write(unit,*) 'fv_restart_end pt ', trim(gn),' = ', mpp_chksum(atm%pt(isc:iec,jsc:jec,:))
1406  if (ntprog>0) &
1407  write(unit,*) 'fv_restart_end q(prog) nq ', trim(gn),' =',ntprog, mpp_chksum(atm%q(isc:iec,jsc:jec,:,:))
1408  if (ntdiag>0) &
1409  write(unit,*) 'fv_restart_end q(diag) nq ', trim(gn),' =',ntdiag, mpp_chksum(atm%qdiag(isc:iec,jsc:jec,:,:))
1410  do iq=1,min(17, ntprog) ! Check up to 17 tracers
1411  call get_tracer_names(model_atmos, iq, tracer_name)
1412  write(unit,*) 'fv_restart_end '//trim(tracer_name)// trim(gn),' = ', mpp_chksum(atm%q(isc:iec,jsc:jec,:,iq))
1413  enddo
1414 
1415  !---------------
1416  ! Check Min/Max:
1417  !---------------
1418  ! call prt_maxmin('ZS', Atm%phis, isc, iec, jsc, jec, Atm%ng, 1, 1./grav)
1419  call pmaxmn_g('ZS', atm%phis, isc, iec, jsc, jec, 1, 1./grav, atm%gridstruct%area_64, atm%domain)
1420  call pmaxmn_g('PS ', atm%ps, isc, iec, jsc, jec, 1, 0.01 , atm%gridstruct%area_64, atm%domain)
1421  call prt_maxmin('PS*', atm%ps, isc, iec, jsc, jec, atm%ng, 1, 0.01)
1422  call prt_maxmin('U ', atm%u(isd:ied,jsd:jed,1:npz), isc, iec, jsc, jec, atm%ng, npz, 1.)
1423  call prt_maxmin('V ', atm%v(isd:ied,jsd:jed,1:npz), isc, iec, jsc, jec, atm%ng, npz, 1.)
1424  if ( .not. atm%flagstruct%hydrostatic ) &
1425  call prt_maxmin('W ', atm%w , isc, iec, jsc, jec, atm%ng, npz, 1.)
1426  call prt_maxmin('T ', atm%pt, isc, iec, jsc, jec, atm%ng, npz, 1.)
1427  do iq=1, ntprog
1428  call get_tracer_names ( model_atmos, iq, tracer_name )
1429  call pmaxmn_g(trim(tracer_name), atm%q(isd:ied,jsd:jed,1:npz,iq:iq), isc, iec, jsc, jec, npz, &
1430  1., atm%gridstruct%area_64, atm%domain)
1431  enddo
1432  ! Write energy correction term
1433 #endif
1434 
1435  if ( restart_endfcst ) then
1436  call fv_io_write_restart(atm)
1437  if (atm%neststruct%nested) call fv_io_write_bcs(atm)
1438  endif
1439  if(atm%flagstruct%write_restart_with_bcs)then
1440  call write_full_fields(atm)
1441  endif
1442 
1443  module_is_initialized = .false.
1444 
1445 #ifdef EFLUX_OUT
1446  if( is_master() ) then
1447  write(*,*) steps, 'Mean equivalent Heat flux for this integration period=',atm(1)%idiag%efx_sum/real(max(1,Atm(1)%idiag%steps)), &
1448  'Mean nesting-related flux for this integration period=',Atm(1)%idiag%efx_sum_nest/real(max(1,Atm(1)%idiag%steps)), &
1449  'Mean mountain torque=',Atm(1)%idiag%mtq_sum/real(max(1,atm(1)%idiag%steps))
1450  file_unit = get_unit()
1451  open (unit=file_unit, file='e_flux.data', form='unformatted',status='unknown', access='sequential')
1452  do n=1,steps
1453  write(file_unit) atm(1)%idiag%efx(n)
1454  write(file_unit) atm(1)%idiag%mtq(n) ! time series global mountain torque
1455  !write(file_unit) Atm(1)%idiag%efx_nest(n)
1456  enddo
1457  close(unit=file_unit)
1458  endif
1459 #endif
1460 
1461  end subroutine fv_restart_end
1462 
1463 
1464 subroutine pmaxmn_g(qname, q, is, ie, js, je, km, fac, area, domain)
1465  character(len=*), intent(in):: qname
1466  integer, intent(in):: is, ie, js, je
1467  integer, intent(in):: km
1468  real, intent(in):: q(is-3:ie+3, js-3:je+3, km)
1469  real, intent(in):: fac
1470  real(kind=R_GRID), intent(IN):: area(is-3:ie+3, js-3:je+3)
1471  type(domain2d), intent(INOUT) :: domain
1472 !
1473  real qmin, qmax, gmean
1474  integer i,j,k
1475 
1476  qmin = q(is,js,1)
1477  qmax = qmin
1478 
1479  do k=1,km
1480  do j=js,je
1481  do i=is,ie
1482  !if ( (q(i,j,k) >= 1e30) .eqv. (q(i,j,k) < 1e30) ) then !NAN checking
1483  ! print*, ' NAN found for ', qname, mpp_pe(), i,j,k
1484  !else
1485  if( q(i,j,k) < qmin) then
1486  qmin = q(i,j,k)
1487  elseif( q(i,j,k) > qmax ) then
1488  qmax = q(i,j,k)
1489  endif
1490  enddo
1491  enddo
1492  enddo
1493 
1494  call mp_reduce_min(qmin)
1495  call mp_reduce_max(qmax)
1496 
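 ! note: the area-weighted global mean below is computed over level km only.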
1497  gmean = g_sum(domain, q(is:ie,js:je,km), is, ie, js, je, 3, area, 1, .true.)
1498  if(is_master()) write(6,*) qname, qmax*fac, qmin*fac, gmean*fac
1499 
1500 end subroutine pmaxmn_g
1501 end module fv_restart_mod