FV3DYCORE  Version 1.1.0
fv_restart.F90

!***********************************************************************
!* GNU Lesser General Public License
!*
!* This file is part of the FV3 dynamical core.
!*
!* The FV3 dynamical core is free software: you can redistribute it
!* and/or modify it under the terms of the
!* GNU Lesser General Public License as published by the
!* Free Software Foundation, either version 3 of the License, or
!* (at your option) any later version.
!*
!* The FV3 dynamical core is distributed in the hope that it will be
!* useful, but WITHOUT ANY WARRANTY; without even the implied warranty
!* of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
!* See the GNU General Public License for more details.
!*
!* You should have received a copy of the GNU Lesser General Public
!* License along with the FV3 dynamical core.
!* If not, see <http://www.gnu.org/licenses/>.
!***********************************************************************

! <table>
! <tr>
! <th>Module Name</th>
! <th>Functions Included</th>
! </tr>
! <tr>
! <td>boundary_mod</td>
! <td>fill_nested_grid, nested_grid_BC, update_coarse_grid</td>
! </tr>
! <tr>
! <td>constants_mod</td>
! <td>kappa, pi=>pi_8, omega, rdgas, grav, rvgas, cp_air, radius</td>
! </tr>
! <tr>
! <td>external_ic_mod</td>
! <td>get_external_ic, get_cubed_sphere_terrain</td>
! </tr>
! <tr>
! <td>field_manager_mod</td>
! <td>MODEL_ATMOS</td>
! </tr>
! <tr>
! <td>fms_mod</td>
! <td>file_exist</td>
! </tr>
! <tr>
! <td>fv_arrays_mod</td>
! <td>fv_atmos_type, fv_nest_type, fv_grid_bounds_type, R_GRID</td>
! </tr>
! <tr>
! <td>fv_control_mod</td>
! <td>fv_init, fv_end, ngrids</td>
! </tr>
! <tr>
! <td>fv_diagnostics_mod</td>
! <td>prt_maxmin</td>
! </tr>
! <tr>
! <td>fv_eta_mod</td>
! <td>compute_dz_var, compute_dz_L32, set_hybrid_z</td>
! </tr>
! <tr>
! <td>fv_grid_utils_mod</td>
! <td>ptop_min, fill_ghost, g_sum,
! make_eta_level, cubed_to_latlon, great_circle_dist</td>
! </tr>
! <tr>
! <td>fv_io_mod</td>
! <td>fv_io_init, fv_io_read_restart, fv_io_write_restart,
! remap_restart, fv_io_register_restart, fv_io_register_nudge_restart,
! fv_io_register_restart_BCs, fv_io_register_restart_BCs_NH, fv_io_write_BCs,
! fv_io_read_BCs</td>
! </tr>
! <tr>
! <td>fv_mp_mod</td>
! <td>is_master, switch_current_Atm, mp_reduce_min, mp_reduce_max</td>
! </tr>
! <tr>
! <td>fv_surf_map_mod</td>
! <td>sgh_g, oro_g, del2_cubed_sphere, del4_cubed_sphere</td>
! </tr>
! <tr>
! <td>fv_treat_da_inc_mod</td>
! <td>read_da_inc</td>
! </tr>
! <tr>
! <td>fv_timing_mod</td>
! <td>timing_on, timing_off</td>
! </tr>
! <tr>
! <td>fv_update_phys_mod</td>
! <td>fv_update_phys</td>
! </tr>
! <tr>
! <td>init_hydro_mod</td>
! <td>p_var</td>
! </tr>
! <tr>
! <td>mpp_mod</td>
! <td>mpp_chksum, stdout, mpp_error, FATAL, NOTE, get_unit, mpp_sum,
! mpp_get_current_pelist, mpp_set_current_pelist, mpp_send, mpp_recv,
! mpp_sync_self, mpp_npes, mpp_pe, mpp_sync</td>
! </tr>
! <tr>
! <td>mpp_domains_mod</td>
! <td>mpp_get_compute_domain, mpp_get_data_domain, mpp_get_global_domain,
! mpp_update_domains, domain2d, DGRID_NE, CENTER, CORNER, NORTH, EAST,
! mpp_get_C2F_index, WEST, SOUTH, mpp_global_field</td>
! </tr>
! <tr>
! <td>mpp_parameter_mod</td>
! <td>EUPDATE, WUPDATE, SUPDATE, NUPDATE</td>
! </tr>
! <tr>
! <td>time_manager_mod</td>
! <td>time_type, get_time, set_time, operator(+), operator(-)</td>
! </tr>
! <tr>
! <td>tracer_manager_mod</td>
! <td>get_tracer_index, get_tracer_names</td>
! </tr>
! <tr>
! <td>test_cases_mod</td>
! <td>test_case, alpha, init_case, init_double_periodic, init_latlon</td>
! </tr>
! </table>

module fv_restart_mod

 use boundary_mod,        only: fill_nested_grid, nested_grid_BC, update_coarse_grid
 use constants_mod,       only: kappa, pi=>pi_8, omega, rdgas, grav, rvgas, cp_air, radius
 use external_ic_mod,     only: get_external_ic, get_cubed_sphere_terrain
 use field_manager_mod,   only: MODEL_ATMOS
 use fms_mod,             only: file_exist
 use fv_arrays_mod,       only: fv_atmos_type, fv_nest_type, fv_grid_bounds_type, R_GRID
 use fv_control_mod,      only: fv_init, fv_end, ngrids
 use fv_diagnostics_mod,  only: prt_maxmin
 use fv_eta_mod,          only: compute_dz_var, compute_dz_L32, set_hybrid_z
 use fv_grid_utils_mod,   only: ptop_min, fill_ghost, g_sum, make_eta_level, cubed_to_latlon, great_circle_dist
 use fv_io_mod,           only: fv_io_init, fv_io_read_restart, fv_io_write_restart, remap_restart, &
                                fv_io_register_restart, fv_io_register_nudge_restart, &
                                fv_io_register_restart_BCs, fv_io_register_restart_BCs_NH, &
                                fv_io_write_BCs, fv_io_read_BCs
 use fv_mp_mod,           only: is_master, switch_current_Atm, mp_reduce_min, mp_reduce_max
 use fv_surf_map_mod,     only: sgh_g, oro_g, del2_cubed_sphere, del4_cubed_sphere
 use fv_timing_mod,       only: timing_on, timing_off
 use fv_treat_da_inc_mod, only: read_da_inc
 use fv_update_phys_mod,  only: fv_update_phys
 use init_hydro_mod,      only: p_var
 use mpp_domains_mod,     only: mpp_get_compute_domain, mpp_get_data_domain, mpp_get_global_domain
 use mpp_domains_mod,     only: mpp_update_domains, domain2d, DGRID_NE
 use mpp_domains_mod,     only: CENTER, CORNER, NORTH, EAST, mpp_get_C2F_index, WEST, SOUTH
 use mpp_domains_mod,     only: mpp_global_field
 use mpp_mod,             only: mpp_chksum, stdout, mpp_error, FATAL, NOTE
 use mpp_mod,             only: get_unit, mpp_sum
 use mpp_mod,             only: mpp_get_current_pelist, mpp_set_current_pelist
 use mpp_mod,             only: mpp_send, mpp_recv, mpp_sync_self, mpp_npes, mpp_pe, mpp_sync
 use mpp_parameter_mod,   only: EUPDATE, WUPDATE, SUPDATE, NUPDATE
 use time_manager_mod,    only: time_type, get_time, set_time, operator(+), operator(-)
 use tracer_manager_mod,  only: get_tracer_names, get_tracer_index
 use test_cases_mod,      only: test_case, alpha, init_case, init_double_periodic, init_latlon
#ifdef MULTI_GASES
 use multi_gases_mod,     only: virq
#endif

 implicit none
 private

 public :: fv_restart_init, fv_restart, fv_write_restart, fv_restart_end
 public :: d2c_setup, d2a_setup

 real(kind=R_GRID), parameter :: cnst_0p20 = 0.20d0
 !--- private data type
 logical :: module_is_initialized = .false.

contains

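!>@brief The subroutine 'fv_restart_init' initializes the restart I/O layer
!! via 'fv_io_init' and marks this module as initialized; it must be called
!! before 'fv_restart'.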
 subroutine fv_restart_init()
 call fv_io_init()
 module_is_initialized = .true.
 end subroutine fv_restart_init

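!>@brief The subroutine 'fv_restart' initializes the model state: on a warm
!! start it reads restart files (remapping vertically when npz_rst differs
!! from npz, and optionally applying a DA increment); on a cold start it
!! generates initial conditions from external data or an idealized test case.
!! It also fills nested-grid interiors and halos and prints checksums and
!! min/max diagnostics of the initial fields.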
 subroutine fv_restart(fv_domain, Atm, dt_atmos, seconds, days, cold_start, grid_type, grids_on_this_pe)
 type(domain2d), intent(inout) :: fv_domain
 type(fv_atmos_type), intent(inout) :: Atm(:)
 real, intent(in) :: dt_atmos
 integer, intent(out) :: seconds
 integer, intent(out) :: days
 logical, intent(inout) :: cold_start
 integer, intent(in) :: grid_type
 logical, intent(INOUT) :: grids_on_this_pe(:)


 integer :: i, j, k, n, ntileMe, nt, iq
 integer :: isc, iec, jsc, jec, npz, npz_rst, ncnst, ntprog, ntdiag
 integer :: isd, ied, jsd, jed
 integer isd_p, ied_p, jsd_p, jed_p, isc_p, iec_p, jsc_p, jec_p, isg, ieg, jsg, jeg, npx_p, npy_p
 real, allocatable :: g_dat(:,:,:)

 integer :: unit
 real, allocatable :: dz1(:)
 real rgrav, f00, ztop, pertn
 logical :: hybrid
 logical :: cold_start_grids(size(atm))
 character(len=128):: tname, errstring, fname, tracer_name
 character(len=120):: fname_ne, fname_sw
 character(len=3) :: gn

 integer :: npts
 real :: sumpertn

 rgrav = 1. / grav

 if(.not.module_is_initialized) call mpp_error(fatal, 'You must call fv_restart_init.')

 ntileme = size(atm(:))

 cold_start_grids(:) = cold_start
 do n = 1, ntileme

 if (is_master()) then
 print*, 'FV_RESTART: ', n, cold_start_grids(n)
 endif

 if (atm(n)%neststruct%nested) then
 write(fname,'(A, I2.2, A)') 'INPUT/fv_core.res.nest', atm(n)%grid_number, '.nc'
 write(fname_ne,'(A, I2.2, A)') 'INPUT/fv_BC_ne.res.nest', atm(n)%grid_number, '.nc'
 write(fname_sw,'(A, I2.2, A)') 'INPUT/fv_BC_sw.res.nest', atm(n)%grid_number, '.nc'
 if (atm(n)%flagstruct%external_ic) then
 if (is_master()) print*, 'External IC set on grid', atm(n)%grid_number, ', re-initializing grid'
 cold_start_grids(n) = .true.
 atm(n)%flagstruct%warm_start = .false. !resetting warm_start flag to avoid FATAL error below
 else
 if (is_master()) print*, 'Searching for nested grid restart file ', trim(fname)
 cold_start_grids(n) = .not. file_exist(fname, atm(n)%domain)
 atm(n)%flagstruct%warm_start = file_exist(fname, atm(n)%domain) !resetting warm_start flag to avoid FATAL error below
 endif
 endif

 if (.not. grids_on_this_pe(n)) then

 !Even if this grid is not on this PE, if it has child grids we must send
 !along the data that is needed.
 !This is a VERY complicated bit of code that attempts to follow the entire decision tree
 ! of the initialization without doing anything. This could very much be cleaned up.

 if (atm(n)%neststruct%nested) then
 if (cold_start_grids(n)) then
 if (atm(n)%parent_grid%flagstruct%n_zs_filter > 0) call fill_nested_grid_topo_halo(atm(n), .false.)
 if (atm(n)%flagstruct%nggps_ic) then
 call fill_nested_grid_topo(atm(n), .false.)
 call fill_nested_grid_topo_halo(atm(n), .false.)
 call nested_grid_bc(atm(n)%ps, atm(n)%parent_grid%ps, atm(n)%neststruct%nest_domain, &
 atm(n)%neststruct%ind_h, atm(n)%neststruct%wt_h, 0, 0, &
 atm(n)%npx, atm(n)%npy, atm(n)%bd, isg, ieg, jsg, jeg, proc_in=.false.)
 call setup_nested_boundary_halo(atm(n), .false.)
 else
 call fill_nested_grid_topo(atm(n), .false.)
 call setup_nested_boundary_halo(atm(n), .false.)
 if ( atm(n)%flagstruct%external_ic .and. grid_type < 4 ) call fill_nested_grid_data(atm(n:n), .false.)
 endif
 else
 if (is_master()) print*, 'Searching for nested grid BC files ', trim(fname_ne), ' ', trim(fname_sw)

 !!!! PROBLEM: file_exist doesn't know to look for fv_BC_ne.res.nest02.nc instead of fv_BC_ne.res.nc on coarse grid
 if (file_exist(fname_ne, atm(n)%domain) .and. file_exist(fname_sw, atm(n)%domain)) then
 else
 if ( is_master() ) write(*,*) 'BC files not found, re-generating nested grid boundary conditions'
 call fill_nested_grid_topo_halo(atm(n), .false.)
 call setup_nested_boundary_halo(atm(n), .false.)
 atm(n)%neststruct%first_step = .true.
 endif
 end if

 if (.not. atm(n)%flagstruct%hydrostatic .and. atm(n)%flagstruct%make_nh .and. &
 (.not. atm(n)%flagstruct%nggps_ic .and. .not. atm(n)%flagstruct%ecmwf_ic) ) then
 call nested_grid_bc(atm(n)%delz, atm(n)%parent_grid%delz, atm(n)%neststruct%nest_domain, &
 atm(n)%neststruct%ind_h, atm(n)%neststruct%wt_h, 0, 0, &
 atm(n)%npx, atm(n)%npy, atm(n)%npz, atm(n)%bd, isg, ieg, jsg, jeg, proc_in=.false.)
 call nested_grid_bc(atm(n)%w, atm(n)%parent_grid%w, atm(n)%neststruct%nest_domain, &
 atm(n)%neststruct%ind_h, atm(n)%neststruct%wt_h, 0, 0, &
 atm(n)%npx, atm(n)%npy, atm(n)%npz, atm(n)%bd, isg, ieg, jsg, jeg, proc_in=.false.)
 endif

 endif

 cycle

 endif
 !This call still appears to be necessary to get isd, etc. correct
 call switch_current_atm(atm(n))

 !--- call fv_io_register_restart to register restart field to be written out in fv_io_write_restart
 call fv_io_register_restart(atm(n)%domain, atm(n:n))
 if (atm(n)%neststruct%nested) call fv_io_register_restart_bcs(atm(n))
 if( .not.cold_start_grids(n) .and. (.not. atm(n)%flagstruct%external_ic) ) then


 if ( atm(n)%flagstruct%npz_rst /= 0 .and. atm(n)%flagstruct%npz_rst /= atm(n)%npz ) then
! Remap vertically the prognostic variables for the chosen vertical resolution
 if( is_master() ) then
 write(*,*) ' '
 write(*,*) '***** Important Note from FV core ********************'
 write(*,*) 'Remapping dynamic IC from', atm(n)%flagstruct%npz_rst, 'levels to ', atm(n)%npz, 'levels'
 write(*,*) '***** End Note from FV core **************************'
 write(*,*) ' '
 endif
 call remap_restart( atm(n)%domain, atm(n:n) )
 if( is_master() ) write(*,*) 'Done remapping dynamical IC'
 else
 if( is_master() ) write(*,*) 'Warm starting, calling fv_io_restart'
 call fv_io_read_restart(atm(n)%domain, atm(n:n))
! ====== PJP added DA functionality ======
 if (atm(n)%flagstruct%read_increment) then
 ! print point in middle of domain for a sanity check
 i = (atm(n)%bd%isc + atm(n)%bd%iec)/2
 j = (atm(n)%bd%jsc + atm(n)%bd%jec)/2
 k = atm(n)%npz/2
 if( is_master() ) write(*,*) 'Calling read_da_inc', atm(n)%pt(i,j,k)
 call read_da_inc(atm(n:n), atm(n)%domain)
 if( is_master() ) write(*,*) 'Back from read_da_inc', atm(n)%pt(i,j,k)
 endif
! ====== end PJP added DA functionality ======
 endif
 endif

!---------------------------------------------------------------------------------------------
! Read, interpolate (latlon to cubed), then remap vertically with terrain adjustment if needed
!---------------------------------------------------------------------------------------------
 if (atm(n)%neststruct%nested) then
 if (cold_start_grids(n)) call fill_nested_grid_topo(atm(n), .true.)
 !if (cold_start_grids(n) .and. .not. Atm(n)%flagstruct%nggps_ic) call fill_nested_grid_topo(Atm(n), .true.)
 if (cold_start_grids(n)) then
 if (atm(n)%parent_grid%flagstruct%n_zs_filter > 0 .or. atm(n)%flagstruct%nggps_ic) call fill_nested_grid_topo_halo(atm(n), .true.)
 end if
 if (atm(n)%flagstruct%external_ic .and. atm(n)%flagstruct%nggps_ic) then
 !Fill nested grid halo with ps
 call nested_grid_bc(atm(n)%ps, atm(n)%parent_grid%ps, atm(n)%neststruct%nest_domain, &
 atm(n)%neststruct%ind_h, atm(n)%neststruct%wt_h, 0, 0, &
 atm(n)%npx, atm(n)%npy, atm(n)%bd, isg, ieg, jsg, jeg, proc_in=.true.)
 endif
 endif
 if ( atm(n)%flagstruct%external_ic ) then
 if( is_master() ) write(*,*) 'Calling get_external_ic'
 call get_external_ic(atm(n:n), atm(n)%domain, cold_start_grids(n))
 if( is_master() ) write(*,*) 'IC generated from the specified external source'
 endif

 seconds = 0; days = 0 ! Restart needs to be modified to record seconds and days.

! Notes by Jeff D.
 ! This logic doesn't work very well.
 ! Shouldn't have read for all tiles then loop over tiles

 isd = atm(n)%bd%isd
 ied = atm(n)%bd%ied
 jsd = atm(n)%bd%jsd
 jed = atm(n)%bd%jed
 ncnst = atm(n)%ncnst
 if( is_master() ) write(*,*) 'in fv_restart ncnst=', ncnst
 isc = atm(n)%bd%isc; iec = atm(n)%bd%iec; jsc = atm(n)%bd%jsc; jec = atm(n)%bd%jec

 ! Init model data
 if(.not.cold_start_grids(n))then
 atm(n)%neststruct%first_step = .false.
 if (atm(n)%neststruct%nested) then
 if ( atm(n)%flagstruct%npz_rst /= 0 .and. atm(n)%flagstruct%npz_rst /= atm(n)%npz ) then
 call setup_nested_boundary_halo(atm(n))
 else
 !If BC file is found, then read them in. Otherwise we need to initialize the BCs.
 if (is_master()) print*, 'Searching for nested grid BC files ', trim(fname_ne), ' ', trim(fname_sw)
 if (file_exist(fname_ne, atm(n)%domain) .and. file_exist(fname_sw, atm(n)%domain)) then
 call fv_io_read_bcs(atm(n))
 else
 if ( is_master() ) write(*,*) 'BC files not found, re-generating nested grid boundary conditions'
 call fill_nested_grid_topo_halo(atm(n), .true.)
 call setup_nested_boundary_halo(atm(n), .true.)
 atm(n)%neststruct%first_step = .true.
 endif
 !Following line to make sure u and v are consistent across processor subdomains
 call mpp_update_domains(atm(n)%u, atm(n)%v, atm(n)%domain, gridtype=dgrid_ne, complete=.true.)
 endif
 endif

 if ( atm(n)%flagstruct%mountain ) then
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
! !!! Additional terrain filter -- should not be called repeatedly !!!
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
 if ( atm(n)%flagstruct%n_zs_filter > 0 ) then
 if ( atm(n)%flagstruct%nord_zs_filter == 2 ) then
 call del2_cubed_sphere(atm(n)%npx, atm(n)%npy, atm(n)%phis, &
 atm(n)%gridstruct%area_64, atm(n)%gridstruct%dx, atm(n)%gridstruct%dy, &
 atm(n)%gridstruct%dxc, atm(n)%gridstruct%dyc, atm(n)%gridstruct%sin_sg, &
 atm(n)%flagstruct%n_zs_filter, cnst_0p20*atm(n)%gridstruct%da_min, &
 .false., oro_g, atm(n)%neststruct%nested, atm(n)%domain, atm(n)%bd, atm(n)%flagstruct%regional)
 if ( is_master() ) write(*,*) 'Warning !!! del-2 terrain filter has been applied ', &
 atm(n)%flagstruct%n_zs_filter, ' times'
 else if( atm(n)%flagstruct%nord_zs_filter == 4 ) then
 call del4_cubed_sphere(atm(n)%npx, atm(n)%npy, atm(n)%phis, atm(n)%gridstruct%area_64, &
 atm(n)%gridstruct%dx, atm(n)%gridstruct%dy, &
 atm(n)%gridstruct%dxc, atm(n)%gridstruct%dyc, atm(n)%gridstruct%sin_sg, &
 atm(n)%flagstruct%n_zs_filter, .false., oro_g, atm(n)%neststruct%nested, &
 atm(n)%domain, atm(n)%bd, atm(n)%flagstruct%regional)
 if ( is_master() ) write(*,*) 'Warning !!! del-4 terrain filter has been applied ', &
 atm(n)%flagstruct%n_zs_filter, ' times'
 endif
 endif

 call mpp_update_domains( atm(n)%phis, atm(n)%domain, complete=.true. )
 else
 atm(n)%phis = 0.
 if( is_master() ) write(*,*) 'phis set to zero'
 endif !mountain

#ifdef SW_DYNAMICS
 atm(n)%pt(:,:,:) = 1.
#else
 if ( .not.atm(n)%flagstruct%hybrid_z ) then
 if(atm(n)%ptop /= atm(n)%ak(1)) call mpp_error(fatal, 'FV restart: ptop not equal Atm(n)%ak(1)')
 else
 atm(n)%ptop = atm(n)%ak(1) ; atm(n)%ks = 0
 endif
 call p_var(atm(n)%npz, isc, iec, jsc, jec, atm(n)%ptop, ptop_min, &
 atm(n)%delp, atm(n)%delz, atm(n)%pt, atm(n)%ps, atm(n)%pe, atm(n)%peln, &
 atm(n)%pk, atm(n)%pkz, kappa, atm(n)%q, atm(n)%ng, &
 ncnst, atm(n)%gridstruct%area_64, atm(n)%flagstruct%dry_mass, &
 atm(n)%flagstruct%adjust_dry_mass, atm(n)%flagstruct%mountain, &
 atm(n)%flagstruct%moist_phys, atm(n)%flagstruct%hydrostatic, &
 atm(n)%flagstruct%nwat, atm(n)%domain, atm(n)%flagstruct%make_nh)

#endif
 if ( grid_type < 7 .and. grid_type /= 4 ) then
! Fill big values in the non-existing corner regions:
! call fill_ghost(Atm(n)%phis, Atm(n)%npx, Atm(n)%npy, big_number)
 do j=jsd,jed+1
 do i=isd,ied+1
 atm(n)%gridstruct%fc(i,j) = 2.*omega*( -cos(atm(n)%gridstruct%grid(i,j,1))*cos(atm(n)%gridstruct%grid(i,j,2))*sin(alpha) + &
 sin(atm(n)%gridstruct%grid(i,j,2))*cos(alpha) )
 enddo
 enddo
 do j=jsd,jed
 do i=isd,ied
 atm(n)%gridstruct%f0(i,j) = 2.*omega*( -cos(atm(n)%gridstruct%agrid(i,j,1))*cos(atm(n)%gridstruct%agrid(i,j,2))*sin(alpha) + &
 sin(atm(n)%gridstruct%agrid(i,j,2))*cos(alpha) )
 enddo
 enddo
 else
 f00 = 2.*omega*sin(atm(n)%flagstruct%deglat/180.*pi)
 do j=jsd,jed+1
 do i=isd,ied+1
 atm(n)%gridstruct%fc(i,j) = f00
 enddo
 enddo
 do j=jsd,jed
 do i=isd,ied
 atm(n)%gridstruct%f0(i,j) = f00
 enddo
 enddo
 endif
 else
 if ( atm(n)%flagstruct%warm_start ) then
 call mpp_error(fatal, 'FV restart files not found; set warm_start = .F. if cold_start is desired.')
 endif
! Cold start
 if ( atm(n)%flagstruct%make_hybrid_z ) then
 hybrid = .false.
 else
 hybrid = atm(n)%flagstruct%hybrid_z
 endif
 if (grid_type < 4) then
 if ( .not. atm(n)%flagstruct%external_ic ) then
 call init_case(atm(n)%u,atm(n)%v,atm(n)%w,atm(n)%pt,atm(n)%delp,atm(n)%q, &
 atm(n)%phis, atm(n)%ps,atm(n)%pe, atm(n)%peln,atm(n)%pk,atm(n)%pkz, &
 atm(n)%uc,atm(n)%vc, atm(n)%ua,atm(n)%va, &
 atm(n)%ak, atm(n)%bk, atm(n)%gridstruct, atm(n)%flagstruct, &
 atm(n)%npx, atm(n)%npy, atm(n)%npz, atm(n)%ng, &
 ncnst, atm(n)%flagstruct%nwat, &
 atm(n)%flagstruct%ndims, atm(n)%flagstruct%ntiles, &
 atm(n)%flagstruct%dry_mass, &
 atm(n)%flagstruct%mountain, &
 atm(n)%flagstruct%moist_phys, atm(n)%flagstruct%hydrostatic, &
 hybrid, atm(n)%delz, atm(n)%ze0, &
 atm(n)%flagstruct%adiabatic, atm(n)%ks, atm(n)%neststruct%npx_global, &
 atm(n)%ptop, atm(n)%domain, atm(n)%tile, atm(n)%bd)
 endif
 elseif (grid_type == 4) then
 call init_double_periodic(atm(n)%u,atm(n)%v,atm(n)%w,atm(n)%pt, &
 atm(n)%delp,atm(n)%q,atm(n)%phis, atm(n)%ps,atm(n)%pe, &
 atm(n)%peln,atm(n)%pk,atm(n)%pkz, &
 atm(n)%uc,atm(n)%vc, atm(n)%ua,atm(n)%va, &
 atm(n)%ak, atm(n)%bk, &
 atm(n)%gridstruct, atm(n)%flagstruct, &
 atm(n)%npx, atm(n)%npy, atm(n)%npz, atm(n)%ng, &
 ncnst, atm(n)%flagstruct%nwat, &
 atm(n)%flagstruct%ndims, atm(n)%flagstruct%ntiles, &
 atm(n)%flagstruct%dry_mass, atm(n)%flagstruct%mountain, &
 atm(n)%flagstruct%moist_phys, atm(n)%flagstruct%hydrostatic, &
 hybrid, atm(n)%delz, atm(n)%ze0, atm(n)%ks, atm(n)%ptop, &
 atm(n)%domain, atm(n)%tile, atm(n)%bd)
 if( is_master() ) write(*,*) 'Doubly Periodic IC generated'
 elseif (grid_type == 5 .or. grid_type == 6) then
 call init_latlon(atm(n)%u,atm(n)%v,atm(n)%pt,atm(n)%delp,atm(n)%q, &
 atm(n)%phis, atm(n)%ps,atm(n)%pe, &
 atm(n)%peln,atm(n)%pk,atm(n)%pkz, &
 atm(n)%uc,atm(n)%vc, atm(n)%ua,atm(n)%va, &
 atm(n)%ak, atm(n)%bk, atm(n)%gridstruct, &
 atm(n)%npx, atm(n)%npy, atm(n)%npz, atm(n)%ng, ncnst, &
 atm(n)%flagstruct%ndims, atm(n)%flagstruct%ntiles, &
 atm(n)%flagstruct%dry_mass, &
 atm(n)%flagstruct%mountain, &
 atm(n)%flagstruct%moist_phys, hybrid, atm(n)%delz, &
 atm(n)%ze0, atm(n)%domain, atm(n)%tile)
 endif

 !Turn this off on the nested grid if you are just interpolating topography from the coarse grid!
 if ( atm(n)%flagstruct%fv_land ) then
 do j=jsc,jec
 do i=isc,iec
 atm(n)%sgh(i,j) = sgh_g(i,j)
 atm(n)%oro(i,j) = oro_g(i,j)
 enddo
 enddo
 endif


 !Set up nested grids
 !Currently even though we do fill in the nested-grid IC from
 ! init_case or external_ic we appear to overwrite it using
 ! coarse-grid data
 !if (Atm(n)%neststruct%nested) then
 ! Only fill nested-grid data if external_ic is called for the cubed-sphere grid
 if (atm(n)%neststruct%nested) then
 call setup_nested_boundary_halo(atm(n), .true.)
 if (atm(n)%flagstruct%external_ic .and. .not. atm(n)%flagstruct%nggps_ic .and. grid_type < 4 ) then
 call fill_nested_grid_data(atm(n:n))
 endif
 end if

 endif !end cold_start check

 if ( (.not.atm(n)%flagstruct%hydrostatic) .and. atm(n)%flagstruct%make_nh .and. atm(n)%neststruct%nested) then
 call nested_grid_bc(atm(n)%delz, atm(n)%parent_grid%delz, atm(n)%neststruct%nest_domain, &
 atm(n)%neststruct%ind_h, atm(n)%neststruct%wt_h, 0, 0, &
 atm(n)%npx, atm(n)%npy, npz, atm(n)%bd, isg, ieg, jsg, jeg, proc_in=.true.)
 call nested_grid_bc(atm(n)%w, atm(n)%parent_grid%w, atm(n)%neststruct%nest_domain, &
 atm(n)%neststruct%ind_h, atm(n)%neststruct%wt_h, 0, 0, &
 atm(n)%npx, atm(n)%npy, npz, atm(n)%bd, isg, ieg, jsg, jeg, proc_in=.true.)
 call fv_io_register_restart_bcs_nh(atm(n)) !needed to register nested-grid BCs not registered earlier
 endif

 end do


 do n = ntileme,1,-1
 if (atm(n)%neststruct%nested .and. atm(n)%flagstruct%external_ic .and. &
 atm(n)%flagstruct%grid_type < 4 .and. cold_start_grids(n)) then
 call fill_nested_grid_data_end(atm(n), grids_on_this_pe(n))
 endif
 end do

 do n = 1, ntileme
 if (.not. grids_on_this_pe(n)) cycle

 isd = atm(n)%bd%isd
 ied = atm(n)%bd%ied
 jsd = atm(n)%bd%jsd
 jed = atm(n)%bd%jed
 ncnst = atm(n)%ncnst
 ntprog = size(atm(n)%q,4)
 ntdiag = size(atm(n)%qdiag,4)
 isc = atm(n)%bd%isc; iec = atm(n)%bd%iec; jsc = atm(n)%bd%jsc; jec = atm(n)%bd%jec

!---------------------------------------------------------------------------------------------
! Transform the (starting) Eulerian vertical coordinate from sigma-p to hybrid_z
 if ( atm(n)%flagstruct%hybrid_z ) then
 if ( atm(n)%flagstruct%make_hybrid_z ) then
 allocate ( dz1(atm(n)%npz) )
 if( atm(n)%npz==32 ) then
 call compute_dz_l32(atm(n)%npz, ztop, dz1)
 else
 ztop = 45.e3
 call compute_dz_var(atm(n)%npz, ztop, dz1)
 endif
 call set_hybrid_z(isc, iec, jsc, jec, atm(n)%ng, atm(n)%npz, ztop, dz1, rgrav, &
 atm(n)%phis, atm(n)%ze0)
 deallocate ( dz1 )
! call prt_maxmin('ZE0', Atm(n)%ze0, isc, iec, jsc, jec, 0, Atm(n)%npz, 1.E-3)
! call prt_maxmin('DZ0', Atm(n)%delz, isc, iec, jsc, jec, 0, Atm(n)%npz, 1. )
 endif
! call make_eta_level(Atm(n)%npz, Atm(n)%pe, area, Atm(n)%ks, Atm(n)%ak, Atm(n)%bk, Atm(n)%ptop)
 endif
!---------------------------------------------------------------------------------------------

 if (atm(n)%flagstruct%add_noise > 0.) then
 write(errstring,'(A, E16.9)') "Adding thermal noise of amplitude ", atm(n)%flagstruct%add_noise
 call mpp_error(note, errstring)
 call random_seed
 npts = 0
 sumpertn = 0.
 do k=1,atm(n)%npz
 do j=jsc,jec
 do i=isc,iec
 call random_number(pertn)
 atm(n)%pt(i,j,k) = atm(n)%pt(i,j,k) + pertn*atm(n)%flagstruct%add_noise
 npts = npts + 1
 sumpertn = sumpertn + (pertn*atm(n)%flagstruct%add_noise)**2
 enddo
 enddo
 enddo
 call mpp_update_domains(atm(n)%pt, atm(n)%domain)
 call mpp_sum(sumpertn)
 call mpp_sum(npts)
 write(errstring,'(A, E16.9)') "RMS added noise: ", sqrt(sumpertn/npts)
 call mpp_error(note, errstring)
 endif

 if (atm(n)%grid_number > 1) then
 write(gn,'(A2, I1)') " g", atm(n)%grid_number
 else
 gn = ''
 end if

 unit = stdout()
 write(unit,*)
 write(unit,*) 'fv_restart u ', trim(gn),' = ', mpp_chksum(atm(n)%u(isc:iec,jsc:jec,:))
 write(unit,*) 'fv_restart v ', trim(gn),' = ', mpp_chksum(atm(n)%v(isc:iec,jsc:jec,:))
 if ( .not.atm(n)%flagstruct%hydrostatic ) &
 write(unit,*) 'fv_restart w ', trim(gn),' = ', mpp_chksum(atm(n)%w(isc:iec,jsc:jec,:))
 write(unit,*) 'fv_restart delp', trim(gn),' = ', mpp_chksum(atm(n)%delp(isc:iec,jsc:jec,:))
 write(unit,*) 'fv_restart phis', trim(gn),' = ', mpp_chksum(atm(n)%phis(isc:iec,jsc:jec))

#ifdef SW_DYNAMICS
 call prt_maxmin('H ', atm(n)%delp, isc, iec, jsc, jec, atm(n)%ng, 1, rgrav)
#else
 write(unit,*) 'fv_restart pt ', trim(gn),' = ', mpp_chksum(atm(n)%pt(isc:iec,jsc:jec,:))
 if (ntprog>0) &
 write(unit,*) 'fv_restart q(prog) nq ', trim(gn),' =',ntprog, mpp_chksum(atm(n)%q(isc:iec,jsc:jec,:,:))
 if (ntdiag>0) &
 write(unit,*) 'fv_restart q(diag) nq ', trim(gn),' =',ntdiag, mpp_chksum(atm(n)%qdiag(isc:iec,jsc:jec,:,:))
 do iq=1,min(17, ntprog) ! Check up to 17 tracers
 call get_tracer_names(model_atmos, iq, tracer_name)
 write(unit,*) 'fv_restart '//trim(tracer_name)//' = ', mpp_chksum(atm(n)%q(isc:iec,jsc:jec,:,iq))
 enddo
!---------------
! Check Min/Max:
!---------------
 call pmaxmn_g('ZS', atm(n)%phis, isc, iec, jsc, jec, 1, rgrav, atm(n)%gridstruct%area_64, atm(n)%domain)
 call pmaxmn_g('PS', atm(n)%ps, isc, iec, jsc, jec, 1, 0.01, atm(n)%gridstruct%area_64, atm(n)%domain)
 call pmaxmn_g('T ', atm(n)%pt, isc, iec, jsc, jec, atm(n)%npz, 1., atm(n)%gridstruct%area_64, atm(n)%domain)

! Check tracers:
 do i=1, ntprog
 call get_tracer_names ( model_atmos, i, tname )
 call pmaxmn_g(trim(tname), atm(n)%q(isd:ied,jsd:jed,1:atm(n)%npz,i:i), isc, iec, jsc, jec, atm(n)%npz, &
 1., atm(n)%gridstruct%area_64, atm(n)%domain)
 enddo
#endif
 call prt_maxmin('U ', atm(n)%u(isc:iec,jsc:jec,1:atm(n)%npz), isc, iec, jsc, jec, 0, atm(n)%npz, 1.)
 call prt_maxmin('V ', atm(n)%v(isc:iec,jsc:jec,1:atm(n)%npz), isc, iec, jsc, jec, 0, atm(n)%npz, 1.)

 if ( (.not.atm(n)%flagstruct%hydrostatic) .and. atm(n)%flagstruct%make_nh ) then
 call mpp_error(note, " Initializing w to 0")
 atm(n)%w = 0.
 if ( .not.atm(n)%flagstruct%hybrid_z ) then
 call mpp_error(note, " Initializing delz from hydrostatic state")
 do k=1,atm(n)%npz
 do j=jsc,jec
 do i=isc,iec
 atm(n)%delz(i,j,k) = (rdgas*rgrav)*atm(n)%pt(i,j,k)*(atm(n)%peln(i,k,j)-atm(n)%peln(i,k+1,j))
 enddo
 enddo
 enddo
 endif
 endif

 if ( .not.atm(n)%flagstruct%hydrostatic ) &
 call pmaxmn_g('W ', atm(n)%w, isc, iec, jsc, jec, atm(n)%npz, 1., atm(n)%gridstruct%area_64, atm(n)%domain)

 if (is_master()) write(unit,*)

!--------------------------------------------
! Initialize surface winds for flux coupler:
!--------------------------------------------
 if ( .not. atm(n)%flagstruct%srf_init ) then
 call cubed_to_latlon(atm(n)%u, atm(n)%v, atm(n)%ua, atm(n)%va, &
 atm(n)%gridstruct, &
 atm(n)%npx, atm(n)%npy, atm(n)%npz, 1, &
 atm(n)%gridstruct%grid_type, atm(n)%domain, &
 atm(n)%gridstruct%nested, atm(n)%flagstruct%c2l_ord, atm(n)%bd)
 do j=jsc,jec
 do i=isc,iec
 atm(n)%u_srf(i,j) = atm(n)%ua(i,j,atm(n)%npz)
 atm(n)%v_srf(i,j) = atm(n)%va(i,j,atm(n)%npz)
 enddo
 enddo
 atm(n)%flagstruct%srf_init = .true.
 endif

 end do ! n_tile

 end subroutine fv_restart

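!>@brief The subroutine 'setup_nested_boundary_halo' fills the halo of a
!! nested grid with data (delp, tracers, pt, w, delz, u, v) interpolated
!! from the parent grid, sending or receiving as appropriate for this PE.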
 subroutine setup_nested_boundary_halo(Atm, proc_in)
 !This routine is now taking the "easy way out" with regards
 ! to pt (virtual potential temperature), q_con, and cappa;
 ! their halo values are now set up when the BCs are set up
 ! in fv_dynamics

 type(fv_atmos_type), intent(INOUT) :: Atm
 logical, INTENT(IN), OPTIONAL :: proc_in
 real, allocatable :: g_dat(:,:,:), g_dat2(:,:,:)
 real, allocatable :: pt_coarse(:,:,:)
 integer i,j,k,nq, sphum, ncnst, istart, iend, npz, nwat
 integer isc, iec, jsc, jec, isd, ied, jsd, jed, is, ie, js, je
 integer isd_p, ied_p, jsd_p, jed_p, isc_p, iec_p, jsc_p, jec_p, isg, ieg, jsg, jeg, npx_p, npy_p
 real zvir
 logical process
 integer :: liq_wat, ice_wat, rainwat, snowwat, graupel
 real :: qv, dp1, q_liq, q_sol, q_con, cvm, cappa, dp, pt, dz, pkz, rdg

 if (PRESENT(proc_in)) then
 process = proc_in
 else
 process = .true.
 endif

 isd = atm%bd%isd
 ied = atm%bd%ied
 jsd = atm%bd%jsd
 jed = atm%bd%jed
 ncnst = atm%ncnst
 isc = atm%bd%isc; iec = atm%bd%iec; jsc = atm%bd%jsc; jec = atm%bd%jec
 is = atm%bd%is ; ie = atm%bd%ie ; js = atm%bd%js ; je = atm%bd%je
 npz = atm%npz
 nwat = atm%flagstruct%nwat

 if (nwat >= 3) then
 liq_wat = get_tracer_index(model_atmos, 'liq_wat')
 ice_wat = get_tracer_index(model_atmos, 'ice_wat')
 endif
 if ( nwat == 5 ) then
 rainwat = get_tracer_index(model_atmos, 'rainwat')
 snowwat = get_tracer_index(model_atmos, 'snowwat')
 elseif (nwat == 6) then
 rainwat = get_tracer_index(model_atmos, 'rainwat')
 snowwat = get_tracer_index(model_atmos, 'snowwat')
 graupel = get_tracer_index(model_atmos, 'graupel')
 endif

 call mpp_get_data_domain( atm%parent_grid%domain, &
 isd_p, ied_p, jsd_p, jed_p )
 call mpp_get_compute_domain( atm%parent_grid%domain, &
 isc_p, iec_p, jsc_p, jec_p )
 call mpp_get_global_domain( atm%parent_grid%domain, &
 isg, ieg, jsg, jeg, xsize=npx_p, ysize=npy_p)

 call nested_grid_bc(atm%delp, atm%parent_grid%delp, atm%neststruct%nest_domain, &
 atm%neststruct%ind_h, atm%neststruct%wt_h, 0, 0, &
 atm%npx, atm%npy, npz, atm%bd, isg, ieg, jsg, jeg, proc_in=process)
 do nq=1,ncnst
 call nested_grid_bc(atm%q(:,:,:,nq), &
 atm%parent_grid%q(:,:,:,nq), atm%neststruct%nest_domain, &
 atm%neststruct%ind_h, atm%neststruct%wt_h, 0, 0, &
 atm%npx, atm%npy, npz, atm%bd, isg, ieg, jsg, jeg, proc_in=process)
 end do

 if (process) then
 if (is_master()) print*, 'FILLING NESTED GRID HALO'
 else
 if (is_master()) print*, 'SENDING DATA TO FILL NESTED GRID HALO'
 endif


 !Filling phis?
 !In idealized test cases, where the topography is EXACTLY known (ex case 13),
 !interpolating the topography yields a much worse result. In comparison in
 !real topography cases little difference is seen.

 !This is probably because the halo phis, which is used to compute
 !geopotential height (gz, gh), only affects the interior by being
 !used to compute corner gz in a2b_ord[24]. We might suppose this
 !computation would be more accurate when using values of phis which
 !are more consistent with those on the interior (ie the exactly-known
 !values) than the crude values given through linear interpolation.

 !For real topography cases, or in cases in which the coarse-grid topo
 ! is smoothed, we fill the boundary halo with the coarse-grid topo.

#ifndef SW_DYNAMICS
 !pt --- actually temperature

 call nested_grid_bc(atm%pt, atm%parent_grid%pt, atm%neststruct%nest_domain, &
 atm%neststruct%ind_h, atm%neststruct%wt_h, 0, 0, &
 atm%npx, atm%npy, npz, atm%bd, isg, ieg, jsg, jeg, proc_in=process)

 if (.not. atm%flagstruct%hydrostatic) then

 !w
 call nested_grid_bc(atm%w(:,:,:), &
 atm%parent_grid%w(:,:,:), &
 atm%neststruct%nest_domain, atm%neststruct%ind_h, atm%neststruct%wt_h, 0, 0, &
 atm%npx, atm%npy, npz, atm%bd, isg, ieg, jsg, jeg, proc_in=process)


 !delz
 call nested_grid_bc(atm%delz(:,:,:), &
 atm%parent_grid%delz(:,:,:), &
 atm%neststruct%nest_domain, atm%neststruct%ind_h, atm%neststruct%wt_h, 0, 0, &
 atm%npx, atm%npy, npz, atm%bd, isg, ieg, jsg, jeg, proc_in=process)

 end if

#endif

 if (atm%neststruct%child_proc) then
 call nested_grid_bc(atm%u, atm%parent_grid%u(:,:,:), &
 atm%neststruct%nest_domain, atm%neststruct%ind_u, atm%neststruct%wt_u, 0, 1, &
 atm%npx, atm%npy, npz, atm%bd, isg, ieg, jsg, jeg, proc_in=process)
 call nested_grid_bc(atm%v, atm%parent_grid%v(:,:,:), &
 atm%neststruct%nest_domain, atm%neststruct%ind_v, atm%neststruct%wt_v, 1, 0, &
 atm%npx, atm%npy, npz, atm%bd, isg, ieg, jsg, jeg, proc_in=process)
 else
 call nested_grid_bc(atm%parent_grid%u(:,:,:), &
 atm%neststruct%nest_domain, 0, 1)
 call nested_grid_bc(atm%parent_grid%v(:,:,:), &
 atm%neststruct%nest_domain, 1, 0)
 endif


 if (process) then
!!$#ifdef SW_DYNAMICS
!!$ !ps: first level only
!!$ !This is only valid for shallow-water simulations
!!$ do j=jsd,jed
!!$ do i=isd,ied
!!$
!!$ Atm%ps(i,j) = Atm%delp(i,j,1)/grav
!!$
!!$ end do
!!$ end do
!!$#endif
 call mpp_update_domains(atm%u, atm%v, atm%domain, gridtype=dgrid_ne)
 call mpp_update_domains(atm%w, atm%domain, complete=.true.) ! needs an update-domain for rayleigh damping
 endif

 call mpp_sync_self()

 end subroutine setup_nested_boundary_halo

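!>@brief The subroutine 'fill_nested_grid_topo_halo' fills the nested-grid
!! halo of the surface geopotential 'phis' with terrain interpolated from
!! the parent grid.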
 subroutine fill_nested_grid_topo_halo(Atm, proc_in)
 type(fv_atmos_type), intent(INOUT) :: Atm
 logical, intent(IN), OPTIONAL :: proc_in
 integer :: isg, ieg, jsg, jeg

 if (.not. atm%neststruct%nested) return

 call mpp_get_global_domain( atm%parent_grid%domain, &
 isg, ieg, jsg, jeg)

 if (is_master()) print*, ' FILLING NESTED GRID HALO WITH INTERPOLATED TERRAIN'
 call nested_grid_bc(atm%phis, atm%parent_grid%phis, atm%neststruct%nest_domain, &
 atm%neststruct%ind_h, atm%neststruct%wt_h, 0, 0, &
 atm%npx, atm%npy, atm%bd, isg, ieg, jsg, jeg, proc_in=proc_in)

 end subroutine fill_nested_grid_topo_halo

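!>@brief The subroutine 'fill_nested_grid_topo' fills the nested-grid
!! interior of 'phis' with terrain interpolated from the parent grid,
!! gathering the parent-grid field with 'mpp_global_field' and sending it
!! to the nested-grid PEs.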
 subroutine fill_nested_grid_topo(Atm, proc_in)
 type(fv_atmos_type), intent(INOUT) :: Atm
 logical, intent(IN), OPTIONAL :: proc_in
 real, allocatable :: g_dat(:,:,:)
 integer :: p, sending_proc
 integer :: isd_p, ied_p, jsd_p, jed_p
 integer :: isg, ieg, jsg, jeg

 logical :: process

 if (present(proc_in)) then
 process = proc_in
 else
 process = .true.
 endif

!!$ if (.not. Atm%neststruct%nested) return

 call mpp_get_global_domain( atm%parent_grid%domain, &
 isg, ieg, jsg, jeg)
 call mpp_get_data_domain( atm%parent_grid%domain, &
 isd_p, ied_p, jsd_p, jed_p )

 allocate(g_dat( isg:ieg, jsg:jeg, 1) )
 call timing_on('COMM_TOTAL')

 !!! FIXME: For whatever reason this code CRASHES if the lower-left corner
 !!! of the nested grid lies within the first PE of a grid tile.

 if (is_master() .and. .not. atm%flagstruct%external_ic ) print*, ' FILLING NESTED GRID INTERIOR WITH INTERPOLATED TERRAIN'

 sending_proc = atm%parent_grid%pelist(1) + (atm%neststruct%parent_tile-1)*atm%parent_grid%npes_per_tile
 if (atm%neststruct%parent_proc .and. atm%neststruct%parent_tile == atm%parent_grid%tile) then
 call mpp_global_field( &
 atm%parent_grid%domain, &
 atm%parent_grid%phis(isd_p:ied_p,jsd_p:jed_p), g_dat(isg:,jsg:,1), position=center)
 if (mpp_pe() == sending_proc) then
 do p=1,size(atm%pelist)
 call mpp_send(g_dat,size(g_dat),atm%pelist(p))
 enddo
 endif
 endif

 if (any(atm%pelist == mpp_pe())) then
 call mpp_recv(g_dat, size(g_dat), sending_proc)
 endif

 call timing_off('COMM_TOTAL')
 if (process) call fill_nested_grid(atm%phis, g_dat(isg:,jsg:,1), &
 atm%neststruct%ind_h, atm%neststruct%wt_h, &
 0, 0, isg, ieg, jsg, jeg, atm%bd)

 call mpp_sync_self

 deallocate(g_dat)


 end subroutine fill_nested_grid_topo

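!>@brief The subroutine 'fill_nested_grid_data' initializes the interior
!! of a nested grid from parent-grid data: delp, tracers, pt, w, delz, u,
!! and v are each gathered globally on the parent tile, broadcast to the
!! nested-grid PEs, and interpolated with 'fill_nested_grid'.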
 subroutine fill_nested_grid_data(Atm, proc_in)
 type(fv_atmos_type), intent(INOUT) :: Atm(:) !Only intended to be one element; needed for cubed_sphere_terrain
 logical, intent(IN), OPTIONAL :: proc_in
 real, allocatable :: g_dat(:,:,:), pt_coarse(:,:,:)
 integer :: i,j,k,nq, sphum, ncnst, istart, iend, npz
 integer :: isc, iec, jsc, jec, isd, ied, jsd, jed
 integer :: isd_p, ied_p, jsd_p, jed_p, isc_p, iec_p, jsc_p, jec_p
 integer :: isg, ieg, jsg, jeg, npx_p, npy_p
 integer :: isg_n, ieg_n, jsg_n, jeg_n, npx_n, npy_n
 real zvir, gh0, p1(2), p2(2), r, r0

 integer :: p, sending_proc, gid, n
 logical process

 if (present(proc_in)) then
 process = proc_in
 else
 process = .true.
 endif

 isd = atm(1)%bd%isd
 ied = atm(1)%bd%ied
 jsd = atm(1)%bd%jsd
 jed = atm(1)%bd%jed
 ncnst = atm(1)%ncnst
 isc = atm(1)%bd%isc; iec = atm(1)%bd%iec; jsc = atm(1)%bd%jsc; jec = atm(1)%bd%jec
 npz = atm(1)%npz


 gid = mpp_pe()

 sending_proc = atm(1)%parent_grid%pelist(1) + (atm(1)%neststruct%parent_tile-1)*atm(1)%parent_grid%npes_per_tile

 call mpp_get_data_domain( atm(1)%parent_grid%domain, &
 isd_p, ied_p, jsd_p, jed_p )
 call mpp_get_compute_domain( atm(1)%parent_grid%domain, &
 isc_p, iec_p, jsc_p, jec_p )
 call mpp_get_global_domain( atm(1)%parent_grid%domain, &
 isg, ieg, jsg, jeg, xsize=npx_p, ysize=npy_p)

 if (process) then

 call mpp_error(note, "FILLING NESTED GRID DATA")

 else

 call mpp_error(note, "SENDING TO FILL NESTED GRID DATA")

 endif

 !delp

 allocate(g_dat( isg:ieg, jsg:jeg, npz) )

 call timing_on('COMM_TOTAL')

 !Call mpp_global_field on the procs that have the required data.
 !Then broadcast from the head PE to the receiving PEs
 if (atm(1)%neststruct%parent_proc .and. atm(1)%neststruct%parent_tile == atm(1)%parent_grid%tile) then
 call mpp_global_field( &
 atm(1)%parent_grid%domain, &
 atm(1)%parent_grid%delp(isd_p:ied_p,jsd_p:jed_p,:), g_dat, position=center)
 if (gid == sending_proc) then !crazy logic but what we have for now
 do p=1,size(atm(1)%pelist)
 call mpp_send(g_dat,size(g_dat),atm(1)%pelist(p))
 enddo
 endif
 endif
 if (any(atm(1)%pelist == gid)) then
 call mpp_recv(g_dat, size(g_dat), sending_proc)
 endif

 call timing_off('COMM_TOTAL')
 if (process) call fill_nested_grid(atm(1)%delp, g_dat, &
 atm(1)%neststruct%ind_h, atm(1)%neststruct%wt_h, &
 0, 0, isg, ieg, jsg, jeg, npz, atm(1)%bd)

 call mpp_sync_self

 !tracers
 do nq=1,ncnst

 call timing_on('COMM_TOTAL')

 if (any(atm(1)%parent_grid%pelist == gid) .and. atm(1)%neststruct%parent_tile == atm(1)%parent_grid%tile) then
 call mpp_global_field( &
 atm(1)%parent_grid%domain, &
 atm(1)%parent_grid%q(isd_p:ied_p,jsd_p:jed_p,:,nq), g_dat, position=center)
 if (gid == sending_proc) then
 do p=1,size(atm(1)%pelist)
 call mpp_send(g_dat,size(g_dat),atm(1)%pelist(p))
 enddo
 endif
 endif
 if (any(atm(1)%pelist == gid)) then
 call mpp_recv(g_dat, size(g_dat), sending_proc)
 endif

 call timing_off('COMM_TOTAL')
 if (process) call fill_nested_grid(atm(1)%q(isd:ied,jsd:jed,:,nq), g_dat, &
 atm(1)%neststruct%ind_h, atm(1)%neststruct%wt_h, &
 0, 0, isg, ieg, jsg, jeg, npz, atm(1)%bd)

 call mpp_sync_self

 end do

 !Note that we do NOT fill in phis (surface geopotential), which should
 !be computed exactly instead of being interpolated.


#ifndef SW_DYNAMICS
 !pt --- actually temperature

 call timing_on('COMM_TOTAL')

 if (any(atm(1)%parent_grid%pelist == gid) .and. atm(1)%neststruct%parent_tile == atm(1)%parent_grid%tile) then
 call mpp_global_field( &
 atm(1)%parent_grid%domain, &
 atm(1)%parent_grid%pt(isd_p:ied_p,jsd_p:jed_p,:), g_dat, position=center)
 if (gid == sending_proc) then
 do p=1,size(atm(1)%pelist)
 call mpp_send(g_dat,size(g_dat),atm(1)%pelist(p))
 enddo
 endif
 endif
 if (any(atm(1)%pelist == gid)) then
 call mpp_recv(g_dat, size(g_dat), sending_proc)
 endif

 call mpp_sync_self

 call timing_off('COMM_TOTAL')
 if (process) call fill_nested_grid(atm(1)%pt, g_dat, &
 atm(1)%neststruct%ind_h, atm(1)%neststruct%wt_h, &
 0, 0, isg, ieg, jsg, jeg, npz, atm(1)%bd)


 if ( atm(1)%flagstruct%nwat > 0 ) then
 sphum = get_tracer_index(model_atmos, 'sphum')
 else
 sphum = 1
 endif
 if ( atm(1)%parent_grid%flagstruct%adiabatic .or. atm(1)%parent_grid%flagstruct%do_Held_Suarez ) then
 zvir = 0. ! no virtual effect
 else
 zvir = rvgas/rdgas - 1.
 endif

 call timing_on('COMM_TOTAL')

 if (any(atm(1)%parent_grid%pelist == gid) .and. atm(1)%neststruct%parent_tile == atm(1)%parent_grid%tile) then
 call mpp_global_field( &
 atm(1)%parent_grid%domain, &
 atm(1)%parent_grid%pkz(isc_p:iec_p,jsc_p:jec_p,:), g_dat, position=center)
 if (gid == sending_proc) then
 do p=1,size(atm(1)%pelist)
 call mpp_send(g_dat,size(g_dat),atm(1)%pelist(p))
 enddo
 endif
 endif
 if (any(atm(1)%pelist == gid)) then
 call mpp_recv(g_dat, size(g_dat), sending_proc)
 endif

 call mpp_sync_self

 call timing_off('COMM_TOTAL')
 if (process) then
 allocate(pt_coarse(isd:ied,jsd:jed,npz))
 call fill_nested_grid(pt_coarse, g_dat, &
 atm(1)%neststruct%ind_h, atm(1)%neststruct%wt_h, &
 0, 0, isg, ieg, jsg, jeg, npz, atm(1)%bd)

 if (atm(1)%bd%is == 1) then
 do k=1,npz
 do j=atm(1)%bd%jsd,atm(1)%bd%jed
 do i=atm(1)%bd%isd,0
#ifdef MULTI_GASES
 atm(1)%pt(i,j,k) = cp_air*atm(1)%pt(i,j,k)/pt_coarse(i,j,k)*virq(atm(1)%q(i,j,k,:))
#else
 atm(1)%pt(i,j,k) = cp_air*atm(1)%pt(i,j,k)/pt_coarse(i,j,k)*(1.+zvir*atm(1)%q(i,j,k,sphum))
#endif
 end do
 end do
 end do
 end if

 if (atm(1)%bd%js == 1) then
 if (atm(1)%bd%is == 1) then
 istart = atm(1)%bd%is
 else
 istart = atm(1)%bd%isd
 end if
 if (atm(1)%bd%ie == atm(1)%npx-1) then
 iend = atm(1)%bd%ie
 else
 iend = atm(1)%bd%ied
 end if

 do k=1,npz
 do j=atm(1)%bd%jsd,0
 do i=istart,iend
#ifdef MULTI_GASES
 atm(1)%pt(i,j,k) = cp_air*atm(1)%pt(i,j,k)/pt_coarse(i,j,k)*virq(atm(1)%q(i,j,k,:))
#else
 atm(1)%pt(i,j,k) = cp_air*atm(1)%pt(i,j,k)/pt_coarse(i,j,k)*(1.+zvir*atm(1)%q(i,j,k,sphum))
#endif
 end do
 end do
 end do
 end if

 if (atm(1)%bd%ie == atm(1)%npx-1) then
 do k=1,npz
 do j=atm(1)%bd%jsd,atm(1)%bd%jed
 do i=atm(1)%npx,atm(1)%bd%ied
#ifdef MULTI_GASES
 atm(1)%pt(i,j,k) = cp_air*atm(1)%pt(i,j,k)/pt_coarse(i,j,k)*virq(atm(1)%q(i,j,k,:))
#else
 atm(1)%pt(i,j,k) = cp_air*atm(1)%pt(i,j,k)/pt_coarse(i,j,k)*(1.+zvir*atm(1)%q(i,j,k,sphum))
#endif
 end do
 end do
 end do
 end if

 if (atm(1)%bd%je == atm(1)%npy-1) then
 if (atm(1)%bd%is == 1) then
 istart = atm(1)%bd%is
 else
 istart = atm(1)%bd%isd
 end if
 if (atm(1)%bd%ie == atm(1)%npx-1) then
 iend = atm(1)%bd%ie
 else
 iend = atm(1)%bd%ied
 end if

 do k=1,npz
 do j=atm(1)%npy,atm(1)%bd%jed
 do i=istart,iend
#ifdef MULTI_GASES
 atm(1)%pt(i,j,k) = cp_air*atm(1)%pt(i,j,k)/pt_coarse(i,j,k)*virq(atm(1)%q(i,j,k,:))
#else
 atm(1)%pt(i,j,k) = cp_air*atm(1)%pt(i,j,k)/pt_coarse(i,j,k)*(1.+zvir*atm(1)%q(i,j,k,sphum))
#endif
 end do
 end do
 end do
 end if

 deallocate(pt_coarse)

 end if

 if (.not. atm(1)%flagstruct%hydrostatic) then

 !delz
 call timing_on('COMM_TOTAL')

 if (any(atm(1)%parent_grid%pelist == gid) .and. atm(1)%neststruct%parent_tile == atm(1)%parent_grid%tile) then
 call mpp_global_field( &
 atm(1)%parent_grid%domain, &
 atm(1)%parent_grid%delz(isd_p:ied_p,jsd_p:jed_p,:), g_dat, position=center)
 if (gid == sending_proc) then
 do p=1,size(atm(1)%pelist)
 call mpp_send(g_dat,size(g_dat),atm(1)%pelist(p))
 enddo
 endif
 endif
 if (any(atm(1)%pelist == gid)) then
 call mpp_recv(g_dat, size(g_dat), sending_proc)
 endif

 call mpp_sync_self

 call timing_off('COMM_TOTAL')
 if (process) call fill_nested_grid(atm(1)%delz, g_dat, &
 atm(1)%neststruct%ind_h, atm(1)%neststruct%wt_h, &
 0, 0, isg, ieg, jsg, jeg, npz, atm(1)%bd)

 !w

 call timing_on('COMM_TOTAL')

 if (any(atm(1)%parent_grid%pelist == gid) .and. atm(1)%neststruct%parent_tile == atm(1)%parent_grid%tile) then
 call mpp_global_field( &
 atm(1)%parent_grid%domain, &
 atm(1)%parent_grid%w(isd_p:ied_p,jsd_p:jed_p,:), g_dat, position=center)
 if (gid == sending_proc) then
 do p=1,size(atm(1)%pelist)
 call mpp_send(g_dat,size(g_dat),atm(1)%pelist(p))
 enddo
 endif
 endif
 if (any(atm(1)%pelist == gid)) then
 call mpp_recv(g_dat, size(g_dat), sending_proc)
 endif

 call mpp_sync_self

 call timing_off('COMM_TOTAL')
 if (process) call fill_nested_grid(atm(1)%w, g_dat, &
 atm(1)%neststruct%ind_h, atm(1)%neststruct%wt_h, &
 0, 0, isg, ieg, jsg, jeg, npz, atm(1)%bd)
 !

 end if

#endif
 deallocate(g_dat)

 !u

 allocate(g_dat( isg:ieg, jsg:jeg+1, npz) )
 g_dat = 1.e25

 call timing_on('COMM_TOTAL')

 if (any(atm(1)%parent_grid%pelist == gid) .and. atm(1)%neststruct%parent_tile == atm(1)%parent_grid%tile) then
 call mpp_global_field( &
 atm(1)%parent_grid%domain, &
 atm(1)%parent_grid%u(isd_p:ied_p,jsd_p:jed_p+1,:), g_dat, position=north)
 if (gid == sending_proc) then
 do p=1,size(atm(1)%pelist)
 call mpp_send(g_dat,size(g_dat),atm(1)%pelist(p))
 enddo
 endif
 endif
 if (any(atm(1)%pelist == gid)) then
 call mpp_recv(g_dat, size(g_dat), sending_proc)
 endif

 call mpp_sync_self

 call timing_off('COMM_TOTAL')
 call mpp_sync_self
 if (process) call fill_nested_grid(atm(1)%u, g_dat, &
 atm(1)%neststruct%ind_u, atm(1)%neststruct%wt_u, &
 0, 1, isg, ieg, jsg, jeg, npz, atm(1)%bd)
 deallocate(g_dat)

 !v

 allocate(g_dat( isg:ieg+1, jsg:jeg, npz) )
 g_dat = 1.e25

 call timing_on('COMM_TOTAL')

 if (any(atm(1)%parent_grid%pelist == gid) .and. atm(1)%neststruct%parent_tile == atm(1)%parent_grid%tile) then
 call mpp_global_field( &
 atm(1)%parent_grid%domain, &
 atm(1)%parent_grid%v(isd_p:ied_p+1,jsd_p:jed_p,:), g_dat, position=east)
 if (gid == sending_proc) then
 do p=1,size(atm(1)%pelist)
 call mpp_send(g_dat,size(g_dat),atm(1)%pelist(p))
 enddo
 endif
 endif
 if (any(atm(1)%pelist == gid)) then
 call mpp_recv(g_dat, size(g_dat), sending_proc)
 endif

 call mpp_sync_self
 call timing_off('COMM_TOTAL')

 if (process) call fill_nested_grid(atm(1)%v, g_dat, &
 atm(1)%neststruct%ind_v, atm(1)%neststruct%wt_v, &
 1, 0, isg, ieg, jsg, jeg, npz, atm(1)%bd)

 deallocate(g_dat)

 end subroutine fill_nested_grid_data

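!>@brief The subroutine 'fill_nested_grid_data_end' completes nested-grid
!! initialization: for two-way nests it updates the coarse-grid terrain from
!! the nest for consistency, then calls 'p_var' to re-derive the auxiliary
!! pressure variables from the (initially hydrostatic) state.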
 subroutine fill_nested_grid_data_end(Atm, proc_in)
 type(fv_atmos_type), intent(INOUT) :: Atm
 logical, intent(IN), OPTIONAL :: proc_in
 real, allocatable :: g_dat(:,:,:), pt_coarse(:,:,:)
 integer :: i,j,k,nq, sphum, ncnst, istart, iend, npz
 integer :: isc, iec, jsc, jec, isd, ied, jsd, jed
 integer :: isd_p, ied_p, jsd_p, jed_p, isc_p, iec_p, jsc_p, jec_p
 integer :: isg, ieg, jsg, jeg, npx_p, npy_p
 integer :: isg_n, ieg_n, jsg_n, jeg_n, npx_n, npy_n
 real zvir

 integer :: p, sending_proc
 logical :: process

 if (present(proc_in)) then
 process = proc_in
 else
 process = .true.
 endif

 isd = atm%bd%isd
 ied = atm%bd%ied
 jsd = atm%bd%jsd
 jed = atm%bd%jed
 ncnst = atm%ncnst
 isc = atm%bd%isc; iec = atm%bd%iec; jsc = atm%bd%jsc; jec = atm%bd%jec
 npz = atm%npz

 isd_p = atm%parent_grid%bd%isd
 ied_p = atm%parent_grid%bd%ied
 jsd_p = atm%parent_grid%bd%jsd
 jed_p = atm%parent_grid%bd%jed
 isc_p = atm%parent_grid%bd%isc
 iec_p = atm%parent_grid%bd%iec
 jsc_p = atm%parent_grid%bd%jsc
 jec_p = atm%parent_grid%bd%jec
 sending_proc = atm%parent_grid%pelist(1) + (atm%neststruct%parent_tile-1)*atm%parent_grid%npes_per_tile

 call mpp_get_global_domain( atm%parent_grid%domain, &
 isg, ieg, jsg, jeg, xsize=npx_p, ysize=npy_p)


 !NOW: what we do is to update the nested-grid terrain to the coarse grid,
 !to ensure consistency between the two grids.
 if ( process ) call mpp_update_domains(atm%phis, atm%domain, complete=.true.)
 if (atm%neststruct%twowaynest) then
 if (any(atm%parent_grid%pelist == mpp_pe()) .or. atm%neststruct%child_proc) then
 call update_coarse_grid(atm%parent_grid%phis, &
 atm%phis, atm%neststruct%nest_domain, &
 atm%neststruct%ind_update_h(isd_p:ied_p+1,jsd_p:jed_p+1,:), &
 atm%gridstruct%dx, atm%gridstruct%dy, atm%gridstruct%area, &
 isd_p, ied_p, jsd_p, jed_p, isd, ied, jsd, jed, &
 atm%neststruct%isu, atm%neststruct%ieu, atm%neststruct%jsu, atm%neststruct%jeu, &
 atm%npx, atm%npy, 0, 0, &
 atm%neststruct%refinement, atm%neststruct%nestupdate, 0, 0, &
 atm%neststruct%parent_proc, atm%neststruct%child_proc, atm%parent_grid)
 atm%parent_grid%neststruct%parent_of_twoway = .true.
 !NOTE: mpp_update_nest_coarse (and by extension, update_coarse_grid) does **NOT** pass data
 !allowing a two-way update into the halo of the coarse grid. It only passes data so that the INTERIOR
 ! can have the two-way update. Thus, on the nest's cold start, if this update_domains call is not done,
 ! the coarse grid will have the wrong topography in the halo, which will CHANGE when a restart is done!!
 if (atm%neststruct%parent_proc) call mpp_update_domains(atm%parent_grid%phis, atm%parent_grid%domain)
 end if

 end if




#ifdef SW_DYNAMICS
!!$ !ps: first level only
!!$ !This is only valid for shallow-water simulations
!!$ if (process) then
!!$ do j=jsd,jed
!!$ do i=isd,ied
!!$
!!$ Atm%ps(i,j) = Atm%delp(i,j,1)/grav
!!$
!!$ end do
!!$ end do
!!$ endif
#else
 !Sets up flow to be initially hydrostatic (shouldn't be the case for all ICs?)
 if (process) call p_var(npz, isc, iec, jsc, jec, atm%ptop, ptop_min, atm%delp, &
 atm%delz, atm%pt, atm%ps, &
 atm%pe, atm%peln, atm%pk, atm%pkz, kappa, atm%q, &
 atm%ng, ncnst, atm%gridstruct%area_64, atm%flagstruct%dry_mass, .false., atm%flagstruct%mountain, &
 atm%flagstruct%moist_phys, .true., atm%flagstruct%nwat, atm%domain)
#endif



 end subroutine fill_nested_grid_data_end

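!>@brief The subroutine 'fv_write_restart' writes timestamped restart files,
!! plus boundary-condition files for nested grids.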
 subroutine fv_write_restart(Atm, grids_on_this_pe, timestamp)
 type(fv_atmos_type), intent(inout) :: Atm(:)
 character(len=*), intent(in) :: timestamp
 logical, intent(IN) :: grids_on_this_pe(:)
 integer n

 call fv_io_write_restart(atm, grids_on_this_pe, timestamp)
 do n=1,size(atm)
 if (atm(n)%neststruct%nested .and. grids_on_this_pe(n)) then
 call fv_io_write_bcs(atm(n))
 endif
 enddo

 end subroutine fv_write_restart

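!>@brief The subroutine 'fv_restart_end' prints checksums and min/max
!! diagnostics of the final state and writes out the ending restart files.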
 subroutine fv_restart_end(Atm, grids_on_this_pe)
 type(fv_atmos_type), intent(inout) :: Atm(:)
 logical, intent(INOUT) :: grids_on_this_pe(:)

 integer :: isc, iec, jsc, jec
 integer :: iq, n, ntileMe, ncnst, ntprog, ntdiag
 integer :: isd, ied, jsd, jed, npz
 integer :: unit
 integer :: file_unit
 integer, allocatable :: pelist(:)
 character(len=128):: tracer_name
 character(len=3):: gn


 ntileme = size(atm(:))

 do n = 1, ntileme

 if (.not. grids_on_this_pe(n)) then
 cycle
 endif

 call mpp_set_current_pelist(atm(n)%pelist)

 isc = atm(n)%bd%isc; iec = atm(n)%bd%iec; jsc = atm(n)%bd%jsc; jec = atm(n)%bd%jec

 isd = atm(n)%bd%isd
 ied = atm(n)%bd%ied
 jsd = atm(n)%bd%jsd
 jed = atm(n)%bd%jed
 npz = atm(n)%npz
 ncnst = atm(n)%ncnst
 ntprog = size(atm(n)%q,4)
 ntdiag = size(atm(n)%qdiag,4)

 if (atm(n)%grid_number > 1) then
 write(gn,'(A2, I1)') " g", atm(n)%grid_number
 else
 gn = ''
 end if

 unit = stdout()
 write(unit,*)
 write(unit,*) 'fv_restart_end u ', trim(gn),' = ', mpp_chksum(atm(n)%u(isc:iec,jsc:jec,:))
 write(unit,*) 'fv_restart_end v ', trim(gn),' = ', mpp_chksum(atm(n)%v(isc:iec,jsc:jec,:))
 if ( .not. atm(n)%flagstruct%hydrostatic ) &
 write(unit,*) 'fv_restart_end w ', trim(gn),' = ', mpp_chksum(atm(n)%w(isc:iec,jsc:jec,:))
 write(unit,*) 'fv_restart_end delp', trim(gn),' = ', mpp_chksum(atm(n)%delp(isc:iec,jsc:jec,:))
 write(unit,*) 'fv_restart_end phis', trim(gn),' = ', mpp_chksum(atm(n)%phis(isc:iec,jsc:jec))
#ifndef SW_DYNAMICS
 write(unit,*) 'fv_restart_end pt ', trim(gn),' = ', mpp_chksum(atm(n)%pt(isc:iec,jsc:jec,:))
 if (ntprog>0) &
 write(unit,*) 'fv_restart_end q(prog) nq ', trim(gn),' =',ntprog, mpp_chksum(atm(n)%q(isc:iec,jsc:jec,:,:))
 if (ntdiag>0) &
 write(unit,*) 'fv_restart_end q(diag) nq ', trim(gn),' =',ntdiag, mpp_chksum(atm(n)%qdiag(isc:iec,jsc:jec,:,:))
 do iq=1,min(17, ntprog) ! Check up to 17 tracers
 call get_tracer_names(model_atmos, iq, tracer_name)
 write(unit,*) 'fv_restart_end '//trim(tracer_name)// trim(gn),' = ', mpp_chksum(atm(n)%q(isc:iec,jsc:jec,:,iq))
 enddo

!---------------
! Check Min/Max:
!---------------
! call prt_maxmin('ZS', Atm(n)%phis, isc, iec, jsc, jec, Atm(n)%ng, 1, 1./grav)
 call pmaxmn_g('ZS', atm(n)%phis, isc, iec, jsc, jec, 1, 1./grav, atm(n)%gridstruct%area_64, atm(n)%domain)
 call pmaxmn_g('PS ', atm(n)%ps, isc, iec, jsc, jec, 1, 0.01 , atm(n)%gridstruct%area_64, atm(n)%domain)
 call prt_maxmin('PS*', atm(n)%ps, isc, iec, jsc, jec, atm(n)%ng, 1, 0.01)
 call prt_maxmin('U ', atm(n)%u(isd:ied,jsd:jed,1:npz), isc, iec, jsc, jec, atm(n)%ng, npz, 1.)
 call prt_maxmin('V ', atm(n)%v(isd:ied,jsd:jed,1:npz), isc, iec, jsc, jec, atm(n)%ng, npz, 1.)
 if ( .not. atm(n)%flagstruct%hydrostatic ) &
 call prt_maxmin('W ', atm(n)%w , isc, iec, jsc, jec, atm(n)%ng, npz, 1.)
 call prt_maxmin('T ', atm(n)%pt, isc, iec, jsc, jec, atm(n)%ng, npz, 1.)
 do iq=1, ntprog
 call get_tracer_names ( model_atmos, iq, tracer_name )
 call pmaxmn_g(trim(tracer_name), atm(n)%q(isd:ied,jsd:jed,1:npz,iq:iq), isc, iec, jsc, jec, npz, &
 1., atm(n)%gridstruct%area_64, atm(n)%domain)
 enddo
! Write energy correction term
1515 #endif
1516 
1517  enddo
1518 
1519  call fv_io_write_restart(atm, grids_on_this_pe)
1520  do n=1,ntileme
1521  if (atm(n)%neststruct%nested .and. grids_on_this_pe(n)) call fv_io_write_bcs(atm(n))
1522  end do
1523 
1524  module_is_initialized = .false.
1525 
1526 #ifdef EFLUX_OUT
1527  if( is_master() ) then
1528  write(*,*) steps, 'Mean equivalent Heat flux for this integration period=',atm(1)%idiag%efx_sum/real(max(1,atm(1)%idiag%steps)), &
1529  'Mean nesting-related flux for this integration period=',atm(1)%idiag%efx_sum_nest/real(max(1,atm(1)%idiag%steps)), &
1530  'Mean mountain torque=',atm(1)%idiag%mtq_sum/real(max(1,atm(1)%idiag%steps))
1531  file_unit = get_unit()
1532  open (unit=file_unit, file='e_flux.data', form='unformatted',status='unknown', access='sequential')
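! e_flux.data holds two unformatted sequential records per step:
! the energy flux efx(n) followed by the mountain torque mtq(n).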
1533  do n=1,steps
1534  write(file_unit) atm(1)%idiag%efx(n)
1535  write(file_unit) atm(1)%idiag%mtq(n) ! time series global mountain torque
1536  !write(file_unit) Atm(1)%idiag%efx_nest(n)
1537  enddo
1538  close(unit=file_unit)
1539  endif
1540 #endif
1541 
1542  end subroutine fv_restart_end
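! A minimal shutdown sequence, as a sketch (the driver variables and the
! exact fv_end interface are assumptions; fv_end comes from fv_control_mod):
!
!   call fv_restart_end(Atm, grids_on_this_pe)  ! chksums, diagnostics, ending restarts
!   call fv_end(Atm, grids_on_this_pe)          ! release fv_atmos_type storage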
1543 
1544  subroutine d2c_setup(u, v, &
1545  ua, va, &
1546  uc, vc, dord4, &
1547  isd,ied,jsd,jed, is,ie,js,je, npx,npy, &
1548  grid_type, nested, &
1549  se_corner, sw_corner, ne_corner, nw_corner, &
1550  rsin_u,rsin_v,cosa_s,rsin2,regional )
1552  logical, intent(in):: dord4
1553  real, intent(in) :: u(isd:ied,jsd:jed+1)
1554  real, intent(in) :: v(isd:ied+1,jsd:jed)
1555  real, intent(out), dimension(isd:ied ,jsd:jed ):: ua
1556  real, intent(out), dimension(isd:ied ,jsd:jed ):: va
1557  real, intent(out), dimension(isd:ied+1,jsd:jed ):: uc
1558  real, intent(out), dimension(isd:ied ,jsd:jed+1):: vc
1559  integer, intent(in) :: isd,ied,jsd,jed, is,ie,js,je, npx,npy,grid_type
1560  logical, intent(in) :: nested, se_corner, sw_corner, ne_corner, nw_corner, regional
1561  real, intent(in) :: rsin_u(isd:ied+1,jsd:jed)
1562  real, intent(in) :: rsin_v(isd:ied,jsd:jed+1)
1563  real, intent(in) :: cosa_s(isd:ied,jsd:jed)
1564  real, intent(in) :: rsin2(isd:ied,jsd:jed)
1565 
1566 ! Local
1567  real, dimension(isd:ied,jsd:jed):: utmp, vtmp
1568  real, parameter:: t11=27./28., t12=-13./28., t13=3./7., t14=6./7., t15=3./28.
1569  real, parameter:: a1 = 0.5625
1570  real, parameter:: a2 = -0.0625
1571  real, parameter:: c1 = -2./14.
1572  real, parameter:: c2 = 11./14.
1573  real, parameter:: c3 = 5./14.
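! Stencil notes: a1 = 9/16 and a2 = -1/16 are the midpoint weights of
! 4-point (cubic) Lagrange interpolation on a uniform grid, (-1,9,9,-1)/16.
! c1+c2+c3 = 1 gives the one-sided 3-point weights used one row inside a
! cube edge, and the t-coefficients build the edge-row stencils (t11 and
! t13 are declared for completeness but unused here).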
1574  integer npt, i, j, ifirst, ilast, id
1575 
1576  if ( dord4) then
1577  id = 1
1578  else
1579  id = 0
1580  endif
1581 
1582 
1583  if (grid_type < 3 .and. .not. (nested .or. regional)) then
1584  npt = 4
1585  else
1586  npt = -2
1587  endif
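! npt = 4 keeps the wide interior stencil away from the cube edges;
! npt = -2 makes the max/min clamps below inoperative, so grid_type >= 3
! and regional domains use the interior stencil everywhere.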
1588 
1589  if ( nested) then
1590 
1591  do j=jsd+1,jed-1
1592  do i=isd,ied
1593  utmp(i,j) = a2*(u(i,j-1)+u(i,j+2)) + a1*(u(i,j)+u(i,j+1))
1594  enddo
1595  enddo
1596  do i=isd,ied
1597  j = jsd
1598  utmp(i,j) = 0.5*(u(i,j)+u(i,j+1))
1599  j = jed
1600  utmp(i,j) = 0.5*(u(i,j)+u(i,j+1))
1601  end do
1602 
1603  do j=jsd,jed
1604  do i=isd+1,ied-1
1605  vtmp(i,j) = a2*(v(i-1,j)+v(i+2,j)) + a1*(v(i,j)+v(i+1,j))
1606  enddo
1607  i = isd
1608  vtmp(i,j) = 0.5*(v(i,j)+v(i+1,j))
1609  i = ied
1610  vtmp(i,j) = 0.5*(v(i,j)+v(i+1,j))
1611  enddo
1612 
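! Convert the co-located covariant winds to contravariant A-grid
! components: ua = (u_cov - v_cov*cos(alpha))/sin(alpha)**2, with
! cosa_s = cos(alpha) and rsin2 = 1/sin(alpha)**2 at cell centers.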
1613  do j=jsd,jed
1614  do i=isd,ied
1615  ua(i,j) = (utmp(i,j)-vtmp(i,j)*cosa_s(i,j)) * rsin2(i,j)
1616  va(i,j) = (vtmp(i,j)-utmp(i,j)*cosa_s(i,j)) * rsin2(i,j)
1617  enddo
1618  enddo
1619 
1620  else
1621 
1622  !----------
1623  ! Interior:
1624  !----------
1625  utmp = 0.
1626  vtmp = 0.
1627 
1628 
1629  do j=max(npt,js-1),min(npy-npt,je+1)
1630  do i=max(npt,isd),min(npx-npt,ied)
1631  utmp(i,j) = a2*(u(i,j-1)+u(i,j+2)) + a1*(u(i,j)+u(i,j+1))
1632  enddo
1633  enddo
1634  do j=max(npt,jsd),min(npy-npt,jed)
1635  do i=max(npt,is-1),min(npx-npt,ie+1)
1636  vtmp(i,j) = a2*(v(i-1,j)+v(i+2,j)) + a1*(v(i,j)+v(i+1,j))
1637  enddo
1638  enddo
1639 
1640  !----------
1641  ! edges:
1642  !----------
1643  if (grid_type < 3) then
1644 
1645  if ( js==1 .or. jsd<npt) then
1646  do j=jsd,npt-1
1647  do i=isd,ied
1648  utmp(i,j) = 0.5*(u(i,j) + u(i,j+1))
1649  vtmp(i,j) = 0.5*(v(i,j) + v(i+1,j))
1650  enddo
1651  enddo
1652  endif
1653 
1654  if ( (je+1)==npy .or. jed>=(npy-npt)) then
1655  do j=npy-npt+1,jed
1656  do i=isd,ied
1657  utmp(i,j) = 0.5*(u(i,j) + u(i,j+1))
1658  vtmp(i,j) = 0.5*(v(i,j) + v(i+1,j))
1659  enddo
1660  enddo
1661  endif
1662 
1663  if ( is==1 .or. isd<npt ) then
1664  do j=max(npt,jsd),min(npy-npt,jed)
1665  do i=isd,npt-1
1666  utmp(i,j) = 0.5*(u(i,j) + u(i,j+1))
1667  vtmp(i,j) = 0.5*(v(i,j) + v(i+1,j))
1668  enddo
1669  enddo
1670  endif
1671 
1672  if ( (ie+1)==npx .or. ied>=(npx-npt)) then
1673  do j=max(npt,jsd),min(npy-npt,jed)
1674  do i=npx-npt+1,ied
1675  utmp(i,j) = 0.5*(u(i,j) + u(i,j+1))
1676  vtmp(i,j) = 0.5*(v(i,j) + v(i+1,j))
1677  enddo
1678  enddo
1679  endif
1680 
1681  endif
1682  do j=js-1-id,je+1+id
1683  do i=is-1-id,ie+1+id
1684  ua(i,j) = (utmp(i,j)-vtmp(i,j)*cosa_s(i,j)) * rsin2(i,j)
1685  va(i,j) = (vtmp(i,j)-utmp(i,j)*cosa_s(i,j)) * rsin2(i,j)
1686  enddo
1687  enddo
1688 
1689  end if
1690 
1691 ! A -> C
1692 !--------------
1693 ! Fix the edges
1694 !--------------
1695 ! Xdir:
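! (at a cube corner the x-axis of one face continues as the y-axis of the
! adjacent face, hence the utmp/vtmp swaps and sign flips below)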
1696  if( sw_corner ) then
1697  do i=-2,0
1698  utmp(i,0) = -vtmp(0,1-i)
1699  enddo
1700  endif
1701  if( se_corner ) then
1702  do i=0,2
1703  utmp(npx+i,0) = vtmp(npx,i+1)
1704  enddo
1705  endif
1706  if( ne_corner ) then
1707  do i=0,2
1708  utmp(npx+i,npy) = -vtmp(npx,je-i)
1709  enddo
1710  endif
1711  if( nw_corner ) then
1712  do i=-2,0
1713  utmp(i,npy) = vtmp(0,je+i)
1714  enddo
1715  endif
1716 
1717  if (grid_type < 3 .and. .not. (nested .or. regional)) then
1718  ifirst = max(3, is-1)
1719  ilast = min(npx-2,ie+2)
1720  else
1721  ifirst = is-1
1722  ilast = ie+2
1723  endif
1724 !---------------------------------------------
1725 ! 4th order interpolation for interior points:
1726 !---------------------------------------------
1727  do j=js-1,je+1
1728  do i=ifirst,ilast
1729  uc(i,j) = a1*(utmp(i-1,j)+utmp(i,j))+a2*(utmp(i-2,j)+utmp(i+1,j))
1730  enddo
1731  enddo
1732 
1733  if (grid_type < 3) then
1734 ! Xdir:
1735  if( is==1 .and. .not. (nested .or. regional) ) then
1736  do j=js-1,je+1
1737  uc(0,j) = c1*utmp(-2,j) + c2*utmp(-1,j) + c3*utmp(0,j)
1738  uc(1,j) = ( t14*(utmp( 0,j)+utmp(1,j)) &
1739  + t12*(utmp(-1,j)+utmp(2,j)) &
1740  + t15*(utmp(-2,j)+utmp(3,j)) )*rsin_u(1,j)
1741  uc(2,j) = c1*utmp(3,j) + c2*utmp(2,j) + c3*utmp(1,j)
1742  enddo
1743  endif
1744 
1745  if( (ie+1)==npx .and. .not. (nested .or. regional) ) then
1746  do j=js-1,je+1
1747  uc(npx-1,j) = c1*utmp(npx-3,j)+c2*utmp(npx-2,j)+c3*utmp(npx-1,j)
1748  uc(npx,j) = (t14*(utmp(npx-1,j)+utmp(npx,j))+ &
1749  t12*(utmp(npx-2,j)+utmp(npx+1,j)) &
1750  + t15*(utmp(npx-3,j)+utmp(npx+2,j)))*rsin_u(npx,j)
1751  uc(npx+1,j) = c3*utmp(npx,j)+c2*utmp(npx+1,j)+c1*utmp(npx+2,j)
1752  enddo
1753  endif
1754 
1755  endif
1756 
1757 !------
1758 ! Ydir:
1759 !------
1760  if( sw_corner ) then
1761  do j=-2,0
1762  vtmp(0,j) = -utmp(1-j,0)
1763  enddo
1764  endif
1765  if( nw_corner ) then
1766  do j=0,2
1767  vtmp(0,npy+j) = utmp(j+1,npy)
1768  enddo
1769  endif
1770  if( se_corner ) then
1771  do j=-2,0
1772  vtmp(npx,j) = utmp(ie+j,0)
1773  enddo
1774  endif
1775  if( ne_corner ) then
1776  do j=0,2
1777  vtmp(npx,npy+j) = -utmp(ie-j,npy)
1778  enddo
1779  endif
1780 
1781  if (grid_type < 3) then
1782 
1783  do j=js-1,je+2
1784  if ( j==1 .and. .not. (nested .or. regional)) then
1785  do i=is-1,ie+1
1786  vc(i,1) = (t14*(vtmp(i, 0)+vtmp(i,1)) &
1787  + t12*(vtmp(i,-1)+vtmp(i,2)) &
1788  + t15*(vtmp(i,-2)+vtmp(i,3)))*rsin_v(i,1)
1789  enddo
1790  elseif ( (j==0 .or. j==(npy-1)) .and. .not. (nested .or. regional)) then
1791  do i=is-1,ie+1
1792  vc(i,j) = c1*vtmp(i,j-2) + c2*vtmp(i,j-1) + c3*vtmp(i,j)
1793  enddo
1794  elseif ( (j==2 .or. j==(npy+1)) .and. .not. (nested .or. regional)) then
1795  do i=is-1,ie+1
1796  vc(i,j) = c1*vtmp(i,j+1) + c2*vtmp(i,j) + c3*vtmp(i,j-1)
1797  enddo
1798  elseif ( j==npy .and. .not. (nested .or. regional)) then
1799  do i=is-1,ie+1
1800  vc(i,npy) = (t14*(vtmp(i,npy-1)+vtmp(i,npy)) &
1801  + t12*(vtmp(i,npy-2)+vtmp(i,npy+1)) &
1802  + t15*(vtmp(i,npy-3)+vtmp(i,npy+2)))*rsin_v(i,npy)
1803  enddo
1804  else
1805 ! 4th order interpolation for interior points:
1806  do i=is-1,ie+1
1807  vc(i,j) = a2*(vtmp(i,j-2)+vtmp(i,j+1))+a1*(vtmp(i,j-1)+vtmp(i,j))
1808  enddo
1809  endif
1810  enddo
1811  else
1812 ! 4th order interpolation:
1813  do j=js-1,je+2
1814  do i=is-1,ie+1
1815  vc(i,j) = a2*(vtmp(i,j-2)+vtmp(i,j+1))+a1*(vtmp(i,j-1)+vtmp(i,j))
1816  enddo
1817  enddo
1818  endif
1819 
1820  end subroutine d2c_setup
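! Worked check of the interior weights (a1,a2) = (9/16,-1/16): sampling the
! cubic f(x) = x**3 at x = -1.5, -0.5, 0.5, 1.5 gives
!   a1*(f(-0.5)+f(0.5)) + a2*(f(-1.5)+f(1.5))
!     = 0.5625*(-0.125+0.125) - 0.0625*(-3.375+3.375) = 0 = f(0),
! so the stencil is exact through cubics, i.e. 4th-order accurate.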
1821 
1822  subroutine d2a_setup(u, v, ua, va, dord4, &
1823  isd,ied,jsd,jed, is,ie,js,je, npx,npy, &
1824  grid_type, nested, &
1825  cosa_s,rsin2,regional )
1827  logical, intent(in):: dord4
1828  real, intent(in) :: u(isd:ied,jsd:jed+1)
1829  real, intent(in) :: v(isd:ied+1,jsd:jed)
1830  real, intent(out), dimension(isd:ied ,jsd:jed ):: ua
1831  real, intent(out), dimension(isd:ied ,jsd:jed ):: va
1832  integer, intent(in) :: isd,ied,jsd,jed, is,ie,js,je, npx,npy,grid_type
1833  real, intent(in) :: cosa_s(isd:ied,jsd:jed)
1834  real, intent(in) :: rsin2(isd:ied,jsd:jed)
1835  logical, intent(in) :: nested, regional
1836 
1837 ! Local
1838  real, dimension(isd:ied,jsd:jed):: utmp, vtmp
1839  real, parameter:: t11=27./28., t12=-13./28., t13=3./7., t14=6./7., t15=3./28.
1840  real, parameter:: a1 = 0.5625
1841  real, parameter:: a2 = -0.0625
1842  real, parameter:: c1 = -2./14.
1843  real, parameter:: c2 = 11./14.
1844  real, parameter:: c3 = 5./14.
1845  integer npt, i, j, ifirst, ilast, id
1846 
1847  if ( dord4) then
1848  id = 1
1849  else
1850  id = 0
1851  endif
1852 
1853 
1854  if (grid_type < 3 .and. .not. (nested .or. regional)) then
1855  npt = 4
1856  else
1857  npt = -2
1858  endif
1859 
1860  if ( nested) then
1861 
1862  do j=jsd+1,jed-1
1863  do i=isd,ied
1864  utmp(i,j) = a2*(u(i,j-1)+u(i,j+2)) + a1*(u(i,j)+u(i,j+1))
1865  enddo
1866  enddo
1867  do i=isd,ied
1868  j = jsd
1869  utmp(i,j) = 0.5*(u(i,j)+u(i,j+1))
1870  j = jed
1871  utmp(i,j) = 0.5*(u(i,j)+u(i,j+1))
1872  end do
1873 
1874  do j=jsd,jed
1875  do i=isd+1,ied-1
1876  vtmp(i,j) = a2*(v(i-1,j)+v(i+2,j)) + a1*(v(i,j)+v(i+1,j))
1877  enddo
1878  i = isd
1879  vtmp(i,j) = 0.5*(v(i,j)+v(i+1,j))
1880  i = ied
1881  vtmp(i,j) = 0.5*(v(i,j)+v(i+1,j))
1882  enddo
1883 
1884  else
1885 
1886  !----------
1887  ! Interior:
1888  !----------
1889 
1890  do j=max(npt,js-1),min(npy-npt,je+1)
1891  do i=max(npt,isd),min(npx-npt,ied)
1892  utmp(i,j) = a2*(u(i,j-1)+u(i,j+2)) + a1*(u(i,j)+u(i,j+1))
1893  enddo
1894  enddo
1895  do j=max(npt,jsd),min(npy-npt,jed)
1896  do i=max(npt,is-1),min(npx-npt,ie+1)
1897  vtmp(i,j) = a2*(v(i-1,j)+v(i+2,j)) + a1*(v(i,j)+v(i+1,j))
1898  enddo
1899  enddo
1900 
1901  !----------
1902  ! edges:
1903  !----------
1904  if (grid_type < 3) then
1905 
1906  if ( js==1 .or. jsd<npt) then
1907  do j=jsd,npt-1
1908  do i=isd,ied
1909  utmp(i,j) = 0.5*(u(i,j) + u(i,j+1))
1910  vtmp(i,j) = 0.5*(v(i,j) + v(i+1,j))
1911  enddo
1912  enddo
1913  endif
1914 
1915  if ( (je+1)==npy .or. jed>=(npy-npt)) then
1916  do j=npy-npt+1,jed
1917  do i=isd,ied
1918  utmp(i,j) = 0.5*(u(i,j) + u(i,j+1))
1919  vtmp(i,j) = 0.5*(v(i,j) + v(i+1,j))
1920  enddo
1921  enddo
1922  endif
1923 
1924  if ( is==1 .or. isd<npt ) then
1925  do j=max(npt,jsd),min(npy-npt,jed)
1926  do i=isd,npt-1
1927  utmp(i,j) = 0.5*(u(i,j) + u(i,j+1))
1928  vtmp(i,j) = 0.5*(v(i,j) + v(i+1,j))
1929  enddo
1930  enddo
1931  endif
1932 
1933  if ( (ie+1)==npx .or. ied>=(npx-npt)) then
1934  do j=max(npt,jsd),min(npy-npt,jed)
1935  do i=npx-npt+1,ied
1936  utmp(i,j) = 0.5*(u(i,j) + u(i,j+1))
1937  vtmp(i,j) = 0.5*(v(i,j) + v(i+1,j))
1938  enddo
1939  enddo
1940  endif
1941 
1942  endif
1943 
1944  end if
1945 
1946 
1947 
1948  do j=js-1-id,je+1+id
1949  do i=is-1-id,ie+1+id
1950  ua(i,j) = (utmp(i,j)-vtmp(i,j)*cosa_s(i,j)) * rsin2(i,j)
1951  va(i,j) = (vtmp(i,j)-utmp(i,j)*cosa_s(i,j)) * rsin2(i,j)
1952  enddo
1953  enddo
1954 
1955 end subroutine d2a_setup
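! d2a_setup is the D-to-A-only variant of d2c_setup: the same interior and
! edge stencils produce utmp/vtmp, but no C-grid stage follows, so the
! corner ghost fixes and the uc/vc edge formulas are not needed.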
1956 
1958 subroutine pmaxmn_g(qname, q, is, ie, js, je, km, fac, area, domain)
1959  character(len=*), intent(in):: qname
1960  integer, intent(in):: is, ie, js, je
1961  integer, intent(in):: km
1962  real, intent(in):: q(is-3:ie+3, js-3:je+3, km)
1963  real, intent(in):: fac
1964  real(kind=R_GRID), intent(IN):: area(is-3:ie+3, js-3:je+3)
1965  type(domain2d), intent(INOUT) :: domain
1966 !
1967  real qmin, qmax, gmean
1968  integer i,j,k
1969 
1970  qmin = q(is,js,1)
1971  qmax = qmin
1972 
1973  do k=1,km
1974  do j=js,je
1975  do i=is,ie
1976  if( q(i,j,k) < qmin ) then
1977  qmin = q(i,j,k)
1978  elseif( q(i,j,k) > qmax ) then
1979  qmax = q(i,j,k)
1980  endif
1981  enddo
1982  enddo
1983  enddo
1984 
1985  call mp_reduce_min(qmin)
1986  call mp_reduce_max(qmax)
1987 
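! Note: the global mean below uses level km only (q(:,:,km)); for the
! single-level fields passed from fv_restart_end (ZS, PS) this is the
! full-field mean, while for multi-level tracers it is the bottom level.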
1988  gmean = g_sum(domain, q(is:ie,js:je,km), is, ie, js, je, 3, area, 1, .true.)
1989  if(is_master()) write(6,*) qname, qmax*fac, qmin*fac, gmean*fac
1990 
1991 end subroutine pmaxmn_g
1992 end module fv_restart_mod
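The 4th-order midpoint weights used by d2c_setup and d2a_setup can be checked in isolation. The program below is an illustrative sketch, not part of fv_restart.F90 (the program name, grid spacing, and test points are invented for the demonstration): it confirms that the (a1, a2) = (9/16, -1/16) stencil is exact for a cubic and has O(h**4) error for a smooth field.

program check_d2a_weights
  implicit none
  integer, parameter :: dp = kind(1.d0)
  real(dp), parameter :: a1 = 0.5625_dp, a2 = -0.0625_dp  ! 9/16, -1/16
  real(dp) :: xc, h, f(-1:2), approx
  integer :: k

  h  = 0.1_dp          ! grid spacing (hypothetical)
  xc = 0.3_dp          ! target midpoint

  ! cubic test: the stencil should be exact to round-off
  do k = -1, 2
     f(k) = (xc + (real(k,dp)-0.5_dp)*h)**3
  end do
  approx = a1*(f(0)+f(1)) + a2*(f(-1)+f(2))
  print *, 'cubic error  =', approx - xc**3

  ! smooth test: the error should scale like h**4
  do k = -1, 2
     f(k) = sin(xc + (real(k,dp)-0.5_dp)*h)
  end do
  approx = a1*(f(0)+f(1)) + a2*(f(-1)+f(2))
  print *, 'sin(x) error =', approx - sin(xc)
end program check_d2a_weights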
subroutine, public fv_io_read_bcs(Atm)
The subroutine 'fv_io_read_BCs' reads BCs from a restart file.
Definition: fv_io.F90:1010
subroutine, public fv_restart(fv_domain, Atm, dt_atmos, seconds, days, cold_start, grid_type, grids_on_this_pe)
The subroutine 'fv_restart' initializes the model state, including prognostic variables and several ...
Definition: fv_restart.F90:196
subroutine, public del2_cubed_sphere(npx, npy, q, area, dx, dy, dxc, dyc, sin_sg, nmax, cd, zero_ocean, oro, nested, domain, bd, regional)
subroutine, public init_double_periodic(u, v, w, pt, delp, q, phis, ps, pe, peln, pk, pkz, uc, vc, ua, va, ak, bk, gridstruct, flagstruct, npx, npy, npz, ng, ncnst, nwat, ndims, nregions, dry_mass, mountain, moist_phys, hydrostatic, hybrid_z, delz, ze0, ks, ptop, domain_in, tile_in, bd)
subroutine, public init_case(u, v, w, pt, delp, q, phis, ps, pe, peln, pk, pkz, uc, vc, ua, va, ak, bk, gridstruct, flagstruct, npx, npy, npz, ng, ncnst, nwat, ndims, nregions, dry_mass, mountain, moist_phys, hydrostatic, hybrid_z, delz, ze0, adiabatic, ks, npx_global, ptop, domain_in, tile_in, bd)
Definition: test_cases.F90:541
The module 'fv_mp_mod' is a single program multiple data (SPMD) parallel decomposition/communication m...
Definition: fv_mp_mod.F90:24
subroutine timing_off(blk_name)
The subroutine 'timing_off' stops a timer.
Definition: fv_timing.F90:180
subroutine, public fv_write_restart(Atm, grids_on_this_pe, timestamp)
The subroutine 'fv_write_restart' writes restart files to disk.
subroutine, public fv_restart_end(Atm, grids_on_this_pe)
The subroutine 'fv_restart_end' writes ending restart files, terminates I/O, and prints out diagnosti...
logical module_is_initialized
Definition: fv_restart.F90:180
The interface 'update_coarse_grid_mpp' contains subroutines that fetch data from the nested grid and in...
Definition: boundary.F90:108
subroutine fill_nested_grid_data(Atm, proc_in)
Definition: fv_restart.F90:945
subroutine, public fv_io_write_restart(Atm, grids_on_this_pe, timestamp)
The subroutine 'fv_io_write_restart' writes restart files.
Definition: fv_io.F90:582
subroutine, public fv_io_init()
Initialize the fv core restart facilities.
Definition: fv_io.F90:132
real(kind=r_grid), parameter cnst_0p20
Definition: fv_restart.F90:178
The module 'multi_gases' performs multi-constituent computations.
Definition: multi_gases.F90:25
subroutine, public set_hybrid_z(is, ie, js, je, ng, km, ztop, dz, rgrav, hs, ze, dz3)
Definition: fv_eta.F90:2840
integer, public test_case
Definition: test_cases.F90:173
subroutine, public del4_cubed_sphere(npx, npy, q, area, dx, dy, dxc, dyc, sin_sg, nmax, zero_ocean, oro, nested, domain, bd, regional)
real function, public g_sum(domain, p, ifirst, ilast, jfirst, jlast, ngc, area, mode, reproduce)
The function 'g_sum' is the fast version of 'globalsum'.
subroutine, public fv_io_read_restart(fv_domain, Atm)
Reads the fv core restart quantities.
Definition: fv_io.F90:144
The module 'fv_io' contains restart facilities for the FV core.
Definition: fv_io.F90:30
integer, parameter, public r_grid
Definition: fv_arrays.F90:35
subroutine, public fv_io_register_restart(fv_domain, Atm)
The subroutine 'fv_io_register_restart' registers model restart fields.
Definition: fv_io.F90:464
subroutine, public fv_io_register_restart_bcs(Atm)
The subroutine 'fv_io_register_restart_BCs' registers restarts for nested-grid boundary conditions...
Definition: fv_io.F90:910
subroutine, public make_eta_level(km, pe, area, kks, ak, bk, ptop, domain, bd)
The module 'fv_timing' contains FV3 timers.
Definition: fv_timing.F90:24
pure real function, public virq(q)
The module 'boundary' contains utility routines for grid nesting and boundary conditions.
Definition: boundary.F90:25
subroutine, public fv_restart_init()
Definition: fv_restart.F90:186
real, public alpha
Definition: test_cases.F90:175
subroutine fill_nested_grid_topo(Atm, proc_in)
The subroutine 'fill_nested_grid_topo' fills the nested grid with topo to enable boundary smoothing...
Definition: fv_restart.F90:885
real, parameter, public ptop_min
subroutine, public compute_dz_l32(km, ztop, dz)
Definition: fv_eta.F90:2729
subroutine, public d2a_setup(u, v, ua, va, dord4, isd, ied, jsd, jed, is, ie, js, je, npx, npy, grid_type, nested, cosa_s, rsin2, regional)
subroutine, public setup_nested_boundary_halo(Atm, proc_in)
Definition: fv_restart.F90:716
The module 'fv_arrays' contains the 'fv_atmos_type' and associated datatypes.
Definition: fv_arrays.F90:24
real function, public great_circle_dist(q1, q2, radius)
The interface 'nested_grid_BC' includes subroutines 'nested_grid_BC_2d' and 'nested_grid_BC_3d' that fetc...
Definition: boundary.F90:88
The module 'fv_eta' contains routines to set up the reference (Eulerian) pressure coordinate.
Definition: fv_eta.F90:25
The module 'external_ic_mod' contains routines that read in and remap initial conditions.
Definition: external_ic.F90:32
subroutine, public fv_io_write_bcs(Atm, timestamp)
The subroutine 'fv_io_write_BCs' writes BCs to a restart file.
Definition: fv_io.F90:999
subroutine, public compute_dz_var(km, ztop, dz)
Definition: fv_eta.F90:2659
The interface 'fill_nested_grid' includes subroutines 'fill_nested_grid_2d' and 'fill_nested_grid_3d'...
Definition: boundary.F90:99
The module 'treat_da_increment' contains routines for treating the increments of the prognostic vari...
real, dimension(:,:), allocatable, public sgh_g
subroutine timing_on(blk_name)
The subroutine 'timing_on' starts a timer.
Definition: fv_timing.F90:116
subroutine fill_nested_grid_data_end(Atm, proc_in)
The subroutine 'fill_nested_grid_data_end' actually sets up the coarse-grid topography.
The module 'fv_diagnostics' contains routines to compute diagnostic fields.
subroutine, public fv_io_register_nudge_restart(Atm)
The subroutine 'fv_io_register_nudge_restart' registers restarts for SST fields used in HiRAM...
Definition: fv_io.F90:448
The module 'fv_grid_utils' contains routines for setting up and computing grid-related quantities...
real, dimension(:,:), allocatable, public oro_g
subroutine, public prt_maxmin(qname, q, is, ie, js, je, n_g, km, fac)
subroutine, public get_cubed_sphere_terrain(Atm, fv_domain)
subroutine, public init_latlon(u, v, pt, delp, q, phis, ps, pe, peln, pk, pkz, uc, vc, ua, va, ak, bk, gridstruct, npx, npy, npz, ng, ncnst, ndims, nregions, dry_mass, mountain, moist_phys, hybrid_z, delz, ze0, domain_in, tile_in)
subroutine, public read_da_inc(Atm, fv_domain)
The subroutine 'read_da_inc' reads the increments of the diagnostic variables from the DA-generated f...
subroutine, public get_external_ic(Atm, fv_domain, cold_start)
subroutine, public cubed_to_latlon(u, v, ua, va, gridstruct, npx, npy, km, mode, grid_type, domain, nested, c2l_ord, bd)
subroutine fill_nested_grid_topo_halo(Atm, proc_in)
Definition: fv_restart.F90:864
subroutine, public remap_restart(fv_domain, Atm)
The subroutine 'remap_restart' remaps the model state from remap files to a new set of Eulerian coord...
Definition: fv_io.F90:276
subroutine pmaxmn_g(qname, q, is, ie, js, je, km, fac, area, domain)
The subroutine 'pmaxmn_g' writes the domain max, min, and mean of a quantity.
subroutine, public d2c_setup(u, v, ua, va, uc, vc, dord4, isd, ied, jsd, jed, is, ie, js, je, npx, npy, grid_type, nested, se_corner, sw_corner, ne_corner, nw_corner, rsin_u, rsin_v, cosa_s, rsin2, regional)
subroutine, public p_var(km, ifirst, ilast, jfirst, jlast, ptop, ptop_min, delp, delz, pt, ps, pe, peln, pk, pkz, cappa, q, ng, nq, area, dry_mass, adjust_dry_mass, mountain, moist_phys, hydrostatic, nwat, domain, make_nh)
The subroutine 'p_var' computes auxiliary pressure variables for a hydrostatic state.
Definition: init_hydro.F90:87
subroutine, public fv_io_register_restart_bcs_nh(Atm)
Definition: fv_io.F90:975