FV3DYCORE  Version 2.0.0
atmosphere.F90
1 
2 !***********************************************************************
3 !* GNU Lesser General Public License
4 !*
5 !* This file is part of the FV3 dynamical core.
6 !*
7 !* The FV3 dynamical core is free software: you can redistribute it
8 !* and/or modify it under the terms of the
9 !* GNU Lesser General Public License as published by the
10 !* Free Software Foundation, either version 3 of the License, or
11 !* (at your option) any later version.
12 !*
13 !* The FV3 dynamical core is distributed in the hope that it will be
14 !* useful, but WITHOUT ANY WARRANTY; without even the implied warranty
15 !* of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
16 !* See the GNU General Public License for more details.
17 !*
18 !* You should have received a copy of the GNU Lesser General Public
19 !* License along with the FV3 dynamical core.
20 !* If not, see <http://www.gnu.org/licenses/>.
21 !***********************************************************************
22 
25 
27 
28 ! <table>
29 ! <tr>
30 ! <th>Module Name</th>
31 ! <th>Functions Included</th>
32 ! </tr>
33 ! <tr>
34 ! <td>block_control_mod</td>
35 ! <td>block_control_type</td>
36 ! </tr>
37 ! <tr>
38 ! <td>constants_mod</td>
39 ! <td>cp_air, rdgas, grav, rvgas, kappa, pstd_mks</td>
40 ! </tr>
41 ! <tr>
42 ! <td>field_manager_mod</td>
43 ! <td>MODEL_ATMOS</td>
44 ! </tr>
45 ! <tr>
46 ! <td>fms_mod</td>
47 ! <td>file_exist, open_namelist_file, close_file, error_mesg, FATAL,
48 ! check_nml_error, stdlog, write_version_number, set_domain,
49 ! mpp_clock_id, mpp_clock_begin, mpp_clock_end, CLOCK_SUBCOMPONENT,
50 ! clock_flag_default, nullify_domain</td>
51 ! </tr>
52 ! <tr>
53 ! <td>fv_arrays_mod</td>
54 ! <td>fv_atmos_type, R_GRID</td>
55 ! </tr>
56 ! <tr>
57 ! <td>fv_control_mod</td>
58 ! <td>fv_init, fv_end, ngrids</td>
59 ! </tr>
60 ! <tr>
61 ! <td>fv_diagnostics_mod</td>
62 ! <td>fv_diag_init, fv_diag, fv_time, prt_maxmin, prt_height</td>
63 ! </tr>
64 ! <tr>
65 ! <td>fv_dynamics_mod</td>
66 ! <td>fv_dynamics</td>
67 ! </tr>
68 ! <tr>
69 ! <td>fv_eta_mod</td>
70 ! <td>get_eta_level</td>
71 ! </tr>
72 ! <tr>
73 ! <td>fv_fill_mod</td>
74 ! <td>fill_gfs</td>
75 ! </tr>
76 ! <tr>
77 ! <td>fv_mp_mod</td>
78 ! <td>switch_current_Atm</td>
79 ! </tr>
80 ! <tr>
81 ! <td>fv_nesting_mod</td>
82 ! <td>twoway_nesting</td>
83 ! </tr>
84 ! <tr>
85 ! <td>fv_nggps_diags_mod</td>
86 ! <td>fv_nggps_diag_init, fv_nggps_diag</td>
87 ! </tr>
88 ! <tr>
89 ! <td>fv_nwp_nudge_mod</td>
90 ! <td>fv_nwp_nudge_init, fv_nwp_nudge_end, do_adiabatic_init</td>
91 ! </tr>
92 ! <tr>
93 ! <td>fv_restart_mod</td>
94 ! <td>fv_restart, fv_write_restart</td>
95 ! </tr>
96 ! <tr>
97 ! <td>fv_sg_mod</td>
98 ! <td>fv_subgrid_z</td>
99 ! </tr>
100 ! <tr>
101 ! <td>fv_timing_mod</td>
102 ! <td>timing_on, timing_off</td>
103 ! </tr>
104 ! <tr>
105 ! <td>fv_update_phys_mod</td>
106 ! <td>fv_update_phys</td>
107 ! </tr>
108 ! <tr>
109 ! <td>IPD_typedefs_mod</td>
110 ! <td>IPD_data_type, kind_phys => IPD_kind_phys</td>
111 ! </tr>
112 ! <tr>
113 ! <td>mpp_mod</td>
114 ! <td>mpp_error, stdout, FATAL, NOTE, input_nml_file, mpp_root_pe,
115 ! mpp_npes, mpp_pe, mpp_chksum, mpp_get_current_pelist,
116 ! mpp_set_current_pelist</td>
117 ! </tr>
118 ! <tr>
119 ! <td>mpp_domains_mod</td>
120 ! <td>mpp_get_data_domain, mpp_get_compute_domain, domain2d, mpp_update_domains</td>
121 ! </tr>
122 ! <tr>
123 ! <td>mpp_parameter_mod</td>
124 ! <td>EUPDATE, WUPDATE, SUPDATE, NUPDATE</td>
125 ! </tr>
126 ! <tr>
127 ! <td>time_manager_mod</td>
128 ! <td>time_type, get_time, set_time, operator(+), operator(-)</td>
129 ! </tr>
130 ! <tr>
131 ! <td>tracer_manager_mod</td>
132 ! <td>get_tracer_index, get_number_tracers, NO_TRACER</td>
133 ! </tr>
134 ! <tr>
135 ! <td>xgrid_mod</td>
136 ! <td>grid_box_type</td>
137 ! </tr>
138 ! </table>
139 
140 #include <fms_platform.h>
141 
142 !-----------------
143 ! FMS modules:
144 !-----------------
145 use block_control_mod, only: block_control_type
146 use constants_mod, only: cp_air, rdgas, grav, rvgas, kappa, pstd_mks
147 use time_manager_mod, only: time_type, get_time, set_time, operator(+), &
148  operator(-), operator(/), time_type_to_real
149 use fms_mod, only: file_exist, open_namelist_file, &
150  close_file, error_mesg, fatal, &
151  check_nml_error, stdlog, &
152  write_version_number, &
153  set_domain, &
154  read_data, &
155  mpp_clock_id, mpp_clock_begin, &
156  mpp_clock_end, clock_subcomponent, &
157  clock_flag_default, nullify_domain
158 use mpp_mod, only: mpp_error, stdout, fatal, warning, note, &
159  input_nml_file, mpp_root_pe, &
160  mpp_npes, mpp_pe, mpp_chksum, &
161  mpp_get_current_pelist, &
162  mpp_set_current_pelist, mpp_sync
163 use mpp_parameter_mod, only: eupdate, wupdate, supdate, nupdate
164 use mpp_domains_mod, only: domain2d, mpp_update_domains
165 use xgrid_mod, only: grid_box_type
166 use field_manager_mod, only: model_atmos
167 use tracer_manager_mod, only: get_tracer_index, get_number_tracers, &
168  no_tracer, get_tracer_names
170 use ipd_typedefs, only: ipd_data_type, kind_phys => ipd_kind_phys
172 
173 !-----------------
174 ! FV core modules:
175 !-----------------
178 use fv_eta_mod, only: get_eta_level
179 use fv_fill_mod, only: fill_gfs
180 use fv_dynamics_mod, only: fv_dynamics
186 use fv_mp_mod, only: is_master
187 use fv_sg_mod, only: fv_subgrid_z
191 #ifdef MULTI_GASES
192 use multi_gases_mod, only: virq, virq_max, num_gas, ri, cpi
193 #endif
196 
197 use mpp_domains_mod, only: mpp_get_data_domain, mpp_get_compute_domain
198 use fv_grid_utils_mod, only: g_sum
199 
200 implicit none
201 private
202 
203 !--- driver routines
206 
207 !--- utility routines
213 ! experimental APIs not ready for use
214 ! atmosphere_tracer_postinit, &
215  atmosphere_diss_est, & ! dissipation estimate for SKEB
220 
221 !--- physics/radiation data exchange routines
223 
224 !-----------------------------------------------------------------------
225 ! version number of this module
226 ! Include variable "version" to be written to log file.
227 #include<file_version.h>
228 character(len=20) :: mod_name = 'fvGFS/atmosphere_mod'
229 
230 !---- private data ----
231  type(time_type) :: time_step_atmos
232  public atm, mygrid
233 
234  !These are convenience variables for local use only, and are set to values in Atm%
235  real :: dt_atmos
236  real :: zvir
237  integer :: npx, npy, npz, ncnst, pnats
238  integer :: isc, iec, jsc, jec
239  integer :: isd, ied, jsd, jed
240  integer :: nq ! number of transported tracers
241  integer :: sec, seconds, days
243  logical :: cold_start = .false. ! used in initial condition
244 
245  integer, dimension(:), allocatable :: id_tracerdt_dyn
246  integer :: sphum, liq_wat, rainwat, ice_wat, snowwat, graupel ! condensate species tracer indices
247 #ifdef CCPP
248  integer :: cld_amt
249 #endif
250 
251  integer :: mygrid = 1
252  integer :: p_split = 1
253  integer, allocatable :: pelist(:)
254  logical, allocatable :: grids_on_this_pe(:)
255  type(fv_atmos_type), allocatable, target :: atm(:)
256 
257  integer :: id_udt_dyn, id_vdt_dyn
258 
259  real, parameter:: w0_big = 60. ! to prevent negative w-tracer diffusion
260 
261 !---dynamics tendencies for use in fv_subgrid_z and during fv_update_phys
262  real, allocatable, dimension(:,:,:) :: u_dt, v_dt, t_dt
263  real, allocatable :: pref(:,:), dum1d(:)
264 
265  logical :: first_diag = .true.
266 
267 contains
268 
269 
273  subroutine atmosphere_init (Time_init, Time, Time_step, Grid_box, area)
274 #ifdef CCPP
275  use ccpp_static_api, only: ccpp_physics_init
276  use ccpp_data, only: ccpp_suite, &
277  cdata => cdata_tile, &
278  ccpp_interstitial
279 #ifdef OPENMP
280  use omp_lib
281 #endif
282 #endif
283  type(time_type), intent(in) :: time_init, time, time_step
284  type(grid_box_type), intent(inout) :: Grid_box
285  real(kind=kind_phys), pointer, dimension(:,:), intent(inout) :: area
286 !--- local variables ---
287  integer :: i, n
288 ! integer :: itrac
289  logical :: do_atmos_nudge
290  character(len=32) :: tracer_name, tracer_units
291  real :: ps1, ps2
292 #ifdef CCPP
293  integer :: nthreads
294  integer :: ierr
295 #endif
296 
297  integer :: nlunit = 9999
298  character (len = 64) :: fn_nml = 'input.nml'
299 
300  !For regional
301  a_step = 0
302  current_time_in_seconds = time_type_to_real( time - time_init )
303  if (mpp_pe() == 0) write(*,"('atmosphere_init: current_time_seconds = ',f9.1)")current_time_in_seconds
304 
305  call timing_on('ATMOS_INIT')
306  allocate(pelist(mpp_npes()))
307  call mpp_get_current_pelist(pelist)
308 
309  zvir = rvgas/rdgas - 1.
310 
311 !---- compute physics/atmos time step in seconds ----
312 
313  time_step_atmos = time_step
314  call get_time (time_step_atmos, sec)
315  dt_atmos = real(sec)
316 
317 !----- initialize FV dynamical core -----
318  !NOTE do we still need the second file_exist call?
319  cold_start = (.not.file_exist('INPUT/fv_core.res.nc') .and. .not.file_exist('INPUT/fv_core.res.tile1.nc'))
320 
321  call fv_control_init( atm, dt_atmos, mygrid, grids_on_this_pe, p_split ) ! allocates Atm components; sets mygrid
322 
323  atm(mygrid)%Time_init = time_init
324 
325  if(atm(mygrid)%flagstruct%warm_start) then
327  endif
328 
329 !----- write version and namelist to log file -----
330  call write_version_number ( 'fvGFS/ATMOSPHERE_MOD', version )
331 
332 !-----------------------------------
333 
334  npx = atm(mygrid)%npx
335  npy = atm(mygrid)%npy
336  npz = atm(mygrid)%npz
337  ncnst = atm(mygrid)%ncnst
338  pnats = atm(mygrid)%flagstruct%pnats
339 
340  isc = atm(mygrid)%bd%isc
341  iec = atm(mygrid)%bd%iec
342  jsc = atm(mygrid)%bd%jsc
343  jec = atm(mygrid)%bd%jec
344 
345  isd = isc - atm(mygrid)%bd%ng
346  ied = iec + atm(mygrid)%bd%ng
347  jsd = jsc - atm(mygrid)%bd%ng
348  jed = jec + atm(mygrid)%bd%ng
349 
350  nq = ncnst-pnats
351  sphum = get_tracer_index(model_atmos, 'sphum' )
352  liq_wat = get_tracer_index(model_atmos, 'liq_wat' )
353  ice_wat = get_tracer_index(model_atmos, 'ice_wat' )
354  rainwat = get_tracer_index(model_atmos, 'rainwat' )
355  snowwat = get_tracer_index(model_atmos, 'snowwat' )
356  graupel = get_tracer_index(model_atmos, 'graupel' )
357 #ifdef CCPP
358  cld_amt = get_tracer_index(model_atmos, 'cld_amt')
359 #endif
360 
361  if (max(sphum,liq_wat,ice_wat,rainwat,snowwat,graupel) > atm(mygrid)%flagstruct%nwat) then
362  call mpp_error (fatal,' atmosphere_init: condensate species are not first in the list of &
363  &tracers defined in the field_table')
364  endif
365 
366  ! Allocate grid variables to be used to calculate gradient in 2nd order flux exchange
367  ! This data is only needed for the COARSEST grid.
368  !call switch_current_Atm(Atm(mygrid))
369  call set_domain(atm(mygrid)%domain)
370 
371  allocate(grid_box%dx ( isc:iec , jsc:jec+1))
372  allocate(grid_box%dy ( isc:iec+1, jsc:jec ))
373  allocate(grid_box%area ( isc:iec , jsc:jec ))
374  allocate(grid_box%edge_w( jsc:jec+1))
375  allocate(grid_box%edge_e( jsc:jec+1))
376  allocate(grid_box%edge_s( isc:iec+1 ))
377  allocate(grid_box%edge_n( isc:iec+1 ))
378  allocate(grid_box%en1 (3, isc:iec , jsc:jec+1))
379  allocate(grid_box%en2 (3, isc:iec+1, jsc:jec ))
380  allocate(grid_box%vlon (3, isc:iec , jsc:jec ))
381  allocate(grid_box%vlat (3, isc:iec , jsc:jec ))
382  grid_box%dx ( isc:iec , jsc:jec+1) = atm(mygrid)%gridstruct%dx ( isc:iec, jsc:jec+1)
383  grid_box%dy ( isc:iec+1, jsc:jec ) = atm(mygrid)%gridstruct%dy ( isc:iec+1, jsc:jec )
384  grid_box%area ( isc:iec , jsc:jec ) = atm(mygrid)%gridstruct%area ( isc:iec , jsc:jec )
385  grid_box%edge_w( jsc:jec+1) = atm(mygrid)%gridstruct%edge_w( jsc:jec+1)
386  grid_box%edge_e( jsc:jec+1) = atm(mygrid)%gridstruct%edge_e( jsc:jec+1)
387  grid_box%edge_s( isc:iec+1 ) = atm(mygrid)%gridstruct%edge_s( isc:iec+1)
388  grid_box%edge_n( isc:iec+1 ) = atm(mygrid)%gridstruct%edge_n( isc:iec+1)
389  grid_box%en1 (:, isc:iec , jsc:jec+1) = atm(mygrid)%gridstruct%en1 (:, isc:iec , jsc:jec+1)
390  grid_box%en2 (:, isc:iec+1, jsc:jec ) = atm(mygrid)%gridstruct%en2 (:, isc:iec+1, jsc:jec )
391  do i = 1,3
392  grid_box%vlon (i, isc:iec , jsc:jec ) = atm(mygrid)%gridstruct%vlon (isc:iec , jsc:jec, i )
393  grid_box%vlat (i, isc:iec , jsc:jec ) = atm(mygrid)%gridstruct%vlat (isc:iec , jsc:jec, i )
394  enddo
395  allocate (area(isc:iec , jsc:jec ))
396  area(isc:iec,jsc:jec) = atm(mygrid)%gridstruct%area_64(isc:iec,jsc:jec)
397 
398 !----- allocate and zero out the dynamics (and accumulated) tendencies
399  allocate( u_dt(isd:ied,jsd:jed,npz), &
400  v_dt(isd:ied,jsd:jed,npz), &
401  t_dt(isc:iec,jsc:jec,npz) )
402 !--- allocate pref
403  allocate(pref(npz+1,2), dum1d(npz+1))
404 
405  call fv_restart(atm(mygrid)%domain, atm, dt_atmos, seconds, days, cold_start, atm(mygrid)%gridstruct%grid_type, mygrid)
406 
407  fv_time = time
408 
409 !----- initialize atmos_axes and fv_dynamics diagnostics
410  !I've had trouble getting this to work with multiple grids at a time; worth revisiting?
411  call fv_diag_init(atm(mygrid:mygrid), atm(mygrid)%atmos_axes, time, npx, npy, npz, atm(mygrid)%flagstruct%p_ref)
412 
413 !---------- reference profile -----------
414  ps1 = 101325.
415  ps2 = 81060.
416  pref(npz+1,1) = ps1
417  pref(npz+1,2) = ps2
418  call get_eta_level ( npz, ps1, pref(1,1), dum1d, atm(mygrid)%ak, atm(mygrid)%bk )
419  call get_eta_level ( npz, ps2, pref(1,2), dum1d, atm(mygrid)%ak, atm(mygrid)%bk )
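! Note on the reference profile: get_eta_level fills pref with the interface pressures
! implied by the hybrid coefficients, roughly p(k) = ak(k) + bk(k)*ps, evaluated here
! for a standard surface pressure (ps1 = 101325 Pa) and a lower one (ps2 = 81060 Pa).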
420 
421 ! --- initialize clocks for dynamics, physics_down and physics_up
422  id_dynam = mpp_clock_id('FV dy-core', flags = clock_flag_default, grain=clock_subcomponent )
423  id_subgridz = mpp_clock_id('FV subgrid_z',flags = clock_flag_default, grain=clock_subcomponent )
424  id_fv_diag = mpp_clock_id('FV Diag', flags = clock_flag_default, grain=clock_subcomponent )
425 
426  call timing_off('ATMOS_INIT')
427 
428 #ifdef CCPP
429  ! Do CCPP fast physics initialization before call to adiabatic_init (since this calls fv_dynamics)
430 
431  ! For fast physics running over the entire domain, block
432  ! and thread number are not used; set to safe values
433  cdata%blk_no = 1
434  cdata%thrd_no = 1
435 
436  ! Create shared data type for fast and slow physics, one for each thread
437 #ifdef OPENMP
438  nthreads = omp_get_max_threads()
439 #else
440  nthreads = 1
441 #endif
442  ! Create interstitial data type for fast physics; for multi-gases physics,
443  ! pass q(:,:,:,1:num_gas) as qvi, otherwise pass q(:,:,:,1:1) as 4D array
444  call ccpp_interstitial%create(atm(mygrid)%bd%is, atm(mygrid)%bd%ie, atm(mygrid)%bd%isd, atm(mygrid)%bd%ied, &
445  atm(mygrid)%bd%js, atm(mygrid)%bd%je, atm(mygrid)%bd%jsd, atm(mygrid)%bd%jed, &
446  atm(mygrid)%npz, atm(mygrid)%ng, &
447  dt_atmos, p_split, atm(mygrid)%flagstruct%k_split, &
448  zvir, atm(mygrid)%flagstruct%p_ref, atm(mygrid)%ak, atm(mygrid)%bk, &
449  liq_wat>0, ice_wat>0, rainwat>0, snowwat>0, graupel>0, &
450  cld_amt>0, kappa, atm(mygrid)%flagstruct%hydrostatic, &
451  atm(mygrid)%flagstruct%do_sat_adj, &
452  atm(mygrid)%delp, atm(mygrid)%delz, atm(mygrid)%gridstruct%area_64, &
453  atm(mygrid)%peln, atm(mygrid)%phis, atm(mygrid)%pkz, atm(mygrid)%pt, &
454 #ifdef MULTI_GASES
455  atm(mygrid)%q(:,:,:,1:max(1,num_gas)), &
456 #else
457  atm(mygrid)%q(:,:,:,1:1), &
458 #endif
459  atm(mygrid)%q(:,:,:,sphum), atm(mygrid)%q(:,:,:,liq_wat), &
460  atm(mygrid)%q(:,:,:,ice_wat), atm(mygrid)%q(:,:,:,rainwat), &
461  atm(mygrid)%q(:,:,:,snowwat), atm(mygrid)%q(:,:,:,graupel), &
462  atm(mygrid)%q(:,:,:,cld_amt), atm(mygrid)%q_con, nthreads, &
463  atm(mygrid)%flagstruct%nwat, &
464 #ifdef MULTI_GASES
465  ngas=num_gas, rilist=ri, cpilist=cpi, &
466 #endif
467  mpirank=mpp_pe(), mpiroot=mpp_root_pe())
468 
469  if (atm(mygrid)%flagstruct%do_sat_adj) then
470  ! Initialize fast physics
471  call ccpp_physics_init(cdata, suite_name=trim(ccpp_suite), group_name="fast_physics", ierr=ierr)
472  if (ierr/=0) then
473  cdata%errmsg = ' atmosphere_dynamics: error in ccpp_physics_init for group fast_physics: ' // trim(cdata%errmsg)
474  call mpp_error (fatal, cdata%errmsg)
475  end if
476  end if
477 #endif
478 
479 ! --- initiate the start for a restarted regional forecast
480  if ( atm(mygrid)%gridstruct%regional .and. atm(mygrid)%flagstruct%warm_start ) then
481 
483  isc, iec, jsc, jec, &
484  isd, ied, jsd, jed )
485  endif
486 
487 
488  if ( atm(mygrid)%flagstruct%nudge ) then
489  call fv_nwp_nudge_init( time, atm(mygrid)%atmos_axes, npz, zvir, atm(mygrid)%ak, atm(mygrid)%bk, atm(mygrid)%ts, &
490  atm(mygrid)%phis, atm(mygrid)%gridstruct, atm(mygrid)%ks, atm(mygrid)%npx, atm(mygrid)%neststruct, atm(mygrid)%bd)
491  call mpp_error(note, 'NWP nudging is active')
492  endif
494 
495 
496  if ( atm(mygrid)%flagstruct%na_init>0 ) then
497  call nullify_domain ( )
498  if ( .not. atm(mygrid)%flagstruct%hydrostatic ) then
499  call prt_maxmin('Before adi: W', atm(mygrid)%w, isc, iec, jsc, jec, atm(mygrid)%ng, npz, 1.)
500  endif
501  call adiabatic_init(zvir,atm(mygrid)%flagstruct%nudge_dz, time)
502  if ( .not. atm(mygrid)%flagstruct%hydrostatic ) then
503  call prt_maxmin('After adi: W', atm(mygrid)%w, isc, iec, jsc, jec, atm(mygrid)%ng, npz, 1.)
504 ! Not nested?
505  call prt_height('na_ini Z500', isc,iec, jsc,jec, 3, npz, 500.e2, atm(mygrid)%phis, atm(mygrid)%delz, &
506  atm(mygrid)%peln, atm(mygrid)%gridstruct%area_64(isc:iec,jsc:jec), atm(mygrid)%gridstruct%agrid_64(isc:iec,jsc:jec,2))
507  endif
508  else
509  call mpp_error(note,'No adiabatic initialization correction in use')
510  endif
511 
512 #ifdef DEBUG
513  call nullify_domain()
514  call fv_diag(atm(mygrid:mygrid), zvir, time, -1)
515 #endif
516 
517  call set_domain(atm(mygrid)%domain)
518 
519  end subroutine atmosphere_init
520 
521 
524  subroutine p_adi(km, ng, ifirst, ilast, jfirst, jlast, ptop, &
525  delp, pt, ps, pe, peln, pk, pkz, hydrostatic)
526 ! Given (ptop, delp) computes (ps, pk, pe, peln, pkz)
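! Relations used below: pe is accumulated downward from ptop using delp, ps = pe(km+1),
! peln = log(pe), pk = pe**kappa, and (hydrostatic case only) pkz is the layer-mean
! p**kappa, i.e. (pk(k+1)-pk(k)) / (kappa*(peln(k+1)-peln(k))).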
527 ! Input:
528  integer, intent(in):: km, ng
529  integer, intent(in):: ifirst, ilast
530  integer, intent(in):: jfirst, jlast
531  logical, intent(in):: hydrostatic
532  real, intent(in):: ptop
533  real, intent(in):: pt(ifirst-ng:ilast+ng,jfirst-ng:jlast+ng, km)
534  real, intent(in):: delp(ifirst-ng:ilast+ng,jfirst-ng:jlast+ng, km)
535 ! Output:
536  real, intent(out) :: ps(ifirst-ng:ilast+ng, jfirst-ng:jlast+ng)
537  real, intent(out) :: pk(ifirst:ilast, jfirst:jlast, km+1)
538  real, intent(out) :: pe(ifirst-1:ilast+1,km+1,jfirst-1:jlast+1)
539  real, intent(out) :: peln(ifirst:ilast, km+1, jfirst:jlast)
540  real, intent(out) :: pkz(ifirst:ilast, jfirst:jlast, km)
541 ! Local
542  real pek
543  integer i, j, k
544 
545  pek = ptop ** kappa
546 !$OMP parallel do default (none) &
547 !$OMP shared (ifirst,ilast,jfirst,jlast,km,ptop,pek,pe,pk, &
548 !$OMP ps,delp,peln,hydrostatic,pkz) &
549 !$OMP private (j, i, k)
550  do j=jfirst,jlast
551  do i=ifirst,ilast
552  pe(i,1,j) = ptop
553  pk(i,j,1) = pek
554  enddo
555 
556  do k=2,km+1
557  do i=ifirst,ilast
558  pe(i,k,j) = pe(i,k-1,j) + delp(i,j,k-1)
559  peln(i,k,j) = log(pe(i,k,j))
560  pk(i,j,k) = exp( kappa*peln(i,k,j) )
561  enddo
562  enddo
563 
564  do i=ifirst,ilast
565  ps(i,j) = pe(i,km+1,j)
566  enddo
567 
568  if ( hydrostatic ) then
569  do k=1,km
570  do i=ifirst,ilast
571  pkz(i,j,k) = (pk(i,j,k+1)-pk(i,j,k))/(kappa*(peln(i,k+1,j)-peln(i,k,j)))
572  enddo
573  enddo
574  endif
575  enddo
576 
577  end subroutine p_adi
578 
579 
582  subroutine atmosphere_dynamics ( Time )
583  type(time_type),intent(in) :: Time
584  integer :: n, psc, atmos_time_step
585  integer :: k, w_diff, nt_dyn, n_split_loc, seconds, days
586 
587  type(time_type) :: atmos_time
588 
589 !---- Call FV dynamics -----
590 
591  call mpp_clock_begin (id_dynam)
592 
593  n = mygrid
594 
595  call get_time (time, seconds, days)
596 ! if (seconds < 10800 .and. days == 0) then
597 ! n_split_loc = (Atm(n)%flagstruct%n_split * 3) / 2
598  if (seconds < nint(3600*atm(n)%flagstruct%fhouri) .and. atm(n)%flagstruct%fac_n_spl > 1.0) then
599  n_split_loc = nint(atm(n)%flagstruct%n_split * atm(n)%flagstruct%fac_n_spl)
600  else
601  n_split_loc = atm(n)%flagstruct%n_split
602  endif
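! Illustrative example (hypothetical values): with n_split = 6 and fac_n_spl = 1.5,
! the first fhouri hours of the forecast use nint(6*1.5) = 9 acoustic sub-steps per
! remapping step, after which n_split_loc falls back to the configured value of 6.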
603 
604 ! write(0,*)' before calling init n_split_loc=',n_split_loc,' seconds=',seconds,' days=',days,&
605 ! ' n_split=',Atm(mytile)%flagstruct%n_split,' mytile=',mytile
606 
607  a_step = a_step + 1
608 !
609 !*** If this is a regional run then read in the next boundary data when it is time.
610 !
611  if(atm(n)%flagstruct%regional)then
612 
613  call read_new_bc_data(atm(n), time, time_step_atmos, p_split, &
614  isd, ied, jsd, jed )
615  endif
616 
617  do psc=1,abs(p_split)
618  p_step = psc
619  call timing_on('fv_dynamics')
620 !uc/vc only need to be the same on the coarse grid? However, the BCs do need to be the same
621  call fv_dynamics(npx, npy, npz, nq, atm(n)%ng, dt_atmos/real(abs(p_split)),&
622  Atm(n)%flagstruct%consv_te, Atm(n)%flagstruct%fill, &
623  Atm(n)%flagstruct%reproduce_sum, kappa, cp_air, zvir, &
624  Atm(n)%ptop, Atm(n)%ks, nq, &
625  n_split_loc, Atm(n)%flagstruct%q_split, &
626 ! Atm(n)%flagstruct%n_split, Atm(n)%flagstruct%q_split, &
627  atm(n)%u, atm(n)%v, atm(n)%w, atm(n)%delz, &
628  atm(n)%flagstruct%hydrostatic, &
629  atm(n)%pt , atm(n)%delp, atm(n)%q, atm(n)%ps, &
630  atm(n)%pe, atm(n)%pk, atm(n)%peln, &
631  atm(n)%pkz, atm(n)%phis, atm(n)%q_con, &
632  atm(n)%omga, atm(n)%ua, atm(n)%va, atm(n)%uc, &
633  atm(n)%vc, atm(n)%ak, atm(n)%bk, atm(n)%mfx, &
634  atm(n)%mfy , atm(n)%cx, atm(n)%cy, atm(n)%ze0, &
635  atm(n)%flagstruct%hybrid_z, &
636  atm(n)%gridstruct, atm(n)%flagstruct, &
637  atm(n)%neststruct, atm(n)%idiag, atm(n)%bd, &
638  atm(n)%parent_grid, atm(n)%domain,atm(n)%diss_est)
639 
640  call timing_off('fv_dynamics')
641 
642  if (ngrids > 1 .and. (psc < p_split .or. p_split < 0)) then
643  call mpp_sync()
644  call timing_on('TWOWAY_UPDATE')
646  call timing_off('TWOWAY_UPDATE')
647  endif
648 
649  end do !p_split
650  call mpp_clock_end (id_dynam)
651 
652 !-----------------------------------------------------
653 !--- COMPUTE SUBGRID Z
654 !-----------------------------------------------------
655 !--- zero out tendencies
656  call mpp_clock_begin (id_subgridz)
657  u_dt(:,:,:) = 0.
658  v_dt(:,:,:) = 0.
659  t_dt(:,:,:) = 0.
660 
661  w_diff = get_tracer_index(model_atmos, 'w_diff' )
662  if ( atm(n)%flagstruct%fv_sg_adj > 0 ) then
663  nt_dyn = nq
664  if ( w_diff /= no_tracer ) then
665  nt_dyn = nq - 1
666  endif
667  call fv_subgrid_z(isd, ied, jsd, jed, isc, iec, jsc, jec, atm(n)%npz, &
668  nt_dyn, dt_atmos, atm(n)%flagstruct%fv_sg_adj, &
669  atm(n)%flagstruct%nwat, atm(n)%delp, atm(n)%pe, &
670  atm(n)%peln, atm(n)%pkz, atm(n)%pt, atm(n)%q, &
671  atm(n)%ua, atm(n)%va, atm(n)%flagstruct%hydrostatic,&
672  atm(n)%w, atm(n)%delz, u_dt, v_dt, t_dt, &
673  atm(n)%flagstruct%n_sponge)
674  endif
675 
676 #ifdef USE_Q_DT
677  if ( .not. atm(n)%flagstruct%hydrostatic .and. w_diff /= no_tracer ) then
678 !$OMP parallel do default (none) &
679 !$OMP shared (isc, iec, jsc, jec, w_diff, n, Atm, q_dt) &
680 !$OMP private (k)
681  do k=1, atm(n)%npz
682  atm(n)%q(isc:iec,jsc:jec,k,w_diff) = atm(n)%w(isc:iec,jsc:jec,k) + w0_big
683  q_dt(:,:,k,w_diff) = 0.
684  enddo
685  endif
686 #endif
687 
688  call mpp_clock_end (id_subgridz)
689 
690  end subroutine atmosphere_dynamics
691 
692 
695  subroutine atmosphere_end (Time, Grid_box, restart_endfcst)
696 #ifdef CCPP
697  use ccpp_static_api, only: ccpp_physics_finalize
698  use ccpp_data, only: ccpp_suite
699  use ccpp_data, only: cdata => cdata_tile
700 #endif
701  type(time_type), intent(in) :: time
702  type(grid_box_type), intent(inout) :: Grid_box
703  logical, intent(in) :: restart_endfcst
704 
705 #ifdef CCPP
706  integer :: ierr
707  if (atm(mygrid)%flagstruct%do_sat_adj) then
708  ! Finalize fast physics
709  call ccpp_physics_finalize(cdata, suite_name=trim(ccpp_suite), group_name="fast_physics", ierr=ierr)
710  if (ierr/=0) then
711  cdata%errmsg = ' atmosphere_dynamics: error in ccpp_physics_finalize for group fast_physics: ' // trim(cdata%errmsg)
712  call mpp_error (fatal, cdata%errmsg)
713  end if
714  end if
715 #endif
716 
717  ! initialize domains for writing global physics data
718  call set_domain ( atm(mygrid)%domain )
719 
720  if ( atm(mygrid)%flagstruct%nudge ) call fv_nwp_nudge_end
721  call nullify_domain ( )
722  if (first_diag) then
723  call timing_on('FV_DIAG')
724  call fv_diag(atm(mygrid:mygrid), zvir, fv_time, atm(mygrid)%flagstruct%print_freq)
725  call fv_nggps_diag_init(atm(mygrid:mygrid), atm(mygrid)%atmos_axes, fv_time)
727  first_diag = .false.
728  call timing_off('FV_DIAG')
729  endif
730 
731  call fv_end(atm, mygrid, restart_endfcst)
732  deallocate (atm)
733 
734  deallocate( u_dt, v_dt, t_dt, pref, dum1d )
735 
736  end subroutine atmosphere_end
737 
738 
743  subroutine atmosphere_restart(timestamp)
744  character(len=*), intent(in) :: timestamp
745 
746  call fv_write_restart(atm(mygrid), timestamp)
747 
748  end subroutine atmosphere_restart
749 
750 
754  subroutine atmosphere_resolution (i_size, j_size, global)
755  integer, intent(out) :: i_size, j_size
756  logical, intent(in), optional :: global
757  logical :: local
758 
759  local = .true.
760  if( PRESENT(global) ) local = .NOT.global
761 
762  if( local ) then
763  i_size = iec - isc + 1
764  j_size = jec - jsc + 1
765  else
766  i_size = npx - 1
767  j_size = npy - 1
768  end if
769 
770  end subroutine atmosphere_resolution
771 
772 
774  subroutine atmosphere_pref (p_ref)
775  real, dimension(:,:), intent(inout) :: p_ref
776 
777  p_ref = pref
778 
779  end subroutine atmosphere_pref
780 
781 
782  subroutine atmosphere_control_data (i1, i2, j1, j2, kt, p_hydro, hydro, tile_num)
783  integer, intent(out) :: i1, i2, j1, j2, kt
784  logical, intent(out), optional :: p_hydro, hydro
785  integer, intent(out), optional :: tile_num
786  i1 = atm(mygrid)%bd%isc
787  i2 = atm(mygrid)%bd%iec
788  j1 = atm(mygrid)%bd%jsc
789  j2 = atm(mygrid)%bd%jec
790  kt = atm(mygrid)%npz
791 
792  if (present(tile_num)) tile_num = atm(mygrid)%tile_of_mosaic
793  if (present(p_hydro)) p_hydro = atm(mygrid)%flagstruct%phys_hydrostatic
794  if (present( hydro)) hydro = atm(mygrid)%flagstruct%hydrostatic
795 
796  end subroutine atmosphere_control_data
797 
798 
801  subroutine atmosphere_grid_ctr (lon, lat)
802  real(kind=kind_phys), intent(out) :: lon(:,:), lat(:,:)
803 ! Local data:
804  integer i,j
805 
806  do j=jsc,jec
807  do i=isc,iec
808  lon(i-isc+1,j-jsc+1) = atm(mygrid)%gridstruct%agrid_64(i,j,1)
809  lat(i-isc+1,j-jsc+1) = atm(mygrid)%gridstruct%agrid_64(i,j,2)
810  enddo
811  end do
812 
813  end subroutine atmosphere_grid_ctr
814 
815 
818  subroutine atmosphere_grid_bdry (blon, blat, global)
819  real, intent(out) :: blon(:,:), blat(:,:)
820  logical, intent(in), optional :: global
821 ! Local data:
822  integer i,j
823 
824  if( PRESENT(global) ) then
825  if (global) call mpp_error(fatal, '==> global grid is no longer available &
826  & in the Cubed Sphere')
827  endif
828 
829  do j=jsc,jec+1
830  do i=isc,iec+1
831  blon(i-isc+1,j-jsc+1) = atm(mygrid)%gridstruct%grid(i,j,1)
832  blat(i-isc+1,j-jsc+1) = atm(mygrid)%gridstruct%grid(i,j,2)
833  enddo
834  end do
835 
836  end subroutine atmosphere_grid_bdry
837 
838 
839  subroutine set_atmosphere_pelist ()
840  call mpp_set_current_pelist(atm(mygrid)%pelist, no_sync=.true.)
841  end subroutine set_atmosphere_pelist
842 
843 
848  subroutine atmosphere_domain ( fv_domain, layout, regional, nested, pelist )
849  type(domain2d), intent(out) :: fv_domain
850  integer, intent(out) :: layout(2)
851  logical, intent(out) :: regional
852  logical, intent(out) :: nested
853  integer, pointer, intent(out) :: pelist(:)
854 ! returns the domain2d variable associated with the coupling grid
855 ! note: coupling is done using the mass/temperature grid with no halos
856 
857  fv_domain = atm(mygrid)%domain_for_coupler
858  layout(1:2) = atm(mygrid)%layout(1:2)
859  regional = atm(mygrid)%flagstruct%regional
860  nested = ngrids > 1
861  call set_atmosphere_pelist()
862  pelist => atm(mygrid)%pelist
863 
864  end subroutine atmosphere_domain
865 
866 
869  subroutine atmosphere_diag_axes ( axes )
870  integer, intent(out) :: axes (:)
871 
872 !----- returns the axis indices for the atmospheric (mass) grid -----
873  if ( size(axes(:)) < 0 .or. size(axes(:)) > 4 ) call error_mesg ( &
874  'get_atmosphere_axes in atmosphere_mod', &
875  'size of argument is incorrect', fatal )
876 
877  axes(1:size(axes(:))) = atm(mygrid)%atmos_axes (1:size(axes(:)))
878 
879  end subroutine atmosphere_diag_axes
880 
881 
886  subroutine atmosphere_etalvls (ak, bk, flip)
887  real(kind=kind_phys), pointer, dimension(:), intent(inout) :: ak, bk
888  logical, intent(in) :: flip
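! flip = .true. returns ak/bk reversed (surface upward), the ordering typically
! expected by the GFS physics; flip = .false. keeps FV3's native top-down ordering.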
889 
890  allocate(ak(npz+1))
891  allocate(bk(npz+1))
892 
893  if (flip) then
894  ak(1:npz+1) = atm(mygrid)%ak(npz+1:1:-1)
895  bk(1:npz+1) = atm(mygrid)%bk(npz+1:1:-1)
896  else
897  ak(1:npz+1) = atm(mygrid)%ak(1:npz+1)
898  bk(1:npz+1) = atm(mygrid)%bk(1:npz+1)
899  endif
900  end subroutine atmosphere_etalvls
901 
902 
908  subroutine atmosphere_hgt (hgt, position, relative, flip)
909  real(kind=kind_phys), pointer, dimension(:,:,:), intent(inout) :: hgt
910  character(len=5), intent(in) :: position
911  logical, intent(in) :: relative
912  logical, intent(in) :: flip
913  !--- local variables ---
914  integer:: lev, k, j, i
915  real(kind=kind_phys), allocatable, dimension(:,:,:) :: z, dz
916 
917  if ((position .ne. "layer") .and. (position .ne. "level")) then
918  call mpp_error (fatal, 'atmosphere_hgt:: incorrect position specification')
919  endif
920 
921  allocate(z(iec-isc+1,jec-jsc+1,npz+1))
922  allocate(dz(iec-isc+1,jec-jsc+1,npz))
923  z = 0
924  dz = 0
925 
926  if (atm(mygrid)%flagstruct%hydrostatic) then
927  !--- generate dz using hydrostatic assumption
928  do j = jsc, jec
929  do i = isc, iec
930  dz(i-isc+1,j-jsc+1,1:npz) = (rdgas/grav)*atm(mygrid)%pt(i,j,1:npz) &
931  * (atm(mygrid)%peln(i,1:npz,j) - atm(mygrid)%peln(i,2:npz+1,j))
932  enddo
933  enddo
934  else
935  !--- use non-hydrostatic delz directly
936  do j = jsc, jec
937  do i = isc, iec
938  dz(i-isc+1,j-jsc+1,1:npz) = atm(mygrid)%delz(i,j,1:npz)
939  enddo
940  enddo
941  endif
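! In both branches dz follows the (negative) delz sign convention; the hydrostatic
! branch is the hypsometric relation dz = -(Rd*T/g) * dln(p) evaluated from peln.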
942 
943  !--- calculate geometric heights at the interfaces (levels)
944  !--- if needed, flip the indexing during this step
945  if (flip) then
946  if (.not. relative) then
947  z(1:iec-isc+1,1:jec-jsc+1,1) = atm(mygrid)%phis(isc:iec,jsc:jec)/grav
948  endif
949  do k = 2,npz+1
950  z(:,:,k) = z(:,:,k-1) - dz(:,:,npz+2-k)
951  enddo
952  else
953  if (.not. relative) then
954  z(1:iec-isc+1,1:jec-jsc+1,npz+1) = atm(mygrid)%phis(isc:iec,jsc:jec)/grav
955  endif
956  do k = npz,1,-1
957  z(:,:,k) = z(:,:,k+1) - dz(:,:,k)
958  enddo
959  endif
960 
961  !--- allocate and set either the level or layer height for return
962  if (position == "level") then
963  allocate (hgt(iec-isc+1,jec-jsc+1,npz+1))
964  hgt = z
965  elseif (position == "layer") then
966  allocate (hgt(iec-isc+1,jec-jsc+1,npz))
967  hgt(:,:,1:npz) = 0.5d0 * (z(:,:,1:npz) + z(:,:,2:npz+1))
968  endif
969 
970  deallocate (z)
971  deallocate (dz)
972 
973  end subroutine atmosphere_hgt
974 
975 
981  subroutine atmosphere_scalar_field_halo (data, halo, isize, jsize, ksize, data_p)
982  !--------------------------------------------------------------------
983  ! data - output array to return the field with halo (i,j,k)
984  ! optionally input for field already in (i,j,k) form
985  ! sized to include the halo of the field (+ 2*halo)
986 ! halo - size of the halo (must be 1 or 3; values greater than 3 are not supported)
987 ! isize - horizontal resolution in i-dir with haloes
988 ! jsize - horizontal resolution in j-dir with haloes
989  ! ksize - vertical resolution
990  ! data_p - optional input field in packed format (ix,k)
991  !--------------------------------------------------------------------
992  !--- interface variables ---
993  real(kind=kind_phys), dimension(1:isize,1:jsize,ksize), intent(inout) :: data
996  integer, intent(in) :: halo
997  integer, intent(in) :: isize
998  integer, intent(in) :: jsize
999  integer, intent(in) :: ksize
1000  real(kind=kind_phys), dimension(:,:), optional, intent(in) :: data_p
1001  !--- local variables ---
1002  integer :: i, j, k
1003  integer :: ic, jc
1004  character(len=44) :: modname = 'atmosphere_mod::atmosphere_scalar_field_halo'
1005  integer :: mpp_flags
1006 
1007  !--- perform error checking
1008  if (halo .gt. 3) call mpp_error(fatal, modname//' - halo .gt. 3 requires extending the MPP domain to support')
1009  ic = isize - 2 * halo
1010  jc = jsize - 2 * halo
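! ic/jc are the compute-domain extents without halos; the optional data_p is a packed
! (ix,k) array with ix = i + (j-1)*ic, unpacked below into the interior of data.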
1011 
1012  !--- if packed data is present, unpack it into the two-dimensional data array
1013  if (present(data_p)) then
1014  if (ic*jc .ne. size(data_p,1)) call mpp_error(fatal, modname//' - incorrect sizes for incoming &
1015  &variables data and data_p')
1016  data = 0.
1017 !$OMP parallel do default (none) &
1018 !$OMP shared (data, data_p, halo, ic, jc, ksize) &
1019 !$OMP private (i, j, k)
1020  do k = 1, ksize
1021  do j = 1, jc
1022  do i = 1, ic
1023  data(i+halo, j+halo, k) = data_p(i + (j-1)*ic, k)
1024  enddo
1025  enddo
1026  enddo
1027  endif
1028 
1029  mpp_flags = eupdate + wupdate + supdate + nupdate
1030  if (halo == 1) then
1031  call mpp_update_domains(data, atm(mygrid)%domain_for_coupler, flags=mpp_flags, complete=.true.)
1032  elseif (halo == 3) then
1033  call mpp_update_domains(data, atm(mygrid)%domain, flags=mpp_flags, complete=.true.)
1034  else
1035  call mpp_error(fatal, modname//' - unsupported halo size')
1036  endif
1037 
1038  !--- fill the halo points when at a corner of the cubed-sphere tile
1039  !--- interior domain corners are handled correctly
1040  if ( (isc==1) .or. (jsc==1) .or. (iec==npx-1) .or. (jec==npy-1) ) then
1041  do k = 1, ksize
1042  do j=1,halo
1043  do i=1,halo
1044  if ((isc== 1) .and. (jsc== 1)) data(halo+1-j ,halo+1-i ,k) = data(halo+i ,halo+1-j ,k) !SW Corner
1045  if ((isc== 1) .and. (jec==npy-1)) data(halo+1-j ,halo+jc+i,k) = data(halo+i ,halo+jc+j,k) !NW Corner
1046  if ((iec==npx-1) .and. (jsc== 1)) data(halo+ic+j,halo+1-i ,k) = data(halo+ic-i+1,halo+1-j ,k) !SE Corner
1047  if ((iec==npx-1) .and. (jec==npy-1)) data(halo+ic+j,halo+jc+i,k) = data(halo+ic-i+1,halo+jc+j,k) !NE Corner
1048  enddo
1049  enddo
1050  enddo
1051  endif
1052 
1053  return
1054  end subroutine atmosphere_scalar_field_halo
1055 
1056 
1057  subroutine atmosphere_diss_est (npass)
1059  !--- interface variables ---
1060  integer, intent(in) :: npass
1061  !--- local variables
1062  integer:: k
1063 
1064 !horizontally smooth dissipation estimate for SKEB
1065  ! 3 passes before taking absolute value
1066  do k = 1,min(3,npass)
1067  call del2_cubed(atm(mygrid)%diss_est, 0.25*atm(mygrid)%gridstruct%da_min, atm(mygrid)%gridstruct, &
1068  atm(mygrid)%domain, npx, npy, npz, 3, atm(mygrid)%bd)
1069  enddo
1070 
1071  atm(mygrid)%diss_est=abs(atm(mygrid)%diss_est)
1072 
1073  do k = 4,npass
1074  call del2_cubed(atm(mygrid)%diss_est, 0.25*atm(mygrid)%gridstruct%da_min, atm(mygrid)%gridstruct, &
1075  atm(mygrid)%domain, npx, npy, npz, 3, atm(mygrid)%bd)
1076  enddo
1077  ! provide back sqrt of dissipation estimate
1078  atm(mygrid)%diss_est=sqrt(abs(atm(mygrid)%diss_est))
1079 
1080  end subroutine atmosphere_diss_est
1081 
1082 
1087  subroutine atmosphere_nggps_diag (Time, init, ltavg,avg_max_length)
1088  type(time_type), intent(in) :: Time
1089  logical, optional, intent(in) :: init, ltavg
1090  real, optional, intent(in) :: avg_max_length
1091  if (PRESENT(init)) then
1092  if (init) then
1093  call fv_nggps_diag_init(atm(mygrid:mygrid), atm(mygrid)%atmos_axes, time)
1094  return
1095  else
1096  call mpp_error(fatal, 'atmosphere_nggps_diag - calling with init present, but set to .false.')
1097  endif
1098  endif
1099  if (PRESENT(ltavg)) then
1100  if (ltavg) then
1101  call fv_nggps_tavg(atm(mygrid:mygrid), time_step_atmos,avg_max_length,zvir)
1102  return
1103  endif
1104  else
1105  call fv_nggps_diag(atm(mygrid:mygrid), zvir, time)
1106  endif
1107  end subroutine atmosphere_nggps_diag
1108 
1109 
1110 !--- Need to know the formulation of the mixing ratio being imported into FV3
1111 !--- in order to adjust it in a consistent manner for advection
1112 !rab subroutine atmosphere_tracer_postinit (IPD_Data, Atm_block)
1113 !rab !--- interface variables ---
1114 !rab type(IPD_data_type), intent(in) :: IPD_Data(:)
1115 !rab type(block_control_type), intent(in) :: Atm_block
1116 !rab !--- local variables ---
1117 !rab integer :: i, j, ix, k, k1, n, nwat, nb, blen
1118 !rab real(kind=kind_phys) :: qwat
1119 !rab
1120 !rab if( nq<3 ) call mpp_error(FATAL, 'GFS phys must have 3 interactive tracers')
1121 !rab
1122 !rab n = mygrid
1123 !rab nwat = Atm(n)%flagstruct%nwat
1124 !rab
1125 !rab!$OMP parallel do default (none) &
1126 !rab!$OMP shared (Atm_block, Atm, IPD_Data, npz, nq, ncnst, n, nwat) &
1127 !rab!$OMP private (nb, k, k1, ix, i, j, qwat)
1128 !rab do nb = 1,Atm_block%nblks
1129 !rab do k = 1, npz
1130 !rab k1 = npz+1-k !reverse the k direction
1131 !rab do ix = 1, Atm_block%blksz(nb)
1132 !rab i = Atm_block%index(nb)%ii(ix)
1133 !rab j = Atm_block%index(nb)%jj(ix)
1134 !rab qwat = sum(Atm(n)%q(i,j,k1,1:nwat))
1135 !rab Atm(n)%q(i,j,k1,1:nq) = Atm(n)%q(i,j,k1,1:nq) + IPD_Data(nb)%Stateout%gq0(ix,k,1:nq) * (1.0 - qwat)
1136 !rab if (nq .gt. ncnst) then
1137 !rab Atm(n)%qdiag(i,j,k1,nq+1:ncnst) = Atm(n)%qdiag(i,j,k1,nq+1:ncnst) + IPD_Data(nb)%Stateout%gq0(ix,k,nq+1:ncnst)
1138 !rab endif
1139 !rab enddo
1140 !rab enddo
1141 !rab enddo
1142 !rab
1143 !rab call mpp_update_domains (Atm(n)%q, Atm(n)%domain, complete=.true.)
1144 !rab
1145 !rab return
1146 !rab end subroutine atmosphere_tracer_postinit
1147 
1148 
1149  subroutine get_bottom_mass ( t_bot, tr_bot, p_bot, z_bot, p_surf, slp )
1150 !--------------------------------------------------------------
1151 ! returns temp, sphum, pres, height at the lowest model level
1152 ! and surface pressure
1153 !--------------------------------------------------------------
1154  !--- interface variables ---
1155  real, intent(out), dimension(isc:iec,jsc:jec):: t_bot, p_bot, z_bot, p_surf
1156  real, intent(out), optional, dimension(isc:iec,jsc:jec):: slp
1157  real, intent(out), dimension(isc:iec,jsc:jec,nq):: tr_bot
1158  !--- local variables ---
1159  integer :: i, j, m, k, kr
1160  real :: rrg, sigtop, sigbot
1161  real, dimension(isc:iec,jsc:jec) :: tref
1162  real, parameter :: tlaps = 6.5e-3
1163 
1164  rrg = rdgas / grav
1165 
1166  do j=jsc,jec
1167  do i=isc,iec
1168  p_surf(i,j) = atm(mygrid)%ps(i,j)
1169  t_bot(i,j) = atm(mygrid)%pt(i,j,npz)
1170  p_bot(i,j) = atm(mygrid)%delp(i,j,npz)/(atm(mygrid)%peln(i,npz+1,j)-atm(mygrid)%peln(i,npz,j))
1171  z_bot(i,j) = rrg*t_bot(i,j)*(1. - atm(mygrid)%pe(i,npz,j)/p_bot(i,j))
1172 #ifdef MULTI_GASES
1173  z_bot(i,j) = z_bot(i,j)*virq(atm(mygrid)%q(i,j,npz,:))
1174 #else
1175  z_bot(i,j) = z_bot(i,j)*(1.+zvir*atm(mygrid)%q(i,j,npz,1))
1176 #endif
1177  enddo
1178  enddo
1179 
1180  if ( present(slp) ) then
1181  ! determine 0.8 sigma reference level
1182  sigtop = atm(mygrid)%ak(1)/pstd_mks+atm(mygrid)%bk(1)
1183  do k = 1, npz
1184  sigbot = atm(mygrid)%ak(k+1)/pstd_mks+atm(mygrid)%bk(k+1)
1185  if (sigbot+sigtop > 1.6) then
1186  kr = k
1187  exit
1188  endif
1189  sigtop = sigbot
1190  enddo
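! Standard sea-level reduction: extrapolate T from the ~0.8-sigma layer kr down to the
! surface with lapse rate tlaps, then slp = ps*(1 + tlaps*phis/(tref*grav))**(1/(rrg*tlaps)).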
1191  do j=jsc,jec
1192  do i=isc,iec
1193  ! sea level pressure
1194  tref(i,j) = atm(mygrid)%pt(i,j,kr) * (atm(mygrid)%delp(i,j,kr)/ &
1195  ((atm(mygrid)%peln(i,kr+1,j)-atm(mygrid)%peln(i,kr,j))*atm(mygrid)%ps(i,j)))**(-rrg*tlaps)
1196  slp(i,j) = atm(mygrid)%ps(i,j)*(1.+tlaps*atm(mygrid)%phis(i,j)/(tref(i,j)*grav))**(1./(rrg*tlaps))
1197  enddo
1198  enddo
1199  endif
1200 
1201 ! Copy tracers
1202  do m=1,nq
1203  do j=jsc,jec
1204  do i=isc,iec
1205  tr_bot(i,j,m) = atm(mygrid)%q(i,j,npz,m)
1206  enddo
1207  enddo
1208  enddo
1209 
1210  end subroutine get_bottom_mass
1211 
1212 
1213  subroutine get_bottom_wind ( u_bot, v_bot )
1214 !-----------------------------------------------------------
1215 ! returns u and v on the mass grid at the lowest model level
1216 !-----------------------------------------------------------
1217  !--- interface variables ---
1218  real, intent(out), dimension(isc:iec,jsc:jec):: u_bot, v_bot
1219  !--- local variables ---
1220  integer i, j
1221 
1222  do j=jsc,jec
1223  do i=isc,iec
1224  u_bot(i,j) = atm(mygrid)%u_srf(i,j)
1225  v_bot(i,j) = atm(mygrid)%v_srf(i,j)
1226  enddo
1227  enddo
1228 
1229  end subroutine get_bottom_wind
1230 
1231 
1235  subroutine atmosphere_get_bottom_layer (Atm_block, DYCORE_Data)
1236 !--------------------------------------------------------------
1237 ! Returns in blocked format:
1238 ! temp, pres, winds, height, and tracers at the bottom layer
1239 ! surface pressure
1240 ! sea-level pressure
1241 !--------------------------------------------------------------
1242  !--- interface variables ---
1243  type(block_control_type), intent(in) :: Atm_block
1244  type(dycore_data_type), intent(inout) :: DYCORE_Data(:)
1245  !--- local variables ---
1246  integer :: ix, i, j, nt, k, nb
1247  real :: rrg, sigtop, sigbot, tref
1248  !--- local parameters ---
1249  real, parameter :: tlaps = 6.5e-3
1250  !--- local saved variables ---
1251  integer, save :: kr
1252  logical, save :: first_time = .true.
1253 
1254  rrg = rdgas / grav
1255 
1256  if (first_time) then
1257  ! determine 0.8 sigma reference level
1258  sigtop = atm(mygrid)%ak(1)/pstd_mks+atm(mygrid)%bk(1)
1259  do k = 1, npz
1260  sigbot = atm(mygrid)%ak(k+1)/pstd_mks+atm(mygrid)%bk(k+1)
1261  if (sigbot+sigtop > 1.6) then
1262  kr = k
1263  exit
1264  endif
1265  sigtop = sigbot
1266  enddo
1267  !--- allocate the DYCORE_Data container
1268  do nb = 1,atm_block%nblks
1269  call dycore_data(nb)%Coupling%create (atm_block%blksz(nb), nq)
1270  enddo
1271  first_time = .false.
1272  endif
1273 
1274 !$OMP parallel do default (none) &
1275 !$OMP shared (Atm_block, DYCORE_Data, Atm, mygrid, npz, kr, rrg, zvir, nq) &
1276 !$OMP private (nb, ix, i, j, tref, nt)
1277  do nb = 1,atm_block%nblks
1278  do ix = 1,atm_block%blksz(nb)
1279  i = atm_block%index(nb)%ii(ix)
1280  j = atm_block%index(nb)%jj(ix)
1281  !--- surface pressure
1282  dycore_data(nb)%Coupling%p_srf(ix) = atm(mygrid)%ps(i,j)
1283  !--- bottom layer temperature, pressure, & winds
1284  dycore_data(nb)%Coupling%t_bot(ix) = atm(mygrid)%pt(i,j,npz)
1285  dycore_data(nb)%Coupling%p_bot(ix) = atm(mygrid)%delp(i,j,npz)/(atm(mygrid)%peln(i,npz+1,j)-atm(mygrid)%peln(i,npz,j))
1286  dycore_data(nb)%Coupling%u_bot(ix) = atm(mygrid)%u_srf(i,j)
1287  dycore_data(nb)%Coupling%v_bot(ix) = atm(mygrid)%v_srf(i,j)
1288  !--- bottom layer height based on hydrostatic assumptions
1289  dycore_data(nb)%Coupling%z_bot(ix) = rrg*dycore_data(nb)%Coupling%t_bot(ix) * &
1290  (1. - atm(mygrid)%pe(i,npz,j)/dycore_data(nb)%Coupling%p_bot(ix))
1291 #ifdef MULTI_GASES
1292  dycore_data(nb)%Coupling%z_bot(ix) = dycore_data(nb)%Coupling%z_bot(ix)*virq(atm(mygrid)%q(i,j,npz,:))
1293 #else
1294  dycore_data(nb)%Coupling%z_bot(ix) = dycore_data(nb)%Coupling%z_bot(ix)*(1.+zvir*atm(mygrid)%q(i,j,npz,1))
1295 #endif
1296  !--- sea level pressure
1297  tref = atm(mygrid)%pt(i,j,kr) * (atm(mygrid)%delp(i,j,kr)/ &
1298  ((atm(mygrid)%peln(i,kr+1,j)-atm(mygrid)%peln(i,kr,j))*atm(mygrid)%ps(i,j)))**(-rrg*tlaps)
1299  dycore_data(nb)%Coupling%slp(ix) = atm(mygrid)%ps(i,j)*(1.+tlaps*atm(mygrid)%phis(i,j)/(tref*grav))**(1./(rrg*tlaps))
1300  enddo
1301 
1302  !--- bottom layer tracers
1303  do nt = 1,nq
1304  do ix = 1,atm_block%blksz(nb)
1305  i = atm_block%index(nb)%ii(ix)
1306  j = atm_block%index(nb)%jj(ix)
1307  dycore_data(nb)%Coupling%tr_bot(ix,nt) = atm(mygrid)%q(i,j,npz,nt)
1308  enddo
1309  enddo
1310  enddo
1311 
1312  end subroutine atmosphere_get_bottom_layer
1313 
1314 
1315  subroutine get_stock_pe(index, value)
1316  integer, intent(in) :: index
1317  real, intent(out) :: value
1318 
1319 #ifdef USE_STOCK
1320  include 'stock.inc'
1321 #endif
1322 
1323  real wm(isc:iec,jsc:jec)
1324  integer i,j,k
1325  real, pointer :: area(:,:)
1326 
1327  area => atm(mygrid)%gridstruct%area
1328 
1329  select case (index)
1330 
1331 #ifdef USE_STOCK
1332  case (istock_water)
1333 #else
1334  case (1)
1335 #endif
1336 
1337 !----------------------
1338 ! Perform vertical sum:
1339 !----------------------
1340  wm = 0.
1341  do j=jsc,jec
1342  do k=1,npz
1343  do i=isc,iec
1344 ! Warning: the following works only with AM2 physics (water vapor, cloud water, cloud ice).
1345  wm(i,j) = wm(i,j) + atm(mygrid)%delp(i,j,k) * ( atm(mygrid)%q(i,j,k,1) + &
1346  atm(mygrid)%q(i,j,k,2) + &
1347  atm(mygrid)%q(i,j,k,3) )
1348  enddo
1349  enddo
1350  enddo
1351 
1352 !----------------------
1353 ! Horizontal sum:
1354 !----------------------
1355  value = 0.
1356  do j=jsc,jec
1357  do i=isc,iec
1358  value = value + wm(i,j)*area(i,j)
1359  enddo
1360  enddo
1361  value = value/grav
1362 
1363  case default
1364  value = 0.0
1365  end select
1366 
1367  end subroutine get_stock_pe
1368 
1369 
1372  subroutine atmosphere_state_update (Time, IPD_Data, IAU_Data, Atm_block, flip_vc)
1373  type(time_type), intent(in) :: Time
1374  type(ipd_data_type), intent(in) :: IPD_Data(:)
1375  type(iau_external_data_type), intent(in) :: IAU_Data
1376  type(block_control_type), intent(in) :: Atm_block
1377  logical, intent(in) :: flip_vc
1378  !--- local variables ---
1379  type(time_type) :: Time_prev, Time_next
1380  integer :: i, j, ix, k, k1, n, w_diff, nt_dyn, iq
1381  integer :: nb, blen, nwat, dnats, nq_adv
1382  real(kind=kind_phys):: rcp, q0, qwat(nq), qt, rdt
1383  real psum, qsum, psumb, qsumb, betad
1384  character(len=32) :: tracer_name
1385  time_prev = time
1386  time_next = time + time_step_atmos
1387  rdt = 1.d0 / dt_atmos
1388 
1389  n = mygrid
1390  nwat = atm(n)%flagstruct%nwat
1391  dnats = atm(mygrid)%flagstruct%dnats
1392  nq_adv = nq - dnats
1393 
1394  if( nq<3 ) call mpp_error(fatal, 'GFS phys must have 3 interactive tracers')
1395 
1396  if (iau_data%in_interval) then
1397  if (iau_data%drymassfixer) then
1398  ! global mean total pressure and water before IAU
1399  psumb = g_sum(atm(n)%domain,sum(atm(n)%delp(isc:iec,jsc:jec,1:npz),dim=3),&
1400  isc,iec,jsc,jec,atm(n)%ng,atm(n)%gridstruct%area_64,1,reproduce=.true.)
1401  qsumb = g_sum(atm(n)%domain,&
1402  sum(atm(n)%delp(isc:iec,jsc:jec,1:npz)*sum(atm(n)%q(isc:iec,jsc:jec,1:npz,1:nwat),4),dim=3),&
1403  isc,iec,jsc,jec,atm(n)%ng,atm(n)%gridstruct%area_64,1,reproduce=.true.)
1404  if (is_master()) then
1405  print *,'dry ps before IAU/physics',psumb+atm(n)%ptop-qsumb
1406  endif
1407  endif
1408 
1409 ! IAU increments are in units of 1/sec
1410 
1411 ! add analysis increment to u,v,t tendencies
1412 ! directly update delp with analysis increment
1413  do k = 1, npz
1414  do j = jsc,jec
1415  do i = isc,iec
1416  u_dt(i,j,k) = u_dt(i,j,k) + iau_data%ua_inc(i,j,k)
1417  v_dt(i,j,k) = v_dt(i,j,k) + iau_data%va_inc(i,j,k)
1418  t_dt(i,j,k) = t_dt(i,j,k) + iau_data%temp_inc(i,j,k)
1419  atm(n)%delp(i,j,k) = atm(n)%delp(i,j,k) + iau_data%delp_inc(i,j,k)*dt_atmos
1420  enddo
1421  enddo
1422  enddo
1423  if (.not. atm(mygrid)%flagstruct%hydrostatic) then
1424  do k = 1, npz
1425  do j = jsc,jec
1426  do i = isc,iec
1427  atm(n)%delz(i,j,k) = atm(n)%delz(i,j,k) + iau_data%delz_inc(i,j,k)*dt_atmos
1428  enddo
1429  enddo
1430  enddo
1431  endif
1432 ! add analysis increment to tracers to output from physics
1433  do nb = 1,atm_block%nblks
1434  !if (nb.EQ.1) print*,'in block_update',IAU_Data%in_interval,IAU_Data%temp_inc(isc,jsc,30)
1435  blen = atm_block%blksz(nb)
1436  do k = 1, npz
1437  if(flip_vc) then
1438  k1 = npz+1-k !reverse the k direction
1439  else
1440  k1 = k
1441  endif
1442  do ix = 1, blen
1443  i = atm_block%index(nb)%ii(ix)
1444  j = atm_block%index(nb)%jj(ix)
1445  ipd_data(nb)%Stateout%gq0(ix,k,:) = ipd_data(nb)%Stateout%gq0(ix,k,:) + iau_data%tracer_inc(i,j,k1,:)*dt_atmos
1446  enddo
1447  enddo
1448  enddo
1449  endif
1450 
1451  call set_domain ( atm(mygrid)%domain )
1452 
1453  call timing_on('GFS_TENDENCIES')
1454  call atmos_phys_qdt_diag(atm(n)%q, atm(n)%phys_diag, nt_dyn, dt_atmos, .true.)
1455 !--- put u/v tendencies into haloed arrays u_dt and v_dt
1456 !$OMP parallel do default (none) &
1457 !$OMP shared (rdt, n, nq, dnats, npz, ncnst, nwat, mygrid, u_dt, v_dt, t_dt,&
1458 !$OMP Atm, IPD_Data, Atm_block, sphum, liq_wat, rainwat, ice_wat, &
1459 #ifdef MULTI_GASES
1460 !$OMP num_gas, &
1461 #endif
1462 !$OMP snowwat, graupel, nq_adv, flip_vc) &
1463 !$OMP private (nb, blen, i, j, k, k1, ix, q0, qwat, qt, tracer_name)
1464  do nb = 1,atm_block%nblks
1465 
1466 !SJL: perform vertical filling to fix the negative humidity if the SAS convection scheme is used
1467 ! This call may be commented out if RAS or other positivity-preserving CPS is used.
1468  blen = atm_block%blksz(nb)
1469  if (atm(n)%flagstruct%fill_gfs) call fill_gfs(blen, npz, ipd_data(nb)%Statein%prsi, ipd_data(nb)%Stateout%gq0, 1.e-9_kind_phys)
1470 
1471 !LMH 28sep18: If the name of a tracer ends in 'nopbl' then do NOT update it;
1472  !override this by setting Stateout%gq0(:,:,iq) to the input value
1473  do iq = 1, nq
1474  call get_tracer_names (model_atmos, iq, tracer_name)
1475  if (index(tracer_name, 'nopbl') > 0) then
1476  ipd_data(nb)%Stateout%gq0(:,:,iq) = ipd_data(nb)%Statein%qgrs(:,:,iq)
1477  endif
1478  enddo
1479 
1480 
1481  do k = 1, npz
1482  if(flip_vc) then
1483  k1 = npz+1-k !reverse the k direction
1484  else
1485  k1 = k
1486  endif
1487  do ix = 1, blen
1488  i = atm_block%index(nb)%ii(ix)
1489  j = atm_block%index(nb)%jj(ix)
1490  u_dt(i,j,k1) = u_dt(i,j,k1) + (ipd_data(nb)%Stateout%gu0(ix,k) - ipd_data(nb)%Statein%ugrs(ix,k)) * rdt
1491  v_dt(i,j,k1) = v_dt(i,j,k1) + (ipd_data(nb)%Stateout%gv0(ix,k) - ipd_data(nb)%Statein%vgrs(ix,k)) * rdt
1492 ! t_dt(i,j,k1) = (IPD_Data(nb)%Stateout%gt0(ix,k) - IPD_Data(nb)%Statein%tgrs(ix,k)) * rdt
1493  t_dt(i,j,k1) = t_dt(i,j,k1) + (ipd_data(nb)%Stateout%gt0(ix,k) - ipd_data(nb)%Statein%tgrs(ix,k)) * rdt
1494 ! SJL notes:
1495 ! ---- DO not touch the code below; dry mass conservation may change due to 64bit <-> 32bit conversion
1496 ! GFS total air mass = dry_mass + water_vapor (condensate excluded)
1497 ! GFS mixing ratios = tracer_mass / (dry_mass + vapor_mass)
1498 ! FV3 total air mass = dry_mass + [water_vapor + condensate ]
1499 ! FV3 mixing ratios = tracer_mass / (dry_mass+vapor_mass+cond_mass)
1500  if(flip_vc) then
1501  q0 = ipd_data(nb)%Statein%prsi(ix,k) - ipd_data(nb)%Statein%prsi(ix,k+1)
1502  else
1503  q0 = ipd_data(nb)%Statein%prsi(ix,k+1) - ipd_data(nb)%Statein%prsi(ix,k)
1504  endif
1505  qwat(1:nq_adv) = q0*ipd_data(nb)%Stateout%gq0(ix,k,1:nq_adv)
1506 ! **********************************************************************************************************
1507 ! Dry mass: the following way of updating delp is key to mass conservation with hybrid 32-64 bit computation
1508 ! **********************************************************************************************************
1509 ! The following example is for 2 water species.
1510 ! q0 = Atm(n)%delp(i,j,k1)*(1.-(Atm(n)%q(i,j,k1,1)+Atm(n)%q(i,j,k1,2))) + q1 + q2
1511 #ifdef MULTI_GASES
1512  q0 = atm(n)%delp(i,j,k1)*(1.-sum(atm(n)%q(i,j,k1,1:max(nwat,num_gas)))) + sum(qwat(1:max(nwat,num_gas)))
1513 #else
1514  qt = sum(qwat(1:nwat))
1515  q0 = atm(n)%delp(i,j,k1)*(1.-sum(atm(n)%q(i,j,k1,1:nwat))) + qt
1516 #endif
1517  atm(n)%delp(i,j,k1) = q0
1518  atm(n)%q(i,j,k1,1:nq_adv) = qwat(1:nq_adv) / q0
1519 ! if (dnats .gt. 0) Atm(n)%q(i,j,k1,nq_adv+1:nq) = IPD_Data(nb)%Stateout%gq0(ix,k,nq_adv+1:nq)
1520  enddo
1521  enddo
1522 
1523  !--- diagnostic tracers are assumed to be updated in-place
1524  !--- SHOULD THESE DIAGNOSTIC TRACERS BE MASS ADJUSTED???
1525  !--- See Note in statein...
1526  do iq = nq+1, ncnst
1527  do k = 1, npz
1528  if(flip_vc) then
1529  k1 = npz+1-k !reverse the k direction
1530  else
1531  k1 = k
1532  endif
1533  do ix = 1, blen
1534  i = atm_block%index(nb)%ii(ix)
1535  j = atm_block%index(nb)%jj(ix)
1536  atm(mygrid)%qdiag(i,j,k1,iq) = ipd_data(nb)%Stateout%gq0(ix,k,iq)
1537  enddo
1538  enddo
1539  enddo
1540 
1541  enddo ! nb-loop
1542 
1543 ! dry mass fixer in IAU interval following
1544 ! https://onlinelibrary.wiley.com/doi/full/10.1111/j.1600-0870.2007.00299.x
1545  if (iau_data%in_interval .and. iau_data%drymassfixer) then
1546  ! global mean total pressure
1547  psum = g_sum(atm(n)%domain,sum(atm(n)%delp(isc:iec,jsc:jec,1:npz),dim=3),&
1548  isc,iec,jsc,jec,atm(n)%ng,atm(n)%gridstruct%area_64,1,reproduce=.true.)
1549  ! global mean total water (before adjustment)
1550  qsum = g_sum(atm(n)%domain,&
1551  sum(atm(n)%delp(isc:iec,jsc:jec,1:npz)*sum(atm(n)%q(isc:iec,jsc:jec,1:npz,1:nwat),4),dim=3),&
1552  isc,iec,jsc,jec,atm(n)%ng,atm(n)%gridstruct%area_64,1,reproduce=.true.)
1553  betad = (psum - (psumb - qsumb))/qsum
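! betad rescales the water tracers so that the global dry surface pressure,
! psum + ptop - betad*qsum, is restored to its pre-IAU value psumb + ptop - qsumb.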
1554  if (is_master()) then
1555  print *,'dry ps after IAU/physics',psum+atm(n)%ptop-qsum
1556  endif
1557  atm(n)%q(:,:,:,1:nwat) = betad*atm(n)%q(:,:,:,1:nwat)
1558  !qsum = g_sum(Atm(n)%domain,&
1559  ! sum(Atm(n)%delp(isc:iec,jsc:jec,1:npz)*sum(Atm(n)%q(isc:iec,jsc:jec,1:npz,1:nwat),4),dim=3),&
1560  ! isc,iec,jsc,jec,Atm(n)%ng,Atm(n)%gridstruct%area_64,1)
1561  !if (is_master()) then
1562  ! print *,'dry ps after iau_drymassfixer',psum+Atm(n)%ptop-qsum
1563  !endif
1564  endif
1565 
1566  call timing_off('GFS_TENDENCIES')
1567 
1568  w_diff = get_tracer_index(model_atmos, 'w_diff' )
1569  nt_dyn = ncnst-pnats !nothing more than nq
1570  if ( w_diff /= no_tracer ) then
1571  nt_dyn = nt_dyn - 1
1572  endif
1573 
1574 !--- adjust w and heat tendency for non-hydrostatic case
1575 #ifdef USE_Q_DT
1576  if ( .not.atm(n)%flagstruct%hydrostatic .and. w_diff /= no_tracer ) then
1577  rcp = 1. / cp_air
1578 !$OMP parallel do default (none) &
1579 !$OMP shared (jsc, jec, isc, iec, n, w_diff, Atm, q_dt, t_dt, rcp, dt_atmos) &
1580 !$OMP private (i, j, k)
1581  do k=1, atm(n)%npz
1582  do j=jsc, jec
1583  do i=isc, iec
1584  atm(n)%q(i,j,k,w_diff) = q_dt(i,j,k,w_diff) ! w tendency due to phys
1585 ! Heating due to loss of KE (vertical diffusion of w)
1586  t_dt(i,j,k) = t_dt(i,j,k) - q_dt(i,j,k,w_diff)*rcp*&
1587  (atm(n)%w(i,j,k)+0.5*dt_atmos*q_dt(i,j,k,w_diff))
1588  atm(n)%w(i,j,k) = atm(n)%w(i,j,k) + dt_atmos*atm(n)%q(i,j,k,w_diff)
1589  enddo
1590  enddo
1591  enddo
1592  endif
1593 #endif
1594 
1595  call mpp_clock_begin (id_dynam)
1596  call timing_on('FV_UPDATE_PHYS')
1597  call fv_update_phys( dt_atmos, isc, iec, jsc, jec, isd, ied, jsd, jed, atm(n)%ng, nt_dyn, &
1598  atm(n)%u, atm(n)%v, atm(n)%w, atm(n)%delp, atm(n)%pt, &
1599  atm(n)%q, atm(n)%qdiag, &
1600  atm(n)%ua, atm(n)%va, atm(n)%ps, atm(n)%pe, atm(n)%peln, &
1601  atm(n)%pk, atm(n)%pkz, atm(n)%ak, atm(n)%bk, atm(n)%phis, &
1602  atm(n)%u_srf, atm(n)%v_srf, atm(n)%ts, atm(n)%delz, &
1603  atm(n)%flagstruct%hydrostatic, u_dt, v_dt, t_dt, &
1604  .true., time_next, atm(n)%flagstruct%nudge, atm(n)%gridstruct, &
1605  atm(n)%gridstruct%agrid(:,:,1), atm(n)%gridstruct%agrid(:,:,2), &
1606  atm(n)%npx, atm(n)%npy, atm(n)%npz, atm(n)%flagstruct, &
1607  atm(n)%neststruct, atm(n)%bd, atm(n)%domain, atm(n)%ptop, atm(n)%phys_diag)
1608  call timing_off('FV_UPDATE_PHYS')
1609  call mpp_clock_end (id_dynam)
1610 
1611 !--- nesting update after updating atmospheric variables with
1612 !--- physics tendencies
1613  if (ngrids > 1 .and. p_split > 0) then
1614  call mpp_sync()
1615  call timing_on('TWOWAY_UPDATE')
1617  call timing_off('TWOWAY_UPDATE')
1618  endif
1619  call nullify_domain()
1620 
1621  !---- diagnostics for FV dynamics -----
1622  if (atm(mygrid)%flagstruct%print_freq /= -99) then
1623  call mpp_clock_begin(id_fv_diag)
1624 
1625  fv_time = time_next
1626  call get_time (fv_time, seconds, days)
1627 
1628  call nullify_domain()
1629  call timing_on('FV_DIAG')
1630  call fv_diag(atm(mygrid:mygrid), zvir, fv_time, atm(mygrid)%flagstruct%print_freq)
1631  first_diag = .false.
1632  call timing_off('FV_DIAG')
1633 
1634  call mpp_clock_end(id_fv_diag)
1635  endif
1636 
1637  end subroutine atmosphere_state_update
1638 
1639 
1643  subroutine adiabatic_init(zvir,nudge_dz,time)
1644  type(time_type),intent(in) :: time
1645  real, allocatable, dimension(:,:,:):: u0, v0, t0, dz0, dp0
1646  real, intent(in) :: zvir
1647  logical, intent(inout):: nudge_dz
1648 ! real, parameter:: wt = 1. ! was 2.
1649  real, parameter:: wt = 2.
1650 !***********
1651 ! Haloe Data
1652 !***********
1653  real, parameter:: q1_h2o = 2.2e-6
1654  real, parameter:: q7_h2o = 3.8e-6
1655  real, parameter:: q100_h2o = 3.8e-6
1656  real, parameter:: q1000_h2o = 3.1e-6
1657  real, parameter:: q2000_h2o = 2.8e-6
1658  real, parameter:: q3000_h2o = 3.0e-6
1659  real:: xt, p00, q00
1660  integer:: isc, iec, jsc, jec, npz
1661  integer:: m, n, i,j,k, ngc, n_split_loc, days
1662 
1663  character(len=80) :: errstr
1664 
1665  xt = 1./(1.+wt)
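!
! Each nudging step below forms the weighted mean xt*(x + wt*x0) = (x + wt*x0)/(1+wt),
! i.e. the updated field is relaxed toward its pre-initialization value x0 with
! weight wt/(1+wt) (2/3 for wt = 2).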
1666 
1667  write(errstr,'(A, I4, A)') 'Performing adiabatic init', atm(mygrid)%flagstruct%na_init, ' times'
1668  call mpp_error(note, errstr)
1669  sphum = get_tracer_index(model_atmos, 'sphum' )
1670 
1671  npz = atm(mygrid)%npz
1672 
1673  isc = atm(mygrid)%bd%isc
1674  iec = atm(mygrid)%bd%iec
1675  jsc = atm(mygrid)%bd%jsc
1676  jec = atm(mygrid)%bd%jec
1677 
1678  ngc = atm(mygrid)%ng
1679  isd = isc - ngc
1680  ied = iec + ngc
1681  jsd = jsc - ngc
1682  jed = jec + ngc
1683 
1684  call timing_on('adiabatic_init')
1685  do_adiabatic_init = .true.
1686 
1687  allocate ( u0(isc:iec, jsc:jec+1, npz) )
1688  allocate ( v0(isc:iec+1,jsc:jec, npz) )
1689  allocate (dp0(isc:iec,jsc:jec, npz) )
1690 
1691  if ( atm(mygrid)%flagstruct%hydrostatic ) nudge_dz = .false.
1692 
1693  if ( nudge_dz ) then
1694  allocate (dz0(isc:iec,jsc:jec, npz) )
1695  else
1696  allocate ( t0(isc:iec,jsc:jec, npz) )
1697  endif
1698 
1699 !$omp parallel do default (none) &
1700 !$omp shared (nudge_dz, npz, jsc, jec, isc, iec, n, sphum, u0, v0, t0, dz0, dp0, Atm, zvir, mygrid) &
1701 !$omp private (k, j, i)
1702  do k=1,npz
1703  do j=jsc,jec+1
1704  do i=isc,iec
1705  u0(i,j,k) = atm(mygrid)%u(i,j,k)
1706  enddo
1707  enddo
1708  do j=jsc,jec
1709  do i=isc,iec+1
1710  v0(i,j,k) = atm(mygrid)%v(i,j,k)
1711  enddo
1712  enddo
1713  if ( nudge_dz ) then
1714  do j=jsc,jec
1715  do i=isc,iec
1716  dp0(i,j,k) = atm(mygrid)%delp(i,j,k)
1717  dz0(i,j,k) = atm(mygrid)%delz(i,j,k)
1718  enddo
1719  enddo
1720  else
1721  do j=jsc,jec
1722  do i=isc,iec
1723 #ifdef MULTI_GASES
1724  t0(i,j,k) = atm(mygrid)%pt(i,j,k)*virq(atm(mygrid)%q(i,j,k,:)) ! virt T
1725 #else
1726  t0(i,j,k) = atm(mygrid)%pt(i,j,k)*(1.+zvir*atm(mygrid)%q(i,j,k,sphum)) ! virt T
1727 #endif
1728  dp0(i,j,k) = atm(mygrid)%delp(i,j,k)
1729  enddo
1730  enddo
1731  endif
1732  enddo
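!
! Note that t0 (when allocated) holds the virtual temperature rather than pt itself;
! the nudging below blends in virtual-temperature space and converts back with the
! current (1 + zvir*q_sphum) factor, keeping the relaxed state consistent with the
! evolving moisture field.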
1733 
1734  call get_time (time, seconds, days)
1735  if (seconds < nint(3600*atm(mygrid)%flagstruct%fhouri) .and. atm(mygrid)%flagstruct%fac_n_spl > 1.0) then
1736  n_split_loc = nint(atm(mygrid)%flagstruct%n_split * atm(mygrid)%flagstruct%fac_n_spl)
1737  else
1738  n_split_loc = atm(mygrid)%flagstruct%n_split
1739  endif
1740 
1741 ! write(0,*)' before calling init n_split_loc=',n_split_loc,' seconds=',seconds,' days=',days,&
1742 ! ' n_split=',Atm(mygrid)%flagstruct%n_split,' mygrid=',mygrid
1743 
1744  do m=1,atm(mygrid)%flagstruct%na_init
1745 ! Forward call
1746  call fv_dynamics(atm(mygrid)%npx, atm(mygrid)%npy, npz, nq, atm(mygrid)%ng, dt_atmos, 0., &
1747  atm(mygrid)%flagstruct%fill, atm(mygrid)%flagstruct%reproduce_sum, kappa, cp_air, zvir, &
1748 ! Atm(mygrid)%ptop, Atm(mygrid)%ks, nq, Atm(mygrid)%flagstruct%n_split, &
1749  atm(mygrid)%ptop, atm(mygrid)%ks, nq, n_split_loc, &
1750  atm(mygrid)%flagstruct%q_split, atm(mygrid)%u, atm(mygrid)%v, atm(mygrid)%w, &
1751  atm(mygrid)%delz, atm(mygrid)%flagstruct%hydrostatic, &
1752  atm(mygrid)%pt, atm(mygrid)%delp, atm(mygrid)%q, atm(mygrid)%ps, &
1753  atm(mygrid)%pe, atm(mygrid)%pk, atm(mygrid)%peln, atm(mygrid)%pkz, atm(mygrid)%phis, &
1754  atm(mygrid)%q_con, atm(mygrid)%omga, atm(mygrid)%ua, atm(mygrid)%va, atm(mygrid)%uc, atm(mygrid)%vc, &
1755  atm(mygrid)%ak, atm(mygrid)%bk, atm(mygrid)%mfx, atm(mygrid)%mfy, &
1756  atm(mygrid)%cx, atm(mygrid)%cy, atm(mygrid)%ze0, atm(mygrid)%flagstruct%hybrid_z, &
1757  atm(mygrid)%gridstruct, atm(mygrid)%flagstruct, &
1758  atm(mygrid)%neststruct, atm(mygrid)%idiag, atm(mygrid)%bd, atm(mygrid)%parent_grid, &
1759  atm(mygrid)%domain,atm(mygrid)%diss_est)
1760 ! Backward
1761  call fv_dynamics(atm(mygrid)%npx, atm(mygrid)%npy, npz, nq, atm(mygrid)%ng, -dt_atmos, 0., &
1762  atm(mygrid)%flagstruct%fill, atm(mygrid)%flagstruct%reproduce_sum, kappa, cp_air, zvir, &
1763 ! Atm(mygrid)%ptop, Atm(mygrid)%ks, nq, Atm(mygrid)%flagstruct%n_split, &
1764  atm(mygrid)%ptop, atm(mygrid)%ks, nq, n_split_loc, &
1765  atm(mygrid)%flagstruct%q_split, atm(mygrid)%u, atm(mygrid)%v, atm(mygrid)%w, &
1766  atm(mygrid)%delz, atm(mygrid)%flagstruct%hydrostatic, &
1767  atm(mygrid)%pt, atm(mygrid)%delp, atm(mygrid)%q, atm(mygrid)%ps, &
1768  atm(mygrid)%pe, atm(mygrid)%pk, atm(mygrid)%peln, atm(mygrid)%pkz, atm(mygrid)%phis, &
1769  atm(mygrid)%q_con, atm(mygrid)%omga, atm(mygrid)%ua, atm(mygrid)%va, atm(mygrid)%uc, atm(mygrid)%vc, &
1770  atm(mygrid)%ak, atm(mygrid)%bk, atm(mygrid)%mfx, atm(mygrid)%mfy, &
1771  atm(mygrid)%cx, atm(mygrid)%cy, atm(mygrid)%ze0, atm(mygrid)%flagstruct%hybrid_z, &
1772  atm(mygrid)%gridstruct, atm(mygrid)%flagstruct, &
1773  atm(mygrid)%neststruct, atm(mygrid)%idiag, atm(mygrid)%bd, atm(mygrid)%parent_grid, &
1774  atm(mygrid)%domain,atm(mygrid)%diss_est)
1775 !Nudging back to IC
1776 !$omp parallel do default (none) &
1777 !$omp shared (pref, npz, jsc, jec, isc, iec, n, sphum, Atm, u0, v0, t0, dp0, xt, zvir, mygrid, nudge_dz, dz0) &
1778 !$omp private (i, j, k, p00, q00)
1779  do k=1,npz
1780  do j=jsc,jec+1
1781  do i=isc,iec
1782  atm(mygrid)%u(i,j,k) = xt*(atm(mygrid)%u(i,j,k) + wt*u0(i,j,k))
1783  enddo
1784  enddo
1785  do j=jsc,jec
1786  do i=isc,iec+1
1787  atm(mygrid)%v(i,j,k) = xt*(atm(mygrid)%v(i,j,k) + wt*v0(i,j,k))
1788  enddo
1789  enddo
1790  if( atm(mygrid)%flagstruct%nudge_qv ) then
1791 ! SJL note: Nudging water vapor towards HALOE climatology:
1792 ! In case of better IC (IFS) this step may not be necessary
1793  p00 = atm(mygrid)%pe(isc,k,jsc)
1794  if ( p00 < 30.e2 ) then
1795  if ( p00 < 1. ) then
1796  q00 = q1_h2o
1797  elseif ( p00 <= 7. .and. p00 >= 1. ) then
1798  q00 = q1_h2o + (q7_h2o-q1_h2o)*log(pref(k,1)/1.)/log(7.)
1799  elseif ( p00 < 100. .and. p00 >= 7. ) then
1800  q00 = q7_h2o + (q100_h2o-q7_h2o)*log(pref(k,1)/7.)/log(100./7.)
1801  elseif ( p00 < 1000. .and. p00 >= 100. ) then
1802  q00 = q100_h2o + (q1000_h2o-q100_h2o)*log(pref(k,1)/1.e2)/log(10.)
1803  elseif ( p00 < 2000. .and. p00 >= 1000. ) then
1804  q00 = q1000_h2o + (q2000_h2o-q1000_h2o)*log(pref(k,1)/1.e3)/log(2.)
1805  else
1806  q00 = q2000_h2o + (q3000_h2o-q2000_h2o)*log(pref(k,1)/2.e3)/log(1.5)
1807  endif
1808  do j=jsc,jec
1809  do i=isc,iec
1810  atm(mygrid)%q(i,j,k,sphum) = xt*(atm(mygrid)%q(i,j,k,sphum) + wt*q00)
1811  enddo
1812  enddo
1813  endif
1814  endif
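!
! The q00 targets above interpolate the HALOE water-vapor values linearly in
! log-pressure between nodes at 1, 7, 100, 1000, 2000 and 3000 Pa (0.01-30 hPa);
! e.g. between 1 and 7 Pa the interpolation weight is log(pref(k,1)/1.)/log(7./1.).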
1815  if ( nudge_dz ) then
1816  do j=jsc,jec
1817  do i=isc,iec
1818  atm(mygrid)%delp(i,j,k) = xt*(atm(mygrid)%delp(i,j,k) + wt*dp0(i,j,k))
1819  atm(mygrid)%delz(i,j,k) = xt*(atm(mygrid)%delz(i,j,k) + wt*dz0(i,j,k))
1820  enddo
1821  enddo
1822  else
1823  do j=jsc,jec
1824  do i=isc,iec
1825 #ifdef MULTI_GASES
1826  atm(mygrid)%pt(i,j,k) = xt*(atm(mygrid)%pt(i,j,k) + wt*t0(i,j,k)/virq(atm(mygrid)%q(i,j,k,:)))
1827 #else
1828  atm(mygrid)%pt(i,j,k) = xt*(atm(mygrid)%pt(i,j,k) + wt*t0(i,j,k)/(1.+zvir*atm(mygrid)%q(i,j,k,sphum)))
1829 #endif
1830  atm(mygrid)%delp(i,j,k) = xt*(atm(mygrid)%delp(i,j,k) + wt*dp0(i,j,k))
1831  enddo
1832  enddo
1833  endif
1834 
1835  enddo
1836 
1837 ! Backward
1838  call fv_dynamics(atm(mygrid)%npx, atm(mygrid)%npy, npz, nq, atm(mygrid)%ng, -dt_atmos, 0., &
1839  atm(mygrid)%flagstruct%fill, atm(mygrid)%flagstruct%reproduce_sum, kappa, cp_air, zvir, &
1840  atm(mygrid)%ptop, atm(mygrid)%ks, nq, atm(mygrid)%flagstruct%n_split, &
1841  atm(mygrid)%flagstruct%q_split, atm(mygrid)%u, atm(mygrid)%v, atm(mygrid)%w, &
1842  atm(mygrid)%delz, atm(mygrid)%flagstruct%hydrostatic, &
1843  atm(mygrid)%pt, atm(mygrid)%delp, atm(mygrid)%q, atm(mygrid)%ps, &
1844  atm(mygrid)%pe, atm(mygrid)%pk, atm(mygrid)%peln, atm(mygrid)%pkz, atm(mygrid)%phis, &
1845  atm(mygrid)%q_con, atm(mygrid)%omga, atm(mygrid)%ua, atm(mygrid)%va, atm(mygrid)%uc, atm(mygrid)%vc, &
1846  atm(mygrid)%ak, atm(mygrid)%bk, atm(mygrid)%mfx, atm(mygrid)%mfy, &
1847  atm(mygrid)%cx, atm(mygrid)%cy, atm(mygrid)%ze0, atm(mygrid)%flagstruct%hybrid_z, &
1848  atm(mygrid)%gridstruct, atm(mygrid)%flagstruct, &
1849  atm(mygrid)%neststruct, atm(mygrid)%idiag, atm(mygrid)%bd, atm(mygrid)%parent_grid, &
1850  atm(mygrid)%domain,atm(mygrid)%diss_est)
1851 ! Forward call
1852  call fv_dynamics(atm(mygrid)%npx, atm(mygrid)%npy, npz, nq, atm(mygrid)%ng, dt_atmos, 0., &
1853  atm(mygrid)%flagstruct%fill, atm(mygrid)%flagstruct%reproduce_sum, kappa, cp_air, zvir, &
1854  atm(mygrid)%ptop, atm(mygrid)%ks, nq, atm(mygrid)%flagstruct%n_split, &
1855  atm(mygrid)%flagstruct%q_split, atm(mygrid)%u, atm(mygrid)%v, atm(mygrid)%w, &
1856  atm(mygrid)%delz, atm(mygrid)%flagstruct%hydrostatic, &
1857  atm(mygrid)%pt, atm(mygrid)%delp, atm(mygrid)%q, atm(mygrid)%ps, &
1858  atm(mygrid)%pe, atm(mygrid)%pk, atm(mygrid)%peln, atm(mygrid)%pkz, atm(mygrid)%phis, &
1859  atm(mygrid)%q_con, atm(mygrid)%omga, atm(mygrid)%ua, atm(mygrid)%va, atm(mygrid)%uc, atm(mygrid)%vc, &
1860  atm(mygrid)%ak, atm(mygrid)%bk, atm(mygrid)%mfx, atm(mygrid)%mfy, &
1861  atm(mygrid)%cx, atm(mygrid)%cy, atm(mygrid)%ze0, atm(mygrid)%flagstruct%hybrid_z, &
1862  atm(mygrid)%gridstruct, atm(mygrid)%flagstruct, &
1863  atm(mygrid)%neststruct, atm(mygrid)%idiag, atm(mygrid)%bd, atm(mygrid)%parent_grid, &
1864  atm(mygrid)%domain,atm(mygrid)%diss_est)
1865 ! Nudging back to IC
1866 !$omp parallel do default (none) &
1867 !$omp shared (nudge_dz,npz, jsc, jec, isc, iec, n, sphum, Atm, u0, v0, t0, dz0, dp0, xt, zvir, mygrid) &
1868 !$omp private (i, j, k)
1869  do k=1,npz
1870  do j=jsc,jec+1
1871  do i=isc,iec
1872  atm(mygrid)%u(i,j,k) = xt*(atm(mygrid)%u(i,j,k) + wt*u0(i,j,k))
1873  enddo
1874  enddo
1875  do j=jsc,jec
1876  do i=isc,iec+1
1877  atm(mygrid)%v(i,j,k) = xt*(atm(mygrid)%v(i,j,k) + wt*v0(i,j,k))
1878  enddo
1879  enddo
1880  if ( nudge_dz ) then
1881  do j=jsc,jec
1882  do i=isc,iec
1883  atm(mygrid)%delp(i,j,k) = xt*(atm(mygrid)%delp(i,j,k) + wt*dp0(i,j,k))
1884  atm(mygrid)%delz(i,j,k) = xt*(atm(mygrid)%delz(i,j,k) + wt*dz0(i,j,k))
1885  enddo
1886  enddo
1887  else
1888  do j=jsc,jec
1889  do i=isc,iec
1890 #ifdef MULTI_GASES
1891  atm(mygrid)%pt(i,j,k) = xt*(atm(mygrid)%pt(i,j,k) + wt*t0(i,j,k)/virq(atm(mygrid)%q(i,j,k,:)))
1892 #else
1893  atm(mygrid)%pt(i,j,k) = xt*(atm(mygrid)%pt(i,j,k) + wt*t0(i,j,k)/(1.+zvir*atm(mygrid)%q(i,j,k,sphum)))
1894 #endif
1895  atm(mygrid)%delp(i,j,k) = xt*(atm(mygrid)%delp(i,j,k) + wt*dp0(i,j,k))
1896  enddo
1897  enddo
1898  endif
1899  enddo
1900 
1901  enddo
1902 
1903  deallocate ( u0 )
1904  deallocate ( v0 )
1905  deallocate (dp0 )
1906  if ( allocated(t0) ) deallocate ( t0 )
1907  if ( allocated(dz0) ) deallocate ( dz0 )
1908 
1909  do_adiabatic_init = .false.
1910  call timing_off('adiabatic_init')
1911 
1912  end subroutine adiabatic_init
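!
! A minimal, self-contained sketch of the piecewise log-pressure interpolation used in
! the nudge_qv block of 'adiabatic_init' above.  The name 'haloe_q_ref' is hypothetical
! and the function is not part of the module's interface; it also takes a single
! pressure argument, whereas the loop above branches on p00 but evaluates at pref(k,1).
 pure real function haloe_q_ref(p) result(q00)
  real, intent(in) :: p ! pressure (Pa)
  real, parameter :: q1 = 2.2e-6, q7 = 3.8e-6, q100 = 3.8e-6
  real, parameter :: q1000 = 3.1e-6, q2000 = 2.8e-6, q3000 = 3.0e-6
  if ( p < 1. ) then
     q00 = q1
  elseif ( p <= 7. ) then
     q00 = q1 + (q7-q1)*log(p/1.)/log(7.)
  elseif ( p < 100. ) then
     q00 = q7 + (q100-q7)*log(p/7.)/log(100./7.)
  elseif ( p < 1000. ) then
     q00 = q100 + (q1000-q100)*log(p/1.e2)/log(10.)
  elseif ( p < 2000. ) then
     q00 = q1000 + (q2000-q1000)*log(p/1.e3)/log(2.)
  else
     q00 = q2000 + (q3000-q2000)*log(p/2.e3)/log(1.5)
  endif
 end function haloe_q_ref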
1913 
1914 
1915 
1922 #if defined(OVERLOAD_R4)
1923 #define _DBL_(X) DBLE(X)
1924 #define _RL_(X) REAL(X,KIND=4)
1925 #else
1926 #define _DBL_(X) X
1927 #define _RL_(X) X
1928 #endif
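!
! When OVERLOAD_R4 is defined, _RL_ casts its argument to 4-byte precision and _DBL_
! then promotes the result with DBLE, so dynamics fields are rounded to single
! precision before being stored in the kind_phys Statein arrays below; without
! OVERLOAD_R4 both macros expand to their argument unchanged.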
1929  subroutine atmos_phys_driver_statein (IPD_Data, Atm_block,flip_vc)
1930  type(ipd_data_type), intent(inout) :: ipd_data(:)
1931  type(block_control_type), intent(in) :: atm_block
1932  logical, intent(in) :: flip_vc
1933 !--------------------------------------
1934 ! Local GFS-phys consistent parameters:
1935 !--------------------------------------
1936  real(kind=kind_phys), parameter :: p00 = 1.e5
1937  real(kind=kind_phys), parameter :: qmin = 1.0e-10
1938  real(kind=kind_phys) :: pk0inv, ptop, pktop
1939  real(kind=kind_phys) :: rTv, dm, qgrs_rad
1940  integer :: nb, blen, npz, i, j, k, ix, k1, kz, dnats, nq_adv
1941 #ifdef MULTI_GASES
1942  real :: q_grs(nq), q_min
1943 #endif
1944 
1945 
1946 !!! NOTES: lmh 6nov15
1947 !!! - "Layer" means "layer mean", ie. the average value in a layer
1948 !!! - "Level" means "level interface", ie the point values at the top or bottom of a layer
1949 
1950  ptop = _dbl_(_rl_(atm(mygrid)%ak(1)))
1951  pktop = (ptop/p00)**kappa
1952  pk0inv = (1.0_kind_phys/p00)**kappa
1953 
1954  npz = atm_block%npz
1955  dnats = atm(mygrid)%flagstruct%dnats
1956  nq_adv = nq - dnats
1957 
1958 !---------------------------------------------------------------------
1959 ! use most up to date atmospheric properties when running serially
1960 !---------------------------------------------------------------------
1961 !$OMP parallel do default (none) &
1962 !$OMP shared (Atm_block, Atm, IPD_Data, npz, nq, ncnst, sphum, liq_wat, &
1963 !$OMP ice_wat, rainwat, snowwat, graupel, pk0inv, ptop, &
1964 !$OMP pktop, zvir, mygrid, dnats, nq_adv, flip_vc) &
1965 #ifdef MULTI_GASES
1966 
1967 !$OMP private (dm, nb, blen, i, j, ix, k1, kz, rTv, qgrs_rad, q_min, q_grs)
1968 
1969 #else
1970 !$OMP private (dm, nb, blen, i, j, ix, k1, kz, rTv, qgrs_rad)
1971 #endif
1972 
1973  do nb = 1,atm_block%nblks
1974 ! gas_phase_mass <-- prsl
1975 ! log(pe) <-- prsik
1976 
1977  blen = atm_block%blksz(nb)
1978 
1979  !-- level interface geopotential height (relative to the surface)
1980  if(flip_vc) then
1981  ipd_data(nb)%Statein%phii(:,1) = 0.0_kind_phys
1982  else
1983  ipd_data(nb)%Statein%phii(:,npz+1) = 0.0_kind_phys
1984  endif
1985  ipd_data(nb)%Statein%prsik(:,:) = 1.e25_kind_phys
1986 
1987  do k = 1, npz
1988  !Indices for FV's vertical coordinate, for which 1 = top
1989  !here, k is the index for GFS's vertical coordinate, for which 1 =
1990  !bottom
1991  kz = npz+1-k
1992  if(flip_vc) then
1993  k1 = kz ! flipping the index
1994  else
1995  k1 = k
1996  endif
1997  do ix = 1, blen
1998  i = atm_block%index(nb)%ii(ix)
1999  j = atm_block%index(nb)%jj(ix)
2000 
2001  ipd_data(nb)%Statein%tgrs(ix,k) = _dbl_(_rl_(atm(mygrid)%pt(i,j,k1)))
2002  ipd_data(nb)%Statein%ugrs(ix,k) = _dbl_(_rl_(atm(mygrid)%ua(i,j,k1)))
2003  ipd_data(nb)%Statein%vgrs(ix,k) = _dbl_(_rl_(atm(mygrid)%va(i,j,k1)))
2004  ipd_data(nb)%Statein%vvl(ix,k) = _dbl_(_rl_(atm(mygrid)%omga(i,j,k1)))
2005  ipd_data(nb)%Statein%prsl(ix,k) = _dbl_(_rl_(atm(mygrid)%delp(i,j,k1))) ! Total mass
2006  if (atm(mygrid)%flagstruct%do_skeb)ipd_data(nb)%Statein%diss_est(ix,k) = _dbl_(_rl_(atm(mygrid)%diss_est(i,j,k1)))
2007 
2008  if(flip_vc) then
2009  if (.not.atm(mygrid)%flagstruct%hydrostatic .and. (.not.atm(mygrid)%flagstruct%use_hydro_pressure)) &
2010  ipd_data(nb)%Statein%phii(ix,k+1) = ipd_data(nb)%Statein%phii(ix,k) - _dbl_(_rl_(atm(mygrid)%delz(i,j,k1)*grav))
2011  else
2012  if (.not.atm(mygrid)%flagstruct%hydrostatic .and. (.not.atm(mygrid)%flagstruct%use_hydro_pressure)) &
2013  ipd_data(nb)%Statein%phii(ix,kz) = ipd_data(nb)%Statein%phii(ix,kz+1) - _dbl_(_rl_(atm(mygrid)%delz(i,j,kz)*grav))
2014  endif
2015 
2016 ! Convert to tracer mass:
2017  ipd_data(nb)%Statein%qgrs(ix,k,1:nq_adv) = _dbl_(_rl_(atm(mygrid)%q(i,j,k1,1:nq_adv))) &
2018  * ipd_data(nb)%Statein%prsl(ix,k)
2019  if (dnats > 0) &
2020  ipd_data(nb)%Statein%qgrs(ix,k,nq_adv+1:nq) = _dbl_(_rl_(atm(mygrid)%q(i,j,k1,nq_adv+1:nq)))
2021  !--- SHOULD THESE BE CONVERTED TO MASS SINCE THE DYCORE DOES NOT TOUCH THEM IN ANY WAY???
2022  !--- See Note in state update...
2023  if ( ncnst > nq) &
2024  ipd_data(nb)%Statein%qgrs(ix,k,nq+1:ncnst) = _dbl_(_rl_(atm(mygrid)%qdiag(i,j,k1,nq+1:ncnst)))
2025 ! Remove the contribution of condensates to delp (mass):
2026  if ( atm(mygrid)%flagstruct%nwat == 6 ) then
2027  ipd_data(nb)%Statein%prsl(ix,k) = ipd_data(nb)%Statein%prsl(ix,k) &
2028  - ipd_data(nb)%Statein%qgrs(ix,k,liq_wat) &
2029  - ipd_data(nb)%Statein%qgrs(ix,k,ice_wat) &
2030  - ipd_data(nb)%Statein%qgrs(ix,k,rainwat) &
2031  - ipd_data(nb)%Statein%qgrs(ix,k,snowwat) &
2032  - ipd_data(nb)%Statein%qgrs(ix,k,graupel)
2033  else !variable condensate numbers
2034  ipd_data(nb)%Statein%prsl(ix,k) = ipd_data(nb)%Statein%prsl(ix,k) &
2035  - sum(ipd_data(nb)%Statein%qgrs(ix,k,2:atm(mygrid)%flagstruct%nwat))
2036  endif
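!
! At this point Statein%prsl holds the gas-phase (dry air + water vapor) mass of the
! layer in Pa, since the condensate tracer masses qgrs = q*delp were just removed;
! e.g. a 100 Pa layer with a total condensate mixing ratio of 1.e-3 kg/kg retains
! 99.9 Pa of gas-phase mass.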
2037  enddo
2038  enddo
2039 
2040 ! Re-compute pressure (dry_mass + water_vapor) derived fields:
2041  if(flip_vc) then
2042  do i=1,blen
2043  ipd_data(nb)%Statein%prsi(i,npz+1) = ptop
2044  enddo
2045  do k=npz,1,-1
2046  do i=1,blen
2047  ipd_data(nb)%Statein%prsi(i,k) = ipd_data(nb)%Statein%prsi(i,k+1) + ipd_data(nb)%Statein%prsl(i,k)
2048  ipd_data(nb)%Statein%prsik(i,k) = log( ipd_data(nb)%Statein%prsi(i,k) )
2049 ! Redefine mixing ratios for GFS == tracer_mass / (dry_air_mass + water_vapor_mass)
2050  ipd_data(nb)%Statein%qgrs(i,k,1:nq_adv) = ipd_data(nb)%Statein%qgrs(i,k,1:nq_adv) &
2051  / ipd_data(nb)%Statein%prsl(i,k)
2052  enddo
2053  enddo
2054  else
2055  do i=1,blen
2056  ipd_data(nb)%Statein%prsi(i, 1) = ptop
2057  enddo
2058  do k=1,npz
2059  do i=1,blen
2060  ipd_data(nb)%Statein%prsi(i,k+1) = ipd_data(nb)%Statein%prsi(i,k) + ipd_data(nb)%Statein%prsl(i,k)
2061  ipd_data(nb)%Statein%prsik(i,k) = log( ipd_data(nb)%Statein%prsi(i,k) )
2062 ! Redefine mixing ratios for GFS == tracer_mass / (dry_air_mass + water_vapor_mass)
2063  ipd_data(nb)%Statein%qgrs(i,k,1:nq_adv) = ipd_data(nb)%Statein%qgrs(i,k,1:nq_adv) &
2064  / ipd_data(nb)%Statein%prsl(i,k)
2065  enddo
2066  enddo
2067  endif
2068 
2069  do i=1,blen
2070  ipd_data(nb)%Statein%pgr(i) = ipd_data(nb)%Statein%prsi(i,1) ! surface pressure for GFS
2071  ipd_data(nb)%Statein%prsik(i,npz+1) = log(ptop)
2072  enddo
2073 
2074  do k=1,npz
2075  do i=1,blen
2076 ! Geo-potential at interfaces:
2077 #ifdef MULTI_GASES
2078  q_grs(1:nq_adv) = ipd_data(nb)%Statein%qgrs(i,k,1:nq_adv)
2079  q_min = qmin
2080  rtv = rdgas*ipd_data(nb)%Statein%tgrs(i,k)*virq_max(q_grs(:),q_min)
2081 #else
2082  qgrs_rad = max(qmin,ipd_data(nb)%Statein%qgrs(i,k,sphum))
2083  rtv = rdgas*ipd_data(nb)%Statein%tgrs(i,k)*(1.+zvir*qgrs_rad)
2084 #endif
2085  if ( atm(mygrid)%flagstruct%hydrostatic .or. atm(mygrid)%flagstruct%use_hydro_pressure ) &
2086  ipd_data(nb)%Statein%phii(i,k+1) = ipd_data(nb)%Statein%phii(i,k) &
2087  + rtv*(ipd_data(nb)%Statein%prsik(i,k) &
2088  - ipd_data(nb)%Statein%prsik(i,k+1))
2089 ! Layer mean pressure by perfect gas law:
2090  dm = ipd_data(nb)%Statein%prsl(i,k)
2091  ipd_data(nb)%Statein%prsl(i,k) = dm*rtv/(ipd_data(nb)%Statein%phii(i,k+1) &
2092  - ipd_data(nb)%Statein%phii(i,k))
2093 
2094 !!! Ensure subgrid MONOTONICITY of Pressure: SJL 09/11/2016
2095  if ( .not.atm(mygrid)%flagstruct%hydrostatic ) then
2096 ! If violated, replaces it with hydrostatic pressure
2097  ipd_data(nb)%Statein%prsl(i,k) = min(ipd_data(nb)%Statein%prsl(i,k), &
2098  ipd_data(nb)%Statein%prsi(i,k) - 0.01*dm)
2099  ipd_data(nb)%Statein%prsl(i,k) = max(ipd_data(nb)%Statein%prsl(i,k), &
2100  ipd_data(nb)%Statein%prsi(i,k+1) + 0.01*dm)
2101  endif
2102  enddo
2103  enddo
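!
! For hydrostatic (or use_hydro_pressure) runs the loop above builds the interface
! geopotential from d(phi) = -Rd*Tv*d(ln p); in all cases the layer-mean pressure is
! then recovered from the ideal gas law as prsl = (gas-phase layer mass)*Rd*Tv/dphi,
! and for non-hydrostatic runs it is clamped to lie between the bounding interface
! pressures (offset inward by 1% of the layer mass) so pressure stays monotonic.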
2104 
2105  do k = 1,npz
2106  do i=1,blen
2107 ! Exner function at layer centers: large sensitivity in non-hydrostatic runs with moist kappa
2108  ipd_data(nb)%Statein%prslk(i,k) = exp( kappa*log(ipd_data(nb)%Statein%prsl(i,k)/p00) )
2109 !-- layer center geopotential; geometric midpoint
2110  ipd_data(nb)%Statein%phil(i,k) = 0.5_kind_phys*(ipd_data(nb)%Statein%phii(i,k) &
2111  + ipd_data(nb)%Statein%phii(i,k+1))
2112  enddo
2113  enddo
2114 
2115 ! Compute Exner function at layer "interfaces"
2116  do i=1,blen
2117 ! Top & Bottom edges computed hydrostaticlly
2118  ipd_data(nb)%Statein%prsik(i, 1) = exp( kappa*ipd_data(nb)%Statein%prsik(i,1) )*pk0inv ! bottom
2119  ipd_data(nb)%Statein%prsik(i,npz+1) = pktop ! TOA
2120  enddo
2121 
2122  if ( atm(mygrid)%flagstruct%hydrostatic .or. atm(mygrid)%flagstruct%use_hydro_pressure ) then
2123  do k=2,npz
2124  do i=1,blen
2125  ipd_data(nb)%Statein%prsik(i,k) = exp( kappa*ipd_data(nb)%Statein%prsik(i,k) )*pk0inv
2126  enddo
2127  enddo
2128  endif
2129  enddo
2130 
2131  end subroutine atmos_phys_driver_statein
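!
! A minimal sketch of the pressure bookkeeping performed above for one column, assuming
! GFS ordering (k=1 at the surface) as in the flip_vc branch.  The name
! 'column_pressures' and its arguments are hypothetical; atmosphere.F90 itself works
! block-wise on the Statein container and recomputes prsl from the gas law with Rd*Tv.
 subroutine column_pressures(npz, ptop, delp_gas, kappa, p00, prsi, prslk)
  integer, intent(in)  :: npz              ! number of layers
  real,    intent(in)  :: ptop             ! model-top pressure (Pa)
  real,    intent(in)  :: delp_gas(npz)    ! gas-phase layer mass (Pa), k=1 at the surface
  real,    intent(in)  :: kappa, p00       ! Rd/cp and reference pressure (Pa)
  real,    intent(out) :: prsi(npz+1)      ! interface pressures, k=1 at the surface
  real,    intent(out) :: prslk(npz)       ! layer Exner function (p/p00)**kappa
  integer :: k
  prsi(npz+1) = ptop
  do k = npz, 1, -1
     prsi(k) = prsi(k+1) + delp_gas(k)     ! accumulate mass downward to the surface
  enddo
  do k = 1, npz
     ! simple midpoint pressure used here, purely for illustration
     prslk(k) = exp( kappa*log( 0.5*(prsi(k)+prsi(k+1))/p00 ) )
  enddo
 end subroutine column_pressures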
2132 
2133  subroutine atmos_phys_qdt_diag(q, phys_diag, nq, dt, begin)
2135  integer, intent(IN) :: nq
2136  real, intent(IN) :: dt
2137  logical, intent(IN) :: begin
2138  real, intent(IN) :: q(isd:ied,jsd:jed,npz,nq)
2139  type(phys_diag_type), intent(INOUT) :: phys_diag
2140 
2141  integer sphum, liq_wat, ice_wat ! GFDL AM physics
2142  integer rainwat, snowwat, graupel ! GFDL Cloud Microphysics
2143 
2144  sphum = get_tracer_index(model_atmos, 'sphum')
2145  liq_wat = get_tracer_index(model_atmos, 'liq_wat')
2146  ice_wat = get_tracer_index(model_atmos, 'ice_wat')
2147  rainwat = get_tracer_index(model_atmos, 'rainwat')
2148  snowwat = get_tracer_index(model_atmos, 'snowwat')
2149  graupel = get_tracer_index(model_atmos, 'graupel')
2150 
2151  if (begin) then
2152  if (allocated(phys_diag%phys_qv_dt)) phys_diag%phys_qv_dt = q(isc:iec,jsc:jec,:,sphum)
2153  if (allocated(phys_diag%phys_ql_dt)) then
2154  if (liq_wat < 0) call mpp_error(fatal, " phys_ql_dt needs at least one liquid water tracer defined")
2155  phys_diag%phys_ql_dt = q(isc:iec,jsc:jec,:,liq_wat)
2156  endif
2157  if (allocated(phys_diag%phys_qi_dt)) then
2158  if (ice_wat < 0) then
2159  call mpp_error(warning, " phys_qi_dt needs at least one ice water tracer defined")
2160  phys_diag%phys_qi_dt = 0.
2161  endif
2162  if (ice_wat > 0) phys_diag%phys_qi_dt = q(isc:iec,jsc:jec,:,ice_wat)
2163  endif
2164  else
2165  if (allocated(phys_diag%phys_qv_dt)) phys_diag%phys_qv_dt = q(isc:iec,jsc:jec,:,sphum) - phys_diag%phys_qv_dt
2166  if (allocated(phys_diag%phys_ql_dt)) then
2167  phys_diag%phys_ql_dt = q(isc:iec,jsc:jec,:,liq_wat) - phys_diag%phys_ql_dt
2168  endif
2169  if (allocated(phys_diag%phys_qi_dt)) then
2170  phys_diag%phys_qi_dt = q(isc:iec,jsc:jec,:,ice_wat) - phys_diag%phys_qi_dt
2171  endif
2172  endif
2173 
2174  if (allocated(phys_diag%phys_ql_dt)) then
2175  if (rainwat > 0) phys_diag%phys_ql_dt = q(isc:iec,jsc:jec,:,rainwat) + phys_diag%phys_ql_dt
2176  endif
2177  if (allocated(phys_diag%phys_qi_dt)) then
2178  if (snowwat > 0) phys_diag%phys_qi_dt = q(isc:iec,jsc:jec,:,snowwat) + phys_diag%phys_qi_dt
2179  if (graupel > 0) phys_diag%phys_qi_dt = q(isc:iec,jsc:jec,:,graupel) + phys_diag%phys_qi_dt
2180  endif
2181 
2182  if (.not. begin) then
2183  if (allocated(phys_diag%phys_qv_dt)) phys_diag%phys_qv_dt = phys_diag%phys_qv_dt / dt
2184  if (allocated(phys_diag%phys_ql_dt)) phys_diag%phys_ql_dt = phys_diag%phys_ql_dt / dt
2185  if (allocated(phys_diag%phys_qi_dt)) phys_diag%phys_qi_dt = phys_diag%phys_qi_dt / dt
2186  endif
2187 
2188 
2189  end subroutine atmos_phys_qdt_diag
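!
! The begin/end pattern above snapshots a tracer before the physics and, on the closing
! call, converts (after - before)/dt into a tendency.  A minimal sketch with the
! hypothetical name 'tendency_from_snapshots':
 subroutine tendency_from_snapshots(q_before, q_after, dt, dqdt)
  real, intent(in)  :: q_before(:,:,:)  ! field saved when begin = .true.
  real, intent(in)  :: q_after(:,:,:)   ! same field after the physics update
  real, intent(in)  :: dt               ! physics time step (s)
  real, intent(out) :: dqdt(:,:,:)      ! diagnosed tendency (q units per second)
  dqdt = (q_after - q_before) / dt
 end subroutine tendency_from_snapshots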
2190 
2191 end module atmosphere_mod