UPP 11.0.0
WRFPOST.f
! and 2D diag. output (d2d_chem) for GEFS-Aerosols and CCPP-Chem model.
      PROGRAM wrfpost

!
!
!============================================================================================================
!
!     This is an MPI code. All array indexing is with respect to the global indices. Loop indices
!     look as follows for N MPI tasks.
!
!
!
!                     Original                                New
!                     Index                                   Index
!
!     JM   ----------------------------------------------- JEND
!     JM-1 -                                             - JEND_M
!     JM-2 -                MPI TASK N-1                 - JEND_M2
!          -                                             -
!          -                                             -
!          ----------------------------------------------- JSTA, JSTA_M, JSTA_M2
!          ----------------------------------------------- JEND, JEND_M, JEND_M2
!          -                                             -
!          -                MPI TASK N-2                 -
!          -                                             -
!          -                                             -
!          ----------------------------------------------- JSTA, JSTA_M, JSTA_M2
!
!                               .
!                               .
!                               .
!
!          ----------------------------------------------- JEND, JEND_M, JEND_M2
!          -                                             -
!          -                MPI TASK 1                   -
!          -                                             -
!          -                                             -
!          ----------------------------------------------- JSTA, JSTA_M, JSTA_M2
!          ----------------------------------------------- JEND, JEND_M, JEND_M2
!          -                                             -
!          -                MPI TASK 0                   -
!     3    -                                             - JSTA_M2
!     2    -                                             - JSTA_M
!     1    ----------------------------------------------- JSTA
!
!          1                                             IM
!
!
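! A rough sketch of how each task's row range could be derived from an even
! split of JM rows over NUM_PROCS tasks (the actual assignment, including
! remainder handling, is done in MPI_FIRST; jchunk is illustrative only):
!
!   jchunk = jm / num_procs
!   jsta   = me*jchunk + 1
!   jend   = (me+1)*jchunk
!   if (me == num_procs-1) jend = jm   ! last task absorbs any remainder
!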
!                                           Jim Tuccillo
!                                           Jan 2000
!
!     README - Jim Tuccillo Feb 2001
!
!     Many common blocks have been replaced by modules to support Fortran
!     "allocate" commands. Many of the 3-D arrays are now allocated to be the
!     exact size required based on the number of MPI tasks. The dimensioning is
!          x(im,jsta_2l:jend_2u,lm)
!     Most 2-D arrays continue to be dimensioned (im,jm). This is fine, but please be
!     aware that the EXCH routine for arrays dimensioned (im,jm) is different from the
!     one for arrays dimensioned (im,jsta_2l:jend_2u). Also, be careful about passing
!     any arrays dimensioned (im,jsta_2l:jend_2u,lm). See examples in the code for the
!     correct calling sequence and EXCH routine to use.
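!
!     An illustrative (not executable) sketch of the dimensioning and
!     halo-exchange pattern described above, assuming a 3-D field t and an
!     EXCH routine that fills the halo rows of a subdomain-sized array:
!
!       real, allocatable :: t(:,:,:)
!       allocate(t(im,jsta_2l:jend_2u,lm))   ! rows jsta-2:jend+2 on this task
!       do l = 1, lm
!         call exch(t(1,jsta_2l,l))          ! update halo rows from neighbors
!       end do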
!
!
!     ASYNCHRONOUS I/O HAS BEEN ADDED. THE LAST MPI TASK DOES THE I/O. IF THERE IS
!     ONLY ONE MPI TASK, THEN TASK 0 DOES THE I/O.
!     THE CODE HAS GOTTEN A LITTLE KLUDGY. BASICALLY, IM, IMX AND IMOUT MUST BE EQUAL
!     AND REPRESENT THE VALUE USED IN THE MODEL. THE SAME HOLDS FOR JM, JMX AND JMOUT.
!
!                                           Jim Tuccillo June 2001
!
!
!===========================================================================================
!
      use netcdf
      use nemsio_module, only: nemsio_getheadvar, nemsio_gfile, nemsio_init, nemsio_open, &
                               nemsio_getfilehead, nemsio_close
      use ctlblk_mod,    only: filenameaer, me, num_procs, num_servers, mpi_comm_comp, datestr, &
              mpi_comm_inter, filename, ioform, grib, idat, filenameflux, filenamed3d, gdsdegr, &
              spldef, modelname, ihrst, lsmdef, vtimeunits, tprec, pthresh, datahandle, im, jm, lm, &
              lp1, lm1, im_jm, isf_surface_physics, nsoil, spl, lsmp1, global, imp_physics, &
              ista, iend, ista_m, iend_m, ista_2l, iend_2u, &
              jsta, jend, jsta_m, jend_m, jsta_2l, jend_2u, novegtype, icount_calmict, npset, datapd, &
              lsm, fld_info, etafld2_tim, eta2p_tim, mdl2sigma_tim, cldrad_tim, miscln_tim, &
              mdl2agl_tim, mdl2std_tim, mdl2thandpv_tim, calrad_wcloud_tim, nasa_on, gccpp_on, &
              fixed_tim, time_output, imin, surfce2_tim, komax, ivegsrc, d3d_on, gocart_on, rdaod, &
              readxml_tim, spval, fullmodelname, submodelname, hyb_sigp, filenameflat, aqf_on, numx, &
              run_ifi_tim, slrutah_on, d2d_chem
      use grib2_module,  only: gribit2, num_pset, nrecout, first_grbtbl, grib_info_finalize
      use upp_ifi_mod,   only: write_ifi_debug_files
!- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      implicit none
!
      type(nemsio_gfile) :: nfile, ffile, rfile
      include "mpif.h"
!
!     DECLARE VARIABLES.
!
!     SET HEADER WRITER FLAGS TO TRUE.
!
!     temporary vars
!
      real(kind=8) :: time_initpost=0., initpost_tim=0., btim, bbtim
      real         rinc(5), untcnvt
      integer :: status=0, iostatusd3d=0, iostatusflux=0
      integer i, j, iii, l, k, ierr, nrec, ist, lusig, idrt, ncid3d, ncid2d, varid
      integer :: prntsec, iim, jjm, llm, ioutcount, itmp, iret, iunit, &
                 iunitd3d, iyear, imn, iday, lcntrl, ieof
      integer :: iostatusaer
      logical :: popascal
!
      integer :: kpo, kth, kpv
      real, dimension(komax) :: po, th, pv
      namelist /nampgb/ kpo, po, kth, th, kpv, pv, filenameaer, d3d_on, gocart_on, gccpp_on, nasa_on, popascal &
                       ,hyb_sigp, rdaod, d2d_chem, aqf_on, slrutah_on, vtimeunits, numx, write_ifi_debug_files
      integer :: itag_ierr
      namelist /model_inputs/ filename, ioform, grib, datestr, modelname, submodelname &
                             ,filenameflux, filenameflat

      character startdate*19, sysdepinfo*80, iowrfname*3, post_fname*255
      character cgar*1, cdum*4, line*10
!
!------------------------------------------------------------------------------
!     START HERE
!
      call start()
!
!     INITIALIZE MPI

      CALL setup_servers(me, &
     &                   num_procs, &
     &                   num_servers, &
     &                   mpi_comm_comp, &
     &                   mpi_comm_inter)
!
!     ME IS THE RANK
!     NUM_PROCS IS THE NUMBER OF TASKS DOING THE POSTING
!     NUM_SERVERS IS ONE IF THERE IS MORE THAN ONE TOTAL MPI TASK, OTHERWISE ZERO
!     MPI_COMM_COMP IS THE INTRACOMMUNICATOR
!     MPI_COMM_INTER IS THE INTERCOMMUNICATOR FOR COMMUNICATION BETWEEN TASK 0 OF
!     THE TASKS DOING THE POSTING AND THE I/O SERVER
!
!
!     IF WE HAVE MORE THAN 1 MPI TASK, WE WILL FIRE UP THE I/O SERVER.
!     THE LAST TASK (IN THE CONTEXT OF MPI_COMM_WORLD) IS THE I/O SERVER.
!
      print*,'ME,NUM_PROCS,NUM_SERVERS=',me,num_procs,num_servers

      if (me == 0) CALL w3tagb('nems    ',0000,0000,0000,'np23   ')

      if ( me >= num_procs ) then
!
        call server
!
      else
        spval = 9.9e10
!
!**************************************************************************
!KaYee: Read itag in Fortran namelist format
!Set defaults
        submodelname = 'NONE'
        numx         = 1
!open namelist
        open(5,file='itag')
        read(5,nml=model_inputs,iostat=itag_ierr,err=888)
        !print*,'itag_ierr=',itag_ierr
 888    if (itag_ierr /= 0) then
          print*,'Incorrect namelist variable(s) found in the itag file, stopping!'
          stop
        endif
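! An illustrative 'itag' file in namelist format (values are examples only):
!
!   &model_inputs
!     filename='dynf000.nc'
!     ioform='netcdfpara'
!     grib='grib2'
!     datestr='2023-01-01_00:00:00'
!     modelname='GFS'
!     filenameflux='phyf000.nc'
!   /
!   &nampgb
!     kpo=3,
!     po=1000.,500.,250.,
!   /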

        if (me==0) print*,'fileName= ',filename
        if (me==0) print*,'IOFORM= ',ioform
        if (me==0) print*,'OUTFORM= ',grib
        if (me==0) print*,'DateStr= ',datestr
        if (me==0) print*,'MODELNAME= ',modelname
        if (me==0) print*,'SUBMODELNAME= ',submodelname
        if (me==0) print*,'numx= ',numx
!       if(MODELNAME == 'NMM')then
!         read(5,1114) VTIMEUNITS
! 1114    format(a4)
!         if (me==0) print*,'VALID TIME UNITS = ', VTIMEUNITS
!       endif
!
 303    format('MODELNAME="',a,'" SUBMODELNAME="',a,'"')

        write(*,*)'MODELNAME: ', modelname, submodelname

        if (me==0) print 303,modelname,submodelname
! assume for now that the first date in the stdin file is the start date
        read(datestr,300) iyear,imn,iday,ihrst,imin
        if (me==0) write(*,*) 'in WRFPOST iyear,imn,iday,ihrst,imin', &
                              iyear,imn,iday,ihrst,imin
 300    format(i4,1x,i2,1x,i2,1x,i2,1x,i2)

        idat(1) = imn
        idat(2) = iday
        idat(3) = iyear
        idat(4) = ihrst
        idat(5) = imin

 111    format(a256)
 112    format(a19)
 113    format(a20)
 114    format(a8)
 120    format(a5)
 121    format(a4)

!KaYee: Read in GFS/FV3 runs in Fortran namelist format.
        if (me==0) print*,'MODELNAME= ',modelname,'grib=',grib
        if(modelname == 'GFS' .OR. modelname == 'FV3R') then
          if (me == 0) print*,'first two file names in GFS or FV3= ' &
                             ,trim(filename),trim(filenameflux)
        end if

        if(grib=='grib2') then
          gdsdegr = 1.d6
        endif
        if (me==0) print *,'gdsdegr=',gdsdegr
!
! set defaults for kpo, kth, th, kpv, pv
        kpo = 0
        po  = 0
        kth = 6
        th  = (/310.,320.,350.,450.,550.,650.,(0.,k=kth+1,komax)/) ! isentropic levels to output
        kpv = 8
        pv  = (/0.5,-0.5,1.0,-1.0,1.5,-1.5,2.0,-2.0,(0.,k=kpv+1,komax)/)
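! the implied DO (0.,k=kth+1,komax) above pads the unused trailing entries
! of these komax-long arrays with zeros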

        hyb_sigp    = .true.
        d3d_on      = .false.
        gocart_on   = .false.
        gccpp_on    = .false.
        nasa_on     = .false.
        aqf_on      = .false.
        slrutah_on  = .false.
        popascal    = .false.
        filenameaer = ''
        rdaod       = .false.
        d2d_chem    = .false.

!set control file name
        filenameflat = 'postxconfig-NT.txt'
        read(5,nampgb,iostat=iret,end=119)
 119    continue
        if (me==0) print*,'in itag, write_ifi_debug_files=', write_ifi_debug_files
        if (me==0) print*,'in itag, mod(num_procs,numx)=', mod(num_procs,numx)
        if(mod(num_procs,numx)/=0) then
          if (me==0) then
            print*,'total processes, num_procs=', num_procs
            print*,'number of subdomains in x direction, numx=', numx
            print*,'remainder of num_procs/numx = ', mod(num_procs,numx)
            print*,'Warning!!! The remainder of num_procs/numx is not 0; numx is &
                   & reset to 1 for this run. Adjust numx in the itag file to restart.'
          endif
!         stop 9999
          numx = 1
          if(me == 0) print*,'Warning!!! Reset numx as 1, numx=',numx
        endif
        if(numx > num_procs/2) then
          if (me==0) then
            print*,'total processes, num_procs=', num_procs
            print*,'number of subdomains in x direction, numx=', numx
            print*,'Warning!!! numx cannot exceed num_procs/2; numx is reset to 1'
            print*,'for this run. Adjust numx in the itag file to restart.'
          endif
          numx = 1
          if(me == 0) print*,'Warning!!! Reset numx as 1, numx=',numx
        endif
        if(me == 0) then
          print*,'komax,iret for nampgb= ',komax,iret
          print*,'komax,kpo,kth,th,kpv,pv,fileNameAER,nasa_on,popascal= ',komax,kpo &
                 ,kth,th(1:kth),kpv,pv(1:kpv),trim(filenameaer),nasa_on,popascal
          print*,'NUM_PROCS=',num_procs
          print*,'numx= ',numx
        endif

        IF(trim(ioform) /= 'netcdfpara' .AND. trim(ioform) /= 'netcdf' ) THEN
          numx = 1
          if(me == 0) print*,'2D decomposition only supports netcdfpara IO.'
          if(me == 0) print*,'Reset numx= ',numx
        ENDIF

        IF(modelname /= 'FV3R' .AND. modelname /= 'GFS') THEN
          numx = 1
          if(me == 0) print*,'2D decomposition only supports GFS and FV3R.'
          if(me == 0) print*,'Reset numx= ',numx
        ENDIF

! set up pressure levels from POSTGPVARS or default
        if(kpo == 0) then
! use default pressure levels
          if(me == 0) then
            print*,'using default pressure levels, spldef=',(spldef(l),l=1,lsmdef)
          endif
          lsm = lsmdef
          do l=1,lsm
            spl(l) = spldef(l)
          end do
        else
! use POSTGPVARS
          if(me == 0) then
            print*,'using pressure levels from POSTGPVARS'
          endif
          lsm = kpo
          if( .not. popascal ) then
            untcnvt = 100.
          else
            untcnvt = 1.
          endif
          if(po(lsm) < po(1)) then ! post logic assumes ascending order
            do l=1,lsm
              spl(l) = po(lsm-l+1)*untcnvt
            end do
          else
            do l=1,lsm
              spl(l) = po(l)*untcnvt
            end do
          end if
        end if
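! e.g. with popascal=.false. and po = 1000.,850.,500. (hPa, descending),
! spl becomes 50000.,85000.,100000. Pa, reordered to ascending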
        lsmp1 = lsm+1
        if (me==0) print*,'LSM, SPL = ',lsm,spl(1:lsm)

 116    continue

! set PTHRESH for different models
        if(modelname == 'NMM') then
          pthresh = 0.000004
        else
          pthresh = 0.000001
        end if
!Chuang: add dynamic allocation
        if(trim(ioform) == 'netcdf' .OR. trim(ioform) == 'netcdfpara') THEN
          IF(modelname == 'NCAR' .OR. modelname == 'RAPR' .OR. modelname == 'NMM') THEN
            call ext_ncd_ioinit(sysdepinfo,status)
            print*,'called ioinit', status
            call ext_ncd_open_for_read( trim(filename), 0, 0, " ", &
                                        datahandle, status)
            print*,'called open for read', status
            if ( status /= 0 ) then
              print*,'error opening ',filename, ' Status = ', status ; stop
            endif
            call ext_ncd_get_dom_ti_integer(datahandle &
                 ,'WEST-EAST_GRID_DIMENSION',iim,1,ioutcount, status )
            im = iim-1
            call ext_ncd_get_dom_ti_integer(datahandle &
                 ,'SOUTH-NORTH_GRID_DIMENSION',jjm,1,ioutcount, status )
            jm = jjm-1
            call ext_ncd_get_dom_ti_integer(datahandle &
                 ,'BOTTOM-TOP_GRID_DIMENSION',llm,1,ioutcount, status )
            lm    = llm-1
            lp1   = lm+1
            lm1   = lm-1
            im_jm = im*jm

            print*,'im jm lm from wrfout= ',im,jm, lm

! Read and set global value for surface physics scheme
            call ext_ncd_get_dom_ti_integer(datahandle &
                 ,'SF_SURFACE_PHYSICS',itmp,1,ioutcount, status )
            isf_surface_physics = itmp
            print*,'SF_SURFACE_PHYSICS= ',isf_surface_physics
! set NSOIL to 4 as default for NOAH but change if using another
! SFC scheme
            nsoil = 4
            IF(itmp == 1) then      ! thermal diffusion scheme
              nsoil = 5
            ELSE IF(itmp == 3) then ! RUC LSM
              nsoil = 9
            ELSE IF(itmp == 7) then ! Pleim-Xiu
              nsoil = 2
            END IF
            print*,'NSOIL from wrfout= ',nsoil

            call ext_ncd_ioclose( datahandle, status )
          ELSE
! use the parallel netCDF library directly to read FV3 output in netCDF
            spval = 9.99e20
            status = nf90_open(trim(filename),ior(nf90_nowrite,nf90_mpiio), &
                               ncid3d,comm=mpi_comm_world,info=mpi_info_null)
            if ( status /= 0 ) then
              print*,'error opening ',filename, ' Status = ', status
              stop
            endif
            status = nf90_open(trim(filenameflux),ior(nf90_nowrite,nf90_mpiio), &
                               ncid2d,comm=mpi_comm_world,info=mpi_info_null)
            if ( status /= 0 ) then
              print*,'error opening ',filenameflux, ' Status = ', status
              stop
            endif
! read in LSM index and nsoil here
            status = nf90_get_att(ncid2d,nf90_global,'landsfcmdl', isf_surface_physics)
            if(status/=0)then
              print*,'landsfcmdl not found; assigning to 2'
              isf_surface_physics = 2 ! set LSM physics to 2 for NOAH
            endif
            if(isf_surface_physics<2)then
              isf_surface_physics = 2 ! set LSM physics to 2 for NOAH
            endif
            status = nf90_get_att(ncid2d,nf90_global,'nsoil', nsoil)
            if(status/=0)then
              print*,'nsoil not found; assigning to 4'
              nsoil = 4 ! set nsoil to 4 for NOAH
            endif
            if(me==0)print*,'SF_SURFACE_PHYSICS= ',isf_surface_physics
            if(me==0)print*,'NSOIL= ',nsoil
! read imp_physics
            status = nf90_get_att(ncid2d,nf90_global,'imp_physics',imp_physics)
            if(status/=0)then
              print*,'imp_physics not found; assigning to GFDL 11'
              imp_physics = 11
            endif
            if (me == 0) print*,'MP_PHYSICS= ',imp_physics
! get dimensions
            status = nf90_inq_dimid(ncid3d,'grid_xt',varid)
            if ( status /= 0 ) then
              print*,status,varid
              stop 1
            end if
            status = nf90_inquire_dimension(ncid3d,varid,len=im)
            if ( status /= 0 ) then
              print*,status
              stop 1
            end if
            status = nf90_inq_dimid(ncid3d,'grid_yt',varid)
            if ( status /= 0 ) then
              print*,status,varid
              stop 1
            end if
            status = nf90_inquire_dimension(ncid3d,varid,len=jm)
            if ( status /= 0 ) then
              print*,status
              stop 1
            end if
            status = nf90_inq_dimid(ncid3d,'pfull',varid)
            if ( status /= 0 ) then
              print*,status,varid
              stop 1
            end if
            status = nf90_inquire_dimension(ncid3d,varid,len=lm)
            if ( status /= 0 ) then
              print*,status
              stop 1
            end if
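! each dimension above is read with the same two-step netCDF pattern:
! nf90_inq_dimid translates the dimension name to an id, then
! nf90_inquire_dimension(...,len=...) returns its size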
            lp1   = lm+1
            lm1   = lm-1
            im_jm = im*jm
! set NSOIL to 4 as default for NOAH but change if using another
! SFC scheme
!           NSOIL = 4

            print*,'im jm lm nsoil from fv3 output = ',im,jm,lm,nsoil
          END IF

        ELSE IF(trim(ioform) == 'binary' .OR. &
                trim(ioform) == 'binarympiio' ) THEN
          print*,'WRF binary format is no longer supported'
          stop 9996
! NEMSIO format
        ELSE IF(trim(ioform) == 'binarynemsio' .or. &
                trim(ioform) == 'binarynemsiompiio' ) THEN

          spval = 9.99e20
          IF(me == 0)THEN
            call nemsio_init(iret=status)
            print *,'nemsio_init, iret=',status
            call nemsio_open(nfile,trim(filename),'read',iret=status)
            if ( status /= 0 ) then
              print*,'error opening ',filename, ' Status = ', status ; stop
            endif
!---
            call nemsio_getfilehead(nfile,iret=status,nrec=nrec &
                 ,dimx=im,dimy=jm,dimz=lm,nsoil=nsoil)
            if ( status /= 0 ) then
              print*,'error finding model dimensions '; stop
            endif
            call nemsio_getheadvar(nfile,'global',global,iret)
            if (iret /= 0) then
              print*,"global not found in file - assigned false"
              global = .false.
            end if
            IF(modelname == 'GFS') global = .true.
! global NMMB has i=1 overlapping i=im, so post will cut off i=im
            if(global .and. modelname == 'NMM') im = im-1

          END IF

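! only task 0 opened the nemsio file and read its header, so the dimensions
! (and the global flag below) are broadcast to all compute tasks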
          CALL mpi_bcast(im,    1,mpi_integer,0, mpi_comm_comp,status)
          call mpi_bcast(jm,    1,mpi_integer,0, mpi_comm_comp,status)
          call mpi_bcast(lm,    1,mpi_integer,0, mpi_comm_comp,status)
          call mpi_bcast(nsoil, 1,mpi_integer,0, mpi_comm_comp,status)

          if (me == 0) print*,'im jm lm nsoil from NEMS= ',im,jm, lm ,nsoil
          call mpi_bcast(global,1,mpi_logical,0, mpi_comm_comp,status)
          if (me == 0) print*,'Is this a global run ',global
          lp1   = lm+1
          lm1   = lm-1
          im_jm = im*jm

! opening GFS flux file
          IF(modelname == 'GFS') THEN
!           iunit = 33
            call nemsio_open(ffile,trim(filenameflux),'read',iret=iostatusflux)
            if ( iostatusflux /= 0 ) then
              print*,'error opening ',filenameflux, ' Status = ', iostatusflux
            endif
            iostatusd3d = -1
            iunitd3d    = -1
!
! opening GFS aer file
            call nemsio_open(rfile,trim(filenameaer),'read',iret=iostatusaer)
            if ( iostatusaer /= 0 .and. me == 0) then
              print*,'error opening AER ',filenameaer, ' Status = ', iostatusaer
            endif
!
!           print*,'iostatusD3D in WRFPOST= ',iostatusD3D

          END IF

        ELSE
          print*,'UNKNOWN MODEL OUTPUT FORMAT, STOPPING'
          stop 9999
        END IF


        CALL mpi_first()
        print*,'jsta,jend,jsta_m,jend_m,jsta_2l,jend_2u,spval=',jsta, &
               jend,jsta_m,jend_m, jsta_2l,jend_2u,spval
        CALL allocate_all()

!
! INITIALIZE POST COMMON BLOCKS
!
        lcntrl = 14
        rewind(lcntrl)

! EXP. initialize netcdf here instead
        bbtim = mpi_wtime()
        btim  = mpi_wtime()
! set default novegtype
        if(modelname == 'GFS') THEN
          novegtype = 13
          ivegsrc   = 2
        else if(modelname=='NMM' .and. trim(ioform)=='binarynemsio') then
          novegtype = 20
          ivegsrc   = 1
        else if(modelname=='RAPR') then
          novegtype = 20
          ivegsrc   = 1
        else ! USGS
          novegtype = 24
          ivegsrc   = 0
        end if
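! novegtype is the number of vegetation classes and ivegsrc the source
! dataset; the pairs above presumably correspond to the 24-class USGS set (0),
! the 20-class IGBP set (1), and the legacy 13-class GFS set (2)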

! Read model output in the appropriate format for each model

        IF(trim(ioform) == 'netcdf' .OR. trim(ioform) == 'netcdfpara') THEN
          IF(modelname == 'NCAR' .OR. modelname == 'RAPR') THEN
            print*,'CALLING INITPOST TO PROCESS NCAR NETCDF OUTPUT'
            CALL initpost
          ELSE IF (modelname == 'FV3R' .OR. modelname == 'GFS') THEN
! use the parallel netCDF library to read output directly
            print*,'CALLING INITPOST_NETCDF'
            CALL initpost_netcdf(ncid2d,ncid3d)
          ELSE
            print*,'POST does not have a netcdf option for model ',modelname,', STOPPING'
            stop 9998
          END IF
        ELSE IF(trim(ioform) == 'binarympiio') THEN
          IF(modelname == 'NCAR' .OR. modelname == 'RAPR' .OR. modelname == 'NMM') THEN
            print*,'WRF BINARY IO FORMAT IS NO LONGER SUPPORTED, STOPPING'
            stop 9996
          ELSE IF(modelname == 'RSM') THEN
            print*,'MPI BINARY IO IS NOT YET INSTALLED FOR RSM, STOPPING'
            stop 9997
          ELSE
            print*,'POST does not have an mpiio option for this model, STOPPING'
            stop 9998
          END IF
        ELSE IF(trim(ioform) == 'binarynemsio') THEN
          IF(modelname == 'NMM') THEN
            CALL initpost_nems(nrec,nfile)
          ELSE
            print*,'POST does not have a nemsio option for model ',modelname,', STOPPING'
            stop 9998

          END IF

        ELSE IF(trim(ioform) == 'binarynemsiompiio') THEN
          IF(modelname == 'GFS') THEN
! close nemsio file for serial read
            call nemsio_close(nfile,iret=status)
            call nemsio_close(ffile,iret=status)
            call nemsio_close(rfile,iret=status)
            CALL initpost_gfs_nems_mpiio(iostatusaer)
          ELSE
            print*,'POST does not have a nemsio mpi option for model ',modelname, &
                   ', STOPPING'
            stop 9999

          END IF

        ELSE
          print*,'UNKNOWN MODEL OUTPUT FORMAT, STOPPING'
          stop 9999
        END IF
        initpost_tim = initpost_tim + (mpi_wtime() - btim)
        IF(me == 0)THEN
          WRITE(6,*)'WRFPOST: INITIALIZED POST COMMON BLOCKS'
        ENDIF
!
! If GRIB2, read the available-fields xml file and the post control file
!
        if(grib == "grib2") then
          btim = mpi_wtime()
          call read_xml()
          readxml_tim = readxml_tim + (mpi_wtime() - btim)
        endif
!
! LOOP OVER THE OUTPUT GRID(S). FIELD(S) AND OUTPUT GRID(S) ARE SPECIFIED
! IN THE CONTROL FILE. WE PROCESS ONE GRID AND ITS FIELDS AT A TIME;
! THAT'S WHAT THIS LOOP DOES.
!
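! For each parameter set npset, the grib2 loop below:
!   (1) reads the requested field list via SET_OUTFLDS,
!   (2) allocates this task's subdomain output buffer DATAPD,
!   (3) computes the fields in PROCESS, and
!   (4) writes GRIB2 output via GRIBIT2.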
        icount_calmict = 0
        first_grbtbl   = .true.
        npset          = 0
!10     CONTINUE
!
! READ CONTROL FILE DIRECTING WHICH FIELDS ON WHICH
! LEVELS AND TO WHICH GRID TO INTERPOLATE DATA TO.
! VARIABLE IEOF/=0 WHEN THERE ARE NO MORE GRIDS TO PROCESS.
!
! -------- grib1 processing ---------------
! ------------------
!       if (grib == "grib1") then  ! DO NOT REVERT TO GRIB1. GRIB1 IS NOT SUPPORTED ANYMORE
!         IEOF = 0
!         do while (ieof == 0)
!           CALL READCNTRL(kth,IEOF)
!           IF(ME == 0)THEN
!             WRITE(6,*)'POST: RETURN FROM READCNTRL. ','IEOF=',IEOF
!           ENDIF
!
! PROCESS SELECTED FIELDS. FOR EACH SELECTED FIELD/LEVEL
! WE GO THROUGH THE FOLLOWING STEPS:
!     (1) COMPUTE FIELD IF NEED BE
!     (2) WRITE FIELD TO OUTPUT FILE IN GRIB.
!
!           if (ieof == 0) then
!             CALL PROCESS(kth,kpv,th(1:kth),pv(1:kpv),iostatusD3D)
!             IF(ME == 0)THEN
!               WRITE(6,*)' '
!               WRITE(6,*)'WRFPOST: PREPARE TO PROCESS NEXT GRID'
!             ENDIF
!           endif
!
! PROCESS NEXT GRID.
!
!         enddo
! -------- grib2 processing ---------------
! ------------------
!       elseif (grib == "grib2") then
        if (me==0) write(*,*) ' in WRFPOST OUTFORM= ',grib
        if (me==0) write(*,*) ' GRIB1 IS NOT SUPPORTED ANYMORE'
        if (grib == "grib2") then
          do while (npset < num_pset)
            npset = npset+1
            if (me==0) write(*,*)' in WRFPOST npset=',npset,' num_pset=',num_pset
            CALL set_outflds(kth,th,kpv,pv)
            if (me==0) write(*,*)' in WRFPOST size datapd',size(datapd)
            if(allocated(datapd)) deallocate(datapd)
!Jesse x-decomposition
!           allocate(datapd(im,1:jend-jsta+1,nrecout+100))
            allocate(datapd(1:iend-ista+1,1:jend-jsta+1,nrecout+100))
!$omp parallel do private(i,j,k)
            do k=1,nrecout+100
              do j=1,jend+1-jsta
!Jesse x-decomposition
!               do i=1,im
                do i=1,iend+1-ista
                  datapd(i,j,k) = 0.
                enddo
              enddo
            enddo
            call get_postfilename(post_fname)
            if (me==0) write(*,*)'post_fname=',trim(post_fname)
            if (me==0) write(*,*)'get_postfilename,post_fname=',trim(post_fname), &
                                 'npset=',npset, 'num_pset=',num_pset, &
                                 'iSF_SURFACE_PHYSICS=',isf_surface_physics
!
! PROCESS SELECTED FIELDS. FOR EACH SELECTED FIELD/LEVEL
! WE GO THROUGH THE FOLLOWING STEPS:
!     (1) COMPUTE FIELD IF NEED BE
!     (2) WRITE FIELD TO OUTPUT FILE IN GRIB.
!
            CALL process(kth,kpv,th(1:kth),pv(1:kpv),iostatusd3d)
            IF(me == 0) WRITE(6,*)'WRFPOST: PREPARE TO PROCESS NEXT GRID'
!
!           write(*,*)'enter gribit2 before mpi_barrier'
            call mpi_barrier(mpi_comm_comp,ierr)

!           if(me==0) call w3tage('bf grb2 ')
!           write(*,*)'enter gribit2 after mpi barrier'
            call gribit2(post_fname)
            deallocate(datapd)
            deallocate(fld_info)
!
! PROCESS NEXT GRID.
!
          enddo

        endif
!
!-------
        call grib_info_finalize()
!
        IF(me == 0) THEN
          WRITE(6,*)' '
          WRITE(6,*)'ALL GRIDS PROCESSED.'
          WRITE(6,*)' '
        ENDIF
!
        call de_allocate

!       GO TO 98
 1000   CONTINUE
!exp    call ext_ncd_ioclose ( DataHandle, Status )
!
        IF(me == 0) THEN
          print*, 'INITPOST_tim = ',      initpost_tim
          print*, 'MDLFLD_tim = ',        etafld2_tim
          print*, 'MDL2P_tim = ',         eta2p_tim
          print*, 'MDL2SIGMA_tim = ',     mdl2sigma_tim
          print*, 'MDL2AGL_tim = ',       mdl2agl_tim
          print*, 'SURFCE_tim = ',        surfce2_tim
          print*, 'CLDRAD_tim = ',        cldrad_tim
          print*, 'MISCLN_tim = ',        miscln_tim
          print*, 'MDL2STD_tim = ',       mdl2std_tim
          print*, 'FIXED_tim = ',         fixed_tim
          print*, 'MDL2THANDPV_tim = ',   mdl2thandpv_tim
          print*, 'CALRAD_WCLOUD_tim = ', calrad_wcloud_tim
          print*, 'RUN_IFI_tim = ',       run_ifi_tim
          print*, 'Total time = ',        (mpi_wtime() - bbtim)
          print*, 'Time for OUTPUT = ',   time_output
          print*, 'Time for READxml = ',  readxml_tim
        endif
!
! END OF PROGRAM.
!
!
! MPI_LAST WILL SHUT DOWN THE IO SERVER, IF IT EXISTS
!
        CALL mpi_last
!
!
      end if
!
!
!
!     call summary()
      if (me == 0) CALL w3tage('UNIFIED_POST')
      CALL mpi_finalize(ierr)


      stop 0

      END