UPP (upp-srw-2.2.0)
WRFPOST.f
36! and 2D diag. output (d2d_chem) for the GEFS-Aerosols and CCPP-Chem models.
39 PROGRAM wrfpost
40
41!
42!
43!============================================================================================================
44!
45! This is an MPI code. All array indexing is with respect to the global indices. Loop indices
46! look as follows for N MPI tasks.
47!
48!
49!
50! Original New
51! Index Index
52!
53! JM ----------------------------------------------- JEND
54! JM-1 - - JEND_M
55! JM-2 - MPI TASK N-1 - JEND_M2
56! - -
57! - -
58! ----------------------------------------------- JSTA, JSTA_M, JSTA_M2
59! ----------------------------------------------- JEND, JEND_M, JEND_M2
60! - -
61! - MPI TASK N-2 -
62! - -
63! - -
64! ----------------------------------------------- JSTA, JSTA_M, JSTA_M2
65!
66! .
67! .
68! .
69!
70! ----------------------------------------------- JEND, JEND_M, JEND_M2
71! - -
72! - MPI TASK 1 -
73! - -
74! - -
75! ----------------------------------------------- JSTA, JSTA_M, JSTA_M2
76! ----------------------------------------------- JEND, JEND_M, JEND_M2
77! - -
78! - MPI TASK 0 -
79! 3 - - JSTA_M2
80! 2 - - JSTA_M
81! 1 ----------------------------------------------- JSTA
82!
83! 1 IM
84!
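! For illustration (this loop is not part of the original code): with the
! decomposition above, each task computes only its own rows, using the
! global J bounds it was assigned, e.g.
!
!     do l = 1, lm
!       do j = jsta, jend          ! this task's slice of 1..JM
!         do i = 1, im
!           work(i,j,l) = 0.0      ! 'work' is an illustrative array name
!         end do
!       end do
!     end do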
85!
86! Jim Tuccillo
87! Jan 2000
88!
89! README - Jim Tuccillo Feb 2001
90!
91! Many common blocks have been replaced by modules to support Fortran
92! "allocate" commands. Many of the 3-D arrays are now allocated to be the
93! exact size required based on the number of MPI tasks. The dimensioning will be
94! x ( im,jsta_2l:jend_2u,lm)
95! Most 2-D arrays continue to be dimensioned (im,jm). This is fine but please be aware
96! that the EXCH routine for arrays dimensioned (im,jm) is different from the one for arrays
97! dimensioned (im,jsta_2l:jend_2u). Also, be careful about passing any arrays dimensioned
98! (im,jsta_2l:jend_2u,lm). See examples in the code for the correct calling sequence and
99! EXCH routine to use.
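!
!            A minimal sketch of the intended pattern (illustrative only;
!            see EXCH.f for the actual interface and calling sequence):
!
!              call exch(z)                  ! halo update for z(im,jsta_2l:jend_2u)
!              do j = jsta_m, jend_m         ! interior rows of this task
!                do i = 1, im
!                  dzdy(i,j) = z(i,j+1) - z(i,j-1)
!                end do
!              end do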
100!
101!
102! ASYNCHRONOUS I/O HAS BEEN ADDED. THE LAST MPI TASK DOES THE I/O. IF THERE IS
103! ONLY ONE MPI TASK THEN TASK 0 DOES THE I/O.
104! THE CODE HAS GOTTEN A LITTLE KLUDGY. BASICALLY, IM, IMX AND IMOUT MUST BE EQUAL
105! AND REPRESENT THE VALUE USED IN THE MODEL. THE SAME HOLDS FOR JM, JMX AND JMOUT.
106!
107! Jim Tuccillo June 2001
108!
109!
110!===========================================================================================
111!
112 use netcdf
113 use nemsio_module, only: nemsio_getheadvar, nemsio_gfile, nemsio_init, nemsio_open, &
114 nemsio_getfilehead,nemsio_close
115 use ctlblk_mod, only: filenameaer, me, num_procs, num_servers, mpi_comm_comp, datestr, &
116 mpi_comm_inter, filename, ioform, grib, idat, filenameflux, filenamed3d, gdsdegr, &
117 spldef, modelname, ihrst, lsmdef,vtimeunits, tprec, pthresh, datahandle, im, jm, lm, &
118 lp1, lm1, im_jm, isf_surface_physics, nsoil, spl, lsmp1, global, imp_physics, &
119 ista, iend, ista_m, iend_m, ista_2l, iend_2u, &
120 jsta, jend, jsta_m, jend_m, jsta_2l, jend_2u, novegtype, icount_calmict, npset, datapd,&
121 lsm, fld_info, etafld2_tim, eta2p_tim, mdl2sigma_tim, cldrad_tim, miscln_tim, &
122 mdl2agl_tim, mdl2std_tim, mdl2thandpv_tim, calrad_wcloud_tim,nasa_on,gccpp_on, &
123 fixed_tim, time_output, imin, surfce2_tim, komax, ivegsrc, d3d_on, gocart_on,rdaod, &
124 readxml_tim, spval, fullmodelname, submodelname, hyb_sigp, filenameflat, aqf_on,numx, &
125 run_ifi_tim, slrutah_on, d2d_chem
126 use grib2_module, only: gribit2,num_pset,nrecout,first_grbtbl,grib_info_finalize
127 use upp_ifi_mod, only: write_ifi_debug_files
128!- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
129 implicit none
130!
131 type(nemsio_gfile) :: nfile,ffile,rfile
132 include "mpif.h"
133!
134! DECLARE VARIABLES.
135!
136! SET HEADER WRITER FLAGS TO TRUE.
137!
138!temporary vars
139!
140 real(kind=8) :: time_initpost=0.,initpost_tim=0.,btim,bbtim
141 real rinc(5), untcnvt
142 integer :: status=0,iostatusd3d=0,iostatusflux=0
143 integer i,j,iii,l,k,ierr,nrec,ist,lusig,idrt,ncid3d,ncid2d,varid
144 integer :: prntsec,iim,jjm,llm,ioutcount,itmp,iret,iunit, &
145 iunitd3d,iyear,imn,iday,lcntrl,ieof
146 integer :: iostatusaer
147 logical :: popascal
148!
149 integer :: kpo,kth,kpv
150 real,dimension(komax) :: po,th,pv
151 namelist/nampgb/kpo,po,kth,th,kpv,pv,filenameaer,d3d_on,gocart_on,gccpp_on, nasa_on,popascal &
152 ,hyb_sigp,rdaod,d2d_chem, aqf_on,slrutah_on, vtimeunits,numx,write_ifi_debug_files
153 integer :: itag_ierr
154 namelist/model_inputs/filename,ioform,grib,datestr,modelname,submodelname &
155 ,filenameflux,filenameflat
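! An 'itag' file read below might contain, for example (values are
! illustrative only, not defaults):
!
!   &model_inputs
!     filename='dynf000.nc', ioform='netcdfpara', grib='grib2',
!     datestr='2023-01-01_00:00:00', modelname='GFS'
!   /
!   &nampgb
!     kpo=3, po=1000.,850.,500., popascal=.false.
!   /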
156
157 character startdate*19,sysdepinfo*80,iowrfname*3,post_fname*255
158 character cgar*1,cdum*4,line*10
159!
160!------------------------------------------------------------------------------
161! START HERE
162!
163 call start()
164!
165! INITIALIZE MPI
166
167 CALL setup_servers(me, &
168 & num_procs, &
169 & num_servers, &
170 & mpi_comm_comp, &
171 & mpi_comm_inter)
172!
173! ME IS THE RANK
174! NUM_PROCS IS THE NUMBER OF TASKS DOING POSTING
175! NUM_SERVERS IS ONE IF THERE IS MORE THAN ONE MPI TASK IN TOTAL, OTHERWISE ZERO
176! MPI_COMM_COMP IS THE INTRACOMMUNICATOR
177! MPI_COMM_INTER IS THE INTERCOMMUNICATOR FOR COMMUNICATION BETWEEN TASK 0 OF THE
178! TASKS DOING THE POSTING AND THE I/O SERVER
179!
180!
181! IF WE HAVE MORE THAN 1 MPI TASK THEN WE WILL FIRE UP THE IO SERVER
182! THE LAST TASK ( IN THE CONTEXT OF MPI_COMM_WORLD ) IS THE I/O SERVER
183!
184 if (me == 0) CALL w3tagb('nems ',0000,0000,0000,'np23 ')
185
186 if ( me >= num_procs ) then
187!
188 call server
189!
190 else
191 spval = 9.9e10
192!
193!**************************************************************************
194!KaYee: Read itag in Fortran Namelist format
195!Set default
196 submodelname='NONE'
197!Set control file name
198 filenameflat='postxconfig-NT.txt'
199 numx=1
200!open namelist
201 open(5,file='itag')
202 read(5,nml=model_inputs,iostat=itag_ierr,err=888)
203 888 if (itag_ierr /= 0) then
204 print*,'Incorrect namelist variable(s) found in the itag file, stopping!'
205 stop
206 endif
207 if (me == 0) write(6, model_inputs)
208
209! if(MODELNAME == 'NMM')then
210! read(5,1114) VTIMEUNITS
211! 1114 format(a4)
212! if (me==0) print*,'VALID TIME UNITS = ', VTIMEUNITS
213! endif
214!
215 303 format('MODELNAME="',a,'" SUBMODELNAME="',a,'"')
216
217 if(me==0) write(*,*)'MODELNAME: ', modelname, submodelname
218
219! assume for now that the first date in the stdin file is the start date
220 read(datestr,300) iyear,imn,iday,ihrst,imin
221 if (me==0) write(*,*) 'in WRFPOST iyear,imn,iday,ihrst,imin', &
222 iyear,imn,iday,ihrst,imin
223 300 format(i4,1x,i2,1x,i2,1x,i2,1x,i2)
224
225 idat(1) = imn
226 idat(2) = iday
227 idat(3) = iyear
228 idat(4) = ihrst
229 idat(5) = imin
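! idat now holds the run start time as (month, day, year, hour, minute)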
230
231 111 format(a256)
232 112 format(a19)
233 113 format(a20)
234 114 format(a8)
235 120 format(a5)
236 121 format(a4)
237
238!KaYee: Read in GFS/FV3 runs in Fortran Namelist Format.
239 if(grib=='grib2') then
240 gdsdegr = 1.d6
241 endif
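! (GRIB2 encodes grid coordinates in units of 1.e-6 degree, hence gdsdegr = 1.d6)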
242!
243! set default for kpo, kth, th, kpv, pv
244 kpo = 0
245 po = 0
246 kth = 6
247 th = (/310.,320.,350.,450.,550.,650.,(0.,k=kth+1,komax)/) ! isentropic levels to output
248 kpv = 8
249 pv = (/0.5,-0.5,1.0,-1.0,1.5,-1.5,2.0,-2.0,(0.,k=kpv+1,komax)/)
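! pv surfaces to output, in PV units (1 PVU = 1.0e-6 K m2 kg-1 s-1)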
250
251 hyb_sigp = .true.
252 d3d_on = .false.
253 gocart_on = .false.
254 gccpp_on = .false.
255 nasa_on = .false.
256 aqf_on = .false.
257 slrutah_on = .false.
258 popascal = .false.
259 filenameaer = ''
260 rdaod = .false.
261 d2d_chem = .false.
262 vtimeunits = ''
263
264 read(5,nampgb,iostat=iret,end=119)
265 119 continue
266 if (me == 0) write(6, nampgb)
267 if(mod(num_procs,numx)/=0) then
268 if (me==0) then
269 print*,'total processes, num_procs=', num_procs
270 print*,'number of subdomains in x direction, numx=', numx
271 print*,'remainder of num_procs/numx = ', mod(num_procs,numx)
272 print*,'Warning!!! the remainder of num_procs/numx is not 0; numx is reset &
273 & to 1 for this run, or you may adjust numx in the itag file and rerun'
274 endif
275! stop 9999
276 numx=1
277 if(me == 0) print*,'Warning!!! Reset numx as 1, numx=',numx
278 endif
279 if(numx>num_procs/2) then
280 if (me==0) then
281 print*,'total processes, num_procs=', num_procs
282 print*,'number of subdomains in x direction, numx=', numx
283 print*,'Warning!!! numx cannot exceed num_procs/2; numx is reset to 1 in this run'
284 print*,'or you may adjust numx in the itag file and rerun'
285 endif
286 numx=1
287 if(me == 0) print*,'Warning!!! Reset numx as 1, numx=',numx
288 endif
289 if(me == 0) then
290 print*,'komax,iret for nampgb= ',komax,iret
291 print*,'komax,kpo,kth,th,kpv,pv,fileNameAER,nasa_on,popascal= ',komax,kpo &
292 & ,kth,th(1:kth),kpv,pv(1:kpv),trim(filenameaer),nasa_on,popascal
293 print*,'NUM_PROCS=',num_procs
294 print*,'numx= ',numx
295 endif
296
297 IF(trim(ioform) /= 'netcdfpara' .AND. trim(ioform) /= 'netcdf' ) THEN
298 numx=1
299 if(me == 0) print*,'2D decomposition only supports netcdf/netcdfpara IO.'
300 if(me == 0) print*,'Reset numx= ',numx
301 ENDIF
302
303 IF(modelname /= 'FV3R' .AND. modelname /= 'GFS') THEN
304 numx=1
305 if(me == 0) print*,'2D decomposition only supports GFS and FV3R.'
306 if(me == 0) print*,'Reset numx= ',numx
307 ENDIF
308
309! set up pressure level from POSTGPVARS or DEFAULT
310 if(kpo == 0) then
311! use default pressure levels
312 if(me == 0) then
313 print*,'using default pressure levels,spldef=',(spldef(l),l=1,lsmdef)
314 endif
315 lsm = lsmdef
316 do l=1,lsm
317 spl(l) = spldef(l)
318 end do
319 else
320! use POSTGPVARS
321 if(me == 0) then
322 print*,'using pressure levels from POSTGPVARS'
323 endif
324 lsm = kpo
325 if( .not. popascal ) then
326 untcnvt = 100.
327 else
328 untcnvt = 1.
329 endif
330 if(po(lsm) < po(1))then ! post logic assumes ascending
331 do l=1,lsm
332 spl(l) = po(lsm-l+1)*untcnvt
333 end do
334 else
335 do l=1,lsm
336 spl(l) = po(l)*untcnvt
337 end do
338 end if
339 end if
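! for example (illustrative): kpo=3 with po = 1000.,850.,500. hPa and
! popascal=.false. is reversed to ascending order and scaled to Pa,
! giving spl = 50000.,85000.,100000.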
340 lsmp1 = lsm+1
341
342 116 continue
343
344! set PTHRESH for different models
345 if(modelname == 'NMM')then
346 pthresh = 0.000004
347 else
348 pthresh = 0.000001
349 end if
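! (note: PTHRESH is the precipitation-rate threshold used by the
! precipitation-type diagnostics)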
350!Chuang: add dynamical allocation
351 if(trim(ioform) == 'netcdf' .OR. trim(ioform) == 'netcdfpara') THEN
352 IF(modelname == 'NCAR' .OR. modelname == 'RAPR' .OR. modelname == 'NMM') THEN
353 call ext_ncd_ioinit(sysdepinfo,status)
354 call ext_ncd_open_for_read( trim(filename), 0, 0, " ", &
355 datahandle, status)
356 if ( status /= 0 ) then
357 print*,'error opening ',filename, ' Status = ', status ; stop
358 endif
359 call ext_ncd_get_dom_ti_integer(datahandle &
360 ,'WEST-EAST_GRID_DIMENSION',iim,1,ioutcount, status )
361 im = iim-1
362 call ext_ncd_get_dom_ti_integer(datahandle &
363 ,'SOUTH-NORTH_GRID_DIMENSION',jjm,1,ioutcount, status )
364 jm = jjm-1
365 call ext_ncd_get_dom_ti_integer(datahandle &
366 ,'BOTTOM-TOP_GRID_DIMENSION',llm,1,ioutcount, status )
367 lm = llm-1
368 lp1 = lm+1
369 lm1 = lm-1
370 im_jm = im*jm
371
372! Read and set global value for surface physics scheme
373 call ext_ncd_get_dom_ti_integer(datahandle &
374 ,'SF_SURFACE_PHYSICS',itmp,1,ioutcount, status )
375 isf_surface_physics = itmp
376! set NSOIL to 4 as default for NOAH but change if using other
377! SFC scheme
378 nsoil = 4
379 IF(itmp == 1) then !thermal diffusion scheme
380 nsoil = 5
381 ELSE IF(itmp == 3) then ! RUC LSM
382 nsoil = 9
383 ELSE IF(itmp == 7) then ! Pleim Xu
384 nsoil = 2
385 END IF
386
387 call ext_ncd_ioclose ( datahandle, status )
388 ELSE
389! use parallel netcdf lib directly to read FV3 output in netCDF
390 spval = 9.99e20
391 status = nf90_open(trim(filename),ior(nf90_nowrite,nf90_mpiio), &
392 ncid3d,comm=mpi_comm_world,info=mpi_info_null)
393 if ( status /= 0 ) then
394 print*,'error opening ',filename, ' Status = ', status
395 stop
396 endif
397 status = nf90_open(trim(filenameflux),ior(nf90_nowrite,nf90_mpiio), &
398 ncid2d,comm=mpi_comm_world,info=mpi_info_null)
399 if ( status /= 0 ) then
400 print*,'error opening ',filenameflux, ' Status = ', status
401 stop
402 endif
403! read in LSM index and nsoil here
404 status=nf90_get_att(ncid2d,nf90_global,'landsfcmdl', isf_surface_physics)
405 if(status/=0)then
406 print*,'landsfcmdl not found; assigning to 2'
407 isf_surface_physics=2 !set LSM physics to 2 for NOAH
408 endif
409 if(isf_surface_physics<2)then
410 isf_surface_physics=2 !set LSM physics to 2 for NOAH
411 endif
412 status=nf90_get_att(ncid2d,nf90_global,'nsoil', nsoil)
413 if(status/=0)then
414 print*,'nsoil not found; assigning to 4'
415 nsoil=4 !set nsoil to 4 for NOAH
416 endif
417! read imp_physics
418 status=nf90_get_att(ncid2d,nf90_global,'imp_physics',imp_physics)
419 if(status/=0)then
420 print*,'imp_physics not found; assigning to GFDL 11'
421 imp_physics=11
422 endif
423! get dimensions
424 status = nf90_inq_dimid(ncid3d,'grid_xt',varid)
425 if ( status /= 0 ) then
426 print*,status,varid
427 stop 1
428 end if
429 status = nf90_inquire_dimension(ncid3d,varid,len=im)
430 if ( status /= 0 ) then
431 print*,status
432 stop 1
433 end if
434 status = nf90_inq_dimid(ncid3d,'grid_yt',varid)
435 if ( status /= 0 ) then
436 print*,status,varid
437 stop 1
438 end if
439 status = nf90_inquire_dimension(ncid3d,varid,len=jm)
440 if ( status /= 0 ) then
441 print*,status
442 stop 1
443 end if
444 status = nf90_inq_dimid(ncid3d,'pfull',varid)
445 if ( status /= 0 ) then
446 print*,status,varid
447 stop 1
448 end if
449 status = nf90_inquire_dimension(ncid3d,varid,len=lm)
450 if ( status /= 0 ) then
451 print*,status
452 stop 1
453 end if
454 lp1 = lm+1
455 lm1 = lm-1
456 im_jm = im*jm
457! set NSOIL to 4 as default for NOAH but change if using other
458! SFC scheme
459! NSOIL = 4
460 END IF
461
462 ELSE IF(trim(ioform) == 'binary' .OR. &
463 trim(ioform) == 'binarympiio' ) THEN
464 print*,'WRF Binary format is no longer supported'
465 stop 9996
466! NEMSIO format
467 ELSE IF(trim(ioform) == 'binarynemsio' .or. &
468 trim(ioform) == 'binarynemsiompiio' )THEN
469
470 spval = 9.99e20
471 IF(me == 0)THEN
472 call nemsio_init(iret=status)
473 call nemsio_open(nfile,trim(filename),'read',iret=status)
474 if ( status /= 0 ) then
475 print*,'error opening ',filename, ' Status = ', status ; stop
476 endif
477!---
478 call nemsio_getfilehead(nfile,iret=status,nrec=nrec &
479 ,dimx=im,dimy=jm,dimz=lm,nsoil=nsoil)
480 if ( status /= 0 ) then
481 print*,'error finding model dimensions '; stop
482 endif
483 call nemsio_getheadvar(nfile,'global',global,iret)
484 if (iret /= 0)then
485 print*,"global not found in file-Assigned false"
486 global = .false.
487 end if
488 IF(modelname == 'GFS') global = .true.
489! global NMMB has i=1 overlap with i=im so post will cut off i=im
490 if(global .and. modelname == 'NMM') im = im-1
491
492 END IF
493
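! the header dimensions were read on task 0 only; broadcast them to all compute tasks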
494 CALL mpi_bcast(im, 1,mpi_integer,0, mpi_comm_comp,status)
495 call mpi_bcast(jm, 1,mpi_integer,0, mpi_comm_comp,status)
496 call mpi_bcast(lm, 1,mpi_integer,0, mpi_comm_comp,status)
497 call mpi_bcast(nsoil,1,mpi_integer,0, mpi_comm_comp,status)
498
499 call mpi_bcast(global,1,mpi_logical,0,mpi_comm_comp,status)
500 lp1 = lm+1
501 lm1 = lm-1
502 im_jm = im*jm
503
504! opening GFS flux file
505 IF(modelname == 'GFS') THEN
506! iunit=33
507 call nemsio_open(ffile,trim(filenameflux),'read',iret=iostatusflux)
508 if ( iostatusflux /= 0 ) then
509 print*,'error opening ',filenameflux, ' Status = ', iostatusflux
510 endif
511 iostatusd3d = -1
512 iunitd3d = -1
513!
514! opening GFS aer file
515 call nemsio_open(rfile,trim(filenameaer),'read',iret=iostatusaer)
516 if ( iostatusaer /= 0 .and. me == 0) then
517 print*,'error opening AER ',filenameaer, ' Status = ', iostatusaer
518 endif
519!
520! print*,'iostatusD3D in WRFPOST= ',iostatusD3D
521
522 END IF
523
524 ELSE
525 print*,'UNKNOWN MODEL OUTPUT FORMAT, STOPPING'
526 stop 9999
527 END IF
528
529
530 CALL mpi_first()
531 CALL allocate_all()
532
533!
534! INITIALIZE POST COMMON BLOCKS
535!
536 lcntrl = 14
537 rewind(lcntrl)
538
539! EXP. initialize netcdf here instead
540 bbtim = mpi_wtime()
541 btim = mpi_wtime()
542! set default novegtype
543 if(modelname == 'GFS')THEN
544 novegtype = 13
545 ivegsrc = 2
546 else if(modelname=='NMM' .and. trim(ioform)=='binarynemsio')then
547 novegtype = 20
548 ivegsrc = 1
549 else if(modelname=='RAPR')then
550 novegtype = 20
551 ivegsrc = 1
552 else ! USGS
553 novegtype = 24
554 ivegsrc = 0
555 end if
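! ivegsrc selects the vegetation classification dataset (0 = USGS per the
! comment above; 1 and 2 select the 20- and 13-category datasets, respectively)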
556
557! Reading model output for different models and IO format
558
559 IF(trim(ioform) == 'netcdf' .OR. trim(ioform) == 'netcdfpara') THEN
560 IF(modelname == 'NCAR' .OR. modelname == 'RAPR') THEN
561 CALL initpost
562 ELSE IF (modelname == 'FV3R' .OR. modelname == 'GFS') THEN
563! use parallel netcdf library to read output directly
564 CALL initpost_netcdf(ncid2d,ncid3d)
565 ELSE
566 print*,'POST does not have netcdf option for model,',modelname,' STOPPING,'
567 stop 9998
568 END IF
569 ELSE IF(trim(ioform) == 'binarympiio') THEN
570 IF(modelname == 'NCAR' .OR. modelname == 'RAPR' .OR. modelname == 'NMM') THEN
571 print*,'WRF BINARY IO FORMAT IS NO LONGER SUPPORTED, STOPPING'
572 stop 9996
573 ELSE IF(modelname == 'RSM') THEN
574 print*,'MPI BINARY IO IS NOT YET INSTALLED FOR RSM, STOPPING'
575 stop 9997
576 ELSE
577 print*,'POST does not have mpiio option for this model, STOPPING'
578 stop 9998
579 END IF
580 ELSE IF(trim(ioform) == 'binarynemsio') THEN
581 IF(modelname == 'NMM') THEN
582 CALL initpost_nems(nrec,nfile)
583 ELSE
584 print*,'POST does not have nemsio option for model,',modelname,' STOPPING,'
585 stop 9998
586
587 END IF
588
589 ELSE IF(trim(ioform) == 'binarynemsiompiio')THEN
590 IF(modelname == 'GFS') THEN
591! close nemsio file for serial read
592 call nemsio_close(nfile,iret=status)
593 call nemsio_close(ffile,iret=status)
594 call nemsio_close(rfile,iret=status)
595 CALL initpost_gfs_nems_mpiio(iostatusaer)
596 ELSE
597 print*,'POST does not have nemsio mpi option for model,',modelname, &
598 'STOPPING,'
599 stop 9999
600
601 END IF
602
603 ELSE
604 print*,'UNKNOWN MODEL OUTPUT FORMAT, STOPPING'
605 stop 9999
606 END IF
607 initpost_tim = initpost_tim +(mpi_wtime() - btim)
608 IF(me == 0)THEN
609 WRITE(6,*)'WRFPOST: INITIALIZED POST COMMON BLOCKS'
610 ENDIF
611!
612! IF GRIB2, read the post available-fields xml file and the post control file
613!
614 if(grib == "grib2") then
615 btim=mpi_wtime()
616 call read_xml()
617 readxml_tim = readxml_tim + (mpi_wtime() - btim)
618 endif
619!
620! LOOP OVER THE OUTPUT GRID(S). FIELD(S) AND OUTPUT GRID(S) ARE SPECIFIED
621! IN THE CONTROL FILE. WE PROCESS ONE GRID AND ITS FIELDS AT A TIME.
622! THAT'S WHAT THIS LOOP DOES.
623!
624 icount_calmict = 0
625 first_grbtbl = .true.
626 npset = 0
627!10 CONTINUE
628!
629! READ CONTROL FILE DIRECTING WHICH FIELDS TO OUTPUT ON WHICH
630! LEVELS AND TO WHICH GRID TO INTERPOLATE THE DATA.
631! VARIABLE IEOF/=0 WHEN THERE ARE NO MORE GRIDS TO PROCESS.
632!
633! -------- grib1 processing ---------------
634! ------------------
635! if (grib == "grib1") then !DO NOT REVERT TO GRIB1. GRIB1 NOT SUPPORTED ANYMORE
636! IEOF = 0
637! do while (ieof == 0)
638! CALL READCNTRL(kth,IEOF)
639! IF(ME == 0)THEN
640! WRITE(6,*)'POST: RETURN FROM READCNTRL. ', 'IEOF=',IEOF
641! ENDIF
642!
643! PROCESS SELECTED FIELDS. FOR EACH SELECTED FIELD/LEVEL
644! WE GO THROUGH THE FOLLOWING STEPS:
645! (1) COMPUTE FIELD IF NEED BE
646! (2) WRITE FIELD TO OUTPUT FILE IN GRIB.
647!
648! if (ieof == 0) then
649! CALL PROCESS(kth,kpv,th(1:kth),pv(1:kpv),iostatusD3D)
650! IF(ME == 0)THEN
651! WRITE(6,*)' '
652! WRITE(6,*)'WRFPOST: PREPARE TO PROCESS NEXT GRID'
653! ENDIF
654! endif
655!
656! PROCESS NEXT GRID.
657!
658! enddo
659! -------- grib2 processing ---------------
660! ------------------
661! elseif (grib == "grib2") then
662 if (me==0) write(*,*) ' in WRFPOST OUTFORM= ',grib
663 if (me==0) write(*,*) ' GRIB1 IS NOT SUPPORTED ANYMORE'
664 if (grib == "grib2") then
665 do while (npset < num_pset)
666 npset = npset+1
667 if (me==0) write(*,*)' in WRFPOST npset=',npset,' num_pset=',num_pset
668 CALL set_outflds(kth,th,kpv,pv)
669 if (me==0) write(*,*)' in WRFPOST size datapd',size(datapd)
670 if(allocated(datapd)) deallocate(datapd)
671!Jesse x-decomposition
672! allocate(datapd(im,1:jend-jsta+1,nrecout+100))
673 allocate(datapd(1:iend-ista+1,1:jend-jsta+1,nrecout+100))
674!$omp parallel do private(i,j,k)
675 do k=1,nrecout+100
676 do j=1,jend+1-jsta
677!Jesse x-decomposition
678! do i=1,im
679 do i =1,iend+1-ista
680 datapd(i,j,k) = 0.
681 enddo
682 enddo
683 enddo
684 call get_postfilename(post_fname)
685 if (me==0) write(*,*)'post_fname=',trim(post_fname)
686 if (me==0) write(*,*)'get_postfilename,post_fname=',trim(post_fname), &
687 'npset=',npset, 'num_pset=',num_pset, &
688 'iSF_SURFACE_PHYSICS=',isf_surface_physics
689!
690! PROCESS SELECTED FIELDS. FOR EACH SELECTED FIELD/LEVEL
691! WE GO THROUGH THE FOLLOWING STEPS:
692! (1) COMPUTE FIELD IF NEED BE
693! (2) WRITE FIELD TO OUTPUT FILE IN GRIB.
694!
695 CALL process(kth,kpv,th(1:kth),pv(1:kpv),iostatusd3d)
696 IF(me == 0) WRITE(6,*)'WRFPOST: PREPARE TO PROCESS NEXT GRID'
697!
698! write(*,*)'enter gribit2 before mpi_barrier'
699 call mpi_barrier(mpi_comm_comp,ierr)
700
701! if(me==0)call w3tage('bf grb2 ')
702! write(*,*)'enter gribit2 after mpi barrier'
703 call gribit2(post_fname)
704 deallocate(datapd)
705 deallocate(fld_info)
706!
707! PROCESS NEXT GRID.
708!
709 enddo
710
711 endif
712!
713!-------
714 call grib_info_finalize()
715!
716 IF(me == 0) THEN
717 WRITE(6,*)' '
718 WRITE(6,*)'ALL GRIDS PROCESSED.'
719 WRITE(6,*)' '
720 ENDIF
721!
722 call de_allocate
723
724! GO TO 98
725 1000 CONTINUE
726!exp call ext_ncd_ioclose ( DataHandle, Status )
727!
728 IF(me == 0) THEN
729 print*, 'INITPOST_tim = ', initpost_tim
730 print*, 'MDLFLD_tim = ', etafld2_tim
731 print*, 'MDL2P_tim = ',eta2p_tim
732 print*, 'MDL2SIGMA_tim = ',mdl2sigma_tim
733 print*, 'MDL2AGL_tim = ',mdl2agl_tim
734 print*, 'SURFCE_tim = ',surfce2_tim
735 print*, 'CLDRAD_tim = ',cldrad_tim
736 print*, 'MISCLN_tim = ',miscln_tim
737 print*, 'MDL2STD_tim = ',mdl2std_tim
738 print*, 'FIXED_tim = ',fixed_tim
739 print*, 'MDL2THANDPV_tim = ',mdl2thandpv_tim
740 print*, 'CALRAD_WCLOUD_tim = ',calrad_wcloud_tim
741 print*, 'RUN_IFI_tim = ',run_ifi_tim
742 print*, 'Total time = ',(mpi_wtime() - bbtim)
743 print*, 'Time for OUTPUT = ',time_output
744 print*, 'Time for READxml = ',readxml_tim
745 endif
746!
747! END OF PROGRAM.
748!
749!
750! MPI_LAST WILL SHUTDOWN THE IO SERVER, IF IT EXISTS
751!
752 CALL mpi_last
753!
754!
755 end if
756!
757!
758!
759! call summary()
760 if (me == 0) CALL w3tage('UNIFIED_POST')
761 CALL mpi_finalize(ierr)
762
763
764 stop 0
765
766 END
767
subroutine allocate_all()
SET UP MESSAGE PASSING INFO.
subroutine de_allocate
Definition DEALLOCATE.f:18
subroutine initpost
This routine initializes constants and variables at the start of an ETA model or post processor run.
Definition INITPOST.F:26
subroutine initpost_gfs_nems_mpiio(iostatusaer)
This routine initializes constants and variables at the start of a GFS model or post processor run.
subroutine initpost_nems(nrec, nfile)
This routine initializes constants and variables at the start of an NEMS model or post processor run.
subroutine initpost_netcdf(ncid2d, ncid3d)
This routine initializes constants and variables at the start of a GFS model or post processor run.
subroutine mpi_first()
SUBPROGRAM: MPI_FIRST SET UP MESSAGE PASSING INFO PRGRMMR: TUCCILLO ORG: IBM.
Definition MPI_FIRST.f:41
subroutine mpi_last
SUBPROGRAM: MPI_LAST SHUTS DOWN THE IO SERVER PRGRMMR: TUCCILLO ORG: IBM.
Definition MPI_LAST.f:32
subroutine read_xml()
SUBPROGRAM: READCNTRLgrb2_xml READS POST xml CONTROL FILE PRGRMMR: J.
Definition READ_xml.f:40
subroutine server
SUBPROGRAM: SERVER PERFORMS IO TO DISK PRGRMMR: TUCCILLO ORG: IBM.
Definition SERVER.f:37
subroutine setup_servers(mype, npes, inumq, mpi_comm_comp, mpi_comm_inter)
This subroutine is to setup I/O servers.
subroutine set_outflds(kth, th, kpv, pv)
This routine reads the control file in xml format specifying field(s) to post, and save all the field...
Definition SET_OUTFLDS.f:24
program wrfpost
2023-05-20 | Rahul Mahajan | Bug fix for fileNameFlat as namelist configurable
Definition WRFPOST.f:39