UPP 11.0.0
WRFPOST.F
!---------------------------------------------------------------------
!---------------------------------------------------------------------
      PROGRAM wrfpost

!
!
!============================================================================================================
!
!     This is an MPI code. All array indexing is with respect to the global indices. Loop indices
!     look as follows for N MPI tasks.
!
!
!
!  Original                                              New
!  Index                                                 Index
!
!    JM ----------------------------------------------- JEND
!  JM-1 -                                             - JEND_M
!  JM-2 -              MPI TASK N-1                   - JEND_M2
!       -                                             -
!       -                                             -
!       ----------------------------------------------- JSTA, JSTA_M, JSTA_M2
!       ----------------------------------------------- JEND, JEND_M, JEND_M2
!       -                                             -
!       -              MPI TASK N-2                   -
!       -                                             -
!       -                                             -
!       ----------------------------------------------- JSTA, JSTA_M, JSTA_M2
!
!                            .
!                            .
!                            .
!
!       ----------------------------------------------- JEND, JEND_M, JEND_M2
!       -                                             -
!       -              MPI TASK 1                     -
!       -                                             -
!       -                                             -
!       ----------------------------------------------- JSTA, JSTA_M, JSTA_M2
!       ----------------------------------------------- JEND, JEND_M, JEND_M2
!       -                                             -
!       -              MPI TASK 0                     -
!     3 -                                             - JSTA_M2
!     2 -                                             - JSTA_M
!     1 ----------------------------------------------- JSTA
!
!       1                                             IM
!
!
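!     One way to compute the JSTA/JEND split drawn above (a sketch only;
!     the names chunk/rem are illustrative, and the actual decomposition
!     is set up later in MPI_FIRST):
!
!       chunk = jm / num_procs               ! whole rows per task
!       rem   = mod(jm, num_procs)           ! leftover rows
!       jsta  = me*chunk + 1 + min(me, rem)  ! first rem tasks get one extra row
!       jend  = jsta + chunk - 1
!       if (me < rem) jend = jend + 1
!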
!     Jim Tuccillo
!     Jan 2000
!
!     README - Jim Tuccillo Feb 2001
!
!     Many common blocks have been replaced by modules to support Fortran
!     "allocate" commands. Many of the 3-D arrays are now allocated to be the
!     exact size required based on the number of MPI tasks. The dimensioning is
!        x(im,jsta_2l:jend_2u,lm)
!     Most 2-D arrays continue to be dimensioned (im,jm). This is fine, but be aware
!     that the EXCH routine for arrays dimensioned (im,jm) is different from the one
!     for arrays dimensioned (im,jsta_2l:jend_2u). Also, be careful about passing any
!     arrays dimensioned (im,jsta_2l:jend_2u,lm). See the sketch below and the examples
!     in the code for the correct calling sequence and EXCH routine to use.
!
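!     For instance (a sketch only, assuming the halo bounds described above;
!     the exact EXCH interface depends on the array shape, so follow the real
!     calls in the code):
!
!       real, allocatable :: x(:,:,:)
!       allocate(x(im,jsta_2l:jend_2u,lm))   ! local rows jsta..jend plus halo rows
!       call exch(x(1,jsta_2l,1))            ! exchange halo rows for one level
!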
!
!     ASYNCHRONOUS I/O HAS BEEN ADDED. THE LAST MPI TASK DOES THE I/O. IF THERE IS
!     ONLY ONE MPI TASK, THEN TASK 0 DOES THE I/O.
!     THE CODE HAS GOTTEN A LITTLE KLUDGY. BASICALLY, IM, IMX AND IMOUT MUST BE EQUAL
!     AND REPRESENT THE VALUE USED IN THE MODEL. THE SAME HOLDS FOR JM, JMX AND JMOUT.
!
!     Jim Tuccillo June 2001
!
!
!===========================================================================================
!
      use netcdf
#if defined(BUILD_WITH_NEMSIO)
      use nemsio_module, only: nemsio_getheadvar, nemsio_gfile, nemsio_init, nemsio_open, &
                               nemsio_getfilehead, nemsio_close
#endif
      use ctlblk_mod, only: filenameaer, me, num_procs, num_servers, mpi_comm_comp, datestr, &
          mpi_comm_inter, filename, ioform, grib, idat, filenameflux, filenamed3d, gdsdegr, &
          spldef, modelname, ihrst, lsmdef, vtimeunits, tprec, pthresh, datahandle, im, jm, lm, &
          lp1, lm1, im_jm, isf_surface_physics, nsoil, spl, lsmp1, global, imp_physics, &
          ista, iend, ista_m, iend_m, ista_2l, iend_2u, &
          jsta, jend, jsta_m, jend_m, jsta_2l, jend_2u, novegtype, icount_calmict, npset, datapd, &
          lsm, fld_info, etafld2_tim, eta2p_tim, mdl2sigma_tim, mdl2agl_tim, cldrad_tim, &
          miscln_tim, mdl2std_tim, mdl2thandpv_tim, calrad_wcloud_tim, gccpp_on, nasa_on, &
          fixed_tim, time_output, imin, surfce2_tim, komax, ivegsrc, d3d_on, gocart_on, rdaod, &
          readxml_tim, spval, fullmodelname, submodelname, hyb_sigp, filenameflat, aqf_on, numx, &
          run_ifi_tim, slrutah_on, d2d_chem, gtg_on, method_blsn
      use grib2_module, only: gribit2, num_pset, nrecout, first_grbtbl, grib_info_finalize
      use upp_ifi_mod, only: write_ifi_debug_files
!- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      implicit none
!
#if defined(BUILD_WITH_NEMSIO)
      type(nemsio_gfile) :: nfile, ffile, rfile
#endif
      include "mpif.h"
!
!     DECLARE VARIABLES.
!
!     SET HEADER WRITER FLAGS TO TRUE.
!
!temporary vars
!
      real(kind=8) :: time_initpost=0., initpost_tim=0., btim, bbtim
      real rinc(5), untcnvt
      integer :: status=0, iostatusd3d=0, iostatusflux=0
      integer i, j, iii, l, k, ierr, nrec, ist, lusig, idrt, ncid3d, ncid2d, varid
      integer :: prntsec, iim, jjm, llm, ioutcount, itmp, iret, iunit, &
                 iunitd3d, iyear, imn, iday, lcntrl, ieof
      integer :: iostatusaer
      logical :: popascal
!
      integer :: kpo, kth, kpv
      real, dimension(komax) :: po, th, pv
      namelist/nampgb/ kpo, po, kth, th, kpv, pv, filenameaer, d3d_on, gocart_on, gccpp_on, &
          nasa_on, gtg_on, method_blsn, popascal, hyb_sigp, rdaod, d2d_chem, aqf_on, &
          slrutah_on, vtimeunits, numx, write_ifi_debug_files
      integer :: itag_ierr
      namelist/model_inputs/ filename, ioform, grib, datestr, modelname, submodelname, &
          filenameflux, filenameflat

      character startdate*19, sysdepinfo*80, iowrfname*3, post_fname*255
      character cgar*1, cdum*4, line*10
!
!------------------------------------------------------------------------------
!     START HERE
!
      call start()
!
!     INITIALIZE MPI

      CALL setup_servers(me, &
     &                   num_procs, &
     &                   num_servers, &
     &                   mpi_comm_comp, &
     &                   mpi_comm_inter)
!
!     ME IS THE RANK
!     NUM_PROCS IS THE NUMBER OF TASKS DOING THE POSTING
!     NUM_SERVERS IS ONE IF THERE IS MORE THAN ONE MPI TASK IN TOTAL, OTHERWISE ZERO
!     MPI_COMM_COMP IS THE INTRACOMMUNICATOR
!     MPI_COMM_INTER IS THE INTERCOMMUNICATOR FOR COMMUNICATION BETWEEN TASK 0 OF
!     THE TASKS DOING THE POSTING AND THE I/O SERVER
!
!
!     IF WE HAVE MORE THAN 1 MPI TASK, WE FIRE UP THE I/O SERVER.
!     THE LAST TASK (IN THE CONTEXT OF MPI_COMM_WORLD) IS THE I/O SERVER.
!
      if (me == 0) CALL w3tagb('nems ',0000,0000,0000,'np23 ')

      if ( me >= num_procs ) then
!
         call server
!
      else
         spval = 9.9e10
!
!**************************************************************************
!KaYee: Read itag in Fortran namelist format
!Set defaults
         submodelname = 'NONE'
!Set control file name
         filenameflat = 'postxconfig-NT.txt'
         numx = 1
!open namelist
         open(5,file='itag')
         read(5,nml=model_inputs,iostat=itag_ierr,err=888)
 888     if (itag_ierr /= 0) then
            print*,'Incorrect namelist variable(s) found in the itag file, stopping.'
            stop
         endif
         if (me == 0) write(6, model_inputs)

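!     An itag file for the model_inputs read above might look like this
!     (values are illustrative only):
!
!       &model_inputs
!        filename='dynf000.nc'
!        ioform='netcdfpara'
!        grib='grib2'
!        datestr='2024-01-01_00:00:00'
!        modelname='GFS'
!        filenameflux='phyf000.nc'
!       /
!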
!     if(MODELNAME == 'NMM')then
!       read(5,1114) VTIMEUNITS
! 1114  format(a4)
!       if (me==0) print*,'VALID TIME UNITS = ', VTIMEUNITS
!     endif
!
 303  format('MODELNAME="',a,'" SUBMODELNAME="',a,'"')

      if(me==0) write(*,*)'MODELNAME: ', modelname, submodelname

!     assume for now that the first date in the stdin file is the start date
      read(datestr,300) iyear,imn,iday,ihrst,imin
      if (me==0) write(*,*) 'in WRFPOST iyear,imn,iday,ihrst,imin', &
                            iyear,imn,iday,ihrst,imin
 300  format(i4,1x,i2,1x,i2,1x,i2,1x,i2)

      idat(1) = imn
      idat(2) = iday
      idat(3) = iyear
      idat(4) = ihrst
      idat(5) = imin

 111  format(a256)
 112  format(a19)
 113  format(a20)
 114  format(a8)
 120  format(a5)
 121  format(a4)

!KaYee: Read in GFS/FV3 runs in Fortran namelist format.
      if(grib=='grib2') then
        gdsdegr = 1.d6
      endif
!
!     set defaults for kpo, po, kth, th, kpv, pv
      kpo = 0
      po  = 0
      kth = 6
      th  = (/310.,320.,350.,450.,550.,650.,(0.,k=kth+1,komax)/)   ! isentropic levels to output
      kpv = 8
      pv  = (/0.5,-0.5,1.0,-1.0,1.5,-1.5,2.0,-2.0,(0.,k=kpv+1,komax)/)

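!     The implied do pads the rest of each length-komax array with zeros;
!     for example, if komax were 8, th above would evaluate to
!     (/310.,320.,350.,450.,550.,650.,0.,0./).
!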
      hyb_sigp    = .true.
      d3d_on      = .false.
      gocart_on   = .false.
      gccpp_on    = .false.
      nasa_on     = .false.
      aqf_on      = .false.
      slrutah_on  = .false.
      gtg_on      = .false.
      method_blsn = .true.
      popascal    = .false.
      filenameaer = ''
      rdaod       = .false.
      d2d_chem    = .false.
      vtimeunits  = ''

      read(5,nampgb,iostat=iret,end=119)
 119  continue
      if (me == 0) write(6, nampgb)
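!     An optional &nampgb block appended to the itag file might look like
!     this (values are illustrative only):
!
!       &nampgb
!        kpo=3, po=500.,700.,850.,
!        popascal=.false.
!       /
!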
      if(mod(num_procs,numx)/=0) then
        if (me==0) then
          print*,'total processes, num_procs=', num_procs
          print*,'number of subdomains in x direction, numx=', numx
          print*,'remainder of num_procs/numx = ', mod(num_procs,numx)
          print*,'Warning!!! The remainder of num_procs/numx is not 0; resetting numx=1 &
                 & for this run. Adjust numx in the itag file and restart to change it.'
        endif
!       stop 9999
        numx=1
        if(me == 0) print*,'Warning!!! Reset numx to 1, numx=',numx
      endif
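!     Example: num_procs=6 with numx=4 gives mod(6,4)=2, so numx falls
!     back to 1 (a pure 1-D decomposition in j).
!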
      if(numx>num_procs/2) then
        if (me==0) then
          print*,'total processes, num_procs=', num_procs
          print*,'number of subdomains in x direction, numx=', numx
          print*,'Warning!!! numx cannot exceed num_procs/2; resetting numx=1 for this run.'
          print*,'Adjust numx in the itag file and restart to change it.'
        endif
        numx=1
        if(me == 0) print*,'Warning!!! Reset numx to 1, numx=',numx
      endif
      if(me == 0) then
        print*,'komax,iret for nampgb= ',komax,iret
        print*,'komax,kpo,kth,th,kpv,pv,fileNameAER,nasa_on,popascal= ',komax,kpo &
             & ,kth,th(1:kth),kpv,pv(1:kpv),trim(filenameaer),nasa_on,popascal
        print*,'NUM_PROCS=',num_procs
        print*,'numx= ',numx
      endif

      IF(trim(ioform) /= 'netcdfpara' .AND. trim(ioform) /= 'netcdf' ) THEN
        numx=1
        if(me == 0) print*,'2D decomposition only supports netcdf/netcdfpara IO.'
        if(me == 0) print*,'Reset numx= ',numx
      ENDIF

      IF(modelname /= 'FV3R' .AND. modelname /= 'GFS') THEN
        numx=1
        if(me == 0) print*,'2D decomposition only supports GFS and FV3R.'
        if(me == 0) print*,'Reset numx= ',numx
      ENDIF

!     set up pressure levels from POSTGPVARS or default
      if(kpo == 0) then
!       use default pressure levels
        if(me == 0) then
          print*,'using default pressure levels, spldef=',(spldef(l),l=1,lsmdef)
        endif
        lsm = lsmdef
        do l=1,lsm
          spl(l) = spldef(l)
        end do
      else
!       use POSTGPVARS
        if(me == 0) then
          print*,'using pressure levels from POSTGPVARS'
        endif
        lsm = kpo
        if( .not. popascal ) then
          untcnvt = 100.
        else
          untcnvt = 1.
        endif
        if(po(lsm) < po(1))then   ! post logic assumes ascending levels
          do l=1,lsm
            spl(l) = po(lsm-l+1)*untcnvt
          end do
        else
          do l=1,lsm
            spl(l) = po(l)*untcnvt
          end do
        end if
      end if
      lsmp1 = lsm+1

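!     Example: kpo=3 with po=850.,700.,500. (hPa, popascal=.false.) is
!     reversed into ascending order and scaled by untcnvt=100., giving
!     spl=(/50000.,70000.,85000./) Pa, lsm=3, and lsmp1=4.
!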
 116  continue

!     set PTHRESH for different models
      if(modelname == 'NMM')then
        pthresh = 0.000004
      else
        pthresh = 0.000001
      end if
!Chuang: add dynamic allocation
      if(trim(ioform) == 'netcdf' .OR. trim(ioform) == 'netcdfpara') THEN
        IF(modelname == 'NCAR' .OR. modelname == 'RAPR' .OR. modelname == 'NMM') THEN
          call ext_ncd_ioinit(sysdepinfo,status)
          call ext_ncd_open_for_read( trim(filename), 0, 0, " ", &
                                      datahandle, status)
          if ( status /= 0 ) then
            print*,'error opening ',filename, ' Status = ', status ; stop
          endif
          call ext_ncd_get_dom_ti_integer(datahandle &
                 ,'WEST-EAST_GRID_DIMENSION',iim,1,ioutcount, status )
          im = iim-1
          call ext_ncd_get_dom_ti_integer(datahandle &
                 ,'SOUTH-NORTH_GRID_DIMENSION',jjm,1,ioutcount, status )
          jm = jjm-1
          call ext_ncd_get_dom_ti_integer(datahandle &
                 ,'BOTTOM-TOP_GRID_DIMENSION',llm,1,ioutcount, status )
          lm  = llm-1
          lp1 = lm+1
          lm1 = lm-1
          im_jm = im*jm

!         Read and set global value for surface physics scheme
          call ext_ncd_get_dom_ti_integer(datahandle &
                 ,'SF_SURFACE_PHYSICS',itmp,1,ioutcount, status )
          isf_surface_physics = itmp
!         set NSOIL to 4 as default for NOAH but change if using another
!         SFC scheme
          nsoil = 4
          IF(itmp == 1) then        ! thermal diffusion scheme
            nsoil = 5
          ELSE IF(itmp == 3) then   ! RUC LSM
            nsoil = 9
          ELSE IF(itmp == 7) then   ! Pleim-Xiu
            nsoil = 2
          END IF

          call ext_ncd_ioclose ( datahandle, status )
        ELSE
!         use the parallel netCDF library directly to read FV3 output in netCDF
          spval = 9.99e20
          status = nf90_open(trim(filename),ior(nf90_nowrite,nf90_mpiio), &
                             ncid3d,comm=mpi_comm_world,info=mpi_info_null)
          if ( status /= 0 ) then
            print*,'error opening ',filename, ' Status = ', status
            stop
          endif
          status = nf90_open(trim(filenameflux),ior(nf90_nowrite,nf90_mpiio), &
                             ncid2d,comm=mpi_comm_world,info=mpi_info_null)
          if ( status /= 0 ) then
            print*,'error opening ',filenameflux, ' Status = ', status
            stop
          endif
!         read in LSM index and nsoil here
          status=nf90_get_att(ncid2d,nf90_global,'landsfcmdl', isf_surface_physics)
          if(status/=0)then
            print*,'landsfcmdl not found; assigning 2'
            isf_surface_physics=2   ! set LSM physics to 2 for NOAH
          endif
          if(isf_surface_physics<2)then
            isf_surface_physics=2   ! set LSM physics to 2 for NOAH
          endif
          status=nf90_get_att(ncid2d,nf90_global,'nsoil', nsoil)
          if(status/=0)then
            print*,'nsoil not found; assigning 4'
            nsoil=4                 ! set nsoil to 4 for NOAH
          endif
!         read imp_physics
          status=nf90_get_att(ncid2d,nf90_global,'imp_physics',imp_physics)
          if(status/=0)then
            print*,'imp_physics not found; assigning GFDL (11)'
            imp_physics=11
          endif
!         get dimensions
          status = nf90_inq_dimid(ncid3d,'grid_xt',varid)
          if ( status /= 0 ) then
            print*,status,varid
            stop 1
          end if
          status = nf90_inquire_dimension(ncid3d,varid,len=im)
          if ( status /= 0 ) then
            print*,status
            stop 1
          end if
          status = nf90_inq_dimid(ncid3d,'grid_yt',varid)
          if ( status /= 0 ) then
            print*,status,varid
            stop 1
          end if
          status = nf90_inquire_dimension(ncid3d,varid,len=jm)
          if ( status /= 0 ) then
            print*,status
            stop 1
          end if
          status = nf90_inq_dimid(ncid3d,'pfull',varid)
          if ( status /= 0 ) then
            print*,status,varid
            stop 1
          end if
          status = nf90_inquire_dimension(ncid3d,varid,len=lm)
          if ( status /= 0 ) then
            print*,status
            stop 1
          end if
          lp1 = lm+1
          lm1 = lm-1
          im_jm = im*jm
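!         The three dimension lookups above share one pattern; a helper of
!         this shape (a sketch, not part of UPP) would condense them:
!
!           subroutine get_dim_len(ncid, dimname, dimlen)
!             use netcdf
!             integer,          intent(in)  :: ncid
!             character(len=*), intent(in)  :: dimname
!             integer,          intent(out) :: dimlen
!             integer :: dimid, status
!             status = nf90_inq_dimid(ncid, dimname, dimid)
!             if (status /= nf90_noerr) stop 1
!             status = nf90_inquire_dimension(ncid, dimid, len=dimlen)
!             if (status /= nf90_noerr) stop 1
!           end subroutine get_dim_len
!
!         e.g.  call get_dim_len(ncid3d, 'grid_xt', im)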
!         set NSOIL to 4 as default for NOAH but change if using another
!         SFC scheme
!         NSOIL = 4
        END IF

      ELSE IF(trim(ioform) == 'binary' .OR. &
              trim(ioform) == 'binarympiio' ) THEN
        print*,'WRF binary format is no longer supported'
        stop 9996
!     NEMSIO format
#if defined(BUILD_WITH_NEMSIO)
      ELSE IF(trim(ioform) == 'binarynemsio' .or. &
              trim(ioform) == 'binarynemsiompiio' )THEN

        spval = 9.99e20
        IF(me == 0)THEN
          call nemsio_init(iret=status)
          call nemsio_open(nfile,trim(filename),'read',iret=status)
          if ( status /= 0 ) then
            print*,'error opening ',filename, ' Status = ', status ; stop
          endif
!---
          call nemsio_getfilehead(nfile,iret=status,nrec=nrec &
                 ,dimx=im,dimy=jm,dimz=lm,nsoil=nsoil)
          if ( status /= 0 ) then
            print*,'error finding model dimensions '; stop
          endif
          call nemsio_getheadvar(nfile,'global',global,iret)
          if (iret /= 0)then
            print*,"global not found in file - assigning .false."
            global = .false.
          end if
          IF(modelname == 'GFS') global = .true.
!         global NMMB has i=1 overlapping i=im, so post cuts off i=im
          if(global .and. modelname == 'NMM') im = im-1

        END IF

        CALL mpi_bcast(im,   1,mpi_integer,0, mpi_comm_comp,status)
        call mpi_bcast(jm,   1,mpi_integer,0, mpi_comm_comp,status)
        call mpi_bcast(lm,   1,mpi_integer,0, mpi_comm_comp,status)
        call mpi_bcast(nsoil,1,mpi_integer,0, mpi_comm_comp,status)

        call mpi_bcast(global,1,mpi_logical,0,mpi_comm_comp,status)
        lp1 = lm+1
        lm1 = lm-1
        im_jm = im*jm

!       opening GFS flux file
        IF(modelname == 'GFS') THEN
!         iunit=33
          call nemsio_open(ffile,trim(filenameflux),'read',iret=iostatusflux)
          if ( iostatusflux /= 0 ) then
            print*,'error opening ',filenameflux, ' Status = ', iostatusflux
          endif
          iostatusd3d = -1
          iunitd3d    = -1
!
!         opening GFS aer file
          call nemsio_open(rfile,trim(filenameaer),'read',iret=iostatusaer)
          if ( iostatusaer /= 0 .and. me == 0) then
            print*,'error opening AER ',filenameaer, ' Status = ', iostatusaer
          endif
!
!         print*,'iostatusD3D in WRFPOST= ',iostatusD3D

        END IF
#endif
      ELSE
        print*,'UNKNOWN MODEL OUTPUT FORMAT, STOPPING'
        stop 9999
      END IF


      CALL mpi_first()
      CALL allocate_all()

!
!     INITIALIZE POST COMMON BLOCKS
!
      lcntrl = 14
      rewind(lcntrl)

!     EXP. initialize netcdf here instead
      bbtim = mpi_wtime()
      btim  = mpi_wtime()
!     set default novegtype
      if(modelname == 'GFS')THEN
        novegtype = 13
        ivegsrc   = 2
      else if(modelname=='NMM' .and. trim(ioform)=='binarynemsio')then
        novegtype = 20
        ivegsrc   = 1
      else if(modelname=='RAPR')then
        novegtype = 20
        ivegsrc   = 1
      else                      ! USGS
        novegtype = 24
        ivegsrc   = 0
      end if

!     Read model output for the different models and IO formats

      IF(trim(ioform) == 'netcdf' .OR. trim(ioform) == 'netcdfpara') THEN
        IF(modelname == 'NCAR' .OR. modelname == 'RAPR') THEN
          CALL initpost
        ELSE IF (modelname == 'FV3R' .OR. modelname == 'GFS') THEN
!         use the parallel netCDF library to read output directly
          CALL initpost_netcdf(ncid2d,ncid3d)
        ELSE
          print*,'POST does not have a netcdf option for model ',modelname,', STOPPING'
          stop 9998
        END IF
      ELSE IF(trim(ioform) == 'binarympiio') THEN
        IF(modelname == 'NCAR' .OR. modelname == 'RAPR' .OR. modelname == 'NMM') THEN
          print*,'WRF BINARY IO FORMAT IS NO LONGER SUPPORTED, STOPPING'
          stop 9996
        ELSE IF(modelname == 'RSM') THEN
          print*,'MPI BINARY IO IS NOT YET INSTALLED FOR RSM, STOPPING'
          stop 9997
        ELSE
          print*,'POST does not have an mpiio option for this model, STOPPING'
          stop 9998
        END IF
#if defined(BUILD_WITH_NEMSIO)
      ELSE IF(trim(ioform) == 'binarynemsio') THEN
        IF(modelname == 'NMM') THEN
          CALL initpost_nems(nrec,nfile)
        ELSE
          print*,'POST does not have a nemsio option for model ',modelname,', STOPPING'
          stop 9998

        END IF

      ELSE IF(trim(ioform) == 'binarynemsiompiio')THEN
        IF(modelname == 'GFS') THEN
!         close nemsio file for serial read
          call nemsio_close(nfile,iret=status)
          call nemsio_close(ffile,iret=status)
          call nemsio_close(rfile,iret=status)
          CALL initpost_gfs_nems_mpiio(iostatusaer)
        ELSE
          print*,'POST does not have a nemsio mpi option for model ',modelname, &
                 ', STOPPING'
          stop 9999

        END IF
#endif
      ELSE
        print*,'UNKNOWN MODEL OUTPUT FORMAT, STOPPING'
        stop 9999
      END IF
      initpost_tim = initpost_tim + (mpi_wtime() - btim)
      IF(me == 0)THEN
        WRITE(6,*)'WRFPOST: INITIALIZED POST COMMON BLOCKS'
      ENDIF
!
!     If grib2, read the post available-fields XML file and the post control file
!
      if(grib == "grib2") then
        btim = mpi_wtime()
        call read_xml()
        readxml_tim = readxml_tim + (mpi_wtime() - btim)
      endif
!
!     LOOP OVER THE OUTPUT GRID(S). FIELD(S) AND OUTPUT GRID(S) ARE SPECIFIED
!     IN THE CONTROL FILE. WE PROCESS ONE GRID AND ITS FIELDS AT A TIME.
!     THAT'S WHAT THIS LOOP DOES.
!
      icount_calmict = 0
      first_grbtbl   = .true.
      npset          = 0
!10   CONTINUE
!
!     READ CONTROL FILE DIRECTING WHICH FIELDS ON WHICH
!     LEVELS AND TO WHICH GRID TO INTERPOLATE DATA TO.
!     VARIABLE IEOF/=0 WHEN THERE ARE NO MORE GRIDS TO PROCESS.
!
!     -------- grib1 processing ---------------
!     ------------------
!     if (grib == "grib1") then   ! DO NOT REVERT TO GRIB1. GRIB1 IS NOT SUPPORTED ANYMORE
!       IEOF = 0
!       do while (ieof == 0)
!         CALL READCNTRL(kth,IEOF)
!         IF(ME == 0)THEN
!           WRITE(6,*)'POST: RETURN FROM READCNTRL. ', 'IEOF=',IEOF
!         ENDIF
!
!         PROCESS SELECTED FIELDS. FOR EACH SELECTED FIELD/LEVEL
!         WE GO THROUGH THE FOLLOWING STEPS:
!           (1) COMPUTE FIELD IF NEED BE
!           (2) WRITE FIELD TO OUTPUT FILE IN GRIB.
!
!         if (ieof == 0) then
!           CALL PROCESS(kth,kpv,th(1:kth),pv(1:kpv),iostatusD3D)
!           IF(ME == 0)THEN
!             WRITE(6,*)' '
!             WRITE(6,*)'WRFPOST: PREPARE TO PROCESS NEXT GRID'
!           ENDIF
!         endif
!
!         PROCESS NEXT GRID.
!
!       enddo
!     -------- grib2 processing ---------------
!     ------------------
!     elseif (grib == "grib2") then
      if (me==0) write(*,*) ' in WRFPOST OUTFORM= ',grib
      if (me==0) write(*,*) ' GRIB1 IS NOT SUPPORTED ANYMORE'
      if (grib == "grib2") then
        do while (npset < num_pset)
          npset = npset+1
          if (me==0) write(*,*)' in WRFPOST npset=',npset,' num_pset=',num_pset
          CALL set_outflds(kth,th,kpv,pv)
          if (me==0) write(*,*)' in WRFPOST size datapd',size(datapd)
          if(allocated(datapd)) deallocate(datapd)
!Jesse x-decomposition
!         allocate(datapd(im,1:jend-jsta+1,nrecout+100))
          allocate(datapd(1:iend-ista+1,1:jend-jsta+1,nrecout+100))
!$omp parallel do private(i,j,k)
          do k=1,nrecout+100
            do j=1,jend+1-jsta
!Jesse x-decomposition
!             do i=1,im
              do i=1,iend+1-ista
                datapd(i,j,k) = 0.
              enddo
            enddo
          enddo
          call get_postfilename(post_fname)
          if (me==0) write(*,*)'post_fname=',trim(post_fname)
          if (me==0) write(*,*)'get_postfilename,post_fname=',trim(post_fname), &
                               'npset=',npset, 'num_pset=',num_pset, &
                               'iSF_SURFACE_PHYSICS=',isf_surface_physics
!
!         PROCESS SELECTED FIELDS. FOR EACH SELECTED FIELD/LEVEL
!         WE GO THROUGH THE FOLLOWING STEPS:
!           (1) COMPUTE FIELD IF NEED BE
!           (2) WRITE FIELD TO OUTPUT FILE IN GRIB.
!
          CALL process(kth,kpv,th(1:kth),pv(1:kpv),iostatusd3d)
          IF(me == 0) WRITE(6,*)'WRFPOST: PREPARE TO PROCESS NEXT GRID'
!
!         write(*,*)'enter gribit2 before mpi_barrier'
          call mpi_barrier(mpi_comm_comp,ierr)

!         if(me==0)call w3tage('bf grb2 ')
!         write(*,*)'enter gribit2 after mpi barrier'
          call gribit2(post_fname)
          deallocate(datapd)
          deallocate(fld_info)
!
!         PROCESS NEXT GRID.
!
        enddo

      endif
!
!-------
      call grib_info_finalize()
!
      IF(me == 0) THEN
        WRITE(6,*)' '
        WRITE(6,*)'ALL GRIDS PROCESSED.'
        WRITE(6,*)' '
      ENDIF
!
      call de_allocate

!     GO TO 98
 1000 CONTINUE
!exp  call ext_ncd_ioclose ( DataHandle, Status )
!
      IF(me == 0) THEN
        print*, 'INITPOST_tim = ',      initpost_tim
        print*, 'MDLFLD_tim = ',        etafld2_tim
        print*, 'MDL2P_tim = ',         eta2p_tim
        print*, 'MDL2SIGMA_tim = ',     mdl2sigma_tim
        print*, 'MDL2AGL_tim = ',       mdl2agl_tim
        print*, 'SURFCE_tim = ',        surfce2_tim
        print*, 'CLDRAD_tim = ',        cldrad_tim
        print*, 'MISCLN_tim = ',        miscln_tim
        print*, 'MDL2STD_tim = ',       mdl2std_tim
        print*, 'FIXED_tim = ',         fixed_tim
        print*, 'MDL2THANDPV_tim = ',   mdl2thandpv_tim
        print*, 'CALRAD_WCLOUD_tim = ', calrad_wcloud_tim
        print*, 'RUN_IFI_tim = ',       run_ifi_tim
        print*, 'Total time = ',        (mpi_wtime() - bbtim)
        print*, 'Time for OUTPUT = ',   time_output
        print*, 'Time for READxml = ',  readxml_tim
      endif
!
!     END OF PROGRAM.
!
!
!     MPI_LAST WILL SHUT DOWN THE I/O SERVER, IF IT EXISTS.
!
      CALL mpi_last
!
!
      end if
!
!
!
!     call summary()
      if (me == 0) CALL w3tage('UNIFIED_POST')
      CALL mpi_finalize(ierr)


      stop 0

      END