source: flexpart.git/src/init_domainfill_mpi.f90 @ 4c64400

Last change on this file since 4c64400 was 16b61a5, checked in by Espen Sollum ATMOS <eso@…>, 8 years ago

Reworked the domain-filling option (MPI). Fixed a slow loop which had errors in loop counter (MPI)

!**********************************************************************
! Copyright 1998,1999,2000,2001,2002,2005,2007,2008,2009,2010         *
! Andreas Stohl, Petra Seibert, A. Frank, Gerhard Wotawa,             *
! Caroline Forster, Sabine Eckhardt, John Burkhart, Harald Sodemann   *
!                                                                     *
! This file is part of FLEXPART.                                      *
!                                                                     *
! FLEXPART is free software: you can redistribute it and/or modify    *
! it under the terms of the GNU General Public License as published by*
! the Free Software Foundation, either version 3 of the License, or   *
! (at your option) any later version.                                 *
!                                                                     *
! FLEXPART is distributed in the hope that it will be useful,         *
! but WITHOUT ANY WARRANTY; without even the implied warranty of      *
! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the       *
! GNU General Public License for more details.                        *
!                                                                     *
! You should have received a copy of the GNU General Public License   *
! along with FLEXPART.  If not, see <http://www.gnu.org/licenses/>.   *
!**********************************************************************

subroutine init_domainfill
!
!*****************************************************************************
!                                                                            *
! Initializes particles equally distributed over the first release location  *
! specified in file RELEASES. This box is assumed to be the domain for doing *
! domain-filling trajectory calculations.                                    *
! All particles carry the same amount of mass, which altogether comprises the*
! mass of air within the box.                                                *
!                                                                            *
!     Author: A. Stohl                                                       *
!                                                                            *
!     15 October 2002                                                        *
!                                                                            *
!  CHANGES                                                                   *
!    12/2014 eso: MPI version                                                *
!*****************************************************************************
!                                                                            *
! Variables:                                                                 *
!                                                                            *
! numparticlecount    consecutively counts the number of particles released  *
! nx_we(2)       grid indices for western and eastern boundary of domain-    *
!                filling trajectory calculations                             *
! ny_sn(2)       grid indices for southern and northern boundary of domain-  *
!                filling trajectory calculations                             *
!                                                                            *
!*****************************************************************************
! MPI version:
!
!   - Root process allocates temporary arrays holding properties for
!     all particles in the simulation
!   - An index array is used to assign the 1st particle to the 1st process,
!     the 2nd particle to the 2nd process and so on, so that they are
!     evenly distributed geographically
!   - Initialization for domain-filling is done as in the serial code
!   - Root process distributes particles evenly to the other processes
!
!*****************************************************************************

  use point_mod
  use par_mod
  use com_mod
  use random_mod, only: ran1
  use mpi_mod

  implicit none

! ncolumn_mpi,numparttot_mpi        ncolumn,numparttot per process
  integer :: j,ix,jy,kz,ncolumn,numparttot,ncolumn_mpi,numparttot_mpi, arr_size
  real :: gridarea(0:nymax-1),pp(nzmax),ylat,ylatp,ylatm,hzone
  real :: cosfactm,cosfactp,deltacol,dz1,dz2,dz,pnew,fractus
  real,parameter :: pih=pi/180.
  real :: colmass(0:nxmax-1,0:nymax-1),colmasstotal,zposition

  integer :: ixm,ixp,jym,jyp,indzm,indzp,in,indzh,i,jj
  real :: pvpart,ddx,ddy,rddx,rddy,p1,p2,p3,p4,y1(2)

  integer :: idummy = -11
  integer,allocatable,dimension(:) :: idx ! index array
  integer :: stride
  integer, parameter :: nullsize=0
  logical :: first_call=.true.

! Use different seed for each process ! TODO: not needed anymore
  if (first_call) then
    idummy=idummy+mp_seed
    first_call=.false.
  end if

! Determine the release region (only full grid cells), over which particles
! shall be initialized
! Use 2 fields for west/east and south/north boundary
!**************************************************************************

  nx_we(1)=max(int(xpoint1(1)),0)
  nx_we(2)=min((int(xpoint2(1))+1),nxmin1)
  ny_sn(1)=max(int(ypoint1(1)),0)
  ny_sn(2)=min((int(ypoint2(1))+1),nymin1)
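! Example (hypothetical values): xpoint1(1)=10.3 and xpoint2(1)=50.7 give
! nx_we(1)=10 and nx_we(2)=51, i.e. the release box is expanded to whole
! grid cells and clipped to the domain limits 0..nxmin1.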

! For global simulations (both global wind data and global domain-filling),
! set a switch, such that no boundary conditions are used
!**************************************************************************
  if (xglobal.and.sglobal.and.nglobal) then
    if ((nx_we(1).eq.0).and.(nx_we(2).eq.nxmin1).and. &
         (ny_sn(1).eq.0).and.(ny_sn(2).eq.nymin1)) then
      gdomainfill=.true.
    else
      gdomainfill=.false.
    endif
  endif

! Do not release particles twice (i.e., not in both the leftmost and the
! rightmost grid cell) for a global domain
!*****************************************************************************
  if (xglobal) nx_we(2)=min(nx_we(2),nx-2)


! This section is only done by the root process
!*******************************************
  if (lroot) then
! Arrays for particles to be released. Add a small number to npart(1) in case
! of round-off errors
    arr_size = npart(1) + mp_np
    allocate(itra1_tmp(arr_size),npoint_tmp(arr_size),nclass_tmp(arr_size),&
         & idt_tmp(arr_size),itramem_tmp(arr_size),itrasplit_tmp(arr_size),&
         & xtra1_tmp(arr_size),ytra1_tmp(arr_size),ztra1_tmp(arr_size),&
         & xmass1_tmp(arr_size, maxspec))

! Index array for the particles. This is to avoid having the particles
! near the edges of the domain all on one process.
!****************************************************************************
    allocate(idx(npart(1)))
    stride = npart(1)/mp_partgroup_np

    jj=0
    do j=1, stride
      do i=0, mp_partgroup_np-1
        jj = jj+1
        if (jj.gt.npart(1)) exit
        idx(jj) = i*stride+j
      end do
    end do

! Add extra indices if npart(1) is not evenly divisible by the number of
! processes
    do i=1, mod(npart(1),mp_partgroup_np)
      jj = jj+1
      if (jj.gt.npart(1)) exit
      idx(jj) = jj
    end do
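! Worked example of the striding scheme: for npart(1)=10 and
! mp_partgroup_np=3, stride=3 and
!   idx = (/ 1, 4, 7, 2, 5, 8, 3, 6, 9, 10 /)
! The jj-th particle created below is stored at slot idx(jj), so each
! process's contiguous block of slots receives particles from all over the
! domain rather than from a single geographic region.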

! Initialize all particles as non-existent
    itra1_tmp(:)=-999999999

! Calculate the area of each grid cell with the formula M=2*pi*R*h*dx/360,
! see Netz, Formeln der Mathematik, 5. Auflage (1983), p.90
!************************************************************
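! Geometry note: for a spherical zone between latitudes phi- and phi+ the
! zone height is h = R*(sin(phi+)-sin(phi-)), and since R*sin(phi) =
! sqrt(R**2-(R*cos(phi))**2), hzone below equals this difference (the two
! branches only order the terms by hemisphere). The factor dx/360. restricts
! the full zone area 2*pi*R*h to the longitude span of one grid cell.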

    do jy=ny_sn(1),ny_sn(2)      ! loop over latitudes
      ylat=ylat0+real(jy)*dy
      ylatp=ylat+0.5*dy
      ylatm=ylat-0.5*dy
      if ((ylatm.lt.0).and.(ylatp.gt.0.)) then
        hzone=1./dyconst
      else
        cosfactp=cos(ylatp*pih)*r_earth
        cosfactm=cos(ylatm*pih)*r_earth
        if (cosfactp.lt.cosfactm) then
          hzone=sqrt(r_earth**2-cosfactp**2)- &
               sqrt(r_earth**2-cosfactm**2)
        else
          hzone=sqrt(r_earth**2-cosfactm**2)- &
               sqrt(r_earth**2-cosfactp**2)
        endif
      endif
      gridarea(jy)=2.*pi*r_earth*hzone*dx/360.
    end do

! Do the same for the south pole

    if (sglobal) then
      ylat=ylat0
      ylatp=ylat+0.5*dy
      ylatm=ylat
      cosfactm=0.
      cosfactp=cos(ylatp*pih)*r_earth
      hzone=sqrt(r_earth**2-cosfactm**2)- &
           sqrt(r_earth**2-cosfactp**2)
      gridarea(0)=2.*pi*r_earth*hzone*dx/360.
    endif

! Do the same for the north pole

    if (nglobal) then
      ylat=ylat0+real(nymin1)*dy
      ylatp=ylat
      ylatm=ylat-0.5*dy
      cosfactp=0.
      cosfactm=cos(ylatm*pih)*r_earth
      hzone=sqrt(r_earth**2-cosfactp**2)- &
           sqrt(r_earth**2-cosfactm**2)
      gridarea(nymin1)=2.*pi*r_earth*hzone*dx/360.
    endif


! Calculate the total mass of each grid column and of the whole atmosphere
!*********************************************************************
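! Hydrostatic balance, dp = -rho*g*dz, implies that an air column of base
! area A between surface pressure p(1) and model-top pressure p(nz) has
! mass m = A*(p(1)-p(nz))/ga, which is what the loop below evaluates.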

    colmasstotal=0.
    do jy=ny_sn(1),ny_sn(2)          ! loop over latitudes
      do ix=nx_we(1),nx_we(2)      ! loop over longitudes
        pp(1)=rho(ix,jy,1,1)*r_air*tt(ix,jy,1,1)
        pp(nz)=rho(ix,jy,nz,1)*r_air*tt(ix,jy,nz,1)
        colmass(ix,jy)=(pp(1)-pp(nz))/ga*gridarea(jy)
        colmasstotal=colmasstotal+colmass(ix,jy)
      end do
    end do

    write(*,*) 'Atm. mass: ',colmasstotal


    if (ipin.eq.0) numpart=0

! Determine the particle positions
!*********************************

    numparttot=0
    numcolumn=0
    do jy=ny_sn(1),ny_sn(2)      ! loop over latitudes
      ylat=ylat0+real(jy)*dy
      do ix=nx_we(1),nx_we(2)      ! loop over longitudes
        ncolumn=nint(0.999*real(npart(1))*colmass(ix,jy)/ &
             colmasstotal)
        if (ncolumn.eq.0) goto 30
        if (ncolumn.gt.numcolumn) numcolumn=ncolumn

! Calculate the pressure at the altitudes of the model surfaces, using the
! air density information, which is stored as a 3-d field
!*****************************************************************************
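! The model levels store density and temperature, so the level pressures
! follow from the ideal gas law, p = rho*r_air*tt.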

        do kz=1,nz
          pp(kz)=rho(ix,jy,kz,1)*r_air*tt(ix,jy,kz,1)
        end do

        deltacol=(pp(1)-pp(nz))/real(ncolumn)
        pnew=pp(1)+deltacol/2.
        jj=0
        do j=1,ncolumn
          jj=jj+1

! For columns with many particles (i.e. around the equator), distribute
! the particles equally; for columns with few particles (i.e. around the
! poles), distribute the particles randomly
!***********************************************************************

          if (ncolumn.gt.20) then
            pnew=pnew-deltacol
          else
            pnew=pp(1)-ran1(idummy)*(pp(1)-pp(nz))
          endif
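! Both branches sample uniformly in pressure; by hydrostatics, equal
! pressure intervals hold equal mass, so the particles are uniform in mass.
! The deterministic branch places particle j at pnew = pp(1)-(j-0.5)*deltacol,
! the midpoint of the j-th equal-mass slab.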

          do kz=1,nz-1
            if ((pp(kz).ge.pnew).and.(pp(kz+1).lt.pnew)) then
              dz1=pp(kz)-pnew
              dz2=pnew-pp(kz+1)
              dz=1./(dz1+dz2)

! Assign the particle position
!*************************
! Do the following steps only if particles are not read in from a previous
! model run
!*****************************************************************************
              if (ipin.eq.0) then
                xtra1_tmp(idx(numpart+jj))=real(ix)-0.5+ran1(idummy)
                if (ix.eq.0) xtra1_tmp(idx(numpart+jj))=ran1(idummy)
                if (ix.eq.nxmin1) xtra1_tmp(idx(numpart+jj))= &
                     real(nxmin1)-ran1(idummy)
                ytra1_tmp(idx(numpart+jj))=real(jy)-0.5+ran1(idummy)
                ztra1_tmp(idx(numpart+jj))=(height(kz)*dz2+height(kz+1)*dz1)*dz
                if (ztra1_tmp(idx(numpart+jj)).gt.height(nz)-0.5) &
                     ztra1_tmp(idx(numpart+jj))=height(nz)-0.5
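! The vertical position is interpolated linearly in pressure between the
! bracketing model levels kz (below) and kz+1 (above); each level height is
! weighted with the pressure distance to the opposite level, and the result
! is clamped to just below the model top.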

! Interpolate PV to the particle position
!****************************************
                ixm=int(xtra1_tmp(idx(numpart+jj)))
                jym=int(ytra1_tmp(idx(numpart+jj)))
                ixp=ixm+1
                jyp=jym+1
                ddx=xtra1_tmp(idx(numpart+jj))-real(ixm)
                ddy=ytra1_tmp(idx(numpart+jj))-real(jym)
                rddx=1.-ddx
                rddy=1.-ddy
                p1=rddx*rddy
                p2=ddx*rddy
                p3=rddx*ddy
                p4=ddx*ddy
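! p1..p4 are the standard bilinear interpolation weights of the four
! surrounding grid points; they are non-negative and sum to 1.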
                do i=2,nz
                  if (height(i).gt.ztra1_tmp(idx(numpart+jj))) then
                    indzm=i-1
                    indzp=i
                    goto 6
                  endif
                end do
6               continue
                dz1=ztra1_tmp(idx(numpart+jj))-height(indzm)
                dz2=height(indzp)-ztra1_tmp(idx(numpart+jj))
                dz=1./(dz1+dz2)
                do in=1,2
                  indzh=indzm+in-1
                  y1(in)=p1*pv(ixm,jym,indzh,1) &
                       +p2*pv(ixp,jym,indzh,1) &
                       +p3*pv(ixm,jyp,indzh,1) &
                       +p4*pv(ixp,jyp,indzh,1)
                end do
                pvpart=(dz2*y1(1)+dz1*y1(2))*dz
                if (ylat.lt.0.) pvpart=-1.*pvpart
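! Potential vorticity is negative in the Southern Hemisphere; flipping the
! sign makes pvpart a PV magnitude that can be compared against the
! threshold pvcrit below.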

! For domain-filling option 2 (stratospheric O3), do the rest only in the
! stratosphere
!*****************************************************************************

                if (((ztra1_tmp(idx(numpart+jj)).gt.3000.).and. &
                     (pvpart.gt.pvcrit)).or.(mdomainfill.eq.1)) then

! Assign certain properties to the particle
!******************************************
                  nclass_tmp(idx(numpart+jj))=min(int(ran1(idummy)* &
                       real(nclassunc))+1,nclassunc)
                  numparticlecount=numparticlecount+1
                  npoint_tmp(idx(numpart+jj))=numparticlecount
                  idt_tmp(idx(numpart+jj))=mintime
                  itra1_tmp(idx(numpart+jj))=0
                  itramem_tmp(idx(numpart+jj))=0
                  itrasplit_tmp(idx(numpart+jj))=itra1_tmp(idx(numpart+jj))+ldirect* &
                       itsplit
                  xmass1_tmp(idx(numpart+jj),1)=colmass(ix,jy)/real(ncolumn)
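! For mdomainfill=2 the particle's air mass is converted to an ozone mass:
! pvpart*ozonescale gives an O3 volume mixing ratio in ppbv from the assumed
! linear O3-PV relation, division by 10.**9 makes it dimensionless, and
! 48./29. (molar mass of O3 over that of air) converts the volume mixing
! ratio to a mass mixing ratio.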
                  if (mdomainfill.eq.2) xmass1_tmp(idx(numpart+jj),1)= &
                       xmass1_tmp(idx(numpart+jj),1)*pvpart*48./29.*ozonescale/10.**9
                else
                  jj=jj-1
                endif
              endif
            endif
          end do
        end do
        numparttot=numparttot+ncolumn
        if (ipin.eq.0) numpart=numpart+jj
30      continue
      end do
    end do


! Check whether numpart is really smaller than maxpart
!*****************************************************

! ESO :TODO: this warning needs to be moved further up, otherwise the
! out-of-bounds error occurs earlier
    if (numpart.gt.maxpart) then
      write(*,*) 'numpart too large: change source in init_atm_mass.f'
      write(*,*) 'numpart: ',numpart,' maxpart: ',maxpart
    endif

    xmassperparticle=colmasstotal/real(numparttot)

! Make sure that all particles are within the domain
!***********************************************

!    do j=1,numpart
    do j=1,npoint(1)
      if ((xtra1_tmp(j).lt.0.).or.(xtra1_tmp(j).ge.real(nxmin1)).or. &
           (ytra1_tmp(j).lt.0.).or.(ytra1_tmp(j).ge.real(nymin1))) then
        itra1_tmp(j)=-999999999
      endif
    end do

! For boundary conditions, we need fewer particle release heights per column,
! because otherwise it takes too long until enough mass has accumulated to
! release a particle at the boundary (it would take dx/u seconds), leading to
! relatively large position errors of the order of one grid distance.
! It is better to release fewer particles per column, but to do so more often.
! Thus, use on the order of nz starting heights per column.
! We therefore repeat the above to determine fewer starting heights, which are
! used further on in subroutine boundcond_domainfill.f.
!****************************************************************************

    fractus=real(numcolumn)/real(nz)
    write(*,*) 'Total number of particles at model start: ',numpart
    write(*,*) 'Maximum number of particles per column: ',numcolumn
    write(*,*) 'If ',fractus,' <1, better use more particles'
    fractus=sqrt(max(fractus,1.))/2.
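! With this choice the per-column particle number below becomes roughly
! ncolumn/fractus ~ 2*sqrt(numcolumn*nz) for the fullest column, i.e. on
! the order of nz starting heights, as stated above.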

    do jy=ny_sn(1),ny_sn(2)      ! loop over latitudes
      do ix=nx_we(1),nx_we(2)      ! loop over longitudes
        ncolumn=nint(0.999/fractus*real(npart(1))*colmass(ix,jy) &
             /colmasstotal)
        if (ncolumn.gt.maxcolumn) stop 'maxcolumn too small'
        if (ncolumn.eq.0) goto 80

! Memorize how many particles per column shall be used for all boundaries
! This is used further on in subroutine boundcond_domainfill.f
! Use 2 fields for west/east and south/north boundary
!************************************************************************

        if (ix.eq.nx_we(1)) numcolumn_we(1,jy)=ncolumn
        if (ix.eq.nx_we(2)) numcolumn_we(2,jy)=ncolumn
        if (jy.eq.ny_sn(1)) numcolumn_sn(1,ix)=ncolumn
        if (jy.eq.ny_sn(2)) numcolumn_sn(2,ix)=ncolumn

! Calculate the pressure at the altitudes of the model surfaces, using the
! air density information, which is stored as a 3-d field
!*****************************************************************************

        do kz=1,nz
          pp(kz)=rho(ix,jy,kz,1)*r_air*tt(ix,jy,kz,1)
        end do

! Determine the reference starting altitudes
!*******************************************

        deltacol=(pp(1)-pp(nz))/real(ncolumn)
        pnew=pp(1)+deltacol/2.
        do j=1,ncolumn
          pnew=pnew-deltacol
          do kz=1,nz-1
            if ((pp(kz).ge.pnew).and.(pp(kz+1).lt.pnew)) then
              dz1=pp(kz)-pnew
              dz2=pnew-pp(kz+1)
              dz=1./(dz1+dz2)
              zposition=(height(kz)*dz2+height(kz+1)*dz1)*dz
              if (zposition.gt.height(nz)-0.5) zposition=height(nz)-0.5

! Memorize the vertical positions where particles are introduced
! This is used further on in subroutine boundcond_domainfill.f
!***********************************************************

              if (ix.eq.nx_we(1)) zcolumn_we(1,jy,j)=zposition
              if (ix.eq.nx_we(2)) zcolumn_we(2,jy,j)=zposition
              if (jy.eq.ny_sn(1)) zcolumn_sn(1,ix,j)=zposition
              if (jy.eq.ny_sn(2)) zcolumn_sn(2,ix,j)=zposition

! Initialize the mass that has accumulated at the boundary to zero
! (south/north fields are indexed by longitude ix, like zcolumn_sn above)
!*********************************************************

              acc_mass_we(1,jy,j)=0.
              acc_mass_we(2,jy,j)=0.
              acc_mass_sn(1,ix,j)=0.
              acc_mass_sn(2,ix,j)=0.
            endif
          end do
        end do
80      continue
      end do
    end do

! If particles shall be read in to continue an existing run,
! then the accumulated masses at the domain boundaries must be read in, too.
! This overrides any previous calculations.
!***************************************************************************

! eso TODO: only needed for root process
    if (ipin.eq.1) then
      open(unitboundcond,file=path(2)(1:length(2))//'boundcond.bin', &
           form='unformatted')
      read(unitboundcond) numcolumn_we,numcolumn_sn, &
           zcolumn_we,zcolumn_sn,acc_mass_we,acc_mass_sn
      close(unitboundcond)
    endif

! Per-process particle count, rounded up so that all npart(1) particles fit
    numpart = npart(1)/mp_partgroup_np
    if (mod(npart(1),mp_partgroup_np).ne.0) numpart=numpart+1
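! Example (ceiling division): npart(1)=1000003 distributed over
! mp_partgroup_np=4 processes gives numpart=250001 slots per process,
! 4*250001 >= 1000003 in total.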

  else ! Allocate dummy arrays for receiving processes
    allocate(itra1_tmp(nullsize),npoint_tmp(nullsize),nclass_tmp(nullsize),&
         & idt_tmp(nullsize),itramem_tmp(nullsize),itrasplit_tmp(nullsize),&
         & xtra1_tmp(nullsize),ytra1_tmp(nullsize),ztra1_tmp(nullsize),&
         & xmass1_tmp(nullsize, nullsize))

  end if ! end if (lroot)


! Distribute particles to the other processes (numpart is 'per-process', not total)
  call MPI_Bcast(numpart, 1, MPI_INTEGER, id_root, mp_comm_used, mp_ierr)
! eso TODO: xmassperparticle: not necessary to send
  call MPI_Bcast(xmassperparticle, 1, mp_sp, id_root, mp_comm_used, mp_ierr)
  call mpif_send_part_properties(numpart)

! Deallocate the temporary arrays used for all particles
  deallocate(itra1_tmp,npoint_tmp,nclass_tmp,idt_tmp,itramem_tmp,&
         & itrasplit_tmp,xtra1_tmp,ytra1_tmp,ztra1_tmp,xmass1_tmp)


end subroutine init_domainfill