source: flexpart.git/src/init_domainfill_mpi.f90 @ 3481cc1

Last change on this file: 3481cc1, checked in by Ignacio Pisso <ip@…>, 4 years ago

move license from headers to a different file

subroutine init_domainfill
!
!*****************************************************************************
!                                                                            *
! Initializes particles equally distributed over the first release location  *
! specified in file RELEASES. This box is assumed to be the domain for doing *
! domain-filling trajectory calculations.                                    *
! All particles carry the same amount of mass, which altogether comprises the*
! mass of air within the box.                                                *
!                                                                            *
!     Author: A. Stohl                                                       *
!                                                                            *
!     15 October 2002                                                        *
!                                                                            *
!  CHANGES                                                                   *
!    12/2014 eso: MPI version                                                *
!*****************************************************************************
!                                                                            *
! Variables:                                                                 *
!                                                                            *
! numparticlecount    consecutively counts the number of particles released  *
! nx_we(2)       grid indices for western and eastern boundary of domain-    *
!                filling trajectory calculations                             *
! ny_sn(2)       grid indices for southern and northern boundary of domain-  *
!                filling trajectory calculations                             *
!                                                                            *
!*****************************************************************************
! MPI version:
!
!   -Root process allocates temporary arrays holding properties for
!    all particles in the simulation
!   -An index array is used to assign the 1st particle to the 1st process,
!    the 2nd particle to the 2nd process and so on, so that they are evenly
!    distributed geographically
!   -Initialization for domain-filling is done as in the serial code
!   -Root process distributes particles evenly to the other processes
!
!*****************************************************************************

  use point_mod
  use par_mod
  use com_mod
  use random_mod, only: ran1
  use mpi_mod

  implicit none

! ncolumn_mpi,numparttot_mpi        ncolumn,numparttot per process
  integer :: j,ix,jy,kz,ncolumn,numparttot,ncolumn_mpi,numparttot_mpi, arr_size
  real :: gridarea(0:nymax-1),pp(nzmax),ylat,ylatp,ylatm,hzone
  real :: cosfactm,cosfactp,deltacol,dz1,dz2,dz,pnew,fractus
  real,parameter :: pih=pi/180.
  real :: colmass(0:nxmax-1,0:nymax-1),colmasstotal,zposition

  integer :: ixm,ixp,jym,jyp,indzm,indzp,in,indzh,i,jj
  real :: pvpart,ddx,ddy,rddx,rddy,p1,p2,p3,p4,y1(2)

  integer :: idummy = -11
  integer,allocatable,dimension(:) :: idx ! index array
  integer :: stride
  integer, parameter :: nullsize=0
  logical :: first_call=.true.

! Use different seed for each process ! TODO: not needed anymore
  if (first_call) then
    idummy=idummy+mp_seed
    first_call=.false.
  end if

! Determine the release region (only full grid cells), over which particles
! shall be initialized
! Use 2 fields for west/east and south/north boundary
!**************************************************************************

  nx_we(1)=max(int(xpoint1(1)),0)
  nx_we(2)=min((int(xpoint2(1))+1),nxmin1)
  ny_sn(1)=max(int(ypoint1(1)),0)
  ny_sn(2)=min((int(ypoint2(1))+1),nymin1)

! For global simulations (both global wind data and global domain-filling),
! set a switch, such that no boundary conditions are used
!**************************************************************************
  if (xglobal.and.sglobal.and.nglobal) then
    if ((nx_we(1).eq.0).and.(nx_we(2).eq.nxmin1).and. &
         (ny_sn(1).eq.0).and.(ny_sn(2).eq.nymin1)) then
      gdomainfill=.true.
    else
      gdomainfill=.false.
    endif
  endif

! Exit here if resuming a run from particle dump
!***********************************************
  if (gdomainfill.and.ipin.ne.0) return

! Do not release particles twice (i.e., not in both the leftmost and the
! rightmost grid cell) for a global domain
!*****************************************************************************
  if (xglobal) nx_we(2)=min(nx_we(2),nx-2)


! This section only done by the root process
!*******************************************
  if (lroot) then
! Arrays for particles to be released. Add a small number to npart(1) in case of
! round-off errors
    arr_size = npart(1) + mp_np
    allocate(itra1_tmp(arr_size),npoint_tmp(arr_size),nclass_tmp(arr_size),&
         & idt_tmp(arr_size),itramem_tmp(arr_size),itrasplit_tmp(arr_size),&
         & xtra1_tmp(arr_size),ytra1_tmp(arr_size),ztra1_tmp(arr_size),&
         & xmass1_tmp(arr_size, maxspec))

! Index array for particles. This is to avoid having particles
! near edges of domain all on one process.
!****************************************************************************
    allocate(idx(npart(1)))
    stride = npart(1)/mp_partgroup_np

    jj=0
    do j=1, stride
      do i=0, mp_partgroup_np-1
        jj = jj+1
        if (jj.gt.npart(1)) exit
        idx(jj) = i*stride+j
      end do
    end do

! Add extra indices if npart(1) not evenly divisible by number of processes
    do i=1, mod(npart(1),mp_partgroup_np)
      jj = jj+1
      if (jj.gt.npart(1)) exit
      idx(jj) = jj
    end do
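
! Example of the resulting layout: with npart(1)=9 and mp_partgroup_np=3,
! stride=3 and the loops above fill
!   idx = (/ 1, 4, 7, 2, 5, 8, 3, 6, 9 /)
! i.e. geographic particle k is stored at array slot idx(k), so slots 1-3
! (later sent to the first process) hold particles 1, 4 and 7: each process
! receives every 3rd particle instead of one geographically compact block.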

! Initialize all particles as non-existent
    itra1_tmp(:)=-999999999

! Calculate area of grid cell with formula M=2*pi*R*h*dx/360,
! see Netz, Formeln der Mathematik, 5th edition (1983), p.90
!************************************************************

    do jy=ny_sn(1),ny_sn(2)      ! loop about latitudes
      ylat=ylat0+real(jy)*dy
      ylatp=ylat+0.5*dy
      ylatm=ylat-0.5*dy
      if ((ylatm.lt.0).and.(ylatp.gt.0.)) then
        hzone=1./dyconst
      else
        cosfactp=cos(ylatp*pih)*r_earth
        cosfactm=cos(ylatm*pih)*r_earth
        if (cosfactp.lt.cosfactm) then
          hzone=sqrt(r_earth**2-cosfactp**2)- &
               sqrt(r_earth**2-cosfactm**2)
        else
          hzone=sqrt(r_earth**2-cosfactm**2)- &
               sqrt(r_earth**2-cosfactp**2)
        endif
      endif
      gridarea(jy)=2.*pi*r_earth*hzone*dx/360.
    end do
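
! By the spherical zone formula, the area between two parallels depends only
! on the zone height h along the polar axis. Since
! sqrt(r_earth**2-(r_earth*cos(phi))**2)=r_earth*|sin(phi)|, the branches
! above amount to hzone=r_earth*|sin(ylatp)-sin(ylatm)| for bands not
! crossing the equator; for the band containing the equator, 1./dyconst is
! the small-angle approximation r_earth*dy*pih of the same quantity. The
! factor dx/360. reduces the full zonal band to a single grid cell.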

! Do the same for the south pole

    if (sglobal) then
      ylat=ylat0
      ylatp=ylat+0.5*dy
      ylatm=ylat
      cosfactm=0.
      cosfactp=cos(ylatp*pih)*r_earth
      hzone=sqrt(r_earth**2-cosfactm**2)- &
           sqrt(r_earth**2-cosfactp**2)
      gridarea(0)=2.*pi*r_earth*hzone*dx/360.
    endif

! Do the same for the north pole

    if (nglobal) then
      ylat=ylat0+real(nymin1)*dy
      ylatp=ylat
      ylatm=ylat-0.5*dy
      cosfactp=0.
      cosfactm=cos(ylatm*pih)*r_earth
      hzone=sqrt(r_earth**2-cosfactp**2)- &
           sqrt(r_earth**2-cosfactm**2)
      gridarea(nymin1)=2.*pi*r_earth*hzone*dx/360.
    endif


! Calculate total mass of each grid column and of the whole atmosphere
!*********************************************************************

    colmasstotal=0.
    do jy=ny_sn(1),ny_sn(2)          ! loop about latitudes
      do ix=nx_we(1),nx_we(2)      ! loop about longitudes
        pp(1)=rho(ix,jy,1,1)*r_air*tt(ix,jy,1,1)
        pp(nz)=rho(ix,jy,nz,1)*r_air*tt(ix,jy,nz,1)
        colmass(ix,jy)=(pp(1)-pp(nz))/ga*gridarea(jy)
        colmasstotal=colmasstotal+colmass(ix,jy)
      end do
    end do
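
! Hydrostatic balance gives the air mass between pressures pp(1) and pp(nz)
! in a column of area A as m=(pp(1)-pp(nz))/ga*A. For example, pp(1)=1000 hPa,
! pp(nz)=10 hPa and gridarea=1.e10 m**2 yield roughly
! (9.9e4 Pa / 9.81 m s-2)*1.e10 m**2 ~ 1.e14 kg of air in the column.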

    write(*,*) 'Atm. mass: ',colmasstotal


    if (ipin.eq.0) numpart=0

! Determine the particle positions
!*********************************

    numparttot=0
    numcolumn=0
    do jy=ny_sn(1),ny_sn(2)      ! loop about latitudes
      ylat=ylat0+real(jy)*dy
      do ix=nx_we(1),nx_we(2)      ! loop about longitudes
        ncolumn=nint(0.999*real(npart(1))*colmass(ix,jy)/ &
             colmasstotal)
        if (ncolumn.eq.0) goto 30
        if (ncolumn.gt.numcolumn) numcolumn=ncolumn

! Calculate pressure at the altitudes of model surfaces, using the air density
! information, which is stored as a 3-d field
!*****************************************************************************

        do kz=1,nz
          pp(kz)=rho(ix,jy,kz,1)*r_air*tt(ix,jy,kz,1)
        end do
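
! This is the ideal gas law p=rho*r_air*tt; e.g. rho=1.225 kg/m3 and
! tt=288.15 K give p=1.225*287.05*288.15 ~ 101325 Pa, standard sea-level
! pressure.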


        deltacol=(pp(1)-pp(nz))/real(ncolumn)
        pnew=pp(1)+deltacol/2.
        jj=0
        do j=1,ncolumn
          jj=jj+1


! For columns with many particles (i.e. around the equator), distribute
! the particles equally; for columns with few particles (i.e. around the
! poles), distribute the particles randomly
!***********************************************************************


          if (ncolumn.gt.20) then
            pnew=pnew-deltacol
          else
            pnew=pp(1)-ran1(idummy)*(pp(1)-pp(nz))
          endif
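
! In the deterministic branch the j-th particle is placed at pressure
! pp(1)-(real(j)-0.5)*deltacol, i.e. at the centres of ncolumn equal
! pressure slabs; equal pressure increments correspond to equal air mass
! per particle.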

          do kz=1,nz-1
            if ((pp(kz).ge.pnew).and.(pp(kz+1).lt.pnew)) then
              dz1=pp(kz)-pnew
              dz2=pnew-pp(kz+1)
              dz=1./(dz1+dz2)

! Assign particle position
!*************************
! Do the following steps only if particles are not read in from previous model run
!*****************************************************************************
              if (ipin.eq.0) then
                xtra1_tmp(idx(numpart+jj))=real(ix)-0.5+ran1(idummy)
                if (ix.eq.0) xtra1_tmp(idx(numpart+jj))=ran1(idummy)
                if (ix.eq.nxmin1) xtra1_tmp(idx(numpart+jj))= &
                     real(nxmin1)-ran1(idummy)
                ytra1_tmp(idx(numpart+jj))=real(jy)-0.5+ran1(idummy)
                ztra1_tmp(idx(numpart+jj))=(height(kz)*dz2+height(kz+1)*dz1)*dz
                if (ztra1_tmp(idx(numpart+jj)).gt.height(nz)-0.5) &
                     ztra1_tmp(idx(numpart+jj))=height(nz)-0.5


! Interpolate PV to the particle position
!****************************************
                ixm=int(xtra1_tmp(idx(numpart+jj)))
                jym=int(ytra1_tmp(idx(numpart+jj)))
                ixp=ixm+1
                jyp=jym+1
                ddx=xtra1_tmp(idx(numpart+jj))-real(ixm)
                ddy=ytra1_tmp(idx(numpart+jj))-real(jym)
                rddx=1.-ddx
                rddy=1.-ddy
                p1=rddx*rddy
                p2=ddx*rddy
                p3=rddx*ddy
                p4=ddx*ddy
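
! p1..p4 are the standard bilinear interpolation weights of the four
! surrounding grid points; they are non-negative and sum to
! (rddx+ddx)*(rddy+ddy)=1, so a constant field is reproduced exactly.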
                do i=2,nz
                  if (height(i).gt.ztra1_tmp(idx(numpart+jj))) then
                    indzm=i-1
                    indzp=i
                    goto 6
                  endif
                end do
6               continue
                dz1=ztra1_tmp(idx(numpart+jj))-height(indzm)
                dz2=height(indzp)-ztra1_tmp(idx(numpart+jj))
                dz=1./(dz1+dz2)
                do in=1,2
                  indzh=indzm+in-1
                  y1(in)=p1*pv(ixm,jym,indzh,1) &
                       +p2*pv(ixp,jym,indzh,1) &
                       +p3*pv(ixm,jyp,indzh,1) &
                       +p4*pv(ixp,jyp,indzh,1)
                end do
                pvpart=(dz2*y1(1)+dz1*y1(2))*dz
                if (ylat.lt.0.) pvpart=-1.*pvpart


! For domain-filling option 2 (stratospheric O3), do the rest only in the stratosphere
!*****************************************************************************

                if (((ztra1_tmp(idx(numpart+jj)).gt.3000.).and. &
                     (pvpart.gt.pvcrit)).or.(mdomainfill.eq.1)) then

! Assign certain properties to the particle
!******************************************
                  nclass_tmp(idx(numpart+jj))=min(int(ran1(idummy)* &
                       real(nclassunc))+1,nclassunc)
                  numparticlecount=numparticlecount+1
                  npoint_tmp(idx(numpart+jj))=numparticlecount
                  idt_tmp(idx(numpart+jj))=mintime
                  itra1_tmp(idx(numpart+jj))=0
                  itramem_tmp(idx(numpart+jj))=0
                  itrasplit_tmp(idx(numpart+jj))=itra1_tmp(idx(numpart+jj))+ldirect* &
                       itsplit
                  xmass1_tmp(idx(numpart+jj),1)=colmass(ix,jy)/real(ncolumn)
                  if (mdomainfill.eq.2) xmass1_tmp(idx(numpart+jj),1)= &
                       xmass1_tmp(idx(numpart+jj),1)*pvpart*48./29.*ozonescale/10.**9
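
! The mdomainfill=2 branch converts the air mass to an ozone mass: the O3
! volume mixing ratio is taken proportional to PV (pvpart*ozonescale, in
! ppbv, hence the factor 1.e-9), and 48./29. is the ratio of the molar
! masses of ozone and dry air, converting volume to mass mixing ratio.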
                else
                  jj=jj-1
                endif
              endif
            endif
          end do
        end do
        numparttot=numparttot+ncolumn
        if (ipin.eq.0) numpart=numpart+jj
30      continue
      end do
    end do


! Check whether numpart is really smaller than maxpart
!*****************************************************

! ESO :TODO: this warning needs to be moved further up, else the
! out-of-bounds access happens before it is printed
    if (numpart.gt.maxpart) then
      write(*,*) 'numpart too large: change source in init_atm_mass.f'
      write(*,*) 'numpart: ',numpart,' maxpart: ',maxpart
    endif


    xmassperparticle=colmasstotal/real(numparttot)


! Make sure that all particles are within domain
!***********************************************

    do j=1,numpart
      if ((xtra1_tmp(j).lt.0.).or.(xtra1_tmp(j).ge.real(nxmin1)).or. &
           (ytra1_tmp(j).lt.0.).or.(ytra1_tmp(j).ge.real(nymin1))) then
        itra1_tmp(j)=-999999999
      endif
    end do




! For boundary conditions, we need fewer particle release heights per column,
! because otherwise it takes too long until enough mass has accumulated to
! release a particle at the boundary (would take dx/u seconds), leading to
! relatively large position errors of the order of one grid distance.
! It's better to release fewer particles per column, but to do so more often.
! Thus, use on the order of nz starting heights per column.
! We therefore repeat the above to determine fewer starting heights, which
! are used further on in subroutine boundcond_domainfill.f.
!****************************************************************************

    fractus=real(numcolumn)/real(nz)
    write(*,*) 'Total number of particles at model start: ',numpart
    write(*,*) 'Maximum number of particles per column: ',numcolumn
    write(*,*) 'If ',fractus,' <1, better use more particles'
    fractus=sqrt(max(fractus,1.))/2.
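
! Example: if the fullest column received numcolumn=1600 particles and
! nz=100, then fractus=16. before the rescaling and sqrt(16.)/2.=2. after,
! so the boundary columns below use about half as many starting heights as
! the interior initialization above.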

    do jy=ny_sn(1),ny_sn(2)      ! loop about latitudes
      do ix=nx_we(1),nx_we(2)      ! loop about longitudes
        ncolumn=nint(0.999/fractus*real(npart(1))*colmass(ix,jy) &
             /colmasstotal)
        if (ncolumn.gt.maxcolumn) stop 'maxcolumn too small'
        if (ncolumn.eq.0) goto 80


! Memorize how many particles per column shall be used for all boundaries
! This is further used in subroutine boundcond_domainfill.f
! Use 2 fields for west/east and south/north boundary
!************************************************************************

        if (ix.eq.nx_we(1)) numcolumn_we(1,jy)=ncolumn
        if (ix.eq.nx_we(2)) numcolumn_we(2,jy)=ncolumn
        if (jy.eq.ny_sn(1)) numcolumn_sn(1,ix)=ncolumn
        if (jy.eq.ny_sn(2)) numcolumn_sn(2,ix)=ncolumn

! Calculate pressure at the altitudes of model surfaces, using the air density
! information, which is stored as a 3-d field
!*****************************************************************************

        do kz=1,nz
          pp(kz)=rho(ix,jy,kz,1)*r_air*tt(ix,jy,kz,1)
        end do

! Determine the reference starting altitudes
!*******************************************

        deltacol=(pp(1)-pp(nz))/real(ncolumn)
        pnew=pp(1)+deltacol/2.
        do j=1,ncolumn
          pnew=pnew-deltacol
          do kz=1,nz-1
            if ((pp(kz).ge.pnew).and.(pp(kz+1).lt.pnew)) then
              dz1=pp(kz)-pnew
              dz2=pnew-pp(kz+1)
              dz=1./(dz1+dz2)
              zposition=(height(kz)*dz2+height(kz+1)*dz1)*dz
              if (zposition.gt.height(nz)-0.5) zposition=height(nz)-0.5

! Memorize vertical positions where particles are introduced
! This is further used in subroutine boundcond_domainfill.f
!***********************************************************

              if (ix.eq.nx_we(1)) zcolumn_we(1,jy,j)=zposition
              if (ix.eq.nx_we(2)) zcolumn_we(2,jy,j)=zposition
              if (jy.eq.ny_sn(1)) zcolumn_sn(1,ix,j)=zposition
              if (jy.eq.ny_sn(2)) zcolumn_sn(2,ix,j)=zposition

! Initialize mass that has accumulated at boundary to zero
!*********************************************************

              acc_mass_we(1,jy,j)=0.
              acc_mass_we(2,jy,j)=0.
              acc_mass_sn(1,ix,j)=0.
              acc_mass_sn(2,ix,j)=0.
            endif
          end do
        end do
80      continue
      end do
    end do

! If particles shall be read in to continue an existing run,
! then the accumulated masses at the domain boundaries must be read in, too.
! This overrides any previous calculations.
!***************************************************************************

! eso TODO: only needed for root process
    if ((ipin.eq.1).and.(.not.gdomainfill)) then
      open(unitboundcond,file=path(2)(1:length(2))//'boundcond.bin', &
           form='unformatted')
      read(unitboundcond) numcolumn_we,numcolumn_sn, &
           zcolumn_we,zcolumn_sn,acc_mass_we,acc_mass_sn
      close(unitboundcond)
    endif

    if (ipin.eq.0) then
! Per-process numpart, rounded up (test the remainder before dividing)
      if (mod(numpart,mp_partgroup_np).ne.0) numpart=numpart+mp_partgroup_np
      numpart = numpart/mp_partgroup_np
    end if
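
! Example: a total numpart of 10 with mp_partgroup_np=3 is rounded up to
! ceiling(10/3)=4 particles per process, so the 3 processes together cover
! all 10 particles (with 2 spare slots).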

  else ! Allocate dummy arrays for receiving processes
    if (ipin.eq.0) then
      allocate(itra1_tmp(nullsize),npoint_tmp(nullsize),nclass_tmp(nullsize),&
           & idt_tmp(nullsize),itramem_tmp(nullsize),itrasplit_tmp(nullsize),&
           & xtra1_tmp(nullsize),ytra1_tmp(nullsize),ztra1_tmp(nullsize),&
           & xmass1_tmp(nullsize, nullsize))
    end if

  end if ! end if(lroot)



! Distribute particles to other processes (numpart is 'per-process', not total)
! Only if not restarting from previous run
  if (ipin.eq.0) then
    call MPI_Bcast(numpart, 1, MPI_INTEGER, id_root, mp_comm_used, mp_ierr)
    call mpif_send_part_properties(npart(1)/mp_partgroup_np)

! Deallocate the temporary arrays used for all particles
    deallocate(itra1_tmp,npoint_tmp,nclass_tmp,idt_tmp,itramem_tmp,&
         & itrasplit_tmp,xtra1_tmp,ytra1_tmp,ztra1_tmp,xmass1_tmp)
  end if


end subroutine init_domainfill