source: flexpart.git/src/init_domainfill_mpi.f90

! SPDX-FileCopyrightText: FLEXPART 1998-2019, see flexpart_license.txt
! SPDX-License-Identifier: GPL-3.0-or-later

subroutine init_domainfill
!
!*****************************************************************************
!                                                                            *
! Initializes particles equally distributed over the first release location  *
! specified in file RELEASES. This box is assumed to be the domain for doing *
! domain-filling trajectory calculations.                                    *
! All particles carry the same amount of mass which altogether comprises the *
! mass of air within the box.                                                *
!                                                                            *
!     Author: A. Stohl                                                       *
!                                                                            *
!     15 October 2002                                                        *
!                                                                            *
!  CHANGES                                                                   *
!    12/2014 eso: MPI version                                                *
!*****************************************************************************
!                                                                            *
! Variables:                                                                 *
!                                                                            *
! numparticlecount    consecutively counts the number of particles released  *
! nx_we(2)       grid indices for western and eastern boundary of domain-    *
!                filling trajectory calculations                             *
! ny_sn(2)       grid indices for southern and northern boundary of domain-  *
!                filling trajectory calculations                             *
!                                                                            *
!*****************************************************************************
! MPI version:
!
!   -Root process allocates temporary arrays holding properties for
!    all particles in the simulation
!   -An index array is used to assign 1st particle to 1st process, 2nd particle
!    to 2nd process and so on, so that they are evenly distributed geographically
!   -Initialization for domain-filling is done as in the serial code
!   -Root process distributes particles evenly to other processes
!
!*****************************************************************************

  use point_mod
  use par_mod
  use com_mod
  use random_mod, only: ran1
  use mpi_mod

  implicit none

! ncolumn_mpi,numparttot_mpi        ncolumn,numparttot per process
  integer :: j,ix,jy,kz,ncolumn,numparttot,ncolumn_mpi,numparttot_mpi, arr_size
  real :: gridarea(0:nymax-1),pp(nzmax),ylat,ylatp,ylatm,hzone
  real :: cosfactm,cosfactp,deltacol,dz1,dz2,dz,pnew,fractus
  real,parameter :: pih=pi/180.
  real :: colmass(0:nxmax-1,0:nymax-1),colmasstotal,zposition

  integer :: ixm,ixp,jym,jyp,indzm,indzp,in,indzh,i,jj
  real :: pvpart,ddx,ddy,rddx,rddy,p1,p2,p3,p4,y1(2)

  integer :: idummy = -11
  integer,allocatable,dimension(:) :: idx ! index array
  integer :: stride
  integer, parameter :: nullsize=0
  logical :: first_call=.true.

! Use different seed for each process ! TODO: not needed anymore
  if (first_call) then
    idummy=idummy+mp_seed
    first_call=.false.
  end if

! Determine the release region (only full grid cells), over which particles
! shall be initialized
! Use 2 fields for west/east and south/north boundary
!**************************************************************************

  nx_we(1)=max(int(xpoint1(1)),0)
  nx_we(2)=min((int(xpoint2(1))+1),nxmin1)
  ny_sn(1)=max(int(ypoint1(1)),0)
  ny_sn(2)=min((int(ypoint2(1))+1),nymin1)

! For global simulations (both global wind data and global domain-filling),
! set a switch, such that no boundary conditions are used
!**************************************************************************
  if (xglobal.and.sglobal.and.nglobal) then
    if ((nx_we(1).eq.0).and.(nx_we(2).eq.nxmin1).and. &
         (ny_sn(1).eq.0).and.(ny_sn(2).eq.nymin1)) then
      gdomainfill=.true.
    else
      gdomainfill=.false.
    endif
  endif

! Exit here if resuming a run from particle dump
!***********************************************
  if (gdomainfill.and.ipin.ne.0) return

! Do not release particles twice (i.e., not in both the leftmost and the
! rightmost grid cell) for a global domain
!*****************************************************************************
  if (xglobal) nx_we(2)=min(nx_we(2),nx-2)


! This section only done by the root process
!*******************************************
  if (lroot) then
! Arrays for particles to be released. Add a small number to npart(1) in case of
! round-off errors
    arr_size = npart(1) + mp_np
    allocate(itra1_tmp(arr_size),npoint_tmp(arr_size),nclass_tmp(arr_size),&
         & idt_tmp(arr_size),itramem_tmp(arr_size),itrasplit_tmp(arr_size),&
         & xtra1_tmp(arr_size),ytra1_tmp(arr_size),ztra1_tmp(arr_size),&
         & xmass1_tmp(arr_size, maxspec))

! Index array for particles. This is to avoid having particles
! near edges of domain all on one process.
!****************************************************************************
    allocate(idx(npart(1)))
    stride = npart(1)/mp_partgroup_np

    jj=0
    do j=1, stride
      do i=0, mp_partgroup_np-1
        jj = jj+1
        if (jj.gt.npart(1)) exit
        idx(jj) = i*stride+j
      end do
    end do

! Add extra indices if npart(1) not evenly divisible by number of processes
    do i=1, mod(npart(1),mp_partgroup_np)
      jj = jj+1
      if (jj.gt.npart(1)) exit
      idx(jj) = jj
    end do
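
! A worked example of the striping above (hypothetical numbers): with
! npart(1)=10 and mp_partgroup_np=3, stride=3 and the two loops fill
! idx = (/1,4,7,2,5,8,3,6,9,10/), so consecutive particles (which are
! geographically adjacent) end up on different processes.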

! Initialize all particles as non-existent
    itra1_tmp(:)=-999999999

! Calculate area of grid cell with formula M=2*pi*R*h*dx/360,
! see Netz, Formeln der Mathematik, 5th edition (1983), p. 90
!************************************************************

    do jy=ny_sn(1),ny_sn(2)      ! loop over latitudes
      ylat=ylat0+real(jy)*dy
      ylatp=ylat+0.5*dy
      ylatm=ylat-0.5*dy
      if ((ylatm.lt.0).and.(ylatp.gt.0.)) then
        hzone=1./dyconst
      else
        cosfactp=cos(ylatp*pih)*r_earth
        cosfactm=cos(ylatm*pih)*r_earth
        if (cosfactp.lt.cosfactm) then
          hzone=sqrt(r_earth**2-cosfactp**2)- &
               sqrt(r_earth**2-cosfactm**2)
        else
          hzone=sqrt(r_earth**2-cosfactm**2)- &
               sqrt(r_earth**2-cosfactp**2)
        endif
      endif
      gridarea(jy)=2.*pi*r_earth*hzone*dx/360.
    end do
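
! Note: sqrt(r_earth**2-(r_earth*cos(lat))**2) = r_earth*abs(sin(lat)), so in
! the else branch above hzone = r_earth*abs(sin(ylatp)-sin(ylatm)) and thus
! gridarea(jy) = 2.*pi*r_earth**2*abs(sin(ylatp)-sin(ylatm))*dx/360.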

! Do the same for the south pole

    if (sglobal) then
      ylat=ylat0
      ylatp=ylat+0.5*dy
      ylatm=ylat
      cosfactm=0.
      cosfactp=cos(ylatp*pih)*r_earth
      hzone=sqrt(r_earth**2-cosfactm**2)- &
           sqrt(r_earth**2-cosfactp**2)
      gridarea(0)=2.*pi*r_earth*hzone*dx/360.
    endif

! Do the same for the north pole

    if (nglobal) then
      ylat=ylat0+real(nymin1)*dy
      ylatp=ylat
      ylatm=ylat-0.5*dy
      cosfactp=0.
      cosfactm=cos(ylatm*pih)*r_earth
      hzone=sqrt(r_earth**2-cosfactp**2)- &
           sqrt(r_earth**2-cosfactm**2)
      gridarea(nymin1)=2.*pi*r_earth*hzone*dx/360.
    endif


! Calculate total mass of each grid column and of the whole atmosphere
!*********************************************************************

    colmasstotal=0.
    do jy=ny_sn(1),ny_sn(2)          ! loop over latitudes
      do ix=nx_we(1),nx_we(2)      ! loop over longitudes
        pp(1)=rho(ix,jy,1,1)*r_air*tt(ix,jy,1,1)
        pp(nz)=rho(ix,jy,nz,1)*r_air*tt(ix,jy,nz,1)
        colmass(ix,jy)=(pp(1)-pp(nz))/ga*gridarea(jy)
        colmasstotal=colmasstotal+colmass(ix,jy)
      end do
    end do
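
! Hydrostatic balance (dp = -rho*g*dz) implies that the mass per unit area of
! a column is (p_bottom-p_top)/ga; multiplying by the cell area gives the
! column mass computed above.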

    write(*,*) 'Atm. mass: ',colmasstotal


    if (ipin.eq.0) numpart=0

! Determine the particle positions
!*********************************

    numparttot=0
    numcolumn=0
    do jy=ny_sn(1),ny_sn(2)      ! loop over latitudes
      ylat=ylat0+real(jy)*dy
      do ix=nx_we(1),nx_we(2)      ! loop over longitudes
        ncolumn=nint(0.999*real(npart(1))*colmass(ix,jy)/ &
             colmasstotal)
        if (ncolumn.eq.0) goto 30
        if (ncolumn.gt.numcolumn) numcolumn=ncolumn

! Calculate pressure at the altitudes of model surfaces, using the air density
! information, which is stored as a 3-d field
!*****************************************************************************

        do kz=1,nz
          pp(kz)=rho(ix,jy,kz,1)*r_air*tt(ix,jy,kz,1)
        end do
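
! The loop above applies the ideal gas law, p = rho*r_air*T, to obtain the
! pressure at each model level from the stored density and temperature.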


        deltacol=(pp(1)-pp(nz))/real(ncolumn)
        pnew=pp(1)+deltacol/2.
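
! deltacol splits the column into ncolumn slabs of equal pressure thickness;
! by hydrostatics each slab contains the same air mass, so each particle
! represents the same mass of air, colmass(ix,jy)/real(ncolumn).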
        jj=0
        do j=1,ncolumn
          jj=jj+1


! For columns with many particles (i.e. around the equator), distribute
! the particles equally; for columns with few particles (i.e. around the
! poles), distribute the particles randomly
!***********************************************************************


          if (ncolumn.gt.20) then
            pnew=pnew-deltacol
          else
            pnew=pp(1)-ran1(idummy)*(pp(1)-pp(nz))
          endif

          do kz=1,nz-1
            if ((pp(kz).ge.pnew).and.(pp(kz+1).lt.pnew)) then
              dz1=pp(kz)-pnew
              dz2=pnew-pp(kz+1)
              dz=1./(dz1+dz2)

! Assign particle position
!*************************
! Do the following steps only if particles are not read in from a previous model run
!*****************************************************************************
              if (ipin.eq.0) then
                xtra1_tmp(idx(numpart+jj))=real(ix)-0.5+ran1(idummy)
                if (ix.eq.0) xtra1_tmp(idx(numpart+jj))=ran1(idummy)
                if (ix.eq.nxmin1) xtra1_tmp(idx(numpart+jj))= &
                     real(nxmin1)-ran1(idummy)
                ytra1_tmp(idx(numpart+jj))=real(jy)-0.5+ran1(idummy)
                ztra1_tmp(idx(numpart+jj))=(height(kz)*dz2+height(kz+1)*dz1)*dz
                if (ztra1_tmp(idx(numpart+jj)).gt.height(nz)-0.5) &
                     ztra1_tmp(idx(numpart+jj))=height(nz)-0.5


! Interpolate PV to the particle position
!****************************************
                ixm=int(xtra1_tmp(idx(numpart+jj)))
                jym=int(ytra1_tmp(idx(numpart+jj)))
                ixp=ixm+1
                jyp=jym+1
                ddx=xtra1_tmp(idx(numpart+jj))-real(ixm)
                ddy=ytra1_tmp(idx(numpart+jj))-real(jym)
                rddx=1.-ddx
                rddy=1.-ddy
                p1=rddx*rddy
                p2=ddx*rddy
                p3=rddx*ddy
                p4=ddx*ddy
                do i=2,nz
                  if (height(i).gt.ztra1_tmp(idx(numpart+jj))) then
                    indzm=i-1
                    indzp=i
                    goto 6
                  endif
                end do
6               continue
                dz1=ztra1_tmp(idx(numpart+jj))-height(indzm)
                dz2=height(indzp)-ztra1_tmp(idx(numpart+jj))
                dz=1./(dz1+dz2)
                do in=1,2
                  indzh=indzm+in-1
                  y1(in)=p1*pv(ixm,jym,indzh,1) &
                       +p2*pv(ixp,jym,indzh,1) &
                       +p3*pv(ixm,jyp,indzh,1) &
                       +p4*pv(ixp,jyp,indzh,1)
                end do
                pvpart=(dz2*y1(1)+dz1*y1(2))*dz
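
! PV is negative in the Southern Hemisphere, so its magnitude is used for
! the pvcrit threshold test below.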
                if (ylat.lt.0.) pvpart=-1.*pvpart


! For domain-filling option 2 (stratospheric O3), do the rest only in the stratosphere
!*****************************************************************************

                if (((ztra1_tmp(idx(numpart+jj)).gt.3000.).and. &
                     (pvpart.gt.pvcrit)).or.(mdomainfill.eq.1)) then

! Assign certain properties to the particle
!******************************************
                  nclass_tmp(idx(numpart+jj))=min(int(ran1(idummy)* &
                       real(nclassunc))+1,nclassunc)
                  numparticlecount=numparticlecount+1
                  npoint_tmp(idx(numpart+jj))=numparticlecount
                  idt_tmp(idx(numpart+jj))=mintime
                  itra1_tmp(idx(numpart+jj))=0
                  itramem_tmp(idx(numpart+jj))=0
                  itrasplit_tmp(idx(numpart+jj))=itra1_tmp(idx(numpart+jj))+ldirect* &
                       itsplit
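
! For mdomainfill=2 the air mass below is rescaled to an ozone mass via the
! stratospheric O3/PV relation: 48./29. is the molar mass ratio of O3 to air,
! ozonescale is (presumably) the O3 mixing ratio per PV unit in ppbv, and
! 1/10**9 converts from ppbv to an absolute mixing ratio.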
                  xmass1_tmp(idx(numpart+jj),1)=colmass(ix,jy)/real(ncolumn)
                  if (mdomainfill.eq.2) xmass1_tmp(idx(numpart+jj),1)= &
                       xmass1_tmp(idx(numpart+jj),1)*pvpart*48./29.*ozonescale/10.**9
                else
                  jj=jj-1
                endif
              endif
            endif
          end do
        end do
        numparttot=numparttot+ncolumn
        if (ipin.eq.0) numpart=numpart+jj
30      continue
      end do
    end do


! Check whether numpart is really smaller than maxpart
!*****************************************************

! ESO :TODO: this warning needs to be moved further up, otherwise an out-of-bounds error may occur earlier
    if (numpart.gt.maxpart) then
      write(*,*) 'numpart too large: change source in init_atm_mass.f'
      write(*,*) 'numpart: ',numpart,' maxpart: ',maxpart
    endif


    xmassperparticle=colmasstotal/real(numparttot)


! Make sure that all particles are within domain
!***********************************************

    do j=1,numpart
      if ((xtra1_tmp(j).lt.0.).or.(xtra1_tmp(j).ge.real(nxmin1)).or. &
           (ytra1_tmp(j).lt.0.).or.(ytra1_tmp(j).ge.real(nymin1))) then
        itra1_tmp(j)=-999999999
      endif
    end do




! For boundary conditions, we need fewer particle release heights per column,
! because otherwise it takes too long until enough mass has accumulated to
! release a particle at the boundary (would take dx/u seconds), leading to
! relatively large position errors of the order of one grid distance.
! It's better to release fewer particles per column, but to do so more often.
! Thus, use on the order of nz starting heights per column.
! We thus repeat the above to determine fewer starting heights that are
! used further on in subroutine boundcond_domainfill.f.
!****************************************************************************

    fractus=real(numcolumn)/real(nz)
    write(*,*) 'Total number of particles at model start: ',numpart
    write(*,*) 'Maximum number of particles per column: ',numcolumn
    write(*,*) 'If ',fractus,' <1, better use more particles'
    fractus=sqrt(max(fractus,1.))/2.
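
! fractus rescales the per-column particle numbers used for the boundaries:
! dividing by fractus (= sqrt(numcolumn/nz)/2 when numcolumn exceeds nz)
! reduces the number of release heights in the densest columns toward the
! order of nz, as motivated in the comment above.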

    do jy=ny_sn(1),ny_sn(2)      ! loop over latitudes
      do ix=nx_we(1),nx_we(2)      ! loop over longitudes
        ncolumn=nint(0.999/fractus*real(npart(1))*colmass(ix,jy) &
             /colmasstotal)
        if (ncolumn.gt.maxcolumn) stop 'maxcolumn too small'
        if (ncolumn.eq.0) goto 80


! Memorize how many particles per column shall be used for all boundaries
! This is further used in subroutine boundcond_domainfill.f
! Use 2 fields for west/east and south/north boundary
!************************************************************************

        if (ix.eq.nx_we(1)) numcolumn_we(1,jy)=ncolumn
        if (ix.eq.nx_we(2)) numcolumn_we(2,jy)=ncolumn
        if (jy.eq.ny_sn(1)) numcolumn_sn(1,ix)=ncolumn
        if (jy.eq.ny_sn(2)) numcolumn_sn(2,ix)=ncolumn

! Calculate pressure at the altitudes of model surfaces, using the air density
! information, which is stored as a 3-d field
!*****************************************************************************

        do kz=1,nz
          pp(kz)=rho(ix,jy,kz,1)*r_air*tt(ix,jy,kz,1)
        end do

! Determine the reference starting altitudes
!*******************************************

        deltacol=(pp(1)-pp(nz))/real(ncolumn)
        pnew=pp(1)+deltacol/2.
        do j=1,ncolumn
          pnew=pnew-deltacol
          do kz=1,nz-1
            if ((pp(kz).ge.pnew).and.(pp(kz+1).lt.pnew)) then
              dz1=pp(kz)-pnew
              dz2=pnew-pp(kz+1)
              dz=1./(dz1+dz2)
              zposition=(height(kz)*dz2+height(kz+1)*dz1)*dz
              if (zposition.gt.height(nz)-0.5) zposition=height(nz)-0.5

! Memorize vertical positions where particles are introduced
! This is further used in subroutine boundcond_domainfill.f
!***********************************************************

              if (ix.eq.nx_we(1)) zcolumn_we(1,jy,j)=zposition
              if (ix.eq.nx_we(2)) zcolumn_we(2,jy,j)=zposition
              if (jy.eq.ny_sn(1)) zcolumn_sn(1,ix,j)=zposition
              if (jy.eq.ny_sn(2)) zcolumn_sn(2,ix,j)=zposition

! Initialize mass that has accumulated at boundary to zero
!*********************************************************

              acc_mass_we(1,jy,j)=0.
              acc_mass_we(2,jy,j)=0.
              acc_mass_sn(1,ix,j)=0.
              acc_mass_sn(2,ix,j)=0.
            endif
          end do
        end do
80      continue
      end do
    end do

! If particles shall be read in to continue an existing run,
! then the accumulated masses at the domain boundaries must be read in, too.
! This overrides any previous calculations.
!***************************************************************************

! eso TODO: only needed for root process
    if ((ipin.eq.1).and.(.not.gdomainfill)) then
      open(unitboundcond,file=path(2)(1:length(2))//'boundcond.bin', &
           form='unformatted')
      read(unitboundcond) numcolumn_we,numcolumn_sn, &
           zcolumn_we,zcolumn_sn,acc_mass_we,acc_mass_sn
      close(unitboundcond)
    endif

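! Convert numpart from the total particle count to a (rounded) per-process
! count; it is broadcast to the other processes further below.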
    if (ipin.eq.0) then
      numpart = numpart/mp_partgroup_np
      if (mod(numpart,mp_partgroup_np).ne.0) numpart=numpart+1
    end if

  else ! Allocate dummy arrays for receiving processes
    if (ipin.eq.0) then
      allocate(itra1_tmp(nullsize),npoint_tmp(nullsize),nclass_tmp(nullsize),&
           & idt_tmp(nullsize),itramem_tmp(nullsize),itrasplit_tmp(nullsize),&
           & xtra1_tmp(nullsize),ytra1_tmp(nullsize),ztra1_tmp(nullsize),&
           & xmass1_tmp(nullsize, nullsize))
    end if

  end if ! end if(lroot)



! Distribute particles to other processes (numpart is 'per-process', not total)
! Only if not restarting from a previous run
  if (ipin.eq.0) then
    call MPI_Bcast(numpart, 1, MPI_INTEGER, id_root, mp_comm_used, mp_ierr)
    call mpif_send_part_properties(npart(1)/mp_partgroup_np)

! Deallocate the temporary arrays used for all particles
    deallocate(itra1_tmp,npoint_tmp,nclass_tmp,idt_tmp,itramem_tmp,&
         & itrasplit_tmp,xtra1_tmp,ytra1_tmp,ztra1_tmp,xmass1_tmp)
  end if


end subroutine init_domainfill