Changeset d9f0585 in flexpart.git


Timestamp:
May 8, 2017, 8:34:40 AM
Author:
Sabine <sabine.eckhardt@…>
Branches:
master, 10.4.1_pesei, GFS_025, bugfixes+enhancements, dev, release-10, release-10.4.1, scaling-bug, univie
Children:
6985a98
Parents:
d404d98 (diff), c8fc724 (diff)
Note: this is a merge changeset; the changes displayed below correspond to the merge itself.
Use the (diff) links above to see all the changes relative to each parent.
Message:

Merge branch 'dev' of git.nilu.no:flexpart/flexpart into dev

Files:
30 edited

  • src/FLEXPART.f90

    rdb712a8 → rc8fc724
    6767  call gasdev1(idummy,rannumb(maxrand),rannumb(maxrand-1))
    6868
     69
    6970  ! FLEXPART version string
    7071  flexversion_major = '10' ! Major version number, also used for species file names
    71   flexversion='Version '//trim(flexversion_major)//'.0beta (2015-05-01)'
     72  flexversion='Version '//trim(flexversion_major)//'.1beta (2016-11-02)'
    7273  verbosity=0
    7374
     
    384385  end do
    385386
     387  ! Inform whether output kernel is used or not
     388  !*********************************************
     389  if (lroot) then
     390    if (lnokernel) then
     391      write(*,*) "Concentrations are calculated without using kernel"
     392    else
     393      write(*,*) "Concentrations are calculated using kernel"
     394    end if
     395  end if
     396
     397
    386398  ! Calculate particle trajectories
    387399  !********************************
     
    402414
    403415! NIK 16.02.2005
    404   write(*,*) '**********************************************'
    405   write(*,*) 'Total number of occurences of below-cloud scavenging', tot_blc_count
    406   write(*,*) 'Total number of occurences of in-cloud    scavenging', tot_inc_count
    407   write(*,*) '**********************************************'
    408 
     416  do i=1,nspec
     417    write(*,*) '**********************************************'
     418    write(*,*) 'Scavenging statistics for species ', species(i), ':'
     419    write(*,*) 'Total number of occurences of below-cloud scavenging', &
     420         & tot_blc_count(i)
     421    write(*,*) 'Total number of occurences of in-cloud    scavenging', &
     422         & tot_inc_count(i)
     423    write(*,*) '**********************************************'
     424  end do
     425 
    409426  write(*,*) 'CONGRATULATIONS: YOU HAVE SUCCESSFULLY COMPLETED A FLE&
    410427       &XPART MODEL RUN!'
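
    The hunk above replaces the scalar scavenging counters with arrays dimensioned by species
    and prints one summary block per species at the end of the run. Below is a minimal,
    stand-alone sketch of that per-species bookkeeping; nspec, species, tot_blc_count and
    tot_inc_count mirror the names in the diff, while the species names and counts are
    invented sample values.

      program scavenging_stats_sketch
        implicit none
        integer, parameter :: nspec = 2
        integer, parameter :: i8 = selected_int_kind(18)
        ! Per-species counters; before this changeset these were scalars
        integer(i8) :: tot_blc_count(nspec), tot_inc_count(nspec)
        character(len=10) :: species(nspec)
        integer :: i

        ! Dummy values standing in for counts accumulated in the wet-deposition code
        species       = (/ 'AERO-TRACE', 'NOBLEGAS  ' /)
        tot_blc_count = (/ 12345_i8, 0_i8 /)
        tot_inc_count = (/  2345_i8, 0_i8 /)

        do i = 1, nspec
          write(*,*) '**********************************************'
          write(*,*) 'Scavenging statistics for species ', species(i), ':'
          write(*,*) 'Total number of occurences of below-cloud scavenging', tot_blc_count(i)
          write(*,*) 'Total number of occurences of in-cloud    scavenging', tot_inc_count(i)
          write(*,*) '**********************************************'
        end do
      end program scavenging_stats_sketch
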
  • src/FLEXPART_MPI.f90

    r79abee9 → rc8fc724
    5555
    5656
    57 ! Initialize mpi
    58 !*********************
     57  ! Initialize mpi
     58  !*********************
    5959  call mpif_init
    6060
    6161  if (mp_measure_time) call mpif_mtime('flexpart',0)
    6262
    63 ! Initialize arrays in com_mod
    64 !*****************************
    65   call com_mod_allocate_part(maxpart_mpi)
     63  ! Initialize arrays in com_mod
     64  !*****************************
     65
     66  if(.not.(lmpreader.and.lmp_use_reader)) call com_mod_allocate_part(maxpart_mpi)
     67
    6668
    6769  ! Generate a large number of random numbers
     
    7981  flexversion_major = '10' ! Major version number, also used for species file names
    8082!  flexversion='Ver. 10 Beta MPI (2015-05-01)'
    81   flexversion='Ver. '//trim(flexversion_major)//' Beta MPI (2015-05-01)'
     83  flexversion='Ver. '//trim(flexversion_major)//'.1beta MPI (2016-11-02)'
    8284  verbosity=0
    8385
     
    306308  endif
    307309
    308   do j=1, size(itra1) ! maxpart_mpi
    309     itra1(j)=-999999999
    310   end do
     310  if (.not.(lmpreader.and.lmp_use_reader)) then
     311    do j=1, size(itra1) ! maxpart_mpi
     312      itra1(j)=-999999999
     313    end do
     314  end if
    311315
    312316  ! For continuation of previous run, read in particle positions
     
    318322    endif
    319323    ! readwind process skips this step
    320     if (lmp_use_reader.and..not.lmpreader) call readpartpositions
     324    if (.not.(lmpreader.and.lmp_use_reader)) call readpartpositions
    321325  else
    322326    if (verbosity.gt.0 .and. lroot) then
     
    425429  end do
    426430
     431  ! Inform whether output kernel is used or not
     432  !*********************************************
     433
     434  if (lroot) then
     435    if (lnokernel) then
     436      write(*,*) "Concentrations are calculated without using kernel"
     437    else
     438      write(*,*) "Concentrations are calculated using kernel"
     439    end if
     440  end if
    427441
    428442! Calculate particle trajectories
     
    448462! NIK 16.02.2005
    449463  if (lroot) then
    450     call MPI_Reduce(MPI_IN_PLACE, tot_blc_count, 1, MPI_INTEGER8, MPI_SUM, id_root, &
     464    call MPI_Reduce(MPI_IN_PLACE, tot_blc_count, nspec, MPI_INTEGER8, MPI_SUM, id_root, &
    451465         & mp_comm_used, mp_ierr)
    452     call MPI_Reduce(MPI_IN_PLACE, tot_inc_count, 1, MPI_INTEGER8, MPI_SUM, id_root, &
     466    call MPI_Reduce(MPI_IN_PLACE, tot_inc_count, nspec, MPI_INTEGER8, MPI_SUM, id_root, &
    453467         & mp_comm_used, mp_ierr)
    454468  else
    455469    if (mp_partgroup_pid.ge.0) then ! Skip for readwind process
    456       call MPI_Reduce(tot_blc_count, 0, 1, MPI_INTEGER8, MPI_SUM, id_root, &
     470      call MPI_Reduce(tot_blc_count, 0, nspec, MPI_INTEGER8, MPI_SUM, id_root, &
    457471           & mp_comm_used, mp_ierr)
    458       call MPI_Reduce(tot_inc_count, 0, 1, MPI_INTEGER8, MPI_SUM, id_root, &
     472      call MPI_Reduce(tot_inc_count, 0, nspec, MPI_INTEGER8, MPI_SUM, id_root, &
    459473           & mp_comm_used, mp_ierr)
    460474    end if
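
    Because tot_blc_count and tot_inc_count are now arrays over species, the reductions above
    pass nspec elements instead of 1; the root rank still uses MPI_IN_PLACE so that its own
    local counts are folded into the result without a separate send buffer. A minimal,
    self-contained sketch of that pattern, assuming MPI_COMM_WORLD and generic rank and
    variable names rather than the FLEXPART communicator and modules:

      program reduce_counters_sketch
        use mpi
        implicit none
        integer, parameter :: nspec = 3
        integer, parameter :: i8 = selected_int_kind(18)
        integer, parameter :: id_root = 0
        integer(i8) :: tot_blc_count(nspec), dummy(nspec)
        integer :: ierr, myrank

        call MPI_Init(ierr)
        call MPI_Comm_rank(MPI_COMM_WORLD, myrank, ierr)

        ! Each rank holds its local per-species counts (dummy values here)
        tot_blc_count = int(myrank+1, i8)

        if (myrank == id_root) then
          ! Root reduces in place, so its own counts are included in the sum
          call MPI_Reduce(MPI_IN_PLACE, tot_blc_count, nspec, MPI_INTEGER8, MPI_SUM, &
               id_root, MPI_COMM_WORLD, ierr)
          write(*,*) 'global per-species counts:', tot_blc_count
        else
          ! Non-root ranks only contribute; their receive buffer is not used
          call MPI_Reduce(tot_blc_count, dummy, nspec, MPI_INTEGER8, MPI_SUM, &
               id_root, MPI_COMM_WORLD, ierr)
        end if

        call MPI_Finalize(ierr)
      end program reduce_counters_sketch
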
     
    462476
    463477  if (lroot) then
    464     write(*,*) '**********************************************'
    465     write(*,*) 'Total number of occurences of below-cloud scavenging', tot_blc_count
    466     write(*,*) 'Total number of occurences of in-cloud    scavenging', tot_inc_count
    467     write(*,*) '**********************************************'
     478    do i=1,nspec
     479      write(*,*) '**********************************************'
     480      write(*,*) 'Scavenging statistics for species ', species(i), ':'
     481      write(*,*) 'Total number of occurences of below-cloud scavenging', &
     482           & tot_blc_count(i)
     483      write(*,*) 'Total number of occurences of in-cloud    scavenging', &
     484           & tot_inc_count(i)
     485      write(*,*) '**********************************************'
     486    end do
    468487
    469488    write(*,*) 'CONGRATULATIONS: YOU HAVE SUCCESSFULLY COMPLETED A FLE&
  • src/boundcond_domainfill_mpi.f90

    r0f7835d → r16b61a5
    2121
    2222subroutine boundcond_domainfill(itime,loutend)
    23   !                                  i      i
    24   !*****************************************************************************
    25   !                                                                            *
    26   ! Particles are created by this subroutine continuously throughout the       *
    27   ! simulation at the boundaries of the domain-filling box.                    *
    28   ! All particles carry the same amount of mass which alltogether comprises the*
    29   ! mass of air within the box, which remains (more or less) constant.         *
    30   !                                                                            *
    31   !     Author: A. Stohl                                                       *
    32   !                                                                            *
    33   !     16 October 2002                                                        *
    34   !                                                                            *
    35   !*****************************************************************************
    36   !                                                                            *
    37   ! Variables:                                                                 *
    38   !                                                                            *
    39   ! nx_we(2)       grid indices for western and eastern boundary of domain-    *
    40   !                filling trajectory calculations                             *
    41   ! ny_sn(2)       grid indices for southern and northern boundary of domain-  *
    42   !                filling trajectory calculations                             *
    43   !                                                                            *
    44   !*****************************************************************************
     23!                                  i      i
     24!*****************************************************************************
     25!                                                                            *
     26! Particles are created by this subroutine continuously throughout the       *
     27! simulation at the boundaries of the domain-filling box.                    *
     28! All particles carry the same amount of mass which alltogether comprises the*
     29! mass of air within the box, which remains (more or less) constant.         *
     30!                                                                            *
     31!     Author: A. Stohl                                                       *
     32!                                                                            *
     33!     16 October 2002                                                        *
     34!                                                                            *
     35!*****************************************************************************
     36!                                                                            *
     37! Variables:                                                                 *
     38!                                                                            *
     39! nx_we(2)       grid indices for western and eastern boundary of domain-    *
     40!                filling trajectory calculations                             *
     41! ny_sn(2)       grid indices for southern and northern boundary of domain-  *
     42!                filling trajectory calculations                             *
     43!                                                                            *
     44!*****************************************************************************
     45!  CHANGES                                                                   
     46!    08/2016 eso: MPI version:                                               
     47!
     48!   -Root process release particles and distributes to other processes.
     49!    Temporary arrays are used, also for the non-root (receiving) processes.
     50!   -The scheme can be improved by having all processes report numpart
     51!    (keeping track of how many particles have left the domain), so that
     52!    a proportional amount of new particles can be distributed (however
     53!    we have a separate function called from timemanager that will
     54!    redistribute particles among processes if there are imbalances)     
     55!*****************************************************************************
    4556
    4657  use point_mod
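
    The CHANGES note in the hunk above summarises the new MPI scheme: the root process
    performs the boundary release into temporary arrays, the total number of new particles
    is broadcast, and each process then gets a near-equal share, with the remainder going
    to the lowest ranks (this is what the numrel_mpi loop further below computes). A small
    stand-alone sketch of that split; np and numrel are hypothetical stand-ins for
    mp_partgroup_np and numrel_mpi, and the totals are example numbers only.

      program split_releases_sketch
        implicit none
        integer, parameter :: np = 4              ! stand-in for mp_partgroup_np
        integer, parameter :: numpart_total = 10  ! new particles released by root
        integer :: numrel(0:np-1), i

        ! Equal share for every process, one extra particle for the first
        ! mod(numpart_total, np) ranks -- the same arithmetic as the numrel_mpi loop
        do i = 0, np-1
          numrel(i) = numpart_total/np
          if (i < mod(numpart_total, np)) numrel(i) = numrel(i) + 1
        end do

        write(*,*) 'particles per process:', numrel      ! 3 3 2 2
        write(*,*) 'total distributed    :', sum(numrel) ! 10
      end program split_releases_sketch
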
     
    5566  integer :: itime,in,indz,indzp,i,loutend
    5667  integer :: j,k,ix,jy,m,indzh,indexh,minpart,ipart,mmass
    57   integer :: numactiveparticles
     68  integer :: numactiveparticles, numpart_total, rel_counter
     69  integer,allocatable,dimension(:) ::  numrel_mpi !, numactiveparticles_mpi
    5870
    5971  real :: windl(2),rhol(2)
     
    6678
    6779  integer :: idummy = -11
     80  integer :: mtag
    6881  logical :: first_call=.true.
    69 
    70   ! Use different seed for each process
     82! Sizes of temporary arrays are maxpartfract*maxpart. Increase maxpartfract if
     83! needed. 
     84  real,parameter :: maxpartfract=0.1
     85  integer :: tmp_size = int(maxpartfract*maxpart)
     86
     87! Use different seed for each process
    7188  if (first_call) then
    7289    idummy=idummy+mp_seed
     
    7592
    7693
    77   ! If domain-filling is global, no boundary conditions are needed
    78   !***************************************************************
     94! If domain-filling is global, no boundary conditions are needed
     95!***************************************************************
    7996
    8097  if (gdomainfill) return
     
    8299  accmasst=0.
    83100  numactiveparticles=0
    84 
    85   ! Terminate trajectories that have left the domain, if domain-filling
    86   ! trajectory calculation domain is not global
    87   !********************************************************************
     101! Keep track of active particles on each process
     102  allocate(numrel_mpi(0:mp_partgroup_np-1))
     103! numactiveparticles_mpi(0:mp_partgroup_np-1)
     104
     105! New particles to be released on each process
     106  numrel_mpi(:)=0
     107
     108! Terminate trajectories that have left the domain, if domain-filling
     109! trajectory calculation domain is not global. Done for all processes
     110!********************************************************************
    88111
    89112  do i=1,numpart
     
    98121         numactiveparticles+1
    99122  end do
    100 
    101 
    102   ! Determine auxiliary variables for time interpolation
    103   !*****************************************************
    104 
    105   dt1=real(itime-memtime(1))
    106   dt2=real(memtime(2)-itime)
    107   dtt=1./(dt1+dt2)
    108 
    109   ! Initialize auxiliary variable used to search for vacant storage space
    110   !**********************************************************************
    111 
    112   minpart=1
    113 
    114   !***************************************
    115   ! Western and eastern boundary condition
    116   !***************************************
    117 
    118   ! Loop from south to north
    119   !*************************
    120 
    121   do jy=ny_sn(1),ny_sn(2)
    122 
    123   ! Loop over western (index 1) and eastern (index 2) boundary
    124   !***********************************************************
    125 
    126     do k=1,2
    127 
    128   ! Loop over all release locations in a column
    129   !********************************************
    130 
    131       do j=1,numcolumn_we(k,jy)
    132 
    133   ! Determine, for each release location, the area of the corresponding boundary
    134   !*****************************************************************************
    135 
    136         if (j.eq.1) then
    137           deltaz=(zcolumn_we(k,jy,2)+zcolumn_we(k,jy,1))/2.
    138         else if (j.eq.numcolumn_we(k,jy)) then
    139   !        deltaz=height(nz)-(zcolumn_we(k,jy,j-1)+
    140   !    +        zcolumn_we(k,jy,j))/2.
    141   ! In order to avoid taking a very high column for very many particles,
    142   ! use the deltaz from one particle below instead
    143           deltaz=(zcolumn_we(k,jy,j)-zcolumn_we(k,jy,j-2))/2.
    144         else
    145           deltaz=(zcolumn_we(k,jy,j+1)-zcolumn_we(k,jy,j-1))/2.
    146         endif
    147         if ((jy.eq.ny_sn(1)).or.(jy.eq.ny_sn(2))) then
    148           boundarea=deltaz*111198.5/2.*dy
    149         else
    150           boundarea=deltaz*111198.5*dy
    151         endif
    152 
    153 
    154   ! Interpolate the wind velocity and density to the release location
    155   !******************************************************************
    156 
    157   ! Determine the model level below the release position
    158   !*****************************************************
    159 
    160         do i=2,nz
    161           if (height(i).gt.zcolumn_we(k,jy,j)) then
    162             indz=i-1
    163             indzp=i
    164             goto 6
     123!  numactiveparticles_mpi(mp_partid) = numactiveparticles
     124
     125
     126! Collect number of active particles from all processes
     127  ! call MPI_Allgather(numactiveparticles, 1, MPI_INTEGER, &
     128  !      &numactiveparticles_mpi, 1, MPI_INTEGER, mp_comm_used, mp_ierr)
     129
     130
     131! Total number of new releases
     132  numpart_total = 0
     133
     134
     135! This section only done by root process
     136!***************************************
     137
     138  if (lroot) then
     139
     140! Use separate arrays for newly released particles
     141!*************************************************
     142
     143    allocate(itra1_tmp(tmp_size),npoint_tmp(tmp_size),nclass_tmp(tmp_size),&
     144         & idt_tmp(tmp_size),itramem_tmp(tmp_size),itrasplit_tmp(tmp_size),&
     145         & xtra1_tmp(tmp_size),ytra1_tmp(tmp_size),ztra1_tmp(tmp_size),&
     146         & xmass1_tmp(tmp_size, maxspec))
     147
     148! Initialize all particles as non-existent   
     149    itra1_tmp(:)=-999999999
     150
     151! Determine auxiliary variables for time interpolation
     152!*****************************************************
     153
     154    dt1=real(itime-memtime(1))
     155    dt2=real(memtime(2)-itime)
     156    dtt=1./(dt1+dt2)
     157
     158! Initialize auxiliary variable used to search for vacant storage space
     159!**********************************************************************
     160
     161    minpart=1
     162
     163!***************************************
     164! Western and eastern boundary condition
     165!***************************************
     166
     167! Loop from south to north
     168!*************************
     169
     170    do jy=ny_sn(1),ny_sn(2)
     171
     172! Loop over western (index 1) and eastern (index 2) boundary
     173!***********************************************************
     174
     175      do k=1,2
     176
     177! Loop over all release locations in a column
     178!********************************************
     179
     180        do j=1,numcolumn_we(k,jy)
     181
     182! Determine, for each release location, the area of the corresponding boundary
     183!*****************************************************************************
     184
     185          if (j.eq.1) then
     186            deltaz=(zcolumn_we(k,jy,2)+zcolumn_we(k,jy,1))/2.
     187          else if (j.eq.numcolumn_we(k,jy)) then
     188!        deltaz=height(nz)-(zcolumn_we(k,jy,j-1)+
     189!    +        zcolumn_we(k,jy,j))/2.
     190! In order to avoid taking a very high column for very many particles,
     191! use the deltaz from one particle below instead
     192            deltaz=(zcolumn_we(k,jy,j)-zcolumn_we(k,jy,j-2))/2.
     193          else
     194            deltaz=(zcolumn_we(k,jy,j+1)-zcolumn_we(k,jy,j-1))/2.
    165195          endif
    166         end do
    167 6       continue
    168 
    169   ! Vertical distance to the level below and above current position
    170   !****************************************************************
    171 
    172         dz1=zcolumn_we(k,jy,j)-height(indz)
    173         dz2=height(indzp)-zcolumn_we(k,jy,j)
    174         dz=1./(dz1+dz2)
    175 
    176   ! Vertical and temporal interpolation
    177   !************************************
    178 
    179         do m=1,2
    180           indexh=memind(m)
    181           do in=1,2
    182             indzh=indz+in-1
    183             windl(in)=uu(nx_we(k),jy,indzh,indexh)
    184             rhol(in)=rho(nx_we(k),jy,indzh,indexh)
    185           end do
    186 
    187           windhl(m)=(dz2*windl(1)+dz1*windl(2))*dz
    188           rhohl(m)=(dz2*rhol(1)+dz1*rhol(2))*dz
    189         end do
    190 
    191         windx=(windhl(1)*dt2+windhl(2)*dt1)*dtt
    192         rhox=(rhohl(1)*dt2+rhohl(2)*dt1)*dtt
    193 
    194   ! Calculate mass flux, divided by number of processes
    195   !****************************************************
    196 
    197         fluxofmass=windx*rhox*boundarea*real(lsynctime)/mp_partgroup_np
    198 
    199 
    200   ! If the mass flux is directed into the domain, add it to previous mass fluxes;
    201   ! if it is out of the domain, set accumulated mass flux to zero
    202   !******************************************************************************
    203 
    204         if (k.eq.1) then
    205           if (fluxofmass.ge.0.) then
    206             acc_mass_we(k,jy,j)=acc_mass_we(k,jy,j)+fluxofmass
     196          if ((jy.eq.ny_sn(1)).or.(jy.eq.ny_sn(2))) then
     197            boundarea=deltaz*111198.5/2.*dy
    207198          else
    208             acc_mass_we(k,jy,j)=0.
     199            boundarea=deltaz*111198.5*dy
    209200          endif
    210         else
    211           if (fluxofmass.le.0.) then
    212             acc_mass_we(k,jy,j)=acc_mass_we(k,jy,j)+abs(fluxofmass)
    213           else
    214             acc_mass_we(k,jy,j)=0.
    215           endif
    216         endif
    217         accmasst=accmasst+acc_mass_we(k,jy,j)
    218 
    219   ! If the accumulated mass exceeds half the mass that each particle shall carry,
    220   ! one (or more) particle(s) is (are) released and the accumulated mass is
    221   ! reduced by the mass of this (these) particle(s)
    222   !******************************************************************************
    223 
    224         if (acc_mass_we(k,jy,j).ge.xmassperparticle/2.) then
    225           mmass=int((acc_mass_we(k,jy,j)+xmassperparticle/2.)/ &
    226                xmassperparticle)
    227           acc_mass_we(k,jy,j)=acc_mass_we(k,jy,j)- &
    228                real(mmass)*xmassperparticle
    229         else
    230           mmass=0
    231         endif
    232 
    233         do m=1,mmass
    234           do ipart=minpart,maxpart_mpi
    235 
    236   ! If a vacant storage space is found, attribute everything to this array element
    237   !*****************************************************************************
    238 
    239             if (itra1(ipart).ne.itime) then
    240 
    241   ! Assign particle positions
    242   !**************************
    243 
    244               xtra1(ipart)=real(nx_we(k))
    245               if (jy.eq.ny_sn(1)) then
    246                 ytra1(ipart)=real(jy)+0.5*ran1(idummy)
    247               else if (jy.eq.ny_sn(2)) then
    248                 ytra1(ipart)=real(jy)-0.5*ran1(idummy)
    249               else
    250                 ytra1(ipart)=real(jy)+(ran1(idummy)-.5)
    251               endif
    252               if (j.eq.1) then
    253                 ztra1(ipart)=zcolumn_we(k,jy,1)+(zcolumn_we(k,jy,2)- &
    254                      zcolumn_we(k,jy,1))/4.
    255               else if (j.eq.numcolumn_we(k,jy)) then
    256                 ztra1(ipart)=(2.*zcolumn_we(k,jy,j)+ &
    257                      zcolumn_we(k,jy,j-1)+height(nz))/4.
    258               else
    259                 ztra1(ipart)=zcolumn_we(k,jy,j-1)+ran1(idummy)* &
    260                      (zcolumn_we(k,jy,j+1)-zcolumn_we(k,jy,j-1))
    261               endif
    262 
    263   ! Interpolate PV to the particle position
    264   !****************************************
    265               ixm=int(xtra1(ipart))
    266               jym=int(ytra1(ipart))
    267               ixp=ixm+1
    268               jyp=jym+1
    269               ddx=xtra1(ipart)-real(ixm)
    270               ddy=ytra1(ipart)-real(jym)
    271               rddx=1.-ddx
    272               rddy=1.-ddy
    273               p1=rddx*rddy
    274               p2=ddx*rddy
    275               p3=rddx*ddy
    276               p4=ddx*ddy
    277               do i=2,nz
    278                 if (height(i).gt.ztra1(ipart)) then
    279                   indzm=i-1
    280                   indzp=i
    281                   goto 26
    282                 endif
    283               end do
    284 26            continue
    285               dz1=ztra1(ipart)-height(indzm)
    286               dz2=height(indzp)-ztra1(ipart)
    287               dz=1./(dz1+dz2)
    288               do mm=1,2
    289                 indexh=memind(mm)
    290                 do in=1,2
    291                   indzh=indzm+in-1
    292                   y1(in)=p1*pv(ixm,jym,indzh,indexh) &
    293                        +p2*pv(ixp,jym,indzh,indexh) &
    294                        +p3*pv(ixm,jyp,indzh,indexh) &
    295                        +p4*pv(ixp,jyp,indzh,indexh)
    296                 end do
    297                 yh1(mm)=(dz2*y1(1)+dz1*y1(2))*dz
    298               end do
    299               pvpart=(yh1(1)*dt2+yh1(2)*dt1)*dtt
    300               ylat=ylat0+ytra1(ipart)*dy
    301               if (ylat.lt.0.) pvpart=-1.*pvpart
    302 
    303 
    304   ! For domain-filling option 2 (stratospheric O3), do the rest only in the stratosphere
    305   !*****************************************************************************
    306 
    307               if (((ztra1(ipart).gt.3000.).and. &
    308                    (pvpart.gt.pvcrit)).or.(mdomainfill.eq.1)) then
    309                 nclass(ipart)=min(int(ran1(idummy)* &
    310                      real(nclassunc))+1,nclassunc)
    311                 numactiveparticles=numactiveparticles+1
    312                 numparticlecount=numparticlecount+1
    313                 npoint(ipart)=numparticlecount
    314                 idt(ipart)=mintime
    315                 itra1(ipart)=itime
    316                 itramem(ipart)=itra1(ipart)
    317                 itrasplit(ipart)=itra1(ipart)+ldirect*itsplit
    318                 xmass1(ipart,1)=xmassperparticle
    319                 if (mdomainfill.eq.2) xmass1(ipart,1)= &
    320                      xmass1(ipart,1)*pvpart*48./29.*ozonescale/10.**9
    321               else
    322                 goto 71
    323               endif
    324 
    325 
    326   ! Increase numpart, if necessary
    327   !*******************************
    328 
    329               numpart=max(numpart,ipart)
    330               goto 73      ! Storage space has been found, stop searching
     201
     202
     203! Interpolate the wind velocity and density to the release location
     204!******************************************************************
     205
     206! Determine the model level below the release position
     207!*****************************************************
     208
     209          do i=2,nz
     210            if (height(i).gt.zcolumn_we(k,jy,j)) then
     211              indz=i-1
     212              indzp=i
     213              goto 6
    331214            endif
    332215          end do
    333           if (ipart.gt.maxpart_mpi) &
    334                stop 'boundcond_domainfill.f: too many particles required'
    335 73        minpart=ipart+1
    336 71        continue
     2166         continue
     217
     218! Vertical distance to the level below and above current position
     219!****************************************************************
     220
     221          dz1=zcolumn_we(k,jy,j)-height(indz)
     222          dz2=height(indzp)-zcolumn_we(k,jy,j)
     223          dz=1./(dz1+dz2)
     224
     225! Vertical and temporal interpolation
     226!************************************
     227
     228          do m=1,2
     229            indexh=memind(m)
     230            do in=1,2
     231              indzh=indz+in-1
     232              windl(in)=uu(nx_we(k),jy,indzh,indexh)
     233              rhol(in)=rho(nx_we(k),jy,indzh,indexh)
     234            end do
     235
     236            windhl(m)=(dz2*windl(1)+dz1*windl(2))*dz
     237            rhohl(m)=(dz2*rhol(1)+dz1*rhol(2))*dz
     238          end do
     239
     240          windx=(windhl(1)*dt2+windhl(2)*dt1)*dtt
     241          rhox=(rhohl(1)*dt2+rhohl(2)*dt1)*dtt
     242
     243! Calculate mass flux
     244!********************
     245
     246          fluxofmass=windx*rhox*boundarea*real(lsynctime)
     247
     248
     249! If the mass flux is directed into the domain, add it to previous mass fluxes;
     250! if it is out of the domain, set accumulated mass flux to zero
     251!******************************************************************************
     252
     253          if (k.eq.1) then
     254            if (fluxofmass.ge.0.) then
     255              acc_mass_we(k,jy,j)=acc_mass_we(k,jy,j)+fluxofmass
     256            else
     257              acc_mass_we(k,jy,j)=0.
     258            endif
     259          else
     260            if (fluxofmass.le.0.) then
     261              acc_mass_we(k,jy,j)=acc_mass_we(k,jy,j)+abs(fluxofmass)
     262            else
     263              acc_mass_we(k,jy,j)=0.
     264            endif
     265          endif
     266          accmasst=accmasst+acc_mass_we(k,jy,j)
     267
     268! If the accumulated mass exceeds half the mass that each particle shall carry,
     269! one (or more) particle(s) is (are) released and the accumulated mass is
     270! reduced by the mass of this (these) particle(s)
     271!******************************************************************************
     272
     273          if (acc_mass_we(k,jy,j).ge.xmassperparticle/2.) then
     274            mmass=int((acc_mass_we(k,jy,j)+xmassperparticle/2.)/ &
     275                 xmassperparticle)
     276            acc_mass_we(k,jy,j)=acc_mass_we(k,jy,j)- &
     277                 real(mmass)*xmassperparticle
     278          else
     279            mmass=0
     280          endif
     281
     282          do m=1,mmass
     283            do ipart=minpart,maxpart
     284
     285! If a vacant storage space is found, attribute everything to this array element
     286! TODO: for the MPI version this test can be removed, as all
     287!       elements in _tmp arrays are initialized to zero
     288!*****************************************************************************
     289
     290              if (itra1_tmp(ipart).ne.itime) then
     291
     292! Assign particle positions
     293!**************************
     294
     295                xtra1_tmp(ipart)=real(nx_we(k))
     296                if (jy.eq.ny_sn(1)) then
     297                  ytra1_tmp(ipart)=real(jy)+0.5*ran1(idummy)
     298                else if (jy.eq.ny_sn(2)) then
     299                  ytra1_tmp(ipart)=real(jy)-0.5*ran1(idummy)
     300                else
     301                  ytra1_tmp(ipart)=real(jy)+(ran1(idummy)-.5)
     302                endif
     303                if (j.eq.1) then
     304                  ztra1_tmp(ipart)=zcolumn_we(k,jy,1)+(zcolumn_we(k,jy,2)- &
     305                       zcolumn_we(k,jy,1))/4.
     306                else if (j.eq.numcolumn_we(k,jy)) then
     307                  ztra1_tmp(ipart)=(2.*zcolumn_we(k,jy,j)+ &
     308                       zcolumn_we(k,jy,j-1)+height(nz))/4.
     309                else
     310                  ztra1_tmp(ipart)=zcolumn_we(k,jy,j-1)+ran1(idummy)* &
     311                       (zcolumn_we(k,jy,j+1)-zcolumn_we(k,jy,j-1))
     312                endif
     313
     314! Interpolate PV to the particle position
     315!****************************************
     316                ixm=int(xtra1_tmp(ipart))
     317                jym=int(ytra1_tmp(ipart))
     318                ixp=ixm+1
     319                jyp=jym+1
     320                ddx=xtra1_tmp(ipart)-real(ixm)
     321                ddy=ytra1_tmp(ipart)-real(jym)
     322                rddx=1.-ddx
     323                rddy=1.-ddy
     324                p1=rddx*rddy
     325                p2=ddx*rddy
     326                p3=rddx*ddy
     327                p4=ddx*ddy
     328                do i=2,nz
     329                  if (height(i).gt.ztra1_tmp(ipart)) then
     330                    indzm=i-1
     331                    indzp=i
     332                    goto 26
     333                  endif
     334                end do
     33526              continue
     336                dz1=ztra1_tmp(ipart)-height(indzm)
     337                dz2=height(indzp)-ztra1_tmp(ipart)
     338                dz=1./(dz1+dz2)
     339                do mm=1,2
     340                  indexh=memind(mm)
     341                  do in=1,2
     342                    indzh=indzm+in-1
     343                    y1(in)=p1*pv(ixm,jym,indzh,indexh) &
     344                         +p2*pv(ixp,jym,indzh,indexh) &
     345                         +p3*pv(ixm,jyp,indzh,indexh) &
     346                         +p4*pv(ixp,jyp,indzh,indexh)
     347                  end do
     348                  yh1(mm)=(dz2*y1(1)+dz1*y1(2))*dz
     349                end do
     350                pvpart=(yh1(1)*dt2+yh1(2)*dt1)*dtt
     351                ylat=ylat0+ytra1_tmp(ipart)*dy
     352                if (ylat.lt.0.) pvpart=-1.*pvpart
     353
     354
     355! For domain-filling option 2 (stratospheric O3), do the rest only in the stratosphere
     356!*****************************************************************************
     357
     358                if (((ztra1_tmp(ipart).gt.3000.).and. &
     359                     (pvpart.gt.pvcrit)).or.(mdomainfill.eq.1)) then
     360                  nclass_tmp(ipart)=min(int(ran1(idummy)* &
     361                       real(nclassunc))+1,nclassunc)
     362                  numactiveparticles=numactiveparticles+1
     363                  numparticlecount=numparticlecount+1
     364                  npoint_tmp(ipart)=numparticlecount
     365                  idt_tmp(ipart)=mintime
     366                  itra1_tmp(ipart)=itime
     367                  itramem_tmp(ipart)=itra1_tmp(ipart)
     368                  itrasplit_tmp(ipart)=itra1_tmp(ipart)+ldirect*itsplit
     369                  xmass1_tmp(ipart,1)=xmassperparticle
     370                  if (mdomainfill.eq.2) xmass1_tmp(ipart,1)= &
     371                       xmass1_tmp(ipart,1)*pvpart*48./29.*ozonescale/10.**9
     372                else
     373                  goto 71
     374                endif
     375
     376
     377! Increase numpart, if necessary
     378!*******************************
     379
     380                numpart_total=max(numpart_total,ipart)
     381                goto 73      ! Storage space has been found, stop searching
     382              endif
     383            end do
     384            if (ipart.gt.tmp_size) &
     385                 stop 'boundcond_domainfill_mpi.f90: too many particles required'
     38673          minpart=ipart+1
     38771          continue
     388          end do
     389
     390
    337391        end do
    338 
    339 
    340392      end do
    341393    end do
    342   end do
    343 
    344 
    345   !*****************************************
    346   ! Southern and northern boundary condition
    347   !*****************************************
    348 
    349   ! Loop from west to east
    350   !***********************
    351 
    352   do ix=nx_we(1),nx_we(2)
    353 
    354   ! Loop over southern (index 1) and northern (index 2) boundary
    355   !*************************************************************
    356 
    357     do k=1,2
    358       ylat=ylat0+real(ny_sn(k))*dy
    359       cosfact=cos(ylat*pi180)
    360 
    361   ! Loop over all release locations in a column
    362   !********************************************
    363 
    364       do j=1,numcolumn_sn(k,ix)
    365 
    366   ! Determine, for each release location, the area of the corresponding boundary
    367   !*****************************************************************************
    368 
    369         if (j.eq.1) then
    370           deltaz=(zcolumn_sn(k,ix,2)+zcolumn_sn(k,ix,1))/2.
    371         else if (j.eq.numcolumn_sn(k,ix)) then
    372   !        deltaz=height(nz)-(zcolumn_sn(k,ix,j-1)+
    373   !    +        zcolumn_sn(k,ix,j))/2.
    374   ! In order to avoid taking a very high column for very many particles,
    375   ! use the deltaz from one particle below instead
    376           deltaz=(zcolumn_sn(k,ix,j)-zcolumn_sn(k,ix,j-2))/2.
    377         else
    378           deltaz=(zcolumn_sn(k,ix,j+1)-zcolumn_sn(k,ix,j-1))/2.
    379         endif
    380         if ((ix.eq.nx_we(1)).or.(ix.eq.nx_we(2))) then
    381           boundarea=deltaz*111198.5/2.*cosfact*dx
    382         else
    383           boundarea=deltaz*111198.5*cosfact*dx
    384         endif
    385 
    386 
    387   ! Interpolate the wind velocity and density to the release location
    388   !******************************************************************
    389 
    390   ! Determine the model level below the release position
    391   !*****************************************************
    392 
    393         do i=2,nz
    394           if (height(i).gt.zcolumn_sn(k,ix,j)) then
    395             indz=i-1
    396             indzp=i
    397             goto 16
     394
     395
     396!*****************************************
     397! Southern and northern boundary condition
     398!*****************************************
     399
     400! Loop from west to east
     401!***********************
     402
     403    do ix=nx_we(1),nx_we(2)
     404
     405! Loop over southern (index 1) and northern (index 2) boundary
     406!*************************************************************
     407
     408      do k=1,2
     409        ylat=ylat0+real(ny_sn(k))*dy
     410        cosfact=cos(ylat*pi180)
     411
     412! Loop over all release locations in a column
     413!********************************************
     414
     415        do j=1,numcolumn_sn(k,ix)
     416
     417! Determine, for each release location, the area of the corresponding boundary
     418!*****************************************************************************
     419
     420          if (j.eq.1) then
     421            deltaz=(zcolumn_sn(k,ix,2)+zcolumn_sn(k,ix,1))/2.
     422          else if (j.eq.numcolumn_sn(k,ix)) then
     423!        deltaz=height(nz)-(zcolumn_sn(k,ix,j-1)+
     424!    +        zcolumn_sn(k,ix,j))/2.
     425! In order to avoid taking a very high column for very many particles,
     426! use the deltaz from one particle below instead
     427            deltaz=(zcolumn_sn(k,ix,j)-zcolumn_sn(k,ix,j-2))/2.
     428          else
     429            deltaz=(zcolumn_sn(k,ix,j+1)-zcolumn_sn(k,ix,j-1))/2.
    398430          endif
    399         end do
    400 16      continue
    401 
    402   ! Vertical distance to the level below and above current position
    403   !****************************************************************
    404 
    405         dz1=zcolumn_sn(k,ix,j)-height(indz)
    406         dz2=height(indzp)-zcolumn_sn(k,ix,j)
    407         dz=1./(dz1+dz2)
    408 
    409   ! Vertical and temporal interpolation
    410   !************************************
    411 
    412         do m=1,2
    413           indexh=memind(m)
    414           do in=1,2
    415             indzh=indz+in-1
    416             windl(in)=vv(ix,ny_sn(k),indzh,indexh)
    417             rhol(in)=rho(ix,ny_sn(k),indzh,indexh)
    418           end do
    419 
    420           windhl(m)=(dz2*windl(1)+dz1*windl(2))*dz
    421           rhohl(m)=(dz2*rhol(1)+dz1*rhol(2))*dz
    422         end do
    423 
    424         windx=(windhl(1)*dt2+windhl(2)*dt1)*dtt
    425         rhox=(rhohl(1)*dt2+rhohl(2)*dt1)*dtt
    426 
    427   ! Calculate mass flux, divided by number of processes
    428   !****************************************************
    429 
    430         fluxofmass=windx*rhox*boundarea*real(lsynctime)/mp_partgroup_np
    431 
    432   ! If the mass flux is directed into the domain, add it to previous mass fluxes;
    433   ! if it is out of the domain, set accumulated mass flux to zero
    434   !******************************************************************************
    435 
    436         if (k.eq.1) then
    437           if (fluxofmass.ge.0.) then
    438             acc_mass_sn(k,ix,j)=acc_mass_sn(k,ix,j)+fluxofmass
     431          if ((ix.eq.nx_we(1)).or.(ix.eq.nx_we(2))) then
     432            boundarea=deltaz*111198.5/2.*cosfact*dx
    439433          else
    440             acc_mass_sn(k,ix,j)=0.
     434            boundarea=deltaz*111198.5*cosfact*dx
    441435          endif
    442         else
    443           if (fluxofmass.le.0.) then
    444             acc_mass_sn(k,ix,j)=acc_mass_sn(k,ix,j)+abs(fluxofmass)
    445           else
    446             acc_mass_sn(k,ix,j)=0.
    447           endif
    448         endif
    449         accmasst=accmasst+acc_mass_sn(k,ix,j)
    450 
    451   ! If the accumulated mass exceeds half the mass that each particle shall carry,
    452   ! one (or more) particle(s) is (are) released and the accumulated mass is
    453   ! reduced by the mass of this (these) particle(s)
    454   !******************************************************************************
    455 
    456         if (acc_mass_sn(k,ix,j).ge.xmassperparticle/2.) then
    457           mmass=int((acc_mass_sn(k,ix,j)+xmassperparticle/2.)/ &
    458                xmassperparticle)
    459           acc_mass_sn(k,ix,j)=acc_mass_sn(k,ix,j)- &
    460                real(mmass)*xmassperparticle
    461         else
    462           mmass=0
    463         endif
    464 
    465         do m=1,mmass
    466           do ipart=minpart,maxpart_mpi
    467 
    468   ! If a vacant storage space is found, attribute everything to this array element
    469   !*****************************************************************************
    470 
    471             if (itra1(ipart).ne.itime) then
    472 
    473   ! Assign particle positions
    474   !**************************
    475 
    476               ytra1(ipart)=real(ny_sn(k))
    477               if (ix.eq.nx_we(1)) then
    478                 xtra1(ipart)=real(ix)+0.5*ran1(idummy)
    479               else if (ix.eq.nx_we(2)) then
    480                 xtra1(ipart)=real(ix)-0.5*ran1(idummy)
    481               else
    482                 xtra1(ipart)=real(ix)+(ran1(idummy)-.5)
    483               endif
    484               if (j.eq.1) then
    485                 ztra1(ipart)=zcolumn_sn(k,ix,1)+(zcolumn_sn(k,ix,2)- &
    486                      zcolumn_sn(k,ix,1))/4.
    487               else if (j.eq.numcolumn_sn(k,ix)) then
    488                 ztra1(ipart)=(2.*zcolumn_sn(k,ix,j)+ &
    489                      zcolumn_sn(k,ix,j-1)+height(nz))/4.
    490               else
    491                 ztra1(ipart)=zcolumn_sn(k,ix,j-1)+ran1(idummy)* &
    492                      (zcolumn_sn(k,ix,j+1)-zcolumn_sn(k,ix,j-1))
    493               endif
    494 
    495 
    496   ! Interpolate PV to the particle position
    497   !****************************************
    498               ixm=int(xtra1(ipart))
    499               jym=int(ytra1(ipart))
    500               ixp=ixm+1
    501               jyp=jym+1
    502               ddx=xtra1(ipart)-real(ixm)
    503               ddy=ytra1(ipart)-real(jym)
    504               rddx=1.-ddx
    505               rddy=1.-ddy
    506               p1=rddx*rddy
    507               p2=ddx*rddy
    508               p3=rddx*ddy
    509               p4=ddx*ddy
    510               do i=2,nz
    511                 if (height(i).gt.ztra1(ipart)) then
    512                   indzm=i-1
    513                   indzp=i
    514                   goto 126
    515                 endif
    516               end do
    517 126           continue
    518               dz1=ztra1(ipart)-height(indzm)
    519               dz2=height(indzp)-ztra1(ipart)
    520               dz=1./(dz1+dz2)
    521               do mm=1,2
    522                 indexh=memind(mm)
    523                 do in=1,2
    524                   indzh=indzm+in-1
    525                   y1(in)=p1*pv(ixm,jym,indzh,indexh) &
    526                        +p2*pv(ixp,jym,indzh,indexh) &
    527                        +p3*pv(ixm,jyp,indzh,indexh) &
    528                        +p4*pv(ixp,jyp,indzh,indexh)
    529                 end do
    530                 yh1(mm)=(dz2*y1(1)+dz1*y1(2))*dz
    531               end do
    532               pvpart=(yh1(1)*dt2+yh1(2)*dt1)*dtt
    533               if (ylat.lt.0.) pvpart=-1.*pvpart
    534 
    535 
    536   ! For domain-filling option 2 (stratospheric O3), do the rest only in the stratosphere
    537   !*****************************************************************************
    538 
    539               if (((ztra1(ipart).gt.3000.).and. &
    540                    (pvpart.gt.pvcrit)).or.(mdomainfill.eq.1)) then
    541                 nclass(ipart)=min(int(ran1(idummy)* &
    542                      real(nclassunc))+1,nclassunc)
    543                 numactiveparticles=numactiveparticles+1
    544                 numparticlecount=numparticlecount+1
    545                 npoint(ipart)=numparticlecount
    546                 idt(ipart)=mintime
    547                 itra1(ipart)=itime
    548                 itramem(ipart)=itra1(ipart)
    549                 itrasplit(ipart)=itra1(ipart)+ldirect*itsplit
    550                 xmass1(ipart,1)=xmassperparticle
    551                 if (mdomainfill.eq.2) xmass1(ipart,1)= &
    552                      xmass1(ipart,1)*pvpart*48./29.*ozonescale/10.**9
    553               else
    554                 goto 171
    555               endif
    556 
    557 
    558   ! Increase numpart, if necessary
    559   !*******************************
    560               numpart=max(numpart,ipart)
    561               goto 173      ! Storage space has been found, stop searching
     436
     437
     438! Interpolate the wind velocity and density to the release location
     439!******************************************************************
     440
     441! Determine the model level below the release position
     442!*****************************************************
     443
     444          do i=2,nz
     445            if (height(i).gt.zcolumn_sn(k,ix,j)) then
     446              indz=i-1
     447              indzp=i
     448              goto 16
    562449            endif
    563450          end do
    564           if (ipart.gt.maxpart_mpi) &
    565                stop 'boundcond_domainfill.f: too many particles required'
    566 173       minpart=ipart+1
    567 171       continue
     45116        continue
     452
     453! Vertical distance to the level below and above current position
     454!****************************************************************
     455
     456          dz1=zcolumn_sn(k,ix,j)-height(indz)
     457          dz2=height(indzp)-zcolumn_sn(k,ix,j)
     458          dz=1./(dz1+dz2)
     459
     460! Vertical and temporal interpolation
     461!************************************
     462
     463          do m=1,2
     464            indexh=memind(m)
     465            do in=1,2
     466              indzh=indz+in-1
     467              windl(in)=vv(ix,ny_sn(k),indzh,indexh)
     468              rhol(in)=rho(ix,ny_sn(k),indzh,indexh)
     469            end do
     470
     471            windhl(m)=(dz2*windl(1)+dz1*windl(2))*dz
     472            rhohl(m)=(dz2*rhol(1)+dz1*rhol(2))*dz
     473          end do
     474
     475          windx=(windhl(1)*dt2+windhl(2)*dt1)*dtt
     476          rhox=(rhohl(1)*dt2+rhohl(2)*dt1)*dtt
     477
     478! Calculate mass flux
     479!********************
     480
     481          fluxofmass=windx*rhox*boundarea*real(lsynctime)
     482
     483! If the mass flux is directed into the domain, add it to previous mass fluxes;
     484! if it is out of the domain, set accumulated mass flux to zero
     485!******************************************************************************
     486
     487          if (k.eq.1) then
     488            if (fluxofmass.ge.0.) then
     489              acc_mass_sn(k,ix,j)=acc_mass_sn(k,ix,j)+fluxofmass
     490            else
     491              acc_mass_sn(k,ix,j)=0.
     492            endif
     493          else
     494            if (fluxofmass.le.0.) then
     495              acc_mass_sn(k,ix,j)=acc_mass_sn(k,ix,j)+abs(fluxofmass)
     496            else
     497              acc_mass_sn(k,ix,j)=0.
     498            endif
     499          endif
     500          accmasst=accmasst+acc_mass_sn(k,ix,j)
     501
     502! If the accumulated mass exceeds half the mass that each particle shall carry,
     503! one (or more) particle(s) is (are) released and the accumulated mass is
     504! reduced by the mass of this (these) particle(s)
     505!******************************************************************************
     506
     507          if (acc_mass_sn(k,ix,j).ge.xmassperparticle/2.) then
     508            mmass=int((acc_mass_sn(k,ix,j)+xmassperparticle/2.)/ &
     509                 xmassperparticle)
     510            acc_mass_sn(k,ix,j)=acc_mass_sn(k,ix,j)- &
     511                 real(mmass)*xmassperparticle
     512          else
     513            mmass=0
     514          endif
     515
     516          do m=1,mmass
     517            do ipart=minpart,maxpart
     518
     519! If a vacant storage space is found, attribute everything to this array element
     520!*****************************************************************************
     521
     522              if (itra1_tmp(ipart).ne.itime) then
     523
     524! Assign particle positions
     525!**************************
     526
     527                ytra1_tmp(ipart)=real(ny_sn(k))
     528                if (ix.eq.nx_we(1)) then
     529                  xtra1_tmp(ipart)=real(ix)+0.5*ran1(idummy)
     530                else if (ix.eq.nx_we(2)) then
     531                  xtra1_tmp(ipart)=real(ix)-0.5*ran1(idummy)
     532                else
     533                  xtra1_tmp(ipart)=real(ix)+(ran1(idummy)-.5)
     534                endif
     535                if (j.eq.1) then
     536                  ztra1_tmp(ipart)=zcolumn_sn(k,ix,1)+(zcolumn_sn(k,ix,2)- &
     537                       zcolumn_sn(k,ix,1))/4.
     538                else if (j.eq.numcolumn_sn(k,ix)) then
     539                  ztra1_tmp(ipart)=(2.*zcolumn_sn(k,ix,j)+ &
     540                       zcolumn_sn(k,ix,j-1)+height(nz))/4.
     541                else
     542                  ztra1_tmp(ipart)=zcolumn_sn(k,ix,j-1)+ran1(idummy)* &
     543                       (zcolumn_sn(k,ix,j+1)-zcolumn_sn(k,ix,j-1))
     544                endif
     545
     546
     547! Interpolate PV to the particle position
     548!****************************************
     549                ixm=int(xtra1_tmp(ipart))
     550                jym=int(ytra1_tmp(ipart))
     551                ixp=ixm+1
     552                jyp=jym+1
     553                ddx=xtra1_tmp(ipart)-real(ixm)
     554                ddy=ytra1_tmp(ipart)-real(jym)
     555                rddx=1.-ddx
     556                rddy=1.-ddy
     557                p1=rddx*rddy
     558                p2=ddx*rddy
     559                p3=rddx*ddy
     560                p4=ddx*ddy
     561                do i=2,nz
     562                  if (height(i).gt.ztra1_tmp(ipart)) then
     563                    indzm=i-1
     564                    indzp=i
     565                    goto 126
     566                  endif
     567                end do
     568126             continue
     569                dz1=ztra1_tmp(ipart)-height(indzm)
     570                dz2=height(indzp)-ztra1_tmp(ipart)
     571                dz=1./(dz1+dz2)
     572                do mm=1,2
     573                  indexh=memind(mm)
     574                  do in=1,2
     575                    indzh=indzm+in-1
     576                    y1(in)=p1*pv(ixm,jym,indzh,indexh) &
     577                         +p2*pv(ixp,jym,indzh,indexh) &
     578                         +p3*pv(ixm,jyp,indzh,indexh) &
     579                         +p4*pv(ixp,jyp,indzh,indexh)
     580                  end do
     581                  yh1(mm)=(dz2*y1(1)+dz1*y1(2))*dz
     582                end do
     583                pvpart=(yh1(1)*dt2+yh1(2)*dt1)*dtt
     584                if (ylat.lt.0.) pvpart=-1.*pvpart
     585
     586
     587! For domain-filling option 2 (stratospheric O3), do the rest only in the stratosphere
     588!*****************************************************************************
     589
     590                if (((ztra1_tmp(ipart).gt.3000.).and. &
     591                     (pvpart.gt.pvcrit)).or.(mdomainfill.eq.1)) then
     592                  nclass_tmp(ipart)=min(int(ran1(idummy)* &
     593                       real(nclassunc))+1,nclassunc)
     594                  numactiveparticles=numactiveparticles+1
     595                  numparticlecount=numparticlecount+1
     596                  npoint_tmp(ipart)=numparticlecount
     597                  idt_tmp(ipart)=mintime
     598                  itra1_tmp(ipart)=itime
     599                  itramem_tmp(ipart)=itra1_tmp(ipart)
     600                  itrasplit_tmp(ipart)=itra1_tmp(ipart)+ldirect*itsplit
     601                  xmass1_tmp(ipart,1)=xmassperparticle
     602                  if (mdomainfill.eq.2) xmass1_tmp(ipart,1)= &
     603                       xmass1_tmp(ipart,1)*pvpart*48./29.*ozonescale/10.**9
     604                else
     605                  goto 171
     606                endif
     607
     608
     609! Increase numpart, if necessary
     610!*******************************
     611                numpart_total=max(numpart_total,ipart)
     612                goto 173      ! Storage space has been found, stop searching
     613              endif
     614            end do
     615            if (ipart.gt.tmp_size) &
     616                 stop 'boundcond_domainfill.f: too many particles required'
     617173         minpart=ipart+1
     618171         continue
     619          end do
     620
     621
    568622        end do
    569 
    570 
    571623      end do
    572624    end do
     625
     626
     627! xm=0.
     628! do i=1,numpart_total
     629!   if (itra1_tmp(i).eq.itime) xm=xm+xmass1(i,1)
     630! end do
     631
     632!write(*,*) itime,numactiveparticles,numparticlecount,numpart,
     633!    +xm,accmasst,xm+accmasst
     634
     635  end if ! if lroot
     636
     637! Distribute the number of particles to be released
     638! *************************************************
     639  call MPI_Bcast(numpart_total, 1, MPI_INTEGER, id_root, mp_comm_used, mp_ierr)
     640
     641  do i=0, mp_partgroup_np-1
     642    numrel_mpi(i) = numpart_total/mp_partgroup_np
     643    if (i.lt.mod(numpart_total,mp_partgroup_np)) numrel_mpi(i) = numrel_mpi(i) + 1
    573644  end do
    574645
    575 
    576   xm=0.
    577   do i=1,numpart
    578     if (itra1(i).eq.itime) xm=xm+xmass1(i,1)
     646! Allocate temporary arrays for receiving processes
     647  if (.not.lroot) then
     648    allocate(itra1_tmp(numrel_mpi(mp_partid)),&
     649         & npoint_tmp(numrel_mpi(mp_partid)),&
     650         & nclass_tmp(numrel_mpi(mp_partid)),&
     651         & idt_tmp(numrel_mpi(mp_partid)),&
     652         & itramem_tmp(numrel_mpi(mp_partid)),&
     653         & itrasplit_tmp(numrel_mpi(mp_partid)),&
     654         & xtra1_tmp(numrel_mpi(mp_partid)),&
     655         & ytra1_tmp(numrel_mpi(mp_partid)),&
     656         & ztra1_tmp(numrel_mpi(mp_partid)),&
     657         & xmass1_tmp(numrel_mpi(mp_partid),maxspec))
     658     
     659! Initialize all particles as non-existent   
     660    itra1_tmp(:)=-999999999
     661  end if
     662
     663! Distribute particles
     664! Keep track of released particles so far
     665  rel_counter = 0
     666  mtag = 1000
     667
     668  do i=0, mp_partgroup_np-1
     669
     670! For root process, nothing to do except update release count
     671    if (i.eq.0) then
     672      rel_counter = rel_counter + numrel_mpi(i)
     673      cycle
     674    end if
     675     
     676! Send particles from root to non-root processes
     677    if (lroot.and.numrel_mpi(i).gt.0) then
     678
     679      call MPI_SEND(nclass_tmp(rel_counter+1:rel_counter+numrel_mpi(i)),&
     680           &numrel_mpi(i),MPI_INTEGER,i,mtag+1*i,mp_comm_used,mp_ierr)
     681
     682      call MPI_SEND(npoint_tmp(rel_counter+1:rel_counter+numrel_mpi(i)),&
     683           &numrel_mpi(i),MPI_INTEGER,i,mtag+2*i,mp_comm_used,mp_ierr)
     684
     685      call MPI_SEND(itra1_tmp(rel_counter+1:rel_counter+numrel_mpi(i)),&
     686           &numrel_mpi(i),MPI_INTEGER,i,mtag+3*i,mp_comm_used,mp_ierr)
     687
     688      call MPI_SEND(idt_tmp(rel_counter+1:rel_counter+numrel_mpi(i)),&
     689           &numrel_mpi(i),MPI_INTEGER,i,mtag+4*i,mp_comm_used,mp_ierr)
     690
     691      call MPI_SEND(itramem_tmp(rel_counter+1:rel_counter+numrel_mpi(i)),&
     692           &numrel_mpi(i),MPI_INTEGER,i,mtag+5*i,mp_comm_used,mp_ierr)
     693
     694      call MPI_SEND(itrasplit_tmp(rel_counter+1:rel_counter+numrel_mpi(i)),&
     695           &numrel_mpi(i),MPI_INTEGER,i,mtag+6*i,mp_comm_used,mp_ierr)
     696
     697      call MPI_SEND(xtra1_tmp(rel_counter+1:rel_counter+numrel_mpi(i)),&
     698           &numrel_mpi(i),mp_dp,i,mtag+7*i,mp_comm_used,mp_ierr)
     699
     700      call MPI_SEND(ytra1_tmp(rel_counter+1:rel_counter+numrel_mpi(i)),&
     701           &numrel_mpi(i),mp_dp,i,mtag+8*i,mp_comm_used,mp_ierr)
     702
     703      call MPI_SEND(ztra1_tmp(rel_counter+1:rel_counter+numrel_mpi(i)),&
     704           &numrel_mpi(i),mp_sp,i,mtag+9*i,mp_comm_used,mp_ierr)
     705
     706      do j=1,nspec
     707        call MPI_SEND(xmass1_tmp(rel_counter+1:rel_counter+numrel_mpi(i),j),&
     708             &numrel_mpi(i),mp_sp,i,mtag+(9+j)*i,mp_comm_used,mp_ierr)
     709      end do
     710
     711! Non-root processes issue receive requests
     712    else if (i.eq.mp_partid.and.numrel_mpi(i).gt.0) then
     713      call MPI_RECV(nclass_tmp(1:numrel_mpi(i)),numrel_mpi(i),&
     714           &MPI_INTEGER,id_root,mtag+1*i,mp_comm_used,mp_status,mp_ierr)
     715
     716      call MPI_RECV(npoint_tmp(1:numrel_mpi(i)),numrel_mpi(i),&
     717           &MPI_INTEGER,id_root,mtag+2*i,mp_comm_used,mp_status,mp_ierr)
     718
     719      call MPI_RECV(itra1_tmp(1:numrel_mpi(i)),numrel_mpi(i),&
     720           &MPI_INTEGER,id_root,mtag+3*i,mp_comm_used,mp_status,mp_ierr)
     721
     722      call MPI_RECV(idt_tmp(1:numrel_mpi(i)),numrel_mpi(i),&
     723           &MPI_INTEGER,id_root,mtag+4*i,mp_comm_used,mp_status,mp_ierr)
     724
     725      call MPI_RECV(itramem_tmp(1:numrel_mpi(i)),numrel_mpi(i),&
     726           &MPI_INTEGER,id_root,mtag+5*i,mp_comm_used,mp_status,mp_ierr)
     727
     728      call MPI_RECV(itrasplit_tmp(1:numrel_mpi(i)),numrel_mpi(i),&
     729           &MPI_INTEGER,id_root,mtag+6*i,mp_comm_used,mp_status,mp_ierr)
     730
     731      call MPI_RECV(xtra1_tmp(1:numrel_mpi(i)),numrel_mpi(i),&
     732           &mp_dp,id_root,mtag+7*i,mp_comm_used,mp_status,mp_ierr)
     733
     734      call MPI_RECV(ytra1_tmp(1:numrel_mpi(i)),numrel_mpi(i),&
     735           &mp_dp,id_root,mtag+8*i,mp_comm_used,mp_status,mp_ierr)
     736
     737      call MPI_RECV(ztra1_tmp(1:numrel_mpi(i)),numrel_mpi(i),&
     738           &mp_sp,id_root,mtag+9*i,mp_comm_used,mp_status,mp_ierr)
     739
     740      do j=1,nspec
     741        call MPI_RECV(xmass1_tmp(1:numrel_mpi(i),j),numrel_mpi(i),&
     742             &mp_sp,id_root,mtag+(9+j)*i,mp_comm_used,mp_status,mp_ierr)
     743
     744      end do
     745    end if
     746    rel_counter = rel_counter + numrel_mpi(i)
    579747  end do
    580748
    581   !write(*,*) itime,numactiveparticles,numparticlecount,numpart,
    582   !    +xm,accmasst,xm+accmasst
    583 
    584 
    585   ! If particles shall be dumped, then accumulated masses at the domain boundaries
    586   ! must be dumped, too, to be used for later runs
    587   !*****************************************************************************
    588 
    589 ! :TODO: eso parallelize
     749! Find free storage space for the new particles.
     750! This section is independent of the redistribution scheme used
     751! ********************************************************************
     752
     753! Keep track of released particles so far
     754  minpart=1
     755   
     756! The algorithm should be correct also for root process
     757  do i=1, numrel_mpi(mp_partid)
     758    do ipart=minpart, maxpart
     759      if (itra1(ipart).ne.itime) then
     760        itra1(ipart) = itra1_tmp(i)
     761        npoint(ipart) = npoint_tmp(i)
     762        nclass(ipart) = nclass_tmp(i)
     763        idt(ipart) = idt_tmp(i)
     764        itramem(ipart) = itramem_tmp(i)
     765        itrasplit(ipart) = itrasplit_tmp(i)
     766        xtra1(ipart) = xtra1_tmp(i)
     767        ytra1(ipart) = ytra1_tmp(i)
     768        ztra1(ipart) = ztra1_tmp(i)
     769        xmass1(ipart,:) = xmass1_tmp(i,:)
     770! Increase numpart, if necessary
     771        numpart=max(numpart,ipart)
     772        goto 200 ! Storage space has been found, stop searching
     773      end if
     774    end do
     775200 minpart=ipart+1
     776  end do
     777
     778! If particles shall be dumped, then accumulated masses at the domain boundaries
     779! must be dumped, too, to be used for later runs
     780!*****************************************************************************
     781
    590782  if ((ipout.gt.0).and.(itime.eq.loutend)) then
    591783    if (lroot) then
     784      call mpif_mtime('iotime',0)
    592785      open(unitboundcond,file=path(2)(1:length(2))//'boundcond.bin', &
    593786           form='unformatted')
     594787      write(unitboundcond) numcolumn_we,numcolumn_sn, &
    595788           zcolumn_we,zcolumn_sn,acc_mass_we,acc_mass_sn
    596789      close(unitboundcond)
     790      call mpif_mtime('iotime',1)
    597791    end if
    598792  endif
    599793
     794! Deallocate temporary arrays
     795  deallocate(itra1_tmp,npoint_tmp,nclass_tmp,idt_tmp,itramem_tmp,itrasplit_tmp,&
     796       & xtra1_tmp,ytra1_tmp,ztra1_tmp,xmass1_tmp,numrel_mpi)
     797! numactiveparticles_mpi
     798
     799
    600800end subroutine boundcond_domainfill
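
The hunk above splits numpart_total released particles across the MPI ranks: every rank gets the integer quotient numpart_total/mp_partgroup_np, and the first mod(numpart_total,mp_partgroup_np) ranks take one extra particle, so the per-rank counts always sum to the total. The stand-alone program below is only a minimal sketch of that split rule, not part of the changeset; the sizes are invented.

    program split_demo
      implicit none
      ! Sketch of the per-rank particle counts computed in boundcond_domainfill
      ! above: an even integer split, with the remainder handed to the
      ! lowest-numbered ranks. numpart_total and np are placeholder values.
      integer, parameter :: numpart_total = 10, np = 4
      integer :: numrel(0:np-1), i

      do i = 0, np-1
        numrel(i) = numpart_total/np                                ! base share
        if (i .lt. mod(numpart_total,np)) numrel(i) = numrel(i)+1   ! remainder
      end do

      write(*,*) 'per-rank counts:', numrel       ! prints 3 3 2 2
      write(*,*) 'sum            :', sum(numrel)  ! equals numpart_total
    end program split_demo
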
  • src/com_mod.f90

    r62e65c7 rc8fc724  
    132132
     133133!ZHG SEP 2015 whether or not to read clouds from GRIB
    134   logical :: readclouds
     134  logical :: readclouds=.false.
    135135!ESO DEC 2015 whether or not both clwc and ciwc are present (if so they are summed)
    136   logical :: sumclouds
     136  logical :: sumclouds=.false.
    137137
    138138  logical,dimension(maxnests) :: readclouds_nest, sumclouds_nest
     
    140140
    141141!NIK 16.02.2015
    142   integer(selected_int_kind(16)) :: tot_blc_count=0, tot_inc_count=0
     142  integer(selected_int_kind(16)), dimension(maxspec) :: tot_blc_count=0, &
     143       &tot_inc_count=0
    143144
    144145
     
    576577  real :: dxoutn,dyoutn,outlon0n,outlat0n,xoutshiftn,youtshiftn
    577578  !real outheight(maxzgrid),outheighthalf(maxzgrid)
    578   logical :: DEP,DRYDEP,DRYDEPSPEC(maxspec),WETDEP,OHREA,ASSSPEC
     579  logical :: DEP,DRYDEP,DRYDEPSPEC(maxspec),WETDEP,WETDEPSPEC(maxspec),&
     580       & OHREA,ASSSPEC
    579581
    580582  ! numxgrid,numygrid       number of grid points in x,y-direction
     
    593595  ! DRYDEPSPEC              .true., if dry deposition is switched on for that species
    594596  ! WETDEP                  .true., if wet deposition is switched on
     597  ! WETDEPSPEC              .true., if wet deposition is switched on for that species
    595598  ! OHREA                   .true., if OH reaction is switched on
     596599  ! ASSSPEC                 .true., if there are two species associated
     
    743746
    744747  ! These variables are used to avoid having separate versions of
    745   ! files in cases where differences with MPI version is minor (eso)
     748  ! files in cases where differences with MPI version are minor (eso)
    746749  !*****************************************************************
    747750  integer :: mpi_mode=0 ! .gt. 0 if running MPI version
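
The com_mod.f90 hunk above turns the scavenging counters tot_blc_count and tot_inc_count from single 64-bit integers into per-species arrays, so that scavenging events can be accumulated and reported separately for each species. The fragment below is a minimal stand-alone sketch of that pattern; maxspec and the counted event are placeholders, not values from the changeset.

    program counter_demo
      implicit none
      ! One 64-bit counter per species instead of a single global counter;
      ! maxspec is a placeholder size for this sketch.
      integer, parameter :: maxspec = 3
      integer(selected_int_kind(16)), dimension(maxspec) :: tot_blc_count = 0
      integer :: ks

      tot_blc_count(2) = tot_blc_count(2) + 1   ! a below-cloud event for species 2

      do ks = 1, maxspec
        write(*,*) 'species', ks, ' below-cloud events:', tot_blc_count(ks)
      end do
    end program counter_demo
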
  • src/conccalc.f90

    rfdc0f03 r4c64400  
    126126  !*****************************************************************************
    127127      do ind=indz,indzp
    128         rhoprof(ind-indz+1)=p1*rho(ix ,jy ,ind,2) &
     128        rhoprof(ind-indz+1)=p1*rho(ix ,jy ,ind,memind(2)) &
    129129             +p2*rho(ixp,jy ,ind,2) &
    130130             +p3*rho(ix ,jyp,ind,2) &
     
    181181  !*****************************************************************************
    182182
    183       if ((itage.lt.10800).or.(xl.lt.0.5).or.(yl.lt.0.5).or. &
     183      if (lnokernel.or.(itage.lt.10800).or.(xl.lt.0.5).or.(yl.lt.0.5).or. &
    184184           (xl.gt.real(numxgrid-1)-0.5).or. &
    185185           (yl.gt.real(numygrid-1)-0.5)) then             ! no kernel, direct attribution to grid cell
  • src/conccalc_mpi.f90

    r5f9d14a r4c64400  
    194194  !*****************************************************************************
    195195
    196       if ((itage.lt.10800).or.(xl.lt.0.5).or.(yl.lt.0.5).or. &
     196      if (lnokernel.or.(itage.lt.10800).or.(xl.lt.0.5).or.(yl.lt.0.5).or. &
    197197           (xl.gt.real(numxgrid-1)-0.5).or. &
    198198           (yl.gt.real(numygrid-1)-0.5)) then             ! no kernel, direct attribution to grid cell
  • src/concoutput.f90

    rdb712a8 r16b61a5  
    626626!*************************
    627627
    628   ! do ks=1,nspec
    629   !   do kp=1,maxpointspec_act
    630   !     do i=1,numreceptor
    631   !       creceptor(i,ks)=0.
    632   !     end do
    633   !     do jy=0,numygrid-1
    634   !       do ix=0,numxgrid-1
    635   !         do l=1,nclassunc
    636   !           do nage=1,nageclass
    637   !             do kz=1,numzgrid
    638   !               gridunc(ix,jy,kz,ks,kp,l,nage)=0.
    639   !             end do
    640   !           end do
    641   !         end do
    642   !       end do
    643   !     end do
    644   !   end do
    645   ! end do
     628! do ks=1,nspec
     629!   do kp=1,maxpointspec_act
     630!     do i=1,numreceptor
     631!       creceptor(i,ks)=0.
     632!     end do
     633!     do jy=0,numygrid-1
     634!       do ix=0,numxgrid-1
     635!         do l=1,nclassunc
     636!           do nage=1,nageclass
     637!             do kz=1,numzgrid
     638!               gridunc(ix,jy,kz,ks,kp,l,nage)=0.
     639!             end do
     640!           end do
     641!         end do
     642!       end do
     643!     end do
     644!   end do
     645! end do
    646646  creceptor(:,:)=0.
    647647  gridunc(:,:,:,:,:,:,:)=0.
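
The reinitialization hunk above replaces the commented-out nested loops with whole-array assignments (creceptor(:,:)=0. and gridunc(:,:,:,:,:,:,:)=0.), which zero the same elements with less code and leave the loop order to the compiler. The short program below only illustrates that equivalence; the dimensions are placeholders.

    program array_zero_demo
      implicit none
      ! The explicit loops and the array assignments zero exactly the same elements.
      real :: gridunc(2,2,3), creceptor(4,2)
      integer :: i, j, k

      do k = 1, 3                 ! loop form
        do j = 1, 2
          do i = 1, 2
            gridunc(i,j,k) = 0.
          end do
        end do
      end do

      creceptor(:,:) = 0.         ! equivalent array-syntax form
      gridunc = 0.

      write(*,*) sum(gridunc), sum(creceptor)   ! both zero
    end program array_zero_demo
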
  • src/concoutput_nest_mpi.f90

    r38b7917 r16b61a5  
    105105! Measure execution time
    106106  if (mp_measure_time) call mpif_mtime('iotime',0)
    107   !   call cpu_time(mp_root_time_beg)
    108   !   mp_root_wtime_beg = mpi_wtime()
    109   ! end if
    110107
    111108  if (verbosity.eq.1) then
  • src/concoutput_surf.f90

    r6a678e3 r16b61a5  
    2121
    2222subroutine concoutput_surf(itime,outnum,gridtotalunc,wetgridtotalunc, &
    23        drygridtotalunc)
    24   !                        i     i          o             o
    25   !       o
    26   !*****************************************************************************
    27   !                                                                            *
    28   !     Output of the concentration grid and the receptor concentrations.      *
    29   !                                                                            *
    30   !     Author: A. Stohl                                                       *
    31   !                                                                            *
    32   !     24 May 1995                                                            *
    33   !                                                                            *
    34   !     13 April 1999, Major update: if output size is smaller, dump output    *
    35   !                    in sparse matrix format; additional output of           *
    36   !                    uncertainty                                             *
    37   !                                                                            *
    38   !     05 April 2000, Major update: output of age classes; output for backward*
    39   !                    runs is time spent in grid cell times total mass of     *
    40   !                    species.                                                *
    41   !                                                                            *
    42   !     17 February 2002, Appropriate dimensions for backward and forward runs *
    43   !                       are now specified in file par_mod                    *
    44   !                                                                            *
    45   !     June 2006, write grid in sparse matrix with a single write command     *
    46   !                in order to save disk space                                 *
    47   !                                                                            *
    48   !     2008 new sparse matrix format                                          *
    49   !                                                                            *
    50   !*****************************************************************************
    51   !                                                                            *
    52   ! Variables:                                                                 *
    53   ! outnum          number of samples                                          *
    54   ! ncells          number of cells with non-zero concentrations               *
    55   ! sparse          .true. if in sparse matrix format, else .false.            *
     56   ! tot_mu          1 for forward, initial mass mixing ratio for backw. runs   *
    57   !                                                                            *
    58   !*****************************************************************************
     23     drygridtotalunc)
     24!                        i     i          o             o
     25!       o
     26!*****************************************************************************
     27!                                                                            *
     28!     Output of the concentration grid and the receptor concentrations.      *
     29!                                                                            *
     30!     Author: A. Stohl                                                       *
     31!                                                                            *
     32!     24 May 1995                                                            *
     33!                                                                            *
     34!     13 April 1999, Major update: if output size is smaller, dump output    *
     35!                    in sparse matrix format; additional output of           *
     36!                    uncertainty                                             *
     37!                                                                            *
     38!     05 April 2000, Major update: output of age classes; output for backward*
     39!                    runs is time spent in grid cell times total mass of     *
     40!                    species.                                                *
     41!                                                                            *
     42!     17 February 2002, Appropriate dimensions for backward and forward runs *
     43!                       are now specified in file par_mod                    *
     44!                                                                            *
     45!     June 2006, write grid in sparse matrix with a single write command     *
     46!                in order to save disk space                                 *
     47!                                                                            *
     48!     2008 new sparse matrix format                                          *
     49!                                                                            *
     50!*****************************************************************************
     51!                                                                            *
     52! Variables:                                                                 *
     53! outnum          number of samples                                          *
     54! ncells          number of cells with non-zero concentrations               *
     55! sparse          .true. if in sparse matrix format, else .false.            *
      56! tot_mu          1 for forward, initial mass mixing ratio for backw. runs   *
     57!                                                                            *
     58!*****************************************************************************
    5959
    6060  use unc_mod
     
    7373  real :: outnum,densityoutrecept(maxreceptor),xl,yl
    7474
    75   !real densityoutgrid(0:numxgrid-1,0:numygrid-1,numzgrid),
    76   !    +grid(0:numxgrid-1,0:numygrid-1,numzgrid,maxspec,maxpointspec_act,
    77   !    +    maxageclass)
    78   !real wetgrid(0:numxgrid-1,0:numygrid-1,maxspec,maxpointspec_act,
    79   !    +       maxageclass)
    80   !real drygrid(0:numxgrid-1,0:numygrid-1,maxspec,
    81   !    +       maxpointspec_act,maxageclass)
    82   !real gridsigma(0:numxgrid-1,0:numygrid-1,numzgrid,maxspec,
    83   !    +       maxpointspec_act,maxageclass),
    84   !    +     drygridsigma(0:numxgrid-1,0:numygrid-1,maxspec,
    85   !    +     maxpointspec_act,maxageclass),
    86   !    +     wetgridsigma(0:numxgrid-1,0:numygrid-1,maxspec,
    87   !    +     maxpointspec_act,maxageclass)
    88   !real factor(0:numxgrid-1,0:numygrid-1,numzgrid)
    89   !real sparse_dump_r(numxgrid*numygrid*numzgrid)
    90   !integer sparse_dump_i(numxgrid*numygrid*numzgrid)
    91 
    92   !real sparse_dump_u(numxgrid*numygrid*numzgrid)
     75!real densityoutgrid(0:numxgrid-1,0:numygrid-1,numzgrid),
     76!    +grid(0:numxgrid-1,0:numygrid-1,numzgrid,maxspec,maxpointspec_act,
     77!    +    maxageclass)
     78!real wetgrid(0:numxgrid-1,0:numygrid-1,maxspec,maxpointspec_act,
     79!    +       maxageclass)
     80!real drygrid(0:numxgrid-1,0:numygrid-1,maxspec,
     81!    +       maxpointspec_act,maxageclass)
     82!real gridsigma(0:numxgrid-1,0:numygrid-1,numzgrid,maxspec,
     83!    +       maxpointspec_act,maxageclass),
     84!    +     drygridsigma(0:numxgrid-1,0:numygrid-1,maxspec,
     85!    +     maxpointspec_act,maxageclass),
     86!    +     wetgridsigma(0:numxgrid-1,0:numygrid-1,maxspec,
     87!    +     maxpointspec_act,maxageclass)
     88!real factor(0:numxgrid-1,0:numygrid-1,numzgrid)
     89!real sparse_dump_r(numxgrid*numygrid*numzgrid)
     90!integer sparse_dump_i(numxgrid*numygrid*numzgrid)
     91
     92!real sparse_dump_u(numxgrid*numygrid*numzgrid)
    9393  real(dep_prec) :: auxgrid(nclassunc)
    9494  real(sp) :: gridtotal,gridsigmatotal,gridtotalunc
     
    104104
    105105  if (verbosity.eq.1) then
    106      print*,'inside concoutput_surf '
    107      CALL SYSTEM_CLOCK(count_clock)
    108      WRITE(*,*) 'SYSTEM_CLOCK',count_clock - count_clock0   
     106    print*,'inside concoutput_surf '
     107    CALL SYSTEM_CLOCK(count_clock)
     108    WRITE(*,*) 'SYSTEM_CLOCK',count_clock - count_clock0   
    109109  endif
    110110
    111   ! Determine current calendar date, needed for the file name
    112   !**********************************************************
     111! Determine current calendar date, needed for the file name
     112!**********************************************************
    113113
    114114  jul=bdate+real(itime,kind=dp)/86400._dp
     
    116116  write(adate,'(i8.8)') jjjjmmdd
    117117  write(atime,'(i6.6)') ihmmss
    118   !write(unitdates,'(a)') adate//atime
    119 
    120     open(unitdates,file=path(2)(1:length(2))//'dates', ACCESS='APPEND')
    121       write(unitdates,'(a)') adate//atime
    122     close(unitdates)
    123 
    124   ! For forward simulations, output fields have dimension MAXSPEC,
    125   ! for backward simulations, output fields have dimension MAXPOINT.
    126   ! Thus, make loops either about nspec, or about numpoint
    127   !*****************************************************************
     118!write(unitdates,'(a)') adate//atime
     119
     120  open(unitdates,file=path(2)(1:length(2))//'dates', ACCESS='APPEND')
     121  write(unitdates,'(a)') adate//atime
     122  close(unitdates)
     123
     124! For forward simulations, output fields have dimension MAXSPEC,
     125! for backward simulations, output fields have dimension MAXPOINT.
     126! Thus, make loops either about nspec, or about numpoint
     127!*****************************************************************
    128128
    129129
     
    144144
    145145  if (verbosity.eq.1) then
    146      print*,'concoutput_surf 2'
    147      CALL SYSTEM_CLOCK(count_clock)
    148      WRITE(*,*) 'SYSTEM_CLOCK',count_clock - count_clock0   
     146    print*,'concoutput_surf 2'
     147    CALL SYSTEM_CLOCK(count_clock)
     148    WRITE(*,*) 'SYSTEM_CLOCK',count_clock - count_clock0   
    149149  endif
    150150
    151   !*******************************************************************
    152   ! Compute air density: sufficiently accurate to take it
    153   ! from coarse grid at some time
    154   ! Determine center altitude of output layer, and interpolate density
    155   ! data to that altitude
    156   !*******************************************************************
     151!*******************************************************************
     152! Compute air density: sufficiently accurate to take it
     153! from coarse grid at some time
     154! Determine center altitude of output layer, and interpolate density
     155! data to that altitude
     156!*******************************************************************
    157157
    158158  do kz=1,numzgrid
     
    166166           (height(kzz).gt.halfheight)) goto 46
    167167    end do
    168 46   kzz=max(min(kzz,nz),2)
     16846  kzz=max(min(kzz,nz),2)
    169169    dz1=halfheight-height(kzz-1)
    170170    dz2=height(kzz)-halfheight
     
    184184  end do
    185185
    186     do i=1,numreceptor
    187       xl=xreceptor(i)
    188       yl=yreceptor(i)
    189       iix=max(min(nint(xl),nxmin1),0)
    190       jjy=max(min(nint(yl),nymin1),0)
    191       densityoutrecept(i)=rho(iix,jjy,1,2)
    192     end do
    193 
    194 
    195   ! Output is different for forward and backward simulations
    196     do kz=1,numzgrid
    197       do jy=0,numygrid-1
    198         do ix=0,numxgrid-1
    199           if (ldirect.eq.1) then
    200             factor3d(ix,jy,kz)=1.e12/volume(ix,jy,kz)/outnum
    201           else
    202             factor3d(ix,jy,kz)=real(abs(loutaver))/outnum
    203           endif
    204         end do
     186  do i=1,numreceptor
     187    xl=xreceptor(i)
     188    yl=yreceptor(i)
     189    iix=max(min(nint(xl),nxmin1),0)
     190    jjy=max(min(nint(yl),nymin1),0)
     191    densityoutrecept(i)=rho(iix,jjy,1,2)
     192  end do
     193
     194
     195! Output is different for forward and backward simulations
     196  do kz=1,numzgrid
     197    do jy=0,numygrid-1
     198      do ix=0,numxgrid-1
     199        if (ldirect.eq.1) then
     200          factor3d(ix,jy,kz)=1.e12/volume(ix,jy,kz)/outnum
     201        else
     202          factor3d(ix,jy,kz)=real(abs(loutaver))/outnum
     203        endif
    205204      end do
    206205    end do
    207 
    208   !*********************************************************************
    209   ! Determine the standard deviation of the mean concentration or mixing
    210   ! ratio (uncertainty of the output) and the dry and wet deposition
    211   !*********************************************************************
     206  end do
     207
     208!*********************************************************************
     209! Determine the standard deviation of the mean concentration or mixing
     210! ratio (uncertainty of the output) and the dry and wet deposition
     211!*********************************************************************
    212212
    213213  if (verbosity.eq.1) then
    214      print*,'concoutput_surf 3 (sd)'
    215      CALL SYSTEM_CLOCK(count_clock)
    216      WRITE(*,*) 'SYSTEM_CLOCK',count_clock - count_clock0   
     214    print*,'concoutput_surf 3 (sd)'
     215    CALL SYSTEM_CLOCK(count_clock)
     216    WRITE(*,*) 'SYSTEM_CLOCK',count_clock - count_clock0   
    217217  endif
    218218  gridtotal=0.
     
    228228  do ks=1,nspec
    229229
    230   write(anspec,'(i3.3)') ks
    231   if ((iout.eq.1).or.(iout.eq.3).or.(iout.eq.5)) then
    232     if (ldirect.eq.1) then
    233       open(unitoutgrid,file=path(2)(1:length(2))//'grid_conc_'//adate// &
     230    write(anspec,'(i3.3)') ks
     231    if ((iout.eq.1).or.(iout.eq.3).or.(iout.eq.5)) then
     232      if (ldirect.eq.1) then
     233        open(unitoutgrid,file=path(2)(1:length(2))//'grid_conc_'//adate// &
     234             atime//'_'//anspec,form='unformatted')
     235      else
     236        open(unitoutgrid,file=path(2)(1:length(2))//'grid_time_'//adate// &
     237             atime//'_'//anspec,form='unformatted')
     238      endif
     239      write(unitoutgrid) itime
     240    endif
     241
     242    if ((iout.eq.2).or.(iout.eq.3)) then      ! mixing ratio
     243      open(unitoutgridppt,file=path(2)(1:length(2))//'grid_pptv_'//adate// &
    234244           atime//'_'//anspec,form='unformatted')
    235     else
    236       open(unitoutgrid,file=path(2)(1:length(2))//'grid_time_'//adate// &
    237            atime//'_'//anspec,form='unformatted')
     245
     246      write(unitoutgridppt) itime
    238247    endif
    239      write(unitoutgrid) itime
    240    endif
    241 
    242   if ((iout.eq.2).or.(iout.eq.3)) then      ! mixing ratio
    243    open(unitoutgridppt,file=path(2)(1:length(2))//'grid_pptv_'//adate// &
    244         atime//'_'//anspec,form='unformatted')
    245 
    246     write(unitoutgridppt) itime
    247   endif
    248 
    249   do kp=1,maxpointspec_act
    250   do nage=1,nageclass
    251 
    252     do jy=0,numygrid-1
    253       do ix=0,numxgrid-1
    254 
    255   ! WET DEPOSITION
    256         if ((WETDEP).and.(ldirect.gt.0)) then
    257             do l=1,nclassunc
    258               auxgrid(l)=wetgridunc(ix,jy,ks,kp,l,nage)
    259             end do
    260             call mean(auxgrid,wetgrid(ix,jy), &
    261                  wetgridsigma(ix,jy),nclassunc)
    262   ! Multiply by number of classes to get total concentration
    263             wetgrid(ix,jy)=wetgrid(ix,jy) &
    264                  *nclassunc
    265             wetgridtotal=wetgridtotal+wetgrid(ix,jy)
    266   ! Calculate standard deviation of the mean
    267             wetgridsigma(ix,jy)= &
    268                  wetgridsigma(ix,jy)* &
    269                  sqrt(real(nclassunc))
    270             wetgridsigmatotal=wetgridsigmatotal+ &
    271                  wetgridsigma(ix,jy)
     248
     249    do kp=1,maxpointspec_act
     250      do nage=1,nageclass
     251
     252        do jy=0,numygrid-1
     253          do ix=0,numxgrid-1
     254
     255! WET DEPOSITION
     256            if ((WETDEP).and.(ldirect.gt.0)) then
     257              do l=1,nclassunc
     258                auxgrid(l)=wetgridunc(ix,jy,ks,kp,l,nage)
     259              end do
     260              call mean(auxgrid,wetgrid(ix,jy), &
     261                   wetgridsigma(ix,jy),nclassunc)
     262! Multiply by number of classes to get total concentration
     263              wetgrid(ix,jy)=wetgrid(ix,jy) &
     264                   *nclassunc
     265              wetgridtotal=wetgridtotal+wetgrid(ix,jy)
     266! Calculate standard deviation of the mean
     267              wetgridsigma(ix,jy)= &
     268                   wetgridsigma(ix,jy)* &
     269                   sqrt(real(nclassunc))
     270              wetgridsigmatotal=wetgridsigmatotal+ &
     271                   wetgridsigma(ix,jy)
     272            endif
     273
     274! DRY DEPOSITION
     275            if ((DRYDEP).and.(ldirect.gt.0)) then
     276              do l=1,nclassunc
     277                auxgrid(l)=drygridunc(ix,jy,ks,kp,l,nage)
     278              end do
     279              call mean(auxgrid,drygrid(ix,jy), &
     280                   drygridsigma(ix,jy),nclassunc)
     281! Multiply by number of classes to get total concentration
     282              drygrid(ix,jy)=drygrid(ix,jy)* &
     283                   nclassunc
     284              drygridtotal=drygridtotal+drygrid(ix,jy)
     285! Calculate standard deviation of the mean
     286              drygridsigma(ix,jy)= &
     287                   drygridsigma(ix,jy)* &
     288                   sqrt(real(nclassunc))
     289125           drygridsigmatotal=drygridsigmatotal+ &
     290                   drygridsigma(ix,jy)
     291            endif
     292
     293! CONCENTRATION OR MIXING RATIO
     294            do kz=1,numzgrid
     295              do l=1,nclassunc
     296                auxgrid(l)=gridunc(ix,jy,kz,ks,kp,l,nage)
     297              end do
     298              call mean(auxgrid,grid(ix,jy,kz), &
     299                   gridsigma(ix,jy,kz),nclassunc)
     300! Multiply by number of classes to get total concentration
     301              grid(ix,jy,kz)= &
     302                   grid(ix,jy,kz)*nclassunc
     303              gridtotal=gridtotal+grid(ix,jy,kz)
     304! Calculate standard deviation of the mean
     305              gridsigma(ix,jy,kz)= &
     306                   gridsigma(ix,jy,kz)* &
     307                   sqrt(real(nclassunc))
     308              gridsigmatotal=gridsigmatotal+ &
     309                   gridsigma(ix,jy,kz)
     310            end do
     311          end do
     312        end do
     313
     314
     315!*******************************************************************
     316! Generate output: may be in concentration (ng/m3) or in mixing
     317! ratio (ppt) or both
     318! Output the position and the values alternated multiplied by
     319! 1 or -1, first line is number of values, number of positions
     320! For backward simulations, the unit is seconds, stored in grid_time
     321!*******************************************************************
     322
     323        if (verbosity.eq.1) then
     324          print*,'concoutput_surf 4 (output)'
     325          CALL SYSTEM_CLOCK(count_clock)
     326          WRITE(*,*) 'SYSTEM_CLOCK',count_clock - count_clock0   
    272327        endif
    273328
    274   ! DRY DEPOSITION
    275         if ((DRYDEP).and.(ldirect.gt.0)) then
    276             do l=1,nclassunc
    277               auxgrid(l)=drygridunc(ix,jy,ks,kp,l,nage)
    278             end do
    279             call mean(auxgrid,drygrid(ix,jy), &
    280                  drygridsigma(ix,jy),nclassunc)
    281   ! Multiply by number of classes to get total concentration
    282             drygrid(ix,jy)=drygrid(ix,jy)* &
    283                  nclassunc
    284             drygridtotal=drygridtotal+drygrid(ix,jy)
    285   ! Calculate standard deviation of the mean
    286             drygridsigma(ix,jy)= &
    287                  drygridsigma(ix,jy)* &
    288                  sqrt(real(nclassunc))
    289 125         drygridsigmatotal=drygridsigmatotal+ &
    290                  drygridsigma(ix,jy)
    291         endif
    292 
    293   ! CONCENTRATION OR MIXING RATIO
    294         do kz=1,numzgrid
    295             do l=1,nclassunc
    296               auxgrid(l)=gridunc(ix,jy,kz,ks,kp,l,nage)
    297             end do
    298             call mean(auxgrid,grid(ix,jy,kz), &
    299                  gridsigma(ix,jy,kz),nclassunc)
    300   ! Multiply by number of classes to get total concentration
    301             grid(ix,jy,kz)= &
    302                  grid(ix,jy,kz)*nclassunc
    303             gridtotal=gridtotal+grid(ix,jy,kz)
    304   ! Calculate standard deviation of the mean
    305             gridsigma(ix,jy,kz)= &
    306                  gridsigma(ix,jy,kz)* &
    307                  sqrt(real(nclassunc))
    308             gridsigmatotal=gridsigmatotal+ &
    309                  gridsigma(ix,jy,kz)
    310         end do
    311       end do
    312     end do
    313 
    314 
    315   !*******************************************************************
    316   ! Generate output: may be in concentration (ng/m3) or in mixing
    317   ! ratio (ppt) or both
    318   ! Output the position and the values alternated multiplied by
    319   ! 1 or -1, first line is number of values, number of positions
    320   ! For backward simulations, the unit is seconds, stored in grid_time
    321   !*******************************************************************
    322 
    323   if (verbosity.eq.1) then
    324      print*,'concoutput_surf 4 (output)'
    325      CALL SYSTEM_CLOCK(count_clock)
    326      WRITE(*,*) 'SYSTEM_CLOCK',count_clock - count_clock0   
    327   endif
    328 
    329   ! Concentration output
    330   !*********************
    331 
    332   if ((iout.eq.1).or.(iout.eq.3).or.(iout.eq.5)) then
    333 
    334   if (verbosity.eq.1) then
    335      print*,'concoutput_surf (Wet deposition)'
    336      CALL SYSTEM_CLOCK(count_clock)
    337      WRITE(*,*) 'SYSTEM_CLOCK',count_clock - count_clock0   
    338   endif
    339 
    340   ! Wet deposition
    341          sp_count_i=0
    342          sp_count_r=0
    343          sp_fact=-1.
    344          sp_zer=.true.
    345          if ((ldirect.eq.1).and.(WETDEP)) then
    346          do jy=0,numygrid-1
    347             do ix=0,numxgrid-1
     348   ! concentration greater than zero
    349               if (wetgrid(ix,jy).gt.smallnum) then
    350                  if (sp_zer.eqv..true.) then ! first non zero value
     329! Concentration output
     330!*********************
     331
     332        if ((iout.eq.1).or.(iout.eq.3).or.(iout.eq.5)) then
     333
     334          if (verbosity.eq.1) then
     335            print*,'concoutput_surf (Wet deposition)'
     336            CALL SYSTEM_CLOCK(count_clock)
     337            WRITE(*,*) 'SYSTEM_CLOCK',count_clock - count_clock0   
     338          endif
     339
     340! Wet deposition
     341          sp_count_i=0
     342          sp_count_r=0
     343          sp_fact=-1.
     344          sp_zer=.true.
     345          if ((ldirect.eq.1).and.(WETDEP)) then
     346            do jy=0,numygrid-1
     347              do ix=0,numxgrid-1
      348! concentration greater than zero
     349                if (wetgrid(ix,jy).gt.smallnum) then
     350                  if (sp_zer.eqv..true.) then ! first non zero value
    351351                    sp_count_i=sp_count_i+1
    352352                    sparse_dump_i(sp_count_i)=ix+jy*numxgrid
    353353                    sp_zer=.false.
    354354                    sp_fact=sp_fact*(-1.)
    355                  endif
    356                  sp_count_r=sp_count_r+1
    357                  sparse_dump_r(sp_count_r)= &
    358                       sp_fact*1.e12*wetgrid(ix,jy)/area(ix,jy)
    359                  sparse_dump_u(sp_count_r)= &
    360                       1.e12*wetgridsigma(ix,jy)/area(ix,jy)
    361               else ! concentration is zero
     355                  endif
     356                  sp_count_r=sp_count_r+1
     357                  sparse_dump_r(sp_count_r)= &
     358                       sp_fact*1.e12*wetgrid(ix,jy)/area(ix,jy)
     359                  sparse_dump_u(sp_count_r)= &
     360                       1.e12*wetgridsigma(ix,jy)/area(ix,jy)
     361                else ! concentration is zero
    362362                  sp_zer=.true.
    363               endif
    364             end do
    365          end do
    366          else
     363                endif
     364              end do
     365            end do
     366          else
    367367            sp_count_i=0
    368368            sp_count_r=0
    369          endif
    370          write(unitoutgrid) sp_count_i
    371          write(unitoutgrid) (sparse_dump_i(i),i=1,sp_count_i)
    372          write(unitoutgrid) sp_count_r
    373          write(unitoutgrid) (sparse_dump_r(i),i=1,sp_count_r)
     369          endif
     370          write(unitoutgrid) sp_count_i
     371          write(unitoutgrid) (sparse_dump_i(i),i=1,sp_count_i)
     372          write(unitoutgrid) sp_count_r
     373          write(unitoutgrid) (sparse_dump_r(i),i=1,sp_count_r)
    374374!         write(unitoutgrid) (sparse_dump_u(i),i=1,sp_count_r)
    375375
    376   if (verbosity.eq.1) then
    377      print*,'concoutput_surf (Dry deposition)'
    378      CALL SYSTEM_CLOCK(count_clock)
    379      WRITE(*,*) 'SYSTEM_CLOCK',count_clock - count_clock0   
    380   endif
    381   ! Dry deposition
    382          sp_count_i=0
    383          sp_count_r=0
    384          sp_fact=-1.
    385          sp_zer=.true.
    386          if ((ldirect.eq.1).and.(DRYDEP)) then
    387           do jy=0,numygrid-1
    388             do ix=0,numxgrid-1
    389               if (drygrid(ix,jy).gt.smallnum) then
    390                  if (sp_zer.eqv..true.) then ! first non zero value
     376          if (verbosity.eq.1) then
     377            print*,'concoutput_surf (Dry deposition)'
     378            CALL SYSTEM_CLOCK(count_clock)
     379            WRITE(*,*) 'SYSTEM_CLOCK',count_clock - count_clock0   
     380          endif
     381! Dry deposition
     382          sp_count_i=0
     383          sp_count_r=0
     384          sp_fact=-1.
     385          sp_zer=.true.
     386          if ((ldirect.eq.1).and.(DRYDEP)) then
     387            do jy=0,numygrid-1
     388              do ix=0,numxgrid-1
     389                if (drygrid(ix,jy).gt.smallnum) then
     390                  if (sp_zer.eqv..true.) then ! first non zero value
    391391                    sp_count_i=sp_count_i+1
    392392                    sparse_dump_i(sp_count_i)=ix+jy*numxgrid
    393393                    sp_zer=.false.
    394394                    sp_fact=sp_fact*(-1.)
    395                  endif
    396                  sp_count_r=sp_count_r+1
    397                  sparse_dump_r(sp_count_r)= &
    398                       sp_fact* &
    399                       1.e12*drygrid(ix,jy)/area(ix,jy)
     395                  endif
     396                  sp_count_r=sp_count_r+1
     397                  sparse_dump_r(sp_count_r)= &
     398                       sp_fact* &
     399                       1.e12*drygrid(ix,jy)/area(ix,jy)
    400400                  sparse_dump_u(sp_count_r)= &
    401                       1.e12*drygridsigma(ix,jy)/area(ix,jy)
    402               else ! concentration is zero
     401                       1.e12*drygridsigma(ix,jy)/area(ix,jy)
     402                else ! concentration is zero
    403403                  sp_zer=.true.
    404               endif
    405             end do
    406           end do
    407          else
     404                endif
     405              end do
     406            end do
     407          else
    408408            sp_count_i=0
    409409            sp_count_r=0
    410          endif
    411          write(unitoutgrid) sp_count_i
    412          write(unitoutgrid) (sparse_dump_i(i),i=1,sp_count_i)
    413          write(unitoutgrid) sp_count_r
    414          write(unitoutgrid) (sparse_dump_r(i),i=1,sp_count_r)
     410          endif
     411          write(unitoutgrid) sp_count_i
     412          write(unitoutgrid) (sparse_dump_i(i),i=1,sp_count_i)
     413          write(unitoutgrid) sp_count_r
     414          write(unitoutgrid) (sparse_dump_r(i),i=1,sp_count_r)
    415415!         write(unitoutgrid) (sparse_dump_u(i),i=1,sp_count_r)
    416416
    417   if (verbosity.eq.1) then
    418      print*,'concoutput_surf (Concentrations)'
    419      CALL SYSTEM_CLOCK(count_clock)
    420      WRITE(*,*) 'SYSTEM_CLOCK',count_clock - count_clock0   
    421   endif
    422 
    423   ! Concentrations
    424 
    425   ! surf_only write only 1st layer
    426 
    427          sp_count_i=0
    428          sp_count_r=0
    429          sp_fact=-1.
    430          sp_zer=.true.
     417          if (verbosity.eq.1) then
     418            print*,'concoutput_surf (Concentrations)'
     419            CALL SYSTEM_CLOCK(count_clock)
     420            WRITE(*,*) 'SYSTEM_CLOCK',count_clock - count_clock0   
     421          endif
     422
     423! Concentrations
     424
     425! surf_only write only 1st layer
     426
     427          sp_count_i=0
     428          sp_count_r=0
     429          sp_fact=-1.
     430          sp_zer=.true.
    431431          do kz=1,1
    432432            do jy=0,numygrid-1
     
    440440                    sp_fact=sp_fact*(-1.)
    441441                  endif
    442                    sp_count_r=sp_count_r+1
    443                    sparse_dump_r(sp_count_r)= &
    444                         sp_fact* &
    445                         grid(ix,jy,kz)* &
    446                         factor3d(ix,jy,kz)/tot_mu(ks,kp)
    447   !                 if ((factor(ix,jy,kz)/tot_mu(ks,kp)).eq.0)
    448   !    +              write (*,*) factor(ix,jy,kz),tot_mu(ks,kp),ks,kp
    449                    sparse_dump_u(sp_count_r)= &
    450                         gridsigma(ix,jy,kz)* &
    451                         factor3d(ix,jy,kz)/tot_mu(ks,kp)
     442                  sp_count_r=sp_count_r+1
     443                  sparse_dump_r(sp_count_r)= &
     444                       sp_fact* &
     445                       grid(ix,jy,kz)* &
     446                       factor3d(ix,jy,kz)/tot_mu(ks,kp)
     447!                 if ((factor(ix,jy,kz)/tot_mu(ks,kp)).eq.0)
     448!    +              write (*,*) factor(ix,jy,kz),tot_mu(ks,kp),ks,kp
     449                  sparse_dump_u(sp_count_r)= &
     450                       gridsigma(ix,jy,kz)* &
     451                       factor3d(ix,jy,kz)/tot_mu(ks,kp)
    452452                else ! concentration is zero
    453453                  sp_zer=.true.
    454454                endif
    455              end do
    456            end do
    457          end do
    458          write(unitoutgrid) sp_count_i
    459          write(unitoutgrid) (sparse_dump_i(i),i=1,sp_count_i)
    460          write(unitoutgrid) sp_count_r
    461          write(unitoutgrid) (sparse_dump_r(i),i=1,sp_count_r)
     455              end do
     456            end do
     457          end do
     458          write(unitoutgrid) sp_count_i
     459          write(unitoutgrid) (sparse_dump_i(i),i=1,sp_count_i)
     460          write(unitoutgrid) sp_count_r
     461          write(unitoutgrid) (sparse_dump_r(i),i=1,sp_count_r)
    462462!         write(unitoutgrid) (sparse_dump_u(i),i=1,sp_count_r)
    463463
    464   endif !  concentration output
    465 
    466   ! Mixing ratio output
    467   !********************
    468 
    469   if ((iout.eq.2).or.(iout.eq.3)) then      ! mixing ratio
    470 
    471   ! Wet deposition
    472          sp_count_i=0
    473          sp_count_r=0
    474          sp_fact=-1.
    475          sp_zer=.true.
    476          if ((ldirect.eq.1).and.(WETDEP)) then
    477           do jy=0,numygrid-1
    478             do ix=0,numxgrid-1
     464        endif !  concentration output
     465
     466! Mixing ratio output
     467!********************
     468
     469        if ((iout.eq.2).or.(iout.eq.3)) then      ! mixing ratio
     470
     471! Wet deposition
     472          sp_count_i=0
     473          sp_count_r=0
     474          sp_fact=-1.
     475          sp_zer=.true.
     476          if ((ldirect.eq.1).and.(WETDEP)) then
     477            do jy=0,numygrid-1
     478              do ix=0,numxgrid-1
    479479                if (wetgrid(ix,jy).gt.smallnum) then
    480480                  if (sp_zer.eqv..true.) then ! first non zero value
     
    484484                    sp_zer=.false.
    485485                    sp_fact=sp_fact*(-1.)
    486                  endif
    487                  sp_count_r=sp_count_r+1
    488                  sparse_dump_r(sp_count_r)= &
    489                       sp_fact* &
    490                       1.e12*wetgrid(ix,jy)/area(ix,jy)
    491                  sparse_dump_u(sp_count_r)= &
    492                       1.e12*wetgridsigma(ix,jy)/area(ix,jy)
    493               else ! concentration is zero
     486                  endif
     487                  sp_count_r=sp_count_r+1
     488                  sparse_dump_r(sp_count_r)= &
     489                       sp_fact* &
     490                       1.e12*wetgrid(ix,jy)/area(ix,jy)
     491                  sparse_dump_u(sp_count_r)= &
     492                       1.e12*wetgridsigma(ix,jy)/area(ix,jy)
     493                else ! concentration is zero
    494494                  sp_zer=.true.
    495               endif
    496             end do
    497           end do
    498          else
    499            sp_count_i=0
    500            sp_count_r=0
    501          endif
    502          write(unitoutgridppt) sp_count_i
    503          write(unitoutgridppt) (sparse_dump_i(i),i=1,sp_count_i)
    504          write(unitoutgridppt) sp_count_r
    505          write(unitoutgridppt) (sparse_dump_r(i),i=1,sp_count_r)
     495                endif
     496              end do
     497            end do
     498          else
     499            sp_count_i=0
     500            sp_count_r=0
     501          endif
     502          write(unitoutgridppt) sp_count_i
     503          write(unitoutgridppt) (sparse_dump_i(i),i=1,sp_count_i)
     504          write(unitoutgridppt) sp_count_r
     505          write(unitoutgridppt) (sparse_dump_r(i),i=1,sp_count_r)
    506506!         write(unitoutgridppt) (sparse_dump_u(i),i=1,sp_count_r)
    507507
    508508
    509   ! Dry deposition
    510          sp_count_i=0
    511          sp_count_r=0
    512          sp_fact=-1.
    513          sp_zer=.true.
    514          if ((ldirect.eq.1).and.(DRYDEP)) then
    515           do jy=0,numygrid-1
    516             do ix=0,numxgrid-1
     509! Dry deposition
     510          sp_count_i=0
     511          sp_count_r=0
     512          sp_fact=-1.
     513          sp_zer=.true.
     514          if ((ldirect.eq.1).and.(DRYDEP)) then
     515            do jy=0,numygrid-1
     516              do ix=0,numxgrid-1
    517517                if (drygrid(ix,jy).gt.smallnum) then
    518518                  if (sp_zer.eqv..true.) then ! first non zero value
     
    522522                    sp_zer=.false.
    523523                    sp_fact=sp_fact*(-1)
    524                  endif
    525                  sp_count_r=sp_count_r+1
    526                  sparse_dump_r(sp_count_r)= &
    527                       sp_fact* &
    528                       1.e12*drygrid(ix,jy)/area(ix,jy)
    529                  sparse_dump_u(sp_count_r)= &
    530                       1.e12*drygridsigma(ix,jy)/area(ix,jy)
    531               else ! concentration is zero
     524                  endif
     525                  sp_count_r=sp_count_r+1
     526                  sparse_dump_r(sp_count_r)= &
     527                       sp_fact* &
     528                       1.e12*drygrid(ix,jy)/area(ix,jy)
     529                  sparse_dump_u(sp_count_r)= &
     530                       1.e12*drygridsigma(ix,jy)/area(ix,jy)
     531                else ! concentration is zero
    532532                  sp_zer=.true.
    533               endif
    534             end do
    535           end do
    536          else
    537            sp_count_i=0
    538            sp_count_r=0
    539          endif
    540          write(unitoutgridppt) sp_count_i
    541          write(unitoutgridppt) (sparse_dump_i(i),i=1,sp_count_i)
    542          write(unitoutgridppt) sp_count_r
    543          write(unitoutgridppt) (sparse_dump_r(i),i=1,sp_count_r)
     533                endif
     534              end do
     535            end do
     536          else
     537            sp_count_i=0
     538            sp_count_r=0
     539          endif
     540          write(unitoutgridppt) sp_count_i
     541          write(unitoutgridppt) (sparse_dump_i(i),i=1,sp_count_i)
     542          write(unitoutgridppt) sp_count_r
     543          write(unitoutgridppt) (sparse_dump_r(i),i=1,sp_count_r)
    544544!         write(unitoutgridppt) (sparse_dump_u(i),i=1,sp_count_r)
    545545
    546546
    547   ! Mixing ratios
    548 
    549   ! surf_only write only 1st layer
    550 
    551          sp_count_i=0
    552          sp_count_r=0
    553          sp_fact=-1.
    554          sp_zer=.true.
     547! Mixing ratios
     548
     549! surf_only write only 1st layer
     550
     551          sp_count_i=0
     552          sp_count_r=0
     553          sp_fact=-1.
     554          sp_zer=.true.
    555555          do kz=1,1
    556556            do jy=0,numygrid-1
     
    563563                    sp_zer=.false.
    564564                    sp_fact=sp_fact*(-1.)
    565                  endif
    566                  sp_count_r=sp_count_r+1
    567                  sparse_dump_r(sp_count_r)= &
    568                       sp_fact* &
    569                       1.e12*grid(ix,jy,kz) &
    570                       /volume(ix,jy,kz)/outnum* &
    571                       weightair/weightmolar(ks)/densityoutgrid(ix,jy,kz)
    572                  sparse_dump_u(sp_count_r)= &
    573                       1.e12*gridsigma(ix,jy,kz)/volume(ix,jy,kz)/ &
    574                       outnum*weightair/weightmolar(ks)/ &
    575                       densityoutgrid(ix,jy,kz)
    576               else ! concentration is zero
     565                  endif
     566                  sp_count_r=sp_count_r+1
     567                  sparse_dump_r(sp_count_r)= &
     568                       sp_fact* &
     569                       1.e12*grid(ix,jy,kz) &
     570                       /volume(ix,jy,kz)/outnum* &
     571                       weightair/weightmolar(ks)/densityoutgrid(ix,jy,kz)
     572                  sparse_dump_u(sp_count_r)= &
     573                       1.e12*gridsigma(ix,jy,kz)/volume(ix,jy,kz)/ &
     574                       outnum*weightair/weightmolar(ks)/ &
     575                       densityoutgrid(ix,jy,kz)
     576                else ! concentration is zero
    577577                  sp_zer=.true.
    578               endif
     578                endif
    579579              end do
    580580            end do
    581581          end do
    582          write(unitoutgridppt) sp_count_i
    583          write(unitoutgridppt) (sparse_dump_i(i),i=1,sp_count_i)
    584          write(unitoutgridppt) sp_count_r
    585          write(unitoutgridppt) (sparse_dump_r(i),i=1,sp_count_r)
     582          write(unitoutgridppt) sp_count_i
     583          write(unitoutgridppt) (sparse_dump_i(i),i=1,sp_count_i)
     584          write(unitoutgridppt) sp_count_r
     585          write(unitoutgridppt) (sparse_dump_r(i),i=1,sp_count_r)
    586586!         write(unitoutgridppt) (sparse_dump_u(i),i=1,sp_count_r)
    587587
    588       endif ! output for ppt
    589 
    590   end do
    591   end do
     588        endif ! output for ppt
     589
     590      end do
     591    end do
    592592
    593593    close(unitoutgridppt)
     
    602602       drygridtotal
    603603
    604   ! Dump of receptor concentrations
    605 
    606     if (numreceptor.gt.0 .and. (iout.eq.2 .or. iout.eq.3)  ) then
    607       write(unitoutreceptppt) itime
    608       do ks=1,nspec
    609         write(unitoutreceptppt) (1.e12*creceptor(i,ks)/outnum* &
    610              weightair/weightmolar(ks)/densityoutrecept(i),i=1,numreceptor)
     604! Dump of receptor concentrations
     605
     606  if (numreceptor.gt.0 .and. (iout.eq.2 .or. iout.eq.3)  ) then
     607    write(unitoutreceptppt) itime
     608    do ks=1,nspec
     609      write(unitoutreceptppt) (1.e12*creceptor(i,ks)/outnum* &
     610           weightair/weightmolar(ks)/densityoutrecept(i),i=1,numreceptor)
     611    end do
     612  endif
     613
     614! Dump of receptor concentrations
     615
     616  if (numreceptor.gt.0) then
     617    write(unitoutrecept) itime
     618    do ks=1,nspec
     619      write(unitoutrecept) (1.e12*creceptor(i,ks)/outnum, &
     620           i=1,numreceptor)
     621    end do
     622  endif
     623
     624
     625
     626! Reinitialization of grid
     627!*************************
     628
     629  do ks=1,nspec
     630    do kp=1,maxpointspec_act
     631      do i=1,numreceptor
     632        creceptor(i,ks)=0.
    611633      end do
    612     endif
    613 
    614   ! Dump of receptor concentrations
    615 
    616     if (numreceptor.gt.0) then
    617       write(unitoutrecept) itime
    618       do ks=1,nspec
    619         write(unitoutrecept) (1.e12*creceptor(i,ks)/outnum, &
    620              i=1,numreceptor)
    621       end do
    622     endif
    623 
    624 
    625 
    626   ! Reinitialization of grid
    627   !*************************
    628 
    629   do ks=1,nspec
    630   do kp=1,maxpointspec_act
    631     do i=1,numreceptor
    632       creceptor(i,ks)=0.
    633     end do
    634     do jy=0,numygrid-1
    635       do ix=0,numxgrid-1
    636         do l=1,nclassunc
    637           do nage=1,nageclass
    638             do kz=1,numzgrid
    639               gridunc(ix,jy,kz,ks,kp,l,nage)=0.
     634      do jy=0,numygrid-1
     635        do ix=0,numxgrid-1
     636          do l=1,nclassunc
     637            do nage=1,nageclass
     638              do kz=1,numzgrid
     639                gridunc(ix,jy,kz,ks,kp,l,nage)=0.
     640              end do
    640641            end do
    641642          end do
     
    644645    end do
    645646  end do
    646   end do
    647647
    648648
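
The deposition and concentration dumps above all use the sparse format described in the header comment: only values above smallnum are written, each contiguous run of non-zero cells contributes one start position to sparse_dump_i, and the sign of the stored values flips from one run to the next so a reader can reconstruct the cell positions. The program below is a stand-alone sketch of that encoding on a small one-dimensional field; the field, threshold and array sizes are invented for illustration.

    program sparse_dump_demo
      implicit none
      ! Encode a 1-D field in the run-based sparse format used above.
      integer, parameter :: n = 10
      real, parameter :: smallnum = 1.e-30          ! placeholder threshold
      real :: field(n) = (/0.,0.,3.,4.,0.,5.,0.,0.,6.,7./)
      integer :: sparse_i(n), sp_count_i, sp_count_r, i
      real :: sparse_r(n), sp_fact
      logical :: sp_zer

      sp_count_i = 0
      sp_count_r = 0
      sp_fact    = -1.
      sp_zer     = .true.
      do i = 1, n
        if (field(i) .gt. smallnum) then
          if (sp_zer) then                 ! first non-zero value of a new run
            sp_count_i = sp_count_i + 1
            sparse_i(sp_count_i) = i       ! store the run's start position
            sp_zer  = .false.
            sp_fact = -sp_fact             ! flip the sign for this run
          endif
          sp_count_r = sp_count_r + 1
          sparse_r(sp_count_r) = sp_fact*field(i)
        else
          sp_zer = .true.
        endif
      end do

      write(*,*) 'positions:', sparse_i(1:sp_count_i)   ! 3 6 9
      write(*,*) 'values   :', sparse_r(1:sp_count_r)   ! 3. 4. -5. 6. 7.
    end program sparse_dump_demo
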
  • src/drydepokernel.f90

    re200b7a r4c64400  
    4040  !                                                                            *
    4141  !*****************************************************************************
     42  ! Changes:
     43  ! eso 10/2016: Added option to disregard kernel
     44  !
     45  !*****************************************************************************
     46
    4247
    4348  use unc_mod
     
    4752  implicit none
    4853
    49   real :: x,y,deposit(maxspec),ddx,ddy,xl,yl,wx,wy,w
     54  real(dep_prec), dimension(maxspec) :: deposit
     55  real :: x,y,ddx,ddy,xl,yl,wx,wy,w
    5056  integer :: ix,jy,ixp,jyp,ks,nunc,nage,kp
    5157
     
    7480  endif
    7581
     82  ! If no kernel is used, direct attribution to grid cell
     83  !******************************************************
     84
     85  if (lnokernel) then
     86    do ks=1,nspec
     87      if ((abs(deposit(ks)).gt.0).and.DRYDEPSPEC(ks)) then
     88        if ((ix.ge.0).and.(jy.ge.0).and.(ix.le.numxgrid-1).and. &
     89             (jy.le.numygrid-1)) then
     90          drygridunc(ix,jy,ks,kp,nunc,nage)= &
     91               drygridunc(ix,jy,ks,kp,nunc,nage)+deposit(ks)
     92        end if
     93      end if
     94    end do
     95  else ! use kernel
     96
    7697
    7798  ! Determine mass fractions for four grid points
    7899  !**********************************************
    79     do ks=1,nspec
     100  do ks=1,nspec
    80101
    81     if ((abs(deposit(ks)).gt.0).and.DRYDEPSPEC(ks)) then
     102   if ((abs(deposit(ks)).gt.0).and.DRYDEPSPEC(ks)) then
    82103
    83104   if ((ix.ge.0).and.(jy.ge.0).and.(ix.le.numxgrid-1).and. &
    84105        (jy.le.numygrid-1)) then
    85     w=wx*wy
    86       drygridunc(ix,jy,ks,kp,nunc,nage)= &
    87            drygridunc(ix,jy,ks,kp,nunc,nage)+deposit(ks)*w
    88       continue
    89   endif
     106     w=wx*wy
     107     drygridunc(ix,jy,ks,kp,nunc,nage)= &
     108          drygridunc(ix,jy,ks,kp,nunc,nage)+deposit(ks)*w
     109     continue
     110   endif
    90111
    91112  if ((ixp.ge.0).and.(jyp.ge.0).and.(ixp.le.numxgrid-1).and. &
     
    112133  endif
    113134
    114     end do
     135  end do
     136end if
    115137
    116138end subroutine drydepokernel
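
The lnokernel branch added above attributes the whole deposit to the grid cell containing the particle, while the original path splits it over the four surrounding cells using the kernel weights wx and wy. The following standalone sketch is not part of the changeset; the position values are hypothetical and the edge handling of the real routine is omitted, but it illustrates how such uniform kernel weights are formed and that the four resulting weights sum to one:

    program kernel_weights_sketch
      implicit none
      real :: xl, yl, ddx, ddy, wx, wy
      integer :: ix, jy, ixp, jyp

      xl = 10.3; yl = 4.8            ! particle position in output-grid coordinates (hypothetical)
      ix = int(xl); jy = int(yl)     ! cell containing the particle
      ddx = xl - real(ix)            ! fractional position inside that cell
      ddy = yl - real(jy)
      if (ddx.gt.0.5) then           ! neighbouring column and the weight kept by column ix
        ixp = ix + 1; wx = 1.5 - ddx
      else
        ixp = ix - 1; wx = 0.5 + ddx
      endif
      if (ddy.gt.0.5) then           ! same for the rows
        jyp = jy + 1; wy = 1.5 - ddy
      else
        jyp = jy - 1; wy = 0.5 + ddy
      endif
      ! with lnokernel=.true. the whole deposit goes to cell (ix,jy); otherwise it is
      ! split with weights wx*wy, (1.-wx)*wy, wx*(1.-wy) and (1.-wx)*(1.-wy)
      print *, wx*wy + (1.-wx)*wy + wx*(1.-wy) + (1.-wx)*(1.-wy)   ! prints 1.0
    end program kernel_weights_sketch

Skipping the kernel trades its smoothing effect for strictly cell-local attribution of the deposit.
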
  • src/drydepokernel_nest.f90

    re200b7a r4c64400  
    5050  implicit none
    5151
    52   real :: x,y,deposit(maxspec),ddx,ddy,xl,yl,wx,wy,w
     52  real(dep_prec), dimension(maxspec) :: deposit
     53  real :: x,y,ddx,ddy,xl,yl,wx,wy,w
    5354  integer :: ix,jy,ixp,jyp,ks,kp,nunc,nage
    5455
  • src/gfs_mod.f90

    r62e65c7 r4c64400  
    4242  !*********************************************
    4343
    44   integer,parameter :: nxmax=721,nymax=361,nuvzmax=64,nwzmax=64,nzmax=64
     44!  integer,parameter :: nxmax=721,nymax=361,nuvzmax=64,nwzmax=64,nzmax=64
     45  integer,parameter :: nxmax=361,nymax=181,nuvzmax=64,nwzmax=64,nzmax=64
    4546  integer,parameter :: nxshift=0     ! for GFS or FNL
    4647
  • src/gridcheck_gfs.f90

    r5f9d14a r4c64400  
    421421  !********************
    422422
    423   write(*,*)
    424   write(*,*)
    425   write(*,'(a,2i7)') 'Vertical levels in NCEP data: ', &
    426        nuvz,nwz
    427   write(*,*)
    428   write(*,'(a)') 'Mother domain:'
    429   write(*,'(a,f10.2,a1,f10.2,a,f10.2)') '  Longitude range: ', &
    430        xlon0,' to ',xlon0+(nx-1)*dx,'   Grid distance: ',dx
    431   write(*,'(a,f10.2,a1,f10.2,a,f10.2)') '  Latitude range : ', &
    432        ylat0,' to ',ylat0+(ny-1)*dy,'   Grid distance: ',dy
    433   write(*,*)
    434 
     423  if (lroot) then
     424    write(*,*)
     425    write(*,*)
     426    write(*,'(a,2i7)') 'Vertical levels in NCEP data: ', &
     427         nuvz,nwz
     428    write(*,*)
     429    write(*,'(a)') 'Mother domain:'
     430    write(*,'(a,f10.2,a1,f10.2,a,f10.2)') '  Longitude range: ', &
     431         xlon0,' to ',xlon0+(nx-1)*dx,'   Grid distance: ',dx
     432    write(*,'(a,f10.2,a1,f10.2,a,f10.2)') '  Latitude range : ', &
     433         ylat0,' to ',ylat0+(ny-1)*dy,'   Grid distance: ',dy
     434    write(*,*)
     435  end if
    435436
    436437  ! CALCULATE VERTICAL DISCRETIZATION OF ECMWF MODEL
  • src/init_domainfill_mpi.f90

    r0f7835d r16b61a5  
    4747!                                                                            *
    4848!*****************************************************************************
     49! MPI version:
     50!
     51!   -Root process allocates temporary arrays holding properties for
     52!    all particles in the simulation
     53!   -An index array is used to assign 1st particle to 1st process, 2nd particle
      54!    to 2nd process and so on so that they are evenly distributed geographically
      55!   -Initialization for domain-filling is done as in the serial code
     56!   -Root process distributes particles evenly to other processes
     57!
     58!*****************************************************************************
    4959
    5060  use point_mod
     
    5666  implicit none
    5767
    58   integer :: j,ix,jy,kz,ncolumn,numparttot
     68! ncolumn_mpi,numparttot_mpi        ncolumn,numparttot per process
     69  integer :: j,ix,jy,kz,ncolumn,numparttot,ncolumn_mpi,numparttot_mpi, arr_size
    5970  real :: gridarea(0:nymax-1),pp(nzmax),ylat,ylatp,ylatm,hzone
    6071  real :: cosfactm,cosfactp,deltacol,dz1,dz2,dz,pnew,fractus
     
    6677
    6778  integer :: idummy = -11
     79  integer,allocatable,dimension(:) :: idx ! index array
     80  integer :: stride
     81  integer, parameter :: nullsize=0
    6882  logical :: first_call=.true.
    6983
    70   ! Use different seed for each process
     84! Use different seed for each process ! TODO: not needed anymore
    7185  if (first_call) then
    7286    idummy=idummy+mp_seed
     
    102116
    103117
     118! This section only done by the root process
     119!*******************************************
     120  if (lroot) then
     121! Arrays for particles to be released. Add a small number to npart(1) in case of
     122! round-off errors
     123    arr_size = npart(1) + mp_np
     124    allocate(itra1_tmp(arr_size),npoint_tmp(arr_size),nclass_tmp(arr_size),&
     125         & idt_tmp(arr_size),itramem_tmp(arr_size),itrasplit_tmp(arr_size),&
     126         & xtra1_tmp(arr_size),ytra1_tmp(arr_size),ztra1_tmp(arr_size),&
     127         & xmass1_tmp(arr_size, maxspec))
     128
     129! Index array for particles. This is to avoid having particles
     130! near edges of domain all on one process.
     131!****************************************************************************
     132    allocate(idx(npart(1)))
     133    stride = npart(1)/mp_partgroup_np
     134
     135    jj=0
     136    do j=1, stride
     137      do i=0, mp_partgroup_np-1
     138        jj = jj+1
     139        if (jj.gt.npart(1)) exit
     140        idx(jj) = i*stride+j
     141      end do
     142    end do
     143
     144! Add extra indices if npart(1) not evenly divisible by number of processes
     145    do i=1, mod(npart(1),mp_partgroup_np)
     146      jj = jj+1
     147      if (jj.gt.npart(1)) exit
     148      idx(jj) = jj
     149    end do
     150
     151! Initialize all particles as non-existent   
     152    itra1_tmp(:)=-999999999
     153
    104154! Calculate area of grid cell with formula M=2*pi*R*h*dx/360,
    105155! see Netz, Formeln der Mathematik, 5. Auflage (1983), p.90
    106156!************************************************************
    107157
    108   do jy=ny_sn(1),ny_sn(2)      ! loop about latitudes
    109     ylat=ylat0+real(jy)*dy
    110     ylatp=ylat+0.5*dy
    111     ylatm=ylat-0.5*dy
    112     if ((ylatm.lt.0).and.(ylatp.gt.0.)) then
    113       hzone=1./dyconst
    114     else
     158    do jy=ny_sn(1),ny_sn(2)      ! loop about latitudes
     159      ylat=ylat0+real(jy)*dy
     160      ylatp=ylat+0.5*dy
     161      ylatm=ylat-0.5*dy
     162      if ((ylatm.lt.0).and.(ylatp.gt.0.)) then
     163        hzone=1./dyconst
     164      else
     165        cosfactp=cos(ylatp*pih)*r_earth
     166        cosfactm=cos(ylatm*pih)*r_earth
     167        if (cosfactp.lt.cosfactm) then
     168          hzone=sqrt(r_earth**2-cosfactp**2)- &
     169               sqrt(r_earth**2-cosfactm**2)
     170        else
     171          hzone=sqrt(r_earth**2-cosfactm**2)- &
     172               sqrt(r_earth**2-cosfactp**2)
     173        endif
     174      endif
     175      gridarea(jy)=2.*pi*r_earth*hzone*dx/360.
     176    end do
     177
     178! Do the same for the south pole
     179
     180    if (sglobal) then
     181      ylat=ylat0
     182      ylatp=ylat+0.5*dy
     183      ylatm=ylat
     184      cosfactm=0.
    115185      cosfactp=cos(ylatp*pih)*r_earth
     186      hzone=sqrt(r_earth**2-cosfactm**2)- &
     187           sqrt(r_earth**2-cosfactp**2)
     188      gridarea(0)=2.*pi*r_earth*hzone*dx/360.
     189    endif
     190
     191! Do the same for the north pole
     192
     193    if (nglobal) then
     194      ylat=ylat0+real(nymin1)*dy
     195      ylatp=ylat
     196      ylatm=ylat-0.5*dy
     197      cosfactp=0.
    116198      cosfactm=cos(ylatm*pih)*r_earth
    117       if (cosfactp.lt.cosfactm) then
    118         hzone=sqrt(r_earth**2-cosfactp**2)- &
    119              sqrt(r_earth**2-cosfactm**2)
    120       else
    121         hzone=sqrt(r_earth**2-cosfactm**2)- &
    122              sqrt(r_earth**2-cosfactp**2)
    123       endif
     199      hzone=sqrt(r_earth**2-cosfactp**2)- &
     200           sqrt(r_earth**2-cosfactm**2)
     201      gridarea(nymin1)=2.*pi*r_earth*hzone*dx/360.
    124202    endif
    125     gridarea(jy)=2.*pi*r_earth*hzone*dx/360.
    126   end do
    127 
    128 ! Do the same for the south pole
    129 
    130   if (sglobal) then
    131     ylat=ylat0
    132     ylatp=ylat+0.5*dy
    133     ylatm=ylat
    134     cosfactm=0.
    135     cosfactp=cos(ylatp*pih)*r_earth
    136     hzone=sqrt(r_earth**2-cosfactm**2)- &
    137          sqrt(r_earth**2-cosfactp**2)
    138     gridarea(0)=2.*pi*r_earth*hzone*dx/360.
    139   endif
    140 
    141 ! Do the same for the north pole
    142 
    143   if (nglobal) then
    144     ylat=ylat0+real(nymin1)*dy
    145     ylatp=ylat
    146     ylatm=ylat-0.5*dy
    147     cosfactp=0.
    148     cosfactm=cos(ylatm*pih)*r_earth
    149     hzone=sqrt(r_earth**2-cosfactp**2)- &
    150          sqrt(r_earth**2-cosfactm**2)
    151     gridarea(nymin1)=2.*pi*r_earth*hzone*dx/360.
    152   endif
    153203
    154204
     
    156206!*********************************************************************
    157207
    158   colmasstotal=0.
    159   do jy=ny_sn(1),ny_sn(2)          ! loop about latitudes
    160     do ix=nx_we(1),nx_we(2)      ! loop about longitudes
    161       pp(1)=rho(ix,jy,1,1)*r_air*tt(ix,jy,1,1)
    162       pp(nz)=rho(ix,jy,nz,1)*r_air*tt(ix,jy,nz,1)
    163       ! Each MPI process is assigned an equal share of particles
    164       colmass(ix,jy)=(pp(1)-pp(nz))/ga*gridarea(jy)/mp_partgroup_np
    165       colmasstotal=colmasstotal+colmass(ix,jy)
    166 
    167     end do
    168   end do
    169 
    170   if (lroot) write(*,*) 'Atm. mass: ',colmasstotal
    171 
    172 
    173   if (ipin.eq.0) numpart=0
     208    colmasstotal=0.
     209    do jy=ny_sn(1),ny_sn(2)          ! loop about latitudes
     210      do ix=nx_we(1),nx_we(2)      ! loop about longitudes
     211        pp(1)=rho(ix,jy,1,1)*r_air*tt(ix,jy,1,1)
     212        pp(nz)=rho(ix,jy,nz,1)*r_air*tt(ix,jy,nz,1)
     213        colmass(ix,jy)=(pp(1)-pp(nz))/ga*gridarea(jy)
     214        colmasstotal=colmasstotal+colmass(ix,jy)
     215
     216      end do
     217    end do
     218
     219    write(*,*) 'Atm. mass: ',colmasstotal
     220
     221
     222    if (ipin.eq.0) numpart=0
    174223
    175224! Determine the particle positions
    176225!*********************************
    177226
    178   numparttot=0
    179   numcolumn=0
    180   do jy=ny_sn(1),ny_sn(2)      ! loop about latitudes
    181     ylat=ylat0+real(jy)*dy
    182     do ix=nx_we(1),nx_we(2)      ! loop about longitudes
    183       ncolumn=nint(0.999*real(npart(1)/mp_partgroup_np)*colmass(ix,jy)/ &
    184            colmasstotal)
    185       if (ncolumn.eq.0) goto 30
    186       if (ncolumn.gt.numcolumn) numcolumn=ncolumn
     227    numparttot=0
     228    numcolumn=0
     229    do jy=ny_sn(1),ny_sn(2)      ! loop about latitudes
     230      ylat=ylat0+real(jy)*dy
     231      do ix=nx_we(1),nx_we(2)      ! loop about longitudes
     232        ncolumn=nint(0.999*real(npart(1))*colmass(ix,jy)/ &
     233             colmasstotal)
     234        if (ncolumn.eq.0) goto 30
     235        if (ncolumn.gt.numcolumn) numcolumn=ncolumn
    187236
    188237! Calculate pressure at the altitudes of model surfaces, using the air density
     
    190239!*****************************************************************************
    191240
    192       do kz=1,nz
    193         pp(kz)=rho(ix,jy,kz,1)*r_air*tt(ix,jy,kz,1)
    194       end do
    195 
    196 
    197       deltacol=(pp(1)-pp(nz))/real(ncolumn)
    198       pnew=pp(1)+deltacol/2.
    199       jj=0
    200       do j=1,ncolumn
    201         jj=jj+1
     241        do kz=1,nz
     242          pp(kz)=rho(ix,jy,kz,1)*r_air*tt(ix,jy,kz,1)
     243        end do
     244
     245
     246        deltacol=(pp(1)-pp(nz))/real(ncolumn)
     247        pnew=pp(1)+deltacol/2.
     248        jj=0
     249        do j=1,ncolumn
     250          jj=jj+1
    202251
    203252
     
    208257
    209258
    210         if (ncolumn.gt.20) then
    211           pnew=pnew-deltacol
    212         else
    213           pnew=pp(1)-ran1(idummy)*(pp(1)-pp(nz))
    214         endif
    215 
    216         do kz=1,nz-1
    217           if ((pp(kz).ge.pnew).and.(pp(kz+1).lt.pnew)) then
    218             dz1=pp(kz)-pnew
    219             dz2=pnew-pp(kz+1)
    220             dz=1./(dz1+dz2)
     259          if (ncolumn.gt.20) then
     260            pnew=pnew-deltacol
     261          else
     262            pnew=pp(1)-ran1(idummy)*(pp(1)-pp(nz))
     263          endif
     264
     265          do kz=1,nz-1
     266            if ((pp(kz).ge.pnew).and.(pp(kz+1).lt.pnew)) then
     267              dz1=pp(kz)-pnew
     268              dz2=pnew-pp(kz+1)
     269              dz=1./(dz1+dz2)
    221270
    222271! Assign particle position
     
    224273! Do the following steps only if particles are not read in from previous model run
    225274!*****************************************************************************
    226             if (ipin.eq.0) then
    227               xtra1(numpart+jj)=real(ix)-0.5+ran1(idummy)
    228               if (ix.eq.0) xtra1(numpart+jj)=ran1(idummy)
    229               if (ix.eq.nxmin1) xtra1(numpart+jj)= &
    230                    real(nxmin1)-ran1(idummy)
    231               ytra1(numpart+jj)=real(jy)-0.5+ran1(idummy)
    232               ztra1(numpart+jj)=(height(kz)*dz2+height(kz+1)*dz1)*dz
    233               if (ztra1(numpart+jj).gt.height(nz)-0.5) &
    234                    ztra1(numpart+jj)=height(nz)-0.5
     275              if (ipin.eq.0) then
     276                xtra1_tmp(idx(numpart+jj))=real(ix)-0.5+ran1(idummy)
     277                if (ix.eq.0) xtra1_tmp(idx(numpart+jj))=ran1(idummy)
     278                if (ix.eq.nxmin1) xtra1_tmp(idx(numpart+jj))= &
     279                     real(nxmin1)-ran1(idummy)
     280                ytra1_tmp(idx(numpart+jj))=real(jy)-0.5+ran1(idummy)
     281                ztra1_tmp(idx(numpart+jj))=(height(kz)*dz2+height(kz+1)*dz1)*dz
     282                if (ztra1_tmp(idx(numpart+jj)).gt.height(nz)-0.5) &
     283                     ztra1_tmp(idx(numpart+jj))=height(nz)-0.5
    235284
    236285
    237286! Interpolate PV to the particle position
    238287!****************************************
    239               ixm=int(xtra1(numpart+jj))
    240               jym=int(ytra1(numpart+jj))
    241               ixp=ixm+1
    242               jyp=jym+1
    243               ddx=xtra1(numpart+jj)-real(ixm)
    244               ddy=ytra1(numpart+jj)-real(jym)
    245               rddx=1.-ddx
    246               rddy=1.-ddy
    247               p1=rddx*rddy
    248               p2=ddx*rddy
    249               p3=rddx*ddy
    250               p4=ddx*ddy
    251               do i=2,nz
    252                 if (height(i).gt.ztra1(numpart+jj)) then
    253                   indzm=i-1
    254                   indzp=i
    255                   goto 6
    256                 endif
    257               end do
    258 6             continue
    259               dz1=ztra1(numpart+jj)-height(indzm)
    260               dz2=height(indzp)-ztra1(numpart+jj)
    261               dz=1./(dz1+dz2)
    262               do in=1,2
    263                 indzh=indzm+in-1
    264                 y1(in)=p1*pv(ixm,jym,indzh,1) &
    265                      +p2*pv(ixp,jym,indzh,1) &
    266                      +p3*pv(ixm,jyp,indzh,1) &
    267                      +p4*pv(ixp,jyp,indzh,1)
    268               end do
    269               pvpart=(dz2*y1(1)+dz1*y1(2))*dz
    270               if (ylat.lt.0.) pvpart=-1.*pvpart
     288                ixm=int(xtra1_tmp(idx(numpart+jj)))
     289                jym=int(ytra1_tmp(idx(numpart+jj)))
     290                ixp=ixm+1
     291                jyp=jym+1
     292                ddx=xtra1_tmp(idx(numpart+jj))-real(ixm)
     293                ddy=ytra1_tmp(idx(numpart+jj))-real(jym)
     294                rddx=1.-ddx
     295                rddy=1.-ddy
     296                p1=rddx*rddy
     297                p2=ddx*rddy
     298                p3=rddx*ddy
     299                p4=ddx*ddy
     300                do i=2,nz
     301                  if (height(i).gt.ztra1_tmp(idx(numpart+jj))) then
     302                    indzm=i-1
     303                    indzp=i
     304                    goto 6
     305                  endif
     306                end do
     3076               continue
     308                dz1=ztra1_tmp(idx(numpart+jj))-height(indzm)
     309                dz2=height(indzp)-ztra1_tmp(idx(numpart+jj))
     310                dz=1./(dz1+dz2)
     311                do in=1,2
     312                  indzh=indzm+in-1
     313                  y1(in)=p1*pv(ixm,jym,indzh,1) &
     314                       +p2*pv(ixp,jym,indzh,1) &
     315                       +p3*pv(ixm,jyp,indzh,1) &
     316                       +p4*pv(ixp,jyp,indzh,1)
     317                end do
     318                pvpart=(dz2*y1(1)+dz1*y1(2))*dz
     319                if (ylat.lt.0.) pvpart=-1.*pvpart
    271320
    272321
     
    274323!*****************************************************************************
    275324
    276               if (((ztra1(numpart+jj).gt.3000.).and. &
    277                    (pvpart.gt.pvcrit)).or.(mdomainfill.eq.1)) then
     325                if (((ztra1_tmp(idx(numpart+jj)).gt.3000.).and. &
     326                     (pvpart.gt.pvcrit)).or.(mdomainfill.eq.1)) then
    278327
    279328! Assign certain properties to the particle
    280329!******************************************
    281                 nclass(numpart+jj)=min(int(ran1(idummy)* &
    282                      real(nclassunc))+1,nclassunc)
    283                 numparticlecount=numparticlecount+1
    284                 npoint(numpart+jj)=numparticlecount
    285                 idt(numpart+jj)=mintime
    286                 itra1(numpart+jj)=0
    287                 itramem(numpart+jj)=0
    288                 itrasplit(numpart+jj)=itra1(numpart+jj)+ldirect* &
    289                      itsplit
    290                 xmass1(numpart+jj,1)=colmass(ix,jy)/real(ncolumn)
    291                 if (mdomainfill.eq.2) xmass1(numpart+jj,1)= &
    292                      xmass1(numpart+jj,1)*pvpart*48./29.*ozonescale/10.**9
    293               else
    294                 jj=jj-1
     330                  nclass_tmp(idx(numpart+jj))=min(int(ran1(idummy)* &
     331                       real(nclassunc))+1,nclassunc)
     332                  numparticlecount=numparticlecount+1
     333                  npoint_tmp(idx(numpart+jj))=numparticlecount
     334                  idt_tmp(idx(numpart+jj))=mintime
     335                  itra1_tmp(idx(numpart+jj))=0
     336                  itramem_tmp(idx(numpart+jj))=0
     337                  itrasplit_tmp(idx(numpart+jj))=itra1_tmp(idx(numpart+jj))+ldirect* &
     338                       itsplit
     339                  xmass1_tmp(idx(numpart+jj),1)=colmass(ix,jy)/real(ncolumn)
     340                  if (mdomainfill.eq.2) xmass1_tmp(idx(numpart+jj),1)= &
     341                       xmass1_tmp(idx(numpart+jj),1)*pvpart*48./29.*ozonescale/10.**9
     342                else
     343                  jj=jj-1
     344                endif
    295345              endif
    296346            endif
    297           endif
     347          end do
    298348        end do
     349        numparttot=numparttot+ncolumn
     350        if (ipin.eq.0) numpart=numpart+jj
     35130      continue
    299352      end do
    300       numparttot=numparttot+ncolumn
    301       if (ipin.eq.0) numpart=numpart+jj
    302 30    continue
    303     end do
    304   end do
    305 
    306 
    307 ! Check whether numpart is really smaller than maxpart per process
    308 !*****************************************************************
    309 
    310   if (numpart.gt.maxpart_mpi) then
    311     write(*,*) 'numpart too large: change source in init_atm_mass.f'
    312     write(*,*) 'numpart: ',numpart,' maxpart: ',maxpart_mpi
    313   endif
    314 
    315 
    316   xmassperparticle=colmasstotal/real(numparttot)
     353    end do
     354
     355
     356! Check whether numpart is really smaller than maxpart
     357!*****************************************************
     358
      359! ESO :TODO: this warning needs to be moved further up, else out-of-bounds error earlier
     360    if (numpart.gt.maxpart) then
     361      write(*,*) 'numpart too large: change source in init_atm_mass.f'
     362      write(*,*) 'numpart: ',numpart,' maxpart: ',maxpart
     363    endif
     364
     365
     366    xmassperparticle=colmasstotal/real(numparttot)
    317367
    318368
     
    320370!***********************************************
    321371
    322   do j=1,numpart
    323     if ((xtra1(j).lt.0.).or.(xtra1(j).ge.real(nxmin1)).or. &
    324          (ytra1(j).lt.0.).or.(ytra1(j).ge.real(nymin1))) then
    325       itra1(j)=-999999999
    326     endif
    327   end do
     372!    do j=1,numpart
     373    do j=1,npoint(1)
     374      if ((xtra1_tmp(j).lt.0.).or.(xtra1_tmp(j).ge.real(nxmin1)).or. &
     375           (ytra1_tmp(j).lt.0.).or.(ytra1_tmp(j).ge.real(nymin1))) then
     376        itra1_tmp(j)=-999999999
     377      endif
     378    end do
    328379
    329380
     
    340391!****************************************************************************
    341392
    342   fractus=real(numcolumn)/real(nz)
    343   write(*,*) 'Total number of particles at model start: ',numpart*mp_partgroup_np
    344   write(*,*) 'Maximum number of particles per column: ',numcolumn*mp_partgroup_np
    345   write(*,*) 'If ',fractus*mp_partgroup_np,' <1, better use more particles'
    346   fractus=sqrt(max(fractus,1.))/2.
    347 
    348   do jy=ny_sn(1),ny_sn(2)      ! loop about latitudes
    349     do ix=nx_we(1),nx_we(2)      ! loop about longitudes
    350       ncolumn=nint(0.999/fractus*real(npart(1)/mp_partgroup_np)*colmass(ix,jy) &
    351            /colmasstotal)
    352       if (ncolumn.gt.maxcolumn) stop 'maxcolumn too small'
    353       if (ncolumn.eq.0) goto 80
     393    fractus=real(numcolumn)/real(nz)
     394    write(*,*) 'Total number of particles at model start: ',numpart
     395    write(*,*) 'Maximum number of particles per column: ',numcolumn
     396    write(*,*) 'If ',fractus,' <1, better use more particles'
     397    fractus=sqrt(max(fractus,1.))/2.
     398
     399    do jy=ny_sn(1),ny_sn(2)      ! loop about latitudes
     400      do ix=nx_we(1),nx_we(2)      ! loop about longitudes
     401        ncolumn=nint(0.999/fractus*real(npart(1))*colmass(ix,jy) &
     402             /colmasstotal)
     403        if (ncolumn.gt.maxcolumn) stop 'maxcolumn too small'
     404        if (ncolumn.eq.0) goto 80
    354405
    355406
     
    359410!************************************************************************
    360411
    361       if (ix.eq.nx_we(1)) numcolumn_we(1,jy)=ncolumn
    362       if (ix.eq.nx_we(2)) numcolumn_we(2,jy)=ncolumn
    363       if (jy.eq.ny_sn(1)) numcolumn_sn(1,ix)=ncolumn
    364       if (jy.eq.ny_sn(2)) numcolumn_sn(2,ix)=ncolumn
     412        if (ix.eq.nx_we(1)) numcolumn_we(1,jy)=ncolumn
     413        if (ix.eq.nx_we(2)) numcolumn_we(2,jy)=ncolumn
     414        if (jy.eq.ny_sn(1)) numcolumn_sn(1,ix)=ncolumn
     415        if (jy.eq.ny_sn(2)) numcolumn_sn(2,ix)=ncolumn
    365416
    366417! Calculate pressure at the altitudes of model surfaces, using the air density
     
    368419!*****************************************************************************
    369420
    370       do kz=1,nz
    371         pp(kz)=rho(ix,jy,kz,1)*r_air*tt(ix,jy,kz,1)
    372       end do
     421        do kz=1,nz
     422          pp(kz)=rho(ix,jy,kz,1)*r_air*tt(ix,jy,kz,1)
     423        end do
    373424
    374425! Determine the reference starting altitudes
    375426!*******************************************
    376427
    377       deltacol=(pp(1)-pp(nz))/real(ncolumn)
    378       pnew=pp(1)+deltacol/2.
    379       do j=1,ncolumn
    380         pnew=pnew-deltacol
    381         do kz=1,nz-1
    382           if ((pp(kz).ge.pnew).and.(pp(kz+1).lt.pnew)) then
    383             dz1=pp(kz)-pnew
    384             dz2=pnew-pp(kz+1)
    385             dz=1./(dz1+dz2)
    386             zposition=(height(kz)*dz2+height(kz+1)*dz1)*dz
    387             if (zposition.gt.height(nz)-0.5) zposition=height(nz)-0.5
     428        deltacol=(pp(1)-pp(nz))/real(ncolumn)
     429        pnew=pp(1)+deltacol/2.
     430        do j=1,ncolumn
     431          pnew=pnew-deltacol
     432          do kz=1,nz-1
     433            if ((pp(kz).ge.pnew).and.(pp(kz+1).lt.pnew)) then
     434              dz1=pp(kz)-pnew
     435              dz2=pnew-pp(kz+1)
     436              dz=1./(dz1+dz2)
     437              zposition=(height(kz)*dz2+height(kz+1)*dz1)*dz
     438              if (zposition.gt.height(nz)-0.5) zposition=height(nz)-0.5
    388439
    389440! Memorize vertical positions where particles are introduced
     
    391442!***********************************************************
    392443
    393             if (ix.eq.nx_we(1)) zcolumn_we(1,jy,j)=zposition
    394             if (ix.eq.nx_we(2)) zcolumn_we(2,jy,j)=zposition
    395             if (jy.eq.ny_sn(1)) zcolumn_sn(1,ix,j)=zposition
    396             if (jy.eq.ny_sn(2)) zcolumn_sn(2,ix,j)=zposition
     444              if (ix.eq.nx_we(1)) zcolumn_we(1,jy,j)=zposition
     445              if (ix.eq.nx_we(2)) zcolumn_we(2,jy,j)=zposition
     446              if (jy.eq.ny_sn(1)) zcolumn_sn(1,ix,j)=zposition
     447              if (jy.eq.ny_sn(2)) zcolumn_sn(2,ix,j)=zposition
    397448
    398449! Initialize mass that has accumulated at boundary to zero
    399450!*********************************************************
    400451
    401             acc_mass_we(1,jy,j)=0.
    402             acc_mass_we(2,jy,j)=0.
    403             acc_mass_sn(1,jy,j)=0.
    404             acc_mass_sn(2,jy,j)=0.
    405           endif
     452              acc_mass_we(1,jy,j)=0.
     453              acc_mass_we(2,jy,j)=0.
     454              acc_mass_sn(1,jy,j)=0.
     455              acc_mass_sn(2,jy,j)=0.
     456            endif
     457          end do
    406458        end do
     45980      continue
    407460      end do
    408 80    continue
    409     end do
    410   end do
     461    end do
    411462
    412463! If particles shall be read in to continue an existing run,
     
    415466!***************************************************************************
    416467
    417 ! :TODO: eso: parallelize
    418   if (ipin.eq.1) then
    419     open(unitboundcond,file=path(2)(1:length(2))//'boundcond.bin', &
    420          form='unformatted')
    421     read(unitboundcond) numcolumn_we,numcolumn_sn, &
    422          zcolumn_we,zcolumn_sn,acc_mass_we,acc_mass_sn
    423     close(unitboundcond)
    424   endif
    425 
    426 
     468! eso TODO: only needed for root process
     469    if (ipin.eq.1) then
     470      open(unitboundcond,file=path(2)(1:length(2))//'boundcond.bin', &
     471           form='unformatted')
     472      read(unitboundcond) numcolumn_we,numcolumn_sn, &
     473           zcolumn_we,zcolumn_sn,acc_mass_we,acc_mass_sn
     474      close(unitboundcond)
     475    endif
     476
     477    numpart = npart(1)/mp_partgroup_np
     478    if (mod(numpart,mp_partgroup_np).ne.0) numpart=numpart+1
     479
     480  else ! Allocate dummy arrays for receiving processes
     481    allocate(itra1_tmp(nullsize),npoint_tmp(nullsize),nclass_tmp(nullsize),&
     482         & idt_tmp(nullsize),itramem_tmp(nullsize),itrasplit_tmp(nullsize),&
     483         & xtra1_tmp(nullsize),ytra1_tmp(nullsize),ztra1_tmp(nullsize),&
     484         & xmass1_tmp(nullsize, nullsize))
     485   
     486  end if ! end if(lroot) 
     487
     488
     489! Distribute particles to other processes (numpart is 'per-process', not total)
     490    call MPI_Bcast(numpart, 1, MPI_INTEGER, id_root, mp_comm_used, mp_ierr)
     491! eso TODO: xmassperparticle: not necessary to send
     492    call MPI_Bcast(xmassperparticle, 1, mp_sp, id_root, mp_comm_used, mp_ierr)
     493    call mpif_send_part_properties(numpart)
     494
     495! Deallocate the temporary arrays used for all particles
     496    deallocate(itra1_tmp,npoint_tmp,nclass_tmp,idt_tmp,itramem_tmp,&
     497         & itrasplit_tmp,xtra1_tmp,ytra1_tmp,ztra1_tmp,xmass1_tmp)
    427498
    428499
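
The index array built by the root process above is what spreads the domain-filling particles evenly over the MPI processes: with stride = npart(1)/mp_partgroup_np, the j-th particle of the i-th stride block is written to slot i*stride+j, so particles originating from the same geographic column end up on different processes. A self-contained sketch of the same double loop, with hypothetical sizes (10 particles, 3 processes):

    program show_round_robin
      implicit none
      integer, parameter :: npart = 10, nproc = 3   ! hypothetical sizes
      integer :: idx(npart), stride, i, j, jj

      stride = npart/nproc
      jj = 0
      do j = 1, stride                ! same double loop as in the changeset
        do i = 0, nproc-1
          jj = jj + 1
          if (jj.gt.npart) exit
          idx(jj) = i*stride + j
        end do
      end do
      do i = 1, mod(npart,nproc)      ! leftover particles keep their own slot
        jj = jj + 1
        if (jj.gt.npart) exit
        idx(jj) = jj
      end do
      print '(a,10i4)', ' idx = ', idx   ! prints: 1 4 7 2 5 8 3 6 9 10
    end program show_round_robin

Particle k of the serial-style initialization is then stored at position idx(k) of the temporary arrays, and mpif_send_part_properties distributes contiguous blocks of those arrays to the processes.
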
  • src/makefile

    r341f4b7 r4c64400  
    419419        par_mod.o point_mod.o unc_mod.o xmass_mod.o
    420420timemanager_mpi.o: com_mod.o flux_mod.o mpi_mod.o oh_mod.o outg_mod.o \
    421         par_mod.o point_mod.o unc_mod.o xmass_mod.o
     421        par_mod.o point_mod.o unc_mod.o xmass_mod.o netcdf_output_mod.o
    422422unc_mod.o: par_mod.o
    423423verttransform.o: cmapf_mod.o com_mod.o par_mod.o
  • src/mean_mod.f90

    r6a678e3 r4c64400  
    2929    module procedure mean_sp
    3030    module procedure mean_dp
    31     module procedure mean_mixed_prec
     31    module procedure mean_mixed_dss
     32    module procedure mean_mixed_dsd
     33
    3234  end interface mean
    3335
     
    6466    ! real(sp),parameter :: eps=1.0e-30
    6567
     68    integer,intent(in) :: number
    6669    real(sp), intent(in) :: x_sp(number)
    6770    real(sp), intent(out) ::xm,xs
    68     integer,intent(in) :: number
    6971    real(sp) :: xl,xq,xaux
    7072    real(sp),parameter :: eps=1.0e-30
     
    116118    implicit none
    117119
     120    integer,intent(in) :: number
    118121    real(dp), intent(in) :: x_dp(number)
    119122    real(dp), intent(out) ::xm,xs
    120     integer,intent(in) :: number
    121123    real(dp) :: xl,xq,xaux
    122124    real(dp),parameter :: eps=1.0e-30
     
    142144  end subroutine mean_dp
    143145
    144   subroutine mean_mixed_prec(x_dp,xm,xs,number)
    145 
    146 !*****************************************************************************
    147 !                                                                            *
    148 !  This subroutine calculates mean and standard deviation of a given element.*
    149 !                                                                            *
    150 !      AUTHOR: Andreas Stohl, 25 January 1994                                *
    151 !                                                                            *
    152 !      Mixed precision version ESO 2016 (dp input, sp output)                *
     146  subroutine mean_mixed_dss(x_dp,xm,xs,number)
     147
     148!*****************************************************************************
     149!                                                                            *
     150!  This subroutine calculates mean and standard deviation of a given element.*
     151!                                                                            *
     152!      AUTHOR: Andreas Stohl, 25 January 1994                                *
     153!                                                                            *
     154!      Mixed precision version ESO 2016 (dp in, sp out, sp out)              *
    153155!*****************************************************************************
    154156!                                                                            *
     
    168170    implicit none
    169171
     172    integer,intent(in) :: number
    170173    real(dp), intent(in) :: x_dp(number)
    171174    real(sp), intent(out) ::xm,xs
    172     integer,intent(in) :: number
    173175    real(sp) :: xl,xq,xaux
    174176    real(sp),parameter :: eps=1.0e-30
     
    192194    endif
    193195
    194   end subroutine mean_mixed_prec
     196  end subroutine mean_mixed_dss
     197
     198  subroutine mean_mixed_dsd(x_dp,xm,xs_dp,number)
     199
     200!*****************************************************************************
     201!                                                                            *
     202!  This subroutine calculates mean and standard deviation of a given element.*
     203!                                                                            *
     204!      AUTHOR: Andreas Stohl, 25 January 1994                                *
     205!                                                                            *
     206!      Mixed precision version ESO 2016 (dp in, sp out, dp out)              *
     207!*****************************************************************************
     208!                                                                            *
     209! Variables:                                                                 *
     210! x_dp(number)        field of input data                                    *
     211! xm                  mean                                                   *
     212! xs_dp               standard deviation                                     *
     213! number              number of elements of field x_dp                       *
     214!                                                                            *
     215! Constants:                                                                 *
     216! eps                 tiny number                                            *
     217!                                                                            *
     218!*****************************************************************************
     219
     220    use par_mod, only: sp,dp
     221
     222    implicit none
     223
     224    integer,intent(in) :: number
     225    real(dp), intent(in) :: x_dp(number)
     226    real(sp), intent(out) ::xm
     227    real(dp), intent(out) ::xs_dp
     228    real(dp) :: xl,xq,xaux
     229    real(dp),parameter :: eps=1.0e-30_dp
     230    integer :: i
     231
     232    xl=0._dp
     233    xq=0._dp
     234    do i=1,number
     235      xl=xl+x_dp(i)
     236      xq=xq+x_dp(i)*x_dp(i)
     237    end do
     238
     239    xm=xl/real(number,kind=sp)
     240
     241    xaux=xq-xl*xl/real(number,kind=dp)
     242
     243    if (xaux.lt.eps) then
     244      xs_dp=0._dp
     245    else
     246      xs_dp=sqrt(xaux/real(number-1,kind=dp))
     247    endif
     248
     249  end subroutine mean_mixed_dsd
     250
    195251end module mean_mod
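
The two new specific routines extend the generic interface mean so that double-precision input can return its statistics in single or double precision; all variants use the same one-pass formulas, xm = sum(x)/n and xs = sqrt((sum(x**2) - sum(x)**2/n)/(n-1)). A minimal driver (hypothetical, assuming par_mod provides the sp and dp kind parameters as in FLEXPART) showing how the generic call is resolved:

    program mean_demo
      use par_mod, only: sp, dp
      use mean_mod
      implicit none
      real(dp) :: x(5) = (/1._dp, 2._dp, 3._dp, 4._dp, 5._dp/)
      real(sp) :: xm_s, xs_s
      real(dp) :: xs_d

      call mean(x, xm_s, xs_s, 5)   ! resolves to mean_mixed_dss (dp in, sp mean, sp sigma)
      call mean(x, xm_s, xs_d, 5)   ! resolves to mean_mixed_dsd (dp in, sp mean, dp sigma)
      print *, xm_s, xs_s, xs_d     ! mean 3.0, standard deviation ~1.5811
    end program mean_demo
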
  • src/mpi_mod.f90

    r861805a r4c64400  
    122122  logical, parameter :: mp_dev_mode = .false.
    123123  logical, parameter :: mp_dbg_out = .false.
    124   logical, parameter :: mp_time_barrier=.false.
     124  logical, parameter :: mp_time_barrier=.true.
    125125  logical, parameter :: mp_measure_time=.false.
    126126  logical, parameter :: mp_exact_numpart=.true.
     
    251251        write(*,FMT='(80("#"))')
    252252      end if
    253       lmp_sync=.true. ! :DBG: eso fix this...
     253      lmp_sync=.true.
    254254    end if
    255255
     
    261261!**********************************************************************
    262262
     263!    id_read = min(mp_np-1, 1)
    263264    id_read = mp_np-1
    264265
     
    330331! Set maxpart per process
    331332! eso 08/2016: Increase maxpart per process, in case of unbalanced distribution
    332     maxpart_mpi=int(mp_maxpart_factor*maxpart/mp_partgroup_np)
     333    maxpart_mpi=int(mp_maxpart_factor*real(maxpart)/real(mp_partgroup_np))
    333334
    334335! Do not allocate particle data arrays for readwind process
     
    486487    integer :: i,jj, addone
    487488
     489! Exit if too many particles
     490    if (num_part.gt.maxpart_mpi) then
     491      write(*,*) '#####################################################'
     492      write(*,*) '#### ERROR - TOTAL NUMBER OF PARTICLES REQUIRED  ####'
     493      write(*,*) '#### EXCEEDS THE MAXIMUM ALLOWED NUMBER. REDUCE  ####'
     494      write(*,*) '#### EITHER NUMBER OF PARTICLES PER RELEASE POINT####'
     495      write(*,*) '#### OR INCREASE MAXPART.                        ####'
     496      write(*,*) '#####################################################'
     497!      call MPI_FINALIZE(mp_ierr)
     498      stop
     499    end if
     500
     501
    488502! Time for MPI communications
    489503!****************************
     
    527541
    528542    if (mp_measure_time) call mpif_mtime('commtime',1)
    529     write(*,*) "PID ", mp_partid, "ending MPI_Scatter operation"
    530543
    531544    goto 601
     
    535548
    536549! After the transfer of particles it it possible that the value of
    537 ! "numpart" is larger than the actual, so we reduce it if there are
     550! "numpart" is larger than the actual used, so we reduce it if there are
    538551! invalid particles at the end of the arrays
    539552601 do i=num_part, 1, -1
     
    628641        if ( numparticles_mpi(idx_arr(m)).gt.mp_min_redist.and.&
    629642             & real(num_trans)/real(numparticles_mpi(idx_arr(m))).gt.mp_redist_fract) then
     643! DBG
     644          ! write(*,*) 'mp_partid, idx_arr(m), idx_arr(i), mp_min_redist, num_trans, numparticles_mpi', &
     645          !      &mp_partid, idx_arr(m), idx_arr(i), mp_min_redist, num_trans, numparticles_mpi
     646! DBG
    630647          call mpif_redist_part(itime, idx_arr(m), idx_arr(i), num_trans/2)
    631648        end if
     
    661678    integer :: i, j, minpart, ipart, maxnumpart
    662679 
     680! Check for invalid input arguments
     681!**********************************
     682 if (src_proc.eq.dest_proc) then
     683   write(*,*) '<mpi_mod::mpif_redist_part>: Error: &
     684        &src_proc.eq.dest_proc'
     685   stop
     686 end if
     687
    663688! Measure time for MPI communications
    664689!************************************
     
    674699      ul=numpart
    675700
    676       ! if (mp_dev_mode) then
    677       !   write(*,FMT='(72("#"))')
    678       !   write(*,*) "Sending ", num_trans, "particles (from/to)", src_proc, dest_proc
    679       !   write(*,*) "numpart before: ", numpart
    680       ! end if
     701      if (mp_dev_mode) then
     702        write(*,FMT='(72("#"))')
     703        write(*,*) "Sending ", num_trans, "particles (from/to)", src_proc, dest_proc
     704        write(*,*) "numpart before: ", numpart
     705      end if
    681706
    682707      call MPI_SEND(nclass(ll:ul),num_trans,&
     
    717742      numpart = numpart-num_trans
    718743
    719       ! if (mp_dev_mode) then
    720       !   write(*,*) "numpart after: ", numpart
    721       !   write(*,FMT='(72("#"))')
    722       ! end if
     744      if (mp_dev_mode) then
     745        write(*,*) "numpart after: ", numpart
     746        write(*,FMT='(72("#"))')
     747      end if
    723748
    724749    else if (mp_partid.eq.dest_proc) then
     
    731756      ul=numpart+num_trans
    732757
    733       ! if (mp_dev_mode) then
    734       !   write(*,FMT='(72("#"))')
    735       !   write(*,*) "Receiving ", num_trans, "particles (from/to)", src_proc, dest_proc
    736       !   write(*,*) "numpart before: ", numpart
    737       ! end if
     758      if (mp_dev_mode) then
     759        write(*,FMT='(72("#"))')
     760        write(*,*) "Receiving ", num_trans, "particles (from/to)", src_proc, dest_proc
     761        write(*,*) "numpart before: ", numpart
     762      end if
    738763
    739764! We could receive the data directly at nclass(ll:ul) etc., but this leaves
     
    785810      do i=1, num_trans
    786811! Take into acount that we may have transferred invalid particles
    787         if (itra1_tmp(minpart).ne.itime) goto 200
     812        if (itra1_tmp(i).ne.itime) cycle
    788813        do ipart=minpart,maxnumpart
    789814          if (itra1(ipart).ne.itime) then
    790             itra1(ipart) = itra1_tmp(minpart)
    791             npoint(ipart) = npoint_tmp(minpart)
    792             nclass(ipart) = nclass_tmp(minpart)
    793             idt(ipart) = idt_tmp(minpart)
    794             itramem(ipart) = itramem_tmp(minpart)
    795             itrasplit(ipart) = itrasplit_tmp(minpart)
    796             xtra1(ipart) = xtra1_tmp(minpart)
    797             ytra1(ipart) = ytra1_tmp(minpart)
    798             ztra1(ipart) = ztra1_tmp(minpart)
    799             xmass1(ipart,:) = xmass1_tmp(minpart,:)
    800 ! Increase numpart, if necessary
    801             numpart=max(numpart,ipart)
     815            itra1(ipart) = itra1_tmp(i)
     816            npoint(ipart) = npoint_tmp(i)
     817            nclass(ipart) = nclass_tmp(i)
     818            idt(ipart) = idt_tmp(i)
     819            itramem(ipart) = itramem_tmp(i)
     820            itrasplit(ipart) = itrasplit_tmp(i)
     821            xtra1(ipart) = xtra1_tmp(i)
     822            ytra1(ipart) = ytra1_tmp(i)
     823            ztra1(ipart) = ztra1_tmp(i)
     824            xmass1(ipart,:) = xmass1_tmp(i,:)
    802825            goto 200 ! Storage space has been found, stop searching
    803826          end if
      827! :TODO: add check for if too many particles required
    804828        end do
    805 200     minpart=minpart+1
     829200     minpart=ipart+1
    806830      end do
    807 
    808       deallocate(itra1_tmp,npoint_tmp,nclass_tmp,idt_tmp,itramem_tmp,itrasplit_tmp,&
    809            & xtra1_tmp,ytra1_tmp,ztra1_tmp,xmass1_tmp)
    810 
    811       ! if (mp_dev_mode) then
    812       !   write(*,*) "numpart after: ", numpart
    813       !   write(*,FMT='(72("#"))')
    814       ! end if
     831! Increase numpart, if necessary
     832      numpart=max(numpart,ipart)
     833
     834      deallocate(itra1_tmp,npoint_tmp,nclass_tmp,idt_tmp,itramem_tmp,&
     835           &itrasplit_tmp,xtra1_tmp,ytra1_tmp,ztra1_tmp,xmass1_tmp)
     836
     837      if (mp_dev_mode) then
     838        write(*,*) "numpart after: ", numpart
     839        write(*,FMT='(72("#"))')
     840      end if
    815841
    816842    else
     
    23142340!
    23152341! TODO
    2316 !   take into account nested wind fields
     2342!   NB: take into account nested wind fields by using a separate
     2343!   subroutine mpif_gf_request_nest (and separate request arrays for the
     2344!   nested variables)
    23172345!
    23182346!*******************************************************************************
     
    27272755! write(*,FMT='(A60,TR1,F9.2)') 'TOTAL CPU TIME FOR VERTTRANSFORM:',&
    27282756!      & mp_vt_time_total
    2729 ! NB: the 'flush' function is possibly a gfortran-specific extension
    2730           call flush()
     2757! NB: the 'flush' function is possibly a gfortran-specific extension,
     2758! comment out if it gives problems
     2759!          call flush()
    27312760        end if
    27322761      end do
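
The corrected loop in mpif_redist_part above copies each transferred particle into the first free slot of the receiving process (a slot is free when its itra1 entry differs from the current time itime), instead of repeatedly reading the same temporary index. A stripped-down, self-contained sketch of that slot-filling pattern (hypothetical sizes; the real routine copies all per-particle arrays such as npoint, xtra1 and xmass1 as well):

    program redist_sketch
      implicit none
      integer, parameter :: maxnumpart = 8, num_trans = 2, itime = 3600
      integer :: itra1(maxnumpart), itra1_tmp(num_trans)
      integer :: i, ipart, minpart, numpart

      itra1 = (/3600, -999999999, 3600, -999999999, 3600, &
           & -999999999, -999999999, -999999999/)   ! slots 1, 3 and 5 already occupied
      itra1_tmp = (/3600, 3600/)                    ! two valid incoming particles
      numpart = 5
      minpart = 1
      do i = 1, num_trans
        if (itra1_tmp(i).ne.itime) cycle            ! skip invalid incoming particles
        do ipart = minpart, maxnumpart
          if (itra1(ipart).ne.itime) then           ! free slot found
            itra1(ipart) = itra1_tmp(i)             ! (real code copies all properties here)
            exit
          end if
        end do
        minpart = ipart + 1                         ! resume the search after the slot just used
        numpart = max(numpart, ipart)               ! grow the active-particle count if needed
      end do
      print *, 'occupied slots:', itra1             ! incoming particles land in slots 2 and 4
    end program redist_sketch
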
  • src/netcdf_output_mod.f90

    rf28aa0a r4c64400  
    1919!**********************************************************************
    2020
     21
    2122  !*****************************************************************************
    2223  !                                                                            *
     
    2425  !  residence time and wet and dry deposition output.                         *
    2526  !                                                                            *
    26   !  - writeheader_netcdf generates file including all information previously    *
     27  !  - writeheader_netcdf generates file including all information previously  *
    2728  !    stored in separate header files                                         *
    28   !  - concoutput_netcdf write concentration output and wet and dry deposition   *
     29  !  - concoutput_netcdf write concentration output and wet and dry deposition *
    2930  !                                                                            *
    3031  !     Author: D. Brunner                                                     *
     
    893894  gridsigmatotal=0.
    894895  gridtotalunc=0.
    895   wetgridtotal=0.
    896   wetgridsigmatotal=0.
    897   wetgridtotalunc=0.
    898   drygridtotal=0.
    899   drygridsigmatotal=0.
    900   drygridtotalunc=0.
     896  wetgridtotal=0._dep_prec
     897  wetgridsigmatotal=0._dep_prec
     898  wetgridtotalunc=0._dep_prec
     899  drygridtotal=0._dep_prec
     900  drygridsigmatotal=0._dep_prec
     901  drygridtotalunc=0._dep_prec
    901902
    902903  do ks=1,nspec
     
    922923                   wetgridsigma(ix,jy),nclassunc)
    923924              ! Multiply by number of classes to get total concentration
    924               wetgrid(ix,jy)=wetgrid(ix,jy)*real(nclassunc,kind=dep_prec)
     925              wetgrid(ix,jy)=wetgrid(ix,jy)*real(nclassunc,kind=sp)
    925926              wetgridtotal=wetgridtotal+wetgrid(ix,jy)
    926927              ! Calculate standard deviation of the mean
     
    946947                   drygridsigma(ix,jy),nclassunc)
    947948              ! Multiply by number of classes to get total concentration
    948               drygrid(ix,jy)=drygrid(ix,jy)*real(nclassunc)
     949              drygrid(ix,jy)=drygrid(ix,jy)*real(nclassunc,kind=sp)
    949950              drygridtotal=drygridtotal+drygrid(ix,jy)
    950951              ! Calculate standard deviation of the mean
    951952              drygridsigma(ix,jy)= &
    952953                   drygridsigma(ix,jy)* &
    953                    sqrt(real(nclassunc))
     954                   sqrt(real(nclassunc, kind=dep_prec))
    954955              drygridsigmatotal=drygridsigmatotal+ &
    955956                   drygridsigma(ix,jy)
     
    10541055  if (wetgridtotal.gt.0.) wetgridtotalunc=wetgridsigmatotal/ &
    10551056       wetgridtotal
    1056   if (drygridtotal.gt.0.) drygridtotalunc=drygridsigmatotal/ &
    1057        drygridtotal
     1057  if (drygridtotal.gt.0.) drygridtotalunc=real(drygridsigmatotal/ &
     1058       drygridtotal, kind=dep_prec)
    10581059
    10591060  ! Dump of receptor concentrations
     
    12981299              wetgridsigma(ix,jy)= &
    12991300                   wetgridsigma(ix,jy)* &
    1300                    sqrt(real(nclassunc))
     1301                   sqrt(real(nclassunc,kind=dep_prec))
    13011302            endif
    13021303
     
    13191320              drygridsigma(ix,jy)= &
    13201321                   drygridsigma(ix,jy)* &
    1321                    sqrt(real(nclassunc))
     1322                   sqrt(real(nclassunc,kind=dep_prec))
    13221323            endif
    13231324
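
For context on the precision changes above: each deposition field is accumulated in nclassunc independent sub-ensembles ("classes"). Per grid cell, the code takes the mean and standard deviation over those classes with the generic mean routine, multiplies the mean by nclassunc to recover the total, and scales the class standard deviation by sqrt(nclassunc). A compact sketch of that per-cell calculation (hypothetical values, one species, one cell):

    program class_unc_sketch
      use par_mod, only: sp
      use mean_mod
      implicit none
      integer, parameter :: nclassunc = 5
      real(sp) :: auxgrid(nclassunc), cell_mean, cell_sigma, cell_total

      auxgrid = (/1.0_sp, 1.2_sp, 0.8_sp, 1.1_sp, 0.9_sp/)  ! deposit in one cell, per class
      call mean(auxgrid, cell_mean, cell_sigma, nclassunc)  ! statistics over the classes
      cell_total = cell_mean*real(nclassunc)                ! total deposit of the cell
      cell_sigma = cell_sigma*sqrt(real(nclassunc))         ! scaled uncertainty, as above
      print *, cell_total, cell_sigma
    end program class_unc_sketch
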
  • src/outg_mod.f90

    r6a678e3 r4c64400  
    2222module outg_mod
    2323
    24   use par_mod, only: dep_prec
     24  use par_mod, only: dep_prec, sp
    2525
    2626  implicit none
     
    3939  real,allocatable, dimension (:,:,:) :: factor3d
    4040  real,allocatable, dimension (:,:,:) :: grid
    41   real(dep_prec),allocatable, dimension (:,:) :: wetgrid
    42   real(dep_prec),allocatable, dimension (:,:) :: drygrid
     41  real(sp),allocatable, dimension (:,:) :: wetgrid
     42  real(sp),allocatable, dimension (:,:) :: drygrid
    4343  real,allocatable, dimension (:,:,:) :: gridsigma
    4444  real(dep_prec),allocatable, dimension (:,:) :: drygridsigma
  • src/par_mod.f90

    r6b3dad4 rd9f0585  
    5858
    5959  integer,parameter :: dep_prec=sp
     60
     61  !****************************************************************
     62  ! Set to T to disable use of kernel for concentrations/deposition
     63  !****************************************************************
     64
     65  logical, parameter :: lnokernel=.false.
    6066
    6167  !***********************************************************
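
lnokernel is a compile-time switch: to run without the output kernel, the parameter above is edited and the model rebuilt, e.g.

    logical, parameter :: lnokernel=.true.  ! disable the output kernel (requires recompilation)
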
  • src/readavailable.f90

    rdb712a8 r4c64400  
    236236  do i=2,numbwf
    237237    idiff=abs(wftime(i)-wftime(i-1))
    238     if (idiff.gt.idiffmax) then
     238    if (idiff.gt.idiffmax.and.lroot) then
    239239      write(*,*) 'FLEXPART WARNING: TIME DIFFERENCE BETWEEN TWO'
    240240      write(*,*) 'WIND FIELDS IS TOO BIG FOR TRANSPORT CALCULATION.&
    241241           &'
    242242      write(*,*) 'THEREFORE, TRAJECTORIES HAVE TO BE SKIPPED.'
    243     else if (idiff.gt.idiffnorm) then
     243    else if (idiff.gt.idiffnorm.and.lroot) then
    244244      write(*,*) 'FLEXPART WARNING: TIME DIFFERENCE BETWEEN TWO'
    245245      write(*,*) 'WIND FIELDS IS BIG. THIS MAY CAUSE A DEGRADATION'
  • src/readcommand.f90

    rc04b739 r4c64400  
    170170    if (index(line,'LDIRECT') .eq. 0) then
    171171      old = .false.
    172       write(*,*) 'COMMAND in old short format, please update to namelist format'
     172      if (lroot) write(*,*) 'COMMAND in old short format, &
     173           &please update to namelist format'
    173174    else
    174175      old = .true.
    175       write(*,*) 'COMMAND in old long format, please update to namelist format'
     176      if (lroot) write(*,*) 'COMMAND in old long format, &
     177           &please update to namelist format'
    176178    endif
    177179    rewind(unitcommand)
  • src/readreleases.f90

    r05cf28d rc8fc724  
    218218  rewind(unitreleases)
    219219
     220  if (nspec.gt.maxspec) goto 994
     221
    220222  ! allocate arrays of matching size for number of species (namelist output)
    221223  deallocate(mass)
     
    274276  do i=1,maxspec
    275277    DRYDEPSPEC(i)=.false.
     278    WETDEPSPEC(i)=.false.
    276279  end do
    277280
     
    367370         &(dquer(i).gt.0. .and. (crain_aero(i) .gt. 0. .or. csnow_aero(i).gt.0.)))  then
    368371      WETDEP=.true.
     372      WETDEPSPEC(i)=.true.
    369373      if (lroot) then
    370374        write (*,*) '  Below-cloud scavenging: ON'
     
    378382    if (dquer(i).gt.0..and.(ccn_aero(i).gt.0. .or. in_aero(i).gt.0.))  then
    379383      WETDEP=.true.
     384      WETDEPSPEC(i)=.true.
    380385      if (lroot) then
    381386        write (*,*) '  In-cloud scavenging: ON'
     
    397402    endif
    398403
    399   end do
     404  end do ! end loop over species
    400405
    401406  if (WETDEP.or.DRYDEP) DEP=.true.
     
    574579  endif
    575580
     581
    576582  ! Determine the release rate (particles per second) and total number
    577583  ! of particles released during the simulation
     
    633639  endif
    634640
     641  if (lroot) then
     642    write(*,FMT='(A,ES14.7)') ' Total mass released:', sum(xmass(1:numpoint,1:nspec))
     643  end if
     644
    635645  return
    636646
  • src/readspecies.f90

    r62e65c7 rc8fc724  
    197197    weta_gas(pos_spec)=pweta_gas
    198198    wetb_gas(pos_spec)=pwetb_gas
    199     crain_aero=pcrain_aero
    200     csnow_aero=pcsnow_aero
     199    crain_aero(pos_spec)=pcrain_aero
     200    csnow_aero(pos_spec)=pcsnow_aero
    201201    ccn_aero(pos_spec)=pccn_aero
    202202    in_aero(pos_spec)=pin_aero
  • src/releaseparticles_mpi.f90

    r861805a r16b61a5  
    416416          endif
    417417        end do
    418 ! ESO TODO: Here we could use dynamic allocation and increase array sizes
    419418        if (ipart.gt.maxpart_mpi) goto 996
    420419
  • src/timemanager_mpi.f90

    r861805a r16b61a5  
    578578        end if
    579579
    580         ! Write particles for all processes
     580        ! Write number of particles for all processes
    581581        if (mp_dev_mode) write(*,*) "PID, itime, numpart", mp_pid,itime,numpart
    582582
     
    870870  endif
    871871  deallocate(gridunc)
    872   deallocate(xpoint1,xpoint2,ypoint1,ypoint2,zpoint1,zpoint2,xmass, checklifetime)
     872  deallocate(xpoint1,xpoint2,ypoint1,ypoint2,zpoint1,zpoint2,xmass)
     873  if (allocated(checklifetime)) deallocate(checklifetime)
    873874  deallocate(ireleasestart,ireleaseend,npart,kindz)
    874875  deallocate(xmasssave)
  • src/unc_mod.f90

    r6a678e3 r4c64400  
    2626  implicit none
    2727
    28   real,allocatable ,dimension (:,:,:,:,:,:,:) :: gridunc
     28  real,allocatable, dimension (:,:,:,:,:,:,:) :: gridunc
    2929  real,allocatable, dimension (:,:,:,:,:,:,:) :: griduncn
    3030  real(dep_prec),allocatable, dimension (:,:,:,:,:,:) :: drygridunc
  • src/wetdepo.f90

    r05cf28d rc8fc724  
    9090  real :: frac_act, liq_frac, dquer_m
    9191
    92   integer :: blc_count, inc_count
     92  integer(selected_int_kind(16)), dimension(nspec) :: blc_count, inc_count
    9393  real    :: Si_dummy, wetscav_dummy
    9494  logical :: readclouds_this_nest
     
    107107!************************
    108108
    109   blc_count=0
    110   inc_count=0
     109  blc_count(:)=0
     110  inc_count(:)=0
    111111
    112112  do jpart=1,numpart
     
    256256    do ks=1,nspec      ! loop over species
    257257      wetdeposit(ks)=0.
    258       wetscav=0.   
     258      wetscav=0.
     259
     260! Cycle loop if wet deposition for the species is off
     261      if (WETDEPSPEC(ks).eqv..false.) cycle
    259262
    260263      if (ngrid.gt.0) then
     
    274277        if ((dquer(ks).le.0.).and.(weta_gas(ks).gt.0..or.wetb_gas(ks).gt.0.)) then
    275278          !        if (weta(ks).gt.0. .or. wetb(ks).gt.0.) then
    276           blc_count=blc_count+1
     279          blc_count(ks)=blc_count(ks)+1
    277280          wetscav=weta_gas(ks)*prec(1)**wetb_gas(ks)
    278281
     
    280283!*********************************************************************************
    281284        else if ((dquer(ks).gt.0.).and.(crain_aero(ks).gt.0..or.csnow_aero(ks).gt.0.)) then
    282           blc_count=blc_count+1
     285          blc_count(ks)=blc_count(ks)+1
    283286
    284287!NIK 17.02.2015
     
    320323! given in species file, or if gas and positive Henry's constant
    321324        if ((ccn_aero(ks).gt.0. .or. in_aero(ks).gt.0.).or.(henry(ks).gt.0.and.dquer(ks).le.0)) then
    322           inc_count=inc_count+1
     325          inc_count(ks)=inc_count(ks)+1
    323326! if negative coefficients (turned off) set to zero for use in equation
    324327          if (ccn_aero(ks).lt.0.) ccn_aero(ks)=0.
     
    432435
    433436! count the total number of below-cloud and in-cloud occurences:
    434   tot_blc_count=tot_blc_count+blc_count
    435   tot_inc_count=tot_inc_count+inc_count
     437  tot_blc_count(1:nspec)=tot_blc_count(1:nspec)+blc_count(1:nspec)
     438  tot_inc_count(1:nspec)=tot_inc_count(1:nspec)+inc_count(1:nspec)
    436439
    437440end subroutine wetdepo
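
The per-species arrays introduced above count how often below-cloud and in-cloud scavenging are applied for each species. For gases, the below-cloud scavenging coefficient computed above follows the power law wetscav = weta_gas * prec**wetb_gas. A small illustrative calculation with hypothetical coefficients and precipitation rate; the exponential-removal step is a simplification of the full routine, which also treats aerosols, cloud water content and sub-grid precipitation fractions:

    program scav_sketch
      implicit none
      real :: weta_gas, wetb_gas, prec, wetscav, dt, frac_removed

      weta_gas = 1.0e-4    ! scavenging parameter A [1/s] (hypothetical value)
      wetb_gas = 0.62      ! precipitation-rate exponent B (hypothetical value)
      prec     = 2.0       ! precipitation rate [mm/h]
      dt       = 900.      ! time step [s]

      wetscav      = weta_gas*prec**wetb_gas   ! scavenging coefficient, as in wetdepo.f90
      frac_removed = 1. - exp(-wetscav*dt)     ! fraction of the particle mass removed in dt
      print *, 'wetscav =', wetscav, '  fraction removed =', frac_removed
    end program scav_sketch
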
  • src/wetdepokernel.f90

    re200b7a r4c64400  
    4040  !                                                                            *
    4141  !*****************************************************************************
     42  ! Changes:
     43  ! eso 10/2016: Added option to disregard kernel
     44  !
     45  !*****************************************************************************
    4246
    4347  use unc_mod
     
    7377  endif
    7478
     79  ! If no kernel is used, direct attribution to grid cell
     80  !******************************************************
    7581
     82  if (lnokernel) then
     83    do ks=1,nspec
     84      if ((ix.ge.0).and.(jy.ge.0).and.(ix.le.numxgrid-1).and. &
     85           (jy.le.numygrid-1)) then
     86        wetgridunc(ix,jy,ks,kp,nunc,nage)= &
     87             wetgridunc(ix,jy,ks,kp,nunc,nage)+deposit(ks)
     88      end if
     89    end do
     90  else ! use kernel
     91   
    7692  ! Determine mass fractions for four grid points
    7793  !**********************************************
     
    107123  endif
    108124  end do
     125  end if
    109126
    110127end subroutine wetdepokernel
  • options/SPECIES/SPECIES_031

    r341f4b7 r35fa90d  
    99 POHCCONST=  1.07E-11,
    1010 POHDCONST=  1203.00000    ,
    11  POHNCONST=  2.00000000    ,
     11 POHNCONST=  0.00000000    ,
    1212 /