Changeset d404d98 in flexpart.git


Timestamp: Oct 13, 2016, 2:41:23 PM (7 years ago)
Author: Sabine <sabine.eckhardt@…>
Branches: master, 10.4.1_pesei, GFS_025, bugfixes+enhancements, dev, release-10, release-10.4.1, scaling-bug, univie
Children: d9f0585
Parents: 46706c7 (diff), 861805a (diff)
Note: this is a merge changeset; the changes displayed below correspond to the merge itself.
Use the (diff) links above to see all the changes relative to each parent.
Message:

Merge branch 'dev' of git.nilu.no:flexpart/flexpart into dev

Files: 5 edited

  • src/mpi_mod.f90

    r0f7835d r861805a  
    9090! MPI tags/requests for send/receive operation
    9191  integer :: tm1
    92   integer, parameter :: nvar_async=26 !27 !29 :DBG:
     92  integer, parameter :: nvar_async=26
    9393!integer, dimension(:), allocatable :: tags
    9494  integer, dimension(:), allocatable :: reqs
     95
     96! Status array used for certain MPI operations (MPI_RECV)
     97  integer, dimension(MPI_STATUS_SIZE) :: mp_status
    9598
    9699
     
    119122  logical, parameter :: mp_dev_mode = .false.
    120123  logical, parameter :: mp_dbg_out = .false.
    121   logical, parameter :: mp_time_barrier=.true.
     124  logical, parameter :: mp_time_barrier=.false.
    122125  logical, parameter :: mp_measure_time=.false.
    123126  logical, parameter :: mp_exact_numpart=.true.
     
    149152  integer, private :: dat_lun
    150153
     154! Temporary arrays for particles (allocated and deallocated as needed)
     155  integer, allocatable, dimension(:) :: nclass_tmp, npoint_tmp, itra1_tmp, idt_tmp, &
     156       & itramem_tmp, itrasplit_tmp
     157  real(kind=dp), allocatable, dimension(:) :: xtra1_tmp, ytra1_tmp
     158  real, allocatable, dimension(:) :: ztra1_tmp
     159  real, allocatable, dimension(:,:) :: xmass1_tmp
     160
     161! mp_redist_fract        Exchange particles between processes if relative difference
     162!                        is larger. A good value is between 0.0 and 0.5
     163! mp_maxpart_factor      Allocate more memory per process than strictly needed
     164!                        (safety factor). Recommended value between 1.5 and 2.5
     165! mp_min_redist          Do not redistribute particles if below this limit
     166  real, parameter :: mp_redist_fract=0.2, mp_maxpart_factor=1.5
     167  integer,parameter :: mp_min_redist=100000
     168
     169
    151170contains
    152171
     
    195214    if (dep_prec==dp) then
    196215      mp_cp = MPI_REAL8
    197       ! TODO: write info message for serial version as well
     216! TODO: write info message for serial version as well
    198217      if (lroot.and.verbosity>0) write(*,*) 'Using double precision for deposition fields'
    199218    else if (dep_prec==sp) then
     
    242261!**********************************************************************
    243262
    244 !    id_read = min(mp_np-1, 1)
    245263    id_read = mp_np-1
    246264
     
    311329
    312330! Set maxpart per process
    313     if (mp_partid.lt.mod(maxpart,mp_partgroup_np)) addmaxpart=1
    314     maxpart_mpi=int(maxpart/mp_partgroup_np)+addmaxpart
     331! eso 08/2016: Increase maxpart per process, in case of unbalanced distribution
     332    maxpart_mpi=int(mp_maxpart_factor*maxpart/mp_partgroup_np)
    315333
    316334! Do not allocate particle data arrays for readwind process
     
    321339! Set random seed for each non-root process
    322340    if (mp_pid.gt.0) then
    323 !    if (mp_pid.ge.0) then
    324 !      call system_clock(s)
    325341      s = 244
    326342      mp_seed = -abs(mod((s*181)*((mp_pid-83)*359), 104729))
    327343    end if
    328     if (mp_dev_mode) write(*,*) 'PID, mp_seed: ',mp_pid, mp_seed
    329344    if (mp_dbg_mode) then
    330 ! :DBG: For debugging, set all seed to 0 and maxrand to e.g. 20
    331345      mp_seed=0
    332346      if (lroot) write(*,*) 'MPI: setting seed=0'
     
    454468
    455469
     470  subroutine mpif_send_part_properties(num_part)
     471!***********************************************************************
     472! Distribute particle properties from root to all processes.
     473
     474! Used by init_domainfill_mpi
     475!
     476! Variables:
     477!
     478! num_part        input, number of particles per process (rounded up)
     479!
     480!***********************************************************************
     481    use com_mod
     482
     483    implicit none
     484
     485    integer,intent(in) :: num_part
     486    integer :: i,jj, addone
     487
     488! Time for MPI communications
     489!****************************
     490    if (mp_measure_time) call mpif_mtime('commtime',0)
     491
     492! Distribute variables, send from pid 0 to other processes (including itself)
     493!****************************************************************************
     494
     495    call MPI_SCATTER(nclass_tmp,num_part,MPI_INTEGER,nclass,&
     496         &num_part,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
     497    if (mp_ierr /= 0) goto 600
     498    call MPI_SCATTER(npoint_tmp,num_part,MPI_INTEGER,npoint,&
     499         &num_part,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
     500    if (mp_ierr /= 0) goto 600
     501    call MPI_SCATTER(itra1_tmp,num_part,MPI_INTEGER,itra1,&
     502         &num_part,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
     503    if (mp_ierr /= 0) goto 600
     504    call MPI_SCATTER(idt_tmp,num_part,MPI_INTEGER,idt,&
     505         &num_part,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
     506    if (mp_ierr /= 0) goto 600
     507    call MPI_SCATTER(itramem_tmp,num_part,MPI_INTEGER,itramem,&
     508         &num_part,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
     509    if (mp_ierr /= 0) goto 600
     510    call MPI_SCATTER(itrasplit_tmp,num_part,MPI_INTEGER,itrasplit,&
     511         &num_part,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
     512    if (mp_ierr /= 0) goto 600
     513    call MPI_SCATTER(xtra1_tmp,num_part,mp_dp,xtra1,&
     514         &num_part,mp_dp,id_root,mp_comm_used,mp_ierr)
     515    if (mp_ierr /= 0) goto 600
     516    call MPI_SCATTER(ytra1_tmp,num_part,mp_dp,ytra1,&
     517         &num_part,mp_dp,id_root,mp_comm_used,mp_ierr)
     518    if (mp_ierr /= 0) goto 600
     519    call MPI_SCATTER(ztra1_tmp,num_part,mp_sp,ztra1,&
     520         &num_part,mp_sp,id_root,mp_comm_used,mp_ierr)
     521    if (mp_ierr /= 0) goto 600
     522    do i=1,nspec
     523      call MPI_SCATTER(xmass1_tmp(:,i),num_part,mp_sp,xmass1(:,i),&
     524           &num_part,mp_sp,id_root,mp_comm_used,mp_ierr)
     525      if (mp_ierr /= 0) goto 600
     526    end do
     527
     528    if (mp_measure_time) call mpif_mtime('commtime',1)
     529    write(*,*) "PID ", mp_partid, "ending MPI_Scatter operation"
     530
     531    goto 601
     532
     533600 write(*,*) "mpi_mod> mp_ierr \= 0", mp_ierr
     534    stop
     535
      536! After the transfer of particles it is possible that the value of
      537! "numpart" is larger than the actual number, so we reduce it if there are
     538! invalid particles at the end of the arrays
     539601 do i=num_part, 1, -1
     540      if (itra1(i).eq.-999999999) then
     541        numpart=numpart-1
     542      else
     543        exit
     544      end if
     545    end do
     546
     547
     548!601 end subroutine mpif_send_part_properties
     549  end subroutine mpif_send_part_properties
     550
     551
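The scatter pattern used in mpif_send_part_properties above boils down to a single collective call. The stand-alone sketch below is illustrative only and not part of this changeset; buf_tmp and buf stand in for the *_tmp arrays on the root and the per-process particle arrays, and the chunk size plays the role of num_part:

    program scatter_sketch
      use mpi
      implicit none
      integer, parameter :: id_root = 0
      integer :: ierr, rank, nproc, chunk, i
      integer, allocatable :: buf_tmp(:), buf(:)

      call MPI_INIT(ierr)
      call MPI_COMM_RANK(MPI_COMM_WORLD, rank, ierr)
      call MPI_COMM_SIZE(MPI_COMM_WORLD, nproc, ierr)

      chunk = 4                      ! plays the role of num_part
      allocate(buf(chunk))
      if (rank.eq.id_root) then
        allocate(buf_tmp(chunk*nproc))
        buf_tmp = (/ (i, i=1, chunk*nproc) /)
      else
        allocate(buf_tmp(1))         ! send buffer is only significant at root
      end if

      ! Root sends one contiguous chunk to every process, including itself
      call MPI_SCATTER(buf_tmp, chunk, MPI_INTEGER, buf, &
           & chunk, MPI_INTEGER, id_root, MPI_COMM_WORLD, ierr)

      write(*,*) 'rank', rank, 'received', buf
      call MPI_FINALIZE(ierr)
    end program scatter_sketch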
     552  subroutine mpif_calculate_part_redist(itime)
     553!***********************************************************************
     554! Calculate number of particles to redistribute between processes. This routine
     555! can be called at regular time intervals to keep a level number of
     556! particles on each process.
     557!
     558! First, all processes report their local "numpart" to each other, which is
     559! stored in array "numpart_mpi(np)". This array is sorted from low to
     560! high values, along with a corresponding process ID array "idx_arr(np)".
     561! If the relative difference between the highest and lowest "numpart_mpi" value
     562! is above a threshold, particles are transferred from process idx_arr(np-1)
     563! to process idx_arr(0). Repeat for processes idx_arr(np-i) and idx_arr(i)
     564! for all valid i.
     565! Note: If np is an odd number, the process with the median
     566! number of particles is left unchanged
     567!
     568! VARIABLES
     569! itime        input, current time
     570!***********************************************************************
     571    use com_mod
     572
     573    implicit none
     574   
     575    integer, intent(in) :: itime
     576    real :: pmin,z
     577    integer :: i,jj,nn, num_part=1,m,imin, num_trans
     578    logical :: first_iter
     579    integer,allocatable,dimension(:) :: numparticles_mpi, idx_arr
     580    real,allocatable,dimension(:) :: sorted ! TODO: we don't really need this
     581
     582! Exit if running with only 1 process
     583!************************************************************************
     584    if (mp_np.eq.1) return
     585
     586! All processes exchange information on number of particles
     587!****************************************************************************
     588    allocate(numparticles_mpi(0:mp_partgroup_np-1), &
     589         &idx_arr(0:mp_partgroup_np-1), sorted(0:mp_partgroup_np-1))
     590
     591    call MPI_Allgather(numpart, 1, MPI_INTEGER, numparticles_mpi, &
     592         & 1, MPI_INTEGER, mp_comm_used, mp_ierr)
     593
     594
     595! Sort from lowest to highest
     596! Initial guess: correct order
     597    sorted(:) = numparticles_mpi(:)
     598    do i=0, mp_partgroup_np-1
     599      idx_arr(i) = i
     600    end do
     601
     602! For each successive element in index array, see if a lower value exists
     603    do i=0, mp_partgroup_np-2
     604      pmin=sorted(i)
     605      imin=idx_arr(i)
     606      m=i+1
     607      do jj=m, mp_partgroup_np-1
     608        if (pmin.le.sorted(jj)) cycle
     609        z=pmin
     610        pmin=sorted(jj)
     611        sorted(jj)=z
     612
     613        nn=imin
     614        imin=idx_arr(jj)
     615        idx_arr(jj)=nn
     616      end do
     617      sorted(i)=pmin
     618      idx_arr(i)=imin
     619    end do
     620
     621! For each pair of processes, transfer particles if the difference is above a
     622! limit, and numpart at sending process large enough
     623
     624    m=mp_partgroup_np-1 ! index for last sorted process (most particles)
     625    do i=0,mp_partgroup_np/2-1
     626      num_trans = numparticles_mpi(idx_arr(m)) - numparticles_mpi(idx_arr(i))
     627      if (mp_partid.eq.idx_arr(m).or.mp_partid.eq.idx_arr(i)) then
     628        if ( numparticles_mpi(idx_arr(m)).gt.mp_min_redist.and.&
     629             & real(num_trans)/real(numparticles_mpi(idx_arr(m))).gt.mp_redist_fract) then
     630          call mpif_redist_part(itime, idx_arr(m), idx_arr(i), num_trans/2)
     631        end if
     632      end if
     633      m=m-1
     634    end do
     635
     636    deallocate(numparticles_mpi, idx_arr, sorted)
     637
     638  end subroutine mpif_calculate_part_redist
     639
     640
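The sort-and-pair scheme described in the header of mpif_calculate_part_redist can be exercised in isolation. The sketch below is illustrative only (the particle counts are made up); it reproduces the selection sort and then pairs the most loaded process with the least loaded one, printing the transfers that mp_redist_fract and mp_min_redist would allow:

    program redist_sketch
      implicit none
      integer, parameter :: np = 5
      real, parameter    :: mp_redist_fract = 0.2
      integer, parameter :: mp_min_redist = 100000
      integer :: numparticles_mpi(0:np-1), idx_arr(0:np-1)
      real    :: sorted(0:np-1), pmin, z
      integer :: i, jj, m, nn, imin, num_trans

      numparticles_mpi = (/ 900000, 120000, 400000, 1500000, 300000 /)

      ! Selection sort of the counts, keeping a parallel array of process IDs
      sorted(:) = real(numparticles_mpi(:))
      do i = 0, np-1
        idx_arr(i) = i
      end do
      do i = 0, np-2
        pmin = sorted(i)
        imin = idx_arr(i)
        do jj = i+1, np-1
          if (pmin.le.sorted(jj)) cycle
          z = pmin
          pmin = sorted(jj)
          sorted(jj) = z
          nn = imin
          imin = idx_arr(jj)
          idx_arr(jj) = nn
        end do
        sorted(i) = pmin
        idx_arr(i) = imin
      end do

      ! Pair the extremes and move half the difference when above the threshold
      m = np-1
      do i = 0, np/2-1
        num_trans = numparticles_mpi(idx_arr(m)) - numparticles_mpi(idx_arr(i))
        if (numparticles_mpi(idx_arr(m)).gt.mp_min_redist .and. &
             & real(num_trans)/real(numparticles_mpi(idx_arr(m))).gt.mp_redist_fract) then
          write(*,*) 'send', num_trans/2, 'particles from rank', idx_arr(m), 'to rank', idx_arr(i)
        end if
        m = m-1
      end do
    end program redist_sketch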
     641  subroutine mpif_redist_part(itime, src_proc, dest_proc, num_trans)
     642!***********************************************************************
     643! Transfer particle properties between two arbitrary processes.
     644!
     645! VARIABLES
     646!
     647! itime           input, current time
     648! src_proc        input, ID of source process
     649! dest_proc       input, ID of destination process
     650! num_trans       input, number of particles to transfer
     651!
     652!************************************************************************
     653    use com_mod !TODO:  ,only: nclass etc
     654
     655    implicit none
     656
     657    integer, intent(in) :: itime, src_proc, dest_proc, num_trans
     658    integer :: ll, ul ! lower and upper indices in arrays
     659    integer :: arr_sz ! size of temporary arrays
     660    integer :: mtag   ! MPI message tag
     661    integer :: i, j, minpart, ipart, maxnumpart
     662 
     663! Measure time for MPI communications
     664!************************************
     665    if (mp_measure_time) call mpif_mtime('commtime',0)
     666
     667! Send particles
     668!***************
     669    if (mp_partid.eq.src_proc) then
     670      mtag = 2000
     671
     672! Array indices for the transferred particles
     673      ll=numpart-num_trans+1
     674      ul=numpart
     675
     676      ! if (mp_dev_mode) then
     677      !   write(*,FMT='(72("#"))')
     678      !   write(*,*) "Sending ", num_trans, "particles (from/to)", src_proc, dest_proc
     679      !   write(*,*) "numpart before: ", numpart
     680      ! end if
     681
     682      call MPI_SEND(nclass(ll:ul),num_trans,&
     683           & MPI_INTEGER,dest_proc,mtag+1,mp_comm_used,mp_ierr)
     684
     685      call MPI_SEND(npoint(ll:ul),num_trans,&
     686           & MPI_INTEGER,dest_proc,mtag+2,mp_comm_used,mp_ierr)
     687
     688      call MPI_SEND(itra1(ll:ul),num_trans, &
     689           & MPI_INTEGER,dest_proc,mtag+3,mp_comm_used,mp_ierr)
     690
     691      call MPI_SEND(idt(ll:ul),num_trans, &
     692           & MPI_INTEGER,dest_proc,mtag+4,mp_comm_used,mp_ierr)
     693
     694      call MPI_SEND(itramem(ll:ul),num_trans, &
     695           & MPI_INTEGER,dest_proc,mtag+5,mp_comm_used,mp_ierr)
     696
     697      call MPI_SEND(itrasplit(ll:ul),num_trans,&
     698           & MPI_INTEGER,dest_proc,mtag+6,mp_comm_used,mp_ierr)
     699
     700      call MPI_SEND(xtra1(ll:ul),num_trans, &
     701           & mp_dp,dest_proc,mtag+7,mp_comm_used,mp_ierr)
     702
     703      call MPI_SEND(ytra1(ll:ul),num_trans,&
     704           & mp_dp,dest_proc,mtag+8,mp_comm_used,mp_ierr)
     705
     706      call MPI_SEND(ztra1(ll:ul),num_trans,&
     707           & mp_sp,dest_proc,mtag+9,mp_comm_used,mp_ierr)
     708
     709      do j=1,nspec
     710        call MPI_SEND(xmass1(ll:ul,j),num_trans,&
     711             & mp_sp,dest_proc,mtag+(9+j),mp_comm_used,mp_ierr)
     712      end do
     713
     714! Terminate transferred particles, update numpart
     715      itra1(ll:ul) = -999999999
     716
     717      numpart = numpart-num_trans
     718
     719      ! if (mp_dev_mode) then
     720      !   write(*,*) "numpart after: ", numpart
     721      !   write(*,FMT='(72("#"))')
     722      ! end if
     723
     724    else if (mp_partid.eq.dest_proc) then
     725
     726! Receive particles
     727!******************
     728      mtag = 2000
     729      ! Array indices for the transferred particles
     730      ll=numpart+1
     731      ul=numpart+num_trans
     732
     733      ! if (mp_dev_mode) then
     734      !   write(*,FMT='(72("#"))')
     735      !   write(*,*) "Receiving ", num_trans, "particles (from/to)", src_proc, dest_proc
     736      !   write(*,*) "numpart before: ", numpart
     737      ! end if
     738
     739! We could receive the data directly at nclass(ll:ul) etc., but this leaves
     740! vacant spaces at lower indices. Using temporary arrays instead.
     741      arr_sz = ul-ll+1
     742      allocate(itra1_tmp(arr_sz),npoint_tmp(arr_sz),nclass_tmp(arr_sz),&
     743           & idt_tmp(arr_sz),itramem_tmp(arr_sz),itrasplit_tmp(arr_sz),&
     744           & xtra1_tmp(arr_sz),ytra1_tmp(arr_sz),ztra1_tmp(arr_sz),&
     745           & xmass1_tmp(arr_sz, maxspec))
     746     
     747      call MPI_RECV(nclass_tmp,num_trans,&
     748           & MPI_INTEGER,src_proc,mtag+1,mp_comm_used,mp_status,mp_ierr)
     749
     750      call MPI_RECV(npoint_tmp,num_trans,&
     751           & MPI_INTEGER,src_proc,mtag+2,mp_comm_used,mp_status,mp_ierr)
     752
     753      call MPI_RECV(itra1_tmp,num_trans, &
     754           & MPI_INTEGER,src_proc,mtag+3,mp_comm_used,mp_status,mp_ierr)
     755
     756      call MPI_RECV(idt_tmp,num_trans, &
     757           & MPI_INTEGER,src_proc,mtag+4,mp_comm_used,mp_status,mp_ierr)
     758
     759      call MPI_RECV(itramem_tmp,num_trans, &
     760           & MPI_INTEGER,src_proc,mtag+5,mp_comm_used,mp_status,mp_ierr)
     761
     762      call MPI_RECV(itrasplit_tmp,num_trans,&
     763           & MPI_INTEGER,src_proc,mtag+6,mp_comm_used,mp_status,mp_ierr)
     764
     765      call MPI_RECV(xtra1_tmp,num_trans, &
     766           & mp_dp,src_proc,mtag+7,mp_comm_used,mp_status,mp_ierr)
     767
     768      call MPI_RECV(ytra1_tmp,num_trans,&
     769           & mp_dp,src_proc,mtag+8,mp_comm_used,mp_status,mp_ierr)
     770
     771      call MPI_RECV(ztra1_tmp,num_trans,&
     772           & mp_sp,src_proc,mtag+9,mp_comm_used,mp_status,mp_ierr)
     773
     774      do j=1,nspec
     775        call MPI_RECV(xmass1_tmp(:,j),num_trans,&
     776             & mp_sp,src_proc,mtag+(9+j),mp_comm_used,mp_status,mp_ierr)
     777      end do
     778
     779! This is the maximum value numpart can possibly have after the transfer
     780      maxnumpart=numpart+num_trans
     781
     782! Search for vacant space and copy from temporary storage
     783!********************************************************
     784      minpart=1
     785      do i=1, num_trans
      786! Take into account that we may have transferred invalid particles
     787        if (itra1_tmp(minpart).ne.itime) goto 200
     788        do ipart=minpart,maxnumpart
     789          if (itra1(ipart).ne.itime) then
     790            itra1(ipart) = itra1_tmp(minpart)
     791            npoint(ipart) = npoint_tmp(minpart)
     792            nclass(ipart) = nclass_tmp(minpart)
     793            idt(ipart) = idt_tmp(minpart)
     794            itramem(ipart) = itramem_tmp(minpart)
     795            itrasplit(ipart) = itrasplit_tmp(minpart)
     796            xtra1(ipart) = xtra1_tmp(minpart)
     797            ytra1(ipart) = ytra1_tmp(minpart)
     798            ztra1(ipart) = ztra1_tmp(minpart)
     799            xmass1(ipart,:) = xmass1_tmp(minpart,:)
     800! Increase numpart, if necessary
     801            numpart=max(numpart,ipart)
     802            goto 200 ! Storage space has been found, stop searching
     803          end if
     804        end do
     805200     minpart=minpart+1
     806      end do
     807
     808      deallocate(itra1_tmp,npoint_tmp,nclass_tmp,idt_tmp,itramem_tmp,itrasplit_tmp,&
     809           & xtra1_tmp,ytra1_tmp,ztra1_tmp,xmass1_tmp)
     810
     811      ! if (mp_dev_mode) then
     812      !   write(*,*) "numpart after: ", numpart
     813      !   write(*,FMT='(72("#"))')
     814      ! end if
     815
     816    else
     817! This routine should only be called by the two participating processes
     818      write(*,*) "ERROR: wrong process has entered mpi_mod::mpif_redist_part"
     819      stop
     820      return
     821    end if
     822
     823! Measure time for MPI communications
     824!************************************
     825    if (mp_measure_time) call mpif_mtime('commtime',1)
     826
     827  end subroutine mpif_redist_part
     828
     829
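The receive-side insertion in mpif_redist_part relies on goto labels; the same vacant-slot search can be written with an exit, as in the illustrative stand-alone sketch below (all values are made up, and only itra1 is copied where the real code copies every particle property):

    program vacancy_sketch
      implicit none
      integer, parameter :: itime = 100, num_trans = 3, maxnumpart = 10
      integer :: itra1(maxnumpart), itra1_tmp(num_trans)
      integer :: i, ipart, minpart, numpart

      numpart = 7
      ! A slot is vacant when itra1(ipart) differs from the current time
      itra1 = (/ 100, -999999999, 100, 100, -999999999, 100, 100, &
           & -999999999, -999999999, -999999999 /)
      itra1_tmp = (/ 100, 100, 100 /)   ! received particles, all valid at itime

      minpart = 1
      do i = 1, num_trans
        ! Skip received slots that do not hold a valid particle at this time step
        if (itra1_tmp(minpart).eq.itime) then
          do ipart = minpart, maxnumpart
            if (itra1(ipart).ne.itime) then
              itra1(ipart) = itra1_tmp(minpart)   ! copy the other properties likewise
              numpart = max(numpart, ipart)       ! grow numpart only if we passed the end
              exit                                ! storage space found, stop searching
            end if
          end do
        end if
        minpart = minpart+1
      end do

      write(*,*) 'numpart after insertion:', numpart
      write(*,*) itra1
    end program vacancy_sketch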
    456830  subroutine mpif_tm_send_vars
    457831!***********************************************************************
     
    477851    if (lroot) then
    478852      call MPI_SCATTER(npoint,numpart_mpi,MPI_INTEGER,MPI_IN_PLACE,&
    479            &numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
     853           & numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
    480854      if (mp_ierr /= 0) goto 600
    481855      call MPI_SCATTER(idt,numpart_mpi,MPI_INTEGER,MPI_IN_PLACE,&
    482            &numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
     856           & numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
    483857      if (mp_ierr /= 0) goto 600
    484858      call MPI_SCATTER(itra1,numpart_mpi,MPI_INTEGER,MPI_IN_PLACE,&
    485            &numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
     859           & numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
    486860      if (mp_ierr /= 0) goto 600
    487861      call MPI_SCATTER(nclass,numpart_mpi,MPI_INTEGER,MPI_IN_PLACE,&
    488            &numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
     862           & numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
    489863      if (mp_ierr /= 0) goto 600
    490864      call MPI_SCATTER(itramem,numpart_mpi,MPI_INTEGER,MPI_IN_PLACE,&
    491            &numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
     865           & numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
    492866      if (mp_ierr /= 0) goto 600
    493867
    494868! int2
    495869      call MPI_SCATTER(cbt,numpart_mpi,MPI_INTEGER2,MPI_IN_PLACE,&
    496            &numpart_mpi,MPI_INTEGER2,id_root,mp_comm_used,mp_ierr)
     870           & numpart_mpi,MPI_INTEGER2,id_root,mp_comm_used,mp_ierr)
    497871      if (mp_ierr /= 0) goto 600
    498872
    499873! real
    500874      call MPI_SCATTER(uap,numpart_mpi,mp_sp,MPI_IN_PLACE,&
    501            &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     875           & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    502876      if (mp_ierr /= 0) goto 600
    503877      call MPI_SCATTER(ucp,numpart_mpi,mp_sp,MPI_IN_PLACE,&
    504            &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     878           & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    505879      if (mp_ierr /= 0) goto 600
    506880      call MPI_SCATTER(uzp,numpart_mpi,mp_sp,MPI_IN_PLACE,&
    507            &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     881           & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    508882      if (mp_ierr /= 0) goto 600
    509883
    510884      call MPI_SCATTER(us,numpart_mpi,mp_sp,MPI_IN_PLACE,&
    511            &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     885           & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    512886      if (mp_ierr /= 0) goto 600
    513887      call MPI_SCATTER(vs,numpart_mpi,mp_sp,MPI_IN_PLACE,&
    514            &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     888           & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    515889      if (mp_ierr /= 0) goto 600
    516890      call MPI_SCATTER(ws,numpart_mpi,mp_sp,MPI_IN_PLACE,&
    517            &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     891           & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    518892      if (mp_ierr /= 0) goto 600
    519893
    520894      call MPI_SCATTER(xtra1,numpart_mpi,mp_dp,MPI_IN_PLACE,&
    521            &numpart_mpi,mp_dp,id_root,mp_comm_used,mp_ierr)
     895           & numpart_mpi,mp_dp,id_root,mp_comm_used,mp_ierr)
    522896      if (mp_ierr /= 0) goto 600
    523897      call MPI_SCATTER(ytra1,numpart_mpi,mp_dp,MPI_IN_PLACE,&
    524            &numpart_mpi,mp_dp,id_root,mp_comm_used,mp_ierr)
     898           & numpart_mpi,mp_dp,id_root,mp_comm_used,mp_ierr)
    525899      if (mp_ierr /= 0) goto 600
    526900      call MPI_SCATTER(ztra1,numpart_mpi,mp_sp,MPI_IN_PLACE,&
    527            &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     901           & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    528902      if (mp_ierr /= 0) goto 600
    529903
    530904      do i=1,nspec
    531905        call MPI_SCATTER(xmass1(:,i),numpart_mpi,mp_sp,MPI_IN_PLACE,&
    532              &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     906             & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    533907        if (mp_ierr /= 0) goto 600
    534908      end do
     
    537911! integers
    538912      call MPI_SCATTER(npoint,numpart_mpi,MPI_INTEGER,npoint,&
    539            &numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
     913           & numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
    540914      if (mp_ierr /= 0) goto 600
    541915      call MPI_SCATTER(idt,numpart_mpi,MPI_INTEGER,idt,&
    542            &numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
     916           & numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
    543917      if (mp_ierr /= 0) goto 600
    544918      call MPI_SCATTER(itra1,numpart_mpi,MPI_INTEGER,itra1,&
    545            &numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
     919           & numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
    546920      if (mp_ierr /= 0) goto 600
    547921      call MPI_SCATTER(nclass,numpart_mpi,MPI_INTEGER,nclass,&
    548            &numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
     922           & numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
    549923      if (mp_ierr /= 0) goto 600
    550924      call MPI_SCATTER(itramem,numpart_mpi,MPI_INTEGER,itramem,&
    551            &numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
     925           & numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
    552926      if (mp_ierr /= 0) goto 600
    553927
    554928! int2
    555929      call MPI_SCATTER(cbt,numpart_mpi,MPI_INTEGER2,cbt,&
    556            &numpart_mpi,MPI_INTEGER2,id_root,mp_comm_used,mp_ierr)
     930           & numpart_mpi,MPI_INTEGER2,id_root,mp_comm_used,mp_ierr)
    557931      if (mp_ierr /= 0) goto 600
    558932
    559933! reals
    560934      call MPI_SCATTER(uap,numpart_mpi,mp_sp,uap,&
    561            &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     935           & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    562936      if (mp_ierr /= 0) goto 600
    563937      call MPI_SCATTER(ucp,numpart_mpi,mp_sp,ucp,&
    564            &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     938           & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    565939      if (mp_ierr /= 0) goto 600
    566940      call MPI_SCATTER(uzp,numpart_mpi,mp_sp,uzp,&
    567            &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     941           & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    568942      if (mp_ierr /= 0) goto 600
    569943
    570944      call MPI_SCATTER(us,numpart_mpi,mp_sp,us,&
    571            &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     945           & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    572946      if (mp_ierr /= 0) goto 600
    573947      call MPI_SCATTER(vs,numpart_mpi,mp_sp,vs,&
    574            &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     948           & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    575949      if (mp_ierr /= 0) goto 600
    576950      call MPI_SCATTER(ws,numpart_mpi,mp_sp,ws,&
    577            &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     951           & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    578952      if (mp_ierr /= 0) goto 600
    579953
    580954      call MPI_SCATTER(xtra1,numpart_mpi,mp_dp,xtra1,&
    581            &numpart_mpi,mp_dp,id_root,mp_comm_used,mp_ierr)
     955           & numpart_mpi,mp_dp,id_root,mp_comm_used,mp_ierr)
    582956      if (mp_ierr /= 0) goto 600
    583957      call MPI_SCATTER(ytra1,numpart_mpi,mp_dp,ytra1,&
    584            &numpart_mpi,mp_dp,id_root,mp_comm_used,mp_ierr)
     958           & numpart_mpi,mp_dp,id_root,mp_comm_used,mp_ierr)
    585959      if (mp_ierr /= 0) goto 600
    586960      call MPI_SCATTER(ztra1,numpart_mpi,mp_sp,ztra1,&
    587            &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     961           & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    588962      if (mp_ierr /= 0) goto 600
    589963
    590964      do i=1,nspec
    591965        call MPI_SCATTER(xmass1(:,i),numpart_mpi,mp_sp,xmass1(:,i),&
    592              &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     966             & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    593967        if (mp_ierr /= 0) goto 600
    594968      end do
     
    11451519
    11461520! cloud water/ice:
    1147     if (readclouds_nest(i)) then
    1148       ! call MPI_Bcast(icloud_stats(:,:,:,li:ui),d2s1*5,mp_sp,id_read,MPI_COMM_WORLD,mp_ierr)
    1149       ! if (mp_ierr /= 0) goto 600
    1150       call MPI_Bcast(ctwcn(:,:,li:ui,i),d2s1,mp_sp,id_read,MPI_COMM_WORLD,mp_ierr)
    1151       if (mp_ierr /= 0) goto 600
    1152     end if
     1521      if (readclouds_nest(i)) then
     1522! call MPI_Bcast(icloud_stats(:,:,:,li:ui),d2s1*5,mp_sp,id_read,MPI_COMM_WORLD,mp_ierr)
     1523! if (mp_ierr /= 0) goto 600
     1524        call MPI_Bcast(ctwcn(:,:,li:ui,i),d2s1,mp_sp,id_read,MPI_COMM_WORLD,mp_ierr)
     1525        if (mp_ierr /= 0) goto 600
     1526      end if
    11531527
    11541528! 2D fields
     
    13981772    integer :: d2s1 = nxmax*nymax
    13991773    integer :: d2s2 = nxmax*nymax*maxspec
    1400     !integer :: d1_size1 = maxwf
     1774!integer :: d1_size1 = maxwf
    14011775
    14021776!    integer :: d3s1,d3s2,d2s1,d2s2
     
    16452019      if (dest.eq.id_read) cycle
    16462020      do k=1, numbnests
    1647       i=dest*nvar_async
    1648       call MPI_Isend(uun(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    1649       if (mp_ierr /= 0) goto 600
    1650       i=i+1
    1651       call MPI_Isend(vvn(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    1652       if (mp_ierr /= 0) goto 600
    1653       i=i+1
    1654       call MPI_Isend(wwn(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    1655       if (mp_ierr /= 0) goto 600
    1656       i=i+1
    1657       call MPI_Isend(ttn(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    1658       if (mp_ierr /= 0) goto 600
    1659       i=i+1
    1660       call MPI_Isend(rhon(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    1661       if (mp_ierr /= 0) goto 600
    1662       i=i+1
    1663       call MPI_Isend(drhodzn(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    1664       if (mp_ierr /= 0) goto 600
    1665       i=i+1
    1666       call MPI_Isend(tthn(:,:,:,mind,k),d3s2,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    1667       if (mp_ierr /= 0) goto 600
    1668       i=i+1
    1669       call MPI_Isend(qvhn(:,:,:,mind,k),d3s2,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    1670       if (mp_ierr /= 0) goto 600
    1671       i=i+1
    1672       call MPI_Isend(qvn(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    1673       if (mp_ierr /= 0) goto 600
    1674       i=i+1
    1675       call MPI_Isend(pvn(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    1676       if (mp_ierr /= 0) goto 600
    1677       i=i+1
    1678       call MPI_Isend(cloudsn(:,:,:,mind,k),d3s1,MPI_INTEGER1,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    1679       i=i+1
    1680       if (mp_ierr /= 0) goto 600
    1681       call MPI_Isend(cloudshn(:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    1682       if (mp_ierr /= 0) goto 600
    1683       i=i+1
    1684       call MPI_Isend(vdepn(:,:,:,mind,k),d2s2,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    1685       if (mp_ierr /= 0) goto 600
    1686       i=i+1
    1687       call MPI_Isend(psn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    1688       if (mp_ierr /= 0) goto 600
    1689       i=i+1
    1690       call MPI_Isend(sdn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    1691       if (mp_ierr /= 0) goto 600
    1692       i=i+1
     2021        i=dest*nvar_async
     2022        call MPI_Isend(uun(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     2023        if (mp_ierr /= 0) goto 600
     2024        i=i+1
     2025        call MPI_Isend(vvn(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     2026        if (mp_ierr /= 0) goto 600
     2027        i=i+1
     2028        call MPI_Isend(wwn(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     2029        if (mp_ierr /= 0) goto 600
     2030        i=i+1
     2031        call MPI_Isend(ttn(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     2032        if (mp_ierr /= 0) goto 600
     2033        i=i+1
     2034        call MPI_Isend(rhon(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     2035        if (mp_ierr /= 0) goto 600
     2036        i=i+1
     2037        call MPI_Isend(drhodzn(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     2038        if (mp_ierr /= 0) goto 600
     2039        i=i+1
     2040        call MPI_Isend(tthn(:,:,:,mind,k),d3s2,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     2041        if (mp_ierr /= 0) goto 600
     2042        i=i+1
     2043        call MPI_Isend(qvhn(:,:,:,mind,k),d3s2,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     2044        if (mp_ierr /= 0) goto 600
     2045        i=i+1
     2046        call MPI_Isend(qvn(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     2047        if (mp_ierr /= 0) goto 600
     2048        i=i+1
     2049        call MPI_Isend(pvn(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     2050        if (mp_ierr /= 0) goto 600
     2051        i=i+1
     2052        call MPI_Isend(cloudsn(:,:,:,mind,k),d3s1,MPI_INTEGER1,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     2053        i=i+1
     2054        if (mp_ierr /= 0) goto 600
     2055        call MPI_Isend(cloudshn(:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     2056        if (mp_ierr /= 0) goto 600
     2057        i=i+1
     2058        call MPI_Isend(vdepn(:,:,:,mind,k),d2s2,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     2059        if (mp_ierr /= 0) goto 600
     2060        i=i+1
     2061        call MPI_Isend(psn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     2062        if (mp_ierr /= 0) goto 600
     2063        i=i+1
     2064        call MPI_Isend(sdn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     2065        if (mp_ierr /= 0) goto 600
     2066        i=i+1
    16932067! 15
    1694       call MPI_Isend(tccn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    1695       if (mp_ierr /= 0) goto 600
    1696       i=i+1
    1697       call MPI_Isend(tt2n(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    1698       if (mp_ierr /= 0) goto 600
    1699       i=i+1
    1700       call MPI_Isend(td2n(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    1701       if (mp_ierr /= 0) goto 600
    1702       i=i+1
    1703       call MPI_Isend(lsprecn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    1704       if (mp_ierr /= 0) goto 600
    1705       i=i+1
    1706       call MPI_Isend(convprecn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    1707       if (mp_ierr /= 0) goto 600
    1708       i=i+1
    1709       call MPI_Isend(ustarn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    1710       if (mp_ierr /= 0) goto 600
    1711       i=i+1
    1712       call MPI_Isend(wstarn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    1713       if (mp_ierr /= 0) goto 600
    1714       i=i+1
    1715       call MPI_Isend(hmixn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    1716       if (mp_ierr /= 0) goto 600
    1717       i=i+1
    1718       call MPI_Isend(tropopausen(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    1719       if (mp_ierr /= 0) goto 600
    1720       i=i+1
    1721       call MPI_Isend(olin(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    1722       if (mp_ierr /= 0) goto 600
     2068        call MPI_Isend(tccn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     2069        if (mp_ierr /= 0) goto 600
     2070        i=i+1
     2071        call MPI_Isend(tt2n(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     2072        if (mp_ierr /= 0) goto 600
     2073        i=i+1
     2074        call MPI_Isend(td2n(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     2075        if (mp_ierr /= 0) goto 600
     2076        i=i+1
     2077        call MPI_Isend(lsprecn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     2078        if (mp_ierr /= 0) goto 600
     2079        i=i+1
     2080        call MPI_Isend(convprecn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     2081        if (mp_ierr /= 0) goto 600
     2082        i=i+1
     2083        call MPI_Isend(ustarn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     2084        if (mp_ierr /= 0) goto 600
     2085        i=i+1
     2086        call MPI_Isend(wstarn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     2087        if (mp_ierr /= 0) goto 600
     2088        i=i+1
     2089        call MPI_Isend(hmixn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     2090        if (mp_ierr /= 0) goto 600
     2091        i=i+1
     2092        call MPI_Isend(tropopausen(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     2093        if (mp_ierr /= 0) goto 600
     2094        i=i+1
     2095        call MPI_Isend(olin(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     2096        if (mp_ierr /= 0) goto 600
    17232097! 25
    17242098
    17252099! Send cloud water if it exists. Increment counter always (as on receiving end)
    1726       if (readclouds) then
    1727         i=i+1
    1728         call MPI_Isend(ctwcn(:,:,mind,k),d2s1,mp_sp,dest,tm1,&
    1729              &MPI_COMM_WORLD,reqs(i),mp_ierr)
    1730         if (mp_ierr /= 0) goto 600
    1731       end if
     2100        if (readclouds) then
     2101          i=i+1
     2102          call MPI_Isend(ctwcn(:,:,mind,k),d2s1,mp_sp,dest,tm1,&
     2103               &MPI_COMM_WORLD,reqs(i),mp_ierr)
     2104          if (mp_ierr /= 0) goto 600
     2105        end if
     2106      end do
    17322107    end do
    1733   end do
    17342108
    17352109    if (mp_measure_time) call mpif_mtime('commtime',1)
     
    18102184    do k=1, numbnests
    18112185! Get MPI tags/requests for communications
    1812     j=mp_pid*nvar_async
    1813     call MPI_Irecv(uun(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
    1814          &MPI_COMM_WORLD,reqs(j),mp_ierr)
    1815     if (mp_ierr /= 0) goto 600
    1816     j=j+1
    1817     call MPI_Irecv(vvn(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
    1818          &MPI_COMM_WORLD,reqs(j),mp_ierr)
    1819     if (mp_ierr /= 0) goto 600
    1820     j=j+1
    1821     call MPI_Irecv(wwn(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
    1822          &MPI_COMM_WORLD,reqs(j),mp_ierr)
    1823     if (mp_ierr /= 0) goto 600
    1824     j=j+1
    1825     call MPI_Irecv(ttn(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
    1826          &MPI_COMM_WORLD,reqs(j),mp_ierr)
    1827     if (mp_ierr /= 0) goto 600
    1828     j=j+1
    1829     call MPI_Irecv(rhon(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
    1830          &MPI_COMM_WORLD,reqs(j),mp_ierr)
    1831     if (mp_ierr /= 0) goto 600
    1832     j=j+1
    1833     call MPI_Irecv(drhodzn(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
    1834          &MPI_COMM_WORLD,reqs(j),mp_ierr)
    1835     if (mp_ierr /= 0) goto 600
    1836     j=j+1
    1837     call MPI_Irecv(tthn(:,:,:,mind,k),d3s2,mp_sp,id_read,MPI_ANY_TAG,&
    1838          &MPI_COMM_WORLD,reqs(j),mp_ierr)
    1839     if (mp_ierr /= 0) goto 600
    1840     j=j+1
    1841     call MPI_Irecv(qvhn(:,:,:,mind,k),d3s2,mp_sp,id_read,MPI_ANY_TAG,&
    1842          &MPI_COMM_WORLD,reqs(j),mp_ierr)
    1843     if (mp_ierr /= 0) goto 600
    1844     j=j+1
    1845     call MPI_Irecv(qvn(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
    1846          &MPI_COMM_WORLD,reqs(j),mp_ierr)
    1847     if (mp_ierr /= 0) goto 600
    1848     j=j+1
    1849     call MPI_Irecv(pvn(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
    1850          &MPI_COMM_WORLD,reqs(j),mp_ierr)
    1851     if (mp_ierr /= 0) goto 600
    1852     j=j+1
    1853     call MPI_Irecv(cloudsn(:,:,:,mind,k),d3s1,MPI_INTEGER1,id_read,MPI_ANY_TAG,&
    1854          &MPI_COMM_WORLD,reqs(j),mp_ierr)   
    1855     if (mp_ierr /= 0) goto 600
    1856     j=j+1
    1857     call MPI_Irecv(cloudshn(:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    1858          &MPI_COMM_WORLD,reqs(j),mp_ierr)
    1859     if (mp_ierr /= 0) goto 600
    1860     j=j+1
    1861     call MPI_Irecv(vdepn(:,:,:,mind,k),d2s2,mp_sp,id_read,MPI_ANY_TAG,&
    1862          &MPI_COMM_WORLD,reqs(j),mp_ierr)
    1863     if (mp_ierr /= 0) goto 600
    1864     j=j+1
    1865     call MPI_Irecv(psn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    1866          &MPI_COMM_WORLD,reqs(j),mp_ierr)
    1867     if (mp_ierr /= 0) goto 600
    1868     j=j+1
    1869     call MPI_Irecv(sdn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    1870          &MPI_COMM_WORLD,reqs(j),mp_ierr)
    1871     if (mp_ierr /= 0) goto 600
    1872     j=j+1
    1873     call MPI_Irecv(tccn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    1874          &MPI_COMM_WORLD,reqs(j),mp_ierr)
    1875     if (mp_ierr /= 0) goto 600
    1876     j=j+1
    1877     call MPI_Irecv(tt2n(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    1878          &MPI_COMM_WORLD,reqs(j),mp_ierr)
    1879     if (mp_ierr /= 0) goto 600
    1880     j=j+1
    1881     call MPI_Irecv(td2n(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    1882          &MPI_COMM_WORLD,reqs(j),mp_ierr)
    1883     if (mp_ierr /= 0) goto 600
    1884     j=j+1
    1885     call MPI_Irecv(lsprecn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    1886          &MPI_COMM_WORLD,reqs(j),mp_ierr)
    1887     if (mp_ierr /= 0) goto 600
    1888     j=j+1
    1889     call MPI_Irecv(convprecn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    1890          &MPI_COMM_WORLD,reqs(j),mp_ierr)
    1891     if (mp_ierr /= 0) goto 600
    1892     call MPI_Irecv(ustarn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    1893          &MPI_COMM_WORLD,reqs(j),mp_ierr)
    1894     if (mp_ierr /= 0) goto 600
    1895     j=j+1
    1896     call MPI_Irecv(wstarn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    1897          &MPI_COMM_WORLD,reqs(j),mp_ierr)
    1898     if (mp_ierr /= 0) goto 600
    1899     j=j+1
    1900     call MPI_Irecv(hmixn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    1901          &MPI_COMM_WORLD,reqs(j),mp_ierr)
    1902     if (mp_ierr /= 0) goto 600
    1903     j=j+1
    1904     call MPI_Irecv(tropopausen(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    1905          &MPI_COMM_WORLD,reqs(j),mp_ierr)
    1906     if (mp_ierr /= 0) goto 600
    1907     j=j+1
    1908     call MPI_Irecv(olin(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    1909          &MPI_COMM_WORLD,reqs(j),mp_ierr)
    1910     if (mp_ierr /= 0) goto 600
     2186      j=mp_pid*nvar_async
     2187      call MPI_Irecv(uun(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
     2188           &MPI_COMM_WORLD,reqs(j),mp_ierr)
     2189      if (mp_ierr /= 0) goto 600
     2190      j=j+1
     2191      call MPI_Irecv(vvn(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
     2192           &MPI_COMM_WORLD,reqs(j),mp_ierr)
     2193      if (mp_ierr /= 0) goto 600
     2194      j=j+1
     2195      call MPI_Irecv(wwn(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
     2196           &MPI_COMM_WORLD,reqs(j),mp_ierr)
     2197      if (mp_ierr /= 0) goto 600
     2198      j=j+1
     2199      call MPI_Irecv(ttn(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
     2200           &MPI_COMM_WORLD,reqs(j),mp_ierr)
     2201      if (mp_ierr /= 0) goto 600
     2202      j=j+1
     2203      call MPI_Irecv(rhon(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
     2204           &MPI_COMM_WORLD,reqs(j),mp_ierr)
     2205      if (mp_ierr /= 0) goto 600
     2206      j=j+1
     2207      call MPI_Irecv(drhodzn(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
     2208           &MPI_COMM_WORLD,reqs(j),mp_ierr)
     2209      if (mp_ierr /= 0) goto 600
     2210      j=j+1
     2211      call MPI_Irecv(tthn(:,:,:,mind,k),d3s2,mp_sp,id_read,MPI_ANY_TAG,&
     2212           &MPI_COMM_WORLD,reqs(j),mp_ierr)
     2213      if (mp_ierr /= 0) goto 600
     2214      j=j+1
     2215      call MPI_Irecv(qvhn(:,:,:,mind,k),d3s2,mp_sp,id_read,MPI_ANY_TAG,&
     2216           &MPI_COMM_WORLD,reqs(j),mp_ierr)
     2217      if (mp_ierr /= 0) goto 600
     2218      j=j+1
     2219      call MPI_Irecv(qvn(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
     2220           &MPI_COMM_WORLD,reqs(j),mp_ierr)
     2221      if (mp_ierr /= 0) goto 600
     2222      j=j+1
     2223      call MPI_Irecv(pvn(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
     2224           &MPI_COMM_WORLD,reqs(j),mp_ierr)
     2225      if (mp_ierr /= 0) goto 600
     2226      j=j+1
     2227      call MPI_Irecv(cloudsn(:,:,:,mind,k),d3s1,MPI_INTEGER1,id_read,MPI_ANY_TAG,&
     2228           &MPI_COMM_WORLD,reqs(j),mp_ierr)   
     2229      if (mp_ierr /= 0) goto 600
     2230      j=j+1
     2231      call MPI_Irecv(cloudshn(:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
     2232           &MPI_COMM_WORLD,reqs(j),mp_ierr)
     2233      if (mp_ierr /= 0) goto 600
     2234      j=j+1
     2235      call MPI_Irecv(vdepn(:,:,:,mind,k),d2s2,mp_sp,id_read,MPI_ANY_TAG,&
     2236           &MPI_COMM_WORLD,reqs(j),mp_ierr)
     2237      if (mp_ierr /= 0) goto 600
     2238      j=j+1
     2239      call MPI_Irecv(psn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
     2240           &MPI_COMM_WORLD,reqs(j),mp_ierr)
     2241      if (mp_ierr /= 0) goto 600
     2242      j=j+1
     2243      call MPI_Irecv(sdn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
     2244           &MPI_COMM_WORLD,reqs(j),mp_ierr)
     2245      if (mp_ierr /= 0) goto 600
     2246      j=j+1
     2247      call MPI_Irecv(tccn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
     2248           &MPI_COMM_WORLD,reqs(j),mp_ierr)
     2249      if (mp_ierr /= 0) goto 600
     2250      j=j+1
     2251      call MPI_Irecv(tt2n(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
     2252           &MPI_COMM_WORLD,reqs(j),mp_ierr)
     2253      if (mp_ierr /= 0) goto 600
     2254      j=j+1
     2255      call MPI_Irecv(td2n(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
     2256           &MPI_COMM_WORLD,reqs(j),mp_ierr)
     2257      if (mp_ierr /= 0) goto 600
     2258      j=j+1
     2259      call MPI_Irecv(lsprecn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
     2260           &MPI_COMM_WORLD,reqs(j),mp_ierr)
     2261      if (mp_ierr /= 0) goto 600
     2262      j=j+1
     2263      call MPI_Irecv(convprecn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
     2264           &MPI_COMM_WORLD,reqs(j),mp_ierr)
     2265      if (mp_ierr /= 0) goto 600
     2266      call MPI_Irecv(ustarn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
     2267           &MPI_COMM_WORLD,reqs(j),mp_ierr)
     2268      if (mp_ierr /= 0) goto 600
     2269      j=j+1
     2270      call MPI_Irecv(wstarn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
     2271           &MPI_COMM_WORLD,reqs(j),mp_ierr)
     2272      if (mp_ierr /= 0) goto 600
     2273      j=j+1
     2274      call MPI_Irecv(hmixn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
     2275           &MPI_COMM_WORLD,reqs(j),mp_ierr)
     2276      if (mp_ierr /= 0) goto 600
     2277      j=j+1
     2278      call MPI_Irecv(tropopausen(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
     2279           &MPI_COMM_WORLD,reqs(j),mp_ierr)
     2280      if (mp_ierr /= 0) goto 600
     2281      j=j+1
     2282      call MPI_Irecv(olin(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
     2283           &MPI_COMM_WORLD,reqs(j),mp_ierr)
     2284      if (mp_ierr /= 0) goto 600
    19112285
    19122286! Post request for clwc. These data are possibly not sent, request must then be cancelled
    19132287! For now assume that data at all steps either have or do not have water
    1914     if (readclouds) then
    1915       j=j+1
    1916       call MPI_Irecv(ctwcn(:,:,mind,k),d2s1*5,mp_sp,id_read,MPI_ANY_TAG,&
    1917            &MPI_COMM_WORLD,reqs(j),mp_ierr)
    1918       if (mp_ierr /= 0) goto 600
    1919     end if
    1920   end do
     2288      if (readclouds) then
     2289        j=j+1
     2290        call MPI_Irecv(ctwcn(:,:,mind,k),d2s1*5,mp_sp,id_read,MPI_ANY_TAG,&
     2291             &MPI_COMM_WORLD,reqs(j),mp_ierr)
     2292        if (mp_ierr /= 0) goto 600
     2293      end if
     2294    end do
    19212295
    19222296    if (mp_measure_time) call mpif_mtime('commtime',1)
     
    23492723          write(*,FMT='(A60,TR1,F9.2)') 'TOTAL WALL TIME FOR CONCCALC:',&
    23502724               & mp_conccalc_time_total
    2351           ! write(*,FMT='(A60,TR1,F9.2)') 'TOTAL WALL TIME FOR VERTTRANSFORM:',&
    2352           !      & mp_vt_wtime_total
    2353           ! write(*,FMT='(A60,TR1,F9.2)') 'TOTAL CPU TIME FOR VERTTRANSFORM:',&
    2354           !      & mp_vt_time_total
     2725! write(*,FMT='(A60,TR1,F9.2)') 'TOTAL WALL TIME FOR VERTTRANSFORM:',&
     2726!      & mp_vt_wtime_total
     2727! write(*,FMT='(A60,TR1,F9.2)') 'TOTAL CPU TIME FOR VERTTRANSFORM:',&
     2728!      & mp_vt_time_total
    23552729! NB: the 'flush' function is possibly a gfortran-specific extension
    23562730          call flush()
     
    23882762
    23892763
    2390     end subroutine mpif_finalize
    2391 
    2392 
    2393     subroutine get_lun(my_lun)
     2764  end subroutine mpif_finalize
     2765
     2766
     2767  subroutine get_lun(my_lun)
    23942768!***********************************************************************
    23952769! get_lun:
     
    23972771!***********************************************************************
    23982772
    2399       implicit none
    2400 
    2401       integer, intent(inout) :: my_lun
    2402       integer, save :: free_lun=100
    2403       logical :: exists, iopen
    2404 
    2405 !***********************************************************************
    2406 
    2407       loop1: do
    2408         inquire(UNIT=free_lun, EXIST=exists, OPENED=iopen)
    2409         if (exists .and. .not.iopen) exit loop1
    2410         free_lun = free_lun+1
    2411       end do loop1
    2412       my_lun = free_lun
    2413 
    2414     end subroutine get_lun
    2415 
    2416 
    2417     subroutine write_data_dbg(array_in, array_name, tstep, ident)
     2773    implicit none
     2774
     2775    integer, intent(inout) :: my_lun
     2776    integer, save :: free_lun=100
     2777    logical :: exists, iopen
     2778
     2779!***********************************************************************
     2780
     2781    loop1: do
     2782      inquire(UNIT=free_lun, EXIST=exists, OPENED=iopen)
     2783      if (exists .and. .not.iopen) exit loop1
     2784      free_lun = free_lun+1
     2785    end do loop1
     2786    my_lun = free_lun
     2787
     2788  end subroutine get_lun
     2789
     2790
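get_lun probes unit numbers from 100 upwards until it finds one that exists and is not already open. Compilers supporting Fortran 2008 can achieve the same with the newunit= specifier; the sketch below is illustrative only (the file name is made up) and is not part of this changeset:

    program lun_sketch
      implicit none
      integer :: my_lun

      ! Fortran 2008: let the runtime pick a free unit instead of probing manually
      open(newunit=my_lun, file='example.dat', form='unformatted', &
           & action='write', status='replace')
      write(my_lun) (/ 1.0, 2.0, 3.0 /)
      close(my_lun)
      write(*,*) 'wrote example.dat on unit', my_lun
    end program lun_sketch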
     2791  subroutine write_data_dbg(array_in, array_name, tstep, ident)
    24182792!***********************************************************************
    24192793! Write one-dimensional arrays to file (for debugging purposes)
    24202794!***********************************************************************
    2421       implicit none
    2422 
    2423       real, intent(in), dimension(:) :: array_in
    2424       integer, intent(in) :: tstep
    2425       integer :: lios
    2426       character(LEN=*), intent(in) :: ident, array_name
    2427 
    2428       character(LEN=8) :: c_ts
    2429       character(LEN=40) :: fn_1, fn_2
    2430 
    2431 !***********************************************************************
    2432 
    2433       write(c_ts, FMT='(I8.8,BZ)') tstep
    2434       fn_1='-'//trim(adjustl(c_ts))//'-'//trim(ident)
    2435       write(c_ts, FMT='(I2.2,BZ)') mp_np
    2436       fn_2= trim(adjustl(array_name))//trim(adjustl(fn_1))//'-np'//trim(adjustl(c_ts))//'.dat'
    2437 
    2438       call get_lun(dat_lun)
    2439       open(UNIT=dat_lun, FILE=fn_2, IOSTAT=lios, ACTION='WRITE', &
    2440            FORM='UNFORMATTED', STATUS='REPLACE')
    2441       write(UNIT=dat_lun, IOSTAT=lios) array_in
    2442       close(UNIT=dat_lun)
    2443 
    2444     end subroutine write_data_dbg
     2795    implicit none
     2796
     2797    real, intent(in), dimension(:) :: array_in
     2798    integer, intent(in) :: tstep
     2799    integer :: lios
     2800    character(LEN=*), intent(in) :: ident, array_name
     2801
     2802    character(LEN=8) :: c_ts
     2803    character(LEN=40) :: fn_1, fn_2
     2804
     2805!***********************************************************************
     2806
     2807    write(c_ts, FMT='(I8.8,BZ)') tstep
     2808    fn_1='-'//trim(adjustl(c_ts))//'-'//trim(ident)
     2809    write(c_ts, FMT='(I2.2,BZ)') mp_np
     2810    fn_2= trim(adjustl(array_name))//trim(adjustl(fn_1))//'-np'//trim(adjustl(c_ts))//'.dat'
     2811
     2812    call get_lun(dat_lun)
     2813    open(UNIT=dat_lun, FILE=fn_2, IOSTAT=lios, ACTION='WRITE', &
     2814         FORM='UNFORMATTED', STATUS='REPLACE')
     2815    write(UNIT=dat_lun, IOSTAT=lios) array_in
     2816    close(UNIT=dat_lun)
     2817
     2818  end subroutine write_data_dbg
     2819
     2820
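A typical call site for write_data_dbg, inside the module where com_mod variables are visible, could look like the illustrative fragment below; the array slice, the name string and the 'after_redist' tag are made-up examples, not taken from the FLEXPART sources:

    call write_data_dbg(ztra1(1:numpart), 'ztra1', itime, 'after_redist')

This writes the slice to an unformatted file named ztra1-<tstep>-after_redist-np<NN>.dat, where NN is the number of MPI processes.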
     2821  subroutine set_fields_synthetic()
     2822!*******************************************************************************
     2823! DESCRIPTION
     2824!   Set all meteorological fields to synthetic (usually constant/homogeneous)
     2825!   values.
     2826!   Used for validation and error-checking
     2827!
     2828! NOTE
      2829!   This version uses asynchronous communications.
     2830!
     2831! VARIABLES
     2832!   
     2833!
     2834!
     2835!*******************************************************************************
     2836    use com_mod
     2837
     2838    implicit none
     2839
     2840    integer :: li=1, ui=2 ! wfmem indices (i.e, operate on entire field)
     2841
     2842    if (.not.lmp_sync) ui=3
     2843
     2844
     2845! Variables transferred at initialization only
     2846!*********************************************
     2847!    readclouds=readclouds_
     2848    oro(:,:)=0.0
     2849    excessoro(:,:)=0.0
     2850    lsm(:,:)=0.0
     2851    xlanduse(:,:,:)=0.0
     2852!    wftime
     2853!    numbwf
     2854!    nmixz
     2855!    height
     2856
     2857! Time-varying fields:
     2858    uu(:,:,:,li:ui) = 10.0
     2859    vv(:,:,:,li:ui) = 0.0
     2860    uupol(:,:,:,li:ui) = 10.0
     2861    vvpol(:,:,:,li:ui)=0.0
     2862    ww(:,:,:,li:ui)=0.
     2863    tt(:,:,:,li:ui)=300.
     2864    rho(:,:,:,li:ui)=1.3
     2865    drhodz(:,:,:,li:ui)=.0
     2866    tth(:,:,:,li:ui)=0.0
     2867    qvh(:,:,:,li:ui)=1.0
     2868    qv(:,:,:,li:ui)=1.0
     2869
     2870    pv(:,:,:,li:ui)=1.0
     2871    clouds(:,:,:,li:ui)=0
     2872
     2873    clwc(:,:,:,li:ui)=0.0
     2874    ciwc(:,:,:,li:ui)=0.0
     2875
     2876! 2D fields
     2877
     2878    cloudsh(:,:,li:ui)=0
     2879    vdep(:,:,:,li:ui)=0.0
     2880    ps(:,:,:,li:ui)=1.0e5
     2881    sd(:,:,:,li:ui)=0.0
     2882    tcc(:,:,:,li:ui)=0.0
     2883    tt2(:,:,:,li:ui)=300.
     2884    td2(:,:,:,li:ui)=250.
     2885    lsprec(:,:,:,li:ui)=0.0
     2886    convprec(:,:,:,li:ui)=0.0
     2887    ustar(:,:,:,li:ui)=1.0
     2888    wstar(:,:,:,li:ui)=1.0
     2889    hmix(:,:,:,li:ui)=10000.
     2890    tropopause(:,:,:,li:ui)=10000.
     2891    oli(:,:,:,li:ui)=0.01
     2892
     2893  end subroutine set_fields_synthetic
    24452894
    24462895end module mpi_mod
  • src/releaseparticles_mpi.f90

    r7e52e2e r861805a  
    2121
    2222subroutine releaseparticles(itime)
    23   !                              o
    24   !*****************************************************************************
    25   !                                                                            *
    26   !     This subroutine releases particles from the release locations.         *
    27   !                                                                            *
    28   !     It searches for a "vacant" storage space and assigns all particle      *
    29   !     information to that space. A space is vacant either when no particle   *
    30   !     is yet assigned to it, or when its particle has expired and, thus,     *
    31   !     the storage space is made available to a new particle.                 *
    32   !                                                                            *
    33   !     Author: A. Stohl                                                       *
    34   !                                                                            *
    35   !     29 June 2002                                                           *
    36   !                                                                            *
    37   !   CHANGES                                                                  *
    38   !     12/2014 eso: MPI version                                               *
    39   !                                                                            *
    40   !*****************************************************************************
    41   !                                                                            *
    42   ! Variables:                                                                 *
    43   ! itime [s]            current time                                          *
    44   ! ireleasestart, ireleaseend          start and end times of all releases    *
    45   ! npart(maxpoint)      number of particles to be released in total           *
    46   ! numrel               number of particles to be released during this time   *
    47   !                      step                                                  *
    48   ! addpart              extra particle assigned to MPI process if             *
    49   !                      mod(npart(i),mp_partgroup_np) .ne. 0)                 *
    50   !*****************************************************************************
     23!                              o
     24!*****************************************************************************
     25!                                                                            *
     26!     This subroutine releases particles from the release locations.         *
     27!                                                                            *
     28!     It searches for a "vacant" storage space and assigns all particle      *
     29!     information to that space. A space is vacant either when no particle   *
     30!     is yet assigned to it, or when its particle has expired and, thus,     *
     31!     the storage space is made available to a new particle.                 *
     32!                                                                            *
     33!     Author: A. Stohl                                                       *
     34!                                                                            *
     35!     29 June 2002                                                           *
     36!                                                                            *
     37!   CHANGES                                                                  *
     38!     12/2014 eso: MPI version                                               *
     39!                                                                            *
     40!*****************************************************************************
     41!                                                                            *
     42! Variables:                                                                 *
     43! itime [s]            current time                                          *
     44! ireleasestart, ireleaseend          start and end times of all releases    *
     45! npart(maxpoint)      number of particles to be released in total           *
     46! numrel               number of particles to be released during this time   *
     47!                      step                                                  *
     48! addone               extra particle assigned to MPI process if             *
     49!                      mod(npart(i),mp_partgroup_np) .ne. 0)                 *
     50!*****************************************************************************
    5151
    5252  use point_mod
     
    5959  implicit none
    6060
    61   !real xaux,yaux,zaux,ran1,rfraction,xmasssave(maxpoint)
     61!real xaux,yaux,zaux,ran1,rfraction,xmasssave(maxpoint)
    6262  real :: xaux,yaux,zaux,rfraction
    6363  real :: topo,rhoaux(2),r,t,rhoout,ddx,ddy,rddx,rddy,p1,p2,p3,p4
     
    7373
    7474  integer :: idummy = -7
    75   !save idummy,xmasssave
    76   !data idummy/-7/,xmasssave/maxpoint*0./
     75!save idummy,xmasssave
     76!data idummy/-7/,xmasssave/maxpoint*0./
    7777
    7878  logical :: first_call=.true.
    7979
    80   ! Use different seed for each process.
    81   !****************************************************************************
     80! Use different seed for each process.
     81!****************************************************************************
    8282  if (first_call) then
    8383    idummy=idummy+mp_seed
     
    8787  mind2=memind(2)
    8888
    89   ! Determine the actual date and time in Greenwich (i.e., UTC + correction for daylight savings time)
    90   !*****************************************************************************
     89! Determine the actual date and time in Greenwich (i.e., UTC + correction for daylight savings time)
     90!*****************************************************************************
    9191
    9292  julmonday=juldate(19000101,0)          ! this is a Monday
     
    9797
    9898
    99   ! For every release point, check whether we are in the release time interval
    100   !***************************************************************************
     99! For every release point, check whether we are in the release time interval
     100!***************************************************************************
    101101
    102102  minpart=1
     
    105105         (itime.le.ireleaseend(i))) then
    106106
    107   ! Determine the local day and time
    108   !*********************************
     107! Determine the local day and time
     108!*********************************
    109109
    110110      xlonav=xlon0+(xpoint2(i)+xpoint1(i))/2.*dx  ! longitude needed to determine local time
     
    124124      endif
    125125
    126   ! Calculate a species- and time-dependent correction factor, distinguishing between
    127   ! area (those with release starting at surface) and point (release starting above surface) sources
    128   ! Also, calculate an average time correction factor (species independent)
    129   !*****************************************************************************
     126! Calculate a species- and time-dependent correction factor, distinguishing between
     127! area (those with release starting at surface) and point (release starting above surface) sources
     128! Also, calculate an average time correction factor (species independent)
     129!*****************************************************************************
    130130      average_timecorrect=0.
    131131      do k=1,nspec
     
    139139      average_timecorrect=average_timecorrect/real(nspec)
    140140
    141   ! Determine number of particles to be released this time; at start and at end of release,
    142   ! only half the particles are released
    143   !*****************************************************************************
     141! Determine number of particles to be released this time; at start and at end of release,
     142! only half the particles are released
     143!*****************************************************************************
    144144
    145145      if (ireleasestart(i).ne.ireleaseend(i)) then
     
    149149             (itime.eq.ireleaseend(i))) rfraction=rfraction/2.
    150150
    151   ! Take the species-average time correction factor in order to scale the
    152   ! number of particles released this time
    153   ! Also scale by number of MPI processes
    154   !**********************************************************************
     151! Take the species-average time correction factor in order to scale the
     152! number of particles released this time
     153! Also scale by number of MPI processes
     154!**********************************************************************
    155155
    156156        rfraction=rfraction*average_timecorrect
     
    158158        rfraction=rfraction+xmasssave(i)  ! number to be released at this time
    159159
    160         ! number to be released for one process
     160! number to be released for one process
    161161        if (mp_partid.lt.mod(int(rfraction),mp_partgroup_np)) then
    162162          addone=1
     
    180180        numrel=npart(i)/mp_partgroup_np+addone
    181181      endif
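To make the per-process split concrete, here is a small stand-alone illustration with invented numbers (10 particles, 4 processes): integer division gives 2 particles per rank, and the remainder mod(10,4)=2 is absorbed by the two lowest ranks via addone, so the ranks release 3, 3, 2 and 2 particles.

    ! Stand-alone sketch of the split logic; names and values are local to this example
    integer :: n_release, n_procs, my_rank, extra, n_mine
    n_release = 10                      ! assumed total particles for this time step
    n_procs   = 4                       ! assumed size of the particle MPI group
    my_rank   = 1                       ! assumed rank within the group (0-based)
    extra = 0
    if (my_rank .lt. mod(n_release, n_procs)) extra = 1
    n_mine = n_release/n_procs + extra  ! 3 on ranks 0 and 1, 2 on ranks 2 and 3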
    182      
     182
    183183      xaux=xpoint2(i)-xpoint1(i)
    184184      yaux=ypoint2(i)-ypoint1(i)
     
    187187        do ipart=minpart,maxpart_mpi     ! search for free storage space
    188188
    189   ! If a free storage space is found, attribute everything to this array element
    190   !*****************************************************************************
     189! If a free storage space is found, attribute everything to this array element
     190!*****************************************************************************
    191191
    192192          if (itra1(ipart).ne.itime) then
    193193
    194   ! Particle coordinates are determined by using a random position within the release volume
    195   !*****************************************************************************
    196 
    197   ! Determine horizontal particle position
    198   !***************************************
     194! Particle coordinates are determined by using a random position within the release volume
     195!*****************************************************************************
     196
     197! Determine horizontal particle position
     198!***************************************
    199199
    200200            xtra1(ipart)=xpoint1(i)+ran1(idummy)*xaux
     
    207207            ytra1(ipart)=ypoint1(i)+ran1(idummy)*yaux
    208208
    209   ! Assign mass to particle: Total mass divided by total number of particles.
    210   ! Time variation has partly been taken into account already by a species-average
    211   ! correction factor, by which the number of particles released this time has been
    212   ! scaled. Adjust the mass per particle by the species-dependent time correction factor
    213   ! divided by the species-average one
    214   !*****************************************************************************
     209! Assign mass to particle: Total mass divided by total number of particles.
     210! Time variation has partly been taken into account already by a species-average
     211! correction factor, by which the number of particles released this time has been
     212! scaled. Adjust the mass per particle by the species-dependent time correction factor
     213! divided by the species-average one
     214!*****************************************************************************
    215215            do k=1,nspec
    216                xmass1(ipart,k)=xmass(i,k)/real(npart(i)) &
    217                     *timecorrect(k)/average_timecorrect
    218   !             write (*,*) 'xmass1: ',xmass1(ipart,k),ipart,k
    219   ! Assign certain properties to particle
    220   !**************************************
     216              xmass1(ipart,k)=xmass(i,k)/real(npart(i)) &
     217                   *timecorrect(k)/average_timecorrect
     218!             write (*,*) 'xmass1: ',xmass1(ipart,k),ipart,k
     219! Assign certain properties to particle
     220!**************************************
    221221            end do
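A worked example with invented numbers: for xmass(i,k)=1.0 kg released as npart(i)=10000 particles, with timecorrect(k)=1.2 and average_timecorrect=1.0, each particle released at this time step carries 1.0/10000*1.2/1.0 = 1.2e-4 kg. The above-average mass per particle compensates for the fact that the number of particles released was scaled only by the species-average correction factor, so the total released mass still follows the species-specific time variation.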
    222222            nclass(ipart)=min(int(ran1(idummy)*real(nclassunc))+1, &
     
    234234
    235235
    236   ! Determine vertical particle position
    237   !*************************************
     236! Determine vertical particle position
     237!*************************************
    238238
    239239            ztra1(ipart)=zpoint1(i)+ran1(idummy)*zaux
    240240
    241   ! Interpolation of topography and density
    242   !****************************************
    243 
    244   ! Determine the nest we are in
    245   !*****************************
     241! Interpolation of topography and density
     242!****************************************
     243
     244! Determine the nest we are in
     245!*****************************
    246246
    247247            ngrid=0
     
    25725743          continue
    258258
    259   ! Determine (nested) grid coordinates and auxiliary parameters used for interpolation
    260   !*****************************************************************************
     259! Determine (nested) grid coordinates and auxiliary parameters used for interpolation
     260!*****************************************************************************
    261261
    262262            if (ngrid.gt.0) then
     
    294294            endif
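The unchanged lines elided above compute interpolation weights on the (possibly nested) grid; as a hedged illustration of the standard bilinear form that the declared variables ddx, ddy, rddx, rddy and p1..p4 suggest (an assumption, not a reconstruction of that code), the weights can be written as:

    ! Stand-alone sketch of bilinear interpolation weights; ix and jy are local to this example
    integer :: ix, jy
    ix   = int(xtra1(ipart))
    jy   = int(ytra1(ipart))
    ddx  = real(xtra1(ipart)) - real(ix)   ! fractional position within the grid cell
    ddy  = real(ytra1(ipart)) - real(jy)
    rddx = 1. - ddx
    rddy = 1. - ddy
    p1 = rddx*rddy   ! weight of grid point (ix  , jy  )
    p2 = ddx*rddy    ! weight of grid point (ix+1, jy  )
    p3 = rddx*ddy    ! weight of grid point (ix  , jy+1)
    p4 = ddx*ddy     ! weight of grid point (ix+1, jy+1)
    topo = p1*oro(ix,jy) + p2*oro(ix+1,jy) + p3*oro(ix,jy+1) + p4*oro(ix+1,jy+1)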
    295295
    296   ! If starting height is in pressure coordinates, retrieve pressure profile and convert zpart1 to meters
    297   !*****************************************************************************
     296! If starting height is in pressure coordinates, retrieve pressure profile and convert zpart1 to meters
     297!*****************************************************************************
    298298            if (kindz(i).eq.3) then
    299299              presspart=ztra1(ipart)
     
    337337            endif
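The unchanged lines elided here perform the actual conversion from pressure coordinates to meters; purely as an illustration of the general technique (not a reconstruction of that code), one could interpolate linearly in log-pressure between the model levels bracketing presspart. The names press_prof, dz1, dz2 and kz are invented for this sketch; height(1:nz) is assumed to hold the model level heights.

    ! Hypothetical sketch: find the levels bracketing presspart in a pressure
    ! profile press_prof(1:nz) (decreasing with height) and interpolate in log(p)
    do kz=2,nz
      if (press_prof(kz).le.presspart) then
        dz1=log(press_prof(kz-1))-log(presspart)
        dz2=log(presspart)-log(press_prof(kz))
        ztra1(ipart)=(height(kz-1)*dz2+height(kz)*dz1)/(dz1+dz2)
        exit
      endif
    end do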
    338338
    339   ! If release positions are given in meters above sea level, subtract the
    340   ! topography from the starting height
    341   !***********************************************************************
     339! If release positions are given in meters above sea level, subtract the
     340! topography from the starting height
     341!***********************************************************************
    342342
    343343            if (kindz(i).eq.2) ztra1(ipart)=ztra1(ipart)-topo
     
    348348
    349349
    350   ! For special simulations, multiply particle concentration with air density;
    351   ! Simply take the 2nd field in memory to do this (accurate enough)
    352   !***********************************************************************
    353   !AF IND_SOURCE switches between different units for concentrations at the source
    354   !Af    NOTE that in backward simulations the release of particles takes place at the
    355   !Af         receptor and the sampling at the source.
    356   !Af          1="mass"
    357   !Af          2="mass mixing ratio"
    358   !Af IND_RECEPTOR switches between different units for concentrations at the receptor
    359   !Af          1="mass"
    360   !Af          2="mass mixing ratio"
    361 
    362   !Af switches for the releasefile:
    363   !Af IND_REL =  1 : xmass * rho
    364   !Af IND_REL =  0 : xmass * 1
    365 
    366   !Af ind_rel is defined in readcommand.f
     350! For special simulations, multiply particle concentration with air density;
     351! Simply take the 2nd field in memory to do this (accurate enough)
     352!***********************************************************************
     353!AF IND_SOURCE switches between different units for concentrations at the source
     354!Af    NOTE that in backward simulations the release of particles takes place at the
     355!Af         receptor and the sampling at the source.
     356!Af          1="mass"
     357!Af          2="mass mixing ratio"
     358!Af IND_RECEPTOR switches between different units for concentrations at the receptor
     359!Af          1="mass"
     360!Af          2="mass mixing ratio"
     361
     362!Af switches for the releasefile:
     363!Af IND_REL =  1 : xmass * rho
     364!Af IND_REL =  0 : xmass * 1
     365
     366!Af ind_rel is defined in readcommand.f
    367367
    368368            if (ind_rel .eq. 1) then
    369369
    370   ! Interpolate the air density
    371   !****************************
     370! Interpolate the air density
     371!****************************
    372372
    373373              do ii=2,nz
     
    403403
    404404
    405   ! Multiply "mass" (i.e., mass mixing ratio in forward runs) with density
    406   !********************************************************************
     405! Multiply "mass" (i.e., mass mixing ratio in forward runs) with density
     406!********************************************************************
    407407
    408408              do k=1,nspec
     
    416416          endif
    417417        end do
    418         if (ipart.gt.maxpart) goto 996
     418! ESO TODO: Here we could use dynamic allocation and increase array sizes
     419        if (ipart.gt.maxpart_mpi) goto 996
    419420
    42042134      minpart=ipart+1
    421422      end do
    422       endif
     423    endif
    423424  end do
    424425
     
    426427  return
    427428
    428 996   continue
     429996 continue
    429430  write(*,*) '#####################################################'
    430431  write(*,*) '#### FLEXPART MODEL SUBROUTINE RELEASEPARTICLES: ####'
  • src/timemanager_mpi.f90

    r0f7835d r861805a  
    104104
    105105  logical :: reqv_state=.false. ! .true. if waiting for a MPI_Irecv to complete
    106   integer :: j,ks,kp,l,n,itime=0,nstop,nstop1,memstat=0 !,mind
     106  integer :: j,ks,kp,l,n,itime=0,nstop,nstop1,memstat=0
    107107! integer :: ksp
    108108  integer :: ip
     
    155155
    156156  do itime=0,ideltas,lsynctime
     157   
    157158
    158159! Computation of wet deposition, OH reaction and mass transfer
     
    166167!********************************************************************
    167168
    168     if (mp_dev_mode) write(*,*) 'myid, itime: ',mp_pid,itime
     169    if (mp_dbg_mode) write(*,*) 'myid, itime: ',mp_pid,itime
    169170   
    170171    if (WETDEP .and. itime .ne. 0 .and. numpart .gt. 0) then
     
    274275
    275276    if (mp_measure_time.and..not.(lmpreader.and.lmp_use_reader)) call mpif_mtime('getfields',1)
     277
     278! For validation and tests: call the function below to set all fields to simple
     279! homogeneous values
     280!    if (mp_dev_mode) call set_fields_synthetic
     281
     282!*******************************************************************************
    276283
    277284    if (lmpreader.and.nstop1.gt.1) stop 'NO METEO FIELDS AVAILABLE'
     
    324331      call releaseparticles(itime)
    325332    endif
     333
     334
     335! Check if particles should be redistributed among processes
     336!***********************************************************
     337    call mpif_calculate_part_redist(itime)
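For readers new to this step, the sketch below shows one generic way such a redistribution test could be written; it is purely illustrative and is not the code of mpif_calculate_part_redist (which lives in mpi_mod.f90). The threshold value and the names part_counts and imbalance_limit are assumptions of this sketch; numpart, mp_partgroup_np, mp_comm_used and mp_ierr are the module variables used elsewhere in this diff.

    ! Illustrative sketch only: flag a large relative particle imbalance between ranks
    integer, dimension(mp_partgroup_np) :: part_counts
    real, parameter :: imbalance_limit = 0.25       ! assumed threshold
    call MPI_Allgather(numpart, 1, MPI_INTEGER, part_counts, 1, MPI_INTEGER, &
         & mp_comm_used, mp_ierr)
    if (real(maxval(part_counts)-minval(part_counts)) / &
         & real(max(maxval(part_counts),1)) .gt. imbalance_limit) then
    ! ... exchange particles from the most to the least loaded processes ...
    end if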
    326338
    327339
     
    542554! Decide whether to write an estimate of the number of particles released,
    543555! or exact number (requires an MPI reduce operation)
    544         if (mp_dev_mode) then
     556        if (mp_dbg_mode) then
    545557          numpart_tot_mpi = numpart
    546558        else
     
    549561
    550562        if (mp_exact_numpart.and..not.(lmpreader.and.lmp_use_reader).and.&
    551              &.not.mp_dev_mode) then
     563             &.not.mp_dbg_mode) then
    552564          call MPI_Reduce(numpart, numpart_tot_mpi, 1, MPI_INTEGER, MPI_SUM, id_root, &
    553565               & mp_comm_used, mp_ierr)
     
    555567       
    556568        !CGZ-lifetime: output species lifetime
    557         if (lroot.or.mp_dev_mode) then
     569        if (lroot.or.mp_dbg_mode) then
    558570        !   write(*,*) 'Overview species lifetime in days', &
    559571        !        real((species_lifetime(:,1)/species_lifetime(:,2))/real(3600.0*24.0))
     
    565577        !   end if
    566578        end if
     579
     580        ! Write particles for all processes
     581        if (mp_dev_mode) write(*,*) "PID, itime, numpart", mp_pid,itime,numpart
     582
    567583
    56858445      format(i13,' SECONDS SIMULATED: ',i13, ' PARTICLES:    Uncertainty: ',3f7.3)
  • options/SPECIES/SPECIES_031

    r341f4b7 r35fa90d  
    99 POHCCONST=  1.07E-11,
    1010 POHDCONST=  1203.00000    ,
    11  POHNCONST=  2.00000000    ,
     11 POHNCONST=  0.00000000    ,
    1212 /
  • src/par_mod.f90

    r3b80e98 r6b3dad4  
    186186  !**************************************************
    187187
    188   integer,parameter :: maxpart=1000000
    189   integer,parameter :: maxspec=2
     188  integer,parameter :: maxpart=10000000
     189  integer,parameter :: maxspec=4
    190190  real,parameter :: minmass=0.0001
    191191