Changes in / [d404d98:46706c7] in flexpart.git


Location: src
Files: 3 edited

  • src/mpi_mod.f90

    r861805a r0f7835d  
    9090! MPI tags/requests for send/receive operation
    9191  integer :: tm1
    92   integer, parameter :: nvar_async=26
     92  integer, parameter :: nvar_async=26 !27 !29 :DBG:
    9393!integer, dimension(:), allocatable :: tags
    9494  integer, dimension(:), allocatable :: reqs
    95 
    96 ! Status array used for certain MPI operations (MPI_RECV)
    97   integer, dimension(MPI_STATUS_SIZE) :: mp_status
    9895
    9996
     
    122119  logical, parameter :: mp_dev_mode = .false.
    123120  logical, parameter :: mp_dbg_out = .false.
    124   logical, parameter :: mp_time_barrier=.false.
     121  logical, parameter :: mp_time_barrier=.true.
    125122  logical, parameter :: mp_measure_time=.false.
    126123  logical, parameter :: mp_exact_numpart=.true.
     
    152149  integer, private :: dat_lun
    153150
    154 ! Temporary arrays for particles (allocated and deallocated as needed)
    155   integer, allocatable, dimension(:) :: nclass_tmp, npoint_tmp, itra1_tmp, idt_tmp, &
    156        & itramem_tmp, itrasplit_tmp
    157   real(kind=dp), allocatable, dimension(:) :: xtra1_tmp, ytra1_tmp
    158   real, allocatable, dimension(:) :: ztra1_tmp
    159   real, allocatable, dimension(:,:) :: xmass1_tmp
    160 
    161 ! mp_redist_fract        Exchange particles between processes if relative difference
    162 !                        is larger. A good value is between 0.0 and 0.5
    163 ! mp_maxpart_factor      Allocate more memory per process than strictly needed
    164 !                        (safety factor). Recommended value between 1.5 and 2.5
    165 ! mp_min_redist          Do not redistribute particles if below this limit
    166   real, parameter :: mp_redist_fract=0.2, mp_maxpart_factor=1.5
    167   integer,parameter :: mp_min_redist=100000
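
    (Illustration with invented values: with the defaults above, a process holding 1200000 particles paired with one holding 900000 differs by 300000, i.e. 25% of the larger count; that exceeds mp_redist_fract=0.2 and the larger count exceeds mp_min_redist, so mpif_calculate_part_redist below would move half the difference, 150000 particles. If the larger process held fewer than 100000 particles, nothing would be exchanged.)
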
    168 
    169 
    170151contains
    171152
     
    214195    if (dep_prec==dp) then
    215196      mp_cp = MPI_REAL8
    216 ! TODO: write info message for serial version as well
     197      ! TODO: write info message for serial version as well
    217198      if (lroot.and.verbosity>0) write(*,*) 'Using double precision for deposition fields'
    218199    else if (dep_prec==sp) then
     
    261242!**********************************************************************
    262243
     244!    id_read = min(mp_np-1, 1)
    263245    id_read = mp_np-1
    264246
     
    329311
    330312! Set maxpart per process
    331 ! eso 08/2016: Increase maxpart per process, in case of unbalanced distribution
    332     maxpart_mpi=int(mp_maxpart_factor*maxpart/mp_partgroup_np)
     313    if (mp_partid.lt.mod(maxpart,mp_partgroup_np)) addmaxpart=1
     314    maxpart_mpi=int(maxpart/mp_partgroup_np)+addmaxpart
    333315
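
    (Illustration with invented values for the addmaxpart variant on the right-hand side: with maxpart=1000003 and mp_partgroup_np=4, mod(maxpart,4)=3, so ranks 0, 1 and 2 get addmaxpart=1 and may hold up to 250001 particles each, rank 3 may hold up to 250000, and the per-process limits sum back to 1000003.)
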
    334316! Do not allocate particle data arrays for readwind process
     
    339321! Set random seed for each non-root process
    340322    if (mp_pid.gt.0) then
     323!    if (mp_pid.ge.0) then
     324!      call system_clock(s)
    341325      s = 244
    342326      mp_seed = -abs(mod((s*181)*((mp_pid-83)*359), 104729))
    343327    end if
     328    if (mp_dev_mode) write(*,*) 'PID, mp_seed: ',mp_pid, mp_seed
    344329    if (mp_dbg_mode) then
     330! :DBG: For debugging, set all seed to 0 and maxrand to e.g. 20
    345331      mp_seed=0
    346332      if (lroot) write(*,*) 'MPI: setting seed=0'
     
    468454
    469455
    470   subroutine mpif_send_part_properties(num_part)
    471 !***********************************************************************
    472 ! Distribute particle properties from root to all processes.
    473 
    474 ! Used by init_domainfill_mpi
    475 !
    476 ! Variables:
    477 !
    478 ! num_part        input, number of particles per process (rounded up)
     456  subroutine mpif_tm_send_vars
     457!***********************************************************************
     458! Distribute particle variables from pid0 to all processes.
     459! Called from timemanager
     460! *NOT IN USE* at the moment, but can be useful for debugging
    479461!
    480462!***********************************************************************
     
    483465    implicit none
    484466
    485     integer,intent(in) :: num_part
    486     integer :: i,jj, addone
     467    integer :: i
    487468
    488469! Time for MPI communications
     
    490471    if (mp_measure_time) call mpif_mtime('commtime',0)
    491472
    492 ! Distribute variables, send from pid 0 to other processes (including itself)
    493 !****************************************************************************
    494 
    495     call MPI_SCATTER(nclass_tmp,num_part,MPI_INTEGER,nclass,&
    496          &num_part,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
    497     if (mp_ierr /= 0) goto 600
    498     call MPI_SCATTER(npoint_tmp,num_part,MPI_INTEGER,npoint,&
    499          &num_part,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
    500     if (mp_ierr /= 0) goto 600
    501     call MPI_SCATTER(itra1_tmp,num_part,MPI_INTEGER,itra1,&
    502          &num_part,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
    503     if (mp_ierr /= 0) goto 600
    504     call MPI_SCATTER(idt_tmp,num_part,MPI_INTEGER,idt,&
    505          &num_part,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
    506     if (mp_ierr /= 0) goto 600
    507     call MPI_SCATTER(itramem_tmp,num_part,MPI_INTEGER,itramem,&
    508          &num_part,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
    509     if (mp_ierr /= 0) goto 600
    510     call MPI_SCATTER(itrasplit_tmp,num_part,MPI_INTEGER,itrasplit,&
    511          &num_part,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
    512     if (mp_ierr /= 0) goto 600
    513     call MPI_SCATTER(xtra1_tmp,num_part,mp_dp,xtra1,&
    514          &num_part,mp_dp,id_root,mp_comm_used,mp_ierr)
    515     if (mp_ierr /= 0) goto 600
    516     call MPI_SCATTER(ytra1_tmp,num_part,mp_dp,ytra1,&
    517          &num_part,mp_dp,id_root,mp_comm_used,mp_ierr)
    518     if (mp_ierr /= 0) goto 600
    519     call MPI_SCATTER(ztra1_tmp,num_part,mp_sp,ztra1,&
    520          &num_part,mp_sp,id_root,mp_comm_used,mp_ierr)
    521     if (mp_ierr /= 0) goto 600
    522     do i=1,nspec
    523       call MPI_SCATTER(xmass1_tmp(:,i),num_part,mp_sp,xmass1(:,i),&
    524            &num_part,mp_sp,id_root,mp_comm_used,mp_ierr)
    525       if (mp_ierr /= 0) goto 600
    526     end do
    527 
    528     if (mp_measure_time) call mpif_mtime('commtime',1)
    529     write(*,*) "PID ", mp_partid, "ending MPI_Scatter operation"
    530 
    531     goto 601
    532 
    533 600 write(*,*) "mpi_mod> mp_ierr \= 0", mp_ierr
    534     stop
    535 
    536 ! After the transfer of particles it is possible that the value of
    537 ! "numpart" is larger than the actual number, so we reduce it if there are
    538 ! invalid particles at the end of the arrays
    539 601 do i=num_part, 1, -1
    540       if (itra1(i).eq.-999999999) then
    541         numpart=numpart-1
    542       else
    543         exit
    544       end if
    545     end do
    546 
    547 
    548 !601 end subroutine mpif_send_part_properties
    549   end subroutine mpif_send_part_properties
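
    A minimal, self-contained sketch of the MPI_SCATTER call shape used in mpif_send_part_properties above (not part of this changeset; the program name scatter_sketch and the variables nper, itra_all and itra_local are invented stand-ins for num_part, itra1_tmp and itra1). The root rank holds one contiguous buffer and every rank, the root included, receives an equal-sized chunk of it:

      program scatter_sketch
        use mpi
        implicit none
        integer, parameter :: nper = 4          ! particles per process (num_part analogue)
        integer :: ierr, rank, nproc, i
        integer, allocatable :: itra_all(:)     ! root-side buffer (itra1_tmp analogue)
        integer :: itra_local(nper)             ! per-rank buffer (itra1 analogue)

        call MPI_INIT(ierr)
        call MPI_COMM_RANK(MPI_COMM_WORLD, rank, ierr)
        call MPI_COMM_SIZE(MPI_COMM_WORLD, nproc, ierr)

        allocate(itra_all(nper*nproc))
        if (rank == 0) then
          do i = 1, nper*nproc
            itra_all(i) = i                     ! dummy particle data on the root
          end do
        end if

        ! Same call shape as above: send count and receive count are both the
        ! per-process chunk size, and the root rank is 0.
        call MPI_SCATTER(itra_all, nper, MPI_INTEGER, itra_local, nper, MPI_INTEGER, &
             & 0, MPI_COMM_WORLD, ierr)

        write(*,*) 'rank', rank, 'received', itra_local
        call MPI_FINALIZE(ierr)
      end program scatter_sketch

    Each array handled by the routine above (nclass, npoint, idt, xtra1, ...) follows this same pattern; only the MPI datatype changes for the real-valued arrays.
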
    550 
    551 
    552   subroutine mpif_calculate_part_redist(itime)
    553 !***********************************************************************
    554 ! Calculate number of particles to redistribute between processes. This routine
    555 ! can be called at regular time intervals to keep a level number of
    556 ! particles on each process.
    557 !
    558 ! First, all processes report their local "numpart" to each other, which is
    559 ! stored in array "numpart_mpi(np)". This array is sorted from low to
    560 ! high values, along with a corresponding process ID array "idx_arr(np)".
    561 ! If the relative difference between the highest and lowest "numpart_mpi" value
    562 ! is above a threshold, particles are transferred from process idx_arr(np-1)
    563 ! to process idx_arr(0). Repeat for processes idx_arr(np-i) and idx_arr(i)
    564 ! for all valid i.
    565 ! Note: If np is an odd number, the process with the median
    566 ! number of particles is left unchanged
    567 !
    568 ! VARIABLES
    569 ! itime        input, current time
    570 !***********************************************************************
    571     use com_mod
    572 
    573     implicit none
    574    
    575     integer, intent(in) :: itime
    576     real :: pmin,z
    577     integer :: i,jj,nn, num_part=1,m,imin, num_trans
    578     logical :: first_iter
    579     integer,allocatable,dimension(:) :: numparticles_mpi, idx_arr
    580     real,allocatable,dimension(:) :: sorted ! TODO: we don't really need this
    581 
    582 ! Exit if running with only 1 process
    583 !************************************************************************
    584     if (mp_np.eq.1) return
    585 
    586 ! All processes exchange information on number of particles
    587 !****************************************************************************
    588     allocate(numparticles_mpi(0:mp_partgroup_np-1), &
    589          &idx_arr(0:mp_partgroup_np-1), sorted(0:mp_partgroup_np-1))
    590 
    591     call MPI_Allgather(numpart, 1, MPI_INTEGER, numparticles_mpi, &
    592          & 1, MPI_INTEGER, mp_comm_used, mp_ierr)
    593 
    594 
    595 ! Sort from lowest to highest
    596 ! Initial guess: correct order
    597     sorted(:) = numparticles_mpi(:)
    598     do i=0, mp_partgroup_np-1
    599       idx_arr(i) = i
    600     end do
    601 
    602 ! For each successive element in index array, see if a lower value exists
    603     do i=0, mp_partgroup_np-2
    604       pmin=sorted(i)
    605       imin=idx_arr(i)
    606       m=i+1
    607       do jj=m, mp_partgroup_np-1
    608         if (pmin.le.sorted(jj)) cycle
    609         z=pmin
    610         pmin=sorted(jj)
    611         sorted(jj)=z
    612 
    613         nn=imin
    614         imin=idx_arr(jj)
    615         idx_arr(jj)=nn
    616       end do
    617       sorted(i)=pmin
    618       idx_arr(i)=imin
    619     end do
    620 
    621 ! For each pair of processes, transfer particles if the difference is above a
    622 ! limit, and numpart at sending process large enough
    623 
    624     m=mp_partgroup_np-1 ! index for last sorted process (most particles)
    625     do i=0,mp_partgroup_np/2-1
    626       num_trans = numparticles_mpi(idx_arr(m)) - numparticles_mpi(idx_arr(i))
    627       if (mp_partid.eq.idx_arr(m).or.mp_partid.eq.idx_arr(i)) then
    628         if ( numparticles_mpi(idx_arr(m)).gt.mp_min_redist.and.&
    629              & real(num_trans)/real(numparticles_mpi(idx_arr(m))).gt.mp_redist_fract) then
    630           call mpif_redist_part(itime, idx_arr(m), idx_arr(i), num_trans/2)
    631         end if
    632       end if
    633       m=m-1
    634     end do
    635 
    636     deallocate(numparticles_mpi, idx_arr, sorted)
    637 
    638   end subroutine mpif_calculate_part_redist
    639 
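
    A serial sketch of the pairing logic described in the header above (not FLEXPART code; the particle counts and all names are invented): sort an index array by per-process particle count, pair the busiest rank with the least busy one, the second busiest with the second least busy, and move half the difference whenever the imbalance criteria are met:

      program redist_sketch
        implicit none
        integer, parameter :: np = 5
        real, parameter :: redist_fract = 0.2
        integer, parameter :: min_redist = 100000
        integer :: counts(0:np-1), idx(0:np-1)
        integer :: i, jj, m, tmp, num_trans

        counts = (/ 900000, 1500000, 1100000, 400000, 1000000 /)
        idx = (/ (i, i=0, np-1) /)

        ! Selection sort of the index array by particle count (lowest first)
        do i = 0, np-2
          do jj = i+1, np-1
            if (counts(idx(jj)) < counts(idx(i))) then
              tmp = idx(i); idx(i) = idx(jj); idx(jj) = tmp
            end if
          end do
        end do

        ! Pair the rank with the most particles against the rank with the fewest,
        ! and so on inwards; with an odd np the median rank is left untouched.
        m = np-1
        do i = 0, np/2-1
          num_trans = counts(idx(m)) - counts(idx(i))
          if (counts(idx(m)) > min_redist .and. &
               & real(num_trans)/real(counts(idx(m))) > redist_fract) then
            write(*,*) 'move', num_trans/2, 'particles from rank', idx(m), 'to rank', idx(i)
          end if
          m = m-1
        end do
      end program redist_sketch

    With these counts the sketch reports a single transfer of 550000 particles from rank 1 to rank 3; the second pair (ranks 2 and 0) differs by roughly 18%, below the 20% threshold, and rank 4 (the median of the five) is left untouched.
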
    640 
    641   subroutine mpif_redist_part(itime, src_proc, dest_proc, num_trans)
    642 !***********************************************************************
    643 ! Transfer particle properties between two arbitrary processes.
    644 !
    645 ! VARIABLES
    646 !
    647 ! itime           input, current time
    648 ! src_proc        input, ID of source process
    649 ! dest_proc       input, ID of destination process
    650 ! num_trans       input, number of particles to transfer
    651 !
    652 !************************************************************************
    653     use com_mod !TODO:  ,only: nclass etc
    654 
    655     implicit none
    656 
    657     integer, intent(in) :: itime, src_proc, dest_proc, num_trans
    658     integer :: ll, ul ! lower and upper indices in arrays
    659     integer :: arr_sz ! size of temporary arrays
    660     integer :: mtag   ! MPI message tag
    661     integer :: i, j, minpart, ipart, maxnumpart
    662  
    663 ! Measure time for MPI communications
    664 !************************************
    665     if (mp_measure_time) call mpif_mtime('commtime',0)
    666 
    667 ! Send particles
    668 !***************
    669     if (mp_partid.eq.src_proc) then
    670       mtag = 2000
    671 
    672 ! Array indices for the transferred particles
    673       ll=numpart-num_trans+1
    674       ul=numpart
    675 
    676       ! if (mp_dev_mode) then
    677       !   write(*,FMT='(72("#"))')
    678       !   write(*,*) "Sending ", num_trans, "particles (from/to)", src_proc, dest_proc
    679       !   write(*,*) "numpart before: ", numpart
    680       ! end if
    681 
    682       call MPI_SEND(nclass(ll:ul),num_trans,&
    683            & MPI_INTEGER,dest_proc,mtag+1,mp_comm_used,mp_ierr)
    684 
    685       call MPI_SEND(npoint(ll:ul),num_trans,&
    686            & MPI_INTEGER,dest_proc,mtag+2,mp_comm_used,mp_ierr)
    687 
    688       call MPI_SEND(itra1(ll:ul),num_trans, &
    689            & MPI_INTEGER,dest_proc,mtag+3,mp_comm_used,mp_ierr)
    690 
    691       call MPI_SEND(idt(ll:ul),num_trans, &
    692            & MPI_INTEGER,dest_proc,mtag+4,mp_comm_used,mp_ierr)
    693 
    694       call MPI_SEND(itramem(ll:ul),num_trans, &
    695            & MPI_INTEGER,dest_proc,mtag+5,mp_comm_used,mp_ierr)
    696 
    697       call MPI_SEND(itrasplit(ll:ul),num_trans,&
    698            & MPI_INTEGER,dest_proc,mtag+6,mp_comm_used,mp_ierr)
    699 
    700       call MPI_SEND(xtra1(ll:ul),num_trans, &
    701            & mp_dp,dest_proc,mtag+7,mp_comm_used,mp_ierr)
    702 
    703       call MPI_SEND(ytra1(ll:ul),num_trans,&
    704            & mp_dp,dest_proc,mtag+8,mp_comm_used,mp_ierr)
    705 
    706       call MPI_SEND(ztra1(ll:ul),num_trans,&
    707            & mp_sp,dest_proc,mtag+9,mp_comm_used,mp_ierr)
    708 
    709       do j=1,nspec
    710         call MPI_SEND(xmass1(ll:ul,j),num_trans,&
    711              & mp_sp,dest_proc,mtag+(9+j),mp_comm_used,mp_ierr)
    712       end do
    713 
    714 ! Terminate transferred particles, update numpart
    715       itra1(ll:ul) = -999999999
    716 
    717       numpart = numpart-num_trans
    718 
    719       ! if (mp_dev_mode) then
    720       !   write(*,*) "numpart after: ", numpart
    721       !   write(*,FMT='(72("#"))')
    722       ! end if
    723 
    724     else if (mp_partid.eq.dest_proc) then
    725 
    726 ! Receive particles
    727 !******************
    728       mtag = 2000
    729       ! Array indices for the transferred particles
    730       ll=numpart+1
    731       ul=numpart+num_trans
    732 
    733       ! if (mp_dev_mode) then
    734       !   write(*,FMT='(72("#"))')
    735       !   write(*,*) "Receiving ", num_trans, "particles (from/to)", src_proc, dest_proc
    736       !   write(*,*) "numpart before: ", numpart
    737       ! end if
    738 
    739 ! We could receive the data directly at nclass(ll:ul) etc., but this leaves
    740 ! vacant spaces at lower indices. Using temporary arrays instead.
    741       arr_sz = ul-ll+1
    742       allocate(itra1_tmp(arr_sz),npoint_tmp(arr_sz),nclass_tmp(arr_sz),&
    743            & idt_tmp(arr_sz),itramem_tmp(arr_sz),itrasplit_tmp(arr_sz),&
    744            & xtra1_tmp(arr_sz),ytra1_tmp(arr_sz),ztra1_tmp(arr_sz),&
    745            & xmass1_tmp(arr_sz, maxspec))
    746      
    747       call MPI_RECV(nclass_tmp,num_trans,&
    748            & MPI_INTEGER,src_proc,mtag+1,mp_comm_used,mp_status,mp_ierr)
    749 
    750       call MPI_RECV(npoint_tmp,num_trans,&
    751            & MPI_INTEGER,src_proc,mtag+2,mp_comm_used,mp_status,mp_ierr)
    752 
    753       call MPI_RECV(itra1_tmp,num_trans, &
    754            & MPI_INTEGER,src_proc,mtag+3,mp_comm_used,mp_status,mp_ierr)
    755 
    756       call MPI_RECV(idt_tmp,num_trans, &
    757            & MPI_INTEGER,src_proc,mtag+4,mp_comm_used,mp_status,mp_ierr)
    758 
    759       call MPI_RECV(itramem_tmp,num_trans, &
    760            & MPI_INTEGER,src_proc,mtag+5,mp_comm_used,mp_status,mp_ierr)
    761 
    762       call MPI_RECV(itrasplit_tmp,num_trans,&
    763            & MPI_INTEGER,src_proc,mtag+6,mp_comm_used,mp_status,mp_ierr)
    764 
    765       call MPI_RECV(xtra1_tmp,num_trans, &
    766            & mp_dp,src_proc,mtag+7,mp_comm_used,mp_status,mp_ierr)
    767 
    768       call MPI_RECV(ytra1_tmp,num_trans,&
    769            & mp_dp,src_proc,mtag+8,mp_comm_used,mp_status,mp_ierr)
    770 
    771       call MPI_RECV(ztra1_tmp,num_trans,&
    772            & mp_sp,src_proc,mtag+9,mp_comm_used,mp_status,mp_ierr)
    773 
    774       do j=1,nspec
    775         call MPI_RECV(xmass1_tmp(:,j),num_trans,&
    776              & mp_sp,src_proc,mtag+(9+j),mp_comm_used,mp_status,mp_ierr)
    777       end do
    778 
    779 ! This is the maximum value numpart can possibly have after the transfer
    780       maxnumpart=numpart+num_trans
    781 
    782 ! Search for vacant space and copy from temporary storage
    783 !********************************************************
    784       minpart=1
    785       do i=1, num_trans
    786 ! Take into account that we may have transferred invalid particles
    787         if (itra1_tmp(minpart).ne.itime) goto 200
    788         do ipart=minpart,maxnumpart
    789           if (itra1(ipart).ne.itime) then
    790             itra1(ipart) = itra1_tmp(minpart)
    791             npoint(ipart) = npoint_tmp(minpart)
    792             nclass(ipart) = nclass_tmp(minpart)
    793             idt(ipart) = idt_tmp(minpart)
    794             itramem(ipart) = itramem_tmp(minpart)
    795             itrasplit(ipart) = itrasplit_tmp(minpart)
    796             xtra1(ipart) = xtra1_tmp(minpart)
    797             ytra1(ipart) = ytra1_tmp(minpart)
    798             ztra1(ipart) = ztra1_tmp(minpart)
    799             xmass1(ipart,:) = xmass1_tmp(minpart,:)
    800 ! Increase numpart, if necessary
    801             numpart=max(numpart,ipart)
    802             goto 200 ! Storage space has been found, stop searching
    803           end if
    804         end do
    805 200     minpart=minpart+1
    806       end do
    807 
    808       deallocate(itra1_tmp,npoint_tmp,nclass_tmp,idt_tmp,itramem_tmp,itrasplit_tmp,&
    809            & xtra1_tmp,ytra1_tmp,ztra1_tmp,xmass1_tmp)
    810 
    811       ! if (mp_dev_mode) then
    812       !   write(*,*) "numpart after: ", numpart
    813       !   write(*,FMT='(72("#"))')
    814       ! end if
    815 
    816     else
    817 ! This routine should only be called by the two participating processes
    818       write(*,*) "ERROR: wrong process has entered mpi_mod::mpif_redist_part"
    819       stop
    820       return
    821     end if
    822 
    823 ! Measure time for MPI communications
    824 !************************************
    825     if (mp_measure_time) call mpif_mtime('commtime',1)
    826 
    827   end subroutine mpif_redist_part
    828 
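
    A two-rank sketch of the tagged point-to-point exchange above (not part of this changeset; run it with at least two MPI processes, and note that the slice contents and names are invented). The source rank sends each particle array slice with its own tag offset, and the destination receives them into temporary buffers:

      program sendrecv_sketch
        use mpi
        implicit none
        integer, parameter :: num_trans = 3, mtag = 2000
        integer :: ierr, rank
        integer :: itra_slice(num_trans), idt_slice(num_trans)
        integer :: stat(MPI_STATUS_SIZE)

        call MPI_INIT(ierr)
        call MPI_COMM_RANK(MPI_COMM_WORLD, rank, ierr)

        if (rank == 0) then                 ! plays the role of src_proc
          itra_slice = (/ 10, 20, 30 /)
          idt_slice  = (/ 1, 2, 3 /)
          call MPI_SEND(itra_slice, num_trans, MPI_INTEGER, 1, mtag+3, MPI_COMM_WORLD, ierr)
          call MPI_SEND(idt_slice,  num_trans, MPI_INTEGER, 1, mtag+4, MPI_COMM_WORLD, ierr)
        else if (rank == 1) then            ! plays the role of dest_proc
          call MPI_RECV(itra_slice, num_trans, MPI_INTEGER, 0, mtag+3, MPI_COMM_WORLD, stat, ierr)
          call MPI_RECV(idt_slice,  num_trans, MPI_INTEGER, 0, mtag+4, MPI_COMM_WORLD, stat, ierr)
          write(*,*) 'received itra1 slice:', itra_slice, 'idt slice:', idt_slice
        end if

        call MPI_FINALIZE(ierr)
      end program sendrecv_sketch

    The distinct tags (mtag+1 up to mtag+9+nspec in the routine above) keep the many messages of one transfer from being matched out of order.
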
    829 
    830   subroutine mpif_tm_send_vars
    831 !***********************************************************************
    832 ! Distribute particle variables from pid0 to all processes.
    833 ! Called from timemanager
    834 ! *NOT IN USE* at the moment, but can be useful for debugging
    835 !
    836 !***********************************************************************
    837     use com_mod
    838 
    839     implicit none
    840 
    841     integer :: i
    842 
    843 ! Time for MPI communications
    844 !****************************
    845     if (mp_measure_time) call mpif_mtime('commtime',0)
    846 
    847473! Distribute variables, send from pid 0 to other processes
    848474!**********************************************************************
     
    851477    if (lroot) then
    852478      call MPI_SCATTER(npoint,numpart_mpi,MPI_INTEGER,MPI_IN_PLACE,&
    853            & numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
     479           &numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
    854480      if (mp_ierr /= 0) goto 600
    855481      call MPI_SCATTER(idt,numpart_mpi,MPI_INTEGER,MPI_IN_PLACE,&
    856            & numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
     482           &numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
    857483      if (mp_ierr /= 0) goto 600
    858484      call MPI_SCATTER(itra1,numpart_mpi,MPI_INTEGER,MPI_IN_PLACE,&
    859            & numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
     485           &numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
    860486      if (mp_ierr /= 0) goto 600
    861487      call MPI_SCATTER(nclass,numpart_mpi,MPI_INTEGER,MPI_IN_PLACE,&
    862            & numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
     488           &numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
    863489      if (mp_ierr /= 0) goto 600
    864490      call MPI_SCATTER(itramem,numpart_mpi,MPI_INTEGER,MPI_IN_PLACE,&
    865            & numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
     491           &numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
    866492      if (mp_ierr /= 0) goto 600
    867493
    868494! int2
    869495      call MPI_SCATTER(cbt,numpart_mpi,MPI_INTEGER2,MPI_IN_PLACE,&
    870            & numpart_mpi,MPI_INTEGER2,id_root,mp_comm_used,mp_ierr)
     496           &numpart_mpi,MPI_INTEGER2,id_root,mp_comm_used,mp_ierr)
    871497      if (mp_ierr /= 0) goto 600
    872498
    873499! real
    874500      call MPI_SCATTER(uap,numpart_mpi,mp_sp,MPI_IN_PLACE,&
    875            & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     501           &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    876502      if (mp_ierr /= 0) goto 600
    877503      call MPI_SCATTER(ucp,numpart_mpi,mp_sp,MPI_IN_PLACE,&
    878            & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     504           &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    879505      if (mp_ierr /= 0) goto 600
    880506      call MPI_SCATTER(uzp,numpart_mpi,mp_sp,MPI_IN_PLACE,&
    881            & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     507           &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    882508      if (mp_ierr /= 0) goto 600
    883509
    884510      call MPI_SCATTER(us,numpart_mpi,mp_sp,MPI_IN_PLACE,&
    885            & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     511           &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    886512      if (mp_ierr /= 0) goto 600
    887513      call MPI_SCATTER(vs,numpart_mpi,mp_sp,MPI_IN_PLACE,&
    888            & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     514           &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    889515      if (mp_ierr /= 0) goto 600
    890516      call MPI_SCATTER(ws,numpart_mpi,mp_sp,MPI_IN_PLACE,&
    891            & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     517           &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    892518      if (mp_ierr /= 0) goto 600
    893519
    894520      call MPI_SCATTER(xtra1,numpart_mpi,mp_dp,MPI_IN_PLACE,&
    895            & numpart_mpi,mp_dp,id_root,mp_comm_used,mp_ierr)
     521           &numpart_mpi,mp_dp,id_root,mp_comm_used,mp_ierr)
    896522      if (mp_ierr /= 0) goto 600
    897523      call MPI_SCATTER(ytra1,numpart_mpi,mp_dp,MPI_IN_PLACE,&
    898            & numpart_mpi,mp_dp,id_root,mp_comm_used,mp_ierr)
     524           &numpart_mpi,mp_dp,id_root,mp_comm_used,mp_ierr)
    899525      if (mp_ierr /= 0) goto 600
    900526      call MPI_SCATTER(ztra1,numpart_mpi,mp_sp,MPI_IN_PLACE,&
    901            & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     527           &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    902528      if (mp_ierr /= 0) goto 600
    903529
    904530      do i=1,nspec
    905531        call MPI_SCATTER(xmass1(:,i),numpart_mpi,mp_sp,MPI_IN_PLACE,&
    906              & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     532             &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    907533        if (mp_ierr /= 0) goto 600
    908534      end do
     
    911537! integers
    912538      call MPI_SCATTER(npoint,numpart_mpi,MPI_INTEGER,npoint,&
    913            & numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
     539           &numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
    914540      if (mp_ierr /= 0) goto 600
    915541      call MPI_SCATTER(idt,numpart_mpi,MPI_INTEGER,idt,&
    916            & numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
     542           &numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
    917543      if (mp_ierr /= 0) goto 600
    918544      call MPI_SCATTER(itra1,numpart_mpi,MPI_INTEGER,itra1,&
    919            & numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
     545           &numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
    920546      if (mp_ierr /= 0) goto 600
    921547      call MPI_SCATTER(nclass,numpart_mpi,MPI_INTEGER,nclass,&
    922            & numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
     548           &numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
    923549      if (mp_ierr /= 0) goto 600
    924550      call MPI_SCATTER(itramem,numpart_mpi,MPI_INTEGER,itramem,&
    925            & numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
     551           &numpart_mpi,MPI_INTEGER,id_root,mp_comm_used,mp_ierr)
    926552      if (mp_ierr /= 0) goto 600
    927553
    928554! int2
    929555      call MPI_SCATTER(cbt,numpart_mpi,MPI_INTEGER2,cbt,&
    930            & numpart_mpi,MPI_INTEGER2,id_root,mp_comm_used,mp_ierr)
     556           &numpart_mpi,MPI_INTEGER2,id_root,mp_comm_used,mp_ierr)
    931557      if (mp_ierr /= 0) goto 600
    932558
    933559! reals
    934560      call MPI_SCATTER(uap,numpart_mpi,mp_sp,uap,&
    935            & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     561           &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    936562      if (mp_ierr /= 0) goto 600
    937563      call MPI_SCATTER(ucp,numpart_mpi,mp_sp,ucp,&
    938            & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     564           &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    939565      if (mp_ierr /= 0) goto 600
    940566      call MPI_SCATTER(uzp,numpart_mpi,mp_sp,uzp,&
    941            & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     567           &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    942568      if (mp_ierr /= 0) goto 600
    943569
    944570      call MPI_SCATTER(us,numpart_mpi,mp_sp,us,&
    945            & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     571           &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    946572      if (mp_ierr /= 0) goto 600
    947573      call MPI_SCATTER(vs,numpart_mpi,mp_sp,vs,&
    948            & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     574           &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    949575      if (mp_ierr /= 0) goto 600
    950576      call MPI_SCATTER(ws,numpart_mpi,mp_sp,ws,&
    951            & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     577           &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    952578      if (mp_ierr /= 0) goto 600
    953579
    954580      call MPI_SCATTER(xtra1,numpart_mpi,mp_dp,xtra1,&
    955            & numpart_mpi,mp_dp,id_root,mp_comm_used,mp_ierr)
     581           &numpart_mpi,mp_dp,id_root,mp_comm_used,mp_ierr)
    956582      if (mp_ierr /= 0) goto 600
    957583      call MPI_SCATTER(ytra1,numpart_mpi,mp_dp,ytra1,&
    958            & numpart_mpi,mp_dp,id_root,mp_comm_used,mp_ierr)
     584           &numpart_mpi,mp_dp,id_root,mp_comm_used,mp_ierr)
    959585      if (mp_ierr /= 0) goto 600
    960586      call MPI_SCATTER(ztra1,numpart_mpi,mp_sp,ztra1,&
    961            & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     587           &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    962588      if (mp_ierr /= 0) goto 600
    963589
    964590      do i=1,nspec
    965591        call MPI_SCATTER(xmass1(:,i),numpart_mpi,mp_sp,xmass1(:,i),&
    966              & numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
     592             &numpart_mpi,mp_sp,id_root,mp_comm_used,mp_ierr)
    967593        if (mp_ierr /= 0) goto 600
    968594      end do
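
    The root branch above passes MPI_IN_PLACE as the receive buffer so that the root's own chunk of each array stays where it already is. A minimal sketch of that pattern (not FLEXPART code; it passes a separate dummy send buffer on non-root ranks, whereas the routine above reuses the receive array):

      program inplace_scatter_sketch
        use mpi
        implicit none
        integer, parameter :: nper = 2
        integer :: ierr, rank, nproc, i
        integer :: dummy(nper)
        integer, allocatable :: buf(:)

        call MPI_INIT(ierr)
        call MPI_COMM_RANK(MPI_COMM_WORLD, rank, ierr)
        call MPI_COMM_SIZE(MPI_COMM_WORLD, nproc, ierr)

        if (rank == 0) then
          allocate(buf(nper*nproc))
          do i = 1, nper*nproc
            buf(i) = 100 + i
          end do
          ! Root keeps its own leading chunk in place; recvcount/recvtype are ignored here.
          call MPI_SCATTER(buf, nper, MPI_INTEGER, MPI_IN_PLACE, nper, MPI_INTEGER, &
               & 0, MPI_COMM_WORLD, ierr)
          write(*,*) 'rank 0 kept', buf(1:nper)
        else
          allocate(buf(nper))
          ! On non-root ranks the send arguments are not significant.
          call MPI_SCATTER(dummy, nper, MPI_INTEGER, buf, nper, MPI_INTEGER, &
               & 0, MPI_COMM_WORLD, ierr)
          write(*,*) 'rank', rank, 'received', buf
        end if

        call MPI_FINALIZE(ierr)
      end program inplace_scatter_sketch
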
     
    15191145
    15201146! cloud water/ice:
    1521       if (readclouds_nest(i)) then
    1522 ! call MPI_Bcast(icloud_stats(:,:,:,li:ui),d2s1*5,mp_sp,id_read,MPI_COMM_WORLD,mp_ierr)
    1523 ! if (mp_ierr /= 0) goto 600
    1524         call MPI_Bcast(ctwcn(:,:,li:ui,i),d2s1,mp_sp,id_read,MPI_COMM_WORLD,mp_ierr)
    1525         if (mp_ierr /= 0) goto 600
    1526       end if
     1147    if (readclouds_nest(i)) then
     1148      ! call MPI_Bcast(icloud_stats(:,:,:,li:ui),d2s1*5,mp_sp,id_read,MPI_COMM_WORLD,mp_ierr)
     1149      ! if (mp_ierr /= 0) goto 600
     1150      call MPI_Bcast(ctwcn(:,:,li:ui,i),d2s1,mp_sp,id_read,MPI_COMM_WORLD,mp_ierr)
     1151      if (mp_ierr /= 0) goto 600
     1152    end if
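
    The nested cloud-water field above is distributed with a plain broadcast from the reader rank. A minimal sketch of that call (not part of this changeset; rank 0 stands in for id_read, and the array name ctwc_like and its size are invented):

      program bcast_sketch
        use mpi
        implicit none
        integer, parameter :: n = 6
        integer :: ierr, rank
        real :: ctwc_like(n)

        call MPI_INIT(ierr)
        call MPI_COMM_RANK(MPI_COMM_WORLD, rank, ierr)

        if (rank == 0) ctwc_like = 0.5      ! pretend rank 0 just read the field
        call MPI_BCAST(ctwc_like, n, MPI_REAL, 0, MPI_COMM_WORLD, ierr)
        write(*,*) 'rank', rank, 'has', ctwc_like(1)
        call MPI_FINALIZE(ierr)
      end program bcast_sketch
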
    15271153
    15281154! 2D fields
     
    17721398    integer :: d2s1 = nxmax*nymax
    17731399    integer :: d2s2 = nxmax*nymax*maxspec
    1774 !integer :: d1_size1 = maxwf
     1400    !integer :: d1_size1 = maxwf
    17751401
    17761402!    integer :: d3s1,d3s2,d2s1,d2s2
     
    20191645      if (dest.eq.id_read) cycle
    20201646      do k=1, numbnests
    2021         i=dest*nvar_async
    2022         call MPI_Isend(uun(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    2023         if (mp_ierr /= 0) goto 600
     1647      i=dest*nvar_async
     1648      call MPI_Isend(uun(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     1649      if (mp_ierr /= 0) goto 600
     1650      i=i+1
     1651      call MPI_Isend(vvn(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     1652      if (mp_ierr /= 0) goto 600
     1653      i=i+1
     1654      call MPI_Isend(wwn(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     1655      if (mp_ierr /= 0) goto 600
     1656      i=i+1
     1657      call MPI_Isend(ttn(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     1658      if (mp_ierr /= 0) goto 600
     1659      i=i+1
     1660      call MPI_Isend(rhon(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     1661      if (mp_ierr /= 0) goto 600
     1662      i=i+1
     1663      call MPI_Isend(drhodzn(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     1664      if (mp_ierr /= 0) goto 600
     1665      i=i+1
     1666      call MPI_Isend(tthn(:,:,:,mind,k),d3s2,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     1667      if (mp_ierr /= 0) goto 600
     1668      i=i+1
     1669      call MPI_Isend(qvhn(:,:,:,mind,k),d3s2,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     1670      if (mp_ierr /= 0) goto 600
     1671      i=i+1
     1672      call MPI_Isend(qvn(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     1673      if (mp_ierr /= 0) goto 600
     1674      i=i+1
     1675      call MPI_Isend(pvn(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     1676      if (mp_ierr /= 0) goto 600
     1677      i=i+1
     1678      call MPI_Isend(cloudsn(:,:,:,mind,k),d3s1,MPI_INTEGER1,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     1679      i=i+1
     1680      if (mp_ierr /= 0) goto 600
     1681      call MPI_Isend(cloudshn(:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     1682      if (mp_ierr /= 0) goto 600
     1683      i=i+1
     1684      call MPI_Isend(vdepn(:,:,:,mind,k),d2s2,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     1685      if (mp_ierr /= 0) goto 600
     1686      i=i+1
     1687      call MPI_Isend(psn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     1688      if (mp_ierr /= 0) goto 600
     1689      i=i+1
     1690      call MPI_Isend(sdn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     1691      if (mp_ierr /= 0) goto 600
     1692      i=i+1
     1693! 15
     1694      call MPI_Isend(tccn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     1695      if (mp_ierr /= 0) goto 600
     1696      i=i+1
     1697      call MPI_Isend(tt2n(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     1698      if (mp_ierr /= 0) goto 600
     1699      i=i+1
     1700      call MPI_Isend(td2n(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     1701      if (mp_ierr /= 0) goto 600
     1702      i=i+1
     1703      call MPI_Isend(lsprecn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     1704      if (mp_ierr /= 0) goto 600
     1705      i=i+1
     1706      call MPI_Isend(convprecn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     1707      if (mp_ierr /= 0) goto 600
     1708      i=i+1
     1709      call MPI_Isend(ustarn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     1710      if (mp_ierr /= 0) goto 600
     1711      i=i+1
     1712      call MPI_Isend(wstarn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     1713      if (mp_ierr /= 0) goto 600
     1714      i=i+1
     1715      call MPI_Isend(hmixn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     1716      if (mp_ierr /= 0) goto 600
     1717      i=i+1
     1718      call MPI_Isend(tropopausen(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     1719      if (mp_ierr /= 0) goto 600
     1720      i=i+1
     1721      call MPI_Isend(olin(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     1722      if (mp_ierr /= 0) goto 600
     1723! 25
     1724
     1725! Send cloud water if it exists. Increment counter always (as on receiving end)
     1726      if (readclouds) then
    20241727        i=i+1
    2025         call MPI_Isend(vvn(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    2026         if (mp_ierr /= 0) goto 600
    2027         i=i+1
    2028         call MPI_Isend(wwn(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
     1728        call MPI_Isend(ctwcn(:,:,mind,k),d2s1,mp_sp,dest,tm1,&
     1729             &MPI_COMM_WORLD,reqs(i),mp_ierr)
    20291730        if (mp_ierr /= 0) goto 600
    2030         i=i+1
    2031         call MPI_Isend(ttn(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    2032         if (mp_ierr /= 0) goto 600
    2033         i=i+1
    2034         call MPI_Isend(rhon(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    2035         if (mp_ierr /= 0) goto 600
    2036         i=i+1
    2037         call MPI_Isend(drhodzn(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    2038         if (mp_ierr /= 0) goto 600
    2039         i=i+1
    2040         call MPI_Isend(tthn(:,:,:,mind,k),d3s2,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    2041         if (mp_ierr /= 0) goto 600
    2042         i=i+1
    2043         call MPI_Isend(qvhn(:,:,:,mind,k),d3s2,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    2044         if (mp_ierr /= 0) goto 600
    2045         i=i+1
    2046         call MPI_Isend(qvn(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    2047         if (mp_ierr /= 0) goto 600
    2048         i=i+1
    2049         call MPI_Isend(pvn(:,:,:,mind,k),d3s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    2050         if (mp_ierr /= 0) goto 600
    2051         i=i+1
    2052         call MPI_Isend(cloudsn(:,:,:,mind,k),d3s1,MPI_INTEGER1,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    2053         i=i+1
    2054         if (mp_ierr /= 0) goto 600
    2055         call MPI_Isend(cloudshn(:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    2056         if (mp_ierr /= 0) goto 600
    2057         i=i+1
    2058         call MPI_Isend(vdepn(:,:,:,mind,k),d2s2,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    2059         if (mp_ierr /= 0) goto 600
    2060         i=i+1
    2061         call MPI_Isend(psn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    2062         if (mp_ierr /= 0) goto 600
    2063         i=i+1
    2064         call MPI_Isend(sdn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    2065         if (mp_ierr /= 0) goto 600
    2066         i=i+1
    2067 ! 15
    2068         call MPI_Isend(tccn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    2069         if (mp_ierr /= 0) goto 600
    2070         i=i+1
    2071         call MPI_Isend(tt2n(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    2072         if (mp_ierr /= 0) goto 600
    2073         i=i+1
    2074         call MPI_Isend(td2n(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    2075         if (mp_ierr /= 0) goto 600
    2076         i=i+1
    2077         call MPI_Isend(lsprecn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    2078         if (mp_ierr /= 0) goto 600
    2079         i=i+1
    2080         call MPI_Isend(convprecn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    2081         if (mp_ierr /= 0) goto 600
    2082         i=i+1
    2083         call MPI_Isend(ustarn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    2084         if (mp_ierr /= 0) goto 600
    2085         i=i+1
    2086         call MPI_Isend(wstarn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    2087         if (mp_ierr /= 0) goto 600
    2088         i=i+1
    2089         call MPI_Isend(hmixn(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    2090         if (mp_ierr /= 0) goto 600
    2091         i=i+1
    2092         call MPI_Isend(tropopausen(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    2093         if (mp_ierr /= 0) goto 600
    2094         i=i+1
    2095         call MPI_Isend(olin(:,:,:,mind,k),d2s1,mp_sp,dest,tm1,MPI_COMM_WORLD,reqs(i),mp_ierr)
    2096         if (mp_ierr /= 0) goto 600
    2097 ! 25
    2098 
    2099 ! Send cloud water if it exists. Increment counter always (as on receiving end)
    2100         if (readclouds) then
    2101           i=i+1
    2102           call MPI_Isend(ctwcn(:,:,mind,k),d2s1,mp_sp,dest,tm1,&
    2103                &MPI_COMM_WORLD,reqs(i),mp_ierr)
    2104           if (mp_ierr /= 0) goto 600
    2105         end if
    2106       end do
     1731      end if
    21071732    end do
     1733  end do
    21081734
    21091735    if (mp_measure_time) call mpif_mtime('commtime',1)
     
    21841810    do k=1, numbnests
    21851811! Get MPI tags/requests for communications
    2186       j=mp_pid*nvar_async
    2187       call MPI_Irecv(uun(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
    2188            &MPI_COMM_WORLD,reqs(j),mp_ierr)
    2189       if (mp_ierr /= 0) goto 600
    2190       j=j+1
    2191       call MPI_Irecv(vvn(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
    2192            &MPI_COMM_WORLD,reqs(j),mp_ierr)
    2193       if (mp_ierr /= 0) goto 600
    2194       j=j+1
    2195       call MPI_Irecv(wwn(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
    2196            &MPI_COMM_WORLD,reqs(j),mp_ierr)
    2197       if (mp_ierr /= 0) goto 600
    2198       j=j+1
    2199       call MPI_Irecv(ttn(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
    2200            &MPI_COMM_WORLD,reqs(j),mp_ierr)
    2201       if (mp_ierr /= 0) goto 600
    2202       j=j+1
    2203       call MPI_Irecv(rhon(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
    2204            &MPI_COMM_WORLD,reqs(j),mp_ierr)
    2205       if (mp_ierr /= 0) goto 600
    2206       j=j+1
    2207       call MPI_Irecv(drhodzn(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
    2208            &MPI_COMM_WORLD,reqs(j),mp_ierr)
    2209       if (mp_ierr /= 0) goto 600
    2210       j=j+1
    2211       call MPI_Irecv(tthn(:,:,:,mind,k),d3s2,mp_sp,id_read,MPI_ANY_TAG,&
    2212            &MPI_COMM_WORLD,reqs(j),mp_ierr)
    2213       if (mp_ierr /= 0) goto 600
    2214       j=j+1
    2215       call MPI_Irecv(qvhn(:,:,:,mind,k),d3s2,mp_sp,id_read,MPI_ANY_TAG,&
    2216            &MPI_COMM_WORLD,reqs(j),mp_ierr)
    2217       if (mp_ierr /= 0) goto 600
    2218       j=j+1
    2219       call MPI_Irecv(qvn(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
    2220            &MPI_COMM_WORLD,reqs(j),mp_ierr)
    2221       if (mp_ierr /= 0) goto 600
    2222       j=j+1
    2223       call MPI_Irecv(pvn(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
    2224            &MPI_COMM_WORLD,reqs(j),mp_ierr)
    2225       if (mp_ierr /= 0) goto 600
    2226       j=j+1
    2227       call MPI_Irecv(cloudsn(:,:,:,mind,k),d3s1,MPI_INTEGER1,id_read,MPI_ANY_TAG,&
    2228            &MPI_COMM_WORLD,reqs(j),mp_ierr)   
    2229       if (mp_ierr /= 0) goto 600
    2230       j=j+1
    2231       call MPI_Irecv(cloudshn(:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    2232            &MPI_COMM_WORLD,reqs(j),mp_ierr)
    2233       if (mp_ierr /= 0) goto 600
    2234       j=j+1
    2235       call MPI_Irecv(vdepn(:,:,:,mind,k),d2s2,mp_sp,id_read,MPI_ANY_TAG,&
    2236            &MPI_COMM_WORLD,reqs(j),mp_ierr)
    2237       if (mp_ierr /= 0) goto 600
    2238       j=j+1
    2239       call MPI_Irecv(psn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    2240            &MPI_COMM_WORLD,reqs(j),mp_ierr)
    2241       if (mp_ierr /= 0) goto 600
    2242       j=j+1
    2243       call MPI_Irecv(sdn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    2244            &MPI_COMM_WORLD,reqs(j),mp_ierr)
    2245       if (mp_ierr /= 0) goto 600
    2246       j=j+1
    2247       call MPI_Irecv(tccn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    2248            &MPI_COMM_WORLD,reqs(j),mp_ierr)
    2249       if (mp_ierr /= 0) goto 600
    2250       j=j+1
    2251       call MPI_Irecv(tt2n(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    2252            &MPI_COMM_WORLD,reqs(j),mp_ierr)
    2253       if (mp_ierr /= 0) goto 600
    2254       j=j+1
    2255       call MPI_Irecv(td2n(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    2256            &MPI_COMM_WORLD,reqs(j),mp_ierr)
    2257       if (mp_ierr /= 0) goto 600
    2258       j=j+1
    2259       call MPI_Irecv(lsprecn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    2260            &MPI_COMM_WORLD,reqs(j),mp_ierr)
    2261       if (mp_ierr /= 0) goto 600
    2262       j=j+1
    2263       call MPI_Irecv(convprecn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    2264            &MPI_COMM_WORLD,reqs(j),mp_ierr)
    2265       if (mp_ierr /= 0) goto 600
    2266       call MPI_Irecv(ustarn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    2267            &MPI_COMM_WORLD,reqs(j),mp_ierr)
    2268       if (mp_ierr /= 0) goto 600
    2269       j=j+1
    2270       call MPI_Irecv(wstarn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    2271            &MPI_COMM_WORLD,reqs(j),mp_ierr)
    2272       if (mp_ierr /= 0) goto 600
    2273       j=j+1
    2274       call MPI_Irecv(hmixn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    2275            &MPI_COMM_WORLD,reqs(j),mp_ierr)
    2276       if (mp_ierr /= 0) goto 600
    2277       j=j+1
    2278       call MPI_Irecv(tropopausen(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    2279            &MPI_COMM_WORLD,reqs(j),mp_ierr)
    2280       if (mp_ierr /= 0) goto 600
    2281       j=j+1
    2282       call MPI_Irecv(olin(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    2283            &MPI_COMM_WORLD,reqs(j),mp_ierr)
    2284       if (mp_ierr /= 0) goto 600
     1812    j=mp_pid*nvar_async
     1813    call MPI_Irecv(uun(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
     1814         &MPI_COMM_WORLD,reqs(j),mp_ierr)
     1815    if (mp_ierr /= 0) goto 600
     1816    j=j+1
     1817    call MPI_Irecv(vvn(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
     1818         &MPI_COMM_WORLD,reqs(j),mp_ierr)
     1819    if (mp_ierr /= 0) goto 600
     1820    j=j+1
     1821    call MPI_Irecv(wwn(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
     1822         &MPI_COMM_WORLD,reqs(j),mp_ierr)
     1823    if (mp_ierr /= 0) goto 600
     1824    j=j+1
     1825    call MPI_Irecv(ttn(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
     1826         &MPI_COMM_WORLD,reqs(j),mp_ierr)
     1827    if (mp_ierr /= 0) goto 600
     1828    j=j+1
     1829    call MPI_Irecv(rhon(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
     1830         &MPI_COMM_WORLD,reqs(j),mp_ierr)
     1831    if (mp_ierr /= 0) goto 600
     1832    j=j+1
     1833    call MPI_Irecv(drhodzn(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
     1834         &MPI_COMM_WORLD,reqs(j),mp_ierr)
     1835    if (mp_ierr /= 0) goto 600
     1836    j=j+1
     1837    call MPI_Irecv(tthn(:,:,:,mind,k),d3s2,mp_sp,id_read,MPI_ANY_TAG,&
     1838         &MPI_COMM_WORLD,reqs(j),mp_ierr)
     1839    if (mp_ierr /= 0) goto 600
     1840    j=j+1
     1841    call MPI_Irecv(qvhn(:,:,:,mind,k),d3s2,mp_sp,id_read,MPI_ANY_TAG,&
     1842         &MPI_COMM_WORLD,reqs(j),mp_ierr)
     1843    if (mp_ierr /= 0) goto 600
     1844    j=j+1
     1845    call MPI_Irecv(qvn(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
     1846         &MPI_COMM_WORLD,reqs(j),mp_ierr)
     1847    if (mp_ierr /= 0) goto 600
     1848    j=j+1
     1849    call MPI_Irecv(pvn(:,:,:,mind,k),d3s1,mp_sp,id_read,MPI_ANY_TAG,&
     1850         &MPI_COMM_WORLD,reqs(j),mp_ierr)
     1851    if (mp_ierr /= 0) goto 600
     1852    j=j+1
     1853    call MPI_Irecv(cloudsn(:,:,:,mind,k),d3s1,MPI_INTEGER1,id_read,MPI_ANY_TAG,&
     1854         &MPI_COMM_WORLD,reqs(j),mp_ierr)   
     1855    if (mp_ierr /= 0) goto 600
     1856    j=j+1
     1857    call MPI_Irecv(cloudshn(:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
     1858         &MPI_COMM_WORLD,reqs(j),mp_ierr)
     1859    if (mp_ierr /= 0) goto 600
     1860    j=j+1
     1861    call MPI_Irecv(vdepn(:,:,:,mind,k),d2s2,mp_sp,id_read,MPI_ANY_TAG,&
     1862         &MPI_COMM_WORLD,reqs(j),mp_ierr)
     1863    if (mp_ierr /= 0) goto 600
     1864    j=j+1
     1865    call MPI_Irecv(psn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
     1866         &MPI_COMM_WORLD,reqs(j),mp_ierr)
     1867    if (mp_ierr /= 0) goto 600
     1868    j=j+1
     1869    call MPI_Irecv(sdn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
     1870         &MPI_COMM_WORLD,reqs(j),mp_ierr)
     1871    if (mp_ierr /= 0) goto 600
     1872    j=j+1
     1873    call MPI_Irecv(tccn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
     1874         &MPI_COMM_WORLD,reqs(j),mp_ierr)
     1875    if (mp_ierr /= 0) goto 600
     1876    j=j+1
     1877    call MPI_Irecv(tt2n(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
     1878         &MPI_COMM_WORLD,reqs(j),mp_ierr)
     1879    if (mp_ierr /= 0) goto 600
     1880    j=j+1
     1881    call MPI_Irecv(td2n(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
     1882         &MPI_COMM_WORLD,reqs(j),mp_ierr)
     1883    if (mp_ierr /= 0) goto 600
     1884    j=j+1
     1885    call MPI_Irecv(lsprecn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
     1886         &MPI_COMM_WORLD,reqs(j),mp_ierr)
     1887    if (mp_ierr /= 0) goto 600
     1888    j=j+1
     1889    call MPI_Irecv(convprecn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
     1890         &MPI_COMM_WORLD,reqs(j),mp_ierr)
     1891    if (mp_ierr /= 0) goto 600
     1892    call MPI_Irecv(ustarn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
     1893         &MPI_COMM_WORLD,reqs(j),mp_ierr)
     1894    if (mp_ierr /= 0) goto 600
     1895    j=j+1
     1896    call MPI_Irecv(wstarn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
     1897         &MPI_COMM_WORLD,reqs(j),mp_ierr)
     1898    if (mp_ierr /= 0) goto 600
     1899    j=j+1
     1900    call MPI_Irecv(hmixn(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
     1901         &MPI_COMM_WORLD,reqs(j),mp_ierr)
     1902    if (mp_ierr /= 0) goto 600
     1903    j=j+1
     1904    call MPI_Irecv(tropopausen(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
     1905         &MPI_COMM_WORLD,reqs(j),mp_ierr)
     1906    if (mp_ierr /= 0) goto 600
     1907    j=j+1
     1908    call MPI_Irecv(olin(:,:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
     1909         &MPI_COMM_WORLD,reqs(j),mp_ierr)
     1910    if (mp_ierr /= 0) goto 600
    22851911
    22861912! Post request for clwc. These data are possibly not sent, request must then be cancelled
    22871913! For now assume that data at all steps either have or do not have water
    2288       if (readclouds) then
    2289         j=j+1
    2290         call MPI_Irecv(ctwcn(:,:,mind,k),d2s1*5,mp_sp,id_read,MPI_ANY_TAG,&
    2291              &MPI_COMM_WORLD,reqs(j),mp_ierr)
    2292         if (mp_ierr /= 0) goto 600
    2293       end if
    2294     end do
     1914    if (readclouds) then
     1915      j=j+1
     1916      call MPI_Irecv(ctwcn(:,:,mind,k),d2s1*5,mp_sp,id_read,MPI_ANY_TAG,&
     1917           &MPI_COMM_WORLD,reqs(j),mp_ierr)
     1918      if (mp_ierr /= 0) goto 600
     1919    end if
     1920  end do
    22951921
    22961922    if (mp_measure_time) call mpif_mtime('commtime',1)
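
    Both loops above post non-blocking operations and store the request handles in reqs(), indexed by destination (i=dest*nvar_async plus an offset on the sending side, j=mp_pid*nvar_async plus an offset on the receiving side), so they can be completed later in one go. A compact sketch of the same pattern (not FLEXPART code; two fields stand in for the nvar_async fields per destination, rank 0 stands in for id_read, and the completing MPI_WAITALL is shown inline):

      program isend_sketch
        use mpi
        implicit none
        integer, parameter :: nvar = 2          ! stands in for nvar_async
        integer :: ierr, rank, nproc, dest, i
        real :: field_a(4), field_b(4)
        integer, allocatable :: reqs(:)

        call MPI_INIT(ierr)
        call MPI_COMM_RANK(MPI_COMM_WORLD, rank, ierr)
        call MPI_COMM_SIZE(MPI_COMM_WORLD, nproc, ierr)

        field_a = real(rank)
        field_b = 2.0*real(rank)
        allocate(reqs(0:nvar*nproc-1))
        reqs = MPI_REQUEST_NULL                 ! unused slots stay null

        if (rank == 0) then                     ! the reader rank posts all sends
          do dest = 1, nproc-1
            i = dest*nvar                       ! request slot derived from dest, as above
            call MPI_ISEND(field_a, 4, MPI_REAL, dest, 1, MPI_COMM_WORLD, reqs(i), ierr)
            i = i+1
            call MPI_ISEND(field_b, 4, MPI_REAL, dest, 1, MPI_COMM_WORLD, reqs(i), ierr)
          end do
        else                                    ! every other rank posts matching receives
          i = rank*nvar
          call MPI_IRECV(field_a, 4, MPI_REAL, 0, MPI_ANY_TAG, MPI_COMM_WORLD, reqs(i), ierr)
          i = i+1
          call MPI_IRECV(field_b, 4, MPI_REAL, 0, MPI_ANY_TAG, MPI_COMM_WORLD, reqs(i), ierr)
        end if

        ! Null requests complete immediately, so one Waitall over the whole array is safe.
        call MPI_WAITALL(nvar*nproc, reqs, MPI_STATUSES_IGNORE, ierr)
        if (rank > 0) write(*,*) 'rank', rank, 'received', field_a(1), field_b(1)
        call MPI_FINALIZE(ierr)
      end program isend_sketch
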
     
    27232349          write(*,FMT='(A60,TR1,F9.2)') 'TOTAL WALL TIME FOR CONCCALC:',&
    27242350               & mp_conccalc_time_total
    2725 ! write(*,FMT='(A60,TR1,F9.2)') 'TOTAL WALL TIME FOR VERTTRANSFORM:',&
    2726 !      & mp_vt_wtime_total
    2727 ! write(*,FMT='(A60,TR1,F9.2)') 'TOTAL CPU TIME FOR VERTTRANSFORM:',&
    2728 !      & mp_vt_time_total
     2351          ! write(*,FMT='(A60,TR1,F9.2)') 'TOTAL WALL TIME FOR VERTTRANSFORM:',&
     2352          !      & mp_vt_wtime_total
     2353          ! write(*,FMT='(A60,TR1,F9.2)') 'TOTAL CPU TIME FOR VERTTRANSFORM:',&
     2354          !      & mp_vt_time_total
    27292355! NB: the 'flush' function is possibly a gfortran-specific extension
    27302356          call flush()
     
    27622388
    27632389
    2764   end subroutine mpif_finalize
    2765 
    2766 
    2767   subroutine get_lun(my_lun)
     2390    end subroutine mpif_finalize
     2391
     2392
     2393    subroutine get_lun(my_lun)
    27682394!***********************************************************************
    27692395! get_lun:
     
    27712397!***********************************************************************
    27722398
    2773     implicit none
    2774 
    2775     integer, intent(inout) :: my_lun
    2776     integer, save :: free_lun=100
    2777     logical :: exists, iopen
    2778 
    2779 !***********************************************************************
    2780 
    2781     loop1: do
    2782       inquire(UNIT=free_lun, EXIST=exists, OPENED=iopen)
    2783       if (exists .and. .not.iopen) exit loop1
    2784       free_lun = free_lun+1
    2785     end do loop1
    2786     my_lun = free_lun
    2787 
    2788   end subroutine get_lun
    2789 
    2790 
    2791   subroutine write_data_dbg(array_in, array_name, tstep, ident)
     2399      implicit none
     2400
     2401      integer, intent(inout) :: my_lun
     2402      integer, save :: free_lun=100
     2403      logical :: exists, iopen
     2404
     2405!***********************************************************************
     2406
     2407      loop1: do
     2408        inquire(UNIT=free_lun, EXIST=exists, OPENED=iopen)
     2409        if (exists .and. .not.iopen) exit loop1
     2410        free_lun = free_lun+1
     2411      end do loop1
     2412      my_lun = free_lun
     2413
     2414    end subroutine get_lun
     2415
     2416
     2417    subroutine write_data_dbg(array_in, array_name, tstep, ident)
    27922418!***********************************************************************
    27932419! Write one-dimensional arrays to file (for debugging purposes)
    27942420!***********************************************************************
    2795     implicit none
    2796 
    2797     real, intent(in), dimension(:) :: array_in
    2798     integer, intent(in) :: tstep
    2799     integer :: lios
    2800     character(LEN=*), intent(in) :: ident, array_name
    2801 
    2802     character(LEN=8) :: c_ts
    2803     character(LEN=40) :: fn_1, fn_2
    2804 
    2805 !***********************************************************************
    2806 
    2807     write(c_ts, FMT='(I8.8,BZ)') tstep
    2808     fn_1='-'//trim(adjustl(c_ts))//'-'//trim(ident)
    2809     write(c_ts, FMT='(I2.2,BZ)') mp_np
    2810     fn_2= trim(adjustl(array_name))//trim(adjustl(fn_1))//'-np'//trim(adjustl(c_ts))//'.dat'
    2811 
    2812     call get_lun(dat_lun)
    2813     open(UNIT=dat_lun, FILE=fn_2, IOSTAT=lios, ACTION='WRITE', &
    2814          FORM='UNFORMATTED', STATUS='REPLACE')
    2815     write(UNIT=dat_lun, IOSTAT=lios) array_in
    2816     close(UNIT=dat_lun)
    2817 
    2818   end subroutine write_data_dbg
    2819 
    2820 
    2821   subroutine set_fields_synthetic()
    2822 !*******************************************************************************
    2823 ! DESCRIPTION
    2824 !   Set all meteorological fields to synthetic (usually constant/homogeneous)
    2825 !   values.
    2826 !   Used for validation and error-checking
    2827 !
    2828 ! NOTE
    2829 !   This version uses asynchronous communications.
    2830 !
    2831 ! VARIABLES
    2832 !   
    2833 !
    2834 !
    2835 !*******************************************************************************
    2836     use com_mod
    2837 
    2838     implicit none
    2839 
    2840     integer :: li=1, ui=2 ! wfmem indices (i.e, operate on entire field)
    2841 
    2842     if (.not.lmp_sync) ui=3
    2843 
    2844 
    2845 ! Variables transferred at initialization only
    2846 !*********************************************
    2847 !    readclouds=readclouds_
    2848     oro(:,:)=0.0
    2849     excessoro(:,:)=0.0
    2850     lsm(:,:)=0.0
    2851     xlanduse(:,:,:)=0.0
    2852 !    wftime
    2853 !    numbwf
    2854 !    nmixz
    2855 !    height
    2856 
    2857 ! Time-varying fields:
    2858     uu(:,:,:,li:ui) = 10.0
    2859     vv(:,:,:,li:ui) = 0.0
    2860     uupol(:,:,:,li:ui) = 10.0
    2861     vvpol(:,:,:,li:ui)=0.0
    2862     ww(:,:,:,li:ui)=0.
    2863     tt(:,:,:,li:ui)=300.
    2864     rho(:,:,:,li:ui)=1.3
    2865     drhodz(:,:,:,li:ui)=.0
    2866     tth(:,:,:,li:ui)=0.0
    2867     qvh(:,:,:,li:ui)=1.0
    2868     qv(:,:,:,li:ui)=1.0
    2869 
    2870     pv(:,:,:,li:ui)=1.0
    2871     clouds(:,:,:,li:ui)=0
    2872 
    2873     clwc(:,:,:,li:ui)=0.0
    2874     ciwc(:,:,:,li:ui)=0.0
    2875 
    2876 ! 2D fields
    2877 
    2878     cloudsh(:,:,li:ui)=0
    2879     vdep(:,:,:,li:ui)=0.0
    2880     ps(:,:,:,li:ui)=1.0e5
    2881     sd(:,:,:,li:ui)=0.0
    2882     tcc(:,:,:,li:ui)=0.0
    2883     tt2(:,:,:,li:ui)=300.
    2884     td2(:,:,:,li:ui)=250.
    2885     lsprec(:,:,:,li:ui)=0.0
    2886     convprec(:,:,:,li:ui)=0.0
    2887     ustar(:,:,:,li:ui)=1.0
    2888     wstar(:,:,:,li:ui)=1.0
    2889     hmix(:,:,:,li:ui)=10000.
    2890     tropopause(:,:,:,li:ui)=10000.
    2891     oli(:,:,:,li:ui)=0.01
    2892 
    2893   end subroutine set_fields_synthetic
     2421      implicit none
     2422
     2423      real, intent(in), dimension(:) :: array_in
     2424      integer, intent(in) :: tstep
     2425      integer :: lios
     2426      character(LEN=*), intent(in) :: ident, array_name
     2427
     2428      character(LEN=8) :: c_ts
     2429      character(LEN=40) :: fn_1, fn_2
     2430
     2431!***********************************************************************
     2432
     2433      write(c_ts, FMT='(I8.8,BZ)') tstep
     2434      fn_1='-'//trim(adjustl(c_ts))//'-'//trim(ident)
     2435      write(c_ts, FMT='(I2.2,BZ)') mp_np
     2436      fn_2= trim(adjustl(array_name))//trim(adjustl(fn_1))//'-np'//trim(adjustl(c_ts))//'.dat'
     2437
     2438      call get_lun(dat_lun)
     2439      open(UNIT=dat_lun, FILE=fn_2, IOSTAT=lios, ACTION='WRITE', &
     2440           FORM='UNFORMATTED', STATUS='REPLACE')
     2441      write(UNIT=dat_lun, IOSTAT=lios) array_in
     2442      close(UNIT=dat_lun)
     2443
     2444    end subroutine write_data_dbg
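
    A standalone sketch that mimics, rather than calls, the two helpers above (not from the repository; the array contents, time step and label are invented, and open(newunit=...) is used in place of get_lun, which searches upward from unit 100 for a unit that exists and is not open). It reproduces the file-naming scheme of write_data_dbg, <array>-<tstep>-<ident>-np<N>.dat:

      program write_dbg_sketch
        implicit none
        real :: ztra1_sample(5) = (/ 10., 20., 30., 40., 50. /)
        integer :: tstep = 3600, np = 4, lun
        character(LEN=8)  :: c_ts
        character(LEN=40) :: fname

        ! Same naming scheme as write_data_dbg above
        write(c_ts, FMT='(I8.8)') tstep
        fname = 'ztra1-'//trim(adjustl(c_ts))//'-afterloop'
        write(c_ts, FMT='(I2.2)') np
        fname = trim(fname)//'-np'//trim(adjustl(c_ts))//'.dat'

        ! get_lun walks upward from unit 100; Fortran 2008 newunit= achieves the same
        open(NEWUNIT=lun, FILE=trim(fname), FORM='UNFORMATTED', STATUS='REPLACE', ACTION='WRITE')
        write(lun) ztra1_sample
        close(lun)
      end program write_dbg_sketch
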
    28942445
    28952446end module mpi_mod
  • src/releaseparticles_mpi.f90

    r861805a r7e52e2e  
    2121
    2222subroutine releaseparticles(itime)
    23 !                              o
    24 !*****************************************************************************
    25 !                                                                            *
    26 !     This subroutine releases particles from the release locations.         *
    27 !                                                                            *
    28 !     It searches for a "vacant" storage space and assigns all particle      *
    29 !     information to that space. A space is vacant either when no particle   *
    30 !     is yet assigned to it, or when its particle has expired and, thus,     *
    31 !     the storage space is made available to a new particle.                 *
    32 !                                                                            *
    33 !     Author: A. Stohl                                                       *
    34 !                                                                            *
    35 !     29 June 2002                                                           *
    36 !                                                                            *
    37 !   CHANGES                                                                  *
    38 !     12/2014 eso: MPI version                                               *
    39 !                                                                            *
    40 !*****************************************************************************
    41 !                                                                            *
    42 ! Variables:                                                                 *
    43 ! itime [s]            current time                                          *
    44 ! ireleasestart, ireleaseend          start and end times of all releases    *
    45 ! npart(maxpoint)      number of particles to be released in total           *
    46 ! numrel               number of particles to be released during this time   *
    47 !                      step                                                  *
    48 ! addone               extra particle assigned to MPI process if             *
    49 !                      mod(npart(i),mp_partgroup_np) .ne. 0)                 *
    50 !*****************************************************************************
     23  !                              o
     24  !*****************************************************************************
     25  !                                                                            *
     26  !     This subroutine releases particles from the release locations.         *
     27  !                                                                            *
     28  !     It searches for a "vacant" storage space and assigns all particle      *
     29  !     information to that space. A space is vacant either when no particle   *
     30  !     is yet assigned to it, or when it's particle is expired and, thus,     *
     31  !     the storage space is made available to a new particle.                 *
     32  !                                                                            *
     33  !     Author: A. Stohl                                                       *
     34  !                                                                            *
     35  !     29 June 2002                                                           *
     36  !                                                                            *
     37  !   CHANGES                                                                  *
     38  !     12/2014 eso: MPI version                                               *
     39  !                                                                            *
     40  !*****************************************************************************
     41  !                                                                            *
     42  ! Variables:                                                                 *
     43  ! itime [s]            current time                                          *
     44  ! ireleasestart, ireleaseend          start and end times of all releases    *
     45  ! npart(maxpoint)      number of particles to be released in total           *
     46  ! numrel               number of particles to be released during this time   *
     47  !                      step                                                  *
     48  ! addpart              extra particle assigned to MPI process if             *
     49  !                      mod(npart(i),mp_partgroup_np) .ne. 0)                 *
     50  !*****************************************************************************
    5151
    5252  use point_mod
     
    5959  implicit none
    6060
    61 !real xaux,yaux,zaux,ran1,rfraction,xmasssave(maxpoint)
     61  !real xaux,yaux,zaux,ran1,rfraction,xmasssave(maxpoint)
    6262  real :: xaux,yaux,zaux,rfraction
    6363  real :: topo,rhoaux(2),r,t,rhoout,ddx,ddy,rddx,rddy,p1,p2,p3,p4
     
    7373
    7474  integer :: idummy = -7
    75 !save idummy,xmasssave
    76 !data idummy/-7/,xmasssave/maxpoint*0./
     75  !save idummy,xmasssave
     76  !data idummy/-7/,xmasssave/maxpoint*0./
    7777
    7878  logical :: first_call=.true.
    7979
    80 ! Use different seed for each process.
    81 !****************************************************************************
     80  ! Use different seed for each process.
     81  !****************************************************************************
    8282  if (first_call) then
    8383    idummy=idummy+mp_seed
     
    8787  mind2=memind(2)
    8888
    89 ! Determine the actual date and time in Greenwich (i.e., UTC + correction for daylight savings time)
    90 !*****************************************************************************
     89  ! Determine the actual date and time in Greenwich (i.e., UTC + correction for daylight savings time)
     90  !*****************************************************************************
    9191
    9292  julmonday=juldate(19000101,0)          ! this is a Monday
     
    9797
    9898
    99 ! For every release point, check whether we are in the release time interval
    100 !***************************************************************************
     99  ! For every release point, check whether we are in the release time interval
     100  !***************************************************************************
    101101
    102102  minpart=1
     
    105105         (itime.le.ireleaseend(i))) then
    106106
    107 ! Determine the local day and time
    108 !*********************************
     107  ! Determine the local day and time
     108  !*********************************
    109109
    110110      xlonav=xlon0+(xpoint2(i)+xpoint1(i))/2.*dx  ! longitude needed to determine local time
     
    124124      endif
    125125
    126 ! Calculate a species- and time-dependent correction factor, distinguishing between
    127 ! area (those with release starting at surface) and point (release starting above surface) sources
    128 ! Also, calculate an average time correction factor (species independent)
    129 !*****************************************************************************
     126  ! Calculate a species- and time-dependent correction factor, distinguishing between
     127  ! area (those with release starting at surface) and point (release starting above surface) sources
     128  ! Also, calculate an average time correction factor (species independent)
     129  !*****************************************************************************
    130130      average_timecorrect=0.
    131131      do k=1,nspec
     
    139139      average_timecorrect=average_timecorrect/real(nspec)
    140140
    141 ! Determine number of particles to be released this time; at start and at end of release,
    142 ! only half the particles are released
    143 !*****************************************************************************
     141  ! Determine number of particles to be released this time; at start and at end of release,
     142  ! only half the particles are released
     143  !*****************************************************************************
    144144
    145145      if (ireleasestart(i).ne.ireleaseend(i)) then
     
    149149             (itime.eq.ireleaseend(i))) rfraction=rfraction/2.
    150150
    151 ! Take the species-average time correction factor in order to scale the
    152 ! number of particles released this time
    153 ! Also scale by number of MPI processes
    154 !**********************************************************************
     151  ! Take the species-average time correction factor in order to scale the
     152  ! number of particles released this time
     153  ! Also scale by number of MPI processes
     154  !**********************************************************************
    155155
    156156        rfraction=rfraction*average_timecorrect
     
    158158        rfraction=rfraction+xmasssave(i)  ! number to be released at this time
    159159
    160 ! number to be released for one process
     160        ! number to be released for one process
    161161        if (mp_partid.lt.mod(int(rfraction),mp_partgroup_np)) then
    162162          addone=1
     
    180180        numrel=npart(i)/mp_partgroup_np+addone
    181181      endif
    182 
     182     
    183183      xaux=xpoint2(i)-xpoint1(i)
    184184      yaux=ypoint2(i)-ypoint1(i)
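The addone logic in this hunk is how the total particle count of a release is split across the MPI particle group: every rank gets the integer share npart(i)/mp_partgroup_np, and the first mod(npart(i),mp_partgroup_np) ranks get one extra so nothing is lost to integer division (for the time-dependent branch the same rule is applied to int(rfraction)). A small standalone sketch with made-up counts that reproduces just this split:

program release_split
  implicit none
  ! Hypothetical numbers, purely for illustration.
  integer, parameter :: npart = 10            ! particles for one release point
  integer, parameter :: mp_partgroup_np = 4   ! ranks in the particle group
  integer :: mp_partid, addone, numrel, total

  total = 0
  do mp_partid = 0, mp_partgroup_np-1
    addone = 0
    if (mp_partid .lt. mod(npart, mp_partgroup_np)) addone = 1
    numrel = npart/mp_partgroup_np + addone   ! same rule as in the hunk above
    total  = total + numrel
    write(*,'(A,I2,A,I3)') 'rank ', mp_partid, ' releases ', numrel
  end do
  write(*,'(A,I3)') 'total released: ', total  ! equals npart, i.e. 10
end program release_split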
     
    187187        do ipart=minpart,maxpart_mpi     ! search for free storage space
    188188
    189 ! If a free storage space is found, attribute everything to this array element
    190 !*****************************************************************************
     189  ! If a free storage space is found, attribute everything to this array element
     190  !*****************************************************************************
    191191
    192192          if (itra1(ipart).ne.itime) then
    193193
    194 ! Particle coordinates are determined by using a random position within the release volume
    195 !*****************************************************************************
    196 
    197 ! Determine horizontal particle position
    198 !***************************************
     194  ! Particle coordinates are determined by using a random position within the release volume
     195  !*****************************************************************************
     196
     197  ! Determine horizontal particle position
     198  !***************************************
    199199
    200200            xtra1(ipart)=xpoint1(i)+ran1(idummy)*xaux
     
    207207            ytra1(ipart)=ypoint1(i)+ran1(idummy)*yaux
    208208
    209 ! Assign mass to particle: Total mass divided by total number of particles.
    210 ! Time variation has partly been taken into account already by a species-average
    211 ! correction factor, by which the number of particles released this time has been
    212 ! scaled. Adjust the mass per particle by the species-dependent time correction factor
    213 ! divided by the species-average one
    214 !*****************************************************************************
     209  ! Assign mass to particle: Total mass divided by total number of particles.
     210  ! Time variation has partly been taken into account already by a species-average
     211  ! correction factor, by which the number of particles released this time has been
     212  ! scaled. Adjust the mass per particle by the species-dependent time correction factor
     213  ! divided by the species-average one
     214  !*****************************************************************************
    215215            do k=1,nspec
    216               xmass1(ipart,k)=xmass(i,k)/real(npart(i)) &
    217                    *timecorrect(k)/average_timecorrect
    218 !             write (*,*) 'xmass1: ',xmass1(ipart,k),ipart,k
    219 ! Assign certain properties to particle
    220 !**************************************
     216               xmass1(ipart,k)=xmass(i,k)/real(npart(i)) &
     217                    *timecorrect(k)/average_timecorrect
     218  !             write (*,*) 'xmass1: ',xmass1(ipart,k),ipart,k
     219  ! Assign certain properties to particle
     220  !**************************************
    221221            end do
    222222            nclass(ipart)=min(int(ran1(idummy)*real(nclassunc))+1, &
     
    234234
    235235
    236 ! Determine vertical particle position
    237 !*************************************
     236  ! Determine vertical particle position
     237  !*************************************
    238238
    239239            ztra1(ipart)=zpoint1(i)+ran1(idummy)*zaux
    240240
    241 ! Interpolation of topography and density
    242 !****************************************
    243 
    244 ! Determine the nest we are in
    245 !*****************************
     241  ! Interpolation of topography and density
     242  !****************************************
     243
     244  ! Determine the nest we are in
     245  !*****************************
    246246
    247247            ngrid=0
     
    25725743          continue
    258258
    259 ! Determine (nested) grid coordinates and auxiliary parameters used for interpolation
    260 !*****************************************************************************
     259  ! Determine (nested) grid coordinates and auxiliary parameters used for interpolation
     260  !*****************************************************************************
    261261
    262262            if (ngrid.gt.0) then
     
    294294            endif
    295295
    296 ! If starting height is in pressure coordinates, retrieve pressure profile and convert zpart1 to meters
    297 !*****************************************************************************
     296  ! If starting height is in pressure coordinates, retrieve pressure profile and convert zpart1 to meters
     297  !*****************************************************************************
    298298            if (kindz(i).eq.3) then
    299299              presspart=ztra1(ipart)
     
    337337            endif
    338338
    339 ! If release positions are given in meters above sea level, subtract the
    340 ! topography from the starting height
    341 !***********************************************************************
     339  ! If release positions are given in meters above sea level, subtract the
     340  ! topography from the starting height
     341  !***********************************************************************
    342342
    343343            if (kindz(i).eq.2) ztra1(ipart)=ztra1(ipart)-topo
     
    348348
    349349
    350 ! For special simulations, multiply particle concentration by air density;
    351 ! Simply take the 2nd field in memory to do this (accurate enough)
    352 !***********************************************************************
    353 !AF IND_SOURCE switches between different units for concentrations at the source
    354 !Af    NOTE that in backward simulations the release of particles takes place at the
    355 !Af         receptor and the sampling at the source.
    356 !Af          1="mass"
    357 !Af          2="mass mixing ratio"
    358 !Af IND_RECEPTOR switches between different units for concentrations at the receptor
    359 !Af          1="mass"
    360 !Af          2="mass mixing ratio"
    361 
    362 !Af switches for the releasefile:
    363 !Af IND_REL =  1 : xmass * rho
    364 !Af IND_REL =  0 : xmass * 1
    365 
    366 !Af ind_rel is defined in readcommand.f
     350  ! For special simulations, multiply particle concentration by air density;
     351  ! Simply take the 2nd field in memory to do this (accurate enough)
     352  !***********************************************************************
     353  !AF IND_SOURCE switches between different units for concentrations at the source
     354  !Af    NOTE that in backward simulations the release of particles takes place at the
     355  !Af         receptor and the sampling at the source.
     356  !Af          1="mass"
     357  !Af          2="mass mixing ratio"
     358  !Af IND_RECEPTOR switches between different units for concentrations at the receptor
     359  !Af          1="mass"
     360  !Af          2="mass mixing ratio"
     361
     362  !Af switches for the releasefile:
     363  !Af IND_REL =  1 : xmass * rho
     364  !Af IND_REL =  0 : xmass * 1
     365
     366  !Af ind_rel is defined in readcommand.f
    367367
    368368            if (ind_rel .eq. 1) then
    369369
    370 ! Interpolate the air density
    371 !****************************
     370  ! Interpolate the air density
     371  !****************************
    372372
    373373              do ii=2,nz
     
    403403
    404404
    405 ! Multiply "mass" (i.e., mass mixing ratio in forward runs) with density
    406 !********************************************************************
     405  ! Multiply "mass" (i.e., mass mixing ratio in forward runs) with density
     406  !********************************************************************
    407407
    408408              do k=1,nspec
     
    416416          endif
    417417        end do
    418 ! ESO TODO: Here we could use dynamic allocation and increase array sizes
    419         if (ipart.gt.maxpart_mpi) goto 996
     418        if (ipart.gt.maxpart) goto 996
    420419
    42142034      minpart=ipart+1
    422421      end do
    423     endif
     422      endif
    424423  end do
    425424
     
    427426  return
    428427
    429 996 continue
     428996   continue
    430429  write(*,*) '#####################################################'
    431430  write(*,*) '#### FLEXPART MODEL SUBROUTINE RELEASEPARTICLES: ####'
  • src/timemanager_mpi.f90

    r861805a r0f7835d  
    104104
    105105  logical :: reqv_state=.false. ! .true. if waiting for a MPI_Irecv to complete
    106   integer :: j,ks,kp,l,n,itime=0,nstop,nstop1,memstat=0
     106  integer :: j,ks,kp,l,n,itime=0,nstop,nstop1,memstat=0 !,mind
    107107! integer :: ksp
    108108  integer :: ip
     
    155155
    156156  do itime=0,ideltas,lsynctime
    157    
    158157
    159158! Computation of wet deposition, OH reaction and mass transfer
     
    167166!********************************************************************
    168167
    169     if (mp_dbg_mode) write(*,*) 'myid, itime: ',mp_pid,itime
     168    if (mp_dev_mode) write(*,*) 'myid, itime: ',mp_pid,itime
    170169   
    171170    if (WETDEP .and. itime .ne. 0 .and. numpart .gt. 0) then
     
    275274
    276275    if (mp_measure_time.and..not.(lmpreader.and.lmp_use_reader)) call mpif_mtime('getfields',1)
    277 
    278 ! For validation and tests: call the function below to set all fields to simple
    279 ! homogeneous values
    280 !    if (mp_dev_mode) call set_fields_synthetic
    281 
    282 !*******************************************************************************
    283276
    284277    if (lmpreader.and.nstop1.gt.1) stop 'NO METEO FIELDS AVAILABLE'
     
    331324      call releaseparticles(itime)
    332325    endif
    333 
    334 
    335 ! Check if particles should be redistributed among processes
    336 !***********************************************************
    337     call mpif_calculate_part_redist(itime)
    338326
    339327
     
    554542! Decide whether to write an estimate of the number of particles released,
    555543! or exact number (require MPI reduce operation)
    556         if (mp_dbg_mode) then
     544        if (mp_dev_mode) then
    557545          numpart_tot_mpi = numpart
    558546        else
     
    561549
    562550        if (mp_exact_numpart.and..not.(lmpreader.and.lmp_use_reader).and.&
    563              &.not.mp_dbg_mode) then
     551             &.not.mp_dev_mode) then
    564552          call MPI_Reduce(numpart, numpart_tot_mpi, 1, MPI_INTEGER, MPI_SUM, id_root, &
    565553               & mp_comm_used, mp_ierr)
     
    567555       
    568556        !CGZ-lifetime: output species lifetime
    569         if (lroot.or.mp_dbg_mode) then
     557        if (lroot.or.mp_dev_mode) then
    570558        !   write(*,*) 'Overview species lifetime in days', &
    571559        !        real((species_lifetime(:,1)/species_lifetime(:,2))/real(3600.0*24.0))
     
    577565        !   end if
    578566        end if
    579 
    580         ! Write particles for all processes
    581         if (mp_dev_mode) write(*,*) "PID, itime, numpart", mp_pid,itime,numpart
    582 
    583567
    58456845      format(i13,' SECONDS SIMULATED: ',i13, ' PARTICLES:    Uncertainty: ',3f7.3)
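The mp_exact_numpart branch above obtains the exact global particle count with an MPI_Reduce onto the root rank instead of printing a per-rank estimate. A minimal standalone sketch of that reduction follows; the per-rank counts are invented, and MPI_COMM_WORLD stands in for mp_comm_used.

program numpart_reduce
  use mpi
  implicit none
  integer :: numpart, numpart_tot_mpi, my_rank, n_ranks, ierr

  call MPI_Init(ierr)
  call MPI_Comm_rank(MPI_COMM_WORLD, my_rank, ierr)
  call MPI_Comm_size(MPI_COMM_WORLD, n_ranks, ierr)

  numpart = 1000 + 10*my_rank   ! stand-in for each process' local particle count
  numpart_tot_mpi = 0

  ! Sum the local counts onto rank 0, as timemanager_mpi.f90 does onto id_root.
  call MPI_Reduce(numpart, numpart_tot_mpi, 1, MPI_INTEGER, MPI_SUM, 0, &
       MPI_COMM_WORLD, ierr)

  if (my_rank == 0) write(*,*) 'exact total particle count:', numpart_tot_mpi
  call MPI_Finalize(ierr)
end program numpart_reduce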