Changeset 328fdf9 in flexpart.git for src/mpi_mod.f90


Timestamp: Apr 28, 2019, 7:53:42 PM (5 years ago)
Author: Espen Sollum ATMOS <eso@…>
Branches: master, 10.4.1_pesei, GFS_025, bugfixes+enhancements, dev, release-10, release-10.4.1, scaling-bug
Children: 0a94e13
Parents: 77783e3
Message: Bugfix: particles were lost at the start of a global domain-filling run; error when restarting a domain-filling run from a particle dump.

File: 1 edited

Legend: '+' marks added lines, '-' marks removed lines; other lines are unchanged context.
  • src/mpi_mod.f90 (r0ecc1fe → r328fdf9)

@@ -125 +125 @@
 ! mp_time_barrier   Measure MPI barrier time
 ! mp_exact_numpart  Use an extra MPI communication to give the exact number of particles
-!                   to standard output (this does *not* otherwise affect the simulation)
+!                   to standard output (this does *not* otherwise affect the simulation)
+! mp_rebalance      Attempt to rebalance particle between processes
   logical, parameter :: mp_dbg_mode = .false.
   logical, parameter :: mp_dev_mode = .false.
     
@@ -132 +133 @@
   logical, parameter :: mp_measure_time=.false.
   logical, parameter :: mp_exact_numpart=.true.
+  logical, parameter :: mp_rebalance=.true.

 ! for measuring CPU/Wall time
     
@@ -144 +146 @@
   real(dp),private :: mp_getfields_wtime_beg, mp_getfields_wtime_end, mp_getfields_wtime_total=0.
   real(sp),private :: mp_getfields_time_beg, mp_getfields_time_end, mp_getfields_time_total=0.
-  real(dp),private :: mp_readwind_wtime_beg, mp_readwind_wtime_end, mp_readwind_wtime_total=0.
-  real(sp),private :: mp_readwind_time_beg, mp_readwind_time_end, mp_readwind_time_total=0.
+!  real(dp),private :: mp_readwind_wtime_beg, mp_readwind_wtime_end, mp_readwind_wtime_total=0.
+!  real(sp),private :: mp_readwind_time_beg, mp_readwind_time_end, mp_readwind_time_total=0.
   real(dp),private :: mp_io_wtime_beg, mp_io_wtime_end, mp_io_wtime_total=0.
   real(sp),private :: mp_io_time_beg, mp_io_time_end, mp_io_time_total=0.
     
@@ -190 +192 @@
 !   mp_np       number of running processes, decided at run-time
 !***********************************************************************
-    use par_mod, only: maxpart, numwfmem, dep_prec
-    use com_mod, only: mpi_mode, verbosity
+    use par_mod, only: maxpart, numwfmem, dep_prec, maxreceptor, maxspec
+    use com_mod, only: mpi_mode, verbosity, creceptor0

     implicit none
     
@@ -337 +339 @@

 ! Set maxpart per process
-! eso 08/2016: Increase maxpart per process, in case of unbalanced distribution
+! ESO 08/2016: Increase maxpart per process, in case of unbalanced distribution
     maxpart_mpi=int(mp_maxpart_factor*real(maxpart)/real(mp_partgroup_np))
     if (mp_np == 1) maxpart_mpi = maxpart
     
@@ -365 +367 @@
     end if

+! Write whether MPI_IN_PLACE is used or not
+#ifdef USE_MPIINPLACE
+    if (lroot) write(*,*) 'Using MPI_IN_PLACE operations'
+#else
+    if (lroot) allocate(creceptor0(maxreceptor,maxspec))
+    if (lroot) write(*,*) 'Not using MPI_IN_PLACE operations'
+#endif
     goto 101
     
@@ -559 +568 @@
 ! invalid particles at the end of the arrays

-601 do i=num_part, 1, -1
+601 do i=numpart, 1, -1
       if (itra1(i).eq.-999999999) then
         numpart=numpart-1
     
@@ -1961 +1970 @@
     if (readclouds) then
       j=j+1
-      call MPI_Irecv(ctwc(:,:,mind),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
+      call MPI_Irecv(ctwc(:,:,mind),d2s1*5,mp_sp,id_read,MPI_ANY_TAG,&
           &MPI_COMM_WORLD,reqs(j),mp_ierr)
       if (mp_ierr /= 0) goto 600
     
@@ -2326 +2335 @@
       if (readclouds) then
         j=j+1
-        call MPI_Irecv(ctwcn(:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
+        call MPI_Irecv(ctwcn(:,:,mind,k),d2s1*5,mp_sp,id_read,MPI_ANY_TAG,&
             &MPI_COMM_WORLD,reqs(j),mp_ierr)
         if (mp_ierr /= 0) goto 600
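
The two hunks above enlarge the element count passed to MPI_Irecv for the cloud-water fields from d2s1 to d2s1*5. As a general MPI rule, the receive count is an upper bound on what the matching send may deliver; if the sender transmits more elements than the posted receive allows for, MPI flags the message as truncated. The standalone sketch below (not FLEXPART code; the program, the sizes nx and nfld, and the rank roles are invented for illustration) shows a receive whose count covers the full buffer being sent:

program irecv_count_sketch
  use mpi
  implicit none
  integer, parameter :: nx = 3, nfld = 5        ! hypothetical sizes
  real :: sendbuf(nx,nfld), recvbuf(nx,nfld)
  integer :: rank, nprocs, ierr, req
  integer :: stat(MPI_STATUS_SIZE)

  call MPI_Init(ierr)
  call MPI_Comm_rank(MPI_COMM_WORLD, rank, ierr)
  call MPI_Comm_size(MPI_COMM_WORLD, nprocs, ierr)

  if (nprocs >= 2) then            ! needs at least a sender and a receiver
    if (rank == 1) then
      sendbuf = 42.0
      ! the sender transmits the whole nx*nfld block as one message
      call MPI_Send(sendbuf, nx*nfld, MPI_REAL, 0, 0, MPI_COMM_WORLD, ierr)
    else if (rank == 0) then
      ! the posted receive count must cover the full message; posting only
      ! nx elements here would make MPI report the message as truncated
      call MPI_Irecv(recvbuf, nx*nfld, MPI_REAL, 1, MPI_ANY_TAG, &
           & MPI_COMM_WORLD, req, ierr)
      call MPI_Wait(req, stat, ierr)
      write(*,*) 'received value:', recvbuf(1,1)
    end if
  end if

  call MPI_Finalize(ierr)
end program irecv_count_sketch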
     
@@ -2462 +2471 @@
     end if

+  ! Receptor concentrations
+    if (lroot) then
+      call MPI_Reduce(MPI_IN_PLACE,creceptor,rcpt_size,mp_sp,MPI_SUM,id_root, &
+           & mp_comm_used,mp_ierr)
+      if (mp_ierr /= 0) goto 600
+    else
+      call MPI_Reduce(creceptor,0,rcpt_size,mp_sp,MPI_SUM,id_root, &
+           & mp_comm_used,mp_ierr)
+    end if
+
 #else

       call MPI_Reduce(gridunc, gridunc0, grid_size3d, mp_sp, MPI_SUM, id_root, &
            & mp_comm_used, mp_ierr)
+      if (mp_ierr /= 0) goto 600
       if (lroot) gridunc = gridunc0
+
+      call MPI_Reduce(creceptor, creceptor0,rcpt_size,mp_sp,MPI_SUM,id_root, &
+           & mp_comm_used,mp_ierr)
+      if (mp_ierr /= 0) goto 600
+      if (lroot) creceptor = creceptor0

 #endif
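
The hunk above moves the receptor-concentration reduction inside the USE_MPIINPLACE branch: with MPI_IN_PLACE available, the root rank sums directly into creceptor, while the fallback path reduces into the newly allocated creceptor0 buffer and copies the result back on the root. A minimal standalone sketch of the two patterns, assuming a preprocessed build (.F90 or -cpp); the names local and global and the size n are invented for illustration:

program reduce_in_place_sketch
  use mpi
  implicit none
  integer, parameter :: n = 4
  real :: local(n), global(n)
  integer :: rank, ierr

  call MPI_Init(ierr)
  call MPI_Comm_rank(MPI_COMM_WORLD, rank, ierr)
  local = real(rank + 1)

#ifdef USE_MPIINPLACE
  ! in-place variant: the root passes MPI_IN_PLACE as the send buffer, so
  ! the sum overwrites its own array and no second buffer is required
  if (rank == 0) then
    call MPI_Reduce(MPI_IN_PLACE, local, n, MPI_REAL, MPI_SUM, 0, &
         & MPI_COMM_WORLD, ierr)
  else
    call MPI_Reduce(local, global, n, MPI_REAL, MPI_SUM, 0, &
         & MPI_COMM_WORLD, ierr)
  end if
#else
  ! two-buffer variant: the sum lands in a separate receive buffer (the role
  ! creceptor0 plays in the changeset) and is copied back on the root
  call MPI_Reduce(local, global, n, MPI_REAL, MPI_SUM, 0, &
       & MPI_COMM_WORLD, ierr)
  if (rank == 0) local = global
#endif

  if (rank == 0) write(*,*) 'reduced values:', local
  call MPI_Finalize(ierr)
end program reduce_in_place_sketch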
     
@@ -2482 +2507 @@
     end if

-! Receptor concentrations
-    if (lroot) then
-      call MPI_Reduce(MPI_IN_PLACE,creceptor,rcpt_size,mp_sp,MPI_SUM,id_root, &
-           & mp_comm_used,mp_ierr)
-      if (mp_ierr /= 0) goto 600
-    else
-      call MPI_Reduce(creceptor,0,rcpt_size,mp_sp,MPI_SUM,id_root, &
-           & mp_comm_used,mp_ierr)
-    end if

     if (mp_measure_time) call mpif_mtime('commtime',1)
     
@@ -2700 +2716 @@
       end if

-    case ('readwind')
-      if (imode.eq.0) then
-        call cpu_time(mp_readwind_time_beg)
-        mp_readwind_wtime_beg = mpi_wtime()
-      else
-        call cpu_time(mp_readwind_time_end)
-        mp_readwind_wtime_end = mpi_wtime()
-
-        mp_readwind_time_total = mp_readwind_time_total + &
-             &(mp_readwind_time_end - mp_readwind_time_beg)
-        mp_readwind_wtime_total = mp_readwind_wtime_total + &
-             &(mp_readwind_wtime_end - mp_readwind_wtime_beg)
-      end if
+!    case ('readwind')
+!      if (imode.eq.0) then
+!        call cpu_time(mp_readwind_time_beg)
+!        mp_readwind_wtime_beg = mpi_wtime()
+!      else
+!        call cpu_time(mp_readwind_time_end)
+!        mp_readwind_wtime_end = mpi_wtime()
+!
+!        mp_readwind_time_total = mp_readwind_time_total + &
+!             &(mp_readwind_time_end - mp_readwind_time_beg)
+!        mp_readwind_wtime_total = mp_readwind_wtime_total + &
+!             &(mp_readwind_wtime_end - mp_readwind_wtime_beg)
+!      end if

     case ('commtime')
     
@@ -2788 +2804 @@
           write(*,FMT='(A60,TR1,F9.2)') 'TOTAL CPU TIME FOR GETFIELDS:',&
                & mp_getfields_time_total
-          write(*,FMT='(A60,TR1,F9.2)') 'TOTAL WALL TIME FOR READWIND:',&
-               & mp_readwind_wtime_total
-          write(*,FMT='(A60,TR1,F9.2)') 'TOTAL CPU TIME FOR READWIND:',&
-               & mp_readwind_time_total
+!          write(*,FMT='(A60,TR1,F9.2)') 'TOTAL WALL TIME FOR READWIND:',&
+!               & mp_readwind_wtime_total
+!          write(*,FMT='(A60,TR1,F9.2)') 'TOTAL CPU TIME FOR READWIND:',&
+!               & mp_readwind_time_total
           write(*,FMT='(A60,TR1,F9.2)') 'TOTAL WALL TIME FOR FILE IO:',&
                & mp_io_wtime_total