Changeset 328fdf9 in flexpart.git


Timestamp: Apr 28, 2019, 7:53:42 PM
Author: Espen Sollum ATMOS <eso@…>
Branches: master, 10.4.1_pesei, GFS_025, bugfixes+enhancements, dev, release-10, release-10.4.1, scaling-bug
Children: 0a94e13
Parents: 77783e3
Message: bugfix: particles were lost at the start of a global domain-filling run; also fixes an error when restarting a domain-filling run from a particle dump

Location: src
Files: 5 edited

Legend: unchanged context lines are unmarked; added lines are prefixed with '+', removed lines with '-'; hunk headers '@@ -old +new @@' give the starting line numbers in the old and new revisions.
  • src/com_mod.f90 (diff re9e0f06 → r328fdf9)

    @@ -651 +651 @@
       real :: receptorarea(maxreceptor)
       real :: creceptor(maxreceptor,maxspec)
    +  real, allocatable, dimension(:,:) :: creceptor0
       character(len=16) :: receptorname(maxreceptor)
       integer :: numreceptor
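
    The new module array creceptor0 serves as a separate receive buffer for the receptor-concentration reduction when MPI_IN_PLACE is not used (see the mpi_mod.f90 changes below); it is allocated on the root process only. A minimal sketch of that pattern, assuming an illustrative module name and illustrative sizes (only creceptor0 itself comes from the changeset):

        module receptor_buf_mod
          implicit none
          ! Receive buffer for the receptor-concentration reduction;
          ! allocated on the root MPI rank only
          real, allocatable, dimension(:,:) :: creceptor0
        end module receptor_buf_mod

        program demo_alloc_root_only
          use mpi
          use receptor_buf_mod
          implicit none
          integer, parameter :: maxreceptor = 8, maxspec = 2  ! illustrative sizes
          integer :: rank, ierr

          call MPI_Init(ierr)
          call MPI_Comm_rank(MPI_COMM_WORLD, rank, ierr)
          ! Only the root rank needs the buffer; the others leave it unallocated
          if (rank == 0) allocate(creceptor0(maxreceptor,maxspec))
          if (rank == 0) print *, 'creceptor0 allocated on root:', allocated(creceptor0)
          call MPI_Finalize(ierr)
        end program demo_alloc_root_only
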
  • src/init_domainfill.f90 (diff rb5127f9 → r328fdf9)

    @@ -414 +414 @@
     !***************************************************************************
     
    -  if (ipin.eq.1) then
    +  if ((ipin.eq.1).and.(.not.gdomainfill)) then
        open(unitboundcond,file=path(2)(1:length(2))//'boundcond.bin', &
             form='unformatted')
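
    With the tightened guard, boundcond.bin is read only when restarting (ipin = 1) a run that is not a global domain-filling run; a restarted global run resumes from the particle dump and has no boundary-condition file to read. A small standalone sketch of the guard logic, with hypothetical hard-coded values standing in for FLEXPART's namelist input:

        program guard_sketch
          implicit none
          integer :: ipin
          logical :: gdomainfill

          ipin = 1               ! hypothetical: restarting from a particle dump
          gdomainfill = .true.   ! hypothetical: global domain-filling run

          if ((ipin.eq.1).and.(.not.gdomainfill)) then
            print *, 'regional restart: would read boundcond.bin here'
          else
            print *, 'global domain-filling restart: boundcond.bin is skipped'
          end if
        end program guard_sketch
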
  • src/init_domainfill_mpi.f90 (diff rb5127f9 → r328fdf9)

    @@ -110 +110 @@
       endif
     
    +! Exit here if resuming a run from particle dump
    +!***********************************************
    +  if (gdomainfill.and.ipin.ne.0) return
    +
     ! Do not release particles twice (i.e., not at both in the leftmost and rightmost
     ! grid cell) for a global domain

    @@ -213 +217 @@
             colmass(ix,jy)=(pp(1)-pp(nz))/ga*gridarea(jy)
             colmasstotal=colmasstotal+colmass(ix,jy)
    -
           end do
         end do

    @@ -466 +469 @@
     
     ! eso TODO: only needed for root process
    -    if (ipin.eq.1) then
    +    if ((ipin.eq.1).and.(.not.gdomainfill)) then
           open(unitboundcond,file=path(2)(1:length(2))//'boundcond.bin', &
                form='unformatted')

    @@ -474 +477 @@
         endif
     
    -    numpart = numpart/mp_partgroup_np
    -    if (mod(numpart,mp_partgroup_np).ne.0) numpart=numpart+1
    -
    -  else ! Allocate dummy arrays for receiving processes
    -    allocate(itra1_tmp(nullsize),npoint_tmp(nullsize),nclass_tmp(nullsize),&
    -         & idt_tmp(nullsize),itramem_tmp(nullsize),itrasplit_tmp(nullsize),&
    -         & xtra1_tmp(nullsize),ytra1_tmp(nullsize),ztra1_tmp(nullsize),&
    -         & xmass1_tmp(nullsize, nullsize))
    +    if (ipin.eq.0) then
    +      numpart = numpart/mp_partgroup_np
    +      if (mod(numpart,mp_partgroup_np).ne.0) numpart=numpart+1
    +    end if
    +
    +  else ! Allocate dummy arrays for receiving processes
    +    if (ipin.eq.0) then
    +      allocate(itra1_tmp(nullsize),npoint_tmp(nullsize),nclass_tmp(nullsize),&
    +           & idt_tmp(nullsize),itramem_tmp(nullsize),itrasplit_tmp(nullsize),&
    +           & xtra1_tmp(nullsize),ytra1_tmp(nullsize),ztra1_tmp(nullsize),&
    +           & xmass1_tmp(nullsize, nullsize))
    +    end if
     
    -  end if ! end if(lroot)
    +  end if ! end if(lroot)
    +
     
     
     ! Distribute particles to other processes (numpart is 'per-process', not total)
    -  call MPI_Bcast(numpart, 1, MPI_INTEGER, id_root, mp_comm_used, mp_ierr)
    -! eso TODO: xmassperparticle: not necessary to send
    -  call MPI_Bcast(xmassperparticle, 1, mp_sp, id_root, mp_comm_used, mp_ierr)
    -  call mpif_send_part_properties(numpart)
    +! Only if not restarting from previous run
    +  if (ipin.eq.0) then
    +    call MPI_Bcast(numpart, 1, MPI_INTEGER, id_root, mp_comm_used, mp_ierr)
    +    call mpif_send_part_properties(npart(1)/mp_partgroup_np)
     
     ! Deallocate the temporary arrays used for all particles
    -  deallocate(itra1_tmp,npoint_tmp,nclass_tmp,idt_tmp,itramem_tmp,&
    +    deallocate(itra1_tmp,npoint_tmp,nclass_tmp,idt_tmp,itramem_tmp,&
            & itrasplit_tmp,xtra1_tmp,ytra1_tmp,ztra1_tmp,xmass1_tmp)
    +  end if
     
     
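
    The MPI variant applies the same fix more broadly: when a global domain-filling run is resumed from a particle dump (gdomainfill true and ipin /= 0) the routine returns before re-creating any particles, and the per-process particle count, the dummy receive arrays and the broadcast/distribution step run only on a cold start (ipin = 0). A condensed, hypothetical sketch of that control flow; the real routine does the actual initialization work where this sketch only prints:

        program init_domainfill_flow
          implicit none
          ! Hypothetical values standing in for FLEXPART's namelist and MPI state
          call init_domainfill_sketch(ipin=1, gdomainfill=.true., lroot=.true.)

        contains

          subroutine init_domainfill_sketch(ipin, gdomainfill, lroot)
            integer, intent(in) :: ipin
            logical, intent(in) :: gdomainfill, lroot

            ! Exit here if resuming a run from particle dump: the particles are
            ! read back from the dump, so nothing must be re-initialized
            if (gdomainfill .and. ipin /= 0) then
              print *, 'resuming global domain-filling run: skip initialization'
              return
            end if

            if (lroot) then
              ! Only a cold start splits the freshly created particles per process
              if (ipin == 0) print *, 'split numpart across processes'
            else
              ! Receiving processes allocate dummy arrays only on a cold start
              if (ipin == 0) print *, 'allocate dummy receive arrays'
            end if

            ! Broadcast and distribute particles only if not restarting
            if (ipin == 0) print *, 'broadcast numpart and distribute particles'
          end subroutine init_domainfill_sketch

        end program init_domainfill_flow
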
  • src/mpi_mod.f90 (diff r0ecc1fe → r328fdf9)

    @@ -125 +125 @@
     ! mp_time_barrier   Measure MPI barrier time
     ! mp_exact_numpart  Use an extra MPI communication to give the exact number of particles
    -!                   to standard output (this does *not* otherwise affect the simulation)
    +!                   to standard output (this does *not* otherwise affect the simulation)
    +! mp_rebalance      Attempt to rebalance particle between processes
       logical, parameter :: mp_dbg_mode = .false.
       logical, parameter :: mp_dev_mode = .false.

    @@ -132 +133 @@
       logical, parameter :: mp_measure_time=.false.
       logical, parameter :: mp_exact_numpart=.true.
    +  logical, parameter :: mp_rebalance=.true.
     
     ! for measuring CPU/Wall time

    @@ -144 +146 @@
       real(dp),private :: mp_getfields_wtime_beg, mp_getfields_wtime_end, mp_getfields_wtime_total=0.
       real(sp),private :: mp_getfields_time_beg, mp_getfields_time_end, mp_getfields_time_total=0.
    -  real(dp),private :: mp_readwind_wtime_beg, mp_readwind_wtime_end, mp_readwind_wtime_total=0.
    -  real(sp),private :: mp_readwind_time_beg, mp_readwind_time_end, mp_readwind_time_total=0.
    +!  real(dp),private :: mp_readwind_wtime_beg, mp_readwind_wtime_end, mp_readwind_wtime_total=0.
    +!  real(sp),private :: mp_readwind_time_beg, mp_readwind_time_end, mp_readwind_time_total=0.
       real(dp),private :: mp_io_wtime_beg, mp_io_wtime_end, mp_io_wtime_total=0.
       real(sp),private :: mp_io_time_beg, mp_io_time_end, mp_io_time_total=0.

    @@ -190 +192 @@
     !   mp_np       number of running processes, decided at run-time
     !***********************************************************************
    -    use par_mod, only: maxpart, numwfmem, dep_prec
    -    use com_mod, only: mpi_mode, verbosity
    +    use par_mod, only: maxpart, numwfmem, dep_prec, maxreceptor, maxspec
    +    use com_mod, only: mpi_mode, verbosity, creceptor0
     
         implicit none

    @@ -337 +339 @@
     
     ! Set maxpart per process
    -! eso 08/2016: Increase maxpart per process, in case of unbalanced distribution
    +! ESO 08/2016: Increase maxpart per process, in case of unbalanced distribution
         maxpart_mpi=int(mp_maxpart_factor*real(maxpart)/real(mp_partgroup_np))
         if (mp_np == 1) maxpart_mpi = maxpart

    @@ -365 +367 @@
         end if
     
    +! Write whether MPI_IN_PLACE is used or not
    +#ifdef USE_MPIINPLACE
    +    if (lroot) write(*,*) 'Using MPI_IN_PLACE operations'
    +#else
    +    if (lroot) allocate(creceptor0(maxreceptor,maxspec))
    +    if (lroot) write(*,*) 'Not using MPI_IN_PLACE operations'
    +#endif
         goto 101
     

    @@ -559 +568 @@
     ! invalid particles at the end of the arrays
     
    -601 do i=num_part, 1, -1
    +601 do i=numpart, 1, -1
           if (itra1(i).eq.-999999999) then
             numpart=numpart-1

    @@ -1961 +1970 @@
         if (readclouds) then
           j=j+1
    -      call MPI_Irecv(ctwc(:,:,mind),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    +      call MPI_Irecv(ctwc(:,:,mind),d2s1*5,mp_sp,id_read,MPI_ANY_TAG,&
               &MPI_COMM_WORLD,reqs(j),mp_ierr)
           if (mp_ierr /= 0) goto 600

    @@ -2326 +2335 @@
           if (readclouds) then
             j=j+1
    -        call MPI_Irecv(ctwcn(:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    +        call MPI_Irecv(ctwcn(:,:,mind,k),d2s1*5,mp_sp,id_read,MPI_ANY_TAG,&
                 &MPI_COMM_WORLD,reqs(j),mp_ierr)
             if (mp_ierr /= 0) goto 600

    @@ -2462 +2471 @@
         end if
     
    +  ! Receptor concentrations
    +    if (lroot) then
    +      call MPI_Reduce(MPI_IN_PLACE,creceptor,rcpt_size,mp_sp,MPI_SUM,id_root, &
    +           & mp_comm_used,mp_ierr)
    +      if (mp_ierr /= 0) goto 600
    +    else
    +      call MPI_Reduce(creceptor,0,rcpt_size,mp_sp,MPI_SUM,id_root, &
    +           & mp_comm_used,mp_ierr)
    +    end if
    +
     #else
     
           call MPI_Reduce(gridunc, gridunc0, grid_size3d, mp_sp, MPI_SUM, id_root, &
                & mp_comm_used, mp_ierr)
    +      if (mp_ierr /= 0) goto 600
           if (lroot) gridunc = gridunc0
    +
    +      call MPI_Reduce(creceptor, creceptor0,rcpt_size,mp_sp,MPI_SUM,id_root, &
    +           & mp_comm_used,mp_ierr)
    +      if (mp_ierr /= 0) goto 600
    +      if (lroot) creceptor = creceptor0
     
     #endif

    @@ -2482 +2507 @@
         end if
     
    -! Receptor concentrations
    -    if (lroot) then
    -      call MPI_Reduce(MPI_IN_PLACE,creceptor,rcpt_size,mp_sp,MPI_SUM,id_root, &
    -           & mp_comm_used,mp_ierr)
    -      if (mp_ierr /= 0) goto 600
    -    else
    -      call MPI_Reduce(creceptor,0,rcpt_size,mp_sp,MPI_SUM,id_root, &
    -           & mp_comm_used,mp_ierr)
    -    end if
     
         if (mp_measure_time) call mpif_mtime('commtime',1)

    @@ -2700 +2716 @@
           end if
     
    -    case ('readwind')
    -      if (imode.eq.0) then
    -        call cpu_time(mp_readwind_time_beg)
    -        mp_readwind_wtime_beg = mpi_wtime()
    -      else
    -        call cpu_time(mp_readwind_time_end)
    -        mp_readwind_wtime_end = mpi_wtime()
    -
    -        mp_readwind_time_total = mp_readwind_time_total + &
    -             &(mp_readwind_time_end - mp_readwind_time_beg)
    -        mp_readwind_wtime_total = mp_readwind_wtime_total + &
    -             &(mp_readwind_wtime_end - mp_readwind_wtime_beg)
    -      end if
    +!    case ('readwind')
    +!      if (imode.eq.0) then
    +!        call cpu_time(mp_readwind_time_beg)
    +!        mp_readwind_wtime_beg = mpi_wtime()
    +!      else
    +!        call cpu_time(mp_readwind_time_end)
    +!        mp_readwind_wtime_end = mpi_wtime()
    +!
    +!        mp_readwind_time_total = mp_readwind_time_total + &
    +!             &(mp_readwind_time_end - mp_readwind_time_beg)
    +!        mp_readwind_wtime_total = mp_readwind_wtime_total + &
    +!             &(mp_readwind_wtime_end - mp_readwind_wtime_beg)
    +!      end if
     
         case ('commtime')

    @@ -2788 +2804 @@
               write(*,FMT='(A60,TR1,F9.2)') 'TOTAL CPU TIME FOR GETFIELDS:',&
                    & mp_getfields_time_total
    -          write(*,FMT='(A60,TR1,F9.2)') 'TOTAL WALL TIME FOR READWIND:',&
    -               & mp_readwind_wtime_total
    -          write(*,FMT='(A60,TR1,F9.2)') 'TOTAL CPU TIME FOR READWIND:',&
    -               & mp_readwind_time_total
    +!          write(*,FMT='(A60,TR1,F9.2)') 'TOTAL WALL TIME FOR READWIND:',&
    +!               & mp_readwind_wtime_total
    +!          write(*,FMT='(A60,TR1,F9.2)') 'TOTAL CPU TIME FOR READWIND:',&
    +!               & mp_readwind_time_total
               write(*,FMT='(A60,TR1,F9.2)') 'TOTAL WALL TIME FOR FILE IO:',&
                    & mp_io_wtime_total
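
    Besides the commented-out readwind timers, the new mp_rebalance switch and the enlarged receive counts (d2s1*5) for the cloud-water fields, the main change here is that the receptor-concentration reduction now has two code paths: with USE_MPIINPLACE the root reduces into its own creceptor array via MPI_IN_PLACE, and without it the result goes into the newly allocated creceptor0 buffer and is copied back on the root. A minimal, self-contained sketch of both paths, assuming illustrative buffer sizes and a runtime use_in_place flag where FLEXPART selects the path with a preprocessor macro:

        program receptor_reduce_sketch
          use mpi
          implicit none
          integer, parameter :: rcpt_size = 4        ! illustrative buffer size
          integer :: rank, ierr
          logical :: use_in_place
          real :: creceptor(rcpt_size), rdummy(1)
          real, allocatable :: creceptor0(:)

          call MPI_Init(ierr)
          call MPI_Comm_rank(MPI_COMM_WORLD, rank, ierr)
          creceptor = real(rank+1)      ! each rank's local receptor contribution
          use_in_place = .true.         ! stands in for the USE_MPIINPLACE macro

          if (use_in_place) then
            if (rank == 0) then
              ! Root sums all contributions directly into its own array
              call MPI_Reduce(MPI_IN_PLACE, creceptor, rcpt_size, MPI_REAL, &
                   MPI_SUM, 0, MPI_COMM_WORLD, ierr)
            else
              ! Non-root ranks only send; their receive buffer is not used
              call MPI_Reduce(creceptor, rdummy, rcpt_size, MPI_REAL, &
                   MPI_SUM, 0, MPI_COMM_WORLD, ierr)
            end if
          else
            ! Without MPI_IN_PLACE the root needs a separate receive buffer
            ! (non-root ranks pass a dummy that is never written to)
            if (rank == 0) then
              allocate(creceptor0(rcpt_size))
            else
              allocate(creceptor0(1))
            end if
            call MPI_Reduce(creceptor, creceptor0, rcpt_size, MPI_REAL, &
                 MPI_SUM, 0, MPI_COMM_WORLD, ierr)
            if (rank == 0) creceptor = creceptor0
          end if

          if (rank == 0) print *, 'summed receptor values on root:', creceptor
          call MPI_Finalize(ierr)
        end program receptor_reduce_sketch

    The in-place variant avoids the extra buffer and the copy on the root, which is why the separate creceptor0 buffer is only allocated when MPI_IN_PLACE is not used.
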
  • src/timemanager_mpi.f90 (diff r20963b1 → r328fdf9)

    @@ -344 +344 @@
     ! Check if particles should be redistributed among processes
     !***********************************************************
    -    call mpif_calculate_part_redist(itime)
    +    if (mp_rebalance) call mpif_calculate_part_redist(itime)
     
     

    @@ -532 +532 @@
                     end if
     
    -              else  ! :TODO: check for zeroing in the netcdf module
    +              else
                     call concoutput_surf_nest(itime,outnum)
                   end if
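
    The redistribution call is now guarded by the mp_rebalance switch introduced in mpi_mod.f90, so particle load balancing between processes can be turned off at compile time. A trivial sketch of the pattern, with a hypothetical stand-in for mpif_calculate_part_redist:

        program rebalance_switch_sketch
          implicit none
          ! Compile-time switch, mirroring mp_rebalance in mpi_mod.f90
          logical, parameter :: mp_rebalance = .true.
          integer :: itime

          do itime = 0, 2
            ! The (potentially expensive) redistribution step runs only
            ! when the switch is enabled
            if (mp_rebalance) call calculate_part_redist(itime)
          end do

        contains

          subroutine calculate_part_redist(itime)
            ! Hypothetical stand-in for mpif_calculate_part_redist
            integer, intent(in) :: itime
            print *, 'checking particle balance at itime =', itime
          end subroutine calculate_part_redist

        end program rebalance_switch_sketch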