Changes in [d005a67:5d74ed9] in flexpart.git


Files: 3 deleted, 16 edited

Legend:

  (no prefix)  unchanged context line
  -            line removed (present only in the left-hand revision)
  +            line added (present only in the right-hand revision)
  …            unchanged lines omitted between hunks
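
Taken together, the hunks below appear to undo one feature and some related refactoring: the right-hand revisions drop the time-averaged particle-position output (IPOUT=3, its IPOUTFAC interval factor, the part_av_* arrays, and the partoutput_average/partpos_average routines), replace the persistent npart_per_process array in mpi_mod.f90 with a locally allocated numparticles_mpi, and remove the early return that skipped particle initialization when a global domain-filling run is resumed from a particle dump (ipin.ne.0).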
  • options/COMMAND

    r0a94e13 → r6d73c4b
      IFINE=                 4, ! Reduction for time step in vertical transport, used only if CTL>1
      IOUT=                  1, ! Output type: [1]mass 2]pptv 3]1&2 4]plume 5]1&4, +8 for NetCDF output
    - IPOUT=                 0, ! Particle position output: 0]no 1]every output 2]only at end 3]time averaged
    + IPOUT=                 0, ! Particle position output: 0]no 1]every output 2]only at end
      LSUBGRID=              0, ! Increase of ABL heights due to sub-grid scale orographic variations;[0]off 1]on
      LCONVECTION=           1, ! Switch for convection parameterization;0]off [1]on
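
options/COMMAND is read as a Fortran namelist by src/readcommand.f90 (diffed further below). As a reminder of the mechanics, a minimal runnable sketch of such a read follows; the group name, unit number and the reduced option set are illustrative assumptions, not FLEXPART's actual reader:

      ! Sketch: reading COMMAND-style options as a namelist. Group name,
      ! unit number and option subset are assumptions for illustration.
      program read_command_sketch
        implicit none
        integer :: ifine, iout, ipout, lsubgrid, lconvection, ios
        namelist /command/ ifine, iout, ipout, lsubgrid, lconvection

        ifine = 4            ! defaults mirroring the values shown above
        iout = 1
        ipout = 0
        lsubgrid = 0
        lconvection = 1

        open(10, file='options/COMMAND', status='old', action='read')
        read(10, nml=command, iostat=ios)
        close(10)
        if (ios /= 0) stop 'error reading COMMAND namelist'
      end program read_command_sketch
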
  • src/FLEXPART.f90

    r0a94e13 → r50958b8
      integer :: detectformat

    +
    +
    + ! Initialize arrays in com_mod
    + !*****************************
    + call com_mod_allocate_part(maxpart)

    +
      ! Generate a large number of random numbers
      !******************************************
    …
      endif

    - ! Initialize arrays in com_mod
    - !*****************************
    - call com_mod_allocate_part(maxpart)
    -
    -
      ! Read the age classes to be used
      !********************************
  • src/FLEXPART_MPI.f90

    r0c8c7f2 → r20963b1
      if (mp_measure_time) call mpif_mtime('flexpart',0)

    -
    + ! Initialize arrays in com_mod
    + !*****************************
    +
    + if(.not.(lmpreader.and.lmp_use_reader)) call com_mod_allocate_part(maxpart_mpi)
    +
    +
      ! Generate a large number of random numbers
      !******************************************
    …
      endif

    - ! Initialize arrays in com_mod
    - !*****************************
    -
    - if(.not.(lmpreader.and.lmp_use_reader)) call com_mod_allocate_part(maxpart_mpi)
    -

      ! Read the age classes to be used
    …
      end if ! (mpif_pid == 0)

    - if (mp_measure_time) call mpif_mtime('iotime',1)
    + if (mp_measure_time) call mpif_mtime('iotime',0)

      if (verbosity.gt.0 .and. lroot) then
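
Net effect of the two hunks in each main program: the right-hand revision hoists the com_mod_allocate_part call from after the setup phase to just below the declarations, so the per-particle arrays exist before the random numbers are generated; the MPI variant keeps the guard so that a dedicated reader process (lmpreader.and.lmp_use_reader) never allocates them.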
  • src/com_mod.f90

    r0a94e13 → re9e0f06

      implicit none
    -
    -

      !****************************************************************
    …

      real :: ctl,fine
    - integer :: ifine,iout,ipout,ipin,iflux,mdomainfill,ipoutfac
    + integer :: ifine,iout,ipout,ipin,iflux,mdomainfill
      integer :: mquasilag,nested_output,ind_source,ind_receptor
      integer :: ind_rel,ind_samp,ioutputforeachrelease,linit_cond,surf_only
    …
      ! iout     output options: 1 conc. output (ng/m3), 2 mixing ratio (pptv), 3 both
      ! ipout    particle dump options: 0 no, 1 every output interval, 2 only at end
    - ! ipoutfac increase particle dump interval by factor (default 1)
      ! ipin     read in particle positions from dumped file from a previous run
      ! fine     real(ifine)
    …

      logical :: gdomainfill
    +
      ! gdomainfill             .T., if domain-filling is global, .F. if not

    …
      real :: receptorarea(maxreceptor)
      real :: creceptor(maxreceptor,maxspec)
    - real, allocatable, dimension(:,:) :: creceptor0
      character(len=16) :: receptorname(maxreceptor)
      integer :: numreceptor
    …
      real, allocatable, dimension(:,:) :: xmass1
      real, allocatable, dimension(:,:) :: xscav_frac1
    -
    - ! Variables used for writing out interval averages for partoutput
    - !****************************************************************
    -
    - integer, allocatable, dimension(:) :: npart_av
    - real, allocatable, dimension(:) :: part_av_cartx,part_av_carty,part_av_cartz,part_av_z,part_av_topo
    - real, allocatable, dimension(:) :: part_av_pv,part_av_qv,part_av_tt,part_av_rho,part_av_tro,part_av_hmix
    - real, allocatable, dimension(:) :: part_av_uu,part_av_vv,part_av_energy

      ! eso: Moved from timemanager
    …
           & idt(nmpart),itramem(nmpart),itrasplit(nmpart),&
           & xtra1(nmpart),ytra1(nmpart),ztra1(nmpart),&
    -      & xmass1(nmpart, maxspec))  ! ,&
    - !         & checklifetime(nmpart,maxspec), species_lifetime(maxspec,2))!CGZ-lifetime
    -
    -   if (ipout.eq.3) then
    -     allocate(npart_av(nmpart),part_av_cartx(nmpart),part_av_carty(nmpart),&
    -          & part_av_cartz(nmpart),part_av_z(nmpart),part_av_topo(nmpart))
    -     allocate(part_av_pv(nmpart),part_av_qv(nmpart),part_av_tt(nmpart),&
    -          & part_av_rho(nmpart),part_av_tro(nmpart),part_av_hmix(nmpart))
    -     allocate(part_av_uu(nmpart),part_av_vv(nmpart),part_av_energy(nmpart))
    -   end if
    +      & xmass1(nmpart, maxspec),&
    +      & checklifetime(nmpart,maxspec), species_lifetime(maxspec,2))!CGZ-lifetime


        allocate(uap(nmpart),ucp(nmpart),uzp(nmpart),us(nmpart),&
             & vs(nmpart),ws(nmpart),cbt(nmpart))
    -
    +
      end subroutine com_mod_allocate_part

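
For context, com_mod_allocate_part sizes the module-level per-particle arrays once the particle limit is known (maxpart, or maxpart_mpi per process). A stripped-down sketch of the pattern; the array set is reduced to a handful, and maxspec is passed as an argument here only to keep the sketch self-contained:

      ! Sketch of the com_mod allocation pattern: module-level allocatable
      ! arrays sized once at startup. The real module declares many more.
      module com_mod_sketch
        implicit none
        integer, allocatable, dimension(:) :: itra1, npoint, nclass
        real, allocatable, dimension(:) :: xtra1, ytra1, ztra1
        real, allocatable, dimension(:,:) :: xmass1
      contains
        subroutine com_mod_allocate_part(nmpart, maxspec)
          integer, intent(in) :: nmpart, maxspec
          allocate(itra1(nmpart), npoint(nmpart), nclass(nmpart), &
               & xtra1(nmpart), ytra1(nmpart), ztra1(nmpart), &
               & xmass1(nmpart, maxspec))
        end subroutine com_mod_allocate_part
      end module com_mod_sketch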
  • src/init_domainfill.f90

    r0a94e13 → rb5127f9
        endif
      endif
    -
    - ! Exit here if resuming a run from particle dump
    - !***********************************************
    -   if (gdomainfill.and.ipin.ne.0) return

    ! Do not release particles twice (i.e., not at both in the leftmost and rightmost
    …
    !***************************************************************************

    -   if ((ipin.eq.1).and.(.not.gdomainfill)) then
    +   if (ipin.eq.1) then
        open(unitboundcond,file=path(2)(1:length(2))//'boundcond.bin', &
             form='unformatted')
  • src/init_domainfill_mpi.f90

    r328fdf9 → rb5127f9
      endif

    - ! Exit here if resuming a run from particle dump
    - !***********************************************
    -   if (gdomainfill.and.ipin.ne.0) return
    -
    ! Do not release particles twice (i.e., not at both in the leftmost and rightmost
    ! grid cell) for a global domain
    …
            colmass(ix,jy)=(pp(1)-pp(nz))/ga*gridarea(jy)
            colmasstotal=colmasstotal+colmass(ix,jy)
    +
          end do
        end do
    …

    ! eso TODO: only needed for root process
    -   if ((ipin.eq.1).and.(.not.gdomainfill)) then
    +   if (ipin.eq.1) then
          open(unitboundcond,file=path(2)(1:length(2))//'boundcond.bin', &
               form='unformatted')
    …
        endif

    -   if (ipin.eq.0) then
    -     numpart = numpart/mp_partgroup_np
    -     if (mod(numpart,mp_partgroup_np).ne.0) numpart=numpart+1
    -   end if
    -
    - else ! Allocate dummy arrays for receiving processes
    -   if (ipin.eq.0) then
    -     allocate(itra1_tmp(nullsize),npoint_tmp(nullsize),nclass_tmp(nullsize),&
    -          & idt_tmp(nullsize),itramem_tmp(nullsize),itrasplit_tmp(nullsize),&
    -          & xtra1_tmp(nullsize),ytra1_tmp(nullsize),ztra1_tmp(nullsize),&
    -          & xmass1_tmp(nullsize, nullsize))
    -   end if
    +   numpart = numpart/mp_partgroup_np
    +   if (mod(numpart,mp_partgroup_np).ne.0) numpart=numpart+1
    +
    + else ! Allocate dummy arrays for receiving processes
    +   allocate(itra1_tmp(nullsize),npoint_tmp(nullsize),nclass_tmp(nullsize),&
    +        & idt_tmp(nullsize),itramem_tmp(nullsize),itrasplit_tmp(nullsize),&
    +        & xtra1_tmp(nullsize),ytra1_tmp(nullsize),ztra1_tmp(nullsize),&
    +        & xmass1_tmp(nullsize, nullsize))

    - end if ! end if(lroot)
    -
    + end if ! end if(lroot)


    ! Distribute particles to other processes (numpart is 'per-process', not total)
    - ! Only if not restarting from previous run
    -   if (ipin.eq.0) then
    -     call MPI_Bcast(numpart, 1, MPI_INTEGER, id_root, mp_comm_used, mp_ierr)
    -     call mpif_send_part_properties(npart(1)/mp_partgroup_np)
    +   call MPI_Bcast(numpart, 1, MPI_INTEGER, id_root, mp_comm_used, mp_ierr)
    + ! eso TODO: xmassperparticle: not necessary to send
    +   call MPI_Bcast(xmassperparticle, 1, mp_sp, id_root, mp_comm_used, mp_ierr)
    +   call mpif_send_part_properties(numpart)

    ! Deallocate the temporary arrays used for all particles
    -   deallocate(itra1_tmp,npoint_tmp,nclass_tmp,idt_tmp,itramem_tmp,&
    +   deallocate(itra1_tmp,npoint_tmp,nclass_tmp,idt_tmp,itramem_tmp,&
           & itrasplit_tmp,xtra1_tmp,ytra1_tmp,ztra1_tmp,xmass1_tmp)
    -   end if

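
The per-process particle count computed above is a ceiling division: numpart/mp_partgroup_np, rounded up when there is a remainder, so no particles are lost to integer truncation; with 1 000 003 particles on 4 processes it yields 250 001 each. As a one-function sketch:

      ! Ceiling division used above to split numpart across np processes
      ! (sketch only; np > 0 assumed).
      integer function per_process(numpart, np)
        implicit none
        integer, intent(in) :: numpart, np
        per_process = numpart/np
        if (mod(numpart, np) /= 0) per_process = per_process + 1
      end function per_process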
  • src/makefile

    r0a94e13 → r7123c70
      OBJECTS_SERIAL = \
              releaseparticles.o      partoutput.o \
    -         partoutput_average.o \
              conccalc.o \
              init_domainfill.o       concoutput.o  \
    …
      ## For MPI version
      OBJECTS_MPI = releaseparticles_mpi.o partoutput_mpi.o \
    -         partoutput_average_mpi.o conccalc_mpi.o \
    +         conccalc_mpi.o \
              init_domainfill_mpi.o concoutput_mpi.o  \
              timemanager_mpi.o FLEXPART_MPI.o        \
    …
      advance.o               initialize.o            \
      writeheader.o           writeheader_txt.o       \
    - partpos_average.o       writeprecip.o \
    + writeprecip.o \
      writeheader_surf.o      assignland.o\
      part0.o                 gethourlyOH.o\
    …
      part0.o: par_mod.o
      partdep.o: par_mod.o
    - partpos_average.o: com_mod.o par_mod.o
      partoutput.o: com_mod.o par_mod.o
    - partoutput_average.o: com_mod.o par_mod.o
    - partoutput_average_mpi.o: com_mod.o par_mod.o mpi_mod.o
      partoutput_mpi.o: com_mod.o mpi_mod.o par_mod.o
      partoutput_short.o: com_mod.o par_mod.o
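
The three objects dropped from the build lists and the dependency rules (partpos_average.o, partoutput_average.o, partoutput_average_mpi.o) belong to source files present only on the left-hand side; presumably these are the "3 deleted" files counted in the summary at the top of this page.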
  • src/mpi_mod.f90

    r0c8c7f2 → r0ecc1fe
    ! Variables for MPI processes in the 'particle' group
      integer, allocatable, dimension(:) :: mp_partgroup_rank
    - integer, allocatable, dimension(:) :: npart_per_process
      integer :: mp_partgroup_comm, mp_partgroup_pid, mp_partgroup_np

    …
    ! mp_time_barrier   Measure MPI barrier time
    ! mp_exact_numpart  Use an extra MPI communication to give the exact number of particles
    - !                   to standard output (this does not otherwise affect the simulation)
    + !                   to standard output (this does *not* otherwise affect the simulation)
      logical, parameter :: mp_dbg_mode = .false.
      logical, parameter :: mp_dev_mode = .false.
    …
    !   mp_np       number of running processes, decided at run-time
    !***********************************************************************
    -     use par_mod, only: maxpart, numwfmem, dep_prec, maxreceptor, maxspec
    -     use com_mod, only: mpi_mode, verbosity, creceptor0
    +     use par_mod, only: maxpart, numwfmem, dep_prec
    +     use com_mod, only: mpi_mode, verbosity

        implicit none
    …

    ! Set maxpart per process
    - ! ESO 08/2016: Increase maxpart per process, in case of unbalanced distribution
    + ! eso 08/2016: Increase maxpart per process, in case of unbalanced distribution
        maxpart_mpi=int(mp_maxpart_factor*real(maxpart)/real(mp_partgroup_np))
        if (mp_np == 1) maxpart_mpi = maxpart
    …
        end if

    - ! Allocate array for number of particles per process
    -     allocate(npart_per_process(0:mp_partgroup_np-1))
    -
    - ! Write whether MPI_IN_PLACE is used or not
    - #ifdef USE_MPIINPLACE
    -     if (lroot) write(*,*) 'Using MPI_IN_PLACE operations'
    - #else
    -     if (lroot) allocate(creceptor0(maxreceptor,maxspec))
    -     if (lroot) write(*,*) 'Not using MPI_IN_PLACE operations'
    - #endif
        goto 101

    …
    ! invalid particles at the end of the arrays

    - 601 do i=numpart, 1, -1
    + 601 do i=num_part, 1, -1
          if (itra1(i).eq.-999999999) then
            numpart=numpart-1
    …
        integer :: i,jj,nn, num_part=1,m,imin, num_trans
        logical :: first_iter
    -     integer,allocatable,dimension(:) :: idx_arr
    +     integer,allocatable,dimension(:) :: numparticles_mpi, idx_arr
        real,allocatable,dimension(:) :: sorted ! TODO: we don't really need this

    …
    ! All processes exchange information on number of particles
    !****************************************************************************
    -     allocate( idx_arr(0:mp_partgroup_np-1), sorted(0:mp_partgroup_np-1))
    -
    -     call MPI_Allgather(numpart, 1, MPI_INTEGER, npart_per_process, &
    +     allocate(numparticles_mpi(0:mp_partgroup_np-1), &
    +          &idx_arr(0:mp_partgroup_np-1), sorted(0:mp_partgroup_np-1))
    +
    +     call MPI_Allgather(numpart, 1, MPI_INTEGER, numparticles_mpi, &
             & 1, MPI_INTEGER, mp_comm_used, mp_ierr)


    ! Sort from lowest to highest
    ! Initial guess: correct order
    -     sorted(:) = npart_per_process(:)
    +     sorted(:) = numparticles_mpi(:)
        do i=0, mp_partgroup_np-1
          idx_arr(i) = i
        end do
    -
    - ! Do not rebalance particles for ipout=3
    -     if (ipout.eq.3) return

    ! For each successive element in index array, see if a lower value exists
    …
        m=mp_partgroup_np-1 ! index for last sorted process (most particles)
        do i=0,mp_partgroup_np/2-1
    -       num_trans = npart_per_process(idx_arr(m)) - npart_per_process(idx_arr(i))
    +       num_trans = numparticles_mpi(idx_arr(m)) - numparticles_mpi(idx_arr(i))
          if (mp_partid.eq.idx_arr(m).or.mp_partid.eq.idx_arr(i)) then
    -         if ( npart_per_process(idx_arr(m)).gt.mp_min_redist.and.&
    -              & real(num_trans)/real(npart_per_process(idx_arr(m))).gt.mp_redist_fract) then
    +         if ( numparticles_mpi(idx_arr(m)).gt.mp_min_redist.and.&
    +              & real(num_trans)/real(numparticles_mpi(idx_arr(m))).gt.mp_redist_fract) then
    ! DBG
    -           ! write(*,*) 'mp_partid, idx_arr(m), idx_arr(i), mp_min_redist, num_trans, npart_per_process', &
    -           !      &mp_partid, idx_arr(m), idx_arr(i), mp_min_redist, num_trans, npart_per_process
    +           ! write(*,*) 'mp_partid, idx_arr(m), idx_arr(i), mp_min_redist, num_trans, numparticles_mpi', &
    +           !      &mp_partid, idx_arr(m), idx_arr(i), mp_min_redist, num_trans, numparticles_mpi
    ! DBG
              call mpif_redist_part(itime, idx_arr(m), idx_arr(i), num_trans/2)
    …
        end do

    -     deallocate(idx_arr, sorted)
    +     deallocate(numparticles_mpi, idx_arr, sorted)

      end subroutine mpif_calculate_part_redist
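
Under either variable name, mpif_calculate_part_redist follows the same scheme: gather every process's particle count, sort the process indices by count, then pair the most-loaded process with the least-loaded one and move half the difference, but only when the sender holds more than mp_min_redist particles and the imbalance exceeds mp_redist_fract. A self-contained sketch of that pairing logic; the threshold values and the insertion sort are assumptions, and the actual transfer call (mpif_redist_part) is left as a comment:

      ! Sketch of the pairing step: gather counts, match extremes, ship
      ! half the surplus. Thresholds are made up for illustration.
      subroutine sketch_part_redist(numpart, np, comm)
        use mpi
        implicit none
        integer, intent(in) :: numpart, np, comm
        integer, allocatable :: counts(:), idx(:)
        integer :: i, j, m, tmp, num_trans, ierr
        integer, parameter :: min_redist = 100000
        real, parameter :: redist_fract = 0.2

        allocate(counts(0:np-1), idx(0:np-1))
        call MPI_Allgather(numpart, 1, MPI_INTEGER, counts, 1, &
             & MPI_INTEGER, comm, ierr)

        ! Sort indices so counts(idx(0)) <= ... <= counts(idx(np-1))
        idx = [(i, i=0, np-1)]
        do i = 1, np-1
          do j = i, 1, -1
            if (counts(idx(j)) < counts(idx(j-1))) then
              tmp = idx(j); idx(j) = idx(j-1); idx(j-1) = tmp
            end if
          end do
        end do

        ! Pair most-loaded idx(m) with least-loaded idx(i)
        m = np-1
        do i = 0, np/2-1
          num_trans = counts(idx(m)) - counts(idx(i))
          if (counts(idx(m)) > min_redist .and. &
               & real(num_trans)/real(counts(idx(m))) > redist_fract) then
            ! the two ranks involved would transfer num_trans/2 particles,
            ! e.g. call mpif_redist_part(itime, idx(m), idx(i), num_trans/2)
          end if
          m = m-1
        end do
        deallocate(counts, idx)
      end subroutine sketch_part_redist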
     
    …
        if (readclouds) then
          j=j+1
    -       call MPI_Irecv(ctwc(:,:,mind),d2s1*5,mp_sp,id_read,MPI_ANY_TAG,&
    +       call MPI_Irecv(ctwc(:,:,mind),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
               &MPI_COMM_WORLD,reqs(j),mp_ierr)
          if (mp_ierr /= 0) goto 600
    …
          if (readclouds) then
            j=j+1
    -         call MPI_Irecv(ctwcn(:,:,mind,k),d2s1*5,mp_sp,id_read,MPI_ANY_TAG,&
    +         call MPI_Irecv(ctwcn(:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
                 &MPI_COMM_WORLD,reqs(j),mp_ierr)
            if (mp_ierr /= 0) goto 600
    …
        end if

    - ! Receptor concentrations
    + #else
    +
    +       call MPI_Reduce(gridunc, gridunc0, grid_size3d, mp_sp, MPI_SUM, id_root, &
    +            & mp_comm_used, mp_ierr)
    +       if (lroot) gridunc = gridunc0
    +
    + #endif
    +
    +     if ((WETDEP).and.(ldirect.gt.0)) then
    +       call MPI_Reduce(wetgridunc, wetgridunc0, grid_size2d, mp_cp, MPI_SUM, id_root, &
    +            & mp_comm_used, mp_ierr)
    +       if (mp_ierr /= 0) goto 600
    +     end if
    +
    +     if ((DRYDEP).and.(ldirect.gt.0)) then
    +       call MPI_Reduce(drygridunc, drygridunc0, grid_size2d, mp_cp, MPI_SUM, id_root, &
    +            & mp_comm_used, mp_ierr)
    +       if (mp_ierr /= 0) goto 600
    +     end if
    +
    + ! Receptor concentrations
        if (lroot) then
          call MPI_Reduce(MPI_IN_PLACE,creceptor,rcpt_size,mp_sp,MPI_SUM,id_root, &
    …
             & mp_comm_used,mp_ierr)
        end if
    -
    - #else
    -
    -       call MPI_Reduce(gridunc, gridunc0, grid_size3d, mp_sp, MPI_SUM, id_root, &
    -            & mp_comm_used, mp_ierr)
    -       if (mp_ierr /= 0) goto 600
    -       if (lroot) gridunc = gridunc0
    -
    -       call MPI_Reduce(creceptor, creceptor0,rcpt_size,mp_sp,MPI_SUM,id_root, &
    -            & mp_comm_used,mp_ierr)
    -       if (mp_ierr /= 0) goto 600
    -       if (lroot) creceptor = creceptor0
    -
    - #endif
    -
    -     if ((WETDEP).and.(ldirect.gt.0)) then
    -       call MPI_Reduce(wetgridunc, wetgridunc0, grid_size2d, mp_cp, MPI_SUM, id_root, &
    -            & mp_comm_used, mp_ierr)
    -       if (mp_ierr /= 0) goto 600
    -     end if
    -
    -     if ((DRYDEP).and.(ldirect.gt.0)) then
    -       call MPI_Reduce(drygridunc, drygridunc0, grid_size2d, mp_cp, MPI_SUM, id_root, &
    -            & mp_comm_used, mp_ierr)
    -       if (mp_ierr /= 0) goto 600
    -     end if
    -

        if (mp_measure_time) call mpif_mtime('commtime',1)
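
The two sides of the reduction hunk differ mainly in where the root's receive buffer comes from: with USE_MPIINPLACE the root passes MPI_IN_PLACE and accumulates into its own array, while the #else branch reduces into a separate buffer (gridunc0, creceptor0) and copies it back on the root. A minimal runnable example of both patterns, with the gridunc name borrowed from the hunk and an arbitrary array size:

      ! Minimal example of the two root-reduction patterns used above.
      program reduce_patterns_sketch
        use mpi
        implicit none
        integer, parameter :: n = 8
        real :: gridunc(n), gridunc0(n)
        integer :: rank, ierr
        logical :: lroot

        call MPI_Init(ierr)
        call MPI_Comm_rank(MPI_COMM_WORLD, rank, ierr)
        lroot = (rank == 0)
        gridunc = real(rank+1)

        ! Pattern 1 (USE_MPIINPLACE): root reduces into its own buffer.
        if (lroot) then
          call MPI_Reduce(MPI_IN_PLACE, gridunc, n, MPI_REAL, MPI_SUM, &
               & 0, MPI_COMM_WORLD, ierr)
        else
          call MPI_Reduce(gridunc, gridunc0, n, MPI_REAL, MPI_SUM, &
               & 0, MPI_COMM_WORLD, ierr)
        end if

        ! Pattern 2 (the #else branch): reduce into a separate buffer,
        ! then copy back on the root.
        call MPI_Reduce(gridunc, gridunc0, n, MPI_REAL, MPI_SUM, &
             & 0, MPI_COMM_WORLD, ierr)
        if (lroot) gridunc = gridunc0

        call MPI_Finalize(ierr)
      end program reduce_patterns_sketch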
     
    …
          end if

    -     case ('readwind')
    -       if (imode.eq.0) then
    -         call cpu_time(mp_readwind_time_beg)
    -         mp_readwind_wtime_beg = mpi_wtime()
    -       else
    -         call cpu_time(mp_readwind_time_end)
    -         mp_readwind_wtime_end = mpi_wtime()
    -
    -         mp_readwind_time_total = mp_readwind_time_total + &
    -              &(mp_readwind_time_end - mp_readwind_time_beg)
    -         mp_readwind_wtime_total = mp_readwind_wtime_total + &
    -              &(mp_readwind_wtime_end - mp_readwind_wtime_beg)
    -       end if
    +     case ('readwind')
    +       if (imode.eq.0) then
    +         call cpu_time(mp_readwind_time_beg)
    +         mp_readwind_wtime_beg = mpi_wtime()
    +       else
    +         call cpu_time(mp_readwind_time_end)
    +         mp_readwind_wtime_end = mpi_wtime()
    +
    +         mp_readwind_time_total = mp_readwind_time_total + &
    +              &(mp_readwind_time_end - mp_readwind_time_beg)
    +         mp_readwind_wtime_total = mp_readwind_wtime_total + &
    +              &(mp_readwind_wtime_end - mp_readwind_wtime_beg)
    +       end if

        case ('commtime')
    …
              write(*,FMT='(A60,TR1,F9.2)') 'TOTAL CPU TIME FOR GETFIELDS:',&
                   & mp_getfields_time_total
    - !          write(*,FMT='(A60,TR1,F9.2)') 'TOTAL WALL TIME FOR READWIND:',&
    - !               & mp_readwind_wtime_total
    - !          write(*,FMT='(A60,TR1,F9.2)') 'TOTAL CPU TIME FOR READWIND:',&
    - !               & mp_readwind_time_total
    +          write(*,FMT='(A60,TR1,F9.2)') 'TOTAL WALL TIME FOR READWIND:',&
    +               & mp_readwind_wtime_total
    +          write(*,FMT='(A60,TR1,F9.2)') 'TOTAL CPU TIME FOR READWIND:',&
    +               & mp_readwind_time_total
              write(*,FMT='(A60,TR1,F9.2)') 'TOTAL WALL TIME FOR FILE IO:',&
                   & mp_io_wtime_total
  • src/netcdf_output_mod.f90

    r0a94e13 → r4ad96c5
      character(len=255), parameter :: institution = 'NILU'

    - integer            :: tpointer=0
    + integer            :: tpointer
      character(len=255) :: ncfname, ncfnamen

  • src/par_mod.f90

    r0a94e13 → r79e0349

      integer,parameter :: unitpath=1, unitcommand=1, unitageclasses=1, unitgrid=1
    - integer,parameter :: unitavailab=1, unitreleases=88, unitpartout=93, unitpartout_average=105
    + integer,parameter :: unitavailab=1, unitreleases=88, unitpartout=93
      integer,parameter :: unitpartin=93, unitflux=98, unitouttraj=96
      integer,parameter :: unitvert=1, unitoro=1, unitpoin=1, unitreceptor=1
  • src/partoutput.f90

    r0a94e13 → rd2a5a83
      !**************************************

    - if (ipout.eq.1.or.ipout.eq.3) then
    + if (ipout.eq.1) then
        open(unitpartout,file=path(2)(1:length(2))//'partposit_'//adate// &
             atime,form='unformatted')
  • src/partoutput_mpi.f90

    r0a94e13 → rd2a5a83
      !**************************************

    - if (ipout.eq.1.or.ipout.eq.3) then
    + if (ipout.eq.1) then
        open(unitpartout,file=path(2)(1:length(2))//'partposit_'//adate// &
             atime,form='unformatted',status=file_stat,position='append')
  • src/readcommand.f90

    r0a94e13 → r20963b1
      ! ipin                 1 continue simulation with dumped particle data, 0 no *
      ! ipout                0 no particle dump, 1 every output time, 3 only at end*
    - ! ipoutfac             increase particle dump interval by factor (default 1) *
      ! itsplit [s]          time constant for particle splitting                  *
      ! loutaver [s]         concentration output is an average over loutaver      *
    …
      iout, &
      ipout, &
    - ipoutfac, &
      lsubgrid, &
      lconvection, &
    …
      iout=3
      ipout=0
    - ipoutfac=1
      lsubgrid=1
      lconvection=1
    …
      !****************************************************************

    - if ((ipout.ne.0).and.(ipout.ne.1).and.(ipout.ne.2).and.(ipout.ne.3)) then
    + if ((ipout.ne.0).and.(ipout.ne.1).and.(ipout.ne.2)) then
        write(*,*) ' #### FLEXPART MODEL ERROR! FILE COMMAND:     #### '
    -   write(*,*) ' #### IPOUT MUST BE 0, 1, 2 OR 3!             #### '
    +   write(*,*) ' #### IPOUT MUST BE 1, 2 OR 3!                #### '
        stop
      endif
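
Note in passing that on the right-hand side the accepted values (0, 1 and 2) and the error message ('IPOUT MUST BE 1, 2 OR 3!') no longer agree; the left-hand side's message matches its 0-3 check.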
  • src/timemanager.f90

    r0a94e13 → rc7d1052
    45      format(i13,' Seconds simulated: ',i13, ' Particles:    Uncertainty: ',3f7.3)
    46      format(' Simulated ',f7.1,' hours (',i13,' s), ',i13, ' particles')
    -         if (ipout.ge.1) then
    -           if (mod(itime,ipoutfac*loutstep).eq.0) call partoutput(itime) ! dump particle positions
    -           if (ipout.eq.3) call partoutput_average(itime) ! dump particle positions
    -         endif
    +         if (ipout.ge.1) call partoutput(itime)    ! dump particle positions
              loutnext=loutnext+loutstep
              loutstart=loutnext-loutaver/2
    …
    !        write (*,*) 'advance: ',prob(1),xmass1(j,1),ztra1(j)

    - ! Calculate average position for particle dump output
    - !****************************************************
    -
    -         if (ipout.eq.3) call partpos_average(itime,j)
    -
    -
      ! Calculate the gross fluxes across layer interfaces
      !***************************************************
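
The left-hand revision decouples the dump frequency from the output frequency: partoutput runs only when itime is a multiple of ipoutfac*loutstep, so with loutstep = 3600 s and ipoutfac = 4 (illustrative values) positions are written every 14400 s, i.e. at every fourth concentration output, and ipout=3 additionally triggers the time-averaged dump. The right-hand revision dumps at every output interval whenever ipout >= 1.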
  • src/timemanager_mpi.f90

    r0c8c7f2 → r20963b1
      integer :: j,ks,kp,l,n,itime=0,nstop,nstop1,memstat=0
    ! integer :: ksp
    - integer :: ip,irec
    + integer :: ip
      integer :: loutnext,loutstart,loutend
      integer :: ix,jy,ldeltat,itage,nage,idummy
    …
    ! Measure time spent in timemanager
      if (mp_measure_time) call mpif_mtime('timemanager',0)
    -

    ! First output for time 0
    …
                    end if

    -               else
    +               else  ! :TODO: check for zeroing in the netcdf module
                      call concoutput_surf_nest(itime,outnum)
                    end if
    …
    46      format(' Simulated ',f7.1,' hours (',i13,' s), ',i13, ' particles')
            if (ipout.ge.1) then
    -         if (mp_measure_time) call mpif_mtime('iotime',0)
    -         irec=0
              do ip=0, mp_partgroup_np-1
    -           if (ip.eq.mp_partid) then
    -             if (mod(itime,ipoutfac*loutstep).eq.0) call partoutput(itime) ! dump particle positions
    -             if (ipout.eq.3) call partoutput_average(itime,irec) ! dump particle positions
    -           endif
    -           if (ipout.eq.3) irec=irec+npart_per_process(ip)
    +           if (ip.eq.mp_partid) call partoutput(itime) ! dump particle positions
                call mpif_mpi_barrier
              end do
    -         if (mp_measure_time) call mpif_mtime('iotime',1)
            end if

    …
            if (mp_measure_time) call mpif_mtime('advance',1)

    - ! Calculate average position for particle dump output
    - !****************************************************
    -
    -         if (ipout.eq.3) call partpos_average(itime,j)
    -

    ! Calculate the gross fluxes across layer interfaces
    …
        do ip=0, mp_partgroup_np-1
          if (ip.eq.mp_partid) then
    -         if (mp_dbg_mode) write(*,*) 'call partoutput(itime), proc, mp_partid',ip,mp_partid
    +         !if (mp_dbg_mode) write(*,*) 'call partoutput(itime), proc, mp_partid',ip,mp_partid
            call partoutput(itime)    ! dump particle positions
          end if
  • src/verttransform_ecmwf.f90

    r0a94e13 → r437c545
      use com_mod
      use cmapf_mod, only: cc2gll
    + !  use mpi_mod

      implicit none