Changeset d005a67 in flexpart.git


Timestamp: May 21, 2019, 1:09:18 PM
Author: Sabine <sabine.eckhardt@…>
Branches: master, 10.4.1_pesei, GFS_025, bugfixes+enhancements, dev, release-10, release-10.4.1, scaling-bug
Children: f963113, 0a98afe
Parents: 5d74ed9 (diff), 0c8c7f2 (diff)
Note: this is a merge changeset; the changes displayed below correspond to the merge itself. Use the (diff) links above to see all the changes relative to each parent.
Message:

Merge remote-tracking branch 'refs/remotes/origin/dev' into dev

Files: 3 added, 19 edited

Legend (markers used in the diffs below):

  ' ' Unmodified
  '+' Added
  '-' Removed
  • options/COMMAND

    r2753a5c r0a94e13
     IFINE=                 4, ! Reduction for time step in vertical transport, used only if CTL>1
     IOUT=                  1, ! Output type: [1]mass 2]pptv 3]1&2 4]plume 5]1&4, +8 for NetCDF output
    -IPOUT=                 0, ! Particle position output: 0]no 1]every output 2]only at end
    +IPOUT=                 0, ! Particle position output: 0]no 1]every output 2]only at end 3]time averaged
     LSUBGRID=              0, ! Increase of ABL heights due to sub-grid scale orographic variations;[0]off 1]on
     LCONVECTION=           1, ! Switch for convection parameterization;0]off [1]on
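    For reference, a minimal COMMAND namelist fragment exercising the new option; the values are hypothetical, and IPOUTFAC is the dump-interval factor added in readcommand.f90 below:

    &COMMAND
     IOUT=       1,
     IPOUT=      3, ! time-averaged particle position output (new in this changeset)
     IPOUTFAC=   2, ! hypothetical value: dump particles only every 2nd output interval
     /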
  • src/FLEXPART.f90

    r2753a5c r0a94e13
       integer :: detectformat

    -
    -
    -  ! Initialize arrays in com_mod
    -  !*****************************
    -  call com_mod_allocate_part(maxpart)

    -
      ! Generate a large number of random numbers
      !******************************************
    …
      endif

    +  ! Initialize arrays in com_mod
    +  !*****************************
    +  call com_mod_allocate_part(maxpart)
    +
    +
      ! Read the age classes to be used
      !********************************
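    The allocation call moves from before to after the input files are read, plausibly because com_mod_allocate_part now depends on ipout (see com_mod.f90 below, where the averaging arrays are only allocated when ipout=3). Schematically, the new ordering (paraphrased, not verbatim source):

      call readcommand                       ! reads COMMAND: sets ipout, ipoutfac, ...
      ! ... further input reading ...
      call com_mod_allocate_part(maxpart)    ! can now allocate the ipout=3 arrays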
  • src/FLEXPART_MPI.f90

    r20963b1 r0c8c7f2
      if (mp_measure_time) call mpif_mtime('flexpart',0)

    -  ! Initialize arrays in com_mod
    -  !*****************************
    -
    -  if(.not.(lmpreader.and.lmp_use_reader)) call com_mod_allocate_part(maxpart_mpi)
    -
    -
    +
      ! Generate a large number of random numbers
      !******************************************
    …
      endif

    +  ! Initialize arrays in com_mod
    +  !*****************************
    +
    +  if(.not.(lmpreader.and.lmp_use_reader)) call com_mod_allocate_part(maxpart_mpi)
    +

     ! Read the age classes to be used
    …
      end if ! (mpif_pid == 0)

    -  if (mp_measure_time) call mpif_mtime('iotime',0)
    +  if (mp_measure_time) call mpif_mtime('iotime',1)

      if (verbosity.gt.0 .and. lroot) then
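    The last hunk looks like a timer fix: judging from the 'readwind' case of mpif_mtime further down in mpi_mod.f90, a second argument of 0 starts a timing section and 1 stops it, so the old code started the 'iotime' timer twice instead of closing it. The intended pairing:

      if (mp_measure_time) call mpif_mtime('iotime',0)   ! start I/O timing
      ! ... particle/field output ...
      if (mp_measure_time) call mpif_mtime('iotime',1)   ! stop I/O timing and accumulate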
  • src/com_mod.f90

    re9e0f06 r0a94e13

       implicit none
    +
    +

      !****************************************************************
    …

      real :: ctl,fine
    -  integer :: ifine,iout,ipout,ipin,iflux,mdomainfill
    +  integer :: ifine,iout,ipout,ipin,iflux,mdomainfill,ipoutfac
      integer :: mquasilag,nested_output,ind_source,ind_receptor
      integer :: ind_rel,ind_samp,ioutputforeachrelease,linit_cond,surf_only
    …
      ! iout     output options: 1 conc. output (ng/m3), 2 mixing ratio (pptv), 3 both
      ! ipout    particle dump options: 0 no, 1 every output interval, 2 only at end
    +  ! ipoutfac increase particle dump interval by factor (default 1)
      ! ipin     read in particle positions from dumped file from a previous run
      ! fine     real(ifine)
    …

      logical :: gdomainfill
    -
      ! gdomainfill             .T., if domain-filling is global, .F. if not

    …
      real :: receptorarea(maxreceptor)
      real :: creceptor(maxreceptor,maxspec)
    +  real, allocatable, dimension(:,:) :: creceptor0
      character(len=16) :: receptorname(maxreceptor)
      integer :: numreceptor
    …
      real, allocatable, dimension(:,:) :: xmass1
      real, allocatable, dimension(:,:) :: xscav_frac1
    +
    +! Variables used for writing out interval averages for partoutput
    +!****************************************************************
    +
    +  integer, allocatable, dimension(:) :: npart_av
    +  real, allocatable, dimension(:) :: part_av_cartx,part_av_carty,part_av_cartz,part_av_z,part_av_topo
    +  real, allocatable, dimension(:) :: part_av_pv,part_av_qv,part_av_tt,part_av_rho,part_av_tro,part_av_hmix
    +  real, allocatable, dimension(:) :: part_av_uu,part_av_vv,part_av_energy

      ! eso: Moved from timemanager
    …
             & idt(nmpart),itramem(nmpart),itrasplit(nmpart),&
             & xtra1(nmpart),ytra1(nmpart),ztra1(nmpart),&
    -         & xmass1(nmpart, maxspec),&
    -         & checklifetime(nmpart,maxspec), species_lifetime(maxspec,2))!CGZ-lifetime
    +         & xmass1(nmpart, maxspec))  ! ,&
    +!         & checklifetime(nmpart,maxspec), species_lifetime(maxspec,2))!CGZ-lifetime
    +
    +    if (ipout.eq.3) then
    +      allocate(npart_av(nmpart),part_av_cartx(nmpart),part_av_carty(nmpart),&
    +           & part_av_cartz(nmpart),part_av_z(nmpart),part_av_topo(nmpart))
    +      allocate(part_av_pv(nmpart),part_av_qv(nmpart),part_av_tt(nmpart),&
    +           & part_av_rho(nmpart),part_av_tro(nmpart),part_av_hmix(nmpart))
    +      allocate(part_av_uu(nmpart),part_av_vv(nmpart),part_av_energy(nmpart))
    +    end if


        allocate(uap(nmpart),ucp(nmpart),uzp(nmpart),us(nmpart),&
             & vs(nmpart),ws(nmpart),cbt(nmpart))
    -
    +
      end subroutine com_mod_allocate_part

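    The new part_av_* arrays carry per-particle running sums for the ipout=3 time-averaged dump; the three added files (partpos_average.f90, partoutput_average.f90, partoutput_average_mpi.f90) are not displayed in this merge view, so the following is only a sketch of the accumulate-then-average pattern these declarations imply, with a hypothetical subroutine name:

      ! Hypothetical sketch, not the actual partpos_average.f90
      subroutine partpos_average_sketch(j)
        use com_mod, only: npart_av, part_av_z, ztra1
        implicit none
        integer, intent(in) :: j                 ! particle index
        npart_av(j)  = npart_av(j) + 1           ! count samples within the output interval
        part_av_z(j) = part_av_z(j) + ztra1(j)   ! running sum; partoutput_average divides by npart_av
      end subroutine partpos_average_sketch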
  • src/init_domainfill.f90

    rb5127f9 r0a94e13
        endif
      endif
    +
    +! Exit here if resuming a run from particle dump
    +!***********************************************
    +  if (gdomainfill.and.ipin.ne.0) return

     ! Do not release particles twice (i.e., not at both in the leftmost and rightmost
    …
     !***************************************************************************

    -  if (ipin.eq.1) then
    +  if ((ipin.eq.1).and.(.not.gdomainfill)) then
        open(unitboundcond,file=path(2)(1:length(2))//'boundcond.bin', &
             form='unformatted')
  • src/init_domainfill_mpi.f90

    rb5127f9 r328fdf9
      endif

    +! Exit here if resuming a run from particle dump
    +!***********************************************
    +  if (gdomainfill.and.ipin.ne.0) return
    +
     ! Do not release particles twice (i.e., not at both in the leftmost and rightmost
     ! grid cell) for a global domain
    …
            colmass(ix,jy)=(pp(1)-pp(nz))/ga*gridarea(jy)
            colmasstotal=colmasstotal+colmass(ix,jy)
    -
          end do
        end do
    …

     ! eso TODO: only needed for root process
    -    if (ipin.eq.1) then
    +    if ((ipin.eq.1).and.(.not.gdomainfill)) then
          open(unitboundcond,file=path(2)(1:length(2))//'boundcond.bin', &
               form='unformatted')
    …
        endif

    -    numpart = numpart/mp_partgroup_np
    -    if (mod(numpart,mp_partgroup_np).ne.0) numpart=numpart+1
    -
    -  else ! Allocate dummy arrays for receiving processes
    -    allocate(itra1_tmp(nullsize),npoint_tmp(nullsize),nclass_tmp(nullsize),&
    -         & idt_tmp(nullsize),itramem_tmp(nullsize),itrasplit_tmp(nullsize),&
    -         & xtra1_tmp(nullsize),ytra1_tmp(nullsize),ztra1_tmp(nullsize),&
    -         & xmass1_tmp(nullsize, nullsize))
    +    if (ipin.eq.0) then
    +      numpart = numpart/mp_partgroup_np
    +      if (mod(numpart,mp_partgroup_np).ne.0) numpart=numpart+1
    +    end if
    +
    +  else ! Allocate dummy arrays for receiving processes
    +    if (ipin.eq.0) then
    +      allocate(itra1_tmp(nullsize),npoint_tmp(nullsize),nclass_tmp(nullsize),&
    +           & idt_tmp(nullsize),itramem_tmp(nullsize),itrasplit_tmp(nullsize),&
    +           & xtra1_tmp(nullsize),ytra1_tmp(nullsize),ztra1_tmp(nullsize),&
    +           & xmass1_tmp(nullsize, nullsize))
    +    end if

    -  end if ! end if(lroot) 
    +  end if ! end if(lroot)
    +


     ! Distribute particles to other processes (numpart is 'per-process', not total)
    -  call MPI_Bcast(numpart, 1, MPI_INTEGER, id_root, mp_comm_used, mp_ierr)
    -! eso TODO: xmassperparticle: not necessary to send
    -  call MPI_Bcast(xmassperparticle, 1, mp_sp, id_root, mp_comm_used, mp_ierr)
    -  call mpif_send_part_properties(numpart)
    +! Only if not restarting from previous run
    +  if (ipin.eq.0) then
    +    call MPI_Bcast(numpart, 1, MPI_INTEGER, id_root, mp_comm_used, mp_ierr)
    +    call mpif_send_part_properties(npart(1)/mp_partgroup_np)

     ! Deallocate the temporary arrays used for all particles
    -  deallocate(itra1_tmp,npoint_tmp,nclass_tmp,idt_tmp,itramem_tmp,&
    +    deallocate(itra1_tmp,npoint_tmp,nclass_tmp,idt_tmp,itramem_tmp,&
             & itrasplit_tmp,xtra1_tmp,ytra1_tmp,ztra1_tmp,xmass1_tmp)
    +  end if

  • src/makefile

    r7123c70 r0a94e13
     OBJECTS_SERIAL = \
             releaseparticles.o      partoutput.o \
    +        partoutput_average.o \
             conccalc.o \
             init_domainfill.o       concoutput.o  \
    …
     ## For MPI version
     OBJECTS_MPI = releaseparticles_mpi.o partoutput_mpi.o \
    -        conccalc_mpi.o \
    +        partoutput_average_mpi.o conccalc_mpi.o \
             init_domainfill_mpi.o concoutput_mpi.o  \
             timemanager_mpi.o FLEXPART_MPI.o        \
    …
     advance.o               initialize.o            \
     writeheader.o           writeheader_txt.o       \
    -writeprecip.o \
    +partpos_average.o       writeprecip.o \
     writeheader_surf.o      assignland.o\
     part0.o                 gethourlyOH.o\
    …
     part0.o: par_mod.o
     partdep.o: par_mod.o
    +partpos_average.o: com_mod.o par_mod.o
     partoutput.o: com_mod.o par_mod.o
    +partoutput_average.o: com_mod.o par_mod.o
    +partoutput_average_mpi.o: com_mod.o par_mod.o mpi_mod.o
     partoutput_mpi.o: com_mod.o mpi_mod.o par_mod.o
     partoutput_short.o: com_mod.o par_mod.o
  • src/mpi_mod.f90

    r0ecc1fe r0c8c7f2
     ! Variables for MPI processes in the 'particle' group
       integer, allocatable, dimension(:) :: mp_partgroup_rank
    +  integer, allocatable, dimension(:) :: npart_per_process
       integer :: mp_partgroup_comm, mp_partgroup_pid, mp_partgroup_np

    …
     ! mp_time_barrier   Measure MPI barrier time
     ! mp_exact_numpart  Use an extra MPI communication to give the exact number of particles
    -!                   to standard output (this does *not* otherwise affect the simulation)
    +!                   to standard output (this does not otherwise affect the simulation)
       logical, parameter :: mp_dbg_mode = .false.
       logical, parameter :: mp_dev_mode = .false.
    …
     !   mp_np       number of running processes, decided at run-time
     !***********************************************************************
    -    use par_mod, only: maxpart, numwfmem, dep_prec
    -    use com_mod, only: mpi_mode, verbosity
    +    use par_mod, only: maxpart, numwfmem, dep_prec, maxreceptor, maxspec
    +    use com_mod, only: mpi_mode, verbosity, creceptor0

         implicit none
    …

     ! Set maxpart per process
    -! eso 08/2016: Increase maxpart per process, in case of unbalanced distribution
    +! ESO 08/2016: Increase maxpart per process, in case of unbalanced distribution
         maxpart_mpi=int(mp_maxpart_factor*real(maxpart)/real(mp_partgroup_np))
         if (mp_np == 1) maxpart_mpi = maxpart
    …
         end if

    +! Allocate array for number of particles per process
    +    allocate(npart_per_process(0:mp_partgroup_np-1))
    +
    +! Write whether MPI_IN_PLACE is used or not
    +#ifdef USE_MPIINPLACE
    +    if (lroot) write(*,*) 'Using MPI_IN_PLACE operations'
    +#else
    +    if (lroot) allocate(creceptor0(maxreceptor,maxspec))
    +    if (lroot) write(*,*) 'Not using MPI_IN_PLACE operations'
    +#endif
         goto 101

    …
     ! invalid particles at the end of the arrays

    -601 do i=num_part, 1, -1
    +601 do i=numpart, 1, -1
           if (itra1(i).eq.-999999999) then
             numpart=numpart-1
    …
         integer :: i,jj,nn, num_part=1,m,imin, num_trans
         logical :: first_iter
    -    integer,allocatable,dimension(:) :: numparticles_mpi, idx_arr
    +    integer,allocatable,dimension(:) :: idx_arr
         real,allocatable,dimension(:) :: sorted ! TODO: we don't really need this

    …
     ! All processes exchange information on number of particles
     !****************************************************************************
    -    allocate(numparticles_mpi(0:mp_partgroup_np-1), &
    -         &idx_arr(0:mp_partgroup_np-1), sorted(0:mp_partgroup_np-1))
    -
    -    call MPI_Allgather(numpart, 1, MPI_INTEGER, numparticles_mpi, &
    +    allocate( idx_arr(0:mp_partgroup_np-1), sorted(0:mp_partgroup_np-1))
    +
    +    call MPI_Allgather(numpart, 1, MPI_INTEGER, npart_per_process, &
              & 1, MPI_INTEGER, mp_comm_used, mp_ierr)

    …
     ! Sort from lowest to highest
     ! Initial guess: correct order
    -    sorted(:) = numparticles_mpi(:)
    +    sorted(:) = npart_per_process(:)
         do i=0, mp_partgroup_np-1
           idx_arr(i) = i
         end do
    +
    +! Do not rebalance particles for ipout=3
    +    if (ipout.eq.3) return

     ! For each successive element in index array, see if a lower value exists
    …
         m=mp_partgroup_np-1 ! index for last sorted process (most particles)
         do i=0,mp_partgroup_np/2-1
    -      num_trans = numparticles_mpi(idx_arr(m)) - numparticles_mpi(idx_arr(i))
    +      num_trans = npart_per_process(idx_arr(m)) - npart_per_process(idx_arr(i))
           if (mp_partid.eq.idx_arr(m).or.mp_partid.eq.idx_arr(i)) then
    -        if ( numparticles_mpi(idx_arr(m)).gt.mp_min_redist.and.&
    -             & real(num_trans)/real(numparticles_mpi(idx_arr(m))).gt.mp_redist_fract) then
    +        if ( npart_per_process(idx_arr(m)).gt.mp_min_redist.and.&
    +             & real(num_trans)/real(npart_per_process(idx_arr(m))).gt.mp_redist_fract) then
     ! DBG
    -          ! write(*,*) 'mp_partid, idx_arr(m), idx_arr(i), mp_min_redist, num_trans, numparticles_mpi', &
    -          !      &mp_partid, idx_arr(m), idx_arr(i), mp_min_redist, num_trans, numparticles_mpi
    +          ! write(*,*) 'mp_partid, idx_arr(m), idx_arr(i), mp_min_redist, num_trans, npart_per_process', &
    +          !      &mp_partid, idx_arr(m), idx_arr(i), mp_min_redist, num_trans, npart_per_process
     ! DBG
               call mpif_redist_part(itime, idx_arr(m), idx_arr(i), num_trans/2)
    …
         end do

    -    deallocate(numparticles_mpi, idx_arr, sorted)
    +    deallocate(idx_arr, sorted)

       end subroutine mpif_calculate_part_redist
    …
         if (readclouds) then
           j=j+1
    -      call MPI_Irecv(ctwc(:,:,mind),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    +      call MPI_Irecv(ctwc(:,:,mind),d2s1*5,mp_sp,id_read,MPI_ANY_TAG,&
                &MPI_COMM_WORLD,reqs(j),mp_ierr)
           if (mp_ierr /= 0) goto 600
    …
           if (readclouds) then
             j=j+1
    -        call MPI_Irecv(ctwcn(:,:,mind,k),d2s1,mp_sp,id_read,MPI_ANY_TAG,&
    +        call MPI_Irecv(ctwcn(:,:,mind,k),d2s1*5,mp_sp,id_read,MPI_ANY_TAG,&
                  &MPI_COMM_WORLD,reqs(j),mp_ierr)
             if (mp_ierr /= 0) goto 600
     
         end if

    +  ! Receptor concentrations
    +    if (lroot) then
    +      call MPI_Reduce(MPI_IN_PLACE,creceptor,rcpt_size,mp_sp,MPI_SUM,id_root, &
    +           & mp_comm_used,mp_ierr)
    +      if (mp_ierr /= 0) goto 600
    +    else
    +      call MPI_Reduce(creceptor,0,rcpt_size,mp_sp,MPI_SUM,id_root, &
    +           & mp_comm_used,mp_ierr)
    +    end if
    +
     #else

           call MPI_Reduce(gridunc, gridunc0, grid_size3d, mp_sp, MPI_SUM, id_root, &
                & mp_comm_used, mp_ierr)
    +      if (mp_ierr /= 0) goto 600
           if (lroot) gridunc = gridunc0
    +
    +      call MPI_Reduce(creceptor, creceptor0,rcpt_size,mp_sp,MPI_SUM,id_root, &
    +           & mp_comm_used,mp_ierr)
    +      if (mp_ierr /= 0) goto 600
    +      if (lroot) creceptor = creceptor0

     #endif
    …
         end if

    -! Receptor concentrations
    -    if (lroot) then
    -      call MPI_Reduce(MPI_IN_PLACE,creceptor,rcpt_size,mp_sp,MPI_SUM,id_root, &
    -           & mp_comm_used,mp_ierr)
    -      if (mp_ierr /= 0) goto 600
    -    else
    -      call MPI_Reduce(creceptor,0,rcpt_size,mp_sp,MPI_SUM,id_root, &
    -           & mp_comm_used,mp_ierr)
    -    end if

         if (mp_measure_time) call mpif_mtime('commtime',1)
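    Read together with the USE_MPIINPLACE block added in the initialization hunk above, the receptor reduction now has two build-time variants: with MPI_IN_PLACE the root sums directly into creceptor (non-root ranks pass a dummy receive buffer, which MPI ignores on non-root), while the fallback stages the sum in the new creceptor0 array and copies it back on root. The combined pattern, assembled from the hunks above:

    #ifdef USE_MPIINPLACE
        if (lroot) then
          call MPI_Reduce(MPI_IN_PLACE,creceptor,rcpt_size,mp_sp,MPI_SUM,id_root, &
               & mp_comm_used,mp_ierr)
        else
          call MPI_Reduce(creceptor,0,rcpt_size,mp_sp,MPI_SUM,id_root, &
               & mp_comm_used,mp_ierr)
        end if
    #else
        ! Staging-buffer variant; creceptor0 is allocated on root only, in mpif_init
        call MPI_Reduce(creceptor,creceptor0,rcpt_size,mp_sp,MPI_SUM,id_root, &
             & mp_comm_used,mp_ierr)
        if (lroot) creceptor = creceptor0
    #endif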
     
           end if

    -    case ('readwind')
    -      if (imode.eq.0) then
    -        call cpu_time(mp_readwind_time_beg)
    -        mp_readwind_wtime_beg = mpi_wtime()
    -      else
    -        call cpu_time(mp_readwind_time_end)
    -        mp_readwind_wtime_end = mpi_wtime()
    -
    -        mp_readwind_time_total = mp_readwind_time_total + &
    -             &(mp_readwind_time_end - mp_readwind_time_beg)
    -        mp_readwind_wtime_total = mp_readwind_wtime_total + &
    -             &(mp_readwind_wtime_end - mp_readwind_wtime_beg)
    -      end if
    +   case ('readwind')
    +     if (imode.eq.0) then
    +       call cpu_time(mp_readwind_time_beg)
    +       mp_readwind_wtime_beg = mpi_wtime()
    +     else
    +       call cpu_time(mp_readwind_time_end)
    +       mp_readwind_wtime_end = mpi_wtime()
    +
    +       mp_readwind_time_total = mp_readwind_time_total + &
    +            &(mp_readwind_time_end - mp_readwind_time_beg)
    +       mp_readwind_wtime_total = mp_readwind_wtime_total + &
    +            &(mp_readwind_wtime_end - mp_readwind_wtime_beg)
    +     end if

         case ('commtime')
    …
               write(*,FMT='(A60,TR1,F9.2)') 'TOTAL CPU TIME FOR GETFIELDS:',&
                    & mp_getfields_time_total
    -          write(*,FMT='(A60,TR1,F9.2)') 'TOTAL WALL TIME FOR READWIND:',&
    -               & mp_readwind_wtime_total
    -          write(*,FMT='(A60,TR1,F9.2)') 'TOTAL CPU TIME FOR READWIND:',&
    -               & mp_readwind_time_total
    +!          write(*,FMT='(A60,TR1,F9.2)') 'TOTAL WALL TIME FOR READWIND:',&
    +!               & mp_readwind_wtime_total
    +!          write(*,FMT='(A60,TR1,F9.2)') 'TOTAL CPU TIME FOR READWIND:',&
    +!               & mp_readwind_time_total
               write(*,FMT='(A60,TR1,F9.2)') 'TOTAL WALL TIME FOR FILE IO:',&
                    & mp_io_wtime_total
  • src/netcdf_output_mod.f90

    r4ad96c5 r0a94e13
       character(len=255), parameter :: institution = 'NILU'

    -  integer            :: tpointer
    +  integer            :: tpointer=0
       character(len=255) :: ncfname, ncfnamen

  • src/par_mod.f90

    rdf96ea65 r0a94e13

       integer,parameter :: unitpath=1, unitcommand=1, unitageclasses=1, unitgrid=1
    -  integer,parameter :: unitavailab=1, unitreleases=88, unitpartout=93
    +  integer,parameter :: unitavailab=1, unitreleases=88, unitpartout=93, unitpartout_average=105
       integer,parameter :: unitpartin=93, unitflux=98, unitouttraj=96
       integer,parameter :: unitvert=1, unitoro=1, unitpoin=1, unitreceptor=1
  • src/partoutput.f90

    rd2a5a83 r0a94e13
       !**************************************

    -  if (ipout.eq.1) then
    +  if (ipout.eq.1.or.ipout.eq.3) then
         open(unitpartout,file=path(2)(1:length(2))//'partposit_'//adate// &
              atime,form='unformatted')
  • src/partoutput_mpi.f90

    rd2a5a83 r0a94e13
       !**************************************

    -  if (ipout.eq.1) then
    +  if (ipout.eq.1.or.ipout.eq.3) then
         open(unitpartout,file=path(2)(1:length(2))//'partposit_'//adate// &
              atime,form='unformatted',status=file_stat,position='append')
  • src/readcommand.f90

    r20963b1 r0a94e13
       ! ipin                 1 continue simulation with dumped particle data, 0 no *
       ! ipout                0 no particle dump, 1 every output time, 3 only at end*
    +  ! ipoutfac             increase particle dump interval by factor (default 1) *
       ! itsplit [s]          time constant for particle splitting                  *
       ! loutaver [s]         concentration output is an average over loutaver      *
    …
       iout, &
       ipout, &
    +  ipoutfac, &
       lsubgrid, &
       lconvection, &
    …
       iout=3
       ipout=0
    +  ipoutfac=1
       lsubgrid=1
       lconvection=1
    …
       !****************************************************************

    -  if ((ipout.ne.0).and.(ipout.ne.1).and.(ipout.ne.2)) then
    +  if ((ipout.ne.0).and.(ipout.ne.1).and.(ipout.ne.2).and.(ipout.ne.3)) then
         write(*,*) ' #### FLEXPART MODEL ERROR! FILE COMMAND:     #### '
    -    write(*,*) ' #### IPOUT MUST BE 1, 2 OR 3!                #### '
    +    write(*,*) ' #### IPOUT MUST BE 0, 1, 2 OR 3!             #### '
         stop
       endif
  • src/timemanager.f90

    rc7d1052 r0a94e13
     45      format(i13,' Seconds simulated: ',i13, ' Particles:    Uncertainty: ',3f7.3)
     46      format(' Simulated ',f7.1,' hours (',i13,' s), ',i13, ' particles')
    -        if (ipout.ge.1) call partoutput(itime)    ! dump particle positions
    +        if (ipout.ge.1) then
    +          if (mod(itime,ipoutfac*loutstep).eq.0) call partoutput(itime) ! dump particle positions
    +          if (ipout.eq.3) call partoutput_average(itime) ! dump particle positions
    +        endif
             loutnext=loutnext+loutstep
             loutstart=loutnext-loutaver/2
    …
     !        write (*,*) 'advance: ',prob(1),xmass1(j,1),ztra1(j)

    +  ! Calculate average position for particle dump output
    +  !****************************************************
    +
    +        if (ipout.eq.3) call partpos_average(itime,j)
    +
    +
       ! Calculate the gross fluxes across layer interfaces
       !***************************************************
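    The mod(itime,ipoutfac*loutstep) gate decouples the particle-dump interval from the gridded-output interval. A tiny self-contained illustration with hypothetical values (loutstep=3600 s, ipoutfac=4):

    program ipoutfac_demo
      implicit none
      integer, parameter :: loutstep = 3600, ipoutfac = 4  ! hypothetical values
      integer :: itime
      do itime = 0, 8*loutstep, loutstep                   ! every gridded-output step
        if (mod(itime, ipoutfac*loutstep) == 0) print *, 'particle dump at t =', itime, 's'
      end do
    end program ipoutfac_demo

    Only t = 0, 14400 and 28800 s trigger a particle dump, while concentration output still occurs every loutstep.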
  • src/timemanager_mpi.f90

    r20963b1 r0c8c7f2
       integer :: j,ks,kp,l,n,itime=0,nstop,nstop1,memstat=0
     ! integer :: ksp
    -  integer :: ip
    +  integer :: ip,irec
       integer :: loutnext,loutstart,loutend
       integer :: ix,jy,ldeltat,itage,nage,idummy
    …
     ! Measure time spent in timemanager
       if (mp_measure_time) call mpif_mtime('timemanager',0)
    +

     ! First output for time 0
    …
                     end if

    -              else  ! :TODO: check for zeroing in the netcdf module
    +              else
                     call concoutput_surf_nest(itime,outnum)
                   end if
    …
     46      format(' Simulated ',f7.1,' hours (',i13,' s), ',i13, ' particles')
             if (ipout.ge.1) then
    +          if (mp_measure_time) call mpif_mtime('iotime',0)
    +          irec=0
               do ip=0, mp_partgroup_np-1
    -            if (ip.eq.mp_partid) call partoutput(itime) ! dump particle positions
    +            if (ip.eq.mp_partid) then
    +              if (mod(itime,ipoutfac*loutstep).eq.0) call partoutput(itime) ! dump particle positions
    +              if (ipout.eq.3) call partoutput_average(itime,irec) ! dump particle positions
    +            endif
    +            if (ipout.eq.3) irec=irec+npart_per_process(ip)
                 call mpif_mpi_barrier
               end do
    +          if (mp_measure_time) call mpif_mtime('iotime',1)
             end if

    …
             if (mp_measure_time) call mpif_mtime('advance',1)

    +  ! Calculate average position for particle dump output
    +  !****************************************************
    +
    +        if (ipout.eq.3) call partpos_average(itime,j)
    +

     ! Calculate the gross fluxes across layer interfaces
    …
         do ip=0, mp_partgroup_np-1
           if (ip.eq.mp_partid) then
    -        !if (mp_dbg_mode) write(*,*) 'call partoutput(itime), proc, mp_partid',ip,mp_partid
    +        if (mp_dbg_mode) write(*,*) 'call partoutput(itime), proc, mp_partid',ip,mp_partid
             call partoutput(itime)    ! dump particle positions
           end if
  • src/verttransform_ecmwf.f90

    r437c545 rd005a67
       use com_mod
       use cmapf_mod, only: cc2gll
    -!  use mpi_mod

       implicit none
  • src/readwind_gfs.f90

    rdb91eb7 r5d74ed9

       ! NCEP
    -  integer :: numpt,numpu,numpv,numpw,numprh
    +  integer :: numpt,numpu,numpv,numpw,numprh,numpclwch
       real :: help, temp, ew
       real :: elev
    …
       numpw=0
       numprh=0
    +  numpclwch=0
       ifield=0
     10   ifield=ifield+1
    …
           endif
     ! SEC & IP 12/2018 read GFS clouds
    -      if(isec1(6).eq.153) then  !! CLWCR  Cloud liquid water content [kg/kg]
    -        clwch(i,j,nlev_ec-k+2,n)=zsec4(nxfield*(ny-j-1)+i+1)
    +      if((isec1(6).eq.153).and.(isec1(7).eq.100)) then  !! CLWCR  Cloud liquid water content [kg/kg]
    +         if((i.eq.0).and.(j.eq.0)) then
    +            do ii=1,nuvz
    +              if ((isec1(8)*100.0).eq.akz(ii)) numpclwch=ii
    +            end do
    +        endif
    +        help=zsec4(nxfield*(ny-j-1)+i+1)
    +        if(i.le.i180) then
    +          clwch(i179+i,j,numpclwch,n)=help
    +        else
    +          clwch(i-i181,j,numpclwch,n)=help
    +        endif
             readclouds=.true.
             sumclouds=.true.
    +!        readclouds=.false.
    +!       sumclouds=.false.
           endif
    564577
  • src/releaseparticles.f90

    r75a4ded r7873bf7
           average_timecorrect=0.
           do k=1,nspec
    -        if (zpoint1(i).gt.0.5) then      ! point source
    +        if(abs(xpoint2(i)-xpoint1(i)).lt.1.E-4.and.abs(ypoint2(i)-ypoint1(i)).lt.1.E-4) then
    +!        if (zpoint1(i).gt.0.5) then      ! point source
               timecorrect(k)=point_hour(k,nhour)*point_dow(k,ndayofweek)
             else                             ! area source
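    The point/area decision no longer keys on release height (the now commented-out zpoint1 test) but on the horizontal extent of the release box. Restated as a free-standing predicate (a hypothetical helper, not part of the changeset):

    logical function is_point_source(x1, x2, y1, y2)
      implicit none
      real, intent(in) :: x1, x2, y1, y2   ! release-box corners, degrees
      ! a release narrower than 1.E-4 degrees in both x and y counts as a point source
      is_point_source = abs(x2-x1) < 1.e-4 .and. abs(y2-y1) < 1.e-4
    end function is_point_source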
  • src/verttransform_gfs.f90

    rdb91eb7 r437c545
             if ((lsp.gt.0.01).or.(convp.gt.0.01)) then ! cloud and precipitation

    -          do kz=nz,1,-1 !go Bottom up!
    +          do kz=nz,2,-1 !go Bottom up!
                 if (clw(ix,jy,kz,n).gt. 0) then ! is in cloud
                   cloudsh(ix,jy,n)=cloudsh(ix,jy,n)+height(kz)-height(kz-1)
                   clouds(ix,jy,kz,n)=1                               ! is a cloud
                   if (lsp.ge.convp) then
    -                clouds(ix,jy,kz,n)=3                            ! lsp in-cloud
    +                clouds(ix,jy,kz,n)=3                             ! lsp in-cloud
                   else
                     clouds(ix,jy,kz,n)=2                             ! convp in-cloud
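    The changed loop bound reads like an off-by-one fix: the body computes height(kz)-height(kz-1), which for kz=1 would reference height(0), one below the first element of the height array. Schematically (assuming height is indexed from 1, as elsewhere in FLEXPART):

    do kz = nz, 2, -1   ! stopping at 2 keeps kz-1 >= 1
      ! cloudsh(ix,jy,n) = cloudsh(ix,jy,n) + height(kz) - height(kz-1)
    end do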