Changeset 0ecc1fe in flexpart.git


Timestamp:
Nov 30, 2017, 4:04:54 PM
Author:
Espen Sollum ATMOS <eso@…>
Branches:
master, 10.4.1_pesei, GFS_025, bugfixes+enhancements, dev, release-10, release-10.4.1, scaling-bug, univie
Children:
d2a5a83
Parents:
c2bd55e
Message:

Changed handling of nested input fields to be consistent with non-nested fields

Location:
src
Files:
7 edited

  • src/interpol_rain.f90 (rc7e771d → r0ecc1fe)

    @@ -56,5 +56,5 @@
       ! ix,jy                x,y coordinates of lower left subgrid point          *
       ! level                level at which interpolation shall be done           *
    -  ! memind(3)            points to the places of the wind fields              *
    +  ! iwftouse             points to the place of the wind field                *
       ! nx,ny                actual field dimensions in x,y and z direction       *
       ! nxmax,nymax,nzmax    maximum field dimensions in x,y and z direction      *

  • src/interpol_rain_nests.f90 (rdb712a8 → r0ecc1fe)

    @@ -21,5 +21,5 @@
     
      subroutine interpol_rain_nests(yy1,yy2,yy3,nxmaxn,nymaxn,nzmax, &
    -      maxnests,ngrid,nxn,nyn,memind,xt,yt,level,itime1,itime2,itime, &
    +      maxnests,ngrid,nxn,nyn,iwftouse,xt,yt,level,itime1,itime2,itime, &
           yint1,yint2,yint3)
      !                                i   i   i    i      i      i

    @@ -60,5 +60,5 @@
       ! ix,jy                x,y coordinates of lower left subgrid point          *
       ! level                level at which interpolation shall be done           *
    -  ! memind(3)            points to the places of the wind fields              *
    +  ! iwftouse             points to the place of the wind field                *
       ! nx,ny                actual field dimensions in x,y and z direction       *
       ! nxmax,nymax,nzmax    maximum field dimensions in x,y and z direction      *

    @@ -75,5 +75,5 @@
     
        integer :: maxnests,ngrid
    -   integer :: nxn(maxnests),nyn(maxnests),nxmaxn,nymaxn,nzmax,memind(numwfmem)
    +   integer :: nxn(maxnests),nyn(maxnests),nxmaxn,nymaxn,nzmax,iwftouse
        integer :: m,ix,jy,ixp,jyp,itime,itime1,itime2,level,indexh
        real :: yy1(0:nxmaxn-1,0:nymaxn-1,nzmax,numwfmem,maxnests)

    @@ -125,20 +125,21 @@
      !***********************
     
    -   do m=1,2
    -     indexh=memind(m)
    + !  do m=1,2
    + !    indexh=memind(m)
    +    indexh=iwftouse
     
    -    y1(m)=p1*yy1(ix ,jy ,level,indexh,ngrid) &
    +    y1(1)=p1*yy1(ix ,jy ,level,indexh,ngrid) &
             + p2*yy1(ixp,jy ,level,indexh,ngrid) &
             + p3*yy1(ix ,jyp,level,indexh,ngrid) &
             + p4*yy1(ixp,jyp,level,indexh,ngrid)
    -    y2(m)=p1*yy2(ix ,jy ,level,indexh,ngrid) &
    +    y2(1)=p1*yy2(ix ,jy ,level,indexh,ngrid) &
             + p2*yy2(ixp,jy ,level,indexh,ngrid) &
             + p3*yy2(ix ,jyp,level,indexh,ngrid) &
             + p4*yy2(ixp,jyp,level,indexh,ngrid)
    -    y3(m)=p1*yy3(ix ,jy ,level,indexh,ngrid) &
    +    y3(1)=p1*yy3(ix ,jy ,level,indexh,ngrid) &
             + p2*yy3(ixp,jy ,level,indexh,ngrid) &
             + p3*yy3(ix ,jyp,level,indexh,ngrid) &
             + p4*yy3(ixp,jyp,level,indexh,ngrid)
    -   end do
    + !  end do
     
     

    @@ -147,12 +148,15 @@
      !************************************
     
    -   dt1=real(itime-itime1)
    -   dt2=real(itime2-itime)
    -   dt=dt1+dt2
    + ! dt1=real(itime-itime1)
    + ! dt2=real(itime2-itime)
    + ! dt=dt1+dt2
     
    -   yint1=(y1(1)*dt2+y1(2)*dt1)/dt
    -   yint2=(y2(1)*dt2+y2(2)*dt1)/dt
    -   yint3=(y3(1)*dt2+y3(2)*dt1)/dt
    + ! yint1=(y1(1)*dt2+y1(2)*dt1)/dt
    + ! yint2=(y2(1)*dt2+y2(2)*dt1)/dt
    + ! yint3=(y3(1)*dt2+y3(2)*dt1)/dt
     
    +   yint1=y1(1)
    +   yint2=y2(1)
    +   yint3=y3(1)
     
      end subroutine interpol_rain_nests
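
The hunks above drop the temporal interpolation between two wind fields (the do m=1,2 loop over memind) and instead read precipitation from the single field selected by iwftouse, so only spatial bilinear interpolation remains. A minimal, self-contained sketch of the resulting logic, with simplified hypothetical names (one level, no nests; not FLEXPART code):

    ! Sketch of the post-changeset logic: bilinear interpolation in x/y from
    ! ONE wind field, selected by iwftouse, with no interpolation in time.
    program rain_interp_sketch
      implicit none
      integer, parameter :: nx = 4, ny = 4, numwfmem = 2
      real :: yy1(0:nx-1, 0:ny-1, numwfmem)   ! simplified: one level, no nests
      real :: xt, yt, ddx, ddy, rddx, rddy, p1, p2, p3, p4, yint1
      integer :: ix, jy, ixp, jyp, iwftouse

      yy1 = 1.0
      yy1(:, :, 1) = 2.0
      xt = 1.3; yt = 2.7
      iwftouse = 1                  ! index of the wind field to use

      ix = int(xt); jy = int(yt)    ! lower-left grid point of enclosing cell
      ixp = ix + 1; jyp = jy + 1
      ddx = xt - real(ix); ddy = yt - real(jy)
      rddx = 1.0 - ddx; rddy = 1.0 - ddy
      p1 = rddx*rddy; p2 = ddx*rddy; p3 = rddx*ddy; p4 = ddx*ddy

      ! Single-field bilinear interpolation (replaces the former do m=1,2 loop)
      yint1 = p1*yy1(ix ,jy ,iwftouse) + p2*yy1(ixp,jy ,iwftouse) &
            + p3*yy1(ix ,jyp,iwftouse) + p4*yy1(ixp,jyp,iwftouse)

      print *, 'interpolated value:', yint1   ! expect 2.0 for a uniform field
    end program rain_interp_sketch
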

  • src/mpi_mod.f90 (rc2bd55e → r0ecc1fe)

    @@ -2536,4 +2536,5 @@
      ! if (mp_ierr /= 0) goto 600
     
    + #ifdef USE_MPIINPLACE
      ! Using in-place reduction
          if (lroot) then

    @@ -2544,5 +2545,14 @@
            call MPI_Reduce(griduncn, 0, grid_size3d, mp_sp, MPI_SUM, id_root, &
                 & mp_comm_used, mp_ierr)
    -     end if
    +       if (mp_ierr /= 0) goto 600
    +     end if
    + 
    + #else
    +     call MPI_Reduce(griduncn, griduncn0, grid_size3d, mp_sp, MPI_SUM, id_root, &
    +          & mp_comm_used, mp_ierr)
    +     if (mp_ierr /= 0) goto 600
    +     if (lroot) griduncn = griduncn0
    + 
    + #endif
     
          if ((WETDEP).and.(ldirect.gt.0)) then
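
This hunk makes the nested grid reduction follow the same pattern as the non-nested one: with USE_MPIINPLACE the root receives the sums directly in griduncn, otherwise every rank reduces into the auxiliary array griduncn0 and the root copies the totals back. A minimal sketch of the two patterns (not FLEXPART code), assuming a plain 1-D array on MPI_COMM_WORLD and compilation with preprocessing enabled (e.g. a .F90 suffix):

    ! Sketch of the two reduction patterns this hunk switches between.
    program reduce_sketch
      use mpi
      implicit none
      integer, parameter :: n = 8
      integer :: ierr, rank
      real :: grid(n), grid0(n), rdummy

      call MPI_Init(ierr)
      call MPI_Comm_rank(MPI_COMM_WORLD, rank, ierr)
      grid = real(rank + 1)

    #ifdef USE_MPIINPLACE
      ! In-place variant: the root passes MPI_IN_PLACE as sendbuf and the sums
      ! land directly in grid; other ranks send grid as usual (their recvbuf
      ! argument is ignored, so a scalar dummy suffices).
      if (rank == 0) then
        call MPI_Reduce(MPI_IN_PLACE, grid, n, MPI_REAL, MPI_SUM, 0, &
             & MPI_COMM_WORLD, ierr)
      else
        call MPI_Reduce(grid, rdummy, n, MPI_REAL, MPI_SUM, 0, &
             & MPI_COMM_WORLD, ierr)
      end if
    #else
      ! Buffered variant: every rank reduces into the aux array grid0 and the
      ! root copies the totals back, as in the new #else branch above.
      call MPI_Reduce(grid, grid0, n, MPI_REAL, MPI_SUM, 0, &
           & MPI_COMM_WORLD, ierr)
      if (rank == 0) grid = grid0
    #endif

      if (rank == 0) print *, 'reduced first element:', grid(1)
      call MPI_Finalize(ierr)
    end program reduce_sketch

The in-place variant saves one full-size receive buffer per output grid; the buffered variant needs the extra array but avoids the asymmetric root/non-root call, which is why the allocations below are guarded by the same preprocessor flag.
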

  • src/netcdf_output_mod.f90 (rc2bd55e → r0ecc1fe)

    @@ -273,5 +273,4 @@
        character(len=3)            :: anspec
        CHARACTER                   :: adate*8,atime*6,timeunit*32
    -   ! ESO DBG: WHY IS THIS HARDCODED TO 1000?
        !REAL, DIMENSION(1000)       :: coord
        real, allocatable, dimension(:) :: coord

  • src/outgrid_init.f90 (r6ecb30a → r0ecc1fe)

    @@ -210,33 +210,37 @@
        allocate(gridunc(0:numxgrid-1,0:numygrid-1,numzgrid,maxspec, &
             maxpointspec_act,nclassunc,maxageclass),stat=stat)
    -       if (stat.ne.0) write(*,*)'ERROR: could not allocate gridunc'
    +   if (stat.ne.0) write(*,*)'ERROR: could not allocate gridunc'
        if (ldirect.gt.0) then
          allocate(wetgridunc(0:numxgrid-1,0:numygrid-1,maxspec, &
    -         maxpointspec_act,nclassunc,maxageclass),stat=stat)
    +          maxpointspec_act,nclassunc,maxageclass),stat=stat)
          if (stat.ne.0) write(*,*)'ERROR: could not allocate wetgridunc'
    -    allocate(drygridunc(0:numxgrid-1,0:numygrid-1,maxspec, &
    -         maxpointspec_act,nclassunc,maxageclass),stat=stat)
    +     allocate(drygridunc(0:numxgrid-1,0:numygrid-1,maxspec, &
    +          maxpointspec_act,nclassunc,maxageclass),stat=stat)
          if (stat.ne.0) write(*,*)'ERROR: could not allocate drygridunc'
        endif
     
    + #ifdef USE_MPIINPLACE
    + #else
      ! Extra field for totals at MPI root process
        if (lroot.and.mpi_mode.gt.0) then
    - 
    - #ifdef USE_MPIINPLACE
    - #else
    -     ! If MPI_IN_PLACE option is not used in mpi_mod.f90::mpif_tm_reduce_grid(),
    -     ! then an aux array is needed for parallel grid reduction
    + ! If MPI_IN_PLACE option is not used in mpi_mod.f90::mpif_tm_reduce_grid(),
    + ! then an aux array is needed for parallel grid reduction
          allocate(gridunc0(0:numxgrid-1,0:numygrid-1,numzgrid,maxspec, &
               maxpointspec_act,nclassunc,maxageclass),stat=stat)
          if (stat.ne.0) write(*,*)'ERROR: could not allocate gridunc0'
    +   else if (.not.lroot.and.mpi_mode.gt.0) then
    +     allocate(gridunc0(1,1,1,1,1,1,1),stat=stat)
    +     if (stat.ne.0) write(*,*)'ERROR: could not allocate gridunc0'
    +   end if
      #endif
    -     if (ldirect.gt.0) then
    -       allocate(wetgridunc0(0:numxgrid-1,0:numygrid-1,maxspec, &
    -            maxpointspec_act,nclassunc,maxageclass),stat=stat)
    -       if (stat.ne.0) write(*,*)'ERROR: could not allocate wetgridunc0'
    -       allocate(drygridunc0(0:numxgrid-1,0:numygrid-1,maxspec, &
    -            maxpointspec_act,nclassunc,maxageclass),stat=stat)
    -       if (stat.ne.0) write(*,*)'ERROR: could not allocate drygridunc0'
    -     endif
    + !  if (ldirect.gt.0) then
    +   if (lroot.and.mpi_mode.gt.0) then
    +     allocate(wetgridunc0(0:numxgrid-1,0:numygrid-1,maxspec, &
    +          maxpointspec_act,nclassunc,maxageclass),stat=stat)
    +     if (stat.ne.0) write(*,*)'ERROR: could not allocate wetgridunc0'
    +     allocate(drygridunc0(0:numxgrid-1,0:numygrid-1,maxspec, &
    +          maxpointspec_act,nclassunc,maxageclass),stat=stat)
    +     if (stat.ne.0) write(*,*)'ERROR: could not allocate drygridunc0'
    + 
      ! allocate a dummy to avoid compilator complaints
        else if (.not.lroot.and.mpi_mode.gt.0) then

  • src/outgrid_init_nest.f90 (r8a65cb0 → r0ecc1fe)

    @@ -69,18 +69,27 @@
        endif
     
    + #ifdef USE_MPIINPLACE
    + #else
      ! Extra field for totals at MPI root process
        if (lroot.and.mpi_mode.gt.0) then
    -     ! allocate(griduncn0(0:numxgridn-1,0:numygridn-1,numzgrid,maxspec, &
    -     !      maxpointspec_act,nclassunc,maxageclass),stat=stat)
    -     ! if (stat.ne.0) write(*,*)'ERROR:could not allocate nested gridunc'
    - 
    -     if (ldirect.gt.0) then
    -       allocate(wetgriduncn0(0:numxgridn-1,0:numygridn-1,maxspec, &
    -            maxpointspec_act,nclassunc,maxageclass),stat=stat)
    -       if (stat.ne.0) write(*,*)'ERROR:could not allocate nested gridunc'
    -       allocate(drygriduncn0(0:numxgridn-1,0:numygridn-1,maxspec, &
    -            maxpointspec_act,nclassunc,maxageclass),stat=stat)
    -       if (stat.ne.0) write(*,*)'ERROR:could not allocate nested gridunc'
    -     endif
    + ! If MPI_IN_PLACE option is not used in mpi_mod.f90::mpif_tm_reduce_grid_nest(),
    + ! then an aux array is needed for parallel grid reduction
    +     allocate(griduncn0(0:numxgridn-1,0:numygridn-1,numzgrid,maxspec, &
    +          maxpointspec_act,nclassunc,maxageclass),stat=stat)
    +     if (stat.ne.0) write(*,*)'ERROR:could not allocate nested gridunc'
    + ! allocate a dummy to avoid compilator complaints
    +   else if (.not.lroot.and.mpi_mode.gt.0) then
    +     allocate(griduncn0(1,1,1,1,1,1,1),stat=stat)
    +   end if
    + #endif
    + !    if (ldirect.gt.0) then
    +   if (lroot.and.mpi_mode.gt.0) then
    +     allocate(wetgriduncn0(0:numxgridn-1,0:numygridn-1,maxspec, &
    +          maxpointspec_act,nclassunc,maxageclass),stat=stat)
    +     if (stat.ne.0) write(*,*)'ERROR:could not allocate nested gridunc'
    +     allocate(drygriduncn0(0:numxgridn-1,0:numygridn-1,maxspec, &
    +          maxpointspec_act,nclassunc,maxageclass),stat=stat)
    +     if (stat.ne.0) write(*,*)'ERROR:could not allocate nested gridunc'
    + !  endif
      ! allocate a dummy to avoid compilator complaints
        else if (.not.lroot.and.mpi_mode.gt.0) then
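
Both outgrid_init.f90 and outgrid_init_nest.f90 now use the same allocation pattern for the auxiliary reduction arrays: a full-size array on the MPI root, a 1-element dummy on the other ranks so the allocatable is always defined, and a stat= check after each allocate. A compact, self-contained sketch of that pattern, with hypothetical names and a 2-D array for brevity (not FLEXPART code):

    ! Sketch of the root/non-root allocation pattern used in this changeset.
    program alloc_sketch
      implicit none
      real, allocatable :: grid0(:,:)
      call alloc_aux(.true., 1, 10, 10, grid0)
      print *, 'allocated shape:', shape(grid0)
    contains
      subroutine alloc_aux(lroot, mpi_mode, nx, ny, g0)
        logical, intent(in) :: lroot
        integer, intent(in) :: mpi_mode, nx, ny
        real, allocatable, intent(out) :: g0(:,:)
        integer :: stat
        if (lroot .and. mpi_mode > 0) then
          ! Root holds the reduced totals: allocate the full-size array
          allocate(g0(0:nx-1, 0:ny-1), stat=stat)
          if (stat /= 0) write(*,*) 'ERROR: could not allocate g0'
        else if (.not. lroot .and. mpi_mode > 0) then
          ! Other ranks get a 1x1 dummy to keep the allocatable defined
          allocate(g0(1,1), stat=stat)
          if (stat /= 0) write(*,*) 'ERROR: could not allocate g0'
        end if
      end subroutine alloc_aux
    end program alloc_sketch
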

  • src/unc_mod.f90 (r6ecb30a → r0ecc1fe)

    @@ -38,4 +38,5 @@
      ! then an aux array is needed for parallel grid reduction
        real,allocatable, dimension (:,:,:,:,:,:,:) :: gridunc0
    +   real,allocatable, dimension (:,:,:,:,:,:,:) :: griduncn0
      #endif
        real,allocatable, dimension (:,:,:,:,:,:,:) :: griduncn