source: flexpart.git/src/FLEXPART_MPI.f90 @ b0434e1

Last change on this file was b0434e1, checked in by Espen Sollum ATMOS <eso@…>, 8 years ago

Initial code to handle cloud water in nested wind fields (it is not completely implemented in this commit)

  • Property mode set to 100644
File size: 15.0 KB
!**********************************************************************
! Copyright 1998,1999,2000,2001,2002,2005,2007,2008,2009,2010         *
! Andreas Stohl, Petra Seibert, A. Frank, Gerhard Wotawa,             *
! Caroline Forster, Sabine Eckhardt, John Burkhart, Harald Sodemann   *
!                                                                     *
! This file is part of FLEXPART.                                      *
!                                                                     *
! FLEXPART is free software: you can redistribute it and/or modify    *
! it under the terms of the GNU General Public License as published by*
! the Free Software Foundation, either version 3 of the License, or   *
! (at your option) any later version.                                 *
!                                                                     *
! FLEXPART is distributed in the hope that it will be useful,         *
! but WITHOUT ANY WARRANTY; without even the implied warranty of      *
! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the       *
! GNU General Public License for more details.                        *
!                                                                     *
! You should have received a copy of the GNU General Public License   *
! along with FLEXPART.  If not, see <http://www.gnu.org/licenses/>.   *
!**********************************************************************

program flexpart

  !*****************************************************************************
  !                                                                            *
  !     This is the Lagrangian Particle Dispersion Model FLEXPART.             *
  !     The main program manages the reading of model run specifications, etc. *
  !     All actual computing is done within subroutine timemanager.            *
  !                                                                            *
  !     Author: A. Stohl                                                       *
  !                                                                            *
  !     18 May 1996                                                            *
  !                                                                            *
  !*****************************************************************************
  !                                                                            *
  ! Variables:                                                                 *
  !                                                                            *
  ! Constants:                                                                 *
  !                                                                            *
  !*****************************************************************************

  use point_mod
  use par_mod
  use com_mod
  use conv_mod
  use mpi_mod
  use netcdf_output_mod, only: writeheader_netcdf
  use random_mod, only: gasdev1

  implicit none

  integer :: i,j,ix,jy,inest
  integer :: idummy = -320
  character(len=256) :: inline_options  !pathfile, flexversion, arg2


! Initialize mpi
!*********************
  call mpif_init

  if (mp_measure_time) call mpif_mtime('flexpart',0)

! Initialize arrays in com_mod
!*****************************
  call com_mod_allocate_part(maxpart_mpi)

  ! Generate a large number of random numbers
  !******************************************

  ! eso: Different seed for each MPI process
  idummy=idummy+mp_seed

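  ! gasdev1 delivers two Gaussian deviates per call, so rannumb is filled
  ! pairwise; the extra call below makes sure the final element is set.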
  do i=1,maxrand-1,2
    call gasdev1(idummy,rannumb(i),rannumb(i+1))
  end do
  call gasdev1(idummy,rannumb(maxrand),rannumb(maxrand-1))

  ! FLEXPART version string
  flexversion_major = '10' ! Major version number, also used for species file names
!  flexversion='Ver. 10 Beta MPI (2015-05-01)'
  flexversion='Ver. '//trim(flexversion_major)//' Beta MPI (2015-05-01)'
  verbosity=0

  ! Read the pathnames where input/output files are stored
  !*******************************************************

  inline_options='none'
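  ! Command line handling: two arguments give the pathnames file and an
  ! option flag; a single argument is either a pathnames file or, if it
  ! starts with '-', an option flag (then './pathnames' is assumed);
  ! with no arguments './pathnames' is used.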
  select case (iargc())
  case (2)
    call getarg(1,arg1)
    pathfile=arg1
    call getarg(2,arg2)
    inline_options=arg2
  case (1)
    call getarg(1,arg1)
    pathfile=arg1
    if (arg1(1:1).eq.'-') then
      write(pathfile,'(a11)') './pathnames'
      inline_options=arg1
    endif
  case (0)
    write(pathfile,'(a11)') './pathnames'
  end select

  if (lroot) then
  ! Print the GPL License statement
  !*******************************************************
    print*,'Welcome to FLEXPART ', trim(flexversion)
    print*,'FLEXPART is free software released under the GNU General Public License.'
  endif

  if (inline_options(1:1).eq.'-') then
    if (trim(inline_options).eq.'-v'.or.trim(inline_options).eq.'-v1') then
      print*, 'Verbose mode 1: display detailed information during run'
      verbosity=1
    endif
    if (trim(inline_options).eq.'-v2') then
      print*, 'Verbose mode 2: display more detailed information during run'
      verbosity=2
    endif
    if (trim(inline_options).eq.'-i') then
      print*, 'Info mode: provide detailed run specific information and stop'
      verbosity=1
      info_flag=1
    endif
    if (trim(inline_options).eq.'-i2') then
      print*, 'Info mode: provide more detailed run specific information and stop'
      verbosity=2
      info_flag=1
    endif
  endif

  if (verbosity.gt.0) then
    write(*,*) 'call readpaths'
  endif
  call readpaths(pathfile)

  if (verbosity.gt.1) then !show clock info
     !print*,'length(4)',length(4)
     !count=0,count_rate=1000
    call system_clock(count_clock0, count_rate, count_max)
     !WRITE(*,*) 'SYSTEM_CLOCK',count, count_rate, count_max
     !WRITE(*,*) 'SYSTEM_CLOCK, count_clock0', count_clock0
     !WRITE(*,*) 'SYSTEM_CLOCK, count_rate', count_rate
     !WRITE(*,*) 'SYSTEM_CLOCK, count_max', count_max
  endif

  ! Read the user specifications for the current model run
  !*******************************************************

  if (verbosity.gt.0) then
    write(*,*) 'call readcommand'
  endif
  call readcommand
  if (verbosity.gt.0 .and. lroot) then
    write(*,*) '    ldirect=', ldirect
    write(*,*) '    ibdate,ibtime=',ibdate,ibtime
    write(*,*) '    iedate,ietime=', iedate,ietime
    if (verbosity.gt.1) then
      CALL SYSTEM_CLOCK(count_clock, count_rate, count_max)
      write(*,*) 'SYSTEM_CLOCK',(count_clock - count_clock0)/real(count_rate) !, count_rate, count_max
    endif
  endif

  ! Warn if trying to run backwards (the exit is currently disabled)
  if (ldirect.le.0) then
    write(*,FMT='(80("#"))')
    write(*,*) '#### FLEXPART_MPI> ERROR: ', &
         & 'MPI version not (yet) working with backward runs. '
    write(*,*) '#### Use the serial version instead.'
    write(*,FMT='(80("#"))')
    ! call mpif_finalize
    ! stop
  end if



! Read the age classes to be used
!********************************
  if (verbosity.gt.0 .and. lroot) then
    write(*,*) 'call readageclasses'
  endif
  call readageclasses

  if (verbosity.gt.1 .and. lroot) then
    call system_clock(count_clock, count_rate, count_max)
    write(*,*) 'SYSTEM_CLOCK',(count_clock - count_clock0)/real(count_rate) !, count_rate, count_max
  endif

  ! Read which wind fields are available within the modelling period
  !******************************************************************

  if (verbosity.gt.0 .and. lroot) then
    write(*,*) 'call readavailable'
  endif
  call readavailable

  ! If nested wind fields are used, allocate arrays
  !************************************************

  if (verbosity.gt.0 .and. lroot) then
    write(*,*) 'call com_mod_allocate_nests'
  endif
  call com_mod_allocate_nests

! Read the model grid specifications,
! both for the mother domain and any nests
!**********************************************

  if (verbosity.gt.0 .and. lroot) then
    write(*,*) 'call gridcheck'
  endif

  call gridcheck

  if (verbosity.gt.1 .and. lroot) then
    call system_clock(count_clock, count_rate, count_max)
    write(*,*) 'SYSTEM_CLOCK',(count_clock - count_clock0)/real(count_rate) !, count_rate, count_max
  endif


  if (verbosity.gt.0 .and. lroot) then
    write(*,*) 'call gridcheck_nests'
  endif
  call gridcheck_nests


! Read the output grid specifications
!************************************

  if (verbosity.gt.0 .and. lroot) then
    WRITE(*,*) 'call readoutgrid'
  endif

  call readoutgrid

  if (nested_output.eq.1) then
    call readoutgrid_nest
    if (verbosity.gt.0.and.lroot) then
      WRITE(*,*) '# readoutgrid_nest'
    endif
  endif

  ! Read the receptor points for which extra concentrations are to be calculated
  !*****************************************************************************

  if (verbosity.eq.1 .and. lroot) then
    print*,'call readreceptors'
  endif
  call readreceptors

  ! Read the physico-chemical species property table
  !*************************************************
  !SEC: now only needed SPECIES are read in readreleases.f
  !call readspecies


  ! Read the landuse inventory
  !***************************

  if (verbosity.gt.0 .and. lroot) then
    print*,'call readlanduse'
  endif
  call readlanduse


! Assign fractional cover of landuse classes to each ECMWF grid point
!********************************************************************

  if (verbosity.gt.0 .and. lroot) then
    print*,'call assignland'
  endif
  call assignland

  ! Read the coordinates of the release locations
  !**********************************************

  if (verbosity.gt.0 .and. lroot) then
    print*,'call readreleases'
  endif
  call readreleases


! Read and compute surface resistances to dry deposition of gases
!****************************************************************

  if (verbosity.gt.0 .and. lroot) then
    print*,'call readdepo'
  endif
  call readdepo

  ! Convert the release point coordinates from geographical to grid coordinates
  !*****************************************************************************

  if (verbosity.gt.0 .and. lroot) then
    print*,'call coordtrafo'
  endif
  call coordtrafo


  ! Initialize all particles to non-existent
  !*****************************************

  if (verbosity.gt.0 .and. lroot) then
    print*,'Initialize all particles to non-existent'
  endif

  do j=1, size(itra1) ! maxpart_mpi
    itra1(j)=-999999999
  end do

  ! For continuation of previous run, read in particle positions
  !*************************************************************

  if (ipin.eq.1) then
    if (verbosity.gt.0 .and. lroot) then
      print*,'call readpartpositions'
    endif
    call readpartpositions
  else
    if (verbosity.gt.0 .and. lroot) then
      print*,'numpart=0, numparticlecount=0'
    endif
    numpart=0
    numparticlecount=0
  endif


  ! Calculate volume, surface area, etc., of all output grid cells
  ! Allocate fluxes and OHfield if necessary
  !***************************************************************


  if (verbosity.gt.0 .and. lroot) then
    print*,'call outgrid_init'
  endif
  call outgrid_init
  if (nested_output.eq.1) call outgrid_init_nest

  ! Read the OH field
  !******************

  if (OHREA.eqv..true.) then
    if (verbosity.gt.0 .and. lroot) then
      print*,'call readOHfield'
    endif
    call readOHfield
  endif

  !! testing !!
  ! open(999,file=trim(path(1))//'OH_FIELDS/jscalar_50N.txt',action='write',status='new')
  ! open(998,file=trim(path(1))//'OH_FIELDS/jscalar_50S.txt',action='write',status='new')


  ! Write basic information on the simulation to a file "header"
  ! and open files that are to be kept open throughout the simulation
  !******************************************************************

  if (mp_measure_time) call mpif_mtime('iotime',0)
  if (lroot) then ! MPI: this part root process only

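  ! Main output header: netCDF format if lnetcdfout=1, otherwise the
  ! default binary "header" file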
  if (lnetcdfout.eq.1) then
    call writeheader_netcdf(lnest=.false.)
  else
    call writeheader
  end if

  if (nested_output.eq.1) then
    if (lnetcdfout.eq.1) then
      call writeheader_netcdf(lnest=.true.)
    else
      call writeheader_nest
    endif
  endif

!
    if (verbosity.gt.0) then
      print*,'call writeheader'
    endif

    call writeheader
! FLEXPART 9.2 ticket ?? write header in ASCII format
    call writeheader_txt
!if (nested_output.eq.1) call writeheader_nest
    if (nested_output.eq.1.and.surf_only.ne.1) call writeheader_nest
    if (nested_output.eq.1.and.surf_only.eq.1) call writeheader_nest_surf
    if (nested_output.ne.1.and.surf_only.eq.1) call writeheader_surf
  end if ! (mpif_pid == 0)

  if (mp_measure_time) call mpif_mtime('iotime',0)

  !open(unitdates,file=path(2)(1:length(2))//'dates')

  if (verbosity.gt.0 .and. lroot) then
    print*,'call openreceptors'
  endif
  call openreceptors
  if ((iout.eq.4).or.(iout.eq.5)) call openouttraj

  ! Releases can only start and end at discrete times (multiples of lsynctime)
  !***************************************************************************

  if (verbosity.gt.0 .and. lroot) then
    print*,'discretize release times'
  endif
  do i=1,numpoint
    ireleasestart(i)=nint(real(ireleasestart(i))/real(lsynctime))*lsynctime
    ireleaseend(i)=nint(real(ireleaseend(i))/real(lsynctime))*lsynctime
  end do

  ! Initialize cloud-base mass fluxes for the convection scheme
  !************************************************************

  if (verbosity.gt.0 .and. lroot) then
    print*,'Initialize cloud-base mass fluxes for the convection scheme'
  endif

  do jy=0,nymin1
    do ix=0,nxmin1
      cbaseflux(ix,jy)=0.
    end do
  end do
  do inest=1,numbnests
    do jy=0,nyn(inest)-1
      do ix=0,nxn(inest)-1
        cbasefluxn(ix,jy,inest)=0.
      end do
    end do
  end do


! Calculate particle trajectories
!********************************

  if (verbosity.gt.0.and. lroot) then
    if (verbosity.gt.1) then
      CALL SYSTEM_CLOCK(count_clock, count_rate, count_max)
      WRITE(*,*) 'SYSTEM_CLOCK',(count_clock - count_clock0)/real(count_rate) !, count_rate, count_max
    endif
    print*,'call timemanager'
  endif
  if (info_flag.eq.1) then
    print*, 'info only mode (stop)'
    call mpif_finalize
    stop
  endif


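! All actual computing for the simulation is done within subroutine
! timemanager (see the program header above)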
  call timemanager


! NIK 16.02.2005
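! Sum the below-cloud and in-cloud scavenging counters from all
! participating processes onto the root process; the root contributes
! its own counts via MPI_IN_PLACE.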
  if (lroot) then
    call MPI_Reduce(MPI_IN_PLACE, tot_blc_count, 1, MPI_INTEGER8, MPI_SUM, id_root, &
         & mp_comm_used, mp_ierr)
    call MPI_Reduce(MPI_IN_PLACE, tot_inc_count, 1, MPI_INTEGER8, MPI_SUM, id_root, &
         & mp_comm_used, mp_ierr)
  else
    if (mp_partgroup_pid.ge.0) then ! Skip for readwind process
      call MPI_Reduce(tot_blc_count, tot_blc_count, 1, MPI_INTEGER8, MPI_SUM, id_root, &
           & mp_comm_used, mp_ierr)
      call MPI_Reduce(tot_inc_count, tot_inc_count, 1, MPI_INTEGER8, MPI_SUM, id_root, &
           & mp_comm_used, mp_ierr)
    end if
  end if

  if (lroot) then
    write(*,*) '**********************************************'
    write(*,*) 'Total number of occurrences of below-cloud scavenging', tot_blc_count
    write(*,*) 'Total number of occurrences of in-cloud    scavenging', tot_inc_count
    write(*,*) '**********************************************'

    write(*,*) 'CONGRATULATIONS: YOU HAVE SUCCESSFULLY COMPLETED A FLE&
         &XPART MODEL RUN!'
  end if

  if (mp_measure_time) call mpif_mtime('flexpart',1)

  call mpif_finalize

end program flexpart