1 | # |
---|
2 | # This software is licensed under the terms of the Apache Licence Version 2.0 |
---|
3 | # which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. |
---|
4 | # |
---|
5 | # Functionality provided: Prepare input 3D-wind fields in hybrid coordinates + surface fields for FLEXPART runs |
---|
6 | # |
---|
7 | # Creation: October 2014 - Anne Fouilloux - University of Oslo |
---|
8 | # Extension November 2015 - Leopold Haimberger - University of Vienna for: |
---|
9 | # - using the WebAPI also for general MARS retrievals |
---|
10 | # - job submission on ecgate and cca |
---|
11 | # - job templates suitable for twice daily operational dissemination |
---|
12 | # - dividing retrievals of longer periods into digestible chunks |
---|
13 | # - retrieve also longer term forecasts, not only analyses and short term forecast data |
---|
14 | # - conversion into GRIB2 |
---|
15 | # - conversion into .fp format for faster execution of FLEXPART |
---|
16 | # |
---|
17 | # Extension May 2018 - Anne Philipp - University of Vienna |
---|
18 | # - introduced new CONTROL parameter to separate accumulation and other |
---|
19 | # data specification |
---|
20 | # - modified code to be able to retrieve ERA-5 data |
---|
21 | # - modified code to be able to retrieve CERA-20C data |
---|
22 | # - added possibility to retrieve data from the public data server also as |
---|
23 | # public user |
---|
24 | # - allow specifying TIME/TYPE/STEP combinations only with needed data instead |
---|
25 | # of the full 24 hours |
---|
26 | # |
---|
27 | # Requirements: |
---|
28 | # in addition to a standard python 2.6 or 2.7 installation the following packages need to be installed |
---|
29 | # ECMWF WebMARS, gribAPI with python enabled, emoslib, ecaccess web toolkit, all available from https://software.ecmwf.int/ |
---|
30 | # dateutils |
---|
31 | # matplotlib (optional, for debugging) |
---|
32 | # |
---|
33 | # |
---|
34 | import subprocess |
---|
35 | from argparse import ArgumentParser,ArgumentDefaultsHelpFormatter |
---|
36 | import traceback |
---|
37 | import shutil |
---|
38 | import os, errno,sys,inspect,glob |
---|
39 | import datetime |
---|
40 | |
---|
41 | from string import atoi |
---|
42 | from numpy import * |
---|
43 | |
---|
44 | ecapi=True |
---|
45 | try: |
---|
46 | import ecmwfapi |
---|
47 | except ImportError: |
---|
48 | ecapi=False |
---|
49 | from gribapi import * |
---|
50 | from GribTools import GribTools |
---|
51 | |
---|
def interpret_args_and_control(*args,**kwargs):
    '''Parse the command line arguments of the retrieval scripts and merge
    them with the parameters from the CONTROL file.

    Command-line values override the corresponding CONTROL-file values.
    Returns the tuple (args, c) where args is the argparse Namespace and
    c is the populated Control instance.  Exits with status 1 when the
    CONTROL file cannot be read or no start date is available.
    '''

    parser = ArgumentParser(description='Retrieve FLEXPART input from ECMWF MARS archive',
                            formatter_class=ArgumentDefaultsHelpFormatter)

    # the most important arguments
    parser.add_argument("--start_date", dest="start_date",
                        help="start date YYYYMMDD")
    parser.add_argument( "--end_date", dest="end_date",
                         help="end_date YYYYMMDD")
    parser.add_argument( "--date_chunk", dest="date_chunk",default=None,
                         help="# of days to be retrieved at once")

    # some arguments that override the default in the control file
    parser.add_argument("--basetime", dest="basetime", help="base such as 00/12 (for half day retrievals)")
    parser.add_argument("--step", dest="step", help="steps such as 00/to/48")
    parser.add_argument("--levelist", dest="levelist",help="Vertical levels to be retrieved, e.g. 30/to/60")
    parser.add_argument("--area", dest="area", help="area defined as north/west/south/east")

    # set the working directories
    parser.add_argument("--inputdir", dest="inputdir",default=None,
                        help="root directory for storing intermediate files")
    parser.add_argument("--outputdir", dest="outputdir",default=None,
                        help="root directory for storing output files")
    parser.add_argument("--flexpart_root_scripts", dest="flexpart_root_scripts",
                        help="FLEXPART root directory (to find grib2flexpart and COMMAND file)\n\
Normally flex_extract resides in the scripts directory of the FLEXPART distribution")

    # this is only used by prepareFLEXPART.py to rerun a postprocessing step
    parser.add_argument("--ppid", dest="ppid",
                        help="Specify parent process id for rerun of prepareFLEXPART")

    # arguments for job submission to ECMWF, only needed by submit.py
    parser.add_argument("--job_template", dest='job_template',default="job.temp",
                        help="job template file for submission to ECMWF")
    #parser.add_argument("--remote", dest="remote",
    #                    help="target for submission to ECMWF (ecgate or cca etc.)")
    parser.add_argument("--queue", dest="queue",
                        help="queue for submission to ECMWF (e.g. ecgate or cca )")

    parser.add_argument("--controlfile", dest="controlfile",default='CONTROL.temp',
                        help="file with control parameters")
    parser.add_argument("--debug", dest="debug",default=0,
                        help="Debug mode - leave temporary files intact")
    parser.add_argument("--public", dest="public", default=0,
                        help="Public mode - retrieves the public datasets")

    parser.add_argument("--request", dest="request", default=0,
                        help="list all mars request in file mars_requests.dat \
and skip submission to mars")

    args = parser.parse_args()

    # read the CONTROL file; if the given path does not exist, retry
    # relative to the installation directory before giving up
    try:
        c=Control(args.controlfile)
    except IOError:
        try:
            # NOTE(review): localpythonpath is not defined in this module;
            # presumably a global set elsewhere in the package - confirm
            c=Control(localpythonpath+args.controlfile)

        except:
            print 'Could not read control file "'+args.controlfile+'"'
            print 'Either it does not exist or its syntax is wrong.'
            print 'Try "'+sys.argv[0].split('/')[-1]+' -h" to print usage information'
            exit(1)

    # a start date is mandatory - either on the command line or in CONTROL
    if args.start_date==None and getattr(c,'start_date')==None:
        print 'start_date specified neither in command line nor in control file '+args.controlfile
        print 'Try "'+sys.argv[0].split('/')[-1]+' -h" to print usage information'
        exit(1)

    # command line values take precedence over the CONTROL file
    if args.start_date!=None:
        c.start_date=args.start_date
    if args.end_date!=None:
        c.end_date=args.end_date
    if c.end_date==None:
        # single-day retrieval when no end date is given
        c.end_date=c.start_date
    if args.date_chunk!=None:
        c.date_chunk=args.date_chunk

    if not hasattr(c,'debug'):
        c.debug=args.debug

    if not hasattr(c, 'public'):
        setattr(c, 'public', args.public)

    # determine working directories; when only one of input/output is
    # given, the other one defaults to the same directory
    if args.inputdir==None and args.outputdir==None:
        c.inputdir='../work'
        c.outputdir='../work'
    else:
        if args.inputdir!=None:
            c.inputdir=args.inputdir
        if args.outputdir==None:
            c.outputdir=args.inputdir
        if args.outputdir!=None:
            c.outputdir=args.outputdir
        if args.inputdir==None:
            c.inputdir=args.outputdir

    if hasattr(c,'outputdir')==False and args.outputdir==None:
        c.outputdir=c.inputdir
    else:
        if args.outputdir!=None:
            c.outputdir=args.outputdir

    # an area given on the command line overrides CONTROL; coordinates
    # containing a decimal point are scaled by 1000 (integer grid notation)
    if args.area!=None:
        afloat='.' in args.area
        l=args.area.split('/')
        if afloat:
            for i in range(len(l)):
                l[i]=str(int(float(l[i])*1000))
        c.upper,c.left,c.lower,c.right=l

    # basetime activates the 'operational mode'
    if args.basetime!=None:
        c.basetime=args.basetime
        #if int(c.basetime)==0:
            #c.start_date=datetime.datetime.strftime(datetime.datetime.strptime(c.start_date,'%Y%m%d')-datetime.timedelta(days=1),'%Y%m%d')
            #c.end_date=c.start_date

    # expand a STEP range "start/to/end/by/inc" into an explicit
    # zero-padded list of steps
    if args.step!=None:
        l=args.step.split('/')
        if 'to' in args.step.lower():
            if 'by' in args.step.lower():
                ilist=arange(int(l[0]),int(l[2])+1,int(l[4]))
                c.step=['{:0>3}'.format(i) for i in ilist]
            else:
                myerror(None,args.step+':\n'+'please use "by" as well if "to" is used')
        else:
            c.step=l

    # LEVEL is derived as the maximum entry of LEVELIST
    if args.levelist!=None:
        c.levelist=args.levelist
        if 'to' in c.levelist:
            c.level=c.levelist.split('/')[2]
        else:
            c.level=c.levelist.split('/')[-1]

    if args.flexpart_root_scripts!=None:
        c.flexpart_root_scripts=args.flexpart_root_scripts

    # set request attribute to control file
    # NOTE(review): args.request defaults to the integer 0, so the
    # comparison with the string '0' is always true for the default -
    # confirm whether default='0' was intended
    if args.request != '0':
        c.request=args.request

    if c.request != '0':
        # start with a fresh request log for this run
        marsfile = os.path.join(c.inputdir, 'mars_requests.csv')
        if os.path.isfile(marsfile):
            os.remove(marsfile)

    return args,c
---|
204 | |
---|
def install_args_and_control():
    '''Parse the command line arguments of the installation script and read
    the CONTROL file.

    Returns the tuple (args, c).  Exits with status 1 when the CONTROL
    file cannot be read or mandatory ECMWF credentials are missing for a
    non-local installation target.
    '''

    parser = ArgumentParser(description='Install flex_extract software locally or on ECMWF machines',
                            formatter_class=ArgumentDefaultsHelpFormatter)
    parser.add_argument('--target',dest='install_target',
                        help="Valid targets: local | ecgate | cca , the latter two are at ECMWF")
    parser.add_argument("--makefile", dest="makefile",help='Name of Makefile to use for compiling CONVERT2')
    parser.add_argument("--ecuid", dest="ecuid",help='user id at ECMWF')
    parser.add_argument("--ecgid", dest="ecgid",help='group id at ECMWF')
    parser.add_argument("--gateway", dest="gateway",help='name of local gateway server')
    parser.add_argument("--destination", dest="destination",help='ecaccess destination, e.g. leo@genericSftp')

    parser.add_argument("--flexpart_root_scripts", dest="flexpart_root_scripts",
                        help="FLEXPART root directory on ECMWF servers (to find grib2flexpart and COMMAND file)\n\
Normally flex_extract resides in the scripts directory of the FLEXPART distribution, thus the:")

    # arguments for job submission to ECMWF, only needed by submit.py
    parser.add_argument("--job_template", dest='job_template',default="job.temp.o",
                        help="job template file for submission to ECMWF")

    parser.add_argument("--controlfile", dest="controlfile",default='CONTROL.temp',
                        help="file with control parameters")
    args = parser.parse_args()

    try:
#    if True:
        c=Control(args.controlfile)
#    else:
    except:
        print 'Could not read control file "'+args.controlfile+'"'
        print 'Either it does not exist or its syntax is wrong.'
        print 'Try "'+sys.argv[0].split('/')[-1]+' -h" to print usage information'
        exit(1)

    # installations on ECMWF machines need the full ecaccess credentials
    if args.install_target!='local':
        if args.ecgid==None or args.ecuid==None or args.gateway==None or args.destination==None :
            print 'Please enter your ECMWF user id and group id as well as the \nname of the local gateway and the ectrans destination '
            print 'with command line options --ecuid --ecgid --gateway --destination'
            print 'Try "'+sys.argv[0].split('/')[-1]+' -h" to print usage information'
            print 'Please consult ecaccess documentation or ECMWF user support for further details'
            sys.exit(1)
        else:
            c.ecuid=args.ecuid
            c.ecgid=args.ecgid
            c.gateway=args.gateway
            c.destination=args.destination

    # NOTE(review): args.makefile always exists (argparse default None),
    # so this try/except never triggers
    try:
        c.makefile=args.makefile
    except:
        pass

    # default installation directory depends on the target platform
    if args.install_target=='local':
        if args.flexpart_root_scripts==None:
            c.flexpart_root_scripts='../'
        else:
            c.flexpart_root_scripts=args.flexpart_root_scripts

    if args.install_target!='local':
        if args.flexpart_root_scripts==None:
            c.ec_flexpart_root_scripts='${HOME}'
        else:
            c.ec_flexpart_root_scripts=args.flexpart_root_scripts

    return args,c
---|
270 | |
---|
def cleanup(c):
    """Remove intermediate files from c.inputdir.

    Files whose name contains c.prefix (the final FLEXPART input files)
    are kept, unless they were transferred away already (no local ecapi
    and ECTRANS/ECSTORAGE enabled), in which case everything is removed.
    """
    print('cleanup')

    # when the data has been shipped via ectrans/ecstorage, even the
    # prefixed result files are disposable
    remove_everything = (c.ecapi == False and
                         (c.ectrans == '1' or c.ecstorage == '1'))

    for candidate in glob.glob(c.inputdir + "/*"):
        if c.prefix not in candidate:
            silentremove(candidate)
        if remove_everything:
            silentremove(candidate)

    print('Done')
---|
291 | |
---|
292 | |
---|
def myerror(c,message='ERROR'):
    """Report a fatal error and terminate with exit code 1.

    Prints *message*.  The notification target list is taken from
    c.mailfail when available, otherwise from the USER environment
    variable; it is only used by the (currently disabled) email
    notification code below.
    """
    try:
        target = c.mailfail
    except AttributeError:
        target = os.getenv('USER')

    if type(target) is not list:
        target = [target]

    print(message)

    # uncomment if user wants email notification directly from python
    #for t in target:
        #p=subprocess.Popen(['mail','-s flex_extract v7.0.4 ERROR', os.path.expandvars(t)], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE,bufsize=1)
        #tr='\n'.join(traceback.format_stack())
        #pout=p.communicate(input=message+'\n\n'+tr)[0]
        #print('Email sent to '+os.path.expandvars(t))

    exit(1)
---|
311 | |
---|
def normalexit(c,message='Done!'):
    """Report a normal program termination by printing *message*.

    The notification target from c.mailops is only needed by the
    (currently disabled) email code; any failure while determining it is
    deliberately ignored so the final message is always printed.
    """
    try:
        target = c.mailops
        if type(target) is not list:
            target = [target]
        # Uncomment if user wants notification directly from python
        #for t in target:
            #p=subprocess.Popen(['mail','-s flex_extract v7.0.4 normal exit', os.path.expandvars(t)], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE,bufsize=1)
            #pout=p.communicate(input=message+'\n\n')[0]
            #print(pout.decode())
    except:
        pass

    print(message)

    return
---|
332 | |
---|
def product(*args, **kwds):
    """Cartesian product of the input iterables.

    product('ABCD', 'xy') --> Ax Ay Bx By Cx Cy Dx Dy
    product(range(2), repeat=3) --> 000 001 010 011 100 101 110 111

    Delegates to the C-implemented itertools.product instead of building
    every intermediate list in Python (the hand-rolled version
    materialized all partial products eagerly).  As in the original, only
    the 'repeat' keyword is honoured; other keywords are ignored.
    Returns an iterator of tuples.
    """
    import itertools
    return itertools.product(*args, repeat=kwds.get('repeat', 1))
---|
342 | |
---|
###############################################################
# utility to remove a file if it exists
# it does not fail if the file does not exist
###############################################################
def silentremove(filename):
    """Delete *filename*; a non-existing file is silently ignored."""
    try:
        os.remove(filename)
    except OSError as err:
        # a missing file is exactly the case we tolerate;
        # every other failure is re-raised
        if err.errno == errno.ENOENT:
            return
        raise
---|
353 | |
---|
354 | |
---|
def init128(fn):
    """Read the ECMWF grib-table-128 file *fn*.

    Returns a dict mapping the 3-character parameter id (columns 0-2 of
    each line) to the parameter short name (columns 59-63, stripped).
    Lines starting with '!' are comments.  Blank lines are skipped - the
    original version raised IndexError on them, and read().split('\\n')
    always yields at least one empty string for the trailing newline.
    """
    table128=dict()
    with open(fn) as f:
        fdata = f.read().split('\n')
    for data in fdata:
        # skip blank lines and '!' comment lines
        if data and data[0]!='!':
            table128[data[0:3]]=data[59:64].strip()

    return table128
---|
365 | |
---|
def toparamId(pars,table):
    """Translate a '/'-separated list of parameter names or numbers into a
    list of integer parameter ids, using grib table 128.

    *table* maps the 3-digit id string to the short name; a request entry
    matches either side.  Unknown entries produce a warning and are
    skipped.
    """
    ids = []
    for entry in pars.upper().split('/'):
        for code, shortname in table.items():
            if entry == code or entry == shortname:
                ids.append(int(code))
                break
        else:
            # no break: entry matched neither an id nor a short name
            print('Warning: par '+entry+' not found in table 128')
    return ids
---|
380 | |
---|
def dapoly(alist):
    """Cubic polynomial de-accumulation helper.

    Fits a cubic through the four consecutive accumulated fields in
    *alist* and evaluates the combination 8a+4b+2c+d used for
    disaggregation.  Works elementwise on scalars or numpy arrays.
    """
    coeff_a = (alist[3] - alist[0] + 3. * (alist[1] - alist[2])) / 6.
    coeff_b = (alist[2] + alist[0]) / 2. - alist[1] - 9. * coeff_a / 2.
    coeff_c = alist[1] - alist[0] - 7. * coeff_a / 2. - 2. * coeff_b
    coeff_d = alist[0] - coeff_a / 4. - coeff_b / 3. - coeff_c / 2.
    return 8. * coeff_a + 4. * coeff_b + 2. * coeff_c + coeff_d
---|
391 | |
---|
def darain(alist):
    """Disaggregate precipitation from four consecutive accumulated
    fields (numpy arrays) and return the field for the central interval.

    The inputs are clipped to zero IN PLACE first (negative accumulated
    precipitation is unphysical), matching the original behaviour.

    NOTE(review): both weighted terms use xb*xc as numerator - confirm
    the second one is not a typo for a different product.
    """
    xa = alist[0]
    xb = alist[1]
    xc = alist[2]
    xd = alist[3]
    # clip unphysical negative values in place
    for field in (xa, xb, xc, xd):
        field[field < 0.] = 0.

    # weight xb by its neighbours where they carry precipitation,
    # fall back to a plain 0.5 factor elsewhere
    xac = 0.5 * xb
    wet = xa + xc > 0.
    xac[wet] = xb[wet] * xc[wet] / (xa[wet] + xc[wet])

    xbd = 0.5 * xc
    wet = xb + xd > 0.
    xbd[wet] = xb[wet] * xc[wet] / (xb[wet] + xd[wet])

    return xac + xbd
---|
411 | |
---|
class Control:
    '''Container for the parameters of the flex_extract CONTROL file.

    Each CONTROL line has the form "NAME value [value ...]".  Every
    parameter is stored as a lowercase attribute; multi-valued parameters
    become lists, the literal 'None' becomes the None object and
    ${ENVVAR} references are additionally stored expanded under
    "<name>_expanded".  After parsing, defaults and consistency checks
    for the retrieval parameters are applied.
    '''

    def __init__(self,filename):
        '''Read the CONTROL file *filename* and populate the attributes.

        Raises IOError when the file cannot be opened; exits via
        myerror() when a referenced environment variable is undefined.
        '''
        with open(filename) as f:
            fdata = f.read().split('\n')
        for ldata in fdata:
            data=ldata.split()
            if len(data)>1:
                # strip the historic 'M_' prefix from parameter names
                if 'm_' in data[0].lower():
                    data[0]=data[0][2:]
                # legacy aliases for the MARS keywords
                if data[0].lower()=='class':
                    data[0]='marsclass'
                if data[0].lower()=='day1':
                    data[0]='start_date'
                if data[0].lower()=='day2':
                    data[0]='end_date'
                if data[0].lower()=='addpar':
                    # ADDPAR may be a '/'-separated list; convert it to the
                    # whitespace-separated multi-value form handled below
                    if '/' in data[1]:
                        if data[1][0]=='/':
                            data[1]=data[1][1:]
                        dd=data[1].split('/')
                        data=[data[0]]
                        for d in dd:
                            data.append(d)
                if len(data)==2:
                    if '$' in data[1]:
                        # keep the raw value and additionally store the
                        # value with all ${ENVVAR} references expanded
                        setattr(self,data[0].lower(),data[1])
                        while '$' in data[1]:
                            i=data[1].index('$')
                            j=data[1].find('{')
                            k=data[1].find('}')
                            var=os.getenv(data[1][j+1:k])
                            if var is not None:
                                data[1]=data[1][:i]+var+data[1][k+1:]
                            else:
                                myerror(None,'Could not find variable '+data[1][j+1:k]+' while reading '+filename)
                        setattr(self,data[0].lower()+'_expanded',data[1])
                    else:
                        if data[1].lower()!='none':
                            setattr(self,data[0].lower(),data[1])
                        else:
                            setattr(self,data[0].lower(),None)
                elif len(data)>2:
                    # multi-valued parameter -> store as list of strings
                    setattr(self,data[0].lower(),(data[1:]))
                else:
                    pass

        # defaults for parameters that may be missing in the CONTROL file
        if not hasattr(self,'start_date'):
            self.start_date=None
        if not hasattr(self,'end_date'):
            self.end_date=self.start_date
        if not hasattr(self,'accuracy'):
            self.accuracy=24
        if not hasattr(self,'omega'):
            self.omega='0'
        if not hasattr(self,'cwc'):
            self.cwc='0'
        if not hasattr(self,'omegadiff'):
            self.omegadiff='0'
        if not hasattr(self,'etadiff'):
            self.etadiff='0'

        # TYPE/TIME/STEP are always handled as lists
        if not isinstance(self.type, list):
            self.type = [self.type]
        if not isinstance(self.time, list):
            self.time = [self.time]
        if not isinstance(self.step, list):
            self.step = [self.step]

        # LEVEL must be the maximum level of one of the official ECMWF
        # model-level configurations; derive LEVEL/LEVELIST from each other
        max_level_list = [16, 19, 31, 40, 50, 60, 62, 91, 137]
        if not hasattr(self,'levelist'):
            if not hasattr(self,'level'):
                print('WARNING: neither levelist nor level specified in CONTROL file')
            else:
                # BUGFIX: self.level is a string; the original compared it
                # directly against the integer list and therefore never set
                # levelist here
                if int(self.level) in max_level_list:
                    self.levelist='1/to/'+self.level
        else:
            if not hasattr(self, 'level'):
                # derive LEVEL as the maximum entry of LEVELIST
                if 'to' in self.levelist.lower():
                    max_level = self.levelist.split('/')[2]
                else:
                    max_level = self.levelist.split('/')[-1]
                if int(max_level) in max_level_list:
                    self.level = max_level
                else:
                    print('ERROR: LEVEL-parameter could not be set')
                    print('LEVEL must be the maximum level of a specified level list from ECMWF, e.g.')
                    print('16, 19, 31, 40, 50, 60, 62, 91 or 137')
                    sys.exit(1)
            else:
                # both given: just check LEVEL for consistency
                if int(self.level) not in max_level_list:
                    print('ERROR: LEVEL-parameter is not properly selected')
                    print('LEVEL must be the maximum level of a specified level list from ECMWF, e.g.')
                    print('16, 19, 31, 40, 50, 60, 62, 91 or 137')
                    sys.exit(1)

        # maximum forecast step: either given explicitly or derived
        # from the step list
        if not hasattr(self,'maxstep'):
            self.maxstep=0
            for s in self.step:
                if int(s)>self.maxstep:
                    self.maxstep=int(s)
        else:
            self.maxstep=int(self.maxstep)

        if not hasattr(self,'prefix'):
            self.prefix='EN'
        if not hasattr(self,'makefile'):
            self.makefile=None
        if not hasattr(self,'basetime'):
            self.basetime=None
        if not hasattr(self,'date_chunk'):
            self.date_chunk='3'

        # installation directory of flex_extract (parent of this script)
        self.flexextractdir=os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))+'/../'
        self.exedir=self.flexextractdir+'src/'
        if not hasattr(self,'grib2flexpart'):
            self.grib2flexpart='0'
        if not hasattr(self,'flexpart_root_scripts'):
            self.flexpart_root_scripts=self.flexextractdir

        # defaults for flux (accumulated) data retrieval
        if not hasattr(self,'acctype'):
            print('... Control paramter ACCTYPE was not defined in CONTROL file.')
            print('Important for (forecast) flux data extraction.')
            try:
                if len(self.type) > 1 and self.type[1] != 'AN':
                    print('Use old setting by using TYPE[1].')
                    self.acctype = self.type[1]
            except:
                print('Use default value "FC"!')
                self.acctype='FC'

        if not hasattr(self,'acctime'):
            print('... Control paramter ACCTIME was not defined in CONTROL file.')
            print('Use default value "00/12" for flux forecast!')
            print('This may be different for single data sets, see documentation if retrieval fails!')
            self.acctime='00/12'

        if not hasattr(self, 'accmaxstep'):
            print('... Control paramter ACCMAXSTEP was not defined in CONTROL file.')
            print('Use default value "12" for flux forecast!')
            print('This may be different for single data sets, see documentation if retrieval fails!')
            self.accmaxstep='12'

        # analyses are always valid at step 0
        for i in range(len(self.type)):
            if self.type[i] == 'AN' and int(self.step[i]) != 0:
                print('Analysis retrievals must have STEP = 0 (setting to 0)')
                # BUGFIX: the original assigned self.type[i]=0 here,
                # clobbering the type; the message (and the later use of
                # the step list) show that the STEP entry must be reset
                self.step[i] = '000'

        if not hasattr(self,'request'):
            self.request='0'
        elif self.request != '0':
            # BUGFIX: request is stored as a string; the original compared
            # against the integer 0, which was always true
            # start with a fresh request log for this run
            marsfile = os.path.join(self.inputdir,
                                    'mars_requests.csv')
            if os.path.isfile(marsfile):
                silentremove(marsfile)

        return

    def __str__(self):
        '''Return all attributes as one "name: value" string for debugging.'''
        attrs = vars(self)
        return ', '.join("%s: %s" % item for item in attrs.items())

    def tolist(self):
        '''Return the parameters as a sorted list of "name value" strings,
        suitable for writing a CONTROL file; purely local attributes
        (expanded values, directories) are omitted.'''
        attrs=vars(self)
        l=list()
        for item in attrs.items():
            if '_expanded' in item[0]:
                pass
            elif 'exedir' in item[0]:
                pass
            elif 'flexpart_root_scripts' in item[0]:
                pass
            elif 'flexextractdir' in item[0]:
                pass
            else:
                if type(item[1]) is list:
                    stot=''
                    for s in item[1]:
                        stot+=s+' '
                    l.append("%s %s" % (item[0],stot))
                else:
                    l.append("%s %s" % item )
        return sorted(l)
---|
617 | |
---|
618 | |
---|
619 | |
---|
620 | ############################################################## |
---|
621 | # MARSretrieval class |
---|
622 | ############################################################## |
---|
class MARSretrieval:
    '''Represents a single MARS retrieval request.

    All MARS request keywords are stored as attributes; empty strings
    mean "not set" and are dropped before submission.  The request can be
    submitted through the ECMWF WebAPI (self.server) or, as a fallback,
    through the local command line client "mars".
    '''

    def __init__(self,server, public, marsclass="ei",type="",levtype="",levelist="",
                 repres="", date="",resol="",stream="",area="",time="",step="",expver="1",
                 number="",accuracy="", grid="", gaussian="", target="",param="",dataset="",expect=""):
        # server: ecmwfapi server object, or False to use the local
        #         'mars' command line client instead
        # public: flag; public datasets go through server.retrieve(),
        #         member-state retrievals through server.execute()
        # all remaining keywords map directly to MARS request keywords
        self.dataset=dataset
        self.marsclass=marsclass
        self.type=type
        self.levtype=levtype
        self.levelist=levelist
        self.repres=repres
        self.date=date
        self.resol=resol
        self.stream=stream
        self.area=area
        self.time=time
        self.step=step
        self.expver=expver
        self.target=target
        self.param=param
        self.number=number
        self.accuracy=accuracy
        self.grid=grid
        self.gaussian=gaussian
#        self.expect=expect
        self.server=server
        self.public=public

    def displayInfo(self):
        '''Print all request parameters (except the server object) to
        standard output, one "name: value" pair per line.'''
        attrs=vars(self)
        for item in attrs.items():
            # NOTE(review): ('server') is a plain string, not a tuple, so
            # this is a substring test; it works only because no other
            # attribute name happens to be a substring of 'server' -
            # confirm intent ('public' is NOT skipped here)
            if item[0] in ('server'):
                pass
            else:
                print item[0]+': '+str(item[1])

        return

    def print_infodata_csv(self, inputdir, request_number):
        '''
        @Description:
            Write all request parameter in alphabetical order into a "csv" file.

        @Input:
            self: instance of MarsRetrieval
                For description see class documentation.

            inputdir: string
                The path where all data from the retrievals are stored.

            request_number: integer
                Number of mars requests for flux and non-flux data.

        @Return:
            <nothing>
        '''

        # Get all class attributes and their values as a dictionary
        attrs = vars(self).copy()
        del attrs['server']
        del attrs['public']

        # open a file to store all requests to
        with open(os.path.join(inputdir, 'mars_requests.csv'), 'a') as f:
            f.write(str(request_number) + ', ')
            f.write(', '.join(str(attrs[key])
                    for key in sorted(attrs.iterkeys())))
            f.write('\n')

        return

    def dataRetrieve(self):
        '''Submit the MARS request.

        Uses the ecmwfapi server object when self.server is set, otherwise
        pipes a textual request into the local "mars" client.  Raises
        IOError when the retrieval returned an empty target file or the
        local client reported errors.
        '''
        attrs=vars(self).copy()

        # the bookkeeping attributes are not MARS keywords
        del attrs['server']
        del attrs['public']
        # 'class' is a reserved word in python, hence the marsclass alias
        mclass = attrs.get('marsclass')
        del attrs['marsclass']
        attrs['class'] = mclass
        target = attrs.get('target')
        if not int(self.public):
            # member-state retrievals pass the target separately to execute()
            del attrs['target']
        print 'target: ', target

        # drop empty keywords and stringify the remaining values
        delete_keys = []
        for k, v in attrs.iteritems():
            if v == '':
                delete_keys.append(str(k))
            else:
                attrs[k] = str(v)

        for k in delete_keys:
            del attrs[k]

        if self.server != False:
            try:
                if int(self.public):
                    print 'RETRIEVE DATA!'
                    self.server.retrieve(attrs)
                else:
                    print 'EXECUTE RETRIEVAL!'
                    self.server.execute(attrs, target)
            except:
                # best effort: report the failure, the empty-file check
                # below decides whether to raise
                e = sys.exc_info()[0]
                print "Error: ", e
                print 'MARS Request failed'#, have you already registered at apps.ecmwf.int?'
                #raise IOError
            if not int(self.public) and os.stat(target).st_size==0:
                print 'MARS Request returned no data - please check request'
                raise IOError
            elif int(self.public) and os.stat(attrs.get('target')).st_size==0:
                print 'MARS Request returned no data - please check request'
                raise IOError
        else:
            # no WebAPI available: build the textual request and pipe it
            # into the local mars command line client
            s='ret'
            for k,v in attrs.iteritems():
                s=s+','+k+'='+str(v)

            s+=',target="'+target+'"'
            p=subprocess.Popen(['mars'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE,bufsize=1)
            pout=p.communicate(input=s)[0]
            print pout.decode()
            if 'Some errors reported' in pout.decode():
                print 'MARS Request failed - please check request'
                raise IOError

            if os.stat(target).st_size==0:
                print 'MARS Request returned no data - please check request'
                raise IOError

        return
---|
755 | |
---|
756 | ############################################################## |
---|
757 | class EIFlexpart: |
---|
758 | 'class to retrieve Era Interim data for running FLEXPART' |
---|
759 | ############################################################## |
---|
760 | |
---|
    def __init__(self,c,fluxes=False):
        """Build the MARS retrieval plan (types/times/steps, parameter sets,
        grid/area strings) for one FLEXPART extraction run.

        c       -- CONTROL-file object; attributes read here include type/step/
                   time lists, maxstep, dtime, basetime, grid, area bounds,
                   marsclass, stream, resol, level(list), addpar, gauss, eta,
                   omega, cwc, wrf (several are optional, guarded by try).
        fluxes  -- True builds the accumulated-flux request (OG_acc_SL) only;
                   False builds all other field requests.
        """
        # Counter for the number of MARS requests generated (used for CSV logging).
        self.mreq_count = 0
        self.types=dict()
        try:
            # Pure forecast mode: more forecast steps requested than TYPE
            # entries -> replicate the first TYPE/TIME over steps 1..maxstep.
            if c.maxstep>len(c.type):
                c.type=[c.type[0]]    # AP changed this index from 1 to 0
                c.step=['{:0>3}'.format(int(c.step[0]))]
                c.time=[c.time[0]]
                for i in range(1,c.maxstep+1):
                    c.type.append(c.type[0])
                    c.step.append('{:0>3}'.format(i))
                    c.time.append(c.time[0])
        except:
            # NOTE(review): bare except silently ignores missing/short
            # c.type/c.step/c.time -- deliberate best-effort, kept as-is.
            pass

        self.inputdir=c.inputdir
        self.basetime=c.basetime
        self.dtime=c.dtime
        # self.mars maps FLEXPART time key '{step:03d}' -> [TYPE, 'HH', 'SSS'].
        self.mars={}
        i=0
        j=0
        if fluxes==True and c.maxstep<=24:
            # Flux (accumulated) fields: one type with a step range, and a
            # mars-key table for the two daily forecast base times 00/12.
            self.types[str(c.acctype)]={'times':str(c.acctime),
                                        'steps':'{}/to/{}/by/{}'.format(
                                            c.dtime,c.accmaxstep,c.dtime)}
            i=1
            for k in [0,12]:
                for j in range(int(c.dtime),13,int(c.dtime)):
                    # NOTE(review): uses c.type[1] (second TYPE entry) here --
                    # presumably the forecast type; confirm against CONTROL files.
                    self.mars['{:0>3}'.format(i*int(c.dtime))]=[c.type[1],'{:0>2}'.format(k),'{:0>3}'.format(j)]
                    i+=1
        else:
            # Non-flux fields: accept only TIME/TYPE/STEP combinations that
            # fall on the dtime raster and inside the basetime window.
            for ty,st,ti in zip(c.type,c.step,c.time):
                btlist=range(24)
                if c.basetime=='12':
                    btlist=[1,2,3,4,5,6,7,8,9,10,11,12]
                if c.basetime=='00':
                    btlist=[13,14,15,16,17,18,19,20,21,22,23,0]

                # NOTE(review): the loop iterates ty/st/ti from zip() but the
                # condition indexes c.time[i]/c.step[i] with a manual counter;
                # also the second operand repeats mod(c.step[i],dtime) twice --
                # one of them was presumably meant to test c.time[i]. Kept
                # byte-identical; verify before changing.
                if ((ty.upper() == 'AN' and mod(int(c.time[i]),int(c.dtime))==0) or \
                    (ty.upper() != 'AN' and mod(int(c.step[i]),int(c.dtime))==0 and \
                     mod(int(c.step[i]),int(c.dtime))==0) ) and \
                    (int(c.time[i]) in btlist or c.maxstep>24):
                    if ty not in self.types.keys():
                        self.types[ty]={'times':'','steps':''}
                    # Accumulate '/'-separated unique time and step lists per type.
                    if ti not in self.types[ty]['times']:
                        if len(self.types[ty]['times'])>0:
                            self.types[ty]['times']+='/'
                        self.types[ty]['times']+=ti
                    if st not in self.types[ty]['steps']:
                        if len(self.types[ty]['steps'])>0:
                            self.types[ty]['steps']+='/'
                        self.types[ty]['steps']+=st

                    self.mars['{:0>3}'.format(j)]=[ty,'{:0>2}'.format(int(ti)),'{:0>3}'.format(int(st))]
                    j+=int(c.dtime)

                i+=1

        # Different grids need different retrievals.
        # SH=Spherical Harmonics, GG=Gaussian Grid, OG=Output Grid,
        # ML=MultiLevel, SL=SingleLevel
        # NOTE(review): key 'OG_OROLSM_SL' (single underscore) never matches
        # the 'OG_OROLSM__SL' key assigned below -- this entry stays an empty
        # string and is skipped by the isinstance(pv,str) test in retrieve().
        self.params={'SH__ML':'','SH__SL':'',
                     'GG__ML':'','GG__SL':'',
                     'OG__ML':'','OG__SL':'',
                     'OG_OROLSM_SL':'','OG_acc_SL':''}

        self.marsclass=c.marsclass
        self.stream=c.stream
        self.number=c.number
        self.resol=c.resol
        if 'N' in c.grid:    # Gaussian output grid
            self.grid=c.grid
            self.area='G'
        else:
            # CONTROL stores grid/area in millidegrees; convert to degrees.
            self.grid='{}/{}'.format(int(c.grid)/1000.,int(c.grid)/1000.)
            self.area='{}/{}/{}/{}'.format(int(c.upper)/1000.,int(c.left)/1000.,int(c.lower)/1000.,int(c.right)/1000.)

        self.accuracy=c.accuracy
        self.level=c.level
        # Optional CONTROL attributes: fall back to defaults when absent.
        try:
            self.levelist=c.levelist
        except:
            self.levelist='1/to/'+c.level
        self.glevelist='1/to/'+c.level
        try:
            self.gaussian=c.gaussian
        except:
            self.gaussian=''
        try:
            self.dataset=c.dataset
        except:
            self.dataset=''
        try:
            self.expver=c.expver
        except:
            self.expver='1'
        # NOTE(review): self.number was already assigned unconditionally above;
        # this guarded re-assignment is redundant but harmless.
        try:
            self.number=c.number
        except:
            self.number='0'

        self.outputfilelist=[]


        # Now comes the nasty part that deals with the different scenarios:
        # 1) Calculation of etadot on
        #    a) Gaussian grid
        #    b) Output grid
        #    c) Output grid using parameter 77 retrieved from MARS
        # 3) Calculation/Retrieval of omega
        # 4) Download also data for WRF

        if fluxes==False:
            self.params['SH__SL']=['LNSP','ML','1','OFF']
            # Surface fields: 141=SD 151=MSL 164=TCC 165=10U 166=10V 167=2T
            # 168=2D 129=Z 172=LSM (GRIB table 128 param ids).
            self.params['OG__SL']=["141/151/164/165/166/167/168/129/172",'SFC','1',self.grid]
            if c.marsclass.upper() == 'EA' or c.marsclass.upper() == 'EP':
                # ERA5/CERA use 244 (FSR) instead of 173 (SR) for roughness.
                self.params['OG_OROLSM__SL']=["160/27/28/244",'SFC','1',self.grid]
            else:
                self.params['OG_OROLSM__SL']=["160/27/28/173",'SFC','1',self.grid]

            if len(c.addpar)>0:
                if c.addpar[0]=='/':
                    c.addpar=c.addpar[1:]
                self.params['OG__SL'][0]+='/'+'/'.join(c.addpar)
            self.params['OG__ML']=['T/Q','ML',self.levelist,self.grid]

            if c.gauss=='0' and c.eta=='1':    # the simplest case: etadot (77) from MARS
                self.params['OG__ML'][0]+='/U/V/77'
            elif c.gauss=='0' and c.eta=='0':    # not recommended (inaccurate)
                self.params['OG__ML'][0]+='/U/V'
            elif c.gauss=='1' and c.eta=='0':    # needed for data before 2008, or reanalysis
                self.params['GG__SL']=['Q','ML','1','{}'.format((int(self.resol)+1)/2)]
                self.params['SH__ML']=['U/V/D','ML',self.glevelist,'OFF']
            else:
                print 'Warning: This is a very costly parameter combination, use only for debugging!'
                self.params['GG__SL']=['Q','ML','1','{}'.format((int(self.resol)+1)/2)]
                self.params['GG__ML']=['U/V/D/77','ML',self.glevelist,'{}'.format((int(self.resol)+1)/2)]

            if c.omega=='1':
                self.params['OG__ML'][0]+='/W'

            try:    # add cloud water content if necessary
                if c.cwc=='1':
                    self.params['OG__ML'][0]+='/CLWC/CIWC'
            except:
                pass

            try:    # add vorticity and geopotential height for WRF if necessary
                if c.wrf=='1':
                    self.params['OG__ML'][0]+='/Z/VO'
                    if '/D' not in self.params['OG__ML'][0]:
                        self.params['OG__ML'][0]+='/D'
                    # WRF surface set: sp/skt/2t/10u/10v/2d/z/lsm/sst/ci/sd/
                    # stl1-4/swvl1-4 as table-128 param ids.
                    wrf_sfc='134/235/167/165/166/168/129/172/34/31/141/139/170/183/236/39/40/41/42'.upper()
                    lwrt_sfc=wrf_sfc.split('/')
                    for par in lwrt_sfc:
                        if par not in self.params['OG__SL'][0]:
                            self.params['OG__SL'][0]+='/'+par

            except:
                pass
        else:
            # Flux run: accumulated surface fields only.
            self.params['OG_acc_SL']=["LSP/CP/SSHF/EWSS/NSSS/SSR",'SFC','1',self.grid]


        return
---|
934 | def create_namelist(self,c,filename): |
---|
935 | |
---|
936 | # if inputdir=="": |
---|
937 | # self.inputdir='.' |
---|
938 | # else: |
---|
939 | # self.inputdir=inputdir |
---|
940 | |
---|
941 | self.inputdir=c.inputdir |
---|
942 | area=asarray(self.area.split('/')).astype(float) |
---|
943 | grid=asarray(self.grid.split('/')).astype(float) |
---|
944 | |
---|
945 | # if area[3]<0: |
---|
946 | # area[3]+=360 |
---|
947 | if area[1]>area[3]: |
---|
948 | area[1]-=360 |
---|
949 | zyk=abs((area[3]-area[1]-360.)+grid[1])<1.e-6 |
---|
950 | maxl=int((area[3]-area[1])/grid[1])+1 |
---|
951 | maxb=int((area[0]-area[2])/grid[0])+1 |
---|
952 | |
---|
953 | f=open(self.inputdir+'/'+filename,'w') |
---|
954 | f.write('&NAMGEN\n') |
---|
955 | f.write(',\n '.join(['maxl='+str(maxl),'maxb='+str(maxb), |
---|
956 | 'mlevel='+self.level,'mlevelist='+'"'+self.levelist+'"', |
---|
957 | 'mnauf='+self.resol,'metapar='+'77', |
---|
958 | 'rlo0='+str(area[1]),'rlo1='+str(area[3]),'rla0='+str(area[2]),'rla1='+str(area[0]), |
---|
959 | 'momega='+c.omega,'momegadiff='+c.omegadiff,'mgauss='+c.gauss, |
---|
960 | 'msmooth='+c.smooth,'meta='+c.eta,'metadiff='+c.etadiff,'mdpdeta='+c.dpdeta])) |
---|
961 | |
---|
962 | f.write('\n/\n') |
---|
963 | f.close() |
---|
964 | return |
---|
965 | |
---|
966 | |
---|
967 | def retrieve(self, server, public, dates, request, times, inputdir=''): |
---|
968 | self.dates=dates |
---|
969 | self.server=server |
---|
970 | self.public=public |
---|
971 | |
---|
972 | if inputdir=="": |
---|
973 | self.inputdir='.' |
---|
974 | else: |
---|
975 | self.inputdir=inputdir |
---|
976 | |
---|
977 | # Retrieve Q not for using Q but as a template for a reduced gaussian grid one date and time is enough |
---|
978 | # Take analysis at 00 |
---|
979 | # qdate=self.dates |
---|
980 | # idx=qdate.find("/") |
---|
981 | # if (idx >0): |
---|
982 | # qdate=self.dates[:idx] |
---|
983 | |
---|
984 | #QG= MARSretrieval(self.server, self.public, dataset=self.dataset, marsclass=self.marsclass, stream=self.stream, type="an", levtype="ML", levelist="1", |
---|
985 | #gaussian="reduced",grid='{}'.format((int(self.resol)+1)/2), resol=self.resol,accuracy=self.accuracy,target=self.inputdir+"/"+"QG.grb", |
---|
986 | #date=qdate, time="00",expver=self.expver, param="133.128") |
---|
987 | #QG.displayInfo() |
---|
988 | #QG.dataRetrieve() |
---|
989 | |
---|
990 | oro=False |
---|
991 | for ftype in self.types: |
---|
992 | for pk,pv in self.params.iteritems(): |
---|
993 | if isinstance(pv,str): # or pk!='GG__SL' : |
---|
994 | continue |
---|
995 | mftype=''+ftype |
---|
996 | mftime=self.types[ftype]['times'] |
---|
997 | mfstep=self.types[ftype]['steps'] |
---|
998 | mfdate=self.dates |
---|
999 | mfstream=self.stream |
---|
1000 | mftarget=self.inputdir+"/"+ftype+pk+'.'+self.dates.split('/')[0]+'.'+str(os.getppid())+'.'+str(os.getpid())+".grb" |
---|
1001 | |
---|
1002 | if pk=='OG__SL': |
---|
1003 | pass |
---|
1004 | if pk=='OG_OROLSM__SL': |
---|
1005 | if oro==False: |
---|
1006 | # in CERA20C there is no stream "OPER"! |
---|
1007 | if self.marsclass.upper() != 'EP': |
---|
1008 | mfstream='OPER' |
---|
1009 | mftype='AN' |
---|
1010 | mftime='00' |
---|
1011 | mfstep='000' |
---|
1012 | mfdate=self.dates.split('/')[0] |
---|
1013 | mftarget=self.inputdir+"/"+pk+'.'+mfdate+'.'+str(os.getppid())+'.'+str(os.getpid())+".grb" |
---|
1014 | oro=True |
---|
1015 | else: |
---|
1016 | continue |
---|
1017 | |
---|
1018 | if pk=='GG__SL' and pv[0]=='Q': |
---|
1019 | area="" |
---|
1020 | gaussian='reduced' |
---|
1021 | else: |
---|
1022 | area=self.area |
---|
1023 | gaussian=self.gaussian |
---|
1024 | |
---|
1025 | if self.basetime==None: |
---|
1026 | # increase number of mars requests |
---|
1027 | self.mreq_count += 1 |
---|
1028 | MR= MARSretrieval(self.server, self.public, dataset=self.dataset, marsclass=self.marsclass, stream=mfstream, |
---|
1029 | type=mftype, levtype=pv[1], levelist=pv[2],resol=self.resol, gaussian=gaussian, |
---|
1030 | accuracy=self.accuracy,grid=pv[3],target=mftarget,area=area, |
---|
1031 | date=mfdate, time=mftime,number=self.number,step=mfstep, expver=self.expver, param=pv[0]) |
---|
1032 | |
---|
1033 | if request == "0": |
---|
1034 | MR.displayInfo() |
---|
1035 | MR.dataRetrieve() |
---|
1036 | elif request == "1": |
---|
1037 | MR.print_infodata_csv(self.inputdir, self.mreq_count) |
---|
1038 | elif request == "2": |
---|
1039 | MR.print_infodata_csv(self.inputdir, self.mreq_count) |
---|
1040 | MR.displayInfo() |
---|
1041 | MR.dataRetrieve() |
---|
1042 | # The whole else section is only necessary for operational scripts. It could be removed |
---|
1043 | else: # check if mars job requests fields beyond basetime. If yes eliminate those fields since they may not |
---|
1044 | # be accessible with user's credentials |
---|
1045 | sm1=-1 |
---|
1046 | if 'by' in mfstep: |
---|
1047 | sm1=2 |
---|
1048 | tm1=-1 |
---|
1049 | if 'by' in mftime: |
---|
1050 | tm1=2 |
---|
1051 | maxtime=datetime.datetime.strptime(mfdate.split('/')[-1]+mftime.split('/')[tm1],'%Y%m%d%H')+ \ |
---|
1052 | datetime.timedelta(hours=int(mfstep.split('/')[sm1])) |
---|
1053 | |
---|
1054 | elimit=datetime.datetime.strptime(mfdate.split('/')[-1]+self.basetime,'%Y%m%d%H') |
---|
1055 | |
---|
1056 | if self.basetime=='12': |
---|
1057 | if 'acc' in pk: |
---|
1058 | |
---|
1059 | # Strategy: if maxtime-elimit>=24h reduce date by 1, |
---|
1060 | # if 12h<=maxtime-elimit<12h reduce time for last date |
---|
1061 | # if maxtime-elimit<12h reduce step for last time |
---|
1062 | # A split of the MARS job into 2 is likely necessary. |
---|
1063 | maxtime=elimit-datetime.timedelta(hours=24) |
---|
1064 | mfdate='/'.join(('/'.join(mfdate.split('/')[:-1]),datetime.datetime.strftime(maxtime,'%Y%m%d'))) |
---|
1065 | # increase number of mars requests |
---|
1066 | self.mreq_count += 1 |
---|
1067 | MR= MARSretrieval(self.server, self.public, dataset=self.dataset, marsclass=self.marsclass, stream=self.stream, |
---|
1068 | type=mftype, levtype=pv[1], levelist=pv[2],resol=self.resol, gaussian=gaussian, |
---|
1069 | accuracy=self.accuracy,grid=pv[3],target=mftarget,area=area, |
---|
1070 | date=mfdate, time=mftime,number=self.number,step=mfstep, expver=self.expver, param=pv[0]) |
---|
1071 | |
---|
1072 | if request == "0": |
---|
1073 | MR.displayInfo() |
---|
1074 | MR.dataRetrieve() |
---|
1075 | elif request == "1": |
---|
1076 | MR.print_infodata_csv(self.inputdir, self.mreq_count) |
---|
1077 | elif request == "2": |
---|
1078 | MR.print_infodata_csv(self.inputdir, self.mreq_count) |
---|
1079 | MR.displayInfo() |
---|
1080 | MR.dataRetrieve() |
---|
1081 | |
---|
1082 | maxtime=elimit-datetime.timedelta(hours=12) |
---|
1083 | mfdate=datetime.datetime.strftime(maxtime,'%Y%m%d') |
---|
1084 | mftime='00' |
---|
1085 | mftarget=self.inputdir+"/"+ftype+pk+'.'+mfdate+'.'+str(os.getppid())+'.'+str(os.getpid())+".grb" |
---|
1086 | # increase number of mars requests |
---|
1087 | self.mreq_count += 1 |
---|
1088 | MR= MARSretrieval(self.server, self.public, dataset=self.dataset, marsclass=self.marsclass, stream=self.stream, |
---|
1089 | type=mftype, levtype=pv[1], levelist=pv[2],resol=self.resol, gaussian=gaussian, |
---|
1090 | accuracy=self.accuracy,grid=pv[3],target=mftarget,area=area, |
---|
1091 | date=mfdate, time=mftime,number=self.number,step=mfstep, expver=self.expver, param=pv[0]) |
---|
1092 | if request == "0": |
---|
1093 | MR.displayInfo() |
---|
1094 | MR.dataRetrieve() |
---|
1095 | elif request == "1": |
---|
1096 | MR.print_infodata_csv(self.inputdir, self.mreq_count) |
---|
1097 | elif request == "2": |
---|
1098 | MR.print_infodata_csv(self.inputdir, self.mreq_count) |
---|
1099 | MR.displayInfo() |
---|
1100 | MR.dataRetrieve() |
---|
1101 | else: |
---|
1102 | # increase number of mars requests |
---|
1103 | self.mreq_count += 1 |
---|
1104 | MR= MARSretrieval(self.server, self.public, dataset=self.dataset, marsclass=self.marsclass, stream=self.stream, |
---|
1105 | type=mftype, levtype=pv[1], levelist=pv[2],resol=self.resol, gaussian=gaussian, |
---|
1106 | accuracy=self.accuracy,grid=pv[3],target=mftarget,area=area, |
---|
1107 | date=mfdate, time=mftime,number=self.number,step=mfstep, expver=self.expver, param=pv[0]) |
---|
1108 | |
---|
1109 | if request == "0": |
---|
1110 | MR.displayInfo() |
---|
1111 | MR.dataRetrieve() |
---|
1112 | elif request == "1": |
---|
1113 | MR.print_infodata_csv(self.inputdir, self.mreq_count) |
---|
1114 | elif request == "2": |
---|
1115 | MR.print_infodata_csv(self.inputdir, self.mreq_count) |
---|
1116 | MR.displayInfo() |
---|
1117 | MR.dataRetrieve() |
---|
1118 | else: |
---|
1119 | maxtime=elimit-datetime.timedelta(hours=24) |
---|
1120 | mfdate=datetime.datetime.strftime(maxtime,'%Y%m%d') |
---|
1121 | |
---|
1122 | mftimesave=''.join(mftime) |
---|
1123 | |
---|
1124 | if '/' in mftime: |
---|
1125 | times=mftime.split('/') |
---|
1126 | while int(times[0])+int(mfstep.split('/')[0])<=12 and pk!='OG_OROLSM__SL' and 'acc' not in pk: |
---|
1127 | times=times[1:] |
---|
1128 | if len(times)>1: |
---|
1129 | mftime='/'.join(times) |
---|
1130 | else: |
---|
1131 | mftime=times[0] |
---|
1132 | # increase number of mars requests |
---|
1133 | self.mreq_count += 1 |
---|
1134 | MR= MARSretrieval(self.server, self.public, dataset=self.dataset, marsclass=self.marsclass, stream=self.stream, |
---|
1135 | type=mftype, levtype=pv[1], levelist=pv[2],resol=self.resol, gaussian=gaussian, |
---|
1136 | accuracy=self.accuracy,grid=pv[3],target=mftarget,area=area, |
---|
1137 | date=mfdate, time=mftime,number=self.number,step=mfstep, expver=self.expver, param=pv[0]) |
---|
1138 | |
---|
1139 | if request == "0": |
---|
1140 | MR.displayInfo() |
---|
1141 | MR.dataRetrieve() |
---|
1142 | elif request == "1": |
---|
1143 | MR.print_infodata_csv(self.inputdir, self.mreq_count) |
---|
1144 | elif request == "2": |
---|
1145 | MR.print_infodata_csv(self.inputdir, self.mreq_count) |
---|
1146 | MR.displayInfo() |
---|
1147 | MR.dataRetrieve() |
---|
1148 | |
---|
1149 | if int(mftimesave.split('/')[0])==0 and int(mfstep.split('/')[0])==0 and pk!='OG_OROLSM__SL': |
---|
1150 | mfdate=datetime.datetime.strftime(elimit,'%Y%m%d') |
---|
1151 | mftime='00' |
---|
1152 | mfstep='000' |
---|
1153 | mftarget=self.inputdir+"/"+ftype+pk+'.'+mfdate+'.'+str(os.getppid())+'.'+str(os.getpid())+".grb" |
---|
1154 | # increase number of mars requests |
---|
1155 | self.mreq_count += 1 |
---|
1156 | MR= MARSretrieval(self.server, self.public, dataset=self.dataset, marsclass=self.marsclass, stream=self.stream, |
---|
1157 | type=mftype, levtype=pv[1], levelist=pv[2],resol=self.resol, gaussian=gaussian, |
---|
1158 | accuracy=self.accuracy,grid=pv[3],target=mftarget,area=area, |
---|
1159 | date=mfdate, time=mftime,number=self.number,step=mfstep, expver=self.expver, param=pv[0]) |
---|
1160 | |
---|
1161 | if request == "0": |
---|
1162 | MR.displayInfo() |
---|
1163 | MR.dataRetrieve() |
---|
1164 | elif request == "1": |
---|
1165 | MR.print_infodata_csv(self.inputdir, self.mreq_count) |
---|
1166 | elif request == "2": |
---|
1167 | MR.print_infodata_csv(self.inputdir, self.mreq_count) |
---|
1168 | MR.displayInfo() |
---|
1169 | MR.dataRetrieve() |
---|
1170 | |
---|
1171 | |
---|
1172 | if request == "0" or request == "2": |
---|
1173 | print('MARS retrieve done ... ') |
---|
1174 | elif request == "1": |
---|
1175 | print('MARS request printed ...') |
---|
1176 | |
---|
1177 | def getFlexpartTime(self, type,step, time): |
---|
1178 | cstep='{:0>3}'.format(step) |
---|
1179 | ctime='{:0>2}'.format(int(time/100)) |
---|
1180 | |
---|
1181 | ctype = str(type).upper() |
---|
1182 | myinfo = [ctype,ctime, cstep] |
---|
1183 | cflextime = None |
---|
1184 | for t, marsinfo in self.mars.items(): |
---|
1185 | if myinfo == marsinfo: |
---|
1186 | cflextime=t |
---|
1187 | return cflextime |
---|
1188 | |
---|
    def process_output(self, c):
        """Post-process the FLEXPART input files listed in self.outputfilelist:
        optional GRIB2 conversion, ectrans/ECFS transfer, move to the output
        directory, and optional conversion to .fp format via grib2flexpart.

        c -- CONTROL object; reads format, ecapi, ectrans, ecstorage, ecfsdir,
             gateway, destination, outputdir, inputdir, grib2flexpart,
             flexpart_root_scripts.
        """
        print 'Postprocessing:\n Format: {}\n'.format(c.format)
        if c.ecapi==False:
            # Running on ECMWF servers: transfer settings apply.
            print 'ecstorage: {}\n ecfsdir: {}\n'.format(c.ecstorage,c.ecfsdir)
            if not hasattr(c,'gateway'):
                c.gateway=os.getenv('GATEWAY')
            if not hasattr(c,'destination'):
                c.destination=os.getenv('DESTINATION')
            print 'ectrans: {}\n gateway: {}\n destination: {}\n '.format(c.ectrans, c.gateway,c.destination)
        print 'Output filelist: \n',self.outputfilelist

        if c.format.lower()=='grib2':
            # Convert each file in place via a temporary '<file>_2'.
            for ofile in self.outputfilelist:
                p=subprocess.check_call(['grib_set','-s','edition=2,productDefinitionTemplateNumber=8',ofile,ofile+'_2'])
                p=subprocess.check_call(['mv',ofile+'_2',ofile])

        if int(c.ectrans)==1 and c.ecapi==False:
            # Push files to the member-state gateway via ectrans.
            for ofile in self.outputfilelist:
                p=subprocess.check_call(['ectrans','-overwrite','-gateway',c.gateway,
                                         '-remote',c.destination,'-source',ofile])
                print 'ectrans:',p
        if int(c.ecstorage)==1 and c.ecapi==False:
            # Archive files to ECFS.
            for ofile in self.outputfilelist:
                p=subprocess.check_call(['ecp','-o',ofile,os.path.expandvars(c.ecfsdir)])

        if c.outputdir!=c.inputdir:
            for ofile in self.outputfilelist:
                p=subprocess.check_call(['mv',ofile,c.outputdir])

        # Example AVAILABLE line: "20131107 000000      EN13110700 ON DISC"
        if c.grib2flexpart=='1':
            # Generate the AVAILABLE file FLEXPART needs.
            f=open(c.outputdir+'/'+'AVAILABLE','w')
            clist=[]
            for ofile in self.outputfilelist:
                fname=ofile.split('/')
                if '.' in fname[-1]:
                    # Forecast-style name: <prefix>YYMMDD.HH.SSS -> add step.
                    l=fname[-1].split('.')
                    timestamp=datetime.datetime.strptime(l[0][-6:]+l[1],'%y%m%d%H')
                    timestamp+=datetime.timedelta(hours=int(l[2]))
                    cdate=datetime.datetime.strftime(timestamp,'%Y%m%d')
                    chms=datetime.datetime.strftime(timestamp,'%H%M%S')

                else:
                    # Analysis-style name ending in YYMMDDHH.
                    cdate='20'+fname[-1][-8:-2]
                    chms=fname[-1][-2:]+'0000'
                clist.append(cdate+' '+chms+' '*6+fname[-1]+' '*14+'ON DISC')
            clist.sort()
            f.write('\n'.join(clist)+'\n')
            f.close()

            # Write the 'pathnames' file expected by FLEXPART.
            pwd=os.path.abspath(c.outputdir)
            f=open(pwd+'/pathnames','w')
            f.write(pwd+'/Options/\n')
            f.write(pwd+'/\n')
            f.write(pwd+'/\n')
            f.write(pwd+'/AVAILABLE\n')
            f.write('==================\n')
            f.close()

            if not os.path.exists(pwd+'/Options'):
                os.makedirs(pwd+'/Options')
            # Patch the template COMMAND file: replace the two lines after
            # LDIRECT with the first and last timestamps of the run.
            f=open(os.path.expandvars(os.path.expanduser(c.flexpart_root_scripts))+'/../Options/COMMAND','r')
            lflist=f.read().split('\n')
            i=0
            for l in lflist:
                if 'LDIRECT' in l.upper():
                    break
                i+=1

            clist.sort()
            lflist=lflist[:i+1]+[clist[0][:16],clist[-1][:16]]+lflist[i+3:]
            g=open(pwd+'/Options/COMMAND','w')
            g.write('\n'.join(lflist)+'\n')
            g.close()

            # grib2flexpart must run from the output directory.
            os.chdir(c.outputdir)
            p=subprocess.check_call([os.path.expandvars(os.path.expanduser(c.flexpart_root_scripts))+'/../FLEXPART_PROGRAM/grib2flexpart',
                                     'useAvailable','.'])
            os.chdir(pwd)
---|
1269 | |
---|
    def create(self, inputfiles, c,):
        """Demultiplex the retrieved GRIB files into the fort.NN unit files
        expected by CONVERT2, run CONVERT2 per date/time/step, and assemble
        the final FLEXPART input file (plus optional OMEGA and WRF output).

        inputfiles -- object with a .files list of retrieved GRIB files
        c          -- CONTROL object (flexextractdir, inputdir, outputdir,
                      basetime, start_date, end_date, dtime, wrf, cwc, omega,
                      eta, exedir, prefix, maxstep, ppid)
        """
        table128=init128(c.flexextractdir+'/grib_templates/ecmwf_grib1_table_128')
        # Parameter ids of the WRF surface set (resolved via table 128).
        wrfpars=toparamId('sp/mslp/skt/2t/10u/10v/2d/z/lsm/sst/ci/sd/stl1/stl2/stl3/stl4/swvl1/swvl2/swvl3/swvl4',table128)
        # Index all GRIB messages by date/time/step so each combination can
        # be processed as one CONVERT2 run.
        index_keys=["date","time","step"]
        indexfile=c.inputdir+"/date_time_stepRange.idx"
        silentremove(indexfile)
        grib=GribTools(inputfiles.files)
        iid=grib.index(index_keys=index_keys, index_file = indexfile)

        print 'index done...'
        # Fortran unit files CONVERT2 reads: 10=U/V 11=T 12=LNSP 13=D(sh)
        # 16=other surface 17=Q 19=W 21=etadot(77) 22=combined cloud water.
        fdict={'10':None,'11':None,'12':None,'13':None,'16':None,'17':None,'19':None,'21':None,'22':None,'20':None}
        for f in fdict.keys():
            silentremove(c.inputdir+"/fort."+f)

        index_vals = []
        for key in index_keys:
            key_vals = grib_index_get(iid,key)
            print key_vals

            index_vals.append(key_vals)

        # One pass per (date, time, step) combination.
        for prod in product(*index_vals):
            for i in range(len(index_keys)):
                grib_index_select(iid,index_keys[i],prod[i])


            gid = grib_new_from_index(iid)
            # hid remembers whether this combination had any message at all.
            hid = gid
            cflextime = None
            # (Re)open the fort.NN unit files for this combination.
            for k,f in fdict.iteritems():
                fdict[k] = open(c.inputdir+'/fort.'+k,'w')
            if gid is not None:
                cdate = str(grib_get(gid, 'date'))
                time = grib_get(gid, 'time')
                type = grib_get(gid, 'type')
                step = grib_get(gid, 'step')
                # Valid time = date + time + step.
                timestamp=datetime.datetime.strptime(cdate+'{:0>2}'.format(time/100),'%Y%m%d%H')
                timestamp+=datetime.timedelta(hours=int(step))

                cdateH=datetime.datetime.strftime(timestamp,'%Y%m%d%H')
                chms=datetime.datetime.strftime(timestamp,'%H%M%S')

                # Operational mode: skip combinations outside the
                # [slimit, elimit] window derived from basetime.
                if c.basetime !=None:
                    slimit=datetime.datetime.strptime(c.start_date+'00','%Y%m%d%H')
                    bt='23'
                    if c.basetime=='00':
                        bt='00'
                        slimit=datetime.datetime.strptime(c.end_date+bt,'%Y%m%d%H')-datetime.timedelta(hours=12-int(c.dtime))

                    if c.basetime=='12':
                        bt='12'
                        slimit=datetime.datetime.strptime(c.end_date+bt,'%Y%m%d%H')-datetime.timedelta(hours=12-int(c.dtime))

                    elimit=datetime.datetime.strptime(c.end_date+bt,'%Y%m%d%H')

                    if timestamp<slimit or timestamp>elimit:
                        continue



                try:
                    # Open a new WRF output file whenever the date changes.
                    # 'olddate' in locals() distinguishes the first iteration.
                    if c.wrf=='1':
                        if 'olddate' not in locals():
                            fwrf = open(c.outputdir+'/WRF'+cdate+'.{:0>2}'.format(time)+'.000.grb2','w')
                            olddate=cdate[:]
                        else:
                            if cdate!=olddate:
                                fwrf = open(c.outputdir+'/WRF'+cdate+'.{:0>2}'.format(time)+'.000.grb2','w')
                                olddate=cdate[:]
                except AttributeError:
                    # c has no 'wrf' attribute -> WRF output disabled.
                    pass


            # Route every message of this combination to its fort.NN file.
            savedfields=[]
            while 1:
                if gid is None: break
                paramId = grib_get(gid, 'paramId')
                gridtype = grib_get(gid, 'gridType')
                datatype = grib_get(gid, 'dataType')
                levtype = grib_get(gid, 'typeOfLevel')
                if paramId == 133 and gridtype=='reduced_gg':
                    # Specific humidity (Q.grb) is used as a grid template
                    # only, so we need the first one we "meet".
                    fout=open(c.inputdir+'/fort.18','w')
                    grib_write(gid,fout)
                    fout.close()
                elif paramId == 131 or paramId == 132:
                    # U/V wind components.
                    grib_write(gid, fdict['10'])
                elif paramId == 130:
                    # Temperature.
                    grib_write(gid, fdict['11'])
                elif paramId == 133 and gridtype!='reduced_gg':
                    # Specific humidity on the output grid.
                    grib_write(gid, fdict['17'])
                elif paramId == 152:
                    # Log of surface pressure.
                    grib_write(gid, fdict['12'])
                elif paramId == 155 and gridtype=='sh':
                    # Divergence in spherical harmonics.
                    grib_write(gid, fdict['13'])
                elif paramId in [129,138,155] and levtype=='hybrid' and c.wrf=='1':
                    # Z/VO/D on model levels are only needed for WRF output
                    # (written below), not for CONVERT2.
                    pass
                elif paramId == 246 or paramId == 247:
                    # Cloud liquid (246) and ice (247) water content:
                    # accumulate both into one field, relabel as 201031 and
                    # write once, when the ice message arrives.
                    if paramId==246:
                        clwc=grib_get_values(gid)
                    else:
                        clwc+=grib_get_values(gid)
                        grib_set_values(gid,clwc)
                        grib_set(gid,'paramId',201031)
                        grib_write(gid, fdict['22'])

                elif paramId == 135:
                    # Vertical velocity (omega).
                    grib_write(gid, fdict['19'])
                elif paramId == 77:
                    # Etadot retrieved directly from MARS.
                    grib_write(gid, fdict['21'])
                else:
                    # All remaining surface fields, deduplicated.
                    if paramId not in savedfields:
                        grib_write(gid, fdict['16'])
                        savedfields.append(paramId)
                    else:
                        print 'duplicate '+str(paramId)+' not written'

                try:
                    # Mirror selected fields into the WRF GRIB2 file.
                    if c.wrf=='1':
                        if levtype=='hybrid':
                            if paramId in [129,130,131,132,133,138,155]:
                                grib_write(gid,fwrf)
                        else:
                            if paramId in wrfpars:
                                grib_write(gid,fwrf)
                except AttributeError:
                    pass



                grib_release(gid)
                gid = grib_new_from_index(iid)

            for f in fdict.values():
                f.close()

            # Call CONVERT2 for this date/time/step and assemble its output.
            if hid is not None:
                pwd=os.getcwd()
                os.chdir(c.inputdir)
                if os.stat('fort.21').st_size==0 and int(c.eta)==1:
                    print 'Parameter 77 (etadot) is missing, most likely it is not available for this type or date/time\n'
                    print 'Check parameters CLASS, TYPE, STREAM, START_DATE\n'
                    myerror(c,'fort.21 is empty while parameter eta is set to 1 in CONTROL file')

                p=subprocess.check_call([os.path.expandvars(os.path.expanduser(c.exedir))+'/CONVERT2'],shell=True)
                os.chdir(pwd)
                # Concatenate fort.15 (CONVERT2 output), optional fort.22
                # (cloud water), the matching flux file, fort.16 (surface)
                # and the orography/LSM file into the FLEXPART input file.
                fnout=c.inputdir+'/'+c.prefix
                if c.maxstep>12:
                    # Long forecasts: name by analysis date/time + step.
                    suffix=cdate[2:8]+'.{:0>2}'.format(time/100)+'.{:0>3}'.format(step)
                else:
                    # Otherwise: name by valid date/time.
                    suffix=cdateH[2:10]

                fnout+=suffix
                print "outputfile = " + fnout
                self.outputfilelist.append(fnout)    # needed for final processing
                fout = open(fnout,'wb')
                shutil.copyfileobj(open(c.inputdir+'/fort.15','rb'), fout)
                if c.cwc=='1':
                    shutil.copyfileobj(open(c.inputdir+'/fort.22','rb'), fout)
                shutil.copyfileobj(open(c.inputdir+'/flux'+cdate[0:2]+suffix,'rb'), fout)
                shutil.copyfileobj(open(c.inputdir+'/fort.16','rb'), fout)
                orolsm=glob.glob(c.inputdir+'/OG_OROLSM__SL.*.'+c.ppid+'*')[0]
                shutil.copyfileobj(open(orolsm,'rb'), fout)
                fout.close()
                if c.omega=='1':
                    # NOTE(review): this handle is never closed explicitly;
                    # it is released when fout is rebound/GC'd.
                    fnout=c.outputdir+'/OMEGA'+suffix
                    fout = open(fnout,'wb')
                    shutil.copyfileobj(open(c.inputdir+'/fort.25','rb'), fout)



        try:
            if c.wrf=='1':
                fwrf.close()
        except:
            pass

        grib_index_release(iid)
---|
1458 | |
---|
1459 | def deacc_fluxes(self, inputfiles, c): |
---|
1460 | |
---|
1461 | table128=init128(c.flexextractdir+'/grib_templates/ecmwf_grib1_table_128') |
---|
1462 | pars=toparamId(self.params['OG_acc_SL'][0],table128) |
---|
1463 | index_keys=["date","time","step"] |
---|
1464 | indexfile=c.inputdir+"/date_time_stepRange.idx" |
---|
1465 | silentremove(indexfile) |
---|
1466 | grib=GribTools(inputfiles.files) |
---|
1467 | iid=grib.index(index_keys=index_keys, index_file = indexfile) |
---|
1468 | |
---|
1469 | print 'index done...' |
---|
1470 | |
---|
1471 | index_vals = [] |
---|
1472 | for key in index_keys: |
---|
1473 | key_vals = grib_index_get(iid,key) |
---|
1474 | print key_vals |
---|
1475 | |
---|
1476 | l=[] |
---|
1477 | for k in key_vals: |
---|
1478 | l.append(int(k)) |
---|
1479 | l.sort() |
---|
1480 | key_vals=[] |
---|
1481 | for k in l: |
---|
1482 | key_vals.append(str(k)) |
---|
1483 | |
---|
1484 | index_vals.append(key_vals) |
---|
1485 | |
---|
1486 | |
---|
1487 | valsdict={} |
---|
1488 | svalsdict={} |
---|
1489 | stepsdict={} |
---|
1490 | for p in pars: |
---|
1491 | valsdict[str(p)]=[] |
---|
1492 | svalsdict[str(p)]=[] |
---|
1493 | stepsdict[str(p)]=[] |
---|
1494 | |
---|
1495 | for prod in product(*index_vals): |
---|
1496 | for i in range(len(index_keys)): |
---|
1497 | grib_index_select(iid,index_keys[i],prod[i]) |
---|
1498 | |
---|
1499 | #for k,f in fdict.iteritems(): |
---|
1500 | #fdict[k] = open('fort.'+k,'w') |
---|
1501 | gid = grib_new_from_index(iid) |
---|
1502 | hid = gid |
---|
1503 | cflextime = None |
---|
1504 | if gid is not None: |
---|
1505 | cdate = grib_get(gid, 'date') |
---|
1506 | #cyear = cdate[:4] |
---|
1507 | #cmonth = cdate[4:6] |
---|
1508 | #cday = cdate[6:8] |
---|
1509 | time = grib_get(gid, 'time') |
---|
1510 | type = grib_get(gid, 'type') |
---|
1511 | step = grib_get(gid, 'step') |
---|
1512 | # date+time+step-2*dtime (since interpolated value valid for step-2*dtime) |
---|
1513 | sdate=datetime.datetime(year=cdate/10000,month=mod(cdate,10000)/100, |
---|
1514 | day=mod(cdate,100),hour=time/100) |
---|
1515 | fdate=sdate+datetime.timedelta(hours=step-2*int(c.dtime)) |
---|
1516 | sdates=sdate+datetime.timedelta(hours=step) |
---|
1517 | else: |
---|
1518 | break |
---|
1519 | |
---|
1520 | fnout=c.inputdir+'/' |
---|
1521 | if c.maxstep>12: |
---|
1522 | fnout+='flux'+sdate.strftime('%Y%m%d')+'.{:0>2}'.format(time/100)+'.{:0>3}'.format(step-2*int(c.dtime)) |
---|
1523 | gnout=c.inputdir+'/flux'+sdate.strftime('%Y%m%d')+'.{:0>2}'.format(time/100)+'.{:0>3}'.format(step-int(c.dtime)) |
---|
1524 | hnout=c.inputdir+'/flux'+sdate.strftime('%Y%m%d')+'.{:0>2}'.format(time/100)+'.{:0>3}'.format(step) |
---|
1525 | g=open(gnout,'w') |
---|
1526 | h=open(hnout,'w') |
---|
1527 | else: |
---|
1528 | fnout+='flux'+fdate.strftime('%Y%m%d%H') |
---|
1529 | gnout=c.inputdir+'/flux'+(fdate+datetime.timedelta(hours=int(c.dtime))).strftime('%Y%m%d%H') |
---|
1530 | hnout=c.inputdir+'/flux'+sdates.strftime('%Y%m%d%H') |
---|
1531 | g=open(gnout,'w') |
---|
1532 | h=open(hnout,'w') |
---|
1533 | print "outputfile = " + fnout |
---|
1534 | f=open(fnout,'w') |
---|
1535 | |
---|
1536 | while 1: |
---|
1537 | if gid is None: break |
---|
1538 | cparamId = str(grib_get(gid, 'paramId')) |
---|
1539 | step = grib_get(gid, 'step') |
---|
1540 | atime = grib_get(gid, 'time') |
---|
1541 | ni=grib_get(gid, 'Ni') |
---|
1542 | nj=grib_get(gid, 'Nj') |
---|
1543 | if cparamId in valsdict.keys(): |
---|
1544 | values = grib_get_values(gid) |
---|
1545 | vdp=valsdict[cparamId] |
---|
1546 | svdp=svalsdict[cparamId] |
---|
1547 | sd=stepsdict[cparamId] |
---|
1548 | |
---|
1549 | if cparamId=='142' or cparamId=='143': |
---|
1550 | fak=1./1000. |
---|
1551 | else: |
---|
1552 | fak=3600. |
---|
1553 | |
---|
1554 | values=(reshape(values,(nj,ni))).flatten()/fak |
---|
1555 | vdp.append(values[:]) # save the accumulated values |
---|
1556 | if c.marsclass.upper() == 'EA' or \ |
---|
1557 | step<=int(c.dtime): |
---|
1558 | svdp.append(values[:]/int(c.dtime)) |
---|
1559 | else: |
---|
1560 | svdp.append((vdp[-1]-vdp[-2])/int(c.dtime)) |
---|
1561 | |
---|
1562 | print cparamId,atime,step,len(values),values[0],std(values) |
---|
1563 | #svdp.append(values[:]) # save the 1-hourly or 3-hourly specific values |
---|
1564 | sd.append(step) |
---|
1565 | if len(svdp)>=3: |
---|
1566 | if len(svdp)>3: |
---|
1567 | if cparamId=='142' or cparamId=='143': |
---|
1568 | values=darain(svdp) |
---|
1569 | else: |
---|
1570 | values=dapoly(svdp) |
---|
1571 | |
---|
1572 | if not (step==c.maxstep and c.maxstep>12 or sdates==elimit): |
---|
1573 | vdp.pop(0) |
---|
1574 | svdp.pop(0) |
---|
1575 | else: |
---|
1576 | if c.maxstep>12: |
---|
1577 | values=svdp[1] |
---|
1578 | else: |
---|
1579 | values=svdp[0] |
---|
1580 | |
---|
1581 | grib_set_values(gid, values) |
---|
1582 | if c.maxstep>12: |
---|
1583 | grib_set(gid,'step',max(0,step-2*int(c.dtime))) |
---|
1584 | else: |
---|
1585 | grib_set(gid,'step',0) |
---|
1586 | grib_set(gid,'time',fdate.hour*100) |
---|
1587 | grib_set(gid,'date',fdate.year*10000+fdate.month*100+fdate.day) |
---|
1588 | grib_write(gid, f) |
---|
1589 | |
---|
1590 | if c.basetime is not None: |
---|
1591 | elimit=datetime.datetime.strptime(c.end_date+c.basetime,'%Y%m%d%H') |
---|
1592 | else: |
---|
1593 | elimit=sdate+datetime.timedelta(2*int(c.dtime)) |
---|
1594 | |
---|
1595 | # squeeze out information of last two steps contained in svdp |
---|
1596 | # if step+int(c.dtime)==c.maxstep and c.maxstep>12 or sdates+datetime.timedelta(hours=int(c.dtime))>=elimit: |
---|
1597 | # Note that svdp[0] has not been popped in this case |
---|
1598 | |
---|
1599 | |
---|
1600 | if step==c.maxstep and c.maxstep>12 or sdates==elimit: |
---|
1601 | values=svdp[3] |
---|
1602 | grib_set_values(gid, values) |
---|
1603 | grib_set(gid,'step',0) |
---|
1604 | truedatetime=fdate+datetime.timedelta(hours=2*int(c.dtime)) |
---|
1605 | grib_set(gid,'time',truedatetime.hour*100) |
---|
1606 | grib_set(gid,'date',truedatetime.year*10000+truedatetime.month*100+truedatetime.day) |
---|
1607 | grib_write(gid, h) |
---|
1608 | |
---|
1609 | #values=(svdp[1]+svdp[2])/2. |
---|
1610 | if cparamId=='142' or cparamId=='143': |
---|
1611 | values=darain(list(reversed(svdp))) |
---|
1612 | else: |
---|
1613 | values=dapoly(list(reversed(svdp))) |
---|
1614 | |
---|
1615 | grib_set(gid,'step',0) |
---|
1616 | truedatetime=fdate+datetime.timedelta(hours=int(c.dtime)) |
---|
1617 | grib_set(gid,'time',truedatetime.hour*100) |
---|
1618 | grib_set(gid,'date',truedatetime.year*10000+truedatetime.month*100+truedatetime.day) |
---|
1619 | grib_set_values(gid, values) |
---|
1620 | grib_write(gid, g) |
---|
1621 | |
---|
1622 | grib_release(gid) |
---|
1623 | |
---|
1624 | gid = grib_new_from_index(iid) |
---|
1625 | |
---|
1626 | f.close() |
---|
1627 | g.close() |
---|
1628 | h.close() |
---|
1629 | |
---|
1630 | |
---|
1631 | grib_index_release(iid) |
---|