[e18f4b5] | 1 | # |
---|
| 2 | # This software is licensed under the terms of the Apache Licence Version 2.0 |
---|
| 3 | # which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. |
---|
| 4 | # |
---|
| 5 | # Functionality provided: Prepare input 3D-wind fields in hybrid coordinates + surface fields for FLEXPART runs |
---|
| 6 | # |
---|
| 7 | # Creation: October 2014 - Anne Fouilloux - University of Oslo |
---|
| 8 | # Extension November 2015 - Leopold Haimberger - University of Vienna for: |
---|
| 9 | # - using the WebAPI also for general MARS retrievals |
---|
| 10 | # - job submission on ecgate and cca |
---|
| 11 | # - job templates suitable for twice daily operational dissemination |
---|
| 12 | # - dividing retrievals of longer periods into digestible chunks |
---|
| 13 | # - retrieve also longer term forecasts, not only analyses and short term forecast data |
---|
| 14 | # - conversion into GRIB2 |
---|
| 15 | # - conversion into .fp format for faster execution of FLEXPART |
---|
| 16 | # |
---|
| 17 | # Extension May 2018 - Anne Philipp - University of Vienna |
---|
| 18 | # - introduced new CONTROL parameter to separate accumulation and other |
---|
| 19 | # data specification |
---|
| 20 | # - modified code to be able to retrieve ERA-5 data |
---|
| 21 | # - modified code to be able to retrieve CERA-20C data |
---|
| 22 | # - added possibility to retrieve data from the public data server also as |
---|
| 23 | # public user |
---|
| 24 | # - allow specifying TIME/TYPE/STEP combinations only with needed data instead |
---|
| 25 | # of the full 24 hours |
---|
| 26 | # |
---|
| 27 | # Requirements: |
---|
| 28 | # in addition to a standard python 2.6 or 2.7 installation the following packages need to be installed |
---|
| 29 | # ECMWF WebMARS, gribAPI with python enabled, emoslib, ecaccess web toolkit, all available from https://software.ecmwf.int/ |
---|
| 30 | # dateutils |
---|
| 31 | # matplotlib (optional, for debugging) |
---|
| 32 | # |
---|
| 33 | # |
---|
| 34 | import subprocess |
---|
| 35 | from argparse import ArgumentParser,ArgumentDefaultsHelpFormatter |
---|
| 36 | import traceback |
---|
| 37 | import shutil |
---|
| 38 | import os, errno,sys,inspect,glob |
---|
| 39 | import datetime |
---|
| 40 | |
---|
| 41 | from string import atoi |
---|
| 42 | from numpy import * |
---|
| 43 | |
---|
| 44 | ecapi=True |
---|
| 45 | try: |
---|
| 46 | import ecmwfapi |
---|
| 47 | except ImportError: |
---|
| 48 | ecapi=False |
---|
| 49 | from gribapi import * |
---|
| 50 | from GribTools import GribTools |
---|
| 51 | |
---|
def interpret_args_and_control(*args, **kwargs):
    '''Parse the command line arguments and merge them with the CONTROL file.

    Command line values take precedence over CONTROL file values.  Exits
    with status 1 when the CONTROL file cannot be read or when no start
    date is available from either source.

    Returns the tuple (args, c): the argparse Namespace and the populated
    Control instance.
    '''

    parser = ArgumentParser(description='Retrieve FLEXPART input from ECMWF MARS archive',
                            formatter_class=ArgumentDefaultsHelpFormatter)

    # the most important arguments
    parser.add_argument("--start_date", dest="start_date",
                        help="start date YYYYMMDD")
    parser.add_argument("--end_date", dest="end_date",
                        help="end_date YYYYMMDD")
    parser.add_argument("--date_chunk", dest="date_chunk", default=None,
                        help="# of days to be retrieved at once")

    # some arguments that override the default in the control file
    parser.add_argument("--basetime", dest="basetime", help="base such as 00/12 (for half day retrievals)")
    parser.add_argument("--step", dest="step", help="steps such as 00/to/48")
    parser.add_argument("--levelist", dest="levelist", help="Vertical levels to be retrieved, e.g. 30/to/60")
    parser.add_argument("--area", dest="area", help="area defined as north/west/south/east")

    # set the working directories
    parser.add_argument("--inputdir", dest="inputdir", default=None,
                        help="root directory for storing intermediate files")
    parser.add_argument("--outputdir", dest="outputdir", default=None,
                        help="root directory for storing output files")
    parser.add_argument("--flexpart_root_scripts", dest="flexpart_root_scripts",
                        help="FLEXPART root directory (to find grib2flexpart and COMMAND file)\n\
Normally flex_extract resides in the scripts directory of the FLEXPART distribution")

    # this is only used by prepareFLEXPART.py to rerun a postprocessing step
    parser.add_argument("--ppid", dest="ppid",
                        help="Specify parent process id for rerun of prepareFLEXPART")

    # arguments for job submission to ECMWF, only needed by submit.py
    parser.add_argument("--job_template", dest='job_template', default="job.temp",
                        help="job template file for submission to ECMWF")
    #parser.add_argument("--remote", dest="remote",
                        #help="target for submission to ECMWF (ecgate or cca etc.)")
    parser.add_argument("--queue", dest="queue",
                        help="queue for submission to ECMWF (e.g. ecgate or cca )")

    parser.add_argument("--controlfile", dest="controlfile", default='CONTROL.temp',
                        help="file with control parameters")
    parser.add_argument("--debug", dest="debug", default=0,
                        help="Debug mode - leave temporary files intact")
    parser.add_argument("--public", dest="public", default=0,
                        help="Public mode - retrieves the public datasets")

    parser.add_argument("--request", dest="request", default=None,
                        help="list all mars request in file mars_requests.dat \
and skip submission to mars")

    args = parser.parse_args()

    # read the CONTROL file; on failure retry with a path relative to the
    # script location before giving up
    try:
        c = Control(args.controlfile)
    except IOError:
        try:
            # NOTE(review): localpythonpath is not defined anywhere in this
            # file -- presumably set by an importing module; confirm
            c = Control(localpythonpath + args.controlfile)

        except:
            print 'Could not read control file "' + args.controlfile + '"'
            print 'Either it does not exist or its syntax is wrong.'
            print 'Try "' + sys.argv[0].split('/')[-1] + ' -h" to print usage information'
            exit(1)

    # a start date is mandatory, from either the command line or CONTROL
    if args.start_date == None and getattr(c, 'start_date') == None:
        print 'start_date specified neither in command line nor in control file ' + args.controlfile
        print 'Try "' + sys.argv[0].split('/')[-1] + ' -h" to print usage information'
        exit(1)

    # command line dates override the CONTROL file; a missing end date
    # collapses the period to a single day
    if args.start_date != None:
        c.start_date = args.start_date
    if args.end_date != None:
        c.end_date = args.end_date
    if c.end_date == None:
        c.end_date = c.start_date
    if args.date_chunk != None:
        c.date_chunk = args.date_chunk

    if not hasattr(c, 'debug'):
        c.debug = args.debug

    if not hasattr(c, 'public'):
        setattr(c, 'public', args.public)

    # working directories: default both to ../work; when only one of the
    # two is given, the other mirrors it
    if args.inputdir == None and args.outputdir == None:
        c.inputdir = '../work'
        c.outputdir = '../work'
    else:
        if args.inputdir != None:
            c.inputdir = args.inputdir
            if args.outputdir == None:
                c.outputdir = args.inputdir
        if args.outputdir != None:
            c.outputdir = args.outputdir
            if args.inputdir == None:
                c.inputdir = args.outputdir

    if hasattr(c, 'outputdir') == False and args.outputdir == None:
        c.outputdir = c.inputdir
    else:
        if args.outputdir != None:
            c.outputdir = args.outputdir

    # area north/west/south/east: values containing a '.' are converted
    # to integer milli-degrees (x1000)
    if args.area != None:
        afloat = '.' in args.area
        l = args.area.split('/')
        if afloat:
            for i in range(len(l)):
                l[i] = str(int(float(l[i]) * 1000))
        c.upper, c.left, c.lower, c.right = l

    # basetime activates the 'operational mode'
    if args.basetime != None:
        c.basetime = args.basetime
        #if int(c.basetime)==0:
            #c.start_date=datetime.datetime.strftime(datetime.datetime.strptime(c.start_date,'%Y%m%d')-datetime.timedelta(days=1),'%Y%m%d')
            #c.end_date=c.start_date

    # step: expand 'START/to/END/by/INC' into a zero-padded list of steps;
    # 'to' without 'by' is an error
    if args.step != None:
        l = args.step.split('/')
        if 'to' in args.step.lower():
            if 'by' in args.step.lower():
                ilist = arange(int(l[0]), int(l[2]) + 1, int(l[4]))
                c.step = ['{:0>3}'.format(i) for i in ilist]
            else:
                myerror(None, args.step + ':\n' + 'please use "by" as well if "to" is used')
        else:
            c.step = l

    # levelist: derive c.level (the maximum level) from the list
    if args.levelist != None:
        c.levelist = args.levelist
        if 'to' in c.levelist:
            c.level = c.levelist.split('/')[2]
        else:
            c.level = c.levelist.split('/')[-1]

    if args.flexpart_root_scripts != None:
        c.flexpart_root_scripts = args.flexpart_root_scripts

    # set request attribute to control file
    if args.request != None:
        c.request = int(args.request)
    else:
        c.request = 0

    if c.request != 0:
        # remove a stale request file so this run's requests are appended
        # to a fresh file
        marsfile = os.path.join(c.inputdir, 'mars_requests.csv')
        if os.path.isfile(marsfile):
            os.remove(marsfile)

    return args, c
---|
| 206 | |
---|
def install_args_and_control():
    '''Parse the command line arguments for the flex_extract installation
    and read the CONTROL file.

    For non-local targets the ECMWF credentials (--ecuid, --ecgid,
    --gateway, --destination) are mandatory.  Exits with status 1 when
    the CONTROL file cannot be read or credentials are missing.

    Returns the tuple (args, c).
    '''

    parser = ArgumentParser(description='Install flex_extract software locally or on ECMWF machines',
                            formatter_class=ArgumentDefaultsHelpFormatter)
    parser.add_argument('--target', dest='install_target',
                        help="Valid targets: local | ecgate | cca , the latter two are at ECMWF")
    parser.add_argument("--makefile", dest="makefile", help='Name of Makefile to use for compiling CONVERT2')
    parser.add_argument("--ecuid", dest="ecuid", help='user id at ECMWF')
    parser.add_argument("--ecgid", dest="ecgid", help='group id at ECMWF')
    parser.add_argument("--gateway", dest="gateway", help='name of local gateway server')
    parser.add_argument("--destination", dest="destination", help='ecaccess destination, e.g. leo@genericSftp')

    parser.add_argument("--flexpart_root_scripts", dest="flexpart_root_scripts",
                        help="FLEXPART root directory on ECMWF servers (to find grib2flexpart and COMMAND file)\n\
Normally flex_extract resides in the scripts directory of the FLEXPART distribution, thus the:")

    # arguments for job submission to ECMWF, only needed by submit.py
    parser.add_argument("--job_template", dest='job_template', default="job.temp.o",
                        help="job template file for submission to ECMWF")

    parser.add_argument("--controlfile", dest="controlfile", default='CONTROL.temp',
                        help="file with control parameters")
    args = parser.parse_args()

    try:
        # if True:
        c = Control(args.controlfile)
        # else:
    except:
        print 'Could not read control file "' + args.controlfile + '"'
        print 'Either it does not exist or its syntax is wrong.'
        print 'Try "' + sys.argv[0].split('/')[-1] + ' -h" to print usage information'
        exit(1)

    # remote installation targets require the full set of ECMWF
    # credentials and transfer settings
    if args.install_target != 'local':
        if args.ecgid == None or args.ecuid == None or args.gateway == None or args.destination == None:
            print 'Please enter your ECMWF user id and group id as well as the \nname of the local gateway and the ectrans destination '
            print 'with command line options --ecuid --ecgid --gateway --destination'
            print 'Try "' + sys.argv[0].split('/')[-1] + ' -h" to print usage information'
            print 'Please consult ecaccess documentation or ECMWF user support for further details'
            sys.exit(1)
        else:
            c.ecuid = args.ecuid
            c.ecgid = args.ecgid
            c.gateway = args.gateway
            c.destination = args.destination

    # makefile is optional
    try:
        c.makefile = args.makefile
    except:
        pass

    # root directory defaults differ between local installs ('../') and
    # ECMWF installs ('${HOME}', stored under a separate attribute)
    if args.install_target == 'local':
        if args.flexpart_root_scripts == None:
            c.flexpart_root_scripts = '../'
        else:
            c.flexpart_root_scripts = args.flexpart_root_scripts

    if args.install_target != 'local':
        if args.flexpart_root_scripts == None:
            c.ec_flexpart_root_scripts = '${HOME}'
        else:
            c.ec_flexpart_root_scripts = args.flexpart_root_scripts

    return args, c
---|
| 272 | |
---|
def cleanup(c):
    # Remove intermediate files from c.inputdir after a run.  Files whose
    # name contains c.prefix (the final output files) are normally kept,
    # but are removed as well when the data was already transferred or
    # stored (ectrans/ecstorage set) without the ECMWF API (c.ecapi False).
    print "cleanup"
    cleanlist = glob.glob(c.inputdir + "/*")
    # cleanlist[-1:-1]=glob.glob(c.inputdir+"/flux*")
    # cleanlist[-1:-1]=glob.glob(c.inputdir+"/*.grb")
    # if c.ecapi==False and (c.ectrans=='1' or c.ecstorage=='1'):
    # cleanlist[-1:-1]=glob.glob(c.inputdir+"/"+c.prefix+"*")

    # silentremove(c.inputdir+"/VERTICAL.EC")
    # silentremove(c.inputdir+"/date_time_stepRange.idx")
    for cl in cleanlist:
        if c.prefix not in cl:
            silentremove(cl)
        if c.ecapi == False and (c.ectrans == '1' or c.ecstorage == '1'):
            silentremove(cl)

    print "Done"
---|
| 293 | |
---|
| 294 | |
---|
def myerror(c, message='ERROR'):
    # Print an error message and terminate the program with exit status 1.
    # Mail recipients would come from c.mailfail, falling back to the
    # $USER login name (also used when c is None); the actual mail
    # sending code is kept commented out.
    try:
        target = c.mailfail
    except AttributeError:
        target = os.getenv('USER')

    if(type(target) is not list):
        target = [target]
    print message
    # uncomment if user wants email notification directly from python
    #for t in target:
        #p=subprocess.Popen(['mail','-s flex_extract v7.0.4 ERROR', os.path.expandvars(t)], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE,bufsize=1)
        #tr='\n'.join(traceback.format_stack())
        #pout=p.communicate(input=message+'\n\n'+tr)[0]
        #print 'Email sent to '+os.path.expandvars(t) # +' '+pout.decode()

    exit(1)
---|
| 313 | |
---|
def normalexit(c, message='Done!'):
    # Print a success message on normal termination.  Notification mail
    # (recipients from c.mailops) is kept commented out; any failure in
    # the notification block -- including a missing mailops attribute --
    # is deliberately ignored.
    try:
        target = c.mailops

        if(type(target) is not list):
            target = [target]
        # Uncomment if user wants notification directly from python
        #for t in target:
            #p=subprocess.Popen(['mail','-s flex_extract v7.0.4 normal exit', os.path.expandvars(t)], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE,bufsize=1)
            #pout=p.communicate(input=message+'\n\n')[0]
            #print pout.decode()

    except:
        pass

    print message

    return
---|
| 334 | |
---|
def product(*args, **kwds):
    '''Yield the cartesian product of the input iterables.

    Pure-python equivalent of itertools.product (which this function
    shadows by name).  The optional keyword 'repeat' repeats the input
    pools, exactly like itertools.product(..., repeat=n).
    '''
    # product('ABCD', 'xy') --> Ax Ay Bx By Cx Cy Dx Dy
    # product(range(2), repeat=3) --> 000 001 010 011 100 101 110 111
    # list(...) is required: on Python 3 map() returns an iterator, and
    # multiplying an iterator by an int raises TypeError.  On Python 2
    # map() already returns a list, so behaviour is unchanged.
    pools = list(map(tuple, args)) * kwds.get('repeat', 1)
    result = [[]]
    for pool in pools:
        result = [x + [y] for x in result for y in pool]
    for prod in result:
        yield tuple(prod)
---|
| 344 | |
---|
| 345 | ############################################################### |
---|
| 346 | # utility to remove a file if it exists |
---|
| 347 | # it does not fail if the file does not exist |
---|
| 348 | ############################################################### |
---|
def silentremove(filename):
    '''Delete *filename*, succeeding silently when it does not exist.'''
    try:
        os.remove(filename)
    except OSError as err:
        if err.errno == errno.ENOENT:
            return  # no such file -- nothing to do
        raise  # any other OS error is a real problem
---|
| 355 | |
---|
| 356 | |
---|
def init128(fn):
    '''Read an ECMWF "table 128" parameter text file.

    Each non-comment line carries the numeric parameter id in columns
    0-2 and the short name in columns 59-63.  Lines starting with '!'
    are comments.

    Returns a dict mapping the 3-character id string to the stripped
    short name.
    '''
    table128 = dict()
    with open(fn) as f:
        fdata = f.read().split('\n')
        for data in fdata:
            # 'data and' guards against empty lines: split('\n') yields a
            # trailing '' whose data[0] raised IndexError in the original
            if data and data[0] != '!':
                table128[data[0:3]] = data[59:64].strip()

    return table128
---|
| 367 | |
---|
def toparamId(pars, table):
    '''Convert a '/'-separated parameter string into numeric parameter ids.

    Each token of *pars* (case-insensitive) may be either a numeric id
    or a short name; it is looked up in *table* (id string -> short
    name, as produced by init128).  Unknown tokens produce a warning
    and are skipped.

    Returns the list of matched ids as integers, in input order.
    '''
    cpar = pars.upper().split('/')
    ipar = []
    for par in cpar:
        found = False
        # items() instead of iteritems() so the function works on both
        # Python 2 and Python 3 (identical behaviour on Python 2)
        for k, v in table.items():
            if par == k or par == v:
                ipar.append(int(k))
                found = True
                break
        if not found:
            # print() with a single argument behaves the same on Python 2
            print('Warning: par ' + par + ' not found in table 128')
    return ipar
---|
| 382 | |
---|
def dapoly(alist):
    '''Fit a cubic polynomial through four consecutive accumulation
    values and evaluate the resulting expression.

    *alist* must hold four numeric values (scalars or numpy arrays of
    equal shape); the return value has the same type/shape as the inputs.
    '''
    a0 = alist[0]
    a1 = alist[1]
    a2 = alist[2]
    a3 = alist[3]
    # cubic coefficients derived from the four sample points
    pya = (a3 - a0 + 3. * (a1 - a2)) / 6.
    pyb = (a2 + a0) / 2. - a1 - 9. * pya / 2.
    pyc = a1 - a0 - 7. * pya / 2. - 2. * pyb
    pyd = a0 - pya / 4. - pyb / 3. - pyc / 2.
    # evaluate the polynomial combination that yields the result field
    return 8. * pya + 4. * pyb + 2. * pyc + pyd
---|
| 393 | |
---|
def darain(alist):
    '''Disaggregate four consecutive accumulated precipitation-like
    fields into one output field.

    *alist* must hold four numpy arrays of equal shape.  Negative values
    are clipped to zero IN PLACE on the caller's arrays (matching the
    original behaviour).  Where neighbouring sums are zero, half of the
    central value is used instead of the weighted ratio.
    '''
    xa = alist[0]
    xb = alist[1]
    xc = alist[2]
    xd = alist[3]
    # clip negative accumulations to zero -- note: mutates the inputs
    for field in (xa, xb, xc, xd):
        field[field < 0.] = 0.

    # left contribution: xb weighted by its neighbours xa, xc
    xac = 0.5 * xb
    mask = xa + xc > 0.
    xac[mask] = xb[mask] * xc[mask] / (xa[mask] + xc[mask])

    # right contribution
    # NOTE(review): the numerator reuses xb*xc here (not xc*xd), which
    # looks asymmetric -- reproduced exactly from the original; confirm
    # against upstream flex_extract before changing
    xbd = 0.5 * xc
    mask = xb + xd > 0.
    xbd[mask] = xb[mask] * xc[mask] / (xb[mask] + xd[mask])

    return xac + xbd
---|
| 413 | |
---|
class Control:
    'class containing the information of the flex_extract control file'

    def __init__(self, filename):
        # Parse the CONTROL file: one "KEY value [value ...]" pair per
        # line; keys become lower-cased instance attributes.  Afterwards
        # defaults and consistency checks are applied.  Raises IOError
        # when the file cannot be opened.
        #try:
        with open(filename) as f:
            fdata = f.read().split('\n')
            for ldata in fdata:
                data = ldata.split()
                if len(data) > 1:
                    # drop a leading 'M_' prefix from the key
                    # NOTE(review): the 'in' test matches 'm_' anywhere in
                    # the key, not only at the start -- confirm intended
                    if 'm_' in data[0].lower():
                        data[0] = data[0][2:]
                    # map legacy key names onto canonical attribute names
                    if data[0].lower() == 'class':
                        data[0] = 'marsclass'
                    if data[0].lower() == 'day1':
                        data[0] = 'start_date'
                    if data[0].lower() == 'day2':
                        data[0] = 'end_date'
                    if data[0].lower() == 'addpar':
                        # ADDPAR may be a '/'-separated list; normalise it
                        # into separate tokens so it is stored as a list
                        if '/' in data[1]:
                            if data[1][0] == '/':
                                data[1] = data[1][1:]
                            dd = data[1].split('/')
                            data = [data[0]]
                            for d in dd:
                                data.append(d)
                        pass
                    if len(data) == 2:
                        if '$' in data[1]:
                            # value contains ${ENVVAR} references: store the
                            # raw value under the key and the expanded value
                            # under '<key>_expanded'
                            # print data[1]
                            setattr(self, data[0].lower(), data[1])
                            while '$' in data[1]:
                                i = data[1].index('$')
                                j = data[1].find('{')
                                k = data[1].find('}')
                                var = os.getenv(data[1][j + 1:k])
                                if var is not None:
                                    data[1] = data[1][:i] + var + data[1][k + 1:]
                                else:
                                    myerror(None, 'Could not find variable ' + data[1][j + 1:k] + ' while reading ' + filename)

                            # print data[0],data[1]
                            setattr(self, data[0].lower() + '_expanded', data[1])
                        else:
                            # the literal string 'None' maps to None
                            if data[1].lower() != 'none':
                                setattr(self, data[0].lower(), data[1])
                            else:
                                setattr(self, data[0].lower(), None)

                    elif len(data) > 2:
                        # multiple values -> store as a list of strings
                        setattr(self, data[0].lower(), (data[1:]))
                else:
                    pass

        # defaults for attributes the CONTROL file may omit
        if not hasattr(self, 'start_date'):
            self.start_date = None
        if not hasattr(self, 'end_date'):
            self.end_date = self.start_date
        if not hasattr(self, 'accuracy'):
            self.accuracy = 24
        if not hasattr(self, 'omega'):
            self.omega = '0'
        if not hasattr(self, 'cwc'):
            self.cwc = '0'
        if not hasattr(self, 'omegadiff'):
            self.omegadiff = '0'
        if not hasattr(self, 'etadiff'):
            self.etadiff = '0'

        # type/time/step are treated as parallel lists throughout the code
        if not isinstance(self.type, list):
            self.type = [self.type]
        if not isinstance(self.time, list):
            self.time = [self.time]
        if not isinstance(self.step, list):
            self.step = [self.step]

        # valid ECMWF maximum model level numbers
        max_level_list = [16, 19, 31, 40, 50, 60, 62, 91, 137]
        if not hasattr(self, 'levelist'):
            if not hasattr(self, 'level'):
                print 'WARNING: neither levelist nor level specified in CONTROL file'
            else:
                # NOTE(review): self.level is a string here while the list
                # holds ints, so this membership test appears to never be
                # true -- compare the int() conversions used below; confirm
                if self.level in max_level_list:
                    self.levelist = '1/to/' + self.level
        else:
            if not hasattr(self, 'level'):
                # derive LEVEL (the maximum level) from LEVELIST and check
                # it against the list of valid ECMWF level counts
                if 'to' in self.levelist.lower():
                    max_level = self.levelist.split('/')[2]
                    if int(max_level) in max_level_list:
                        self.level = max_level
                    else:
                        print 'ERROR: LEVEL-parameter could not be set'
                        print 'LEVEL must be the maximum level of a specified level list from ECMWF, e.g.'
                        print '16, 19, 31, 40, 50, 60, 62, 91 or 137'
                        sys.exit(1)
                else:
                    max_level = self.levelist.split('/')[-1]
                    if int(max_level) in max_level_list:
                        self.level = max_level
                    else:
                        print 'ERROR: LEVEL-parameter could not be set'
                        print 'LEVEL must be the maximum level of a specified level list from ECMWF, e.g.'
                        print '16, 19, 31, 40, 50, 60, 62, 91 or 137'
                        sys.exit(1)
            else:
                # check if consistent
                if int(self.level) in max_level_list:
                    pass
                else:
                    print 'ERROR: LEVEL-parameter is not properly selected'
                    print 'LEVEL must be the maximum level of a specified level list from ECMWF, e.g.'
                    print '16, 19, 31, 40, 50, 60, 62, 91 or 137'
                    sys.exit(1)

        # maxstep: largest forecast step, derived from STEP when absent
        if not hasattr(self, 'maxstep'):
            self.maxstep = 0
            for s in self.step:
                if int(s) > self.maxstep:
                    self.maxstep = int(s)
        else:
            self.maxstep = int(self.maxstep)
        if not hasattr(self, 'prefix'):
            self.prefix = 'EN'
        if not hasattr(self, 'makefile'):
            self.makefile = None
        if not hasattr(self, 'basetime'):
            self.basetime = None
        if not hasattr(self, 'date_chunk'):
            self.date_chunk = '3'

        # directory layout, relative to the location of this script
        self.flexextractdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) + '/../'  # script directory
        # if not hasattr(self,'exedir'):
        self.exedir = self.flexextractdir + 'src/'
        if not hasattr(self, 'grib2flexpart'):
            self.grib2flexpart = '0'
        if not hasattr(self, 'flexpart_root_scripts'):
            self.flexpart_root_scripts = self.flexextractdir

        # accumulation (flux) retrieval settings with legacy fallbacks
        if not hasattr(self, 'acctype'):
            print('... Control paramter ACCTYPE was not defined in CONTROL file.')
            print('Important for (forecast) flux data extraction.')
            try:
                if len(self.type) > 1 and self.type[1] != 'AN':
                    print('Use old setting by using TYPE[1].')
                    self.acctype = self.type[1]
            except:
                print('Use default value "FC"!')
                self.acctype = 'FC'
            # NOTE(review): when TYPE has a single entry (or TYPE[1] is
            # 'AN') no exception occurs and acctype remains unset -- confirm

        if not hasattr(self, 'acctime'):
            print('... Control paramter ACCTIME was not defined in CONTROL file.')
            print('Use default value "00/12" for flux forecast!')
            print('This may be different for single data sets, see documentation if retrieval fails!')
            self.acctime = '00/12'

        if not hasattr(self, 'accmaxstep'):
            print('... Control paramter ACCMAXSTEP was not defined in CONTROL file.')
            print('Use default value "12" for flux forecast!')
            print('This may be different for single data sets, see documentation if retrieval fails!')
            self.accmaxstep = '12'

        # analyses must be retrieved with step 0
        for i in range(len(self.type)):
            if self.type[i] == 'AN' and int(self.step[i]) != 0:
                print('Analysis retrievals must have STEP = 0 (setting to 0)')
                # NOTE(review): the message refers to STEP but this line
                # overwrites self.type[i]; likely self.step[i] was meant --
                # confirm against upstream flex_extract
                self.type[i] = 0

        # request mode: 0 = submit to MARS; non-zero = only write the
        # requests to mars_requests.csv (remove any stale file first)
        if not hasattr(self, 'request'):
            self.request = 0
        elif int(self.request) != 0:
            self.request = int(self.request)
            marsfile = os.path.join(self.inputdir,
                                    'mars_requests.csv')
            if os.path.isfile(marsfile):
                silentremove(marsfile)

        return

    def __str__(self):
        # Render all attributes as 'name: value' pairs, e.g.
        # 'kids: 0, name: Dog, color: Spotted, ...'
        attrs = vars(self)
        # now dump this in some way or another
        return ', '.join("%s: %s" % item for item in attrs.items())

    def tolist(self):
        # Return the control parameters as a sorted list of 'key value'
        # strings, skipping derived/expanded path attributes.
        attrs = vars(self)
        l = list()
        for item in attrs.items():
            if '_expanded' in item[0]:
                pass
            elif 'exedir' in item[0]:
                pass
            elif 'flexpart_root_scripts' in item[0]:
                pass
            elif 'flexextractdir' in item[0]:
                pass

            else:
                if type(item[1]) is list:
                    # lists are flattened to space separated values
                    stot = ''
                    for s in item[1]:
                        stot += s + ' '

                    l.append("%s %s" % (item[0], stot))
                else:
                    l.append("%s %s" % item)
        return sorted(l)
---|
| 620 | |
---|
| 621 | |
---|
| 622 | |
---|
| 623 | ############################################################## |
---|
| 624 | # MARSretrieval class |
---|
| 625 | ############################################################## |
---|
class MARSretrieval:
    'class for MARS retrievals'

    def __init__(self, server, public, marsclass="ei", type="", levtype="", levelist="",
                 repres="", date="", resol="", stream="", area="", time="", step="", expver="1",
                 number="", accuracy="", grid="", gaussian="", target="", param="", dataset="", expect=""):
        # Store all MARS request keywords as attributes; empty strings
        # are dropped from the request later in dataRetrieve().
        # 'server' is the ecmwfapi service object (or False to use the
        # local 'mars' command); 'public' selects the public dataset
        # retrieve() interface instead of execute().
        self.dataset = dataset
        self.marsclass = marsclass
        self.type = type
        self.levtype = levtype
        self.levelist = levelist
        self.repres = repres
        self.date = date
        self.resol = resol
        self.stream = stream
        self.area = area
        self.time = time
        self.step = step
        self.expver = expver
        self.target = target
        self.param = param
        self.number = number
        self.accuracy = accuracy
        self.grid = grid
        self.gaussian = gaussian
        # self.expect=expect
        self.server = server
        self.public = public

    def displayInfo(self):
        # Print every request parameter except the server handle.
        attrs = vars(self)
        for item in attrs.items():
            # NOTE(review): "item[0] in ('server')" is a substring test on
            # the string 'server' (the parentheses do not make a tuple) --
            # confirm whether ('server',) was meant
            if item[0] in ('server'):
                pass
            else:
                print item[0] + ': ' + str(item[1])

        return

    def print_infodata_csv(self, inputdir, request_number):
        '''
        @Description:
            Write all request parameter in alpabetical order into a "csv" file.

        @Input:
            self: instance of MarsRetrieval
                For description see class documentation.

            inputdir: string
                The path where all data from the retrievals are stored.

            request_number: integer
                Number of mars requests for flux and non-flux data.

        @Return:
            <nothing>
        '''

        # Get all class attributes and their values as a dictionary
        attrs = vars(self).copy()
        del attrs['server']
        del attrs['public']

        # open a file to store all requests to
        with open(os.path.join(inputdir, 'mars_requests.csv'), 'a') as f:
            f.write(str(request_number) + ', ')
            f.write(', '.join(str(attrs[key])
                              for key in sorted(attrs.iterkeys())))
            f.write('\n')

        return

    def dataRetrieve(self):
        # Submit the request to MARS, either through the ECMWF WebAPI
        # (self.server set: retrieve() for public users, execute() for
        # member-state users) or by piping a textual request into the
        # local 'mars' command.  Raises IOError when mars reports errors
        # or the retrieval produced an empty target file.
        attrs = vars(self).copy()

        # drop bookkeeping attributes and rename marsclass to the MARS
        # keyword 'class' (a reserved word in python)
        del attrs['server']
        del attrs['public']
        mclass = attrs.get('marsclass')
        del attrs['marsclass']
        attrs['class'] = mclass
        target = attrs.get('target')
        if not int(self.public):
            # member-state retrievals pass the target separately to execute()
            del attrs['target']
        print 'target: ', target

        # remove empty keywords and stringify the remaining values
        delete_keys = []
        for k, v in attrs.iteritems():
            if v == '':
                delete_keys.append(str(k))
            else:
                attrs[k] = str(v)

        for k in delete_keys:
            del attrs[k]

        if self.server != False:
            try:
                if int(self.public):
                    print 'RETRIEVE DATA!'
                    self.server.retrieve(attrs)
                else:
                    print 'EXECUTE RETRIEVAL!'
                    self.server.execute(attrs, target)
            except:
                e = sys.exc_info()[0]
                print "Error: ", e
                print 'MARS Request failed'#, have you already registered at apps.ecmwf.int?'
                #raise IOError
            # an empty target file means the request matched no data
            if not int(self.public) and os.stat(target).st_size == 0:
                print 'MARS Request returned no data - please check request'
                raise IOError
            elif int(self.public) and os.stat(attrs.get('target')).st_size == 0:
                print 'MARS Request returned no data - please check request'
                raise IOError
        else:
            # no WebAPI server: build a textual request and pipe it into
            # the local 'mars' command
            s = 'ret'
            for k, v in attrs.iteritems():
                s = s + ',' + k + '=' + str(v)

            s += ',target="' + target + '"'
            p = subprocess.Popen(['mars'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=1)
            pout = p.communicate(input=s)[0]
            print pout.decode()
            if 'Some errors reported' in pout.decode():
                print 'MARS Request failed - please check request'
                raise IOError

            if os.stat(target).st_size == 0:
                print 'MARS Request returned no data - please check request'
                raise IOError

        return
---|
| 758 | |
---|
| 759 | ############################################################## |
---|
| 760 | class EIFlexpart: |
---|
| 761 | 'class to retrieve Era Interim data for running FLEXPART' |
---|
| 762 | ############################################################## |
---|
| 763 | |
---|
    def __init__(self,c,fluxes=False):
        '''Prepare all MARS request characteristics from the CONTROL
        parameters in c.

        Builds:
          - self.types: per MARS field type (e.g. AN/FC) the '/'-joined
            times and steps to be requested,
          - self.mars: mapping of FLEXPART forecast hour ('000','003',...)
            to the [type, time, step] triple it originates from,
          - self.params: per grid/level combination the
            [param, levtype, levelist, grid] request values,
        plus copies of the general retrieval settings (class, stream,
        grid, area, accuracy, levels, expver, ...).

        With fluxes=True only the accumulated flux fields are prepared,
        otherwise the full set of model-level and surface fields.
        '''
        # different mars types for retrieving reanalysis data for flexpart

        # set a counter for the number of mars requests generated
        self.mreq_count = 0
        self.types=dict()
        # CONTROL setups without usable TYPE/STEP/TIME lists raise here;
        # broad except keeps the original defaults in that case
        try:
            if c.maxstep>len(c.type):    # Pure forecast mode
                # replicate the first type/time and number the steps 1..maxstep
                c.type=[c.type[0]]    # AP changed this index from 1 to 0
                c.step=['{:0>3}'.format(int(c.step[0]))]
                c.time=[c.time[0]]
                for i in range(1,c.maxstep+1):
                    c.type.append(c.type[0])
                    c.step.append('{:0>3}'.format(i))
                    c.time.append(c.time[0])
        except:
            pass

        self.inputdir=c.inputdir
        self.basetime=c.basetime
        self.dtime=c.dtime
        self.mars={}
        i=0
        j=0
        if fluxes==True and c.maxstep<=24:
            # accumulated flux fields: one request entry for the
            # accumulation type, steps dtime..accmaxstep
            self.types[str(c.acctype)]={'times':str(c.acctime),
                                        'steps':'{}/to/{}/by/{}'.format(
                                            c.dtime,c.accmaxstep,c.dtime)}
            i=1
            # fill self.mars for both 00 and 12 UTC forecast starts
            for k in [0,12]:
                for j in range(int(c.dtime),13,int(c.dtime)):
                    self.mars['{:0>3}'.format(i*int(c.dtime))]=[c.type[1],'{:0>2}'.format(k),'{:0>3}'.format(j)]
                    i+=1
        else:
            for ty,st,ti in zip(c.type,c.step,c.time):
                # hours usable for the given basetime window
                btlist=range(24)
                if c.basetime=='12':
                    btlist=[1,2,3,4,5,6,7,8,9,10,11,12]
                if c.basetime=='00':
                    btlist=[13,14,15,16,17,18,19,20,21,22,23,0]


                # keep only TIME/TYPE/STEP combinations that fall on the
                # dtime raster and inside the basetime window
                # NOTE(review): 'mod' presumably comes from a numpy star
                # import at the top of the file -- confirm.
                # NOTE(review): the second mod(c.step[i]) clause duplicates
                # the first; possibly c.time[i] was intended -- confirm.
                if ((ty.upper() == 'AN' and mod(int(c.time[i]),int(c.dtime))==0) or \
                    (ty.upper() != 'AN' and mod(int(c.step[i]),int(c.dtime))==0 and \
                     mod(int(c.step[i]),int(c.dtime))==0) ) and \
                   (int(c.time[i]) in btlist or c.maxstep>24):
                    if ty not in self.types.keys():
                        self.types[ty]={'times':'','steps':''}
                    # collect each time/step only once per type,
                    # '/'-joined as MARS expects
                    if ti not in self.types[ty]['times']:
                        if len(self.types[ty]['times'])>0:
                            self.types[ty]['times']+='/'
                        self.types[ty]['times']+=ti
                    if st not in self.types[ty]['steps']:
                        if len(self.types[ty]['steps'])>0:
                            self.types[ty]['steps']+='/'
                        self.types[ty]['steps']+=st

                    self.mars['{:0>3}'.format(j)]=[ty,'{:0>2}'.format(int(ti)),'{:0>3}'.format(int(st))]
                    j+=int(c.dtime)

                i+=1

        # Different grids need different retrievals
        # SH=Spherical Harmonics, GG=Gaussian Grid, OG=Output Grid, ML=MultiLevel, SL=SingleLevel
        # NOTE(review): key 'OG_OROLSM_SL' (single underscore) differs from
        # 'OG_OROLSM__SL' assigned further below -- confirm which is used.
        self.params={'SH__ML':'','SH__SL':'',
                     'GG__ML':'','GG__SL':'',
                     'OG__ML':'','OG__SL':'',
                     'OG_OROLSM_SL':'','OG_acc_SL':''}

        self.marsclass=c.marsclass
        self.stream=c.stream
        self.number=c.number
        self.resol=c.resol
        if 'N' in c.grid:  # Gaussian output grid
            self.grid=c.grid
            self.area='G'
        else:
            # CONTROL stores grid/area in millidegrees -> convert to degrees
            self.grid='{}/{}'.format(int(c.grid)/1000.,int(c.grid)/1000.)
            self.area='{}/{}/{}/{}'.format(int(c.upper)/1000.,int(c.left)/1000.,int(c.lower)/1000.,int(c.right)/1000.)

        self.accuracy=c.accuracy
        self.level=c.level
        # the following CONTROL parameters are optional; fall back to
        # defaults when the attribute is missing
        try:
            self.levelist=c.levelist
        except:
            self.levelist='1/to/'+c.level
        self.glevelist='1/to/'+c.level
        try:
            self.gaussian=c.gaussian
        except:
            self.gaussian=''
        try:
            self.dataset=c.dataset
        except:
            self.dataset=''
        try:
            self.expver=c.expver
        except:
            self.expver='1'
        # NOTE(review): self.number was already set unconditionally above;
        # this second assignment only changes the fallback to '0'.
        try:
            self.number=c.number
        except:
            self.number='0'

        self.outputfilelist=[]


        # Now comes the nasty part that deals with the different scenarios we have:
        # 1) Calculation of etadot on
        #    a) Gaussian grid
        #    b) Output grid
        #    c) Output grid using parameter 77 retrieved from MARS
        # 3) Calculation/Retrieval of omega
        # 4) Download also data for WRF

        if fluxes==False:
            self.params['SH__SL']=['LNSP','ML','1','OFF']
            # self.params['OG__SL']=["SD/MSL/TCC/10U/10V/2T/2D/129/172",'SFC','1',self.grid]
            self.params['OG__SL']=["141/151/164/165/166/167/168/129/172",'SFC','1',self.grid]
            # ERA5 (EA) and CERA-20C (EP) use a different parameter set
            # for orography/land-sea mask retrieval
            if c.marsclass.upper() == 'EA' or c.marsclass.upper() == 'EP':
                self.params['OG_OROLSM__SL']=["160/27/28/244",'SFC','1',self.grid]
            else:
                self.params['OG_OROLSM__SL']=["160/27/28/173",'SFC','1',self.grid]

            # user-requested additional surface parameters
            if len(c.addpar)>0:
                if c.addpar[0]=='/':
                    c.addpar=c.addpar[1:]
                self.params['OG__SL'][0]+='/'+'/'.join(c.addpar)
            self.params['OG__ML']=['T/Q','ML',self.levelist,self.grid]

            # choose where U/V/etadot come from depending on the
            # gauss/eta CONTROL switches
            if c.gauss=='0' and c.eta=='1':  # the simplest case
                self.params['OG__ML'][0]+='/U/V/77'
            elif c.gauss=='0' and c.eta=='0':  # this is not recommended (inaccurate)
                self.params['OG__ML'][0]+='/U/V'
            elif c.gauss=='1' and c.eta=='0':  # this is needed for data before 2008, or for reanalysis data
                self.params['GG__SL']=['Q','ML','1','{}'.format((int(self.resol)+1)/2)]
                self.params['SH__ML']=['U/V/D','ML',self.glevelist,'OFF']
            else:
                print 'Warning: This is a very costly parameter combination, use only for debugging!'
                self.params['GG__SL']=['Q','ML','1','{}'.format((int(self.resol)+1)/2)]
                self.params['GG__ML']=['U/V/D/77','ML',self.glevelist,'{}'.format((int(self.resol)+1)/2)]

            if c.omega=='1':
                self.params['OG__ML'][0]+='/W'

            try:  # add cloud water content if necessary
                if c.cwc=='1':
                    self.params['OG__ML'][0]+='/CLWC/CIWC'
            except:
                pass

            try:  # add vorticity and geopotential height for WRF if necessary
                if c.wrf=='1':
                    self.params['OG__ML'][0]+='/Z/VO'
                    if '/D' not in self.params['OG__ML'][0]:
                        self.params['OG__ML'][0]+='/D'
                    # wrf_sfc='sp/msl/skt/2t/10u/10v/2d/z/lsm/sst/ci/sd/stl1/stl2/stl3/stl4/swvl1/swvl2/swvl3/swvl4'.upper()
                    # 134/151/235/167/165/166/168/129/172/034/031/141/139/170/183/236/039/040/041/042
                    wrf_sfc='134/235/167/165/166/168/129/172/34/31/141/139/170/183/236/39/40/41/42'.upper()
                    lwrt_sfc=wrf_sfc.split('/')
                    for par in lwrt_sfc:
                        if par not in self.params['OG__SL'][0]:
                            self.params['OG__SL'][0]+='/'+par

            except:
                pass
        else:
            # flux (accumulated) surface fields only
            self.params['OG_acc_SL']=["LSP/CP/SSHF/EWSS/NSSS/SSR",'SFC','1',self.grid]


        return
        # add additional WRF specific parameters here
---|
| 936 | |
---|
| 937 | def create_namelist(self,c,filename): |
---|
| 938 | |
---|
| 939 | # if inputdir=="": |
---|
| 940 | # self.inputdir='.' |
---|
| 941 | # else: |
---|
| 942 | # self.inputdir=inputdir |
---|
| 943 | |
---|
| 944 | self.inputdir=c.inputdir |
---|
| 945 | area=asarray(self.area.split('/')).astype(float) |
---|
| 946 | grid=asarray(self.grid.split('/')).astype(float) |
---|
| 947 | |
---|
| 948 | # if area[3]<0: |
---|
| 949 | # area[3]+=360 |
---|
| 950 | if area[1]>area[3]: |
---|
| 951 | area[1]-=360 |
---|
| 952 | zyk=abs((area[3]-area[1]-360.)+grid[1])<1.e-6 |
---|
| 953 | maxl=int((area[3]-area[1])/grid[1])+1 |
---|
| 954 | maxb=int((area[0]-area[2])/grid[0])+1 |
---|
| 955 | |
---|
| 956 | f=open(self.inputdir+'/'+filename,'w') |
---|
| 957 | f.write('&NAMGEN\n') |
---|
| 958 | f.write(',\n '.join(['maxl='+str(maxl),'maxb='+str(maxb), |
---|
| 959 | 'mlevel='+self.level,'mlevelist='+'"'+self.levelist+'"', |
---|
| 960 | 'mnauf='+self.resol,'metapar='+'77', |
---|
| 961 | 'rlo0='+str(area[1]),'rlo1='+str(area[3]),'rla0='+str(area[2]),'rla1='+str(area[0]), |
---|
| 962 | 'momega='+c.omega,'momegadiff='+c.omegadiff,'mgauss='+c.gauss, |
---|
| 963 | 'msmooth='+c.smooth,'meta='+c.eta,'metadiff='+c.etadiff,'mdpdeta='+c.dpdeta])) |
---|
| 964 | |
---|
| 965 | f.write('\n/\n') |
---|
| 966 | f.close() |
---|
| 967 | return |
---|
| 968 | |
---|
| 969 | |
---|
[51f9853] | 970 | def retrieve(self, server, public, dates, request, times, inputdir=''): |
---|
[e18f4b5] | 971 | self.dates=dates |
---|
| 972 | self.server=server |
---|
| 973 | self.public=public |
---|
| 974 | |
---|
| 975 | if inputdir=="": |
---|
| 976 | self.inputdir='.' |
---|
| 977 | else: |
---|
| 978 | self.inputdir=inputdir |
---|
| 979 | |
---|
| 980 | # Retrieve Q not for using Q but as a template for a reduced gaussian grid one date and time is enough |
---|
| 981 | # Take analysis at 00 |
---|
| 982 | # qdate=self.dates |
---|
| 983 | # idx=qdate.find("/") |
---|
| 984 | # if (idx >0): |
---|
| 985 | # qdate=self.dates[:idx] |
---|
| 986 | |
---|
| 987 | #QG= MARSretrieval(self.server, self.public, dataset=self.dataset, marsclass=self.marsclass, stream=self.stream, type="an", levtype="ML", levelist="1", |
---|
| 988 | #gaussian="reduced",grid='{}'.format((int(self.resol)+1)/2), resol=self.resol,accuracy=self.accuracy,target=self.inputdir+"/"+"QG.grb", |
---|
| 989 | #date=qdate, time="00",expver=self.expver, param="133.128") |
---|
| 990 | #QG.displayInfo() |
---|
| 991 | #QG.dataRetrieve() |
---|
| 992 | |
---|
| 993 | oro=False |
---|
| 994 | for ftype in self.types: |
---|
| 995 | for pk,pv in self.params.iteritems(): |
---|
| 996 | if isinstance(pv,str): # or pk!='GG__SL' : |
---|
| 997 | continue |
---|
| 998 | mftype=''+ftype |
---|
| 999 | mftime=self.types[ftype]['times'] |
---|
| 1000 | mfstep=self.types[ftype]['steps'] |
---|
| 1001 | mfdate=self.dates |
---|
| 1002 | mfstream=self.stream |
---|
| 1003 | mftarget=self.inputdir+"/"+ftype+pk+'.'+self.dates.split('/')[0]+'.'+str(os.getppid())+'.'+str(os.getpid())+".grb" |
---|
| 1004 | |
---|
| 1005 | if pk=='OG__SL': |
---|
| 1006 | pass |
---|
| 1007 | if pk=='OG_OROLSM__SL': |
---|
| 1008 | if oro==False: |
---|
| 1009 | # in CERA20C there is no stream "OPER"! |
---|
| 1010 | if self.marsclass.upper() != 'EP': |
---|
| 1011 | mfstream='OPER' |
---|
| 1012 | mftype='AN' |
---|
| 1013 | mftime='00' |
---|
| 1014 | mfstep='000' |
---|
| 1015 | mfdate=self.dates.split('/')[0] |
---|
| 1016 | mftarget=self.inputdir+"/"+pk+'.'+mfdate+'.'+str(os.getppid())+'.'+str(os.getpid())+".grb" |
---|
| 1017 | oro=True |
---|
| 1018 | else: |
---|
| 1019 | continue |
---|
| 1020 | |
---|
| 1021 | if pk=='GG__SL' and pv[0]=='Q': |
---|
| 1022 | area="" |
---|
| 1023 | gaussian='reduced' |
---|
| 1024 | else: |
---|
| 1025 | area=self.area |
---|
| 1026 | gaussian=self.gaussian |
---|
| 1027 | |
---|
| 1028 | if self.basetime==None: |
---|
[51f9853] | 1029 | # increase number of mars requests |
---|
| 1030 | self.mreq_count += 1 |
---|
[e18f4b5] | 1031 | MR= MARSretrieval(self.server, self.public, dataset=self.dataset, marsclass=self.marsclass, stream=mfstream, |
---|
| 1032 | type=mftype, levtype=pv[1], levelist=pv[2],resol=self.resol, gaussian=gaussian, |
---|
| 1033 | accuracy=self.accuracy,grid=pv[3],target=mftarget,area=area, |
---|
| 1034 | date=mfdate, time=mftime,number=self.number,step=mfstep, expver=self.expver, param=pv[0]) |
---|
| 1035 | |
---|
[5862eb9] | 1036 | if request == 0: |
---|
[fb8810c] | 1037 | MR.displayInfo() |
---|
| 1038 | MR.dataRetrieve() |
---|
[5862eb9] | 1039 | elif request == 1: |
---|
[51f9853] | 1040 | MR.print_infodata_csv(self.inputdir, self.mreq_count) |
---|
[5862eb9] | 1041 | elif request == 2: |
---|
[51f9853] | 1042 | MR.print_infodata_csv(self.inputdir, self.mreq_count) |
---|
[fb8810c] | 1043 | MR.displayInfo() |
---|
| 1044 | MR.dataRetrieve() |
---|
[e18f4b5] | 1045 | # The whole else section is only necessary for operational scripts. It could be removed |
---|
| 1046 | else: # check if mars job requests fields beyond basetime. If yes eliminate those fields since they may not |
---|
| 1047 | # be accessible with user's credentials |
---|
| 1048 | sm1=-1 |
---|
| 1049 | if 'by' in mfstep: |
---|
| 1050 | sm1=2 |
---|
| 1051 | tm1=-1 |
---|
| 1052 | if 'by' in mftime: |
---|
| 1053 | tm1=2 |
---|
| 1054 | maxtime=datetime.datetime.strptime(mfdate.split('/')[-1]+mftime.split('/')[tm1],'%Y%m%d%H')+ \ |
---|
| 1055 | datetime.timedelta(hours=int(mfstep.split('/')[sm1])) |
---|
| 1056 | |
---|
| 1057 | elimit=datetime.datetime.strptime(mfdate.split('/')[-1]+self.basetime,'%Y%m%d%H') |
---|
| 1058 | |
---|
| 1059 | if self.basetime=='12': |
---|
| 1060 | if 'acc' in pk: |
---|
| 1061 | |
---|
| 1062 | # Strategy: if maxtime-elimit>=24h reduce date by 1, |
---|
| 1063 | # if 12h<=maxtime-elimit<12h reduce time for last date |
---|
| 1064 | # if maxtime-elimit<12h reduce step for last time |
---|
| 1065 | # A split of the MARS job into 2 is likely necessary. |
---|
| 1066 | maxtime=elimit-datetime.timedelta(hours=24) |
---|
| 1067 | mfdate='/'.join(('/'.join(mfdate.split('/')[:-1]),datetime.datetime.strftime(maxtime,'%Y%m%d'))) |
---|
[51f9853] | 1068 | # increase number of mars requests |
---|
| 1069 | self.mreq_count += 1 |
---|
[e18f4b5] | 1070 | MR= MARSretrieval(self.server, self.public, dataset=self.dataset, marsclass=self.marsclass, stream=self.stream, |
---|
| 1071 | type=mftype, levtype=pv[1], levelist=pv[2],resol=self.resol, gaussian=gaussian, |
---|
| 1072 | accuracy=self.accuracy,grid=pv[3],target=mftarget,area=area, |
---|
| 1073 | date=mfdate, time=mftime,number=self.number,step=mfstep, expver=self.expver, param=pv[0]) |
---|
| 1074 | |
---|
[5862eb9] | 1075 | if request == 0: |
---|
[fb8810c] | 1076 | MR.displayInfo() |
---|
| 1077 | MR.dataRetrieve() |
---|
[5862eb9] | 1078 | elif request == 1: |
---|
[51f9853] | 1079 | MR.print_infodata_csv(self.inputdir, self.mreq_count) |
---|
[5862eb9] | 1080 | elif request == 2: |
---|
[51f9853] | 1081 | MR.print_infodata_csv(self.inputdir, self.mreq_count) |
---|
[fb8810c] | 1082 | MR.displayInfo() |
---|
| 1083 | MR.dataRetrieve() |
---|
[e18f4b5] | 1084 | |
---|
| 1085 | maxtime=elimit-datetime.timedelta(hours=12) |
---|
| 1086 | mfdate=datetime.datetime.strftime(maxtime,'%Y%m%d') |
---|
| 1087 | mftime='00' |
---|
| 1088 | mftarget=self.inputdir+"/"+ftype+pk+'.'+mfdate+'.'+str(os.getppid())+'.'+str(os.getpid())+".grb" |
---|
[51f9853] | 1089 | # increase number of mars requests |
---|
| 1090 | self.mreq_count += 1 |
---|
[e18f4b5] | 1091 | MR= MARSretrieval(self.server, self.public, dataset=self.dataset, marsclass=self.marsclass, stream=self.stream, |
---|
| 1092 | type=mftype, levtype=pv[1], levelist=pv[2],resol=self.resol, gaussian=gaussian, |
---|
| 1093 | accuracy=self.accuracy,grid=pv[3],target=mftarget,area=area, |
---|
| 1094 | date=mfdate, time=mftime,number=self.number,step=mfstep, expver=self.expver, param=pv[0]) |
---|
[5862eb9] | 1095 | if request == 0: |
---|
[fb8810c] | 1096 | MR.displayInfo() |
---|
| 1097 | MR.dataRetrieve() |
---|
[5862eb9] | 1098 | elif request == 1: |
---|
[51f9853] | 1099 | MR.print_infodata_csv(self.inputdir, self.mreq_count) |
---|
[5862eb9] | 1100 | elif request == 2: |
---|
[51f9853] | 1101 | MR.print_infodata_csv(self.inputdir, self.mreq_count) |
---|
[fb8810c] | 1102 | MR.displayInfo() |
---|
| 1103 | MR.dataRetrieve() |
---|
[e18f4b5] | 1104 | else: |
---|
[51f9853] | 1105 | # increase number of mars requests |
---|
| 1106 | self.mreq_count += 1 |
---|
[e18f4b5] | 1107 | MR= MARSretrieval(self.server, self.public, dataset=self.dataset, marsclass=self.marsclass, stream=self.stream, |
---|
| 1108 | type=mftype, levtype=pv[1], levelist=pv[2],resol=self.resol, gaussian=gaussian, |
---|
| 1109 | accuracy=self.accuracy,grid=pv[3],target=mftarget,area=area, |
---|
| 1110 | date=mfdate, time=mftime,number=self.number,step=mfstep, expver=self.expver, param=pv[0]) |
---|
| 1111 | |
---|
[5862eb9] | 1112 | if request == 0: |
---|
[fb8810c] | 1113 | MR.displayInfo() |
---|
| 1114 | MR.dataRetrieve() |
---|
[5862eb9] | 1115 | elif request == 1: |
---|
[51f9853] | 1116 | MR.print_infodata_csv(self.inputdir, self.mreq_count) |
---|
[5862eb9] | 1117 | elif request == 2: |
---|
[51f9853] | 1118 | MR.print_infodata_csv(self.inputdir, self.mreq_count) |
---|
[fb8810c] | 1119 | MR.displayInfo() |
---|
| 1120 | MR.dataRetrieve() |
---|
[e18f4b5] | 1121 | else: |
---|
| 1122 | maxtime=elimit-datetime.timedelta(hours=24) |
---|
| 1123 | mfdate=datetime.datetime.strftime(maxtime,'%Y%m%d') |
---|
| 1124 | |
---|
| 1125 | mftimesave=''.join(mftime) |
---|
| 1126 | |
---|
| 1127 | if '/' in mftime: |
---|
| 1128 | times=mftime.split('/') |
---|
| 1129 | while int(times[0])+int(mfstep.split('/')[0])<=12 and pk!='OG_OROLSM__SL' and 'acc' not in pk: |
---|
| 1130 | times=times[1:] |
---|
| 1131 | if len(times)>1: |
---|
| 1132 | mftime='/'.join(times) |
---|
| 1133 | else: |
---|
| 1134 | mftime=times[0] |
---|
[51f9853] | 1135 | # increase number of mars requests |
---|
| 1136 | self.mreq_count += 1 |
---|
[e18f4b5] | 1137 | MR= MARSretrieval(self.server, self.public, dataset=self.dataset, marsclass=self.marsclass, stream=self.stream, |
---|
| 1138 | type=mftype, levtype=pv[1], levelist=pv[2],resol=self.resol, gaussian=gaussian, |
---|
| 1139 | accuracy=self.accuracy,grid=pv[3],target=mftarget,area=area, |
---|
| 1140 | date=mfdate, time=mftime,number=self.number,step=mfstep, expver=self.expver, param=pv[0]) |
---|
| 1141 | |
---|
[5862eb9] | 1142 | if request == 0: |
---|
[fb8810c] | 1143 | MR.displayInfo() |
---|
| 1144 | MR.dataRetrieve() |
---|
[5862eb9] | 1145 | elif request == 1: |
---|
[51f9853] | 1146 | MR.print_infodata_csv(self.inputdir, self.mreq_count) |
---|
[5862eb9] | 1147 | elif request == 2: |
---|
[51f9853] | 1148 | MR.print_infodata_csv(self.inputdir, self.mreq_count) |
---|
[fb8810c] | 1149 | MR.displayInfo() |
---|
| 1150 | MR.dataRetrieve() |
---|
[e18f4b5] | 1151 | |
---|
| 1152 | if int(mftimesave.split('/')[0])==0 and int(mfstep.split('/')[0])==0 and pk!='OG_OROLSM__SL': |
---|
| 1153 | mfdate=datetime.datetime.strftime(elimit,'%Y%m%d') |
---|
| 1154 | mftime='00' |
---|
| 1155 | mfstep='000' |
---|
| 1156 | mftarget=self.inputdir+"/"+ftype+pk+'.'+mfdate+'.'+str(os.getppid())+'.'+str(os.getpid())+".grb" |
---|
[51f9853] | 1157 | # increase number of mars requests |
---|
| 1158 | self.mreq_count += 1 |
---|
[e18f4b5] | 1159 | MR= MARSretrieval(self.server, self.public, dataset=self.dataset, marsclass=self.marsclass, stream=self.stream, |
---|
| 1160 | type=mftype, levtype=pv[1], levelist=pv[2],resol=self.resol, gaussian=gaussian, |
---|
| 1161 | accuracy=self.accuracy,grid=pv[3],target=mftarget,area=area, |
---|
| 1162 | date=mfdate, time=mftime,number=self.number,step=mfstep, expver=self.expver, param=pv[0]) |
---|
| 1163 | |
---|
[5862eb9] | 1164 | if request == 0: |
---|
[fb8810c] | 1165 | MR.displayInfo() |
---|
| 1166 | MR.dataRetrieve() |
---|
[5862eb9] | 1167 | elif request == 1: |
---|
[51f9853] | 1168 | MR.print_infodata_csv(self.inputdir, self.mreq_count) |
---|
[5862eb9] | 1169 | elif request == 2: |
---|
[51f9853] | 1170 | MR.print_infodata_csv(self.inputdir, self.mreq_count) |
---|
[fb8810c] | 1171 | MR.displayInfo() |
---|
| 1172 | MR.dataRetrieve() |
---|
[51f9853] | 1173 | |
---|
| 1174 | |
---|
[5862eb9] | 1175 | if request == 0 or request == 2: |
---|
[51f9853] | 1176 | print('MARS retrieve done ... ') |
---|
[5862eb9] | 1177 | elif request == 1: |
---|
[51f9853] | 1178 | print('MARS request printed ...') |
---|
[e18f4b5] | 1179 | |
---|
| 1180 | def getFlexpartTime(self, type,step, time): |
---|
| 1181 | cstep='{:0>3}'.format(step) |
---|
| 1182 | ctime='{:0>2}'.format(int(time/100)) |
---|
| 1183 | |
---|
| 1184 | ctype = str(type).upper() |
---|
| 1185 | myinfo = [ctype,ctime, cstep] |
---|
| 1186 | cflextime = None |
---|
| 1187 | for t, marsinfo in self.mars.items(): |
---|
| 1188 | if myinfo == marsinfo: |
---|
| 1189 | cflextime=t |
---|
| 1190 | return cflextime |
---|
| 1191 | |
---|
    def process_output(self, c):
        '''Postprocess the FLEXPART input files in self.outputfilelist.

        Depending on the CONTROL settings in c this:
          - converts the files to GRIB2 (grib_set),
          - transfers them via ectrans to a local gateway and/or stores
            them in ECFS (only off the WebAPI, i.e. c.ecapi==False),
          - moves them to the output directory,
          - optionally runs grib2flexpart for the .fp format, writing the
            AVAILABLE, pathnames and Options/COMMAND files it needs.
        '''
        print 'Postprocessing:\n Format: {}\n'.format(c.format)
        if c.ecapi==False:
            # running on ECMWF hardware: ecaccess transfer/storage possible
            print 'ecstorage: {}\n ecfsdir: {}\n'.format(c.ecstorage,c.ecfsdir)
            if not hasattr(c,'gateway'):
                c.gateway=os.getenv('GATEWAY')
            if not hasattr(c,'destination'):
                c.destination=os.getenv('DESTINATION')
            print 'ectrans: {}\n gateway: {}\n destination: {}\n '.format(c.ectrans, c.gateway,c.destination)
        print 'Output filelist: \n',self.outputfilelist

        # in-place conversion to GRIB2 via a temporary '_2' file
        if c.format.lower()=='grib2':
            for ofile in self.outputfilelist:
                p=subprocess.check_call(['grib_set','-s','edition=2,productDefinitionTemplateNumber=8',ofile,ofile+'_2'])
                p=subprocess.check_call(['mv',ofile+'_2',ofile])

        if int(c.ectrans)==1 and c.ecapi==False:
            for ofile in self.outputfilelist:
                p=subprocess.check_call(['ectrans','-overwrite','-gateway',c.gateway,
                                         '-remote',c.destination,'-source',ofile])
                print 'ectrans:',p
        if int(c.ecstorage)==1 and c.ecapi==False:
            for ofile in self.outputfilelist:
                p=subprocess.check_call(['ecp','-o',ofile,os.path.expandvars(c.ecfsdir)])

        # example AVAILABLE line:
        #20131107 000000      EN13110700       ON DISC
        if c.outputdir!=c.inputdir:
            for ofile in self.outputfilelist:
                p=subprocess.check_call(['mv',ofile,c.outputdir])

        if c.grib2flexpart=='1':
            # generate the AVAILABLE file listing each output file with
            # its date and time, derived from the file name
            f=open(c.outputdir+'/'+'AVAILABLE','w')
            clist=[]
            for ofile in self.outputfilelist:  # generate AVAILABLE file
                fname=ofile.split('/')
                if '.' in fname[-1]:
                    # forecast naming: <prefix+yymmdd>.<hh>.<step> ->
                    # valid time is start time plus step
                    l=fname[-1].split('.')
                    timestamp=datetime.datetime.strptime(l[0][-6:]+l[1],'%y%m%d%H')
                    timestamp+=datetime.timedelta(hours=int(l[2]))
                    cdate=datetime.datetime.strftime(timestamp,'%Y%m%d')
                    chms=datetime.datetime.strftime(timestamp,'%H%M%S')

                else:
                    # analysis naming: date/hour encoded in the last 8 chars
                    cdate='20'+fname[-1][-8:-2]
                    chms=fname[-1][-2:]+'0000'
                clist.append(cdate+' '+chms+' '*6+fname[-1]+' '*14+'ON DISC')
            clist.sort()
            f.write('\n'.join(clist)+'\n')
            f.close()

            # FLEXPART 'pathnames' file pointing at outputdir and AVAILABLE
            pwd=os.path.abspath(c.outputdir)
            f=open(pwd+'/pathnames','w')
            f.write(pwd+'/Options/\n')
            f.write(pwd+'/\n')
            f.write(pwd+'/\n')
            f.write(pwd+'/AVAILABLE\n')
            f.write('==================\n')
            f.close()

            # copy the template COMMAND file, replacing the start/end
            # date lines (two lines after LDIRECT) with the first and
            # last timestamps from the AVAILABLE list
            if not os.path.exists(pwd+'/Options'):
                os.makedirs(pwd+'/Options')
            f=open(os.path.expandvars(os.path.expanduser(c.flexpart_root_scripts))+'/../Options/COMMAND','r')
            lflist=f.read().split('\n')
            i=0
            for l in lflist:
                if 'LDIRECT' in l.upper():
                    break
                i+=1

            clist.sort()
            lflist=lflist[:i+1]+[clist[0][:16],clist[-1][:16]]+lflist[i+3:]
            g=open(pwd+'/Options/COMMAND','w')
            g.write('\n'.join(lflist)+'\n')
            g.close()

            # run grib2flexpart in the output directory to pre-convert
            # the fields into the faster .fp format
            os.chdir(c.outputdir)
            p=subprocess.check_call([os.path.expandvars(os.path.expanduser(c.flexpart_root_scripts))+'/../FLEXPART_PROGRAM/grib2flexpart',
                                     'useAvailable','.'])
            os.chdir(pwd)
---|
| 1272 | |
---|
| 1273 | def create(self, inputfiles, c,): |
---|
| 1274 | |
---|
| 1275 | table128=init128(c.flexextractdir+'/grib_templates/ecmwf_grib1_table_128') |
---|
| 1276 | wrfpars=toparamId('sp/mslp/skt/2t/10u/10v/2d/z/lsm/sst/ci/sd/stl1/stl2/stl3/stl4/swvl1/swvl2/swvl3/swvl4',table128) |
---|
| 1277 | # index_keys=["date","time","stepRange"] |
---|
| 1278 | index_keys=["date","time","step"] |
---|
| 1279 | indexfile=c.inputdir+"/date_time_stepRange.idx" |
---|
| 1280 | silentremove(indexfile) |
---|
| 1281 | grib=GribTools(inputfiles.files) |
---|
| 1282 | iid=grib.index(index_keys=index_keys, index_file = indexfile) |
---|
| 1283 | |
---|
| 1284 | print 'index done...' |
---|
| 1285 | fdict={'10':None,'11':None,'12':None,'13':None,'16':None,'17':None,'19':None,'21':None,'22':None,'20':None} |
---|
| 1286 | for f in fdict.keys(): |
---|
| 1287 | silentremove(c.inputdir+"/fort."+f) |
---|
| 1288 | |
---|
| 1289 | index_vals = [] |
---|
| 1290 | for key in index_keys: |
---|
| 1291 | key_vals = grib_index_get(iid,key) |
---|
| 1292 | print key_vals |
---|
| 1293 | |
---|
| 1294 | index_vals.append(key_vals) |
---|
| 1295 | |
---|
| 1296 | for prod in product(*index_vals): |
---|
| 1297 | for i in range(len(index_keys)): |
---|
| 1298 | grib_index_select(iid,index_keys[i],prod[i]) |
---|
| 1299 | |
---|
| 1300 | |
---|
| 1301 | gid = grib_new_from_index(iid) |
---|
| 1302 | hid = gid |
---|
| 1303 | cflextime = None |
---|
| 1304 | for k,f in fdict.iteritems(): |
---|
| 1305 | fdict[k] = open(c.inputdir+'/fort.'+k,'w') |
---|
| 1306 | if gid is not None: |
---|
| 1307 | cdate = str(grib_get(gid, 'date')) |
---|
| 1308 | time = grib_get(gid, 'time') |
---|
| 1309 | type = grib_get(gid, 'type') |
---|
| 1310 | step = grib_get(gid, 'step') |
---|
| 1311 | # step = grib_get(gid, 'stepRange') |
---|
| 1312 | # cflextime = self.getFlexpartTime(type,step, time) |
---|
| 1313 | timestamp=datetime.datetime.strptime(cdate+'{:0>2}'.format(time/100),'%Y%m%d%H') |
---|
| 1314 | timestamp+=datetime.timedelta(hours=int(step)) |
---|
| 1315 | # print gid,index_keys[i],prod[i],cdate,time,step,timestamp |
---|
| 1316 | |
---|
| 1317 | cdateH=datetime.datetime.strftime(timestamp,'%Y%m%d%H') |
---|
| 1318 | chms=datetime.datetime.strftime(timestamp,'%H%M%S') |
---|
| 1319 | |
---|
| 1320 | if c.basetime !=None: |
---|
| 1321 | slimit=datetime.datetime.strptime(c.start_date+'00','%Y%m%d%H') |
---|
| 1322 | bt='23' |
---|
| 1323 | if c.basetime=='00': |
---|
| 1324 | bt='00' |
---|
| 1325 | slimit=datetime.datetime.strptime(c.end_date+bt,'%Y%m%d%H')-datetime.timedelta(hours=12-int(c.dtime)) |
---|
| 1326 | |
---|
| 1327 | if c.basetime=='12': |
---|
| 1328 | bt='12' |
---|
| 1329 | slimit=datetime.datetime.strptime(c.end_date+bt,'%Y%m%d%H')-datetime.timedelta(hours=12-int(c.dtime)) |
---|
| 1330 | |
---|
| 1331 | elimit=datetime.datetime.strptime(c.end_date+bt,'%Y%m%d%H') |
---|
| 1332 | |
---|
| 1333 | if timestamp<slimit or timestamp>elimit: |
---|
| 1334 | continue |
---|
| 1335 | |
---|
| 1336 | |
---|
| 1337 | |
---|
| 1338 | try: |
---|
| 1339 | if c.wrf=='1': |
---|
| 1340 | if 'olddate' not in locals(): |
---|
| 1341 | fwrf = open(c.outputdir+'/WRF'+cdate+'.{:0>2}'.format(time)+'.000.grb2','w') |
---|
| 1342 | olddate=cdate[:] |
---|
| 1343 | else: |
---|
| 1344 | if cdate!=olddate: |
---|
| 1345 | fwrf = open(c.outputdir+'/WRF'+cdate+'.{:0>2}'.format(time)+'.000.grb2','w') |
---|
| 1346 | olddate=cdate[:] |
---|
| 1347 | except AttributeError: |
---|
| 1348 | pass |
---|
| 1349 | |
---|
| 1350 | # print 'cyear '+cyear+'/'+cmonth+'/'+'/EI'+cyear[2:4]+cmonth+cday+cflextime |
---|
| 1351 | |
---|
| 1352 | savedfields=[] |
---|
| 1353 | while 1: |
---|
| 1354 | if gid is None: break |
---|
| 1355 | paramId = grib_get(gid, 'paramId') |
---|
| 1356 | gridtype = grib_get(gid, 'gridType') |
---|
| 1357 | datatype = grib_get(gid, 'dataType') |
---|
| 1358 | levtype = grib_get(gid, 'typeOfLevel') |
---|
| 1359 | if paramId == 133 and gridtype=='reduced_gg': |
---|
| 1360 | # Relative humidity (Q.grb) is used as a template only so we need the first we "meet" |
---|
| 1361 | fout=open(c.inputdir+'/fort.18','w') |
---|
| 1362 | grib_write(gid,fout) |
---|
| 1363 | fout.close() |
---|
| 1364 | elif paramId == 131 or paramId == 132: |
---|
| 1365 | grib_write(gid, fdict['10']) |
---|
| 1366 | elif paramId == 130: |
---|
| 1367 | grib_write(gid, fdict['11']) |
---|
| 1368 | elif paramId == 133 and gridtype!='reduced_gg': |
---|
| 1369 | grib_write(gid, fdict['17']) |
---|
| 1370 | elif paramId == 152: |
---|
| 1371 | grib_write(gid, fdict['12']) |
---|
| 1372 | elif paramId == 155 and gridtype=='sh': |
---|
| 1373 | grib_write(gid, fdict['13']) |
---|
| 1374 | elif paramId in [129,138,155] and levtype=='hybrid' and c.wrf=='1': |
---|
| 1375 | # print paramId,'not written' |
---|
| 1376 | pass |
---|
| 1377 | elif paramId == 246 or paramId == 247: # cloud liquid water and ice |
---|
| 1378 | if paramId==246: |
---|
| 1379 | clwc=grib_get_values(gid) |
---|
| 1380 | else: |
---|
| 1381 | clwc+=grib_get_values(gid) |
---|
| 1382 | grib_set_values(gid,clwc) |
---|
| 1383 | # grib_set(gid,'shortName','qc') |
---|
| 1384 | grib_set(gid,'paramId',201031) |
---|
| 1385 | grib_write(gid, fdict['22']) |
---|
| 1386 | |
---|
| 1387 | elif paramId == 135: |
---|
| 1388 | grib_write(gid, fdict['19']) |
---|
| 1389 | elif paramId == 77: |
---|
| 1390 | grib_write(gid, fdict['21']) |
---|
| 1391 | else: |
---|
| 1392 | if paramId not in savedfields: |
---|
| 1393 | grib_write(gid, fdict['16']) |
---|
| 1394 | savedfields.append(paramId) |
---|
| 1395 | else: |
---|
| 1396 | print 'duplicate '+str(paramId)+' not written' |
---|
| 1397 | |
---|
| 1398 | try: |
---|
| 1399 | if c.wrf=='1': |
---|
| 1400 | if levtype=='hybrid': |
---|
| 1401 | if paramId in [129,130,131,132,133,138,155]: |
---|
| 1402 | grib_write(gid,fwrf) |
---|
| 1403 | else: |
---|
| 1404 | if paramId in wrfpars: |
---|
| 1405 | grib_write(gid,fwrf) |
---|
| 1406 | except AttributeError: |
---|
| 1407 | pass |
---|
| 1408 | |
---|
| 1409 | |
---|
| 1410 | |
---|
| 1411 | grib_release(gid) |
---|
| 1412 | gid = grib_new_from_index(iid) |
---|
| 1413 | # call for CONVERT2 |
---|
| 1414 | |
---|
| 1415 | for f in fdict.values(): |
---|
| 1416 | f.close() |
---|
| 1417 | |
---|
| 1418 | if hid is not None: |
---|
| 1419 | pwd=os.getcwd() |
---|
| 1420 | os.chdir(c.inputdir) |
---|
| 1421 | if os.stat('fort.21').st_size==0 and int(c.eta)==1: |
---|
| 1422 | print 'Parameter 77 (etadot) is missing, most likely it is not available for this type or date/time\n' |
---|
| 1423 | print 'Check parameters CLASS, TYPE, STREAM, START_DATE\n' |
---|
| 1424 | myerror(c,'fort.21 is empty while parameter eta is set to 1 in CONTROL file') |
---|
| 1425 | |
---|
| 1426 | p=subprocess.check_call([os.path.expandvars(os.path.expanduser(c.exedir))+'/CONVERT2'],shell=True) |
---|
| 1427 | os.chdir(pwd) |
---|
| 1428 | # create the corresponding output file fort.15 (generated by CONVERT2) + fort.16 (paramId 167 and paramId 168) |
---|
| 1429 | fnout=c.inputdir+'/'+c.prefix |
---|
| 1430 | if c.maxstep>12: |
---|
| 1431 | suffix=cdate[2:8]+'.{:0>2}'.format(time/100)+'.{:0>3}'.format(step) |
---|
| 1432 | else: |
---|
| 1433 | suffix=cdateH[2:10] |
---|
| 1434 | |
---|
| 1435 | fnout+=suffix |
---|
| 1436 | print "outputfile = " + fnout |
---|
| 1437 | self.outputfilelist.append(fnout) # needed for final processing |
---|
| 1438 | fout = open(fnout,'wb') |
---|
| 1439 | shutil.copyfileobj(open(c.inputdir+'/fort.15','rb'), fout) |
---|
| 1440 | if c.cwc=='1': |
---|
| 1441 | shutil.copyfileobj(open(c.inputdir+'/fort.22','rb'), fout) |
---|
| 1442 | shutil.copyfileobj(open(c.inputdir+'/flux'+cdate[0:2]+suffix,'rb'), fout) |
---|
| 1443 | shutil.copyfileobj(open(c.inputdir+'/fort.16','rb'), fout) |
---|
| 1444 | orolsm=glob.glob(c.inputdir+'/OG_OROLSM__SL.*.'+c.ppid+'*')[0] |
---|
| 1445 | shutil.copyfileobj(open(orolsm,'rb'), fout) |
---|
| 1446 | fout.close() |
---|
| 1447 | if c.omega=='1': |
---|
| 1448 | fnout=c.outputdir+'/OMEGA'+suffix |
---|
| 1449 | fout = open(fnout,'wb') |
---|
| 1450 | shutil.copyfileobj(open(c.inputdir+'/fort.25','rb'), fout) |
---|
| 1451 | |
---|
| 1452 | |
---|
| 1453 | |
---|
| 1454 | try: |
---|
| 1455 | if c.wrf=='1': |
---|
| 1456 | fwrf.close() |
---|
| 1457 | except: |
---|
| 1458 | pass |
---|
| 1459 | |
---|
| 1460 | grib_index_release(iid) |
---|
| 1461 | |
---|
    def deacc_fluxes(self, inputfiles, c):
        """Deaccumulate the flux fields retrieved from MARS.

        Builds a GRIB index over (date, time, step) for the files in
        *inputfiles*, then walks every date/time/step combination. For each
        accumulated flux parameter (the 'OG_acc_SL' parameter set, e.g.
        precipitation and surface fluxes) it converts the accumulated values
        into per-interval values and writes them, time-shifted, into
        'flux<date...>' files in c.inputdir for FLEXPART to read.

        Parameters:
            inputfiles -- object holding the list of retrieved GRIB files
                          (only .files is used here)
            c          -- CONTROL-file settings object; fields read here:
                          flexextractdir, inputdir, dtime, maxstep, marsclass,
                          basetime, end_date

        Side effects: creates/overwrites an index file and several flux
        output files under c.inputdir; prints progress to stdout.
        """

        # Map short names of the accumulated parameters to numeric paramIds
        # via the ECMWF GRIB1 table 128.
        table128=init128(c.flexextractdir+'/grib_templates/ecmwf_grib1_table_128')
        pars=toparamId(self.params['OG_acc_SL'][0],table128)
        index_keys=["date","time","step"]
        indexfile=c.inputdir+"/date_time_stepRange.idx"
        # Remove any stale index from a previous run before rebuilding it.
        silentremove(indexfile)
        grib=GribTools(inputfiles.files)
        iid=grib.index(index_keys=index_keys, index_file = indexfile)

        print 'index done...'

        # For each index key collect its distinct values, sorted numerically
        # (the index returns them as strings, so sort via int round-trip).
        index_vals = []
        for key in index_keys:
            key_vals = grib_index_get(iid,key)

            l=[]
            for k in key_vals:
                l.append(int(k))
            l.sort()
            key_vals=[]
            for k in l:
                key_vals.append(str(k))
            print key_vals
            index_vals.append(key_vals)

        # Per-parameter state, keyed by paramId as string:
        #   valsdict  -- accumulated values as retrieved
        #   svalsdict -- deaccumulated (per-dtime) values; used as a sliding
        #                window of 3-4 entries for the interpolation below
        #   stepsdict -- the forecast steps seen for this parameter
        valsdict={}
        svalsdict={}
        stepsdict={}
        for p in pars:
            valsdict[str(p)]=[]
            svalsdict[str(p)]=[]
            stepsdict[str(p)]=[]

        # Iterate over every (date, time, step) combination in the index.
        for prod in product(*index_vals):
            for i in range(len(index_keys)):
                grib_index_select(iid,index_keys[i],prod[i])

            gid = grib_new_from_index(iid)
            hid = gid
            cflextime = None
            if gid is not None:
                cdate = grib_get(gid, 'date')
                time = grib_get(gid, 'time')
                # NOTE(review): 'type' shadows the builtin and is never read
                # afterwards in this method.
                type = grib_get(gid, 'type')
                step = grib_get(gid, 'step')
                # Decode yyyymmdd/hhmm integers via Python-2 integer division.
                # The interpolated value is valid for step-2*dtime, hence the
                # backshift applied to fdate.
                sdate=datetime.datetime(year=cdate/10000,month=mod(cdate,10000)/100,
                                        day=mod(cdate,100),hour=time/100)
                fdate=sdate+datetime.timedelta(hours=step-2*int(c.dtime))
                sdates=sdate+datetime.timedelta(hours=step)
            else:
                break

            # Open three output files per combination:
            #   f -- values valid at step-2*dtime (the interpolated midpoint)
            #   g -- values valid at step-dtime
            #   h -- values valid at step (only written at the series end)
            # Long forecasts (maxstep>12) encode date.time.step in the name,
            # short ones encode the valid date-hour directly.
            fnout=c.inputdir+'/'
            if c.maxstep>12:
                fnout+='flux'+sdate.strftime('%Y%m%d')+'.{:0>2}'.format(time/100)+'.{:0>3}'.format(step-2*int(c.dtime))
                gnout=c.inputdir+'/flux'+sdate.strftime('%Y%m%d')+'.{:0>2}'.format(time/100)+'.{:0>3}'.format(step-int(c.dtime))
                hnout=c.inputdir+'/flux'+sdate.strftime('%Y%m%d')+'.{:0>2}'.format(time/100)+'.{:0>3}'.format(step)
                g=open(gnout,'w')
                h=open(hnout,'w')
            else:
                fnout+='flux'+fdate.strftime('%Y%m%d%H')
                gnout=c.inputdir+'/flux'+(fdate+datetime.timedelta(hours=int(c.dtime))).strftime('%Y%m%d%H')
                hnout=c.inputdir+'/flux'+sdates.strftime('%Y%m%d%H')
                g=open(gnout,'w')
                h=open(hnout,'w')
            print "outputfile = " + fnout
            f=open(fnout,'w')

            # Process every GRIB message matching the selected combination.
            while 1:
                if gid is None: break
                cparamId = str(grib_get(gid, 'paramId'))
                step = grib_get(gid, 'step')
                atime = grib_get(gid, 'time')
                ni=grib_get(gid, 'Ni')
                nj=grib_get(gid, 'Nj')
                if cparamId in valsdict.keys():
                    values = grib_get_values(gid)
                    vdp=valsdict[cparamId]
                    svdp=svalsdict[cparamId]
                    sd=stepsdict[cparamId]

                    # Unit conversion factor: paramIds 142/143 (precipitation)
                    # get a different scaling than the energy fluxes (3600,
                    # presumably accumulated-per-hour -> per-second; confirm
                    # against FLEXPART's expected units).
                    if cparamId=='142' or cparamId=='143':
                        fak=1./1000.
                    else:
                        fak=3600.

                    values=(reshape(values,(nj,ni))).flatten()/fak
                    vdp.append(values[:]) # save the accumulated values
                    # ERA5 ('EA') fields are already per-interval; otherwise
                    # deaccumulate by differencing consecutive accumulations
                    # (the first step has nothing to difference against).
                    if c.marsclass.upper() == 'EA' or \
                       step<=int(c.dtime):
                        svdp.append(values[:]/int(c.dtime))
                    else:
                        svdp.append((vdp[-1]-vdp[-2])/int(c.dtime))

                    print cparamId,atime,step,len(values),values[0],std(values)
                    #svdp.append(values[:]) # save the 1-hourly or 3-hourly specific values
                    sd.append(step)
                    # Once at least 3 windows of deaccumulated values exist,
                    # an output value can be produced.
                    if len(svdp)>=3:
                        if len(svdp)>3:
                            # Interpolate to the window midpoint: darain for
                            # precipitation, dapoly (cubic polynomial) for the
                            # other fluxes.
                            # NOTE(review): 'elimit' is first assigned further
                            # down in this loop body; this branch is only
                            # reached on the 4th message onward, so it relies
                            # on the value carried over from the previous
                            # iteration — fragile but apparently intentional.
                            if cparamId=='142' or cparamId=='143':
                                values=darain(svdp)
                            else:
                                values=dapoly(svdp)

                            # Slide the window forward unless this is the very
                            # last step of the series (then keep all entries
                            # for the squeeze-out block below).
                            if not (step==c.maxstep and c.maxstep>12 or sdates==elimit):
                                vdp.pop(0)
                                svdp.pop(0)
                        else:
                            # Window just reached size 3: emit the raw
                            # deaccumulated value, no interpolation possible yet.
                            if c.maxstep>12:
                                values=svdp[1]
                            else:
                                values=svdp[0]

                        # Write the interpolated field into f, restamping the
                        # GRIB header to the time it is actually valid for.
                        grib_set_values(gid, values)
                        if c.maxstep>12:
                            grib_set(gid,'stepRange',max(0,step-2*int(c.dtime)))
                        else:
                            grib_set(gid,'stepRange',0)
                            grib_set(gid,'time',fdate.hour*100)
                            grib_set(gid,'date',fdate.year*10000+fdate.month*100+fdate.day)
                        grib_write(gid, f)

                        # End-of-series limit: either the requested basetime on
                        # the end date, or two dtime intervals past sdate.
                        # NOTE(review): datetime.timedelta's first positional
                        # argument is DAYS; every other call here passes
                        # hours=..., so this likely should be
                        # hours=2*int(c.dtime) — confirm before changing.
                        if c.basetime is not None:
                            elimit=datetime.datetime.strptime(c.end_date+c.basetime,'%Y%m%d%H')
                        else:
                            elimit=sdate+datetime.timedelta(2*int(c.dtime))

                        # squeeze out information of last two steps contained in svdp
                        # if step+int(c.dtime)==c.maxstep and c.maxstep>12 or sdates+datetime.timedelta(hours=int(c.dtime))>=elimit:
                        # Note that svdp[0] has not been popped in this case

                        if step==c.maxstep and c.maxstep>12 or sdates==elimit:
                            # Last step of the series: emit the final window
                            # entry into h, valid at fdate+2*dtime ...
                            values=svdp[3]
                            grib_set_values(gid, values)
                            grib_set(gid,'stepRange',0)
                            truedatetime=fdate+datetime.timedelta(hours=2*int(c.dtime))
                            grib_set(gid,'time',truedatetime.hour*100)
                            grib_set(gid,'date',truedatetime.year*10000+truedatetime.month*100+truedatetime.day)
                            grib_write(gid, h)

                            #values=(svdp[1]+svdp[2])/2.
                            # ... and an interpolated value (window reversed so
                            # the same darain/dapoly routines extrapolate the
                            # tail) into g, valid at fdate+dtime.
                            if cparamId=='142' or cparamId=='143':
                                values=darain(list(reversed(svdp)))
                            else:
                                values=dapoly(list(reversed(svdp)))

                            grib_set(gid,'stepRange',0)
                            truedatetime=fdate+datetime.timedelta(hours=int(c.dtime))
                            grib_set(gid,'time',truedatetime.hour*100)
                            grib_set(gid,'date',truedatetime.year*10000+truedatetime.month*100+truedatetime.day)
                            grib_set_values(gid, values)
                            grib_write(gid, g)

                grib_release(gid)

                gid = grib_new_from_index(iid)

            f.close()
            g.close()
            h.close()

        grib_index_release(iid)
---|