[2fb99de] | 1 | #!/usr/bin/env python |
---|
| 2 | # -*- coding: utf-8 -*- |
---|
| 3 | #******************************************************************************* |
---|
| 4 | # @Author: Anne Fouilloux (University of Oslo) |
---|
| 5 | # |
---|
| 6 | # @Date: October 2014 |
---|
| 7 | # |
---|
| 8 | # @Change History: |
---|
| 9 | # |
---|
| 10 | # November 2015 - Leopold Haimberger (University of Vienna): |
---|
| 11 | # - extended with class Control |
---|
| 12 | # - removed functions mkdir_p, daterange, years_between, months_between |
---|
| 13 | # - added functions darain, dapoly, to_param_id, init128, normal_exit, |
---|
| 14 | # my_error, clean_up, install_args_and_control, |
---|
| 15 | # interpret_args_and_control, |
---|
| 16 | # - removed function __del__ in class EIFLexpart |
---|
| 17 | # - added the following functions in EIFlexpart: |
---|
| 18 | # - create_namelist |
---|
| 19 | # - process_output |
---|
| 20 | # - deacc_fluxes |
---|
| 21 | # - modified existing EIFlexpart - functions for the use in |
---|
| 22 | # flex_extract |
---|
| 23 | # - retrieve also longer term forecasts, not only analyses and |
---|
| 24 | # short term forecast data |
---|
| 25 | # - added conversion into GRIB2 |
---|
| 26 | # - added conversion into .fp format for faster execution of FLEXPART |
---|
| 27 | # (see https://www.flexpart.eu/wiki/FpCtbtoWo4FpFormat) |
---|
| 28 | # |
---|
| 29 | # February 2018 - Anne Philipp (University of Vienna): |
---|
| 30 | # - applied PEP8 style guide |
---|
| 31 | # - added documentation |
---|
| 32 | # - removed function getFlexpartTime in class EcFlexpart |
---|
| 33 | # - outsourced class ControlFile |
---|
| 34 | # - outsourced class MarsRetrieval |
---|
| 35 | # - changed class name from EIFlexpart to EcFlexpart |
---|
| 36 | # - applied minor code changes (style) |
---|
| 37 | # - removed "dead code" , e.g. retrieval of Q since it is not needed |
---|
| 38 | # - removed "times" parameter from retrieve-method since it is not used |
---|
| 39 | # |
---|
| 40 | # @License: |
---|
| 41 | # (C) Copyright 2014-2018. |
---|
| 42 | # |
---|
| 43 | # This software is licensed under the terms of the Apache Licence Version 2.0 |
---|
| 44 | # which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. |
---|
| 45 | # |
---|
| 46 | # @Class Description: |
---|
| 47 | # FLEXPART needs grib files in a specific format. All necessary data fields |
---|
| 48 | # for one time step are stored in a single file. The class represents an |
---|
| 49 | # instance with all the parameter and settings necessary for retrieving |
---|
| 50 | # MARS data and modifying them so that they fit FLEXPART's needs. The class |
---|
| 51 | # is able to disaggregate the fluxes and convert grid types to the one needed |
---|
| 52 | # by FLEXPART, therefore using the FORTRAN program. |
---|
| 53 | # |
---|
| 54 | # @Class Content: |
---|
| 55 | # - __init__ |
---|
| 56 | # - write_namelist |
---|
| 57 | # - retrieve |
---|
| 58 | # - process_output |
---|
| 59 | # - create |
---|
| 60 | # - deacc_fluxes |
---|
| 61 | # |
---|
| 62 | # @Class Attributes: |
---|
| 63 | # - dtime |
---|
| 64 | # - basetime |
---|
| 65 | # - server |
---|
| 66 | # - marsclass |
---|
| 67 | # - stream |
---|
| 68 | # - resol |
---|
| 69 | # - accuracy |
---|
| 70 | # - number |
---|
| 71 | # - expver |
---|
| 72 | # - glevelist |
---|
| 73 | # - area |
---|
| 74 | # - grid |
---|
| 75 | # - level |
---|
| 76 | # - levelist |
---|
| 77 | # - types |
---|
| 78 | # - dates |
---|
| 79 | # - area |
---|
| 80 | # - gaussian |
---|
| 81 | # - params |
---|
| 82 | # - inputdir |
---|
| 83 | # - outputfilelist |
---|
| 84 | # |
---|
| 85 | #******************************************************************************* |
---|
| 86 | #pylint: disable=unsupported-assignment-operation |
---|
| 87 | # this is disabled because its an error in pylint for this specific case |
---|
| 88 | #pylint: disable=consider-using-enumerate |
---|
| 89 | # this is not useful in this case |
---|
| 90 | # ------------------------------------------------------------------------------ |
---|
| 91 | # MODULES |
---|
| 92 | # ------------------------------------------------------------------------------ |
---|
| 93 | import subprocess |
---|
| 94 | import shutil |
---|
| 95 | import os |
---|
| 96 | import glob |
---|
| 97 | from datetime import datetime, timedelta |
---|
| 98 | import numpy as np |
---|
| 99 | from gribapi import grib_set, grib_index_select, grib_new_from_index, grib_get,\ |
---|
| 100 | grib_write, grib_get_values, grib_set_values, grib_release,\ |
---|
| 101 | grib_index_release, grib_index_get |
---|
| 102 | |
---|
| 103 | # software specific classes and modules from flex_extract |
---|
| 104 | import _config |
---|
| 105 | from GribTools import GribTools |
---|
| 106 | from tools import init128, to_param_id, silent_remove, product, my_error |
---|
| 107 | from MarsRetrieval import MarsRetrieval |
---|
| 108 | import disaggregation |
---|
| 109 | |
---|
| 110 | # ------------------------------------------------------------------------------ |
---|
| 111 | # CLASS |
---|
| 112 | # ------------------------------------------------------------------------------ |
---|
| 113 | class EcFlexpart(object): |
---|
| 114 | ''' |
---|
| 115 | Class to retrieve FLEXPART specific ECMWF data. |
---|
| 116 | ''' |
---|
| 117 | # -------------------------------------------------------------------------- |
---|
| 118 | # CLASS FUNCTIONS |
---|
| 119 | # -------------------------------------------------------------------------- |
---|
| 120 | def __init__(self, c, fluxes=False): |
---|
| 121 | ''' |
---|
| 122 | @Description: |
---|
| 123 | Creates an object/instance of EcFlexpart with the |
---|
| 124 | associated settings of its attributes for the retrieval. |
---|
| 125 | |
---|
| 126 | @Input: |
---|
| 127 | self: instance of EcFlexpart |
---|
| 128 | The current object of the class. |
---|
| 129 | |
---|
| 130 | c: instance of class ControlFile |
---|
| 131 | Contains all the parameters of CONTROL file, which are e.g.: |
---|
| 132 | DAY1(start_date), DAY2(end_date), DTIME, MAXSTEP, TYPE, TIME, |
---|
| 133 | STEP, CLASS(marsclass), STREAM, NUMBER, EXPVER, GRID, LEFT, |
---|
| 134 | LOWER, UPPER, RIGHT, LEVEL, LEVELIST, RESOL, GAUSS, ACCURACY, |
---|
| 135 | OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, SMOOTH, FORMAT, |
---|
| 136 | ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, ECFSDIR, |
---|
| 137 | MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR, BASETIME |
---|
| 138 | DATE_CHUNK, DEBUG, INPUTDIR, OUTPUTDIR, FLEXPART_ROOT_SCRIPTS |
---|
| 139 | |
---|
| 140 | For more information about format and content of the parameter |
---|
| 141 | see documentation. |
---|
| 142 | |
---|
| 143 | fluxes: boolean, optional |
---|
| 144 | Decides if the flux parameter settings are stored or |
---|
| 145 | the rest of the parameter list. |
---|
| 146 | Default value is False. |
---|
| 147 | |
---|
| 148 | @Return: |
---|
| 149 | <nothing> |
---|
| 150 | ''' |
---|
| 151 | |
---|
| 152 | # different mars types for retrieving data for flexpart |
---|
| 153 | self.types = dict() |
---|
| 154 | |
---|
| 155 | if c.maxstep > len(c.type): # Pure forecast mode |
---|
| 156 | c.type = [c.type[1]] |
---|
| 157 | c.step = ['{:0>3}'.format(int(c.step[0]))] |
---|
| 158 | c.time = [c.time[0]] |
---|
| 159 | for i in range(1, c.maxstep + 1): |
---|
| 160 | c.type.append(c.type[0]) |
---|
| 161 | c.step.append('{:0>3}'.format(i)) |
---|
| 162 | c.time.append(c.time[0]) |
---|
| 163 | |
---|
| 164 | self.inputdir = c.inputdir |
---|
| 165 | self.basetime = c.basetime |
---|
| 166 | self.dtime = c.dtime |
---|
| 167 | i = 0 |
---|
| 168 | if fluxes and c.maxstep <= 24: |
---|
| 169 | # no forecast beyond one day is needed! |
---|
| 170 | # Thus, prepare flux data manually as usual |
---|
| 171 | # with only forecast fields with start times at 00/12 |
---|
| 172 | # (but without 00/12 fields since these are |
---|
| 173 | # the initialisation times of the flux fields |
---|
| 174 | # and therefore are zero all the time) |
---|
| 175 | self.types[c.type[1]] = {'times': '00/12', 'steps': |
---|
| 176 | '{}/to/12/by/{}'.format(c.dtime, c.dtime)} |
---|
| 177 | else: |
---|
| 178 | for ty, st, ti in zip(c.type, c.step, c.time): |
---|
| 179 | btlist = range(24) |
---|
| 180 | if c.basetime == '12': |
---|
| 181 | btlist = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12] |
---|
| 182 | if c.basetime == '00': |
---|
| 183 | btlist = [13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 0] |
---|
| 184 | |
---|
| 185 | if i % int(c.dtime) == 0 and (i in btlist or c.maxstep > 24): |
---|
| 186 | |
---|
| 187 | if ty not in self.types.keys(): |
---|
| 188 | self.types[ty] = {'times': '', 'steps': ''} |
---|
| 189 | |
---|
| 190 | if ti not in self.types[ty]['times']: |
---|
| 191 | if self.types[ty]['times']: |
---|
| 192 | self.types[ty]['times'] += '/' |
---|
| 193 | self.types[ty]['times'] += ti |
---|
| 194 | |
---|
| 195 | if st not in self.types[ty]['steps']: |
---|
| 196 | if self.types[ty]['steps']: |
---|
| 197 | self.types[ty]['steps'] += '/' |
---|
| 198 | self.types[ty]['steps'] += st |
---|
| 199 | i += 1 |
---|
| 200 | |
---|
| 201 | self.marsclass = c.marsclass |
---|
| 202 | self.stream = c.stream |
---|
| 203 | self.number = c.number |
---|
| 204 | self.resol = c.resol |
---|
| 205 | self.accuracy = c.accuracy |
---|
| 206 | self.level = c.level |
---|
| 207 | |
---|
| 208 | if c.levelist: |
---|
| 209 | self.levelist = c.levelist |
---|
| 210 | else: |
---|
| 211 | self.levelist = '1/to/' + c.level |
---|
| 212 | |
---|
| 213 | # for gaussian grid retrieval |
---|
| 214 | self.glevelist = '1/to/' + c.level |
---|
| 215 | |
---|
| 216 | if hasattr(c, 'gaussian') and c.gaussian: |
---|
| 217 | self.gaussian = c.gaussian |
---|
| 218 | else: |
---|
| 219 | self.gaussian = '' |
---|
| 220 | |
---|
| 221 | if hasattr(c, 'expver') and c.expver: |
---|
| 222 | self.expver = c.expver |
---|
| 223 | else: |
---|
| 224 | self.expver = '1' |
---|
| 225 | |
---|
| 226 | if hasattr(c, 'number') and c.number: |
---|
| 227 | self.number = c.number |
---|
| 228 | else: |
---|
| 229 | self.number = '0' |
---|
| 230 | |
---|
| 231 | if 'N' in c.grid: # Gaussian output grid |
---|
| 232 | self.grid = c.grid |
---|
| 233 | self.area = 'G' |
---|
| 234 | else: |
---|
| 235 | self.grid = '{}/{}'.format(int(c.grid) / 1000., int(c.grid) / 1000.) |
---|
| 236 | self.area = '{}/{}/{}/{}'.format(int(c.upper) / 1000., |
---|
| 237 | int(c.left) / 1000., |
---|
| 238 | int(c.lower) / 1000., |
---|
| 239 | int(c.right) / 1000.) |
---|
| 240 | |
---|
| 241 | self.outputfilelist = [] |
---|
| 242 | |
---|
| 243 | |
---|
| 244 | # Now comes the nasty part that deals with the different |
---|
| 245 | # scenarios we have: |
---|
| 246 | # 1) Calculation of etadot on |
---|
| 247 | # a) Gaussian grid |
---|
| 248 | # b) Output grid |
---|
| 249 | # c) Output grid using parameter 77 retrieved from MARS |
---|
| 250 | # 3) Calculation/Retrieval of omega |
---|
| 251 | # 4) Download also data for WRF |
---|
| 252 | |
---|
| 253 | |
---|
| 254 | # Different grids need different retrievals |
---|
| 255 | # SH = Spherical Harmonics, GG = Gaussian Grid, |
---|
| 256 | # OG = Output Grid, ML = MultiLevel, SL = SingleLevel |
---|
| 257 | self.params = {'SH__ML': '', 'SH__SL': '', |
---|
| 258 | 'GG__ML': '', 'GG__SL': '', |
---|
| 259 | 'OG__ML': '', 'OG__SL': '', |
---|
| 260 | 'OG_OROLSM_SL': '', 'OG_acc_SL': ''} |
---|
| 261 | |
---|
| 262 | if fluxes is False: |
---|
| 263 | self.params['SH__SL'] = ['LNSP', 'ML', '1', 'OFF'] |
---|
| 264 | # "SD/MSL/TCC/10U/10V/2T/2D/129/172" |
---|
| 265 | self.params['OG__SL'] = ["141/151/164/165/166/167/168/129/172", \ |
---|
| 266 | 'SFC', '1', self.grid] |
---|
| 267 | if c.addpar: |
---|
| 268 | if c.addpar[0] == '/': |
---|
| 269 | c.addpar = c.addpar[1:] |
---|
| 270 | self.params['OG__SL'][0] += '/' + '/'.join(c.addpar) |
---|
| 271 | |
---|
| 272 | self.params['OG_OROLSM__SL'] = ["160/27/28/173", \ |
---|
| 273 | 'SFC', '1', self.grid] |
---|
| 274 | |
---|
| 275 | self.params['OG__ML'] = ['T/Q', 'ML', self.levelist, self.grid] |
---|
| 276 | |
---|
| 277 | if c.gauss == '0' and c.eta == '1': |
---|
| 278 | # the simplest case |
---|
| 279 | self.params['OG__ML'][0] += '/U/V/77' |
---|
| 280 | elif c.gauss == '0' and c.eta == '0': |
---|
| 281 | # this is not recommended (inaccurate) |
---|
| 282 | self.params['OG__ML'][0] += '/U/V' |
---|
| 283 | elif c.gauss == '1' and c.eta == '0': |
---|
| 284 | # this is needed for data before 2008, or for reanalysis data |
---|
| 285 | self.params['GG__SL'] = ['Q', 'ML', '1', \ |
---|
| 286 | '{}'.format((int(self.resol) + 1) / 2)] |
---|
| 287 | self.params['SH__ML'] = ['U/V/D', 'ML', self.glevelist, 'OFF'] |
---|
| 288 | else: |
---|
| 289 | print('Warning: This is a very costly parameter combination, ' |
---|
| 290 | 'use only for debugging!') |
---|
| 291 | self.params['GG__SL'] = ['Q', 'ML', '1', \ |
---|
| 292 | '{}'.format((int(self.resol) + 1) / 2)] |
---|
| 293 | self.params['GG__ML'] = ['U/V/D/77', 'ML', self.glevelist, \ |
---|
| 294 | '{}'.format((int(self.resol) + 1) / 2)] |
---|
| 295 | |
---|
| 296 | if hasattr(c, 'omega') and c.omega == '1': |
---|
| 297 | self.params['OG__ML'][0] += '/W' |
---|
| 298 | |
---|
| 299 | # add cloud water content if necessary |
---|
| 300 | if hasattr(c, 'cwc') and c.cwc == '1': |
---|
| 301 | self.params['OG__ML'][0] += '/CLWC/CIWC' |
---|
| 302 | |
---|
| 303 | # add vorticity and geopotential height for WRF if necessary |
---|
| 304 | if hasattr(c, 'wrf') and c.wrf == '1': |
---|
| 305 | self.params['OG__ML'][0] += '/Z/VO' |
---|
| 306 | if '/D' not in self.params['OG__ML'][0]: |
---|
| 307 | self.params['OG__ML'][0] += '/D' |
---|
| 308 | #wrf_sfc = 'sp/msl/skt/2t/10u/10v/2d/z/lsm/sst/ci/sd/stl1/ / |
---|
| 309 | # stl2/stl3/stl4/swvl1/swvl2/swvl3/swvl4'.upper() |
---|
| 310 | wrf_sfc = '134/235/167/165/166/168/129/172/34/31/141/ \ |
---|
| 311 | 139/170/183/236/39/40/41/42'.upper() |
---|
| 312 | lwrt_sfc = wrf_sfc.split('/') |
---|
| 313 | for par in lwrt_sfc: |
---|
| 314 | if par not in self.params['OG__SL'][0]: |
---|
| 315 | self.params['OG__SL'][0] += '/' + par |
---|
| 316 | |
---|
| 317 | else: |
---|
| 318 | self.params['OG_acc_SL'] = ["LSP/CP/SSHF/EWSS/NSSS/SSR", \ |
---|
| 319 | 'SFC', '1', self.grid] |
---|
| 320 | |
---|
| 321 | # if needed, add additional WRF specific parameters here |
---|
| 322 | |
---|
| 323 | return |
---|
| 324 | |
---|
| 325 | |
---|
| 326 | def write_namelist(self, c, filename): |
---|
| 327 | ''' |
---|
| 328 | @Description: |
---|
| 329 | Creates a namelist file in the temporary directory and writes |
---|
| 330 | the following values to it: maxl, maxb, mlevel, |
---|
| 331 | mlevelist, mnauf, metapar, rlo0, rlo1, rla0, rla1, |
---|
| 332 | momega, momegadiff, mgauss, msmooth, meta, metadiff, mdpdeta |
---|
| 333 | |
---|
| 334 | @Input: |
---|
| 335 | self: instance of EcFlexpart |
---|
| 336 | The current object of the class. |
---|
| 337 | |
---|
| 338 | c: instance of class ControlFile |
---|
| 339 | Contains all the parameters of CONTROL files, which are e.g.: |
---|
| 340 | DAY1(start_date), DAY2(end_date), DTIME, MAXSTEP, TYPE, TIME, |
---|
| 341 | STEP, CLASS(marsclass), STREAM, NUMBER, EXPVER, GRID, LEFT, |
---|
| 342 | LOWER, UPPER, RIGHT, LEVEL, LEVELIST, RESOL, GAUSS, ACCURACY, |
---|
| 343 | OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, SMOOTH, FORMAT, |
---|
| 344 | ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, ECFSDIR, |
---|
| 345 | MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR, BASETIME |
---|
| 346 | DATE_CHUNK, DEBUG, INPUTDIR, OUTPUTDIR, FLEXPART_ROOT_SCRIPTS |
---|
| 347 | |
---|
| 348 | For more information about format and content of the parameter |
---|
| 349 | see documentation. |
---|
| 350 | |
---|
| 351 | filename: string |
---|
| 352 | Name of the namelist file. |
---|
| 353 | |
---|
| 354 | @Return: |
---|
| 355 | <nothing> |
---|
| 356 | ''' |
---|
| 357 | |
---|
| 358 | self.inputdir = c.inputdir |
---|
| 359 | area = np.asarray(self.area.split('/')).astype(float) |
---|
| 360 | grid = np.asarray(self.grid.split('/')).astype(float) |
---|
| 361 | |
---|
| 362 | if area[1] > area[3]: |
---|
| 363 | area[1] -= 360 |
---|
| 364 | maxl = int((area[3] - area[1]) / grid[1]) + 1 |
---|
| 365 | maxb = int((area[0] - area[2]) / grid[0]) + 1 |
---|
| 366 | |
---|
| 367 | with open(self.inputdir + '/' + filename, 'w') as f: |
---|
| 368 | f.write('&NAMGEN\n') |
---|
| 369 | f.write(',\n '.join(['maxl = ' + str(maxl), 'maxb = ' + str(maxb), |
---|
| 370 | 'mlevel = ' + str(self.level), |
---|
| 371 | 'mlevelist = ' + '"' + str(self.levelist) |
---|
| 372 | + '"', |
---|
| 373 | 'mnauf = ' + str(self.resol), |
---|
| 374 | 'metapar = ' + '77', |
---|
| 375 | 'rlo0 = ' + str(area[1]), |
---|
| 376 | 'rlo1 = ' + str(area[3]), |
---|
| 377 | 'rla0 = ' + str(area[2]), |
---|
| 378 | 'rla1 = ' + str(area[0]), |
---|
| 379 | 'momega = ' + str(c.omega), |
---|
| 380 | 'momegadiff = ' + str(c.omegadiff), |
---|
| 381 | 'mgauss = ' + str(c.gauss), |
---|
| 382 | 'msmooth = ' + str(c.smooth), |
---|
| 383 | 'meta = ' + str(c.eta), |
---|
| 384 | 'metadiff = ' + str(c.etadiff), |
---|
| 385 | 'mdpdeta = ' + str(c.dpdeta)])) |
---|
| 386 | |
---|
| 387 | f.write('\n/\n') |
---|
| 388 | |
---|
| 389 | return |
---|
| 390 | |
---|
| 391 | def retrieve(self, server, dates, request, inputdir='.'): |
---|
| 392 | ''' |
---|
| 393 | @Description: |
---|
| 394 | Finalizing the retrieval information by setting final details |
---|
| 395 | depending on grid type. |
---|
| 396 | Prepares MARS retrievals per grid type and submits them. |
---|
| 397 | |
---|
| 398 | @Input: |
---|
| 399 | self: instance of EcFlexpart |
---|
| 400 | The current object of the class. |
---|
| 401 | |
---|
| 402 | server: instance of ECMWFService or ECMWFDataServer |
---|
| 403 | The connection to the ECMWF server. This is different |
---|
| 404 | for member state users which have full access and non |
---|
| 405 | member state users which have only access to the public |
---|
| 406 | data sets. The decision is made from command line argument |
---|
| 407 | "public"; for public access its True (ECMWFDataServer) |
---|
| 408 | for member state users its False (ECMWFService) |
---|
| 409 | |
---|
| 410 | dates: string |
---|
| 411 | Contains start and end date of the retrieval in the format |
---|
| 412 | "YYYYMMDD/to/YYYYMMDD" |
---|
| 413 | |
---|
| 414 | inputdir: string, optional |
---|
| 415 | Path to the directory where the retrieved data is about |
---|
| 416 | to be stored. The default is the current directory ('.'). |
---|
| 417 | |
---|
| 418 | @Return: |
---|
| 419 | <nothing> |
---|
| 420 | ''' |
---|
| 421 | self.dates = dates |
---|
| 422 | self.server = server |
---|
| 423 | self.inputdir = inputdir |
---|
| 424 | oro = False |
---|
| 425 | |
---|
| 426 | for ftype in self.types: |
---|
| 427 | for pk, pv in self.params.iteritems(): |
---|
| 428 | if isinstance(pv, str): |
---|
| 429 | continue |
---|
| 430 | mftype = '' + ftype |
---|
| 431 | mftime = self.types[ftype]['times'] |
---|
| 432 | mfstep = self.types[ftype]['steps'] |
---|
| 433 | mfdate = self.dates |
---|
| 434 | mfstream = self.stream |
---|
| 435 | mftarget = self.inputdir + "/" + ftype + pk + '.' + \ |
---|
| 436 | self.dates.split('/')[0] + '.' + str(os.getppid()) +\ |
---|
| 437 | '.' + str(os.getpid()) + ".grb" |
---|
| 438 | if pk == 'OG__SL': |
---|
| 439 | pass |
---|
| 440 | if pk == 'OG_OROLSM__SL': |
---|
| 441 | if oro is False: |
---|
| 442 | mfstream = 'OPER' |
---|
| 443 | mftype = 'AN' |
---|
| 444 | mftime = '00' |
---|
| 445 | mfstep = '000' |
---|
| 446 | mfdate = self.dates.split('/')[0] |
---|
| 447 | mftarget = self.inputdir + "/" + pk + '.' + mfdate + \ |
---|
| 448 | '.' + str(os.getppid()) + '.' + \ |
---|
| 449 | str(os.getpid()) + ".grb" |
---|
| 450 | oro = True |
---|
| 451 | else: |
---|
| 452 | continue |
---|
| 453 | if pk == 'GG__SL' and pv[0] == 'Q': |
---|
| 454 | area = "" |
---|
| 455 | gaussian = 'reduced' |
---|
| 456 | else: |
---|
| 457 | area = self.area |
---|
| 458 | gaussian = self.gaussian |
---|
| 459 | |
---|
| 460 | # ------ on demand path -------------------------------------------------- |
---|
| 461 | if self.basetime is None: |
---|
| 462 | MR = MarsRetrieval(self.server, |
---|
| 463 | marsclass=self.marsclass, |
---|
| 464 | stream=mfstream, |
---|
| 465 | type=mftype, |
---|
| 466 | levtype=pv[1], |
---|
| 467 | levelist=pv[2], |
---|
| 468 | resol=self.resol, |
---|
| 469 | gaussian=gaussian, |
---|
| 470 | accuracy=self.accuracy, |
---|
| 471 | grid=pv[3], |
---|
| 472 | target=mftarget, |
---|
| 473 | area=area, |
---|
| 474 | date=mfdate, |
---|
| 475 | time=mftime, |
---|
| 476 | number=self.number, |
---|
| 477 | step=mfstep, |
---|
| 478 | expver=self.expver, |
---|
| 479 | param=pv[0]) |
---|
| 480 | |
---|
| 481 | if request == 0: |
---|
| 482 | MR.display_info() |
---|
| 483 | MR.data_retrieve() |
---|
| 484 | elif request == 1: |
---|
| 485 | MR.print_info() |
---|
| 486 | elif request == 2: |
---|
| 487 | MR.print_info() |
---|
| 488 | MR.display_info() |
---|
| 489 | MR.data_retrieve() |
---|
| 490 | else: |
---|
| 491 | print 'Failure' |
---|
| 492 | # ------ operational path ------------------------------------------------ |
---|
| 493 | else: |
---|
| 494 | # check if mars job requests fields beyond basetime. |
---|
| 495 | # If yes eliminate those fields since they may not |
---|
| 496 | # be accessible with user's credentials |
---|
| 497 | if 'by' in mfstep: |
---|
| 498 | sm1 = 2 |
---|
| 499 | else: |
---|
| 500 | sm1 = -1 |
---|
| 501 | |
---|
| 502 | if 'by' in mftime: |
---|
| 503 | tm1 = 2 |
---|
| 504 | else: |
---|
| 505 | tm1 = -1 |
---|
| 506 | |
---|
| 507 | maxdate = datetime.strptime(mfdate.split('/')[-1] + |
---|
| 508 | mftime.split('/')[tm1], |
---|
| 509 | '%Y%m%d%H') |
---|
| 510 | istep = int(mfstep.split('/')[sm1]) |
---|
| 511 | maxtime = maxdate + timedelta(hours=istep) |
---|
| 512 | |
---|
| 513 | elimit = datetime.strptime(mfdate.split('/')[-1] + |
---|
| 514 | self.basetime, '%Y%m%d%H') |
---|
| 515 | |
---|
| 516 | if self.basetime == '12': |
---|
| 517 | # -------------- flux data ---------------------------- |
---|
| 518 | if 'acc' in pk: |
---|
| 519 | |
---|
| 520 | # Strategy: |
---|
| 521 | # if maxtime-elimit >= 24h reduce date by 1, |
---|
| 522 | # if 12h <= maxtime-elimit<12h reduce time for last date |
---|
| 523 | # if maxtime-elimit<12h reduce step for last time |
---|
| 524 | # A split of the MARS job into 2 is likely necessary. |
---|
| 525 | maxtime = elimit - timedelta(hours=24) |
---|
| 526 | mfdate = '/'.join(['/'.join(mfdate.split('/')[:-1]), |
---|
| 527 | datetime.strftime(maxtime, |
---|
| 528 | '%Y%m%d')]) |
---|
| 529 | |
---|
| 530 | MR = MarsRetrieval(self.server, |
---|
| 531 | marsclass=self.marsclass, |
---|
| 532 | stream=self.stream, |
---|
| 533 | type=mftype, |
---|
| 534 | levtype=pv[1], |
---|
| 535 | levelist=pv[2], |
---|
| 536 | resol=self.resol, |
---|
| 537 | gaussian=gaussian, |
---|
| 538 | accuracy=self.accuracy, |
---|
| 539 | grid=pv[3], |
---|
| 540 | target=mftarget, |
---|
| 541 | area=area, |
---|
| 542 | date=mfdate, |
---|
| 543 | time=mftime, |
---|
| 544 | number=self.number, |
---|
| 545 | step=mfstep, |
---|
| 546 | expver=self.expver, |
---|
| 547 | param=pv[0]) |
---|
| 548 | |
---|
| 549 | MR.display_info() |
---|
| 550 | MR.data_retrieve() |
---|
| 551 | |
---|
| 552 | maxtime = elimit - timedelta(hours=12) |
---|
| 553 | mfdate = datetime.strftime(maxtime, '%Y%m%d') |
---|
| 554 | mftime = '00' |
---|
| 555 | mftarget = self.inputdir + "/" + ftype + pk + \ |
---|
| 556 | '.' + mfdate + '.' + str(os.getppid()) +\ |
---|
| 557 | '.' + str(os.getpid()) + ".grb" |
---|
| 558 | |
---|
| 559 | MR = MarsRetrieval(self.server, |
---|
| 560 | marsclass=self.marsclass, |
---|
| 561 | stream=self.stream, |
---|
| 562 | type=mftype, |
---|
| 563 | levtype=pv[1], |
---|
| 564 | levelist=pv[2], |
---|
| 565 | resol=self.resol, |
---|
| 566 | gaussian=gaussian, |
---|
| 567 | accuracy=self.accuracy, |
---|
| 568 | grid=pv[3], |
---|
| 569 | target=mftarget, |
---|
| 570 | area=area, |
---|
| 571 | date=mfdate, |
---|
| 572 | time=mftime, |
---|
| 573 | number=self.number, |
---|
| 574 | step=mfstep, |
---|
| 575 | expver=self.expver, |
---|
| 576 | param=pv[0]) |
---|
| 577 | |
---|
| 578 | MR.display_info() |
---|
| 579 | |
---|
| 580 | MR.data_retrieve() |
---|
| 581 | # -------------- non flux data ------------------------ |
---|
| 582 | else: |
---|
| 583 | MR = MarsRetrieval(self.server, |
---|
| 584 | marsclass=self.marsclass, |
---|
| 585 | stream=self.stream, |
---|
| 586 | type=mftype, |
---|
| 587 | levtype=pv[1], |
---|
| 588 | levelist=pv[2], |
---|
| 589 | resol=self.resol, |
---|
| 590 | gaussian=gaussian, |
---|
| 591 | accuracy=self.accuracy, |
---|
| 592 | grid=pv[3], |
---|
| 593 | target=mftarget, |
---|
| 594 | area=area, |
---|
| 595 | date=mfdate, |
---|
| 596 | time=mftime, |
---|
| 597 | number=self.number, |
---|
| 598 | step=mfstep, |
---|
| 599 | expver=self.expver, |
---|
| 600 | param=pv[0]) |
---|
| 601 | |
---|
| 602 | MR.display_info() |
---|
| 603 | MR.data_retrieve() |
---|
| 604 | else: # basetime == 0 ??? #AP |
---|
| 605 | |
---|
| 606 | maxtime = elimit - timedelta(hours=24) |
---|
| 607 | mfdate = datetime.strftime(maxtime, '%Y%m%d') |
---|
| 608 | mftimesave = ''.join(mftime) |
---|
| 609 | |
---|
| 610 | if '/' in mftime: |
---|
| 611 | times = mftime.split('/') |
---|
| 612 | while ((int(times[0]) + |
---|
| 613 | int(mfstep.split('/')[0]) <= 12) and |
---|
| 614 | (pk != 'OG_OROLSM__SL') and 'acc' not in pk): |
---|
| 615 | times = times[1:] |
---|
| 616 | if len(times) > 1: |
---|
| 617 | mftime = '/'.join(times) |
---|
| 618 | else: |
---|
| 619 | mftime = times[0] |
---|
| 620 | |
---|
| 621 | MR = MarsRetrieval(self.server, |
---|
| 622 | marsclass=self.marsclass, |
---|
| 623 | stream=self.stream, |
---|
| 624 | type=mftype, |
---|
| 625 | levtype=pv[1], |
---|
| 626 | levelist=pv[2], |
---|
| 627 | resol=self.resol, |
---|
| 628 | gaussian=gaussian, |
---|
| 629 | accuracy=self.accuracy, |
---|
| 630 | grid=pv[3], |
---|
| 631 | target=mftarget, |
---|
| 632 | area=area, |
---|
| 633 | date=mfdate, |
---|
| 634 | time=mftime, |
---|
| 635 | number=self.number, |
---|
| 636 | step=mfstep, |
---|
| 637 | expver=self.expver, |
---|
| 638 | param=pv[0]) |
---|
| 639 | |
---|
| 640 | MR.display_info() |
---|
| 641 | MR.data_retrieve() |
---|
| 642 | |
---|
| 643 | if (int(mftimesave.split('/')[0]) == 0 and |
---|
| 644 | int(mfstep.split('/')[0]) == 0 and |
---|
| 645 | pk != 'OG_OROLSM__SL'): |
---|
| 646 | |
---|
| 647 | mfdate = datetime.strftime(elimit, '%Y%m%d') |
---|
| 648 | mftime = '00' |
---|
| 649 | mfstep = '000' |
---|
| 650 | mftarget = self.inputdir + "/" + ftype + pk + \ |
---|
| 651 | '.' + mfdate + '.' + str(os.getppid()) +\ |
---|
| 652 | '.' + str(os.getpid()) + ".grb" |
---|
| 653 | |
---|
| 654 | MR = MarsRetrieval(self.server, |
---|
| 655 | marsclass=self.marsclass, |
---|
| 656 | stream=self.stream, |
---|
| 657 | type=mftype, |
---|
| 658 | levtype=pv[1], |
---|
| 659 | levelist=pv[2], |
---|
| 660 | resol=self.resol, |
---|
| 661 | gaussian=gaussian, |
---|
| 662 | accuracy=self.accuracy, |
---|
| 663 | grid=pv[3], |
---|
| 664 | target=mftarget, |
---|
| 665 | area=area, |
---|
| 666 | date=mfdate, |
---|
| 667 | time=mftime, |
---|
| 668 | number=self.number, |
---|
| 669 | step=mfstep, |
---|
| 670 | expver=self.expver, |
---|
| 671 | param=pv[0]) |
---|
| 672 | |
---|
| 673 | MR.display_info() |
---|
| 674 | MR.data_retrieve() |
---|
| 675 | |
---|
| 676 | if request == 0 or request == 2: |
---|
| 677 | print('MARS retrieve done ... ') |
---|
| 678 | elif request == 1: |
---|
| 679 | print('MARS request printed ...') |
---|
| 680 | |
---|
| 681 | return |
---|
| 682 | |
---|
| 683 | |
---|
| 684 | def process_output(self, c): |
---|
| 685 | ''' |
---|
| 686 | @Description: |
---|
| 687 | The grib files are postprocessed depending on the selection in |
---|
| 688 | CONTROL file. The resulting files are moved to the output |
---|
| 689 | directory if it is not equal to the input directory. |
---|
| 690 | The following modifications might be done if |
---|
| 691 | properly switched in CONTROL file: |
---|
| 692 | GRIB2 - Conversion to GRIB2 |
---|
| 693 | ECTRANS - Transfer of files to gateway server |
---|
| 694 | ECSTORAGE - Storage at ECMWF server |
---|
| 695 | GRIB2FLEXPART - Conversion of GRIB files to FLEXPART binary format |
---|
| 696 | |
---|
| 697 | @Input: |
---|
| 698 | self: instance of EcFlexpart |
---|
| 699 | The current object of the class. |
---|
| 700 | |
---|
| 701 | c: instance of class ControlFile |
---|
| 702 | Contains all the parameters of CONTROL file, which are e.g.: |
---|
| 703 | DAY1(start_date), DAY2(end_date), DTIME, MAXSTEP, TYPE, TIME, |
---|
| 704 | STEP, CLASS(marsclass), STREAM, NUMBER, EXPVER, GRID, LEFT, |
---|
| 705 | LOWER, UPPER, RIGHT, LEVEL, LEVELIST, RESOL, GAUSS, ACCURACY, |
---|
| 706 | OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, SMOOTH, FORMAT, |
---|
| 707 | ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, ECFSDIR, |
---|
| 708 | MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR, BASETIME |
---|
| 709 | DATE_CHUNK, DEBUG, INPUTDIR, OUTPUTDIR, FLEXPART_ROOT_SCRIPTS |
---|
| 710 | |
---|
| 711 | For more information about format and content of the parameter |
---|
| 712 | see documentation. |
---|
| 713 | |
---|
| 714 | @Return: |
---|
| 715 | <nothing> |
---|
| 716 | |
---|
| 717 | ''' |
---|
| 718 | |
---|
| 719 | print '\n\nPostprocessing:\n Format: {}\n'.format(c.format) |
---|
| 720 | |
---|
| 721 | if c.ecapi is False: |
---|
| 722 | print('ecstorage: {}\n ecfsdir: {}\n'. |
---|
| 723 | format(c.ecstorage, c.ecfsdir)) |
---|
| 724 | if not hasattr(c, 'gateway'): |
---|
| 725 | c.gateway = os.getenv('GATEWAY') |
---|
| 726 | if not hasattr(c, 'destination'): |
---|
| 727 | c.destination = os.getenv('DESTINATION') |
---|
| 728 | print('ectrans: {}\n gateway: {}\n destination: {}\n ' |
---|
| 729 | .format(c.ectrans, c.gateway, c.destination)) |
---|
| 730 | |
---|
| 731 | print 'Output filelist: \n' |
---|
| 732 | print self.outputfilelist |
---|
| 733 | |
---|
| 734 | if c.format.lower() == 'grib2': |
---|
| 735 | for ofile in self.outputfilelist: |
---|
| 736 | p = subprocess.check_call(['grib_set', '-s', 'edition=2, \ |
---|
| 737 | productDefinitionTemplateNumber=8', |
---|
| 738 | ofile, ofile + '_2']) |
---|
| 739 | p = subprocess.check_call(['mv', ofile + '_2', ofile]) |
---|
| 740 | |
---|
| 741 | if int(c.ectrans) == 1 and c.ecapi is False: |
---|
| 742 | for ofile in self.outputfilelist: |
---|
| 743 | p = subprocess.check_call(['ectrans', '-overwrite', '-gateway', |
---|
| 744 | c.gateway, '-remote', c.destination, |
---|
| 745 | '-source', ofile]) |
---|
| 746 | #print('ectrans:', p) |
---|
| 747 | |
---|
| 748 | if int(c.ecstorage) == 1 and c.ecapi is False: |
---|
| 749 | for ofile in self.outputfilelist: |
---|
| 750 | p = subprocess.check_call(['ecp', '-o', ofile, |
---|
| 751 | os.path.expandvars(c.ecfsdir)]) |
---|
| 752 | |
---|
| 753 | if c.outputdir != c.inputdir: |
---|
| 754 | for ofile in self.outputfilelist: |
---|
| 755 | p = subprocess.check_call(['mv', ofile, c.outputdir]) |
---|
| 756 | |
---|
| 757 | # prepare environment for the grib2flexpart run |
---|
| 758 | # to convert grib to flexpart binary |
---|
| 759 | if c.grib2flexpart == '1': |
---|
| 760 | |
---|
| 761 | # generate AVAILABLE file |
---|
| 762 | # Example of AVAILABLE file data: |
---|
| 763 | # 20131107 000000 EN13110700 ON DISC |
---|
| 764 | clist = [] |
---|
| 765 | for ofile in self.outputfilelist: |
---|
| 766 | fname = ofile.split('/') |
---|
| 767 | if '.' in fname[-1]: |
---|
| 768 | l = fname[-1].split('.') |
---|
| 769 | timestamp = datetime.strptime(l[0][-6:] + l[1], |
---|
| 770 | '%y%m%d%H') |
---|
| 771 | timestamp += timedelta(hours=int(l[2])) |
---|
| 772 | cdate = datetime.strftime(timestamp, '%Y%m%d') |
---|
| 773 | chms = datetime.strftime(timestamp, '%H%M%S') |
---|
| 774 | else: |
---|
| 775 | cdate = '20' + fname[-1][-8:-2] |
---|
| 776 | chms = fname[-1][-2:] + '0000' |
---|
| 777 | clist.append(cdate + ' ' + chms + ' '*6 + |
---|
| 778 | fname[-1] + ' '*14 + 'ON DISC') |
---|
| 779 | clist.sort() |
---|
| 780 | with open(c.outputdir + '/' + 'AVAILABLE', 'w') as f: |
---|
| 781 | f.write('\n'.join(clist) + '\n') |
---|
| 782 | |
---|
| 783 | # generate pathnames file |
---|
| 784 | pwd = os.path.abspath(c.outputdir) |
---|
| 785 | with open(pwd + '/pathnames', 'w') as f: |
---|
| 786 | f.write(pwd + '/Options/\n') |
---|
| 787 | f.write(pwd + '/\n') |
---|
| 788 | f.write(pwd + '/\n') |
---|
| 789 | f.write(pwd + '/AVAILABLE\n') |
---|
| 790 | f.write(' = == = == = == = == = == == = \n') |
---|
| 791 | |
---|
| 792 | # create Options dir if necessary |
---|
| 793 | if not os.path.exists(pwd + '/Options'): |
---|
| 794 | os.makedirs(pwd+'/Options') |
---|
| 795 | |
---|
| 796 | # read template COMMAND file |
---|
| 797 | with open(os.path.expandvars(os.path.expanduser( |
---|
| 798 | c.flexpart_root_scripts)) + '/../Options/COMMAND', 'r') as f: |
---|
| 799 | lflist = f.read().split('\n') |
---|
| 800 | |
---|
| 801 | # find index of list where to put in the |
---|
| 802 | # date and time information |
---|
| 803 | # usually after the LDIRECT parameter |
---|
| 804 | i = 0 |
---|
| 805 | for l in lflist: |
---|
| 806 | if 'LDIRECT' in l.upper(): |
---|
| 807 | break |
---|
| 808 | i += 1 |
---|
| 809 | |
---|
| 810 | # insert the date and time information of run start and end |
---|
| 811 | # into the list of lines of COMMAND file |
---|
| 812 | lflist = lflist[:i+1] + \ |
---|
| 813 | [clist[0][:16], clist[-1][:16]] + \ |
---|
| 814 | lflist[i+3:] |
---|
| 815 | |
---|
| 816 | # write the new COMMAND file |
---|
| 817 | with open(pwd + '/Options/COMMAND', 'w') as g: |
---|
| 818 | g.write('\n'.join(lflist) + '\n') |
---|
| 819 | |
---|
| 820 | # change to outputdir and start the grib2flexpart run |
---|
| 821 | # afterwards switch back to the working dir |
---|
| 822 | os.chdir(c.outputdir) |
---|
| 823 | p = subprocess.check_call([ |
---|
| 824 | os.path.expandvars(os.path.expanduser(c.flexpart_root_scripts)) |
---|
| 825 | + '/../FLEXPART_PROGRAM/grib2flexpart', 'useAvailable', '.']) |
---|
| 826 | os.chdir(pwd) |
---|
| 827 | |
---|
| 828 | return |
---|
| 829 | |
---|
    def create(self, inputfiles, c):
        '''
        @Description:
            This method is based on the ECMWF example index.py
            https://software.ecmwf.int/wiki/display/GRIB/index.py

            An index file will be created which depends on the combination
            of "date", "time" and "stepRange" values. This is used to iterate
            over all messages in each grib file which were passed through the
            parameter "inputfiles" to seperate specific parameters into fort.*
            files. Afterwards the FORTRAN program Convert2 is called to convert
            the data fields all to the same grid and put them in one file
            per unique time step (combination of "date", "time" and
            "stepRange").

        @Input:
            self: instance of EcFlexpart
                The current object of the class.

            inputfiles: instance of UioFiles
                Contains a list of files.

            c: instance of class ControlFile
                Contains all the parameters of CONTROL files, which are e.g.:
                DAY1(start_date), DAY2(end_date), DTIME, MAXSTEP, TYPE, TIME,
                STEP, CLASS(marsclass), STREAM, NUMBER, EXPVER, GRID, LEFT,
                LOWER, UPPER, RIGHT, LEVEL, LEVELIST, RESOL, GAUSS, ACCURACY,
                OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, SMOOTH, FORMAT,
                ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, ECFSDIR,
                MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR, BASETIME
                DATE_CHUNK, DEBUG, INPUTDIR, OUTPUTDIR, FLEXPART_ROOT_SCRIPTS

                For more information about format and content of the parameter
                see documentation.

        @Return:
            <nothing>
        '''

        # parameter ids of the fields collected for the optional WRF output
        # file (surface and model level fields); NOTE(review): the backslash
        # continuation is *inside* the string literal, so the leading blanks
        # of the next line become part of the parameter string — presumably
        # to_param_id tolerates that; verify against its implementation
        table128 = init128(_config.PATH_GRIBTABLE)
        wrfpars = to_param_id('sp/mslp/skt/2t/10u/10v/2d/z/lsm/sst/ci/sd/\
                               stl1/stl2/stl3/stl4/swvl1/swvl2/swvl3/swvl4',
                              table128)

        index_keys = ["date", "time", "step"]
        indexfile = c.inputdir + "/date_time_stepRange.idx"
        silent_remove(indexfile)
        grib = GribTools(inputfiles.files)
        # creates new index file
        iid = grib.index(index_keys=index_keys, index_file=indexfile)

        # read values of index keys
        index_vals = []
        for key in index_keys:
            index_vals.append(grib_index_get(iid, key))
            print index_vals[-1]
        # index_vals looks for example like:
        # index_vals[0]: ('20171106', '20171107', '20171108') ; date
        # index_vals[1]: ('0', '1200', '1800', '600') ; time
        # index_vals[2]: ('0', '12', '3', '6', '9') ; stepRange

        # keys are the fort.* unit numbers that the Fortran program CONVERT2
        # reads from; the None placeholders are replaced by open file handles
        # for each (date, time, step) product below
        fdict = {'10':None, '11':None, '12':None, '13':None, '16':None,
                 '17':None, '19':None, '21':None, '22':None, '20':None}

        for prod in product(*index_vals):
            # flag for Fortran program CONVERT2 and file merging
            convertFlag = False
            print 'current prod: ', prod
            # e.g. prod = ('20170505', '0', '12')
            #        (  date ,time, step)
            # per date e.g. time = 0, 600, 1200, 1800
            # per time e.g. step = 0, 3, 6, 9, 12
            for i in range(len(index_keys)):
                grib_index_select(iid, index_keys[i], prod[i])

            # get first id from current product
            gid = grib_new_from_index(iid)

            # if there is data for this product combination
            # prepare some date and time parameter before reading the data
            if gid is not None:
                # Combine all temporary data files into final grib file if
                # gid is at least one time not None. Therefore set convertFlag
                # to save information. The fortran program CONVERT2 is also
                # only done if convertFlag is True
                convertFlag = True
                # remove old fort.* files and open new ones
                # they are just valid for a single product
                for k, f in fdict.iteritems():
                    silent_remove(c.inputdir + "/fort." + k)
                    fdict[k] = open(c.inputdir + '/fort.' + k, 'w')

                cdate = str(grib_get(gid, 'date'))
                time = grib_get(gid, 'time')
                step = grib_get(gid, 'step')
                # create correct timestamp from the three time informations
                # date, time, step
                # NOTE: 'time' is an integer HHMM value; the Python-2 integer
                # division by 100 extracts the hour
                timestamp = datetime.strptime(cdate + '{:0>2}'.format(time/100),
                                              '%Y%m%d%H')
                timestamp += timedelta(hours=int(step))
                cdateH = datetime.strftime(timestamp, '%Y%m%d%H')

                # when BASETIME is set, skip products outside the retrieval
                # window [slimit, elimit]; bt defaults to '23' when basetime
                # is neither '00' nor '12'
                if c.basetime is not None:
                    slimit = datetime.strptime(c.start_date + '00', '%Y%m%d%H')
                    bt = '23'
                    if c.basetime == '00':
                        bt = '00'
                        slimit = datetime.strptime(c.end_date + bt, '%Y%m%d%H')\
                            - timedelta(hours=12-int(c.dtime))
                    if c.basetime == '12':
                        bt = '12'
                        slimit = datetime.strptime(c.end_date + bt, '%Y%m%d%H')\
                            - timedelta(hours=12-int(c.dtime))

                    elimit = datetime.strptime(c.end_date + bt, '%Y%m%d%H')

                    if timestamp < slimit or timestamp > elimit:
                        continue

            # open a new WRF output file whenever the date changes; the
            # AttributeError is raised (and silently ignored) when the
            # CONTROL file defines no 'wrf' attribute on c
            try:
                if c.wrf == '1':
                    # 'olddate' survives across loop iterations via locals()
                    if 'olddate' not in locals():
                        fwrf = open(c.outputdir + '/WRF' + cdate +
                                    '.{:0>2}'.format(time) + '.000.grb2', 'w')
                        olddate = cdate[:]
                    else:
                        if cdate != olddate:
                            fwrf = open(c.outputdir + '/WRF' + cdate +
                                        '.{:0>2}'.format(time) + '.000.grb2',
                                        'w')
                            olddate = cdate[:]
            except AttributeError:
                pass

            # helper variable to remember which fields are already used.
            savedfields = []
            # iterate over all GRIB messages of the current product and
            # dispatch each field into the fort.* file CONVERT2 expects;
            # param ids refer to ECMWF table 128 (e.g. 131/132 u/v wind,
            # 130 temperature, 152 lnsp, 133 spec. humidity, 77 etadot)
            while 1:
                if gid is None:
                    break
                paramId = grib_get(gid, 'paramId')
                gridtype = grib_get(gid, 'gridType')
                levtype = grib_get(gid, 'typeOfLevel')
                if paramId == 133 and gridtype == 'reduced_gg':
                    # Specific humidity (Q.grb) is used as a template only
                    # so we need the first we "meet"
                    with open(c.inputdir + '/fort.18', 'w') as fout:
                        grib_write(gid, fout)
                elif paramId == 131 or paramId == 132:
                    grib_write(gid, fdict['10'])
                elif paramId == 130:
                    grib_write(gid, fdict['11'])
                elif paramId == 133 and gridtype != 'reduced_gg':
                    grib_write(gid, fdict['17'])
                elif paramId == 152:
                    grib_write(gid, fdict['12'])
                elif paramId == 155 and gridtype == 'sh':
                    grib_write(gid, fdict['13'])
                elif paramId in [129, 138, 155] and levtype == 'hybrid' \
                        and c.wrf == '1':
                    # handled by the WRF branch below only
                    pass
                elif paramId == 246 or paramId == 247:
                    # cloud liquid water and ice
                    # 246 (clwc) arrives first and is buffered; 247 (ciwc) is
                    # added on top and the sum is written as paramId 201031
                    # -- presumably total cloud water content; TODO confirm
                    if paramId == 246:
                        clwc = grib_get_values(gid)
                    else:
                        clwc += grib_get_values(gid)
                        grib_set_values(gid, clwc)
                        grib_set(gid, 'paramId', 201031)
                        grib_write(gid, fdict['22'])
                elif paramId == 135:
                    grib_write(gid, fdict['19'])
                elif paramId == 77:
                    grib_write(gid, fdict['21'])
                else:
                    # any remaining (surface) field goes to fort.16, but only
                    # once per product
                    if paramId not in savedfields:
                        grib_write(gid, fdict['16'])
                        savedfields.append(paramId)
                    else:
                        print 'duplicate ' + str(paramId) + ' not written'

                # additionally copy WRF-relevant fields into the WRF file;
                # AttributeError again covers a missing c.wrf attribute
                try:
                    if c.wrf == '1':
                        if levtype == 'hybrid':  # model layer
                            if paramId in [129, 130, 131, 132, 133, 138, 155]:
                                grib_write(gid, fwrf)
                        else:  # sfc layer
                            if paramId in wrfpars:
                                grib_write(gid, fwrf)
                except AttributeError:
                    pass

                grib_release(gid)
                gid = grib_new_from_index(iid)

            for f in fdict.values():
                f.close()

            # call for CONVERT2 if flag is True
            if convertFlag:
                pwd = os.getcwd()
                os.chdir(c.inputdir)
                # etadot (param 77) is mandatory when ETA is enabled
                if os.stat('fort.21').st_size == 0 and int(c.eta) == 1:
                    print 'Parameter 77 (etadot) is missing, most likely it is \
not available for this type or date/time\n'
                    print 'Check parameters CLASS, TYPE, STREAM, START_DATE\n'
                    my_error(c.mailfail, 'fort.21 is empty while parameter eta \
is set to 1 in CONTROL file')

                # create the corresponding output file fort.15
                # (generated by CONVERT2) + fort.16 (paramId 167 and 168)
                p = subprocess.check_call(
                    [os.path.expandvars(os.path.expanduser(c.exedir)) +
                     '/CONVERT2'], shell=True)
                os.chdir(pwd)

                # create final output filename, e.g. EN13040500 (ENYYMMDDHH)
                fnout = c.inputdir + '/' + c.prefix
                if c.maxstep > 12:
                    # long forecasts keep the step in the name
                    suffix = cdate[2:8] + '.{:0>2}'.format(time/100) + \
                        '.{:0>3}'.format(step)
                else:
                    suffix = cdateH[2:10]
                fnout += suffix
                print "outputfile = " + fnout
                self.outputfilelist.append(fnout)  # needed for final processing

                # create outputfile and copy all data from intermediate files
                # to the outputfile (final GRIB files)
                orolsm = os.path.basename(glob.glob(
                    c.inputdir + '/OG_OROLSM__SL.*.' + c.ppid + '*')[0])
                fluxfile = 'flux' + cdate[0:2] + suffix
                # fort.22 (cloud water) is only included when CWC is requested
                if c.cwc != '1':
                    flist = ['fort.15', fluxfile, 'fort.16', orolsm]
                else:
                    flist = ['fort.15', 'fort.22', fluxfile, 'fort.16', orolsm]

                with open(fnout, 'wb') as fout:
                    for f in flist:
                        shutil.copyfileobj(
                            open(c.inputdir + '/' + f, 'rb'), fout)

                if c.omega == '1':
                    with open(c.outputdir + '/OMEGA', 'wb') as fout:
                        shutil.copyfileobj(
                            open(c.inputdir + '/fort.25', 'rb'), fout)

        if hasattr(c, 'wrf') and c.wrf == '1':
            fwrf.close()

        grib_index_release(iid)

        return
---|
| 1082 | |
---|
    def deacc_fluxes(self, inputfiles, c):
        '''
        @Description:
            Goes through all flux fields in ordered time and de-accumulate
            the fields. Afterwards the fields are disaggregated in time.
            Different versions of disaggregation is provided for rainfall
            data (darain, modified linear) and the surface fluxes and
            stress data (dapoly, cubic polynomial).

        @Input:
            self: instance of EcFlexpart
                The current object of the class.

            inputfiles: instance of UioFiles
                Contains a list of files.

            c: instance of class ControlFile
                Contains all the parameters of CONTROL file, which are e.g.:
                DAY1(start_date), DAY2(end_date), DTIME, MAXSTEP, TYPE, TIME,
                STEP, CLASS(marsclass), STREAM, NUMBER, EXPVER, GRID, LEFT,
                LOWER, UPPER, RIGHT, LEVEL, LEVELIST, RESOL, GAUSS, ACCURACY,
                OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, SMOOTH, FORMAT,
                ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, ECFSDIR,
                MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR, BASETIME
                DATE_CHUNK, DEBUG, INPUTDIR, OUTPUTDIR, FLEXPART_ROOT_SCRIPTS

                For more information about format and content of the parameter
                see documentation.

        @Return:
            <nothing>
        '''

        # paramIds of the accumulated surface fields to de-accumulate
        table128 = init128(_config.PATH_GRIBTABLE)
        pars = to_param_id(self.params['OG_acc_SL'][0], table128)
        index_keys = ["date", "time", "step"]
        indexfile = c.inputdir + "/date_time_stepRange.idx"
        silent_remove(indexfile)
        grib = GribTools(inputfiles.files)
        # creates new index file
        iid = grib.index(index_keys=index_keys, index_file=indexfile)

        # read values of index keys
        index_vals = []
        for key in index_keys:
            key_vals = grib_index_get(iid, key)
            print key_vals
            # have to sort the steps for disaggregation,
            # therefore convert to int first
            if key == 'step':
                key_vals = [int(k) for k in key_vals]
                key_vals.sort()
                key_vals = [str(k) for k in key_vals]
            index_vals.append(key_vals)
        # index_vals looks for example like:
        # index_vals[0]: ('20171106', '20171107', '20171108') ; date
        # index_vals[1]: ('0', '1200') ; time
        # index_vals[2]: (3', '6', '9', '12') ; stepRange

        # per-paramId histories kept across products (keyed by paramId as
        # string): valsdict holds the raw accumulated fields, svalsdict the
        # de-accumulated per-dtime values (the sliding window fed to the
        # disaggregation routines), stepsdict the corresponding steps
        valsdict = {}
        svalsdict = {}
        stepsdict = {}
        for p in pars:
            valsdict[str(p)] = []
            svalsdict[str(p)] = []
            stepsdict[str(p)] = []

        print 'maxstep: ', c.maxstep

        for prod in product(*index_vals):
            # e.g. prod = ('20170505', '0', '12')
            #        (  date ,time, step)
            # per date e.g. time = 0, 1200
            # per time e.g. step = 3, 6, 9, 12
            for i in range(len(index_keys)):
                grib_index_select(iid, index_keys[i], prod[i])

            gid = grib_new_from_index(iid)
            if gid is not None:
                cdate = grib_get(gid, 'date')
                time = grib_get(gid, 'time')
                step = grib_get(gid, 'step')
                # date+time+step-2*dtime
                # (since interpolated value valid for step-2*dtime)
                # NOTE: Python-2 integer divisions decode the YYYYMMDD int
                # and the HHMM time int into datetime components
                sdate = datetime(year=cdate/10000,
                                 month=(cdate % 10000)/100,
                                 day=(cdate % 100),
                                 hour=time/100)
                fdate = sdate + timedelta(hours=step-2*int(c.dtime))
                sdates = sdate + timedelta(hours=step)
                elimit = None
            else:
                break

            # f: interpolated value (valid at step-2*dtime)
            # g: value one dtime later, h: value at the original step;
            # long forecasts (maxstep > 12) keep the step in the filename,
            # otherwise the valid date/hour is encoded
            if c.maxstep > 12:
                fnout = c.inputdir + '/flux' + \
                    sdate.strftime('%Y%m%d') + '.{:0>2}'.format(time/100) + \
                    '.{:0>3}'.format(step-2*int(c.dtime))
                gnout = c.inputdir + '/flux' + \
                    sdate.strftime('%Y%m%d') + '.{:0>2}'.format(time/100) + \
                    '.{:0>3}'.format(step-int(c.dtime))
                hnout = c.inputdir + '/flux' + \
                    sdate.strftime('%Y%m%d') + '.{:0>2}'.format(time/100) + \
                    '.{:0>3}'.format(step)
                g = open(gnout, 'w')
                h = open(hnout, 'w')
            else:
                fnout = c.inputdir + '/flux' + fdate.strftime('%Y%m%d%H')
                gnout = c.inputdir + '/flux' + (fdate +
                                                timedelta(hours=int(c.dtime))
                                                ).strftime('%Y%m%d%H')
                hnout = c.inputdir + '/flux' + sdates.strftime('%Y%m%d%H')
                g = open(gnout, 'w')
                h = open(hnout, 'w')

            print "outputfile = " + fnout
            f = open(fnout, 'w')

            # read message for message and store relevant data fields
            # data keywords are stored in pars
            while 1:
                if gid is None:
                    break
                cparamId = str(grib_get(gid, 'paramId'))
                step = grib_get(gid, 'step')
                atime = grib_get(gid, 'time')
                ni = grib_get(gid, 'Ni')
                nj = grib_get(gid, 'Nj')
                if cparamId in valsdict.keys():
                    values = grib_get_values(gid)
                    vdp = valsdict[cparamId]
                    svdp = svalsdict[cparamId]
                    sd = stepsdict[cparamId]

                    # scaling factor applied to the raw values;
                    # 142/143 are the precipitation parameters — presumably a
                    # unit conversion (factor 1000) while the other fluxes are
                    # divided by 3600 (seconds per hour); TODO confirm units
                    if cparamId == '142' or cparamId == '143':
                        fak = 1. / 1000.
                    else:
                        fak = 3600.

                    values = (np.reshape(values, (nj, ni))).flatten() / fak
                    vdp.append(values[:])  # save the accumulated values
                    if step <= int(c.dtime):
                        # first step of an accumulation period: already a
                        # per-dtime value
                        svdp.append(values[:] / int(c.dtime))
                    else:  # deaccumulate values
                        svdp.append((vdp[-1] - vdp[-2]) / int(c.dtime))

                    print(cparamId, atime, step, len(values),
                          values[0], np.std(values))
                    # save the 1/3-hourly or specific values
                    # svdp.append(values[:])
                    sd.append(step)
                    # len(svdp) correspond to the time
                    # the disaggregation needs a window of (at least) 4
                    # de-accumulated values around the target time
                    if len(svdp) >= 3:
                        if len(svdp) > 3:
                            if cparamId == '142' or cparamId == '143':
                                values = disaggregation.darain(svdp)
                            else:
                                values = disaggregation.dapoly(svdp)

                            # slide the window forward unless this is the very
                            # last step (then svdp is reused in full below)
                            if not (step == c.maxstep and c.maxstep > 12 \
                                    or sdates == elimit):
                                vdp.pop(0)
                                svdp.pop(0)
                        else:
                            # window not yet full: fall back to the plain
                            # de-accumulated value
                            if c.maxstep > 12:
                                values = svdp[1]
                            else:
                                values = svdp[0]

                        grib_set_values(gid, values)
                        if c.maxstep > 12:
                            grib_set(gid, 'step', max(0, step-2*int(c.dtime)))
                        else:
                            # re-stamp the message with the interpolated
                            # valid date/time and step 0
                            grib_set(gid, 'step', 0)
                            grib_set(gid, 'time', fdate.hour*100)
                            grib_set(gid, 'date', fdate.year*10000 +
                                     fdate.month*100+fdate.day)
                        grib_write(gid, f)

                        if c.basetime is not None:
                            elimit = datetime.strptime(c.end_date +
                                                       c.basetime, '%Y%m%d%H')
                        else:
                            elimit = sdate + timedelta(2*int(c.dtime))

                        # squeeze out information of last two steps contained
                        # in svdp
                        # if step+int(c.dtime) == c.maxstep and c.maxstep>12
                        # or sdates+timedelta(hours = int(c.dtime))
                        # >= elimit:
                        # Note that svdp[0] has not been popped in this case

                        if step == c.maxstep and c.maxstep > 12 or \
                           sdates == elimit:

                            # last de-accumulated value -> h file
                            values = svdp[3]
                            grib_set_values(gid, values)
                            grib_set(gid, 'step', 0)
                            truedatetime = fdate + timedelta(hours=
                                                             2*int(c.dtime))
                            grib_set(gid, 'time', truedatetime.hour * 100)
                            grib_set(gid, 'date', truedatetime.year * 10000 +
                                     truedatetime.month * 100 +
                                     truedatetime.day)
                            grib_write(gid, h)

                            #values = (svdp[1]+svdp[2])/2.
                            # disaggregate the reversed window to obtain the
                            # intermediate value -> g file
                            if cparamId == '142' or cparamId == '143':
                                values = disaggregation.darain(list(reversed(svdp)))
                            else:
                                values = disaggregation.dapoly(list(reversed(svdp)))

                            grib_set(gid, 'step', 0)
                            truedatetime = fdate + timedelta(hours=int(c.dtime))
                            grib_set(gid, 'time', truedatetime.hour * 100)
                            grib_set(gid, 'date', truedatetime.year * 10000 +
                                     truedatetime.month * 100 +
                                     truedatetime.day)
                            grib_set_values(gid, values)
                            grib_write(gid, g)

                grib_release(gid)

                gid = grib_new_from_index(iid)

            f.close()
            g.close()
            h.close()

        grib_index_release(iid)

        return
---|