Changeset 092aaf1 in flex_extract.git for source/python/classes/EcFlexpart.py
- Timestamp:
- Dec 13, 2018, 11:53:49 PM (5 years ago)
- Branches:
- master, ctbto, dev
- Children:
- 5c0a578
- Parents:
- 145fbe0
- File:
-
- 1 edited
Legend:
- Unmodified
- Added
- Removed
-
source/python/classes/EcFlexpart.py
re5884c9 r092aaf1 90 90 sys.path.append('../') 91 91 import _config 92 from Grib Tools import GribTools92 from GribUtil import GribUtil 93 93 from mods.tools import (init128, to_param_id, silent_remove, product, 94 94 my_error, make_dir, get_informations, get_dimensions) … … 125 125 126 126 ''' 127 # set a counter for the number of mars requests generated127 # set a counter for the number of generated mars requests 128 128 self.mreq_count = 0 129 130 # different mars types for retrieving data for flexpart131 self.types = dict()132 133 # Pure forecast mode134 if c.purefc:135 c.type = [c.type[0]]136 c.step = ['{:0>3}'.format(int(c.step[0]))]137 c.time = [c.time[0]]138 for i in range(1, c.maxstep + 1):139 c.type.append(c.type[0])140 c.step.append('{:0>3}'.format(i))141 c.time.append(c.time[0])142 129 143 130 self.inputdir = c.inputdir … … 145 132 self.basetime = c.basetime 146 133 self.dtime = c.dtime 147 i = 0 148 if fluxes and not c.purefc: 149 self.types[str(c.acctype)] = {'times': str(c.acctime), 150 'steps': '{}/to/{}/by/{}'.format( 151 c.dtime, c.accmaxstep, c.dtime)} 152 else: 153 for ty, st, ti in zip(c.type, c.step, c.time): 154 btlist = range(24) 155 if c.basetime == '12': 156 btlist = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12] 157 if c.basetime == '00': 158 btlist = [13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 0] 159 160 if ((ty.upper() == 'AN' and (int(c.time[i]) % int(c.dtime)) == 0) or 161 (ty.upper() != 'AN' and (int(c.step[i]) % int(c.dtime)) == 0 and 162 (int(c.step[i]) % int(c.dtime) == 0)) ) and \ 163 (int(c.time[i]) in btlist or c.purefc): 164 165 if ty not in self.types.keys(): 166 self.types[ty] = {'times': '', 'steps': ''} 167 168 if ti not in self.types[ty]['times']: 169 if self.types[ty]['times']: 170 self.types[ty]['times'] += '/' 171 self.types[ty]['times'] += ti 172 173 if st not in self.types[ty]['steps']: 174 if self.types[ty]['steps']: 175 self.types[ty]['steps'] += '/' 176 self.types[ty]['steps'] += st 177 i += 1 178 134 self.acctype = 
c.acctype 135 self.acctime = c.acctime 136 self.accmaxstep = c.accmaxstep 179 137 self.marsclass = c.marsclass 180 138 self.stream = c.stream … … 182 140 self.resol = c.resol 183 141 self.accuracy = c.accuracy 142 self.addpar = c.addpar 184 143 self.level = c.level 185 144 self.expver = c.expver 186 145 self.levelist = c.levelist 187 # for gaussian grid retrieval 188 self.glevelist = '1/to/' + c.level 146 self.glevelist = '1/to/' + c.level # in case of gaussian grid 189 147 self.gaussian = c.gaussian 190 148 self.grid = c.grid 191 149 self.area = c.area 150 self.purefc = c.purefc 192 151 self.outputfilelist = [] 193 152 194 195 # Now comes the nasty part that deals with the different 196 # scenarios we have: 197 # 1) Calculation of etadot on 198 # a) Gaussian grid 199 # b) Output grid 200 # c) Output grid using parameter 77 retrieved from MARS 201 # 3) Calculation/Retrieval of omega 202 # 4) Download also data for WRF 203 204 # Different grids need different retrievals 205 # SH = Spherical Harmonics, GG = Gaussian Grid, 206 # OG = Output Grid, ML = MultiLevel, SL = SingleLevel 207 self.params = {'SH__ML': '', 'SH__SL': '', 208 'GG__ML': '', 'GG__SL': '', 209 'OG__ML': '', 'OG__SL': '', 210 'OG_OROLSM_SL': '', 'OG_acc_SL': ''} 211 # the self.params dictionary stores a list of 212 # [param, levtype, levelist, grid] per key 213 214 if fluxes is False: 215 self.params['SH__SL'] = ['LNSP', 'ML', '1', 'OFF'] 216 # "SD/MSL/TCC/10U/10V/2T/2D/129/172" 217 self.params['OG__SL'] = ["141/151/164/165/166/167/168/129/172", \ 218 'SFC', '1', self.grid] 219 if c.addpar: 220 if c.addpar[0] == '/': 221 c.addpar = c.addpar[1:] 222 self.params['OG__SL'][0] += '/' + '/'.join(c.addpar) 223 224 if c.marsclass.upper() == 'EA' or c.marsclass.upper() == 'EP': 225 self.params['OG_OROLSM__SL'] = ["160/27/28/244", 226 'SFC', '1', self.grid] 227 else: 228 self.params['OG_OROLSM__SL'] = ["160/27/28/173", \ 229 'SFC', '1', self.grid] 230 231 self.params['OG__ML'] = ['T/Q', 'ML', self.levelist, 
self.grid] 232 233 #if c.gauss == '0' and c.eta == '1': 234 if not c.gauss and c.eta: 235 # the simplest case 236 self.params['OG__ML'][0] += '/U/V/77' 237 #elif c.gauss == '0' and c.eta == '0': 238 elif not c.gauss and not c.eta: 239 # this is not recommended (inaccurate) 240 self.params['OG__ML'][0] += '/U/V' 241 #elif c.gauss == '1' and c.eta == '0': 242 elif c.gauss and not c.eta: 243 # this is needed for data before 2008, or for reanalysis data 244 self.params['GG__SL'] = ['Q', 'ML', '1', \ 245 '{}'.format((int(self.resol) + 1) / 2)] 246 self.params['SH__ML'] = ['U/V/D', 'ML', self.glevelist, 'OFF'] 247 else: 248 print('Warning: This is a very costly parameter combination, \ 249 use only for debugging!') 250 self.params['GG__SL'] = ['Q', 'ML', '1', \ 251 '{}'.format((int(self.resol) + 1) / 2)] 252 self.params['GG__ML'] = ['U/V/D/77', 'ML', self.glevelist, \ 253 '{}'.format((int(self.resol) + 1) / 2)] 254 255 if c.omega: 256 self.params['OG__ML'][0] += '/W' 257 258 # add cloud water content if necessary 259 if c.cwc: 260 self.params['OG__ML'][0] += '/CLWC/CIWC' 261 262 # add vorticity and geopotential height for WRF if necessary 263 if c.wrf: 264 self.params['OG__ML'][0] += '/Z/VO' 265 if '/D' not in self.params['OG__ML'][0]: 266 self.params['OG__ML'][0] += '/D' 267 #wrf_sfc = 'sp/msl/skt/2t/10u/10v/2d/z/lsm/sst/ci/sd/stl1/ / 268 # stl2/stl3/stl4/swvl1/swvl2/swvl3/swvl4'.upper() 269 wrf_sfc = '134/235/167/165/166/168/129/172/34/31/141/ \ 270 139/170/183/236/39/40/41/42' 271 lwrt_sfc = wrf_sfc.split('/') 272 for par in lwrt_sfc: 273 if par not in self.params['OG__SL'][0]: 274 self.params['OG__SL'][0] += '/' + par 275 153 # Define the different types of field combinations (type, time, step) 154 self.types = {} 155 # Define the parameters and their level types, level list and 156 # grid resolution for the retrieval job 157 self.params = {} 158 159 if fluxes: 160 self._create_params_fluxes() 276 161 else: 277 self.params['OG_acc_SL'] = ["LSP/CP/SSHF/EWSS/NSSS/SSR", 
\ 278 'SFC', '1', self.grid] 279 162 self._create_params(c.gauss, c.eta, c.omega, c.cwc, c.wrf) 163 164 if fluxes and not c.purefc: 165 self._create_field_types_fluxes() 166 else: 167 self._create_field_types(c.type, c.time, c.step) 168 return 169 170 def _create_field_types(self, ftype, ftime, fstep): 171 '''Create the combination of field type, time and forecast step. 172 173 Parameters: 174 ----------- 175 ftype : :obj:`list` of :obj:`string` 176 List of field types. 177 178 ftime : :obj:`list` of :obj:`string` 179 The time in hours of the field. 180 181 fstep : :obj:`string` 182 Specifies the forecast time step from forecast base time. 183 Valid values are hours (HH) from forecast base time. 184 185 Return 186 ------ 187 188 ''' 189 i = 0 190 for ty, st, ti in zip(ftype, fstep, ftime): 191 btlist = range(24) 192 if self.basetime == '12': 193 btlist = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12] 194 if self.basetime == '00': 195 btlist = [13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 0] 196 197 # if ((ty.upper() == 'AN' and (int(c.time[i]) % int(c.dtime)) == 0) or 198 # (ty.upper() != 'AN' and (int(c.step[i]) % int(c.dtime)) == 0 and 199 # (int(c.step[i]) % int(c.dtime) == 0)) ) and \ 200 # (int(c.time[i]) in btlist or c.purefc): 201 202 if (int(ti) in btlist) or self.purefc: 203 204 if ((ty.upper() == 'AN' and (int(ti) % int(self.dtime)) == 0) or 205 (ty.upper() != 'AN' and (int(st) % int(self.dtime)) == 0)): 206 207 if ty not in self.types.keys(): 208 self.types[ty] = {'times': '', 'steps': ''} 209 210 if ti not in self.types[ty]['times']: 211 if self.types[ty]['times']: 212 self.types[ty]['times'] += '/' 213 self.types[ty]['times'] += ti 214 215 if st not in self.types[ty]['steps']: 216 if self.types[ty]['steps']: 217 self.types[ty]['steps'] += '/' 218 self.types[ty]['steps'] += st 219 i += 1 220 return 221 222 def _create_field_types_fluxes(self): 223 '''Create the combination of field type, time and forecast step 224 for the flux data. 
 225 226 Parameters: 227 ----------- 228 229 Return 230 ------ 231 232 ''' 233 self.types[str(self.acctype)] = {'times': str(self.acctime), 234 'steps': '{}/to/{}/by/{}'.format( 235 self.dtime, 236 self.accmaxstep, 237 self.dtime)} 238 return 239 240 def _create_params(self, gauss, eta, omega, cwc, wrf): 241 '''Define the specific parameter settings for retrieval. 242 243 The different parameters need specific grid types and level types 244 for retrieval. We might get the following combinations of types 245 (depending on selection and availability): 246 (These are short cuts for the grib file names (leading sequence) 247 SH__ML, OG__ML, GG__ML, SH__SL, OG__SL, GG__SL, OG_OROLSM_SL 248 where: 249 SH = Spherical Harmonics, GG = Gaussian Grid, OG = Output Grid, 250 ML = Model Level, SL = Surface Level 251 252 For each of these combinations there is a list of parameter names, 253 the level type, the level list and the grid resolution. 254 255 There are different scenarios for data extraction from MARS: 256 1) Retrieval of etadot 257 eta=1, gauss=0, omega=0 258 2) Calculation of etadot from divergence 259 eta=0, gauss=1, omega=0 260 3) Calculation of etadot from omega (only makes sense for debugging) 261 eta=0, gauss=0, omega=1 262 4) Retrieval and Calculation of etadot (only for debugging) 263 eta=1, gauss=1, omega=0 264 5) Download also specific model and surface level data for FLEXPART-WRF 265 266 Parameters: 267 ----------- 268 gauss : :obj:`integer` 269 Gaussian grid is retrieved. 270 271 eta : :obj:`integer` 272 Etadot parameter will be directly retrieved. 273 274 omega : :obj:`integer` 275 The omega parameter will be retrieved. 276 277 cwc : :obj:`integer` 278 The cloud liquid and ice water content will be retrieved. 279 280 wrf : :obj:`integer` 281 Additional model level and surface level data will be retrieved for 282 WRF/FLEXPART-WRF simulations. 
283 284 Return 285 ------ 286 287 ''' 288 # SURFACE FIELDS 289 #----------------------------------------------------------------------- 290 self.params['SH__SL'] = ['LNSP', 'ML', '1', 'OFF'] 291 self.params['OG__SL'] = ['SD/MSL/TCC/10U/10V/2T/2D/Z/LSM', \ 292 'SFC', '1', self.grid] 293 if self.addpar: 294 self.params['OG__SL'][0] += self.addpar 295 296 if self.marsclass.upper() == 'EA' or self.marsclass.upper() == 'EP': 297 self.params['OG_OROLSM__SL'] = ["SDOR/CVL/CVH/FSR", 298 'SFC', '1', self.grid] 299 else: 300 self.params['OG_OROLSM__SL'] = ["SDOR/CVL/CVH/SR", \ 301 'SFC', '1', self.grid] 302 303 # MODEL LEVEL FIELDS 304 #----------------------------------------------------------------------- 305 self.params['OG__ML'] = ['T/Q', 'ML', self.levelist, self.grid] 306 307 if not gauss and eta: 308 self.params['OG__ML'][0] += '/U/V/ETADOT' 309 elif gauss and not eta: 310 self.params['GG__SL'] = ['Q', 'ML', '1', \ 311 '{}'.format((int(self.resol) + 1) / 2)] 312 self.params['SH__ML'] = ['U/V/D', 'ML', self.glevelist, 'OFF'] 313 elif not gauss and not eta: 314 self.params['OG__ML'][0] += '/U/V' 315 else: 316 print('Warning: Collecting etadot and parameters for gaussian grid \ 317 is a very costly parameter combination, \ 318 use this combination only for debugging!') 319 self.params['GG__SL'] = ['Q', 'ML', '1', \ 320 '{}'.format((int(self.resol) + 1) / 2)] 321 self.params['GG__ML'] = ['U/V/D/77', 'ML', self.glevelist, \ 322 '{}'.format((int(self.resol) + 1) / 2)] 323 324 if omega: 325 self.params['OG__ML'][0] += '/W' 326 327 if cwc: 328 self.params['OG__ML'][0] += '/CLWC/CIWC' 329 330 # ADDITIONAL FIELDS FOR FLEXPART-WRF MODEL (IF QUESTIONED) 331 #----------------------------------------------------------------------- 332 if wrf: 333 self.params['OG__ML'][0] += '/Z/VO' 334 if '/D' not in self.params['OG__ML'][0]: 335 self.params['OG__ML'][0] += '/D' 336 wrf_sfc = ['SP','SKT','SST','CI','STL1','STL2', 'STL3','STL4', 337 'SWVL1','SWVL2','SWVL3','SWVL4'] 338 for par in 
wrf_sfc: 339 if par not in self.params['OG__SL'][0]: 340 self.params['OG__SL'][0] += '/' + par 341 342 return 343 344 345 def _create_params_fluxes(self): 346 '''Define the parameter setting for flux data. 347 348 Flux data are accumulated fields in time and are stored on the 349 surface level. The leading short cut name for the grib files is: 350 "OG_acc_SL" with OG for Regular Output Grid, SL for Surface Level, and 351 acc for Accumulated Grid. 352 The params dictionary stores a list of parameter names, the level type, 353 the level list and the grid resolution. 354 355 The flux data are: LSP/CP/SSHF/EWSS/NSSS/SSR 356 357 Parameters: 358 ----------- 359 360 Return 361 ------ 362 363 ''' 364 self.params['OG_acc_SL'] = ["LSP/CP/SSHF/EWSS/NSSS/SSR", \ 365 'SFC', '1', self.grid] 280 366 return 281 367 … … 401 487 indexfile = os.path.join(inputdir, _config.FILE_GRIB_INDEX) 402 488 silent_remove(indexfile) 403 grib = Grib Tools(inputfiles.files)489 grib = GribUtil(inputfiles.files) 404 490 # creates new index file 405 491 iid = grib.index(index_keys=index_keys, index_file=indexfile) … … 749 835 # index_vals looks like e.g.: 750 836 # index_vals[0]: ('20171106', '20171107', '20171108') ; date 751 # index_vals[1]: ('0', ' 1200', '1800', '600') ; time837 # index_vals[1]: ('0', '600', '1200', '1800') ; time 752 838 # index_vals[2]: ('0', '12', '3', '6', '9') ; stepRange 753 839 754 840 if c.rrint: 755 start_date = datetime.strptime(c.start_date + '00', '%Y%m%d%H') 756 end_date = datetime.strptime(c.end_date + '23', '%Y%m%d%H') 841 if not c.purefc: 842 start_date = datetime.strptime(c.start_date + '00', '%Y%m%d%H') 843 end_date = datetime.strptime(c.end_date + '23', '%Y%m%d%H') 844 else: 845 sdate_str = c.start_date + '{:0>2}'.format(index_vals[1][0]) 846 start_date = datetime.strptime(sdate_str, '%Y%m%d%H') 847 edate_str = c.end_date + '{:0>2}'.format(index_vals[1][-1]) 848 end_date = datetime.strptime(edate_str, '%Y%m%d%H') 849 end_date = end_date + 
timedelta(hours=c.maxstep) 850 757 851 info = get_informations(os.path.join(c.inputdir, 758 852 inputfiles.files[0])) 759 dims = get_dimensions(c, info) 853 dims = get_dimensions(info, c.purefc, c.dtime, index_vals, 854 start_date, end_date) 760 855 # create numpy array 761 856 lsp_np = np.zeros((dims[1] * dims[0], dims[2]), dtype=np.float64) … … 763 858 it_lsp = 0 764 859 it_cp = 0 860 date_list = [] 861 step_list = [] 765 862 766 863 # initialize dictionaries to store values … … 792 889 # create correct timestamp from the three time informations 793 890 cdate = str(codes_get(gid, 'date')) 794 ctime = '{:0>2}'.format(codes_get(gid, 'time')/100) 795 cstep = '{:0>3}'.format(codes_get(gid, 'step')) 891 time = codes_get(gid, 'time')/100 # integer 892 step = codes_get(gid, 'step') # integer 893 ctime = '{:0>2}'.format(time) 894 cstep = '{:0>3}'.format(step) 895 796 896 t_date = datetime.strptime(cdate + ctime, '%Y%m%d%H') 797 t_dt = t_date + timedelta(hours= int(cstep))798 t_m1dt = t_date + timedelta(hours= int(cstep)-int(c.dtime))799 t_m2dt = t_date + timedelta(hours= int(cstep)-2*int(c.dtime))897 t_dt = t_date + timedelta(hours=step) 898 t_m1dt = t_date + timedelta(hours=step-int(c.dtime)) 899 t_m2dt = t_date + timedelta(hours=step-2*int(c.dtime)) 800 900 t_enddate = None 801 901 … … 823 923 g_handle = open(gnout, 'w') 824 924 825 # read message for message and store relevant data fields 925 # read message for message and store relevant data fields, where 826 926 # data keywords are stored in pars 827 927 while True: 828 928 if not gid: 829 929 break 830 parId = codes_get(gid, 'paramId') 831 step = codes_get(gid, 'step') 832 time = codes_get(gid, 'time') 833 ni = codes_get(gid, 'Ni') 834 nj = codes_get(gid, 'Nj') 930 parId = codes_get(gid, 'paramId') # integer 931 step = codes_get(gid, 'step') # integer 932 time = codes_get(gid, 'time') # integer 933 ni = codes_get(gid, 'Ni') # integer 934 nj = codes_get(gid, 'Nj') # integer 835 935 if parId not in orig_vals.keys(): 
836 936 # parameter is not a flux, find next one … … 861 961 # store precipitation if new disaggregation method is selected 862 962 # only the exact days are needed 863 if start_date <= t_dt <= end_date: 864 print 'DATE: ', t_dt 865 if c.rrint and parId == 142: 866 print len(deac_vals[parId]) 867 lsp_np[:,it_lsp] = deac_vals[parId][-1][:] 868 it_lsp += 1 869 elif c.rrint and parId == 143: 870 cp_np[:,it_cp] = deac_vals[parId][-1][:] 871 it_cp += 1 963 if c.rrint: 964 if start_date <= t_dt <= end_date: 965 if not c.purefc: 966 if t_dt not in date_list: 967 date_list.append(t_dt) 968 step_list = [0] 969 else: 970 if t_date not in date_list: 971 date_list.append(t_date) 972 if step not in step_list: 973 step_list.append(step) 974 if c.rrint and parId == 142: 975 lsp_np[:,it_lsp] = deac_vals[parId][-1][:] 976 it_lsp += 1 977 elif c.rrint and parId == 143: 978 cp_np[:,it_cp] = deac_vals[parId][-1][:] 979 it_cp += 1 872 980 873 981 # information printout … … 875 983 876 984 # length of deac_vals[parId] corresponds to the 877 # number of time steps (max. 4) 985 # number of time steps, max. 4 are needed for disaggegration 986 # with the old and original method 878 987 # run over all grib messages and perform 879 # shifting in time for old disaggegration method988 # shifting in time 880 989 if len(deac_vals[parId]) >= 3: 881 990 if len(deac_vals[parId]) > 3: … … 924 1033 if step == c.maxstep and c.purefc or \ 925 1034 t_dt == t_enddate: 926 927 values = deac_vals[parId][3] 928 codes_set_values(gid, values) 929 codes_set(gid, 'stepRange', 0) 930 truedatetime = t_m2dt + timedelta(hours= 931 2*int(c.dtime)) 932 codes_set(gid, 'time', truedatetime.hour * 100) 933 codes_set(gid, 'date', truedatetime.year * 10000 + 934 truedatetime.month * 100 + 935 truedatetime.day) 936 codes_write(gid, h_handle) 937 938 #values = (deac_vals[parId][1]+deac_vals[parId][2])/2. 
1035 # last step 1036 if c.purefc: 1037 values = deac_vals[parId][3] 1038 codes_set_values(gid, values) 1039 codes_set(gid, 'stepRange', step) 1040 #truedatetime = t_m2dt + timedelta(hours=2*int(c.dtime)) 1041 codes_write(gid, h_handle) 1042 else: 1043 values = deac_vals[parId][3] 1044 codes_set_values(gid, values) 1045 codes_set(gid, 'stepRange', 0) 1046 truedatetime = t_m2dt + timedelta(hours=2*int(c.dtime)) 1047 codes_set(gid, 'time', truedatetime.hour * 100) 1048 codes_set(gid, 'date', int(truedatetime.strftime('%Y%m%d'))) 1049 codes_write(gid, h_handle) 1050 939 1051 if parId == 142 or parId == 143: 940 1052 values = disaggregation.darain(list(reversed(deac_vals[parId]))) … … 942 1054 values = disaggregation.dapoly(list(reversed(deac_vals[parId]))) 943 1055 944 codes_set(gid, 'stepRange', 0) 945 truedatetime = t_m2dt + timedelta(hours=int(c.dtime)) 946 codes_set(gid, 'time', truedatetime.hour * 100) 947 codes_set(gid, 'date', truedatetime.year * 10000 + 948 truedatetime.month * 100 + 949 truedatetime.day) 950 codes_set_values(gid, values) 951 codes_write(gid, g_handle) 1056 # step before last step 1057 if c.purefc: 1058 codes_set(gid, 'stepRange', step-int(c.dtime)) 1059 #truedatetime = t_m2dt + timedelta(hours=int(c.dtime)) 1060 codes_set_values(gid, values) 1061 codes_write(gid, g_handle) 1062 else: 1063 codes_set(gid, 'stepRange', 0) 1064 truedatetime = t_m2dt + timedelta(hours=int(c.dtime)) 1065 codes_set(gid, 'time', truedatetime.hour * 100) 1066 codes_set(gid, 'date', int(truedatetime.strftime('%Y%m%d'))) 1067 codes_set_values(gid, values) 1068 codes_write(gid, g_handle) 952 1069 953 1070 codes_release(gid) … … 961 1078 codes_index_release(iid) 962 1079 963 # disaggregation.IA3(lsp_np) 964 # pro gitterpunkt die zeitserie disagg, oder geht es auch mit feld 965 # wenn zurück dann über zeitschritte itterieren und rausschreiben. 966 # original in die flux 967 # subgrid wohin? extrafiles die beide oder jeweils einen subgrid beinhalten? 
968 # kann time hh und mm fassen? 969 970 print lsp_np.shape 971 print lsp_np 972 1080 if c.rrint: 1081 self._create_rr_grib_dummy(inputfiles.files[0], c.inputdir) 1082 1083 self._prep_new_rrint(ni, nj, lsp_np.shape[1], lsp_np, 1084 cp_np, date_list, step_list, c) 973 1085 974 1086 return 975 1087 1088 def _prep_new_rrint(self, ni, nj, nt, lsp_np, cp_np, date_list, step_list, c): 1089 '''Calculates and writes out the disaggregated precipitation fields. 1090 1091 Disaggregation is done in time and original times are written to the 1092 flux files, while the additional subgrid times are written to 1093 extra files. 1094 1095 Parameters 1096 ---------- 1097 ni : :obj:`integer` 1098 Amount of zonal grid points. 1099 1100 nj : :obj:`integer` 1101 Amount of meridional grid points. 1102 1103 nt : :obj:`integer` 1104 Number of time steps. 1105 1106 lsp_np : :obj:`numpy array` of :obj:`float` 1107 The large scale precipitation fields for each time step. 1108 Shape (ni * nj, nt). 1109 1110 cp_np : :obj:`numpy array` of :obj:`float` 1111 The convective precipitation fields for each time step. 1112 Shape (ni * nj, nt). 1113 1114 date_list : :obj:`list` of :obj:`datetime` 1115 The list of dates for which the disaggregation is to be done. 1116 1117 step_list : :obj:`list` of :obj:`integer` 1118 The list of steps for a single forecast time. 1119 Only necessary for pure forecasts. 1120 1121 c : :obj:`ControlFile` 1122 Contains all the parameters of CONTROL file and 1123 command line. 1124 1125 Return 1126 ------ 1127 1128 ''' 1129 print('... disaggregation or precipitation with new method.') 1130 lsp_new_np = np.zeros((ni * nj, nt * 3), dtype=np.float64) 1131 cp_new_np = np.zeros((ni * nj, nt * 3), dtype=np.float64) 1132 1133 # do the disaggregation, but neglect the last value of the 1134 # time series. This one corresponds for example to 24 hour, 1135 # which we don't need. 
1136 for ix in range(ni*nj): 1137 lsp_new_np[ix,:] = disaggregation.IA3(lsp_np[ix,:])[:-1] 1138 cp_new_np[ix,:] = disaggregation.IA3(cp_np[ix,:])[:-1] 1139 1140 # write to grib files (full/orig times to flux file and inbetween 1141 # times into seperate end files) 1142 print('... write disaggregated precipitation to files.') 1143 it = 0 1144 for date in date_list: 1145 for step in step_list: 1146 tmpfile = os.path.join(c.inputdir, 'rr_grib_dummy.grb') 1147 1148 if c.purefc: 1149 fluxfilename = 'flux' + date.strftime('%Y%m%d.%H') + \ 1150 '.{:0>3}'.format(step) 1151 filename1 = c.prefix + date.strftime('%y%m%d.%H') + \ 1152 '.{:0>3}'.format(step) + '_1' 1153 filename2 = c.prefix + date.strftime('%y%m%d.%H') + \ 1154 '.{:0>3}'.format(step) + '_2' 1155 else: 1156 fluxfilename = 'flux' + date.strftime('%Y%m%d%H') 1157 filename1 = c.prefix + date.strftime('%y%m%d%H') + '_1' 1158 filename2 = c.prefix + date.strftime('%y%m%d%H') + '_2' 1159 1160 # collect for final processing 1161 self.outputfilelist.append(os.path.basename(fluxfilename)) 1162 self.outputfilelist.append(os.path.basename(filename1)) 1163 self.outputfilelist.append(os.path.basename(filename2)) 1164 1165 # write original time step to flux file as usual 1166 fluxfile = GribUtil(os.path.join(c.inputdir, fluxfilename)) 1167 fluxfile.set_keys(tmpfile, filemode='a', strict=True, 1168 wherekeynames=['paramId'], wherekeyvalues=[142], 1169 keynames=['date','time','stepRange','values'], 1170 keyvalues=[int(date.strftime('%Y%m%d')), 1171 date.hour*100, step, lsp_new_np[:,it]], 1172 ) 1173 fluxfile.set_keys(tmpfile, filemode='a', strict=True, 1174 wherekeynames=['paramId'], wherekeyvalues=[143], 1175 keynames=['date','time','stepRange','values'], 1176 keyvalues=[int(date.strftime('%Y%m%d')), 1177 date.hour*100, step, cp_new_np[:,it]] 1178 ) 1179 1180 # write first subgrid time step 1181 endfile1 = GribUtil(os.path.join(c.inputdir, filename1)) 1182 endfile1.set_keys(tmpfile, filemode='w', strict=True, 1183 
wherekeynames=['paramId'], wherekeyvalues=[142], 1184 keynames=['date','time','stepRange','values'], 1185 keyvalues=[int(date.strftime('%Y%m%d')), 1186 date.hour*100, step, lsp_new_np[:,it+1]] 1187 ) 1188 endfile1.set_keys(tmpfile, filemode='a', strict=True, 1189 wherekeynames=['paramId'], wherekeyvalues=[143], 1190 keynames=['date','time','stepRange','values'], 1191 keyvalues=[int(date.strftime('%Y%m%d')), 1192 date.hour*100, step, cp_new_np[:,it+1]] 1193 ) 1194 1195 # write second subgrid time step 1196 endfile2 = GribUtil(os.path.join(c.inputdir, filename2)) 1197 endfile2.set_keys(tmpfile, filemode='w', strict=True, 1198 wherekeynames=['paramId'], wherekeyvalues=[142], 1199 keynames=['date','time','stepRange','values'], 1200 keyvalues=[int(date.strftime('%Y%m%d')), 1201 date.hour*100, step, lsp_new_np[:,it+2]] 1202 ) 1203 endfile2.set_keys(tmpfile, filemode='a', strict=True, 1204 wherekeynames=['paramId'], wherekeyvalues=[143], 1205 keynames=['date','time','stepRange','values'], 1206 keyvalues=[int(date.strftime('%Y%m%d')), 1207 date.hour*100, step, cp_new_np[:,it+2]] 1208 ) 1209 it = it + 3 # jump to next original time step 1210 return 1211 1212 def _create_rr_grib_dummy(self, ifile, inputdir): 1213 '''Creates a grib file with a dummy message for the two precipitation 1214 types lsp and cp each. 1215 1216 Parameters 1217 ---------- 1218 ifile : :obj:`string` 1219 Filename of the input file to read the grib messages from. 1220 1221 inputdir : :obj:`string`, optional 1222 Path to the directory where the retrieved data is stored. 
1223 1224 Return 1225 ------ 1226 1227 ''' 1228 1229 gribfile = GribUtil(os.path.join(inputdir,'rr_grib_dummy.grb')) 1230 1231 gribfile.copy_dummy_msg(ifile, keynames=['paramId'], 1232 keyvalues=[142], filemode='w') 1233 1234 gribfile.copy_dummy_msg(ifile, keynames=['paramId'], 1235 keyvalues=[143], filemode='a') 1236 1237 return 976 1238 977 1239 def create(self, inputfiles, c): … … 1029 1291 # index_vals looks like e.g.: 1030 1292 # index_vals[0]: ('20171106', '20171107', '20171108') ; date 1031 # index_vals[1]: ('0', ' 1200', '1800', '600') ; time1293 # index_vals[1]: ('0', '600', '1200', '1800') ; time 1032 1294 # index_vals[2]: ('0', '12', '3', '6', '9') ; stepRange 1033 1295 … … 1117 1379 codes_set(gid, 'paramId', 201031) 1118 1380 codes_write(gid, fdict['22']) 1381 scwc = None 1119 1382 elif c.wrf and paramId in [129, 138, 155] and \ 1120 1383 levtype == 'hybrid': # Z, VO, D … … 1316 1579 # read template COMMAND file 1317 1580 with open(os.path.expandvars(os.path.expanduser( 1318 c.flexpart _root_scripts)) + '/../Options/COMMAND', 'r') as f:1581 c.flexpartdir)) + '/../Options/COMMAND', 'r') as f: 1319 1582 lflist = f.read().split('\n') 1320 1583 … … 1342 1605 os.chdir(c.outputdir) 1343 1606 p = subprocess.check_call([ 1344 os.path.expandvars(os.path.expanduser(c.flexpart _root_scripts))1607 os.path.expandvars(os.path.expanduser(c.flexpartdir)) 1345 1608 + '/../FLEXPART_PROGRAM/grib2flexpart', 'useAvailable', '.']) 1346 1609 os.chdir(pwd)
Note: See TracChangeset
for help on using the changeset viewer.