Changeset 631ba13 in flex_extract.git
- Timestamp:
- Nov 29, 2018, 10:49:59 PM (5 years ago)
- Branches:
- master, ctbto, dev
- Children:
- 45c6337
- Parents:
- 38e83ba
- Location:
- source/python
- Files:
-
- 3 edited
Legend:
- Unmodified
- Added
- Removed
-
source/python/classes/ControlFile.py
rff2a11c r631ba13 547 547 return sorted(l) 548 548 549 def check_ppid(self, ppid): 550 '''Sets the current PPID. 551 552 Parameters 553 ---------- 554 ppid : :obj:`int` or :obj:`None` 555 Contains the ppid number provided by the command line parameter 556 or is None otherwise. 557 558 Return 559 ------ 560 561 ''' 562 563 if not ppid: 564 self.ppid = str(os.getppid()) 565 else: 566 self.ppid = ppid 567 568 return -
source/python/mods/get_mars_data.py
r403cbf1 r631ba13 128 128 129 129 ''' 130 c.ecapi = ecapi 130 131 131 132 if not os.path.exists(c.inputdir): … … 139 140 elif c.request == 2: 140 141 print("Retrieving EC data and printing mars request!") 141 # first, write header with the mars parameter to file 142 # create a dummy MarsRetrieval to get parameter 143 MR = MarsRetrieval(None, None) 144 attrs = vars(MR).copy() 145 del attrs['server'] 146 del attrs['public'] 147 marsfile = os.path.join(c.inputdir, _config.FILE_MARS_REQUESTS) 148 with open(marsfile, 'w') as f: 149 f.write('request_number' + ', ') 150 f.write(', '.join(str(key) for key in sorted(attrs.iterkeys()))) 151 f.write('\n') 142 write_reqheader(os.path.join(c.inputdir, _config.FILE_MARS_REQUESTS)) 152 143 153 144 print("start date %s " % (c.start_date)) 154 145 print("end date %s " % (c.end_date)) 155 146 156 if ecapi: 147 server = mk_server(c) 148 149 # if data are to be retrieved, clean up any old grib files 150 if c.request == 0 or c.request == 2: 151 remove_old('*grb') 152 153 # -------------- flux data ------------------------------------------------ 154 start, end, chunk = mk_dates(c, fluxes=True) 155 do_retrievement(c, server, start, end, datechunk, fluxes=True) 156 157 # -------------- non flux data -------------------------------------------- 158 start, end, chunk = mk_dates(c, fluxes=False) 159 do_retrievement(c, server, start, end, datechunk, fluxes=False) 160 161 return 162 163 def write_reqheader(marsfile): 164 '''Writes header with column names into mars request file. 165 166 Parameters 167 ---------- 168 marsfile : :obj:`string` 169 Path to the mars request file. 
170 171 Return 172 ------ 173 174 ''' 175 MR = MarsRetrieval(None, None) 176 attrs = vars(MR).copy() 177 del attrs['server'] 178 del attrs['public'] 179 with open(marsfile, 'w') as f: 180 f.write('request_number' + ', ') 181 f.write(', '.join(str(key) for key in sorted(attrs.iterkeys()))) 182 f.write('\n') 183 184 return 185 186 def mk_server(c): 187 '''Creates server connection if ECMWF WebAPI is available. 188 189 Parameters 190 ---------- 191 c : :obj:`ControlFile` 192 Contains all the parameters of CONTROL file and 193 command line. 194 195 Return 196 ------ 197 server : :obj:`ECMWFDataServer` or :obj:`ECMWFService` 198 Connection to ECMWF server via python interface ECMWF WebAPI. 199 200 ''' 201 if c.ecapi: 157 202 if c.public: 158 203 server = ecmwfapi.ECMWFDataServer() … … 162 207 server = False 163 208 164 c.ecapi = ecapi165 209 print('Using ECMWF WebAPI: ' + str(c.ecapi)) 166 210 167 # basetime geht rückwärts 168 169 # if basetime 00 170 # dann wird von 12 am vortag bis 00 am start tag geholt 171 # aber ohne 12 selbst sondern 12 + step 172 173 # if basetime 12 174 # dann wird von 00 + step bis 12 am start tag geholt 175 176 # purer forecast wird vorwärts bestimmt. 177 # purer forecast mode ist dann wenn größer 24 stunden 178 # wie kann das noch festgestellt werden ???? 179 # nur FC und steps mehr als 24 ? 180 # die einzige problematik beim reinen forecast ist die benennung der files! 181 # also sobald es Tagesüberschneidungen gibt 182 # allerdings ist das relevant und ersichtlich an den NICHT FLUSS DATEN 183 211 return server 212 213 214 def mk_dates(c, fluxes): 215 '''Prepares start and end date depending on flux or non flux data. 
216 217 If forecast for maximum one day (upto 24h) are to be retrieved, then 218 collect accumulation data (flux data) with additional days in the 219 beginning and at the end (used for complete disaggregation of 220 original period) 221 222 If forecast data longer than 24h are to be retrieved, then 223 collect accumulation data (flux data) with the exact start and end date 224 (disaggregation will be done for the exact time period with 225 boundary conditions) 226 227 Since for basetime the extraction contains the 12 hours upfront, 228 if basetime is 0, the starting date has to be the day before and 229 230 Parameters 231 ---------- 232 c : :obj:`ControlFile` 233 Contains all the parameters of CONTROL file and 234 command line. 235 236 fluxes : :obj:`boolean`, optional 237 Decides if the flux parameter settings are stored or 238 the rest of the parameter list. 239 Default value is False. 240 241 Return 242 ------ 243 start : :obj:`datetime` 244 The start date of the retrieving data set. 245 246 end : :obj:`datetime` 247 The end date of the retrieving data set. 248 249 chunk : :obj:`datetime` 250 Time period in days for one single mars retrieval. 
251 252 ''' 184 253 start = datetime.strptime(c.start_date, '%Y%m%d') 185 254 end = datetime.strptime(c.end_date, '%Y%m%d') 186 # time period for one single retrieval 187 datechunk = timedelta(days=int(c.date_chunk)) 188 189 if c.basetime == '00': 255 chunk = timedelta(days=int(c.date_chunk)) 256 257 if c.basetime: 258 if c.basetime == '00': 259 start = start - timedelta(days=1) 260 261 if c.maxstep <= 24 and fluxes: 190 262 start = start - timedelta(days=1) 191 192 if c.maxstep <= 24: 193 startm1 = start - timedelta(days=1) 194 195 if c.basetime == '00' or c.basetime == '12': 196 # endp1 = end + timedelta(days=1) 197 endp1 = end 198 else: 199 # endp1 = end + timedelta(days=2) 200 endp1 = end + timedelta(days=1) 201 202 # -------------- flux data ------------------------------------------------ 203 if c.request == 0 or c.request == 2: 204 print('... removing old flux content of ' + c.inputdir) 205 tobecleaned = UioFiles(c.inputdir, 206 '*_acc_*.' + str(os.getppid()) + '.*.grb') 207 tobecleaned.delete_files() 208 209 # if forecast for maximum one day (upto 24h) are to be retrieved, 210 # collect accumulation data (flux data) 211 # with additional days in the beginning and at the end 212 # (used for complete disaggregation of original period) 213 if c.maxstep <= 24: 214 do_retrievement(c, server, startm1, endp1, datechunk, fluxes=True) 215 216 # if forecast data longer than 24h are to be retrieved, 217 # collect accumulation data (flux data) 218 # with the exact start and end date 219 # (disaggregation will be done for the 220 # exact time period with boundary conditions) 221 else: 222 do_retrievement(c, server, start, end, datechunk, fluxes=True) 223 224 # -------------- non flux data -------------------------------------------- 225 if c.request == 0 or c.request == 2: 226 print('... removing old non flux content of ' + c.inputdir) 227 tobecleaned = UioFiles(c.inputdir, 228 '*__*.' 
+ str(os.getppid()) + '.*.grb') 229 tobecleaned.delete_files() 230 231 do_retrievement(c, server, start, end, datechunk, fluxes=False) 232 233 return 263 end = end + timedelta(days=1) 264 265 return start, end, chunk 266 267 def remove_old(pattern): 268 '''Deletes old retrieval files matching the pattern. 269 270 Parameters 271 ---------- 272 pattern : :obj:`string` 273 The sub string pattern which identifies the files to be deleted. 274 275 Return 276 ------ 277 278 ''' 279 print('... removing old content of ' + c.inputdir) 280 281 tobecleaned = UioFiles(c.inputdir, pattern) 282 tobecleaned.delete_files() 283 284 return 285 234 286 235 287 def do_retrievement(c, server, start, end, delta_t, fluxes=False): -
source/python/mods/prepare_flexpart.py
r38e83ba r631ba13 126 126 127 127 ''' 128 # necessary only if script is running by itself 129 if not ppid: 130 c.ppid = str(os.getppid()) 131 else: 132 c.ppid = ppid 128 c.check_ppid(ppid) 133 129 134 130 c.ecapi = ecapi
Note: See TracChangeset
for help on using the changeset viewer.