Commit 4aee4cea authored by Christoph Heim

updates for arpege don't know what.

parent c0a02a1e
@@ -15,7 +15,7 @@ from datetime import datetime, timedelta
 from multiprocessing import Pool
 from pathlib import Path
 from cdo import Cdo
-from package.utilities import Timer, cdo_mergetime, write_grid_file, cd
+from package.utilities import Timer, cdo_mergetime, write_grid_des_file, cd
 from namelist import domain, padding
 from functions import paste_dir_names
 ###############################################################################
@@ -151,9 +151,14 @@ def sellatlon_ARPEGE(inp_file, out_file, dt, box, options, var_name, var_dicts,
     # Changing directory allows to run in parallel by preventing
     # _tmpfiles from being overwritten by parallel processes.
     with cd(tmp_dir):
+        #subprocess.call([command, inp_file,
+        #                 os.path.split(split_files)[1]],
+        #                 stdout=subprocess.DEVNULL)
+        print(inp_file)
         subprocess.call([command, inp_file,
-                         os.path.split(split_files)[1]],
-                         stdout=subprocess.DEVNULL)
+                         os.path.split(split_files)[1]])
+        quit()
         # remove all split variables that are irrelevant
         keep_files = []
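The with cd(tmp_dir): pattern above is what makes the splitting step safe to run under multiprocessing.Pool: every worker changes into its own temporary directory, so the _tmpfiles written during the split cannot be overwritten by a parallel process. A minimal sketch of what package.utilities.cd presumably looks like (the helper itself is not part of this diff, so treat it as an assumption):

# Assumed sketch of package.utilities.cd (not shown in this diff): a context
# manager that changes into `path` on entry and restores the previous working
# directory on exit, even if the body raises.
import os
from contextlib import contextmanager

@contextmanager
def cd(path):
    prev = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(prev)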
@@ -172,53 +177,53 @@ def sellatlon_ARPEGE(inp_file, out_file, dt, box, options, var_name, var_dicts,
             for file in search:
                 os.remove(file)
-        # Check if valid files remain
-        tmp_files = glob.glob(get_splf(split_files, var_dicts[var_name]))
-        # if not, this means that the grib file is broken
-        if len(tmp_files) == 0:
-            # write this to the broken grib files list
-            with open(broken_grib_file, 'a') as f:
-                f.write(file_code+'\n')
-            print('No valid files for ' + file_code)
-            return(TM)
-        TM.stop('prep')
-        # if grid does not exist compute it
-        TM.start('grid')
-        if not os.path.exists(weights_file):
-            comp_weights_file(target_grid, weights_file,
-                              tmp_files[0], grid_def_file,
-                              res, box, options)
-        TM.stop('grid')
-        # cdo
-        TM.start('cdo')
-        merge_files = []
-        for tmp_file in tmp_files:
-            input = ("-sellonlatbox,{},{},{},{} -setgrid,{}"+
-                     " -setgridtype,regular {}").format(
-                        box['lon'].start, box['lon'].stop,
-                        box['lat'].start, box['lat'].stop,
-                        grid_def_file,
-                        tmp_file)
-            if var_dicts[var_name]['vdim'] == '3D':
-                out_file_use = tmp_file + '.nc'
-                merge_files.append(out_file_use)
-            else:
-                out_file_use = out_file
-            if options['rm_tmp_files'] and os.path.exists(out_file_use):
-                os.remove(out_file_use)
-            if not os.path.exists(out_file_use):
-                ofile = cdo.remap(target_grid, weights_file,
-                                  input=input, output=out_file_use,
-                                  options='-f nc4')
-        # merge vertical levels
-        if var_dicts[var_name]['vdim'] == '3D':
-            merge_files.sort()
-            cdo.merge(input=merge_files, output=out_file)
-        TM.stop('cdo')
+        ## Check if valid files remain
+        #tmp_files = glob.glob(get_splf(split_files, var_dicts[var_name]))
+        ## if not, this means that the grib file is broken
+        #if len(tmp_files) == 0:
+        #    # write this to the broken grib files list
+        #    with open(broken_grib_file, 'a') as f:
+        #        f.write(file_code+'\n')
+        #    print('No valid files for ' + file_code)
+        #    return(TM)
+        #TM.stop('prep')
+        ## if grid does not exist compute it
+        #TM.start('grid')
+        #if not os.path.exists(weights_file):
+        #    comp_weights_file(target_grid, weights_file,
+        #                      tmp_files[0], grid_def_file,
+        #                      res, box, options)
+        #TM.stop('grid')
+        ## cdo
+        #TM.start('cdo')
+        #merge_files = []
+        #for tmp_file in tmp_files:
+        #    input = ("-sellonlatbox,{},{},{},{} -setgrid,{}"+
+        #             " -setgridtype,regular {}").format(
+        #                box['lon'].start, box['lon'].stop,
+        #                box['lat'].start, box['lat'].stop,
+        #                grid_def_file,
+        #                tmp_file)
+        #    if var_dicts[var_name]['vdim'] == '3D':
+        #        out_file_use = tmp_file + '.nc'
+        #        merge_files.append(out_file_use)
+        #    else:
+        #        out_file_use = out_file
+        #    if options['rm_tmp_files'] and os.path.exists(out_file_use):
+        #        os.remove(out_file_use)
+        #    if not os.path.exists(out_file_use):
+        #        ofile = cdo.remap(target_grid, weights_file,
+        #                          input=input, output=out_file_use,
+        #                          options='-f nc4')
+        ## merge vertical levels
+        #if var_dicts[var_name]['vdim'] == '3D':
+        #    merge_files.sort()
+        #    cdo.merge(input=merge_files, output=out_file)
+        #TM.stop('cdo')
     return(TM)
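For readers of the now commented-out block: the CDO operators in the input string chain from right to left, so each split GRIB file is first converted to a regular grid (-setgridtype,regular), given the grid description (-setgrid), cut to the lon/lat box (-sellonlatbox), and only then remapped onto the target grid with the precomputed weights. A self-contained sketch of that call through the python-cdo bindings, with purely illustrative file names:

# Illustrative stand-in names only; the chained operator string mirrors the
# one built in the commented-out block above.
from cdo import Cdo

cdo = Cdo()
box = {'lon': slice(0, 20), 'lat': slice(40, 60)}            # example domain box
input = ("-sellonlatbox,{},{},{},{} -setgrid,{}" +
         " -setgridtype,regular {}").format(
            box['lon'].start, box['lon'].stop,
            box['lat'].start, box['lat'].stop,
            'arpege_grid_def.txt',                           # assumed grid description file
            'tmp_T_00001.grb')                               # one split GRIB file (assumed)
cdo.remap('latlon_target_grid.txt', 'remap_weights.nc',      # assumed target grid + weights
          input=input, output='tmp_T_00001.nc', options='-f nc4')

Computing the weights file once (the comp_weights_file step guarded by os.path.exists) is what keeps the per-file remap cheap: cdo then only applies the stored interpolation weights instead of recomputing them for every split file.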
@@ -255,8 +260,8 @@ if __name__ == '__main__':
         # missing: P (derive with PS and vertical grid)
         # missing: SST
         #'3D':['U', 'V'],
-        #'3D':['U'],
-        '2D':['PS'],
+        '3D':['T', 'W'],
+        #'2D':['PS'],
         #'2D':['LWUTOA', 'T2M'],
     }
@@ -265,7 +270,7 @@ if __name__ == '__main__':
     main_vars = {'3D':'T','2D':'LWUTOA'}
     run_var_type = '3D'
-    run_var_type = '2D'
+    #run_var_type = '2D'
     var_names = var_namess[run_var_type]
     main_var = main_vars[run_var_type]
@@ -296,11 +301,16 @@ if __name__ == '__main__':
     ##### 3D
     elif run_var_type == '3D':
         pass
+        #first_date = datetime(2016,8,1,3)
+        #last_date = datetime(2016,9,9)
         ###### DONE
+        first_date = datetime(2016,8,1,3)
+        last_date = datetime(2016,9,9)
         ###### DONE
+        #first_date = datetime(2016,8,1,3)
+        #last_date = datetime(2016,8,2,3)
+        first_date = datetime(2016,8,4,0)
+        last_date = datetime(2016,8,4,0)
@@ -390,7 +400,7 @@ if __name__ == '__main__':
     # target grid on which to interpolate the model output
     target_grid = os.path.join('grids','latlon_{}km_dom_{}'.format(
                                 remap_res, domain['code']))
-    write_grid_file(box, target_grid, remap_res)
+    write_grid_des_file(box, target_grid, remap_res)
     # find times and files that should be extracted
     # and prepare arguments for function
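The renamed helper write_grid_des_file presumably writes a CDO grid description file for the regular lat-lon target grid that the remap step interpolates onto; its implementation lives in package.utilities and is not shown in this diff. A hedged sketch under that assumption, with the km-to-degree conversion purely illustrative:

# Assumed sketch of write_grid_des_file (not part of this diff): write a CDO
# 'lonlat' grid description covering the analysis box at roughly remap_res km
# spacing, which cdo can then use directly as the remap target grid.
def write_grid_des_file(box, target_grid, remap_res):
    dlon = dlat = remap_res / 110.0                  # rough km -> degree conversion (assumption)
    nlon = int(round((box['lon'].stop - box['lon'].start) / dlon))
    nlat = int(round((box['lat'].stop - box['lat'].start) / dlat))
    with open(target_grid, 'w') as f:
        f.write('gridtype = lonlat\n')
        f.write('xsize    = {}\n'.format(nlon))
        f.write('ysize    = {}\n'.format(nlat))
        f.write('xfirst   = {}\n'.format(box['lon'].start))
        f.write('xinc     = {}\n'.format(dlon))
        f.write('yfirst   = {}\n'.format(box['lat'].start))
        f.write('yinc     = {}\n'.format(dlat))

# Example call (values illustrative):
# write_grid_des_file({'lon': slice(55.0, 95.0), 'lat': slice(-10.0, 10.0)},
#                     'grids/latlon_3km_dom_example', 3)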