Commit 4aee4cea authored by Christoph Heim

Updates for ARPEGE; not sure what exactly.

parent c0a02a1e
@@ -15,7 +15,7 @@ from datetime import datetime, timedelta
from multiprocessing import Pool
from pathlib import Path
from cdo import Cdo
from package.utilities import Timer, cdo_mergetime, write_grid_file, cd
from package.utilities import Timer, cdo_mergetime, write_grid_des_file, cd
from namelist import domain, padding
from functions import paste_dir_names
###############################################################################
@@ -151,9 +151,14 @@ def sellatlon_ARPEGE(inp_file, out_file, dt, box, options, var_name, var_dicts,
# Changing directory allows to run in parallel by preventing
# _tmpfiles from being overwritten by parallel processes.
with cd(tmp_dir):
#subprocess.call([command, inp_file,
# os.path.split(split_files)[1]],
# stdout=subprocess.DEVNULL)
print(inp_file)
subprocess.call([command, inp_file,
os.path.split(split_files)[1]],
stdout=subprocess.DEVNULL)
os.path.split(split_files)[1]])
quit()
# remove all split variables that are irrelevant
keep_files = []
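Note on the `with cd(tmp_dir)` block above: `cd` is imported from package.utilities and, judging by the in-code comment, temporarily switches the working directory so that each parallel worker writes its _tmpfiles into its own tmp_dir. A minimal sketch of such a helper, as an illustrative assumption rather than the repository's actual implementation:

import os
from contextlib import contextmanager

@contextmanager
def cd(path):
    # Temporarily change the working directory and restore it on exit,
    # so per-process temporary files stay inside the given directory.
    prev = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(prev)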
@@ -172,53 +177,53 @@ def sellatlon_ARPEGE(inp_file, out_file, dt, box, options, var_name, var_dicts,
for file in search:
os.remove(file)
# Check if valid files remain
tmp_files = glob.glob(get_splf(split_files, var_dicts[var_name]))
# if not, this means that the grib file is broken
if len(tmp_files) == 0:
# write this to the broken grib files list
with open(broken_grib_file, 'a') as f:
f.write(file_code+'\n')
print('No valid files for ' + file_code)
return(TM)
TM.stop('prep')
# if grid does not exist compute it
TM.start('grid')
if not os.path.exists(weights_file):
comp_weights_file(target_grid, weights_file,
tmp_files[0], grid_def_file,
res, box, options)
TM.stop('grid')
# cdo
TM.start('cdo')
merge_files = []
for tmp_file in tmp_files:
input = ("-sellonlatbox,{},{},{},{} -setgrid,{}"+
" -setgridtype,regular {}").format(
box['lon'].start, box['lon'].stop,
box['lat'].start, box['lat'].stop,
grid_def_file,
tmp_file)
if var_dicts[var_name]['vdim'] == '3D':
out_file_use = tmp_file + '.nc'
merge_files.append(out_file_use)
else:
out_file_use = out_file
if options['rm_tmp_files'] and os.path.exists(out_file_use):
os.remove(out_file_use)
if not os.path.exists(out_file_use):
ofile = cdo.remap(target_grid, weights_file,
input=input, output=out_file_use,
options='-f nc4')
# merge vertical levels
if var_dicts[var_name]['vdim'] == '3D':
merge_files.sort()
cdo.merge(input=merge_files, output=out_file)
TM.stop('cdo')
## Check if valid files remain
#tmp_files = glob.glob(get_splf(split_files, var_dicts[var_name]))
## if not, this means that the grib file is broken
#if len(tmp_files) == 0:
# # write this to the broken grib files list
# with open(broken_grib_file, 'a') as f:
# f.write(file_code+'\n')
# print('No valid files for ' + file_code)
# return(TM)
#TM.stop('prep')
## if grid does not exist compute it
#TM.start('grid')
#if not os.path.exists(weights_file):
# comp_weights_file(target_grid, weights_file,
# tmp_files[0], grid_def_file,
# res, box, options)
#TM.stop('grid')
## cdo
#TM.start('cdo')
#merge_files = []
#for tmp_file in tmp_files:
# input = ("-sellonlatbox,{},{},{},{} -setgrid,{}"+
# " -setgridtype,regular {}").format(
# box['lon'].start, box['lon'].stop,
# box['lat'].start, box['lat'].stop,
# grid_def_file,
# tmp_file)
# if var_dicts[var_name]['vdim'] == '3D':
# out_file_use = tmp_file + '.nc'
# merge_files.append(out_file_use)
# else:
# out_file_use = out_file
# if options['rm_tmp_files'] and os.path.exists(out_file_use):
# os.remove(out_file_use)
# if not os.path.exists(out_file_use):
# ofile = cdo.remap(target_grid, weights_file,
# input=input, output=out_file_use,
# options='-f nc4')
## merge vertical levels
#if var_dicts[var_name]['vdim'] == '3D':
# merge_files.sort()
# cdo.merge(input=merge_files, output=out_file)
#TM.stop('cdo')
return(TM)
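For reference, the remapping step in this hunk chains several CDO operators before the actual remap: -sellonlatbox crops the requested lon/lat box, -setgrid attaches the grid description from grid_def_file, and -setgridtype,regular converts the native reduced grid to a regular one; the result is then remapped onto the target grid with precomputed weights (see comp_weights_file above). A standalone sketch of the same call through the python-cdo bindings, with placeholder file names that are assumptions for illustration only:

from cdo import Cdo

cdo = Cdo()
# Crop the lon/lat box, attach the grid description, convert the reduced
# native grid to a regular grid, then remap onto the target grid using
# precomputed weights; write the result as NetCDF4.
cdo.remap('grids/latlon_3km_dom_EXAMPLE',        # target grid (placeholder)
          'weights_EXAMPLE.nc',                  # precomputed weights (placeholder)
          input='-sellonlatbox,0,20,-10,10 -setgrid,grid_def.txt '
                '-setgridtype,regular tmp_field.grb',
          output='out_field.nc',
          options='-f nc4')

For 3D variables the loop collects one such NetCDF file per vertical level in merge_files and joins them afterwards with cdo.merge.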
@@ -255,8 +260,8 @@ if __name__ == '__main__':
# missing: P (derive with PS and vertical grid)
# missing: SST
#'3D':['U', 'V'],
#'3D':['U'],
'2D':['PS'],
'3D':['T', 'W'],
#'2D':['PS'],
#'2D':['LWUTOA', 'T2M'],
}
@@ -265,7 +270,7 @@ if __name__ == '__main__':
main_vars = {'3D':'T','2D':'LWUTOA'}
run_var_type = '3D'
run_var_type = '2D'
#run_var_type = '2D'
var_names = var_namess[run_var_type]
main_var = main_vars[run_var_type]
@@ -296,11 +301,16 @@ if __name__ == '__main__':
##### 3D
elif run_var_type == '3D':
pass
#first_date = datetime(2016,8,1,3)
#last_date = datetime(2016,9,9)
###### DONE
first_date = datetime(2016,8,1,3)
last_date = datetime(2016,9,9)
###### DONE
#first_date = datetime(2016,8,1,3)
#last_date = datetime(2016,8,2,3)
first_date = datetime(2016,8,4,0)
last_date = datetime(2016,8,4,0)
@@ -390,7 +400,7 @@ if __name__ == '__main__':
# target grid on which to interpolate the model output
target_grid = os.path.join('grids','latlon_{}km_dom_{}'.format(
remap_res, domain['code']))
write_grid_file(box, target_grid, remap_res)
write_grid_des_file(box, target_grid, remap_res)
# find times and files that should be extracted
# and prepare arguments for function
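The write_grid_des_file call above (renamed from write_grid_file in this commit) presumably writes a CDO grid-description file for the regular lat-lon target grid passed to cdo.remap. A rough sketch of what such a writer could look like; the function name, signature, and km-to-degree conversion below are assumptions for illustration, not the code in package.utilities:

def write_grid_des_file_sketch(box, grid_des_path, res_km):
    # Rough km -> degree conversion for the grid spacing (assumption).
    dlon = dlat = res_km / 110.0
    nlon = int((box['lon'].stop - box['lon'].start) / dlon)
    nlat = int((box['lat'].stop - box['lat'].start) / dlat)
    # Write a CDO grid description ("griddes") for a regular lon-lat grid.
    with open(grid_des_path, 'w') as f:
        f.write('gridtype = lonlat\n')
        f.write('xsize    = {}\n'.format(nlon))
        f.write('ysize    = {}\n'.format(nlat))
        f.write('xfirst   = {}\n'.format(box['lon'].start))
        f.write('xinc     = {}\n'.format(dlon))
        f.write('yfirst   = {}\n'.format(box['lat'].start))
        f.write('yinc     = {}\n'.format(dlat))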