Commit a5112809 authored by Christoph Heim

Added time axis handling to SAM and MPAS.

parent 37025f0b
......@@ -4,7 +4,7 @@
description: Extract lat-lon box of data from model SAM.
author: Christoph Heim
date created: 20.06.2019
date changed: 18.07.2019
date changed: 22.07.2019
usage: arguments:
1st: n jobs for multiprocessing pool
python: 3.5.2
......@@ -55,15 +55,19 @@ def sellatlon_SAM(inp_file, out_file, dt, box, options, var_name, res):
# cdo
TM.start('cdo')
if var_dict[var_name]['loc'] == 'OUT_3D':
time_fmt = '{:%Y-%m-%d,%H:%M:%S,3hour}'
elif var_dict[var_name]['loc'] == 'OUT_2D':
time_fmt = '{:%Y-%m-%d,%H:%M:%S,30min}'
ofile = cdo.sellonlatbox(
box['lon'].start,box['lon'].stop,
box['lat'].start,box['lat'].stop,
input=('-sellevidx,'+str(box['vert0'])+'/'+
str(box['vert1'])+' -settaxis,'+
'{:%Y-%m-%d,%H:%M:%S,3hour}'.format(dt)+
' '+nco_file),
str(box['vert1'])+
' -setreftime,2016-08-01,00:00:00,minutes'+
' -settaxis,'+time_fmt.format(dt)+
' '+nco_file),
output=out_file)
TM.stop('cdo')
# delete tmp_file
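Note on the SAM change above: the settaxis argument is built by formatting the time step into a string that already carries the cdo increment (3hour for OUT_3D, 30min for OUT_2D), and the new setreftime call fixes a common reference time. A minimal sketch, assuming an arbitrary example date for a 3-hourly OUT_3D file:

from datetime import datetime

dt = datetime(2016, 8, 10, 3, 0)            # assumed example time step
time_fmt = '{:%Y-%m-%d,%H:%M:%S,3hour}'     # OUT_3D files: 3-hourly increment
print(time_fmt.format(dt))                  # -> 2016-08-10,03:00:00,3hour
# cdo then receives: -settaxis,2016-08-10,03:00:00,3hour
# while -setreftime,2016-08-01,00:00:00,minutes stores the time coordinate
# as minutes since 2016-08-01 00:00:00 in the output file.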
......@@ -93,9 +97,10 @@ if __name__ == '__main__':
# variables to extract
var_names = ['QV', 'QC', 'T', 'W',
'U10M', 'V10M', 'T2M', 'LWUTOA', 'SWDSFC',
'U10M', 'V10M', 'T2M',
'LWUTOA', 'SWDSFC',
'SLHFLX', 'SSHFLX', 'TQC']
#var_names = ['T2M']
#var_names = ['QV', 'QC', 'T', 'W']
# model resolutions [km] of simulations
ress = [4]
......
......@@ -4,7 +4,7 @@
description: Extract lat-lon box of data from model MPAS.
author: Christoph Heim
date created: 05.07.2019
date changed: 19.07.2019
date changed: 22.07.2019
usage: arguments:
1st: n jobs for multiprocessing pool
MPAS_3.75 : 3D var: 2 jobs
......@@ -23,6 +23,44 @@ from functions import paste_dir_names
###############################################################################
def fix_time_MPAS(out_file, dt, var_dict, var_name):
if os.path.exists(out_file):
file_code = '{}km_{}_{:%Y%m%d%H%M}'.format(res, var_name, dt)
print('\t\t'+file_code)
domain_str = "{},{},{},{}".format(
box['lon'].start, box['lon'].stop,
box['lat'].start, box['lat'].stop)
levels_str = "{}/{}".format(
box['vert0'], box['vert1'])
tmp_file = os.path.join(os.path.split(out_file)[0],
'temp_'+file_code+'.nc')
subprocess.call(['mv', out_file, tmp_file])
if var_dict['type'] == 'history':
time_fmt = '{:%Y-%m-%d,%H:%M:%S,3hour}'
elif var_dict['type'] == 'diag':
time_fmt = '{:%Y-%m-%d,%H:%M:%S,15min}'
cdo.setreftime('2016-08-01,00:00:00,minutes',
input=(' -settaxis,'+time_fmt.format(dt)+
' '+ tmp_file),
output=out_file)
if var_dict['type'] == 'history':
vdim = var_dict['vdim']
elif var_dict['type'] == 'diag':
vdim = 'nodim'
subprocess.call(['ncpdq', '-O',
'--rdr=time,{},lat,lon'.format(vdim),
out_file, out_file])
os.remove(tmp_file)
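A minimal usage sketch of the new fix_time_MPAS helper, assuming the module-level names it relies on (res, box, cdo, and the os/subprocess imports) are already set up as in __main__; the file name and date below are placeholders:

from datetime import datetime

dt = datetime(2016, 8, 10, 0, 0)                      # assumed time step
out_file = 'out/W_3.75km_201608100000.nc'             # assumed already-extracted file
var_dict_W = {'file': 'w', 'type': 'history', 'vdim': 'nVertLevelsP1'}

# moves out_file aside, re-applies setreftime/settaxis, reorders the
# dimensions to time,nVertLevelsP1,lat,lon with ncpdq, and removes the
# temporary copy
fix_time_MPAS(out_file, dt, var_dict_W, 'W')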
def sellatlon_MPAS(inp_file, out_file, dt, box, options, var_dict,
target_grid, var_name, res):
......@@ -42,23 +80,31 @@ def sellatlon_MPAS(inp_file, out_file, dt, box, options, var_dict,
box['lat'].start, box['lat'].stop)
levels_str = "{}/{}".format(
box['vert0'], box['vert1'])
if var_dict['type'] == 'history':
time_fmt = '{:%Y-%m-%d,%H:%M:%S,3hour}'.format(dt)
elif var_dict['type'] == 'diag':
time_fmt = '{:%Y-%m-%d,%H:%M:%S,15min}'.format(dt)
if var_dict['type'] == 'history':
vdim = var_dict['vdim']
elif var_dict['type'] == 'diag':
vdim = 'nodim'
if i_bash_output:
subprocess.call(['./run_MPAS.sh', domain_str, levels_str,
str(res), os.path.split(out_file)[0],
os.path.split(out_file)[1][:-3],
var_dict['file'], inp_file, target_grid,
], stdout=subprocess.DEVNULL)
time_fmt, vdim])
else:
subprocess.call(['./run_MPAS.sh', domain_str, levels_str,
str(res), os.path.split(out_file)[0],
os.path.split(out_file)[1][:-3],
var_dict['file'], inp_file, target_grid,
time_fmt, vdim
], stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL)
TM.stop('run')
#if i_bash_output:
TM.print_report(short=True)
return(TM)
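For reference, a hedged sketch of the full argument list that sellatlon_MPAS now hands to run_MPAS.sh; all values are placeholders, and positions 9 (time_fmt) and 10 (vdim) are the ones added in this commit:

import os
import subprocess
from datetime import datetime

dt = datetime(2016, 8, 10, 0, 0)                       # assumed time step
out_file = 'out/T_3.75km_201608100000.nc'              # assumed output path
time_fmt = '{:%Y-%m-%d,%H:%M:%S,3hour}'.format(dt)     # history files: 3-hourly
vdim = 'nVertLevels'                                   # vertical dim of temperature

subprocess.call(['./run_MPAS.sh',
                 '-14,4,-24,-6',                       # 1: sellonlatbox box (assumed)
                 '1/28',                               # 2: sellevidx range (assumed)
                 '3.75',                               # 3: resolution [km]
                 os.path.split(out_file)[0],           # 4: output directory
                 os.path.split(out_file)[1][:-3],      # 5: output name without '.nc'
                 'temperature',                        # 6: NC variable key
                 'history.nc',                         # 7: input file (assumed)
                 'target_grid_3.75km.txt',             # 8: target grid (assumed)
                 time_fmt,                             # 9: settaxis argument (new)
                 vdim],                                # 10: vertical dimension (new)
                stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)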
......@@ -91,11 +137,11 @@ if __name__ == '__main__':
#run_var_type = '2D'
var_names = var_namess[run_var_type]
#var_names = ['T']
#ress = [7.5, 3.75]
ress = [3.75]
ress = [7.5]
#ress = [7.5]
i_bash_output = 0
......@@ -117,10 +163,10 @@ if __name__ == '__main__':
'2019_06_Hackathon_Mainz', 'falko')
var_dict = {
'QV' :{'file':'qv', 'type':'history'},
'QC' :{'file':'qc', 'type':'history'},
'T' :{'file':'temperature', 'type':'history'},
'W' :{'file':'w', 'type':'history'},
'QV' :{'file':'qv', 'type':'history','vdim':'nVertLevels'},
'QC' :{'file':'qc', 'type':'history','vdim':'nVertLevels'},
'T' :{'file':'temperature', 'type':'history','vdim':'nVertLevels'},
'W' :{'file':'w', 'type':'history','vdim':'nVertLevelsP1'},
'U10M' :{'file':'u10', 'type':'diag'},
'V10M' :{'file':'v10', 'type':'diag'},
......@@ -145,7 +191,6 @@ if __name__ == '__main__':
## PREPARING STEPS
TM = Timer()
cdo = Cdo()
if len(sys.argv) > 1:
......@@ -200,6 +245,8 @@ if __name__ == '__main__':
var_dict[var_name],
target_grid, var_name, res) )
#fix_time_MPAS(out_file, dt, var_dict[var_name], var_name)
# run function serial or parallel
if n_tasks > 1:
with Pool(processes=n_tasks) as pool:
......@@ -208,7 +255,7 @@ if __name__ == '__main__':
results = []
for arg in args:
results.append(sellatlon_MPAS(*arg))
# collect timings from subtasks
for task_TM in results:
TM.merge_timings(task_TM)
......
#!/bin/bash
store=/work/ka1081/2019_06_Hackathon_Mainz/christoph_heim
o3_dir=heimc@fog.ethz.ch:/net/o3/hymet_nobackup/heimc/data/dyamond
#domain=N_Atl_Sc
#domain=SE_Pac_Sc
domain=SE_Atl_Sc
models=(GEOS_3 ICON_10 ICON_2.5 IFS_4 IFS_9 MPAS_3.75 MPAS_7.5 \
NICAM_3.5 NICAM_7 SAM_4 UM_5 ARPEGE-NH_2.5 FV3_3.25)
models=(NICAM_7 NICAM_3.5)
#models=(SAM_4)
models=(SAM_4)
#models=(ICON_10 ICON_2.5)
#models=(UM_5)
#models=(MPAS_7.5 MPAS_3.75)
......@@ -13,18 +20,8 @@ models=(NICAM_7 NICAM_3.5)
#models=(ARPEGE-NH_2.5)
#models=(FV3_3.25)
models=(GEOS_3 ICON_10 ICON_2.5 IFS_4 IFS_9 MPAS_7.5 \
NICAM_3.5 NICAM_7 SAM_4 UM_5 ARPEGE-NH_2.5 FV3_3.25)
#domain=N_Atl_Sc
#domain=SE_Pac_Sc
domain=SE_Atl_Sc
store=/work/ka1081/2019_06_Hackathon_Mainz/christoph_heim
o3_dir=heimc@fog.ethz.ch:/net/o3/hymet_nobackup/heimc/data/dyamond
##########################################################################
for model in ${models[@]}; do
echo $model
......@@ -35,12 +32,12 @@ done
##########################################################################
#var_name=T
#var_name=LWUTOA
#for model in ${models[@]}; do
# echo $model
# orig_path=$store/newdata/$model/$domain/${var_name}.nc
# dest_path=heimc@fog.ethz.ch:/net/o3/hymet_nobackup/heimc/data/dyamond/$model/$domain
# dest_path=$o3_dir/$model/$domain
# scp -r $orig_path $dest_path
#done
##########################################################################
......@@ -2,7 +2,7 @@
#description: Extract lat-lon box of data from model MPAS.
#author: Christoph Heim
#date created: 19.07.2019
#date changed: 19.07.2019
#date changed: 22.07.2019
#usage: arguments:
# 1.: cdo sellonlatbox argument
# 2.: cdo sellevidx argument
......@@ -12,6 +12,8 @@
# 6.: var_name: NC key of variable in file
# 7.: inp_file: path to input file
# 8.: target_grid: description in file for target grid
# 9.: time_cdo_fmt: time string for cdo settimeaxis
# 10.: vdim: name of vertical dimension in nc file
###############################################################################
module load nco
......@@ -34,6 +36,8 @@ out_name=$5
var_name=$6
inp_file=$7
target_grid=$8
time_cdo_fmt=$9
vdim=${10}
#echo $domain
#echo $levels
......@@ -43,6 +47,9 @@ target_grid=$8
#echo $var_name
#echo $inp_file
#echo $target_grid
#echo $time_cdo_fmt
#echo $vdim
#exit
out_dir=$out_base_dir/dirtmp_$out_name
grid_def=$out_base_dir/../MPAS_${res}km_grid.nc
......@@ -64,9 +71,12 @@ cdo -P $n_jobs -gennn,$target_grid \
$out_dir/temp2.nc $out_dir/weights.nc
cdo -P $n_jobs -f nc4 -O \
remap,$target_grid,$out_dir/weights.nc \
-setreftime,2016-08-01,00:00:00,minutes \
-settaxis,$time_cdo_fmt \
-remap,$target_grid,$out_dir/weights.nc \
-sellevidx,$levels \
$out_dir/temp2.nc $out_file
#ncpdq -O --rdr=xtime,nVertLevelsP1,lat,lon \
# $out_dir/temp3.nc $out_file
$out_dir/temp2.nc $out_dir/temp3.nc #$out_file
ncpdq -O --rdr=time,$vdim,lat,lon \
$out_dir/temp3.nc $out_file
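The same post-processing order as the run_MPAS.sh chain above, mirrored as a hedged python-cdo/nco sketch with placeholder file names and parameters: cdo applies chained operators right to left, so the levels are selected, the field is remapped, the time axis is set, the reference time is set, and ncpdq then reorders the dimensions:

import subprocess
from cdo import Cdo

cdo = Cdo()
target_grid = 'target_grid_3.75km.txt'     # assumed grid description file
weights = 'weights.nc'                     # assumed gennn remap weights
time_fmt = '2016-08-10,00:00:00,3hour'     # assumed settaxis argument
levels = '1/28'                            # assumed sellevidx range
vdim = 'nVertLevels'                       # assumed vertical dimension name

cdo.setreftime('2016-08-01,00:00:00,minutes',
               input=('-settaxis,' + time_fmt +
                      ' -remap,' + target_grid + ',' + weights +
                      ' -sellevidx,' + levels + ' temp2.nc'),
               options='-f nc4 -O',
               output='temp3.nc')
subprocess.call(['ncpdq', '-O', '--rdr=time,' + vdim + ',lat,lon',
                 'temp3.nc', 'out.nc'])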