Commit b70926ab authored by Christoph Heim

Working on extraction for a larger domain: widen the vertical boxes from a ~3 km to a ~6 km top, add precipitation variables, and extend the date range into September 2016.

parent 4394b1c3
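Every script below follows the same box-selection pattern: the vertical index range is widened from the previous ~3 km top to a ~6 km top, and the horizontal slices are padded on both sides. A minimal sketch of that pattern, assuming domain is a dict of lat/lon slices and padding a width in degrees (both are defined earlier in each script; the example indices here are the NICAM ones, the other models use their own):

# Sketch of the recurring box setup; domain and padding values are assumed examples.
padding = 2                                             # assumed padding width [deg]
domain = {'lon': slice(-20, 10), 'lat': slice(-30, 0)}  # assumed example domain

box = domain                                 # note: box aliases domain in the scripts
#box.update({'vert0':1,'vert1':18})          # 3km top (previous setting)
box.update({'vert0':1,'vert1':26})           # 6km top (this commit; indices differ per model)
box['lon'] = slice(box['lon'].start - padding, box['lon'].stop + padding)
box['lat'] = slice(box['lat'].start - padding, box['lat'].stop + padding)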
......@@ -4,7 +4,7 @@
description: Extract lat-lon box of data from model NICAM.
author: Christoph Heim
date created: 27.06.2019
date changed: 18.07.2019
date changed: 04.09.2019
usage: arguments:
1st: n jobs for multiprocessing pool
python: 3.5.2
......@@ -65,7 +65,8 @@ if __name__ == '__main__':
# lat lon vert box to subselect
box = domain
box.update({'vert0':1,'vert1':18})
#box.update({'vert0':1,'vert1':18}) # 3km
box.update({'vert0':1,'vert1':26}) # 6km
box['lon'] = slice(box['lon'].start - padding, box['lon'].stop + padding)
box['lat'] = slice(box['lat'].start - padding, box['lat'].stop + padding)
......@@ -75,8 +76,8 @@ if __name__ == '__main__':
# variables to extract
var_names = ['QV', 'QC', 'T', 'W',
'U10M', 'V10M', 'T2M', 'LWUTOA', 'SWDSFC',
'SLHFLX', 'SSHFLX', 'TQC']
#var_names = ['T']
'SLHFLX', 'SSHFLX', 'TQC', 'PP']
var_names = ['PP']
# model resolutions [km] of simulations
ress = [7, 3.5]
......@@ -85,7 +86,7 @@ if __name__ == '__main__':
# date range
first_date = datetime(2016,8,11)
last_date = datetime(2016,8,20)
last_date = datetime(2016,9,9)
# options for computation
options = {}
......@@ -110,6 +111,7 @@ if __name__ == '__main__':
'SLHFLX':{'file':'ss_lh_sfc', 'dim':'2D', },
'SSHFLX':{'file':'ss_sh_sfc', 'dim':'2D', },
'TQC' :{'file':'sa_cldw', 'dim':'2D', },
'PP' :{'file':'sa_tppn', 'dim':'2D', },
}
###########################################################################
......
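Each script loops over a dt_range built from first_date and last_date at a model- and variable-specific increment (the inc_min entries further down); the construction itself is not shown in this diff. A minimal sketch of one plausible way to build it, with the increment as an assumed example value:

# Sketch: build dt_range between first_date and last_date (not part of this diff).
from datetime import datetime, timedelta

first_date = datetime(2016,8,11)
last_date = datetime(2016,9,9)
inc_min = 60          # assumed example increment [min]; varies per model/variable

dt_range = []
dt = first_date
while dt <= last_date:
    dt_range.append(dt)
    dt += timedelta(minutes=inc_min)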
......@@ -4,7 +4,7 @@
description: Extract lat-lon box of data from model SAM.
author: Christoph Heim
date created: 20.06.2019
date changed: 22.07.2019
date changed: 04.09.2019
usage: arguments:
1st: n jobs for multiprocessing pool
python: 3.5.2
......@@ -88,7 +88,8 @@ if __name__ == '__main__':
# lat lon vert box to subselect
box = domain
box.update({'vert0':1,'vert1':28})
#box.update({'vert0':1,'vert1':28}) #3km
box.update({'vert0':1,'vert1':35}) #6km
box['lon'] = slice(box['lon'].start - padding, box['lon'].stop + padding)
box['lat'] = slice(box['lat'].start - padding, box['lat'].stop + padding)
......@@ -99,15 +100,14 @@ if __name__ == '__main__':
var_names = ['QV', 'QC', 'T', 'W',
'U10M', 'V10M', 'T2M',
'LWUTOA', 'SWDSFC',
'SLHFLX', 'SSHFLX', 'TQC']
#var_names = ['QV', 'QC', 'T', 'W']
'SLHFLX', 'SSHFLX', 'TQC', 'PP']
# model resolutions [km] of simulations
ress = [4]
# date range
first_date = datetime(2016,8,11)
last_date = datetime(2016,8,20)
last_date = datetime(2016,9,9)
# options for computation
options = {}
......@@ -145,6 +145,8 @@ if __name__ == '__main__':
'loc':'OUT_2D','fntime':(-20,-10),},
'TQC' :{'file':'CWP',
'loc':'OUT_2D','fntime':(-20,-10),},
'PP' :{'file':'Precac',
'loc':'OUT_2D','fntime':(-23,-13),},
}
......
......@@ -4,7 +4,7 @@
description: Extract lat-lon box of data from model ICON.
author: Christoph Heim
date created: 27.06.2019
date changed: 18.07.2019
date changed: 04.09.2019
usage: arguments:
1st: n jobs for multiprocessing pool
python: 3.5.2
......@@ -86,7 +86,8 @@ if __name__ == '__main__':
# vert box to subselect
box = domain
box.update({'vert0':73-14,'vert1':91-14})
#box.update({'vert0':73-14,'vert1':91-14}) #3km top
box.update({'vert0':64-14,'vert1':91-14}) #6km top
box['lon'] = slice(box['lon'].start - padding, box['lon'].stop + padding)
box['lat'] = slice(box['lat'].start - padding, box['lat'].stop + padding)
......@@ -96,17 +97,17 @@ if __name__ == '__main__':
# variables to extract
var_names = ['QV', 'QC', 'T', 'W',
'U10M', 'V10M', 'T2M', 'LWUTOA', 'SWNSFC', 'SWDIFFUSFC',
'SLHFLX', 'SSHFLX', 'TQC']
'SLHFLX', 'SSHFLX', 'TQC', 'PP']
#var_names = ['LWUTOA']
# model resolutions [km] of simulations
#ress = [10,5,2.5]
ress = [2.5]
ress = [10]
#ress = [10]
# date range
first_date = datetime(2016,8,11)
last_date = datetime(2016,8,20)
last_date = datetime(2016,9,9)
# options for computation
options = {}
......@@ -134,6 +135,7 @@ if __name__ == '__main__':
'SLHFLX' :{'file':'atm2_2d_ml', 'dim':'2d', 'key':'LHFL_S'},
'SSHFLX' :{'file':'atm2_2d_ml', 'dim':'2d', 'key':'SHFL_S'},
'TQC' :{'file':'atm1_2d_ml', 'dim':'2d', 'key':'TQC_DIA'},
'PP' :{'file':'atm2_2d_ml', 'dim':'2d', 'key':'TOT_PREC'},
}
grid_dict = {
10: {'grid_def_file':os.path.join(grid_def_base_dir,
......
......@@ -4,7 +4,7 @@
description: Extract lat-lon box of data from model UM.
author: Christoph Heim
date created: 05.07.2019
date changed: 18.07.2019
date changed: 04.09.2019
usage: arguments:
1st: n jobs for multiprocessing pool
python: 3.5.2
......@@ -56,7 +56,8 @@ if __name__ == '__main__':
# box to subselect
box = domain
box.update({'vert0':1,'vert1':20})
#box.update({'vert0':1,'vert1':21}) #3km top
box.update({'vert0':1,'vert1':30}) #6km top
box['lon'] = slice(box['lon'].start - padding, box['lon'].stop + padding)
box['lat'] = slice(box['lat'].start - padding, box['lat'].stop + padding)
......@@ -66,15 +67,14 @@ if __name__ == '__main__':
# variables to extract
var_names = ['QV', 'QC', 'T', 'W',
'U10M', 'V10M', 'T2M', 'LWUTOA', 'SWDSFC',
'SLHFLX', 'SSHFLX', 'TQC']
#var_names = ['LWUTOA']
'SLHFLX', 'SSHFLX', 'TQC', 'PP']
# model resolutions [km] of simulations
ress = [5]
# date range
first_date = datetime(2016,8,11)
last_date = datetime(2016,8,20)
last_date = datetime(2016,9,8)
# options for computation
options = {}
......@@ -99,6 +99,7 @@ if __name__ == '__main__':
'SLHFLX':{'file':'hfls',},
'SSHFLX':{'file':'hfss',},
'TQC' :{'file':'clwvi',},
'PP' :{'file':'pr',},
}
###########################################################################
......@@ -143,6 +144,7 @@ if __name__ == '__main__':
# find times and files that should be extracted
# and prepare arguments for function
for dt in dt_range:
print(dt)
inp_file = glob.glob(os.path.join(inp_dir,
var_dict[var_name]['file'],
'*{:%Y%m%d}*.nc'.format(dt)))[0]
......
......@@ -4,7 +4,7 @@
description: Extract lat-lon box of data from model MPAS.
author: Christoph Heim
date created: 05.07.2019
date changed: 22.07.2019
date changed: 05.09.2019
usage: arguments:
1st: n jobs for multiprocessing pool
MPAS_3.75 : 3D var: 2 jobs
......@@ -120,7 +120,8 @@ if __name__ == '__main__':
# box to subselect
box = domain
box.update({'vert0':1,'vert1':22})
#box.update({'vert0':1,'vert1':22}) #3km top
box.update({'vert0':1,'vert1':31}) #6km top
box['lon'] = slice(box['lon'].start - padding, box['lon'].stop + padding)
box['lat'] = slice(box['lat'].start - padding, box['lat'].stop + padding)
......@@ -130,15 +131,19 @@ if __name__ == '__main__':
# variables to extract
var_namess = {
'3D':['T', 'QV', 'QC', 'W'],
'2D':['LWUTOA', 'T2M', 'U10M', 'V10M', 'SWDSFC', 'TQC'],
'2D':['LWUTOA', 'T2M', 'U10M', 'V10M', 'SWDSFC', 'TQC', 'PPCONV', 'PPGRID'],
}
run_var_type = '3D'
#run_var_type = '2D'
run_var_type = '2D'
var_names = var_namess[run_var_type]
#var_names = ['T']
# working on
# 7.5 3D and 2D
# 3.75 2D and 3D
#ress = [7.5, 3.75]
ress = [3.75]
#ress = [7.5]
......@@ -147,7 +152,7 @@ if __name__ == '__main__':
# date range
first_date = datetime(2016,8,11)
last_date = datetime(2016,8,20)
last_date = datetime(2016,9,9)
# options for computation
options = {}
......@@ -177,6 +182,8 @@ if __name__ == '__main__':
#'SLHFLX':{'file':'', 'type':'diag'},
#'SSHFLX':{'file':'', 'type':'diag'},
'TQC' :{'file':'vert_int_qc', 'type':'diag'},
'PPCONV':{'file':'rainc', 'type':'diag'},
'PPGRID':{'file':'rainnc', 'type':'diag'},
}
grid_dict = {
3.75:{'grid_def_file':os.path.join(grid_def_base_dir,
......@@ -236,14 +243,16 @@ if __name__ == '__main__':
# and prepare arguments for function
args = []
for dt in dt_range:
inp_file = glob.glob(os.path.join(
inp_dir,var_dict[var_name]['type']+
'.{:%Y-%m-%d_%H.%M.%S}.nc'.format(dt)))[0]
out_file = os.path.join(out_tmp_dir,
var_name+'_{:%Y%m%d%H%M}'.format(dt)+'.nc')
args.append( (inp_file, out_file, dt, box, options,
var_dict[var_name],
target_grid, var_name, res) )
# skip missing time step (2016-09-05 00:00):
if dt != datetime(2016,9,5,0,0,0):
inp_file = glob.glob(os.path.join(
inp_dir,var_dict[var_name]['type']+
'.{:%Y-%m-%d_%H.%M.%S}.nc'.format(dt)))[0]
out_file = os.path.join(out_tmp_dir,
var_name+'_{:%Y%m%d%H%M}'.format(dt)+'.nc')
args.append( (inp_file, out_file, dt, box, options,
var_dict[var_name],
target_grid, var_name, res) )
#fix_time_MPAS(out_file, dt, var_dict[var_name], var_name)
......
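The MPAS change above skips one hard-coded missing time step (2016-09-05 00:00). A possible generalization, sketched under the assumption that inp_dir, var_dict, var_name and dt are defined as in the script, is to skip any time step whose input file cannot be found rather than comparing against a fixed datetime:

# Sketch: skip any time step without an input file instead of hard-coding one datetime.
import glob, os

pattern = os.path.join(inp_dir, var_dict[var_name]['type'] +
                       '.{:%Y-%m-%d_%H.%M.%S}.nc'.format(dt))
matches = glob.glob(pattern)
if not matches:
    print('skipping missing time step {}'.format(dt))
else:
    inp_file = matches[0]
    # ... build out_file and append to args as in the script above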
......@@ -4,7 +4,7 @@
description: Extract lat-lon box of data from model IFS.
author: Christoph Heim
date created: 05.07.2019
date changed: 18.07.2019
date changed: 05.09.2019
usage: arguments:
1st: n jobs for multiprocessing pool
python: 3.5.2
......@@ -81,7 +81,8 @@ if __name__ == '__main__':
# box to subselect
box = domain
box.update({'vert0':105,'vert1':137})
#box.update({'vert0':105,'vert1':137}) #top at 3km
box.update({'vert0':94,'vert1':137}) #top at 6km
box['lon'] = slice(box['lon'].start - padding, box['lon'].stop + padding)
box['lat'] = slice(box['lat'].start - padding, box['lat'].stop + padding)
......@@ -91,16 +92,17 @@ if __name__ == '__main__':
# variables to extract
var_names = ['QV', 'QC', 'T', 'W',
'U10M', 'V10M', 'T2M', 'LWUTOA', 'SWDSFC',
'SLHFLX', 'SSHFLX', 'TQC']
#var_names = ['W']
'SLHFLX', 'SSHFLX', 'TQC', 'PPCONV', 'PPGRID']
var_names = ['V10M', 'T2M', 'LWUTOA', 'SWDSFC',
'SLHFLX', 'SSHFLX', 'TQC', 'PPCONV', 'PPGRID']
# model resolutions [km] of simulations
ress = [9,4]
#ress = [9]
ress = [4]
# date range
first_date = datetime(2016,8,11)
last_date = datetime(2016,8,20)
last_date = datetime(2016,9,9)
# options for computation
options = {}
......@@ -138,6 +140,10 @@ if __name__ == '__main__':
'group':'mars_out',},
'TQC' :{'file':'tclw', 'dim':'2D',
'group':'mars_out',},
'PPCONV':{'file':'crr', 'dim':'2D',
'group':'mars_out',},
'PPGRID':{'file':'lsrr', 'dim':'2D',
'group':'mars_out',},
}
base_time = datetime(2016,8,1)
......
......@@ -4,7 +4,7 @@
description: Extract lat-lon box of data from model GEOS.
author: Christoph Heim
date created: 09.07.2019
date changed: 18.07.2019
date changed: 06.09.2019
usage: arguments:
1st: n jobs for multiprocessing pool
python: 3.5.2
......@@ -61,7 +61,8 @@ if __name__ == '__main__':
# box to subselect
box = domain
box.update({'vert0':1,'vert1':13})
#box.update({'vert0':1,'vert1':13}) # top 3km
box.update({'vert0':1,'vert1':18}) # top 6km
box['lon'] = slice(box['lon'].start - padding, box['lon'].stop + padding)
box['lat'] = slice(box['lat'].start - padding, box['lat'].stop + padding)
......@@ -71,14 +72,15 @@ if __name__ == '__main__':
# variables to extract
var_names = ['QV', 'QC', 'T', 'H', 'W',
'U10M', 'V10M', 'T2M', 'LWUTOA', 'SWDSFC',
'SLHFLX', 'SSHFLX', 'TQC']
'SLHFLX', 'SSHFLX', 'TQC', 'PPCONV', 'PPGRID', 'PPANVI']
var_names = ['TQC', 'PPCONV', 'PPGRID', 'PPANVI']
# model resolutions [km] of simulations
ress = [3]
# date range
first_date = datetime(2016,8,11)
last_date = datetime(2016,8,20)
last_date = datetime(2016,9,1)
# options for computation
options = {}
......@@ -104,6 +106,9 @@ if __name__ == '__main__':
'SLHFLX':{'file':'geosgcm_surf','key':'LHFX'},
'SSHFLX':{'file':'geosgcm_surf','key':'SHFX'},
'TQC' :{'file':'geosgcm_conv','key':'CWP'},
'PPCONV':{'file':'geosgcm_surf','key':'CNPRCP'},
'PPGRID':{'file':'geosgcm_surf','key':'LSPRCP'},
'PPANVI':{'file':'geosgcm_surf','key':'ANPRCP'},
}
inc_min = {'geosgcm_prog':360, 'geosgcm_conv':15, 'geosgcm_surf':180}
offset_min = {'geosgcm_prog':0, 'geosgcm_conv':0, 'geosgcm_surf':90}
......@@ -151,13 +156,24 @@ if __name__ == '__main__':
# and prepare arguments for function
args = []
for dt in dt_range:
inp_file = glob.glob(os.path.join(
inp_dir,var_dict[var_name]['file'],
'*{:%Y%m%d_%H%M}z.nc4'.format(dt)))[0]
out_file = os.path.join(out_tmp_dir,
var_name+'_{:%Y%m%d%H%M}'.format(dt)+'.nc')
args.append( (inp_file, out_file, dt, box, options, var_name,
var_dict, res) )
# exception: skip files that no longer exist
if dt not in [datetime(2016,9,1,21,15),datetime(2016,9,1,21,30),
datetime(2016,9,1,21,45),datetime(2016,9,1,22,00),
datetime(2016,9,1,22,15),datetime(2016,9,1,22,30),
datetime(2016,9,1,22,45),datetime(2016,9,1,23,00),
datetime(2016,9,1,23,15),datetime(2016,9,1,23,30),
datetime(2016,9,1,23,45)]:
print(os.path.join(
inp_dir,var_dict[var_name]['file'],
'*{:%Y%m%d_%H%M}z.nc4'.format(dt)))
inp_file = glob.glob(os.path.join(
inp_dir,var_dict[var_name]['file'],
'*{:%Y%m%d_%H%M}z.nc4'.format(dt)))[0]
out_file = os.path.join(out_tmp_dir,
var_name+'_{:%Y%m%d%H%M}'.format(dt)+'.nc')
args.append( (inp_file, out_file, dt, box, options, var_name,
var_dict, res) )
# run function serial or parallel
if n_tasks > 1:
......
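The GEOS change above excludes a list of time steps on 2016-09-01 (21:15 to 23:45 in 15-minute increments) whose files no longer exist. A short sketch of how that window could be generated instead of listed by hand, covering the same steps:

# Sketch: build the 2016-09-01 21:15-23:45 exclusion window programmatically.
from datetime import datetime, timedelta

missing = {datetime(2016,9,1,21,15) + i*timedelta(minutes=15) for i in range(11)}
# usage, as in the loop above:
# if dt not in missing:
#     ...extract as before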
......@@ -4,7 +4,7 @@
description: Extract lat-lon box of data from model ARPEGE-NH.
author: Christoph Heim
date created: 09.07.2019
date changed: 18.07.2019
date changed: 09.09.2019
usage: arguments:
1st: n jobs for multiprocessing pool
python: 3.5.2
......@@ -241,7 +241,8 @@ if __name__ == '__main__':
# box to subselect
box = domain
box.update({'vert0':54,'vert1':75})
#box.update({'vert0':54,'vert1':75}) # 3km top
box.update({'vert0':45,'vert1':75}) # 6km top
box['lon'] = slice(box['lon'].start - padding, box['lon'].stop + padding)
box['lat'] = slice(box['lat'].start - padding, box['lat'].stop + padding)
......@@ -252,13 +253,13 @@ if __name__ == '__main__':
var_namess = {
'3D':['T', 'QV', 'QC', 'W', 'H'],
'2D':['LWUTOA', 'T2M', 'U10M', 'V10M', 'SWDSFC',
'SLHFLX', 'SSHFLX', 'TQC'],
'SLHFLX', 'SSHFLX', 'TQC', 'PP'],
}
# first variable that is read from grib file
main_vars = {'3D':'T','2D':'LWUTOA'}
run_var_type = '3D'
#run_var_type = '2D'
run_var_type = '2D'
var_names = var_namess[run_var_type]
main_var = main_vars[run_var_type]
......@@ -272,7 +273,7 @@ if __name__ == '__main__':
# 3D 10 - 19
# 2D -
first_date = datetime(2016,8,11)
last_date = datetime(2016,8,20)
last_date = datetime(2016,9,9)
# options for computation
options = {}
......@@ -299,6 +300,7 @@ if __name__ == '__main__':
'SLHFLX':{'file':'0.0.10', 'grb_srf':'t1', 'vdim':'2D',},
'SSHFLX':{'file':'0.0.11', 'grb_srf':'t1', 'vdim':'2D',},
'TQC' :{'file':'192.128.78', 'grb_srf':'t1', 'vdim':'2D',},
'PP' :{'file':'8.1.0', 'grb_srf':'t1', 'vdim':'2D',},
}
inc_min = {'3D':180, '2D':15}
remap_res = 2.5
......
......@@ -4,7 +4,7 @@
description: Extract lat-lon box of data from model FV3.
author: Christoph Heim
date created: 20.07.2019
date changed: 20.07.2019
date changed: 09.09.2019
usage: arguments:
1st: n jobs for multiprocessing pool
FV3_3.25 : 3D var: 5 jobs
......@@ -78,9 +78,14 @@ if __name__ == '__main__':
# box to subselect
box = domain
box.update({'plev' :{'vert0':23,'vert1':31},
'mfull':{'vert0':55,'vert1':79},
'mhalf':{'vert0':55,'vert1':80},},)
## 3km top
#box.update({'plev' :{'vert0':23,'vert1':31},
# 'mfull':{'vert0':55,'vert1':79},
# 'mhalf':{'vert0':55,'vert1':80},},)
## 6km top
box.update({'plev' :{'vert0':18,'vert1':31},
'mfull':{'vert0':42,'vert1':79},
'mhalf':{'vert0':42,'vert1':80},},)
box['lon'] = slice(box['lon'].start - padding, box['lon'].stop + padding)
box['lat'] = slice(box['lat'].start - padding, box['lat'].stop + padding)
......@@ -90,10 +95,10 @@ if __name__ == '__main__':
# variables to extract
var_names = ['H', 'T', 'QV', 'QC', 'W',
'U10M', 'V10M', 'T2M', 'LWUTOA', 'SWDSFC',
'TQC']
'TQC', 'PP']
var_names = ['H', 'T', 'QC', 'QV', 'W']
var_names = ['QC', 'QV', 'W']
#var_names = ['H', 'T', 'QC', 'QV', 'W']
#var_names = ['QC', 'QV', 'W']
#var_names = ['U10M', 'V10M', 'T2M', 'LWUTOA', 'SWDSFC',
# 'TQC']
ress = [3.25]
......@@ -102,7 +107,7 @@ if __name__ == '__main__':
# date range
first_date = datetime(2016,8,11) # must be 1,11,21,31
last_date = datetime(2016,8,20)
last_date = datetime(2016,9,9)
# options for computation
options = {}
......@@ -129,6 +134,7 @@ if __name__ == '__main__':
'SLHFLX':{'file':'lhflx_15min',},
'SSHFLX':{'file':'shflx_15min',},
'TQC' :{'file':'intql_15min',},
'PP' :{'file':'pr_15min',},
}
grid_dict = { 3.25:{'grid_def_file':'gridspec.nc',},}
###########################################################################
......
......@@ -4,7 +4,7 @@
description: Extract lat-lon box of data from model COSMO.
author: Christoph Heim
date created: 24.07.2019
date changed: 24.07.2019
date changed: 09.09.2019
usage: arguments:
1st: n jobs for multiprocessing pool
python: 3.5.2
......@@ -34,6 +34,23 @@ def sellatlon_COSMO(inp_file, out_file, dt, box, options, var_name, res):
TM.start('cdo')
print('\t\t'+file_code)
split = os.path.split(out_file)
nco_file = os.path.join(split[0],'nco_'+split[1])
# nco: extract the requested variable into a temporary file with ncrcat
if not os.path.exists(nco_file):
TM.start('nco')
bash_command = 'ncrcat -O -v {} {} {}'.format(
var_dict[var_name]['key'], inp_file,
nco_file)
process = subprocess.Popen(bash_command.split(),
stdout=subprocess.PIPE)
output, error = process.communicate()
TM.stop('nco')
else:
TM.start('nco')
TM.stop('nco')
if var_dict[var_name]['dim'] == '3D':
ofile = cdo.sellonlatbox(
box['lon'].start, box['lon'].stop,
......@@ -102,19 +119,20 @@ if __name__ == '__main__':
###########################################################################
sim_tag = 'SE_Atl'
var_dict = {
'QV' :{'key':'QV', 'folder':'3h_3D_dy', 'dim':'3D'},
'QC' :{'key':'QC', 'folder':'3h_3D_dy', 'dim':'3D'},
'T' :{'key':'T', 'folder':'3h_3D_dy', 'dim':'3D'},
'W' :{'key':'W', 'folder':'3h_3D_dy', 'dim':'3D'},
'U10M' :{'key':'U_10M', 'folder':'1h_dy', 'dim':'2D'},
'V10M' :{'key':'V_10M', 'folder':'1h_dy', 'dim':'2D'},
'T2M' :{'key':'T_2M', 'folder':'1h_dy', 'dim':'2D'},
'LWUTOA':{'key':'ATHB_T', 'folder':'1h_rad_dy', 'dim':'2D'},
'SWDSFC':{'key':'ASWD_S', 'folder':'1h_rad_dy', 'dim':'2D'},
'SLHFLX':{'key':'ALHFL_S', 'folder':'1h_dy', 'dim':'2D'},
'SSHFLX':{'key':'ASHFL_S', 'folder':'1h_dy', 'dim':'2D'},
'TQC' :{'key':'TQC', 'folder':'1h_vint_dy', 'dim':'2D'},
'QV' :{'key':'QV', 'folder':'3h_3D', 'dim':'3D'},
'QC' :{'key':'QC', 'folder':'3h_3D', 'dim':'3D'},
'T' :{'key':'T', 'folder':'3h_3D', 'dim':'3D'},
'W' :{'key':'W', 'folder':'3h_3D', 'dim':'3D'},
'U10M' :{'key':'U_10M', 'folder':'1h_2D', 'dim':'2D'},
'V10M' :{'key':'V_10M', 'folder':'1h_2D', 'dim':'2D'},
'T2M' :{'key':'T_2M', 'folder':'1h_2D', 'dim':'2D'},
'LWUTOA':{'key':'ATHB_T', 'folder':'1h_rad', 'dim':'2D'},
'SWDSFC':{'key':'ASWD_S', 'folder':'1h_rad', 'dim':'2D'},
'SLHFLX':{'key':'ALHFL_S', 'folder':'1h_2D', 'dim':'2D'},
'SSHFLX':{'key':'ASHFL_S', 'folder':'1h_2D', 'dim':'2D'},
'TQC' :{'key':'TQC', 'folder':'15min_water', 'dim':'2D'},
'PP' :{'key':'TOT_PREC', 'folder':'15min_water', 'dim':'2D'},
}
inc_min = {'3D':180, '2D':60}
###########################################################################
......
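The new nco step in sellatlon_COSMO runs ncrcat via subprocess.Popen without checking the return code. A defensive variant, sketched with the same command and file names (var_dict, var_name, inp_file and nco_file as defined in the function above), would surface ncrcat failures instead of continuing silently:

# Sketch: same ncrcat pre-extraction step, but failing loudly on errors.
import os
import subprocess

if not os.path.exists(nco_file):
    bash_command = 'ncrcat -O -v {} {} {}'.format(
                    var_dict[var_name]['key'], inp_file, nco_file)
    result = subprocess.run(bash_command.split(),
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if result.returncode != 0:
        raise RuntimeError('ncrcat failed: ' + result.stderr.decode())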
#!/bin/bash
## SOURCE
# mistral
#store=/work/ka1081/2019_06_Hackathon_Mainz/christoph_heim/newdata
store=/work/ka1081/2019_06_Hackathon_Mainz/christoph_heim/newdata
# daint
store=/project/pr04/heimc/data/dyamond
o3_dir=heimc@fog.ethz.ch:/net/o3/hymet_nobackup/heimc/data/dyamond
#store=/project/pr04/heimc/data/dyamond
## DESTINATION
## o3
#dest_dir=heimc@fog.ethz.ch:/net/o3/hymet_nobackup/heimc/data/dyamond
# ela
dest_dir=heimc@ela.cscs.ch:/project/pr04/heimc/data/dyamond
#domain=N_Atl_Sc
#domain=SE_Pac_Sc
domain=SE_Atl_Sc
domain=DYAMOND_2
models=(GEOS_3 ICON_10 ICON_2.5 IFS_4 IFS_9 MPAS_3.75 MPAS_7.5 \
NICAM_3.5 NICAM_7 SAM_4 UM_5 ARPEGE-NH_2.5 FV3_3.25)
models=(NICAM_7 NICAM_3.5)
models=(SAM_4)
#models=(SAM_4)
#models=(ICON_10 ICON_2.5)
#models=(UM_5)
#models=(MPAS_7.5 MPAS_3.75)
......@@ -22,31 +27,33 @@ models=(SAM_4)
#models=(GEOS_3)
#models=(ARPEGE-NH_2.5)
#models=(FV3_3.25)
models=(COSMO_12)
#models=(COSMO_12)
##########################################################################
#for model in ${models[@]}; do
# echo $model
# orig_path=$store/newdata/$model/$domain
# dest_path=$o3_dir/$model
# scp -r $orig_path $dest_path
#done
##########################################################################