Commit 37025f0b authored by Christoph Heim

All models extracted.

parent 4755ecbd
import os
from datetime import datetime, timedelta
import matplotlib.pyplot as plt
import xarray as xr
import numpy as np
from cdo import Cdo
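# WIP plotting script: area-mean (cdo fldmean) vertical profiles over the
# 'SCu' domain for ICON, NICAM and SAM at several resolutions, one subplot
# per day of the selected date range (temperature by default, QC optional).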
if __name__ == '__main__':
    base_path_sh = '/work/ka1081/2019_06_Hackathon_Mainz/shweta'
    base_path_ch = '/work/ka1081/2019_06_Hackathon_Mainz/christoph_heim'

    models = {
        'ICON':{
            'bp':base_path_sh,
            'res':[80,40,20,10,5,2.5],
            #'res':[10,5],
            'var_key':{'QV':'q', 'QC':'param212.1.0', 'T':'t'},
            'var_file':{'QV':'qv', 'QC':'tot_qc_dia', 'T':'t'},
            'vcoord':'height',
        },
        'NICAM':{
            'bp':base_path_ch,
            'res':[7,3.5],
            'var_key':{'QV':'q', 'QC':'ms_qc', 'T':'ms_tem'},
            'var_file':{'QV':'qv', 'QC':'ms_qc', 'T':'ms_tem'},
            'vcoord':'lev',
        },
        'SAM':{
            'bp':base_path_ch,
            'res':[4],
            'var_key':{'QV':'QV', 'QC':'QC', 'T':'TABS'},
            'var_file':{'QV':'QV', 'QC':'QC', 'T':'TABS'},
            'vcoord':'z',
        },
    }

    cdo = Cdo()

    var_name = 'T'
    #var_name = 'QC'

    #dt0 = datetime(2016,8,11)
    #dt1 = datetime(2016,8,12)
    dt_range = [datetime(2016,8,11), datetime(2016,8,12),
                datetime(2016,8,13), datetime(2016,8,14)]
    #dt_range = [datetime(2016,8,13)]
    fig,axes = plt.subplots(2,2, figsize=(12,12))

    for ti,dt in enumerate(dt_range):
        # map the date index onto the 2x2 panel grid
        ri = ti % 2
        ci = (ti - ri) // 2   # integer division: ci is used as an array index

        handles = []
        labels = []
        for mi,mk in enumerate(models.keys()):
            #mk = 'ICON'
            print(mk)
            for res in models[mk]['res']:
                #res = models[mk]['res'][0]
                #res = 80
                model_name = mk + '-' + str(res) + 'km'
                dt_str = '{:%Y%m%d}'.format(dt)
                file_name = os.path.join(models[mk]['bp'], 'SCu', 'data',
                                         model_name, dt_str,
                                         models[mk]['var_file'][var_name] + '.nc')
                print(file_name)
                # cdo.fldmean writes its result to a temporary netCDF file
                # and returns the path, which xarray opens here
                #field = cdo.fldmean(input=file_name, returnXArray='field')
                dataset = xr.open_dataset(cdo.fldmean(input=file_name,
                                                      options='-f nc'))
                #dataset[models[mk]['var_key'][var_name]].plot()
                # TODO: show to Ralf
                #field = dataset[models[mk]['var_key'][var_name]]
                #print(field)
                #try:
                if mk == 'ICON':
                    # keep only the lowest 18 model levels
                    #kind = 18
                    height = dataset.coords[models[mk]['vcoord']].values[-18:]
                    time = dataset.coords['time'].values[-18:]
                    values = dataset[models[mk]['var_key'][var_name]].\
                                    mean('time').values.squeeze()[-18:]
                else:
                    height = dataset.coords[models[mk]['vcoord']].values
                    time = dataset.coords['time'].values
                    values = dataset[models[mk]['var_key'][var_name]].\
                                    mean('time').values.squeeze()
                if mk == 'SAM' and var_name == 'QC':
                    # assumed: SAM QC is stored in g/kg -> convert to kg/kg (QC only)
                    values *= 0.001
                dataset.close()
                #except:
                #    pass
                ## fixes that are not nice..
                #print(height)
                #print(len(height))
                #print(np.arange(-len(height),-1,0))
                #quit()
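                # The block below swaps the raw ICON 'height' coordinate for
                # mid-level heights derived from 'ICON_hgt.txt' (assumed to
                # list the ICON level-interface heights, one value per line).
                # Midpoints of consecutive interfaces are used, e.g.
                # interfaces [0, 100, 250] give mid-levels [50, 175].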
                if mk == 'ICON':
                    ICON_hgt = np.loadtxt('ICON_hgt.txt')
                    ICON_hgt = ICON_hgt[:-1] + np.diff(ICON_hgt)/2
                    height = ICON_hgt[-len(height):]
                    print(len(height))

                print(values.shape, height.shape)
                label = mk + '_' + str(res)
                labels.append(label)
                ax = axes[ri,ci]
                line, = ax.plot(values,height, label=label)
                #line, = ax.plot(1000*values,height, label=label)
                handles.append(line)
                ax.set_title('{:%Y-%m-%d}'.format(dt))
                ax.set_ylim(0,3000)
                ax.set_xlim(280,305)
                ax.set_xlabel('Temperature [K]')
                #ax.set_xlabel('QC [g kg-1]')
                ax.set_ylabel('Height [m]')

    #fig.legend(handles=handles, labels=labels, bbox_to_anchor=[0.9,0.9])
    plt.legend(handles=handles, fontsize=9)
    #fig.subplots_adjust(right=0.9)
    plt.show()
    #plt.savefig('qc_prof.pdf', format='pdf')
    #plt.savefig('temp_prof.pdf', format='pdf')
    #plt.close(fig.number)
import os
from datetime import datetime, timedelta
import matplotlib.pyplot as plt
import xarray as xr
import numpy as np
from cdo import Cdo
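# Companion WIP script: time-height contours of the domain-mean cloud water
# (QC) over the 'SCu' domain, currently restricted to ICON; it uses the same
# data layout and cdo/xarray approach as the profile script above.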
if __name__ == '__main__':
    base_path_sh = '/work/ka1081/2019_06_Hackathon_Mainz/shweta'
    base_path_ch = '/work/ka1081/2019_06_Hackathon_Mainz/christoph_heim'

    models = {
        'ICON':{
            'bp':base_path_sh,
            'res':[80,40,20,10,5],#,2.5],
            'res':[5],
            'var_key':{'QV':'q', 'QC':'param212.1.0', 'T':'t'},
            'var_file':{'QV':'qv', 'QC':'tot_qc_dia', 'T':'t'},
            'vcoord':'height',
        },
        'NICAM':{
            'bp':base_path_ch,
            'res':[7,3.5],
            'var_key':{'QV':'q', 'QC':'ms_qc', 'T':'ms_tem'},
            'var_file':{'QV':'qv', 'QC':'ms_qc', 'T':'ms_tem'},
            'vcoord':'lev',
        },
    }

    cdo = Cdo()

    var_name = 'QC'

    #dt0 = datetime(2016,8,11)
    #dt1 = datetime(2016,8,12)
    dt_range = [datetime(2016,8,11), datetime(2016,8,12),
                datetime(2016,8,13)]
    dt_range = [datetime(2016,8,11)]
    for dt in dt_range:
        #fig,ax = plt.subplots(1,2, figsize=(10,10))
        fig,ax = plt.subplots(1,1, figsize=(10,10))
        handles = []
        for mi,mk in enumerate(models.keys()):
            mk = 'ICON'
            print(mk)
            for res in models[mk]['res']:
                #res = models[mk]['res'][0]
                #res = 80
                model_name = mk + '-' + str(res) + 'km'
                dt_str = '{:%Y%m%d}'.format(dt)
                file_name = os.path.join(models[mk]['bp'], 'SCu', 'data',
                                         model_name, dt_str,
                                         models[mk]['var_file'][var_name] + '.nc')
                print(file_name)
                #field = cdo.fldmean(input=file_name, returnXArray='field')
                dataset = xr.open_dataset(cdo.fldmean(input=file_name,
                                                      options='-f nc'))
                #dataset[models[mk]['var_key'][var_name]].plot()
                # TODO: show to Ralf
                #field = dataset[models[mk]['var_key'][var_name]]
                #print(field)
                #try:
                valuesc=[]
                if mk == 'ICON':
                    #kind = 18
                    # keep the time dimension here (do not average it away)
                    # so that a time-height contour can be drawn below; the
                    # fldmean output is assumed to be ordered
                    # (time, lev, lat, lon) before squeezing
                    height = dataset.coords[models[mk]['vcoord']].values[-18:]
                    time = dataset.coords['time'].values
                    values = dataset[models[mk]['var_key'][var_name]].\
                                    values.squeeze()[:,-18:]
                    valuesc.append(values)
                else:
                    height = dataset.coords[models[mk]['vcoord']].values
                    time = dataset.coords['time'].values
                    values = dataset[models[mk]['var_key'][var_name]].\
                                    values.squeeze()
                dataset.close()
                #except:
                #    pass
                ## fixes that are not nice..
                print(height)
                print(len(height))
                #print(np.arange(-len(height),-1,0))
                #quit()
                if mk == 'ICON':
                    ICON_hgt = np.loadtxt('ICON_hgt.txt')
                    ICON_hgt = ICON_hgt[:-1] + np.diff(ICON_hgt)/2
                    height = ICON_hgt[-len(height):]
                    print(len(height))
                    #print(values)
                    print(height)
                    #print(time)
                    #quit()
                print(values.shape, height.shape)
                #ax[mi].plot(values,height)
                label = mk + '_' + str(res)
                # contourf returns a QuadContourSet rather than a line artist
                # and takes no legend label, so attach a labelled colorbar
                # instead of a legend entry
                cf = ax.contourf(time, height, values.T)
                fig.colorbar(cf, ax=ax, label=label)
                #line, = ax.plot(values,height, label=label)
                #handles.append(line)

        #ax.set_ylim(0,3000)
        #plt.legend(handles=handles)
        plt.show()
        plt.close(fig.number)
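# Note: both plotting scripts above expect 'ICON_hgt.txt' (assumed to hold the
# ICON level heights) in the working directory and read the model output
# directly from the /work/ka1081 hackathon paths.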
@@ -76,16 +76,16 @@ if __name__ == '__main__':
    var_names = ['QV', 'QC', 'T', 'W',
                 'U10M', 'V10M', 'T2M', 'LWUTOA', 'SWDSFC',
                 'SLHFLX', 'SSHFLX', 'TQC']
    var_names = ['T']
    #var_names = ['T']
    # model resolutions [km] of simulations
    ress = [7, 3.5]
    ress = [7]
    #ress = [7]
    #ress = [3.5]
    # date range
    first_date = datetime(2016,8,10)
    last_date = datetime(2016,8,19)
    first_date = datetime(2016,8,11)
    last_date = datetime(2016,8,20)
    # options for computation
    options = {}
@@ -101,8 +101,8 @@ if __name__ == '__main__':
    ress = [4]
    # date range
    first_date = datetime(2016,8,10)
    last_date = datetime(2016,8,19)
    first_date = datetime(2016,8,11)
    last_date = datetime(2016,8,20)
    # options for computation
    options = {}
@@ -101,13 +101,12 @@ if __name__ == '__main__':
    # model resolutions [km] of simulations
    #ress = [10,5,2.5]
    ress = [10,2.5]
    ress = [2.5]
    #ress = [10]
    ress = [10]
    # date range
    first_date = datetime(2016,8,10)
    last_date = datetime(2016,8,19)
    first_date = datetime(2016,8,11)
    last_date = datetime(2016,8,20)
    # options for computation
    options = {}
@@ -73,8 +73,8 @@ if __name__ == '__main__':
    ress = [5]
    # date range
    first_date = datetime(2016,8,10)
    last_date = datetime(2016,8,19)
    first_date = datetime(2016,8,11)
    last_date = datetime(2016,8,20)
    # options for computation
    options = {}
@@ -7,7 +7,8 @@ date created: 05.07.2019
date changed: 19.07.2019
usage: arguments:
1st: n jobs for multiprocessing pool
python: 3.5.2
MPAS_3.75 : 3D var: 2 jobs
MPAS_7.50 : 3D var: 5 jobs
"""
###############################################################################
import os, glob, subprocess, sys, time, shutil
@@ -22,131 +23,43 @@ from functions import paste_dir_names
###############################################################################
def comp_weights_file(target_grid, weights_file, inp_file, grid_def_file,
                      res, box, options):
"""
"""
    #if (not os.path.exists(target_grid)) or (options['recompute']):
    #    write_grid_file(box, target_grid, res)
    print('Compute weights file')
    #ofile = cdo.gennn(target_grid,
    #                  input=(' -setgrid,mpas:'+grid_def_file+
    #                         ' '+inp_file),
    #                  output=weights_file)
    input = "-sellonlatbox,{},{},{},{} -setgrid,mpas:{} {}".format(
                box['lon'].start,box['lon'].stop,
                box['lat'].start,box['lat'].stop,
                grid_def_file,
                inp_file)
    ofile = cdo.gennn(target_grid,
                      input=input, output=weights_file)
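# For illustration, with a hypothetical lon/lat box of 265..281 / -24..-14 the
# cdo input chain built above expands to
#   -sellonlatbox,265,281,-24,-14 -setgrid,mpas:<grid_def_file> <inp_file>
# cdo applies the chain right to left: the MPAS grid definition is attached
# first, the lon/lat box is cut out, and gennn then generates the
# nearest-neighbour weights for that cropped field.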
def sellatlon_MPAS(inp_file, out_file, dt, box, options, var_dict,
                   weights_file, target_grid, var_name, res):
                   target_grid, var_name, res):

    TM = Timer()
    file_code = '{}km_{}_{:%Y%m%d%H%M}'.format(res, var_name, dt)
    print(file_code)

    domain_str = "{},{},{},{}".format(
                    box['lon'].start, box['lon'].stop,
                    box['lat'].start, box['lat'].stop)
    levels_str = "{}/{}".format(
                    box['vert0'], box['vert1'])
    TM.start('run')
    subprocess.call(['./run_MPAS.sh', domain_str, levels_str,
                     str(res), os.path.split(out_file)[0],
                     os.path.split(out_file)[1][:-3],
                     var_dict['file'], inp_file, target_grid,
                    ])
    TM.stop('run')
    #quit()
    #quit()
    #if os.path.exists(out_file) and not options['recompute']:
    #    TM.start('prepr')
    #    TM.start('cdo')
    #    print('\t\t'+file_code+' already computed')
    #    TM.stop('cdo')
    #    TM.stop('prepr')
    #else:
    #    print('\t\t'+file_code)
    #    split = os.path.split(out_file)
    #    tmp_file = os.path.join(split[0],'tmp_'+split[1])
    #    # first step
    #    TM.start('prepr')
    #    #if not os.path.exists(tmp_file):
    #    #    ofile = cdo.setattribute('*@axis="txz"',
    #    #                input=(
    #    #                    '-selname,'+var_dict['file']+' '+inp_file
    #    #                ),
    #    #                output=tmp_file, options='-f nc4')
    #    TM.stop('prepr')
    #    TM.start('grid')
    #    if not os.path.exists(weights_file):
    #        comp_weights_file(target_grid, weights_file,
    #                          tmp_file, grid_def_file,
    #                          res, box, options)
    #    TM.stop('grid')
    #    # cdo
    #    TM.start('cdo')
    #    #ofile = cdo.remap(target_grid, weights_file,
    #    #            input=(
    #    #                ' -sellevidx,'+
    #    #                str(box['vert0'])+'/'+str(box['vert1'])+
    #    #                ' -setgrid,mpas:'+grid_def_file+
    #    #                ' '+tmp_file),
    #    #            output=out_file, options='-f nc')
    #    #input = ("-sellonlatbox,{},{},{},{} -sellevidx,{}/{}"+
    #    #         " -setgrid,mpas:{} {}").format(
    #    #            box['lon'].start, box['lon'].stop,
    #    #            box['lat'].start, box['lat'].stop,
    #    #            box['vert0'], box['vert1'],
    #    #            grid_def_file,
    #    #            tmp_file)
    #    #ofile = cdo.remap(target_grid, weights_file,
    #    #            input=input, output=out_file, options='-f nc')
    #    zaxis_def_file = ('/work/ka1081/2019_06_Hackathon_Mainz/christoph_heim/'+
    #                      'newdata/MPAS_3.75/MPAS_3.75km_zgrid.nc')
    #    input = ("-sellonlatbox,{},{},{},{} -sellevidx,{}/{}"+
    #             " -setgridtype,regular -selname,{}"+
    #             " -setgrid,mpas:{} -selgrid,1 {}").format(
    #                box['lon'].start, box['lon'].stop,
    #                box['lat'].start, box['lat'].stop,
    #                box['vert0'], box['vert1'],
    #                var_dict['file'],
    #                grid_def_file,# zaxis_def_file,
    #                inp_file)
    #    print(input)
    #    ofile = cdo.remap(target_grid, weights_file,
    #                      input=input, output=out_file, options='-f nc4 -O')
    #    quit()
    #    # delete tmp_file
    #    TM.start('prepr')
    #    if options['rm_tmp_files']:
    #        os.remove(tmp_file)
    #    TM.stop('prepr')
    #    TM.stop('cdo')
    if os.path.exists(out_file) and not options['recompute']:
        TM.start('run')
        print('\t\t'+file_code+' already computed')
        TM.stop('run')
    else:
        TM.start('run')
        print('\t\t'+file_code)
        domain_str = "{},{},{},{}".format(
                        box['lon'].start, box['lon'].stop,
                        box['lat'].start, box['lat'].stop)
        levels_str = "{}/{}".format(
                        box['vert0'], box['vert1'])
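        # arguments handed to run_MPAS.sh, in order: lon/lat box, vertical
        # level index range, resolution, output directory, output file name
        # without the '.nc' suffix, variable name inside the MPAS file,
        # input file, target grid description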
        if i_bash_output:
            subprocess.call(['./run_MPAS.sh', domain_str, levels_str,
                             str(res), os.path.split(out_file)[0],
                             os.path.split(out_file)[1][:-3],
                             var_dict['file'], inp_file, target_grid,
                            ], stdout=subprocess.DEVNULL)
        else:
            subprocess.call(['./run_MPAS.sh', domain_str, levels_str,
                             str(res), os.path.split(out_file)[0],
                             os.path.split(out_file)[1][:-3],
                             var_dict['file'], inp_file, target_grid,
                            ], stdout=subprocess.DEVNULL,
                               stderr=subprocess.DEVNULL)
        TM.stop('run')

    #if i_bash_output:
    TM.print_report(short=True)

    return(TM)
@@ -169,23 +82,26 @@ if __name__ == '__main__':
    model_name = 'MPAS'

    # variables to extract
    var_names = ['QV', 'QC', 'T', 'W',
                 'U10M', 'V10M', 'T2M', 'LWUTOA', 'SWDSFC',
                 'TQC']
    var_names = ['U10M', 'V10M', 'T2M', 'LWUTOA', 'SWDSFC',
                 'TQC']
    #var_names = ['QV', 'QC', 'T', 'W']
    var_names = ['QV']
    # model resolutions [km] of simulations
    ress = [7.5, 3.75]
    var_namess = {
        '3D':['T', 'QV', 'QC', 'W'],
        '2D':['LWUTOA', 'T2M', 'U10M', 'V10M', 'SWDSFC', 'TQC'],
    }
    run_var_type = '3D'
    #run_var_type = '2D'
    var_names = var_namess[run_var_type]
    #ress = [7.5, 3.75]
    ress = [3.75]
    #ress = [7.5]
    ress = [7.5]
    i_bash_output = 0
    # date range
    first_date = datetime(2016,8,10)
    last_date = datetime(2016,8,19)
    first_date = datetime(2016,8,11)
    last_date = datetime(2016,8,20)
    # options for computation
    options = {}
@@ -266,10 +182,6 @@ if __name__ == '__main__':
        # prepare grid interpolation
        # mpas grid definition file
        grid_def_file = grid_dict[res]['grid_def_file']
        # weights file to compute once for a grid
        weights_file = os.path.join(out_tmp_dir,
                            'weights_{}km_dom_{}'.format(res,
                            domain['code']))
        # target grid on which to interpolate the model output
        target_grid = os.path.join('grids','latlon_{}km_dom_{}'.format(
                                    res, domain['code']))
@@ -285,21 +197,9 @@ if __name__ == '__main__':
                out_file = os.path.join(out_tmp_dir,
                                var_name+'_{:%Y%m%d%H%M}'.format(dt)+'.nc')
                args.append( (inp_file, out_file, dt, box, options,
                              var_dict[var_name], weights_file,
                              var_dict[var_name],
                              target_grid, var_name, res) )

            # compute weights file if necessary
            if not os.path.exists(weights_file):
                dt = dt_range[0]
                inp_file = glob.glob(os.path.join(
                                inp_dir,var_dict[var_name]['type']+
                                '.{:%Y-%m-%d_%H.%M.%S}.nc'.format(dt)))[0]
                out_file = os.path.join(out_tmp_dir,
                                var_name+'_{:%Y%m%d%H%M}'.format(dt)+'.nc')
                sellatlon_MPAS(inp_file, out_file, dt, box, options,
                               var_dict[var_name], weights_file,
                               target_grid, var_name, res)

            # run function serial or parallel
            if n_tasks > 1:
                with Pool(processes=n_tasks) as pool:
@@ -99,8 +99,8 @@ if __name__ == '__main__':
    #ress = [9]
    # date range
    first_date = datetime(2016,8,10)
    last_date = datetime(2016,8,19)
    first_date = datetime(2016,8,11)
    last_date = datetime(2016,8,20)
    # options for computation
    options = {}
@@ -112,32 +112,6 @@ if __name__ == '__main__':
    # IFS SPECIFIC SETTINGS
    ###########################################################################
    #'QV'    :{'file':'QV',              'dim':'3D',
    #          'group':'mars_out_ml_moist',},
    #'QC'    :{'file':'clwc',            'dim':'3D',
    #          'group':'mars_out_ml_moist',},
    #'T'     :{'file':'T',               'dim':'3D',
    #          'group':'gg_mars_out_ml_upper_sh',},
    #'W'     :{'file':'param120.128.192','dim':'3D',
    #          'group':'gg_mars_out_ml_upper_sh',},
    #'U10M'  :{'file':'10u',             'dim':'2D',
    #          'group':'mars_out',},
    #'V10M'  :{'file':'10v',             'dim':'2D',
    #          'group':'mars_out',},
    #'T2M'   :{'file':'2t',              'dim':'2D',
    #          'group':'mars_out',},
    #'LWUTOA':{'file':'ttr',             'dim':'2D',
    #          'group':'mars_out',},
    #'SWDSFC':{'file':'ssrd',            'dim':'2D',
    #          'group':'mars_out',},
    #'SLHFLX':{'file':'slhf',            'dim':'2D',
    #          'group':'mars_out',},
    #'SSHFLX':{'file':'sshf',            'dim':'2D',
    #          'group':'mars_out',},
    #'TQC'   :{'file':'tclw',            'dim':'2D',
    #          'group':'mars_out',},
    var_dict = {
        'QV'    :{'file':'q',            'dim':'3D',
                  'group':'mars_out_ml_moist',},
@@ -72,16 +72,13 @@ if __name__ == '__main__':
    var_names = ['QV', 'QC', 'T', 'H', 'W',
                 'U10M', 'V10M', 'T2M', 'LWUTOA', 'SWDSFC',
                 'SLHFLX', 'SSHFLX', 'TQC']
    var_names = ['SWDSFC',
                 'SLHFLX', 'SSHFLX', 'TQC']
    # model resolutions [km] of simulations
    ress = [3]
    # date range
    first_date = datetime(2016,8,10)
    last_date = datetime(2016,8,19)
    first_date = datetime(2016,8,11)