merge
FranziskaWinterstein committed May 17, 2024
2 parents 701312b + 73f9e21 commit fd52810
Showing 2 changed files with 32 additions and 31 deletions.
11 changes: 5 additions & 6 deletions esmvaltool/diag_scripts/lifetime/lifetime.py
@@ -270,6 +270,7 @@
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import dask.array as da
import seaborn as sns
from iris.coord_categorisation import add_year
import iris.common
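The new dask.array import is what drives the rest of this change set: NumPy operations evaluate immediately, while their dask counterparts only build a task graph and compute on demand. A minimal sketch of that difference (values are illustrative):

import numpy as np
import dask.array as da

x = da.from_array(np.arange(8.0), chunks=4)  # lazy: a task graph, no values yet
y = da.multiply(x, 2.0)                      # still lazy, nothing computed
print(y.compute())                           # values are realized only here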
@@ -709,7 +710,8 @@ def _convert_to_mass(self, varname, variables):
hus,
self.z_coord)

var = variables[varname] * grmassdry * m_var / self.cfg['m_air']
var = da.multiply(da.multiply(variables[varname], grmassdry),
da.divide(m_var, self.cfg['m_air']))

return var
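The rewritten line computes the same product as before, variables[varname] * grmassdry * m_var / m_air, but through dask ufuncs so the result stays lazy when the inputs are dask-backed. A rough sketch with made-up values (hypothetical grid masses, CH4-like molar masses):

import dask.array as da

vmr = da.full((2, 3), 1.8e-6, chunks=(1, 3))        # hypothetical mixing ratio
grmassdry = da.full((2, 3), 1.2e15, chunks=(1, 3))  # hypothetical dry-air mass per cell [g]
m_var, m_air = 16.04, 28.97                         # molar masses [g/mol], e.g. CH4 vs dry air

# same as vmr * grmassdry * m_var / m_air, evaluated lazily
mass = da.multiply(da.multiply(vmr, grmassdry), da.divide(m_var, m_air))
print(mass.compute()[0, 0])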

@@ -992,9 +994,9 @@ def create_timeseries_plot(self, region, input_data, base_datasets):
plot_kwargs['axes'] = axes

if self.plots[plot_type]['display_mean'] is not False:
mean = cube.collapsed('time', iris.analysis.MEAN)
mean = cube.collapsed('time', iris.analysis.MEAN).data
plot_kwargs['label'] = (f"{plot_kwargs['label']}"
f" ({mean.data:.2f})")
f" ({mean:.2f})")

# Plot annual means if desired
annual_mean = self.plots[plot_type]['annual_mean']
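Pulling .data out before building the label matters because cube.collapsed(...) returns a 0-d Cube, and a Cube does not support numeric format specs such as :.2f, whereas its realized .data is a plain scalar. A small sketch of the pattern, assuming a trivial time-series cube:

import numpy as np
import iris.analysis
import iris.coords
import iris.cube

time = iris.coords.DimCoord([0.0, 1.0, 2.0], standard_name='time',
                            units='days since 2000-01-01')
cube = iris.cube.Cube(np.array([1.0, 2.0, 4.0]),
                      dim_coords_and_dims=[(time, 0)])

mean = cube.collapsed('time', iris.analysis.MEAN).data  # scalar, not a Cube
label = f"lifetime ({mean:.2f})"  # formatting the Cube itself would raise TypeError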
@@ -1046,9 +1048,6 @@ def create_timeseries_plot(self, region, input_data, base_datasets):
'long_name': self.names['long_name'],
'units': self.units
}
print(cubes)
print(var_attrs)
print(netcdf_path)
io.save_1d_data(cubes, netcdf_path, 'time', var_attrs)

# Provenance tracking
52 changes: 27 additions & 25 deletions esmvaltool/diag_scripts/lifetime/lifetime_base.py
@@ -24,7 +24,6 @@

logger = logging.getLogger(__name__)


def create_press(var):
"""Create a pressure variable."""
resolver = iris.common.resolve.Resolve(var, var)
@@ -200,8 +199,8 @@ def guess_interfaces(coordinate):
# first = np.sign(first) * min([abs(first), 90.])
# last = np.sign(last) * min([abs(last), 90.])

interfaces = np.insert(interfaces, 0, first)
interfaces = np.append(interfaces, last)
interfaces = da.insert(interfaces, 0, first)
interfaces = da.append(interfaces, last)

return interfaces
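dask.array provides NumPy-compatible insert and append, so the endpoint handling keeps its shape while the interface array can stay lazy; note that, depending on the dask version, da.insert wants an explicit axis. A reduced sketch of what guess_interfaces builds, with illustrative latitude points:

import dask.array as da

points = da.from_array([-60.0, -20.0, 20.0, 60.0], chunks=2)
mids = (points[:-1] + points[1:]) / 2.0          # midpoints: -40, 0, 40
first, last = -80.0, 80.0                        # outer edges, extrapolated in the real code
interfaces = da.insert(mids, 0, first, axis=0)   # axis passed explicitly in this sketch
interfaces = da.append(interfaces, last)
print(interfaces.compute())                      # [-80. -40.   0.  40.  80.]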

@@ -226,13 +225,13 @@ def calculate_area(latitude, longitude):
"""
r_earth = 6378100.0 # from astropy.constants

lat_i = np.deg2rad(guess_interfaces(latitude.points))
lon_i = np.deg2rad(guess_interfaces(longitude.points))
lat_i = da.deg2rad(guess_interfaces(latitude.points))
lon_i = da.deg2rad(guess_interfaces(longitude.points))

delta_x = abs(lon_i[1:] - lon_i[:-1])
delta_y = abs(np.sin(lat_i[1:]) - np.sin(lat_i[:-1]))
delta_y = abs(da.sin(lat_i[1:]) - da.sin(lat_i[:-1]))

output = np.outer(delta_y, delta_x) * r_earth**2
output = da.outer(delta_y, delta_x) * r_earth**2
output = output.astype('float32')

result = iris.cube.Cube(output,
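The area of a cell bounded by two latitude and two longitude interfaces on a sphere is R^2 * |sin(lat2) - sin(lat1)| * |lon2 - lon1| (angles in radians), and da.outer assembles the full latitude-by-longitude grid of those products lazily. A quick numeric check for a single 1 x 1 degree cell at the equator:

import numpy as np

r_earth = 6378100.0                    # m, as in the function above
lat_i = np.deg2rad([-0.5, 0.5])        # one 1-degree latitude band at the equator
lon_i = np.deg2rad([0.0, 1.0])         # one 1-degree longitude slice

delta_y = abs(np.sin(lat_i[1]) - np.sin(lat_i[0]))
delta_x = abs(lon_i[1] - lon_i[0])
area = delta_y * delta_x * r_earth**2
print(f"{area / 1e6:.0f} km^2")        # about 12,400 km^2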
@@ -277,14 +276,14 @@ def dpres_plevel_4d(plev, pmin, pmax, z_coord='air_pressure'):

if i == 0:
cube = (cubelist_plev[increment[0]] - lev) / 2. + (lev - pmin)
cubelist_dplev[i].data = cube.data
cubelist_dplev[i].data = cube.core_data()
elif i == last:
cube = (pmax - lev) + (lev - cubelist_plev[increment[1]]) / 2.
cubelist_dplev[i].data = cube.data
cubelist_dplev[i].data = cube.core_data()
else:
cube = ((lev - cubelist_plev[increment[0]]) / 2.
+ (cubelist_plev[increment[1]] - lev) / 2.)
cubelist_dplev[i].data = cube.data
cubelist_dplev[i].data = cube.core_data()

cubelist_dplev[i].add_aux_coord(iris.coords.AuxCoord(
lev.coords(z_coord)[0].points[0]))
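Switching from cube.data to cube.core_data() is what keeps this routine lazy: .data always realizes (and caches) a NumPy array, while .core_data() returns whatever currently backs the cube, dask graph included. A minimal sketch, assuming a cube created from a dask array:

import dask.array as da
import iris.cube

cube = iris.cube.Cube(da.zeros((4, 5), chunks=(2, 5)), var_name='dplev')
print(cube.has_lazy_data())   # True

lazy = cube.core_data()       # still a dask array, nothing computed
realized = cube.data          # triggers compute and caches a NumPy array
print(cube.has_lazy_data())   # False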
@@ -306,8 +305,8 @@ def dpres_plevel_1d(plev, pmin, pmax):
cube. The output of dpres_plevel will have the
same dimensionality.
"""
increasing = (np.diff(plev) >= 0).all()
decreasing = (np.diff(plev) <= 0).all()
increasing = (da.diff(plev) >= 0).all()
decreasing = (da.diff(plev) <= 0).all()

if isinstance(pmax, float):
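The monotonicity test behaves the same under dask: da.diff and .all() stay lazy, and the boolean is only computed when the surrounding if statement asks for its truth value. A small sketch:

import dask.array as da

plev = da.from_array([100000.0, 85000.0, 70000.0, 50000.0], chunks=2)  # Pa
increasing = (da.diff(plev) >= 0).all()   # lazy 0-d boolean
decreasing = (da.diff(plev) <= 0).all()

if decreasing:                            # bool() on the dask scalar computes it
    print("levels run from the surface upwards (pressure decreasing)")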

@@ -408,11 +407,14 @@ def extract_region(dataset, region, case='reaction'):
var = dataset[case]
z_coord = dataset['z_coord']

# calculate climatological tropopause pressure
# but only if tropopause is not given by data
if (
'ptp' not in dataset['variables']
and 'tp_i' not in dataset['variables']):
tp_clim = climatological_tropopause(var[:, 0, :, :])

# mask regions outside
if region in ['TROP', 'STRA']:
use_z_coord = 'air_pressure'
if z_coord.name() == 'air_pressure':
@@ -444,13 +446,13 @@ def extract_region(dataset, region, case='reaction'):
)

if region == 'TROP':
var.data = np.ma.array(
var.data,
var.data = da.ma.masked_array(
var.core_data(),
mask=(z_4d <= tp_4d),
)
elif region == 'STRA':
var.data = np.ma.array(
var.data,
var.data = da.ma.masked_array(
var.core_data(),
mask=(z_4d > tp_4d),
)
else:
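da.ma.masked_array mirrors np.ma.array but accepts dask inputs and defers the masking, so together with core_data() the TROP/STRA split never forces the field into memory. A reduced sketch of the same masking pattern, with a hypothetical pressure column and tropopause:

import dask.array as da

p_field = da.from_array([[100000.0, 30000.0, 5000.0]], chunks=(1, 3))  # air pressure [Pa]
tp_4d = da.full_like(p_field, 20000.0)   # hypothetical tropopause pressure, broadcast

# troposphere: mask everything at or above the tropopause (pressure <= tp)
trop = da.ma.masked_array(p_field, mask=(p_field <= tp_4d))
print(trop.compute())   # [[100000.0 30000.0 --]]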
@@ -466,7 +468,7 @@ def climatological_tropopause(cube):
" have a latitude cooridnate")

tpp = (300. - 215. * (
np.cos(np.deg2rad(cube.coord('latitude').points)) ** 2)) * 100.
da.cos(da.deg2rad(cube.coord('latitude').points)) ** 2)) * 100.

tp_clim = cube.copy()
tp_clim.data = broadcast_to_shape(
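The expression above is the usual cos^2-latitude fit for a climatological tropopause, p_tp(lat) = (300 - 215 * cos^2(lat)) hPa, converted to Pa: 85 hPa at the equator and 300 hPa at the poles. A quick check:

import numpy as np

lat = np.array([-90.0, 0.0, 90.0])
tpp = (300. - 215. * np.cos(np.deg2rad(lat)) ** 2) * 100.  # Pa
print(tpp)   # [30000.  8500. 30000.]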
@@ -502,7 +504,7 @@ def sum_up_to_plot_dimensions(var, plot_type):
cube = var.collapsed(['longitude', 'latitude'], iris.analysis.SUM)
elif plot_type == 'annual_cycle':
# TODO!
# not iris.analysis.SUM but some kind of mean
# not use iris.analysis.SUM but some kind of mean
# cube = var.collapsed(['longitude', 'latitude', z_coord],
# iris.analysis.SUM)
raise NotImplementedError("The sum to plot dimensions for plot_type"
@@ -523,13 +525,13 @@ def calculate_reaction_rate(temp, reaction_type,

# special reaction rate
if coeff_b is not None:
reaction_rate = (coeff_a
* iris.analysis.maths.exp(coeff_b *
iris.analysis.maths.log(
reaction_rate)
- (
coeff_er
/ reaction_rate)))
reaction_rate = (da.multiply(coeff_a,
iris.analysis.maths.exp(
da.multiply(coeff_b,
iris.analysis.maths.log(
reaction_rate))
- da.divide(coeff_er
,reaction_rate))))
else:
# standard reaction rate (arrhenius)
reaction_rate = coeff_a * iris.analysis.maths.exp(
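Spelled out, the special branch computes k = A * exp(B * ln T - E_R / T), which equals A * T^B * exp(-E_R / T), an Arrhenius rate with an extra temperature-power term; the standard branch is plain Arrhenius, k = A * exp(-E_R / T). A rough numeric sketch with illustrative coefficients (the diagnostic itself applies this to iris cubes via iris.analysis.maths):

import dask.array as da

temp = da.from_array([220.0, 250.0, 290.0], chunks=3)  # K, made-up profile
coeff_a, coeff_b, coeff_er = 1.85e-20, 2.82, 987.0     # illustrative coefficients

# special: k = A * exp(B * ln(T) - E_R / T)  ==  A * T**B * exp(-E_R / T)
k_special = coeff_a * da.exp(coeff_b * da.log(temp) - coeff_er / temp)
# standard (Arrhenius): k = A * exp(-E_R / T)
k_arrhenius = coeff_a * da.exp(-coeff_er / temp)
print(k_special.compute())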
