diff --git a/.gitignore b/.gitignore
index d6441b1e9683b7c6ca17a23ac087469da4d50f23..1f659877bd872cd89401a98784a64f12bb7d0e88 100644
--- a/.gitignore
+++ b/.gitignore
@@ -19,3 +19,4 @@ htmlcov
 prof
 .vscode
 .eggs
+build
diff --git a/diagonals/density.py b/diagonals/density.py
index 010a0168f9370ed7fe1ce466242f6b6a794babdc..44701bbf3c76fd0b6eabd9700e74404aa8a1fce3 100644
--- a/diagonals/density.py
+++ b/diagonals/density.py
@@ -1,9 +1,11 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
 import numpy as np
 from numba import vectorize
 
 # import diagonals
 
-__all__ = ['compute']
+__all__ = ['compute', 'convert']
 
 
 @vectorize(['float32(float32, float32, float32)'], target='cpu')
@@ -28,24 +30,28 @@ def compute(so, bigthetao, p):
     """
 
     # reduced variables
-    SAu = 40.*35.16504/35.
+
+    SAu = 40. * 35.16504 / 35.
     CTu = 40.
     Zu = 1e4
     deltaS = 32.
-    ss = np.sqrt((so+deltaS)/SAu)
+    ss = np.sqrt((so + deltaS) / SAu)
     tt = bigthetao / CTu
     pp = p / Zu
 
     # vertical reference profile of density
+
     R00 = 4.6494977072e+01
     R01 = -5.2099962525e+00
     R02 = 2.2601900708e-01
     R03 = 6.4326772569e-02
     R04 = 1.5616995503e-02
     R05 = -1.7243708991e-03
-    r0 = (((((R05*pp + R04)*pp + R03)*pp + R02)*pp + R01)*pp + R00)*pp
+    r0 = (((((R05 * pp + R04) * pp + R03) * pp + R02) * pp + R01) * pp
+          + R00) * pp
 
     # density anomaly
+
     R000 = 8.0189615746e+02
     R100 = 8.6672408165e+02
     R200 = -1.7864682637e+03
@@ -99,21 +105,24 @@ def compute(so, bigthetao, p):
     R103 = -1.8507636718e-02
     R013 = 3.7969820455e-01
 
-    rz3 = R013*tt + R103*ss + R003
-    rz2 = (R022*tt+R112*ss+R012)*tt+(R202*ss+R102)*ss+R002
-    rz1 = (((R041*tt+R131*ss+R031)*tt +
-           (R221*ss+R121)*ss+R021)*tt +
-           ((R311*ss+R211)*ss+R111)*ss+R011)*tt + \
-          (((R401*ss+R301)*ss+R201)*ss+R101)*ss+R001
-    rz0 = (((((R060*tt+R150*ss+R050)*tt +
-           (R240*ss+R140)*ss+R040)*tt +
-           ((R330*ss+R230)*ss+R130)*ss+R030)*tt +
-           (((R420*ss+R320)*ss+R220)*ss+R120)*ss+R020)*tt +
-           ((((R510*ss+R410)*ss+R310)*ss+R210)*ss+R110)*ss+R010)*tt + \
-          (((((R600*ss+R500)*ss+R400)*ss+R300)*ss+R200)*ss+R100)*ss+R000
-    r = ((rz3*pp + rz2)*pp + rz1)*pp + rz0
+    rz3 = R013 * tt + R103 * ss + R003
+    rz2 = (R022 * tt + R112 * ss + R012) * tt + (R202 * ss + R102) * ss \
+        + R002
+    rz1 = (((R041 * tt + R131 * ss + R031) * tt + (R221 * ss + R121)
+            * ss + R021) * tt + ((R311 * ss + R211) * ss + R111) * ss
+           + R011) * tt + (((R401 * ss + R301) * ss + R201) * ss
+           + R101) * ss + R001
+    rz0 = (((((R060 * tt + R150 * ss + R050) * tt + (R240 * ss + R140)
+              * ss + R040) * tt + ((R330 * ss + R230) * ss + R130) * ss
+             + R030) * tt + (((R420 * ss + R320) * ss + R220) * ss
+            + R120) * ss + R020) * tt + ((((R510 * ss + R410) * ss
+           + R310) * ss + R210) * ss + R110) * ss + R010) * tt \
+        + (((((R600 * ss + R500) * ss + R400) * ss + R300) * ss + R200)
+           * ss + R100) * ss + R000
+    r = ((rz3 * pp + rz2) * pp + rz1) * pp + rz0
 
     # in-situ density
+
     rho = r + r0
     #
     # thermal expansion a
@@ -219,3 +228,33 @@ def compute(so, bigthetao, p):
 #    b = b / ss
 
     return rho  # ,a,b,r0,r
+
+
+@vectorize(['float32(float32, float32)'], target='cpu')
+def convert(so, thetao):
+    x = np.sqrt(0.0248826675584615 * so)
+    y = thetao * 0.025e0
+    enthalpy = 61.01362420681071e0 + y * (168776.46138048015e0 + y
+               * (-2735.2785605119625e0 + y * (2574.2164453821433e0 + y
+               * (-1536.6644434977543e0 + y * (545.7340497931629e0
+               + (-50.91091728474331e0 - 18.30489878927802e0 * y) * y))))) \
+        + x ** 2 * (268.5520265845071e0 + y * (-12019.028203559312e0
+                    + y * (3734.858026725145e0 + y
+                           * (-2046.7671145057618e0 + y
+                              * (465.28655623826234e0 + (-0.6370820302376359e0
+                                 - 10.650848542359153e0 * y) * y)))) + x
+                    * (937.2099110620707e0 + y * (588.1802812170108e0
+                       + y * (248.39476522971285e0 + (-3.871557904936333e0
+                              - 2.6268019854268356e0 * y) * y)) + x
+                       * (-1687.914374187449e0 + x * (246.9598888781377e0
+                          + x * (123.59576582457964e0 - 48.5891069025409e0
+                                 * x)) + y * (936.3206544460336e0 + y
+                          * (-942.7827304544439e0 + y * (369.4389437509002e0
+                             + (-33.83664947895248e0 - 9.987880382780322e0 * y)
+                             * y))))))
+
+    # enthalpy/Specific heat
+
+    bigthetao = enthalpy / 3991.86795711963
+
+    return bigthetao
diff --git a/diagonals/moc.py b/diagonals/moc.py
index 211d62edaa9f8561ab86c03bb64773b293bde344..ef405143fdc737b9400ac57551a341b5c3f0f198 100644
--- a/diagonals/moc.py
+++ b/diagonals/moc.py
@@ -5,6 +5,7 @@ from numba import vectorize
 from numba import guvectorize
 from numba import cuda
 from numba import float32
+import dask.array as da
 
 import diagonals
 
@@ -69,7 +70,8 @@ def _compute_moc_cpu(vo, area):
     """
     moc = {}
     for basin, mask in area.items():
-        moc_basin = np.sum(_multiply_vo_basin(vo, mask), axis=3)
+        moc_basin = da.sum(_multiply_vo_basin(vo, mask), axis=3)
+        moc_basin = moc_basin.compute()
         _vertical_cumsum(moc_basin, moc_basin)
         moc[basin] = moc_basin
     return moc
diff --git a/diagonals/ohc.py b/diagonals/ohc.py
index e5cd6c63fb0951bf496a00a9c7e20a8f318cb1a3..d08f26dcff67870493cc595f59cb943da3e549b5 100644
--- a/diagonals/ohc.py
+++ b/diagonals/ohc.py
@@ -1,4 +1,5 @@
 import numpy as np
+import dask.array as da
 from numba import vectorize
 from numba import cuda
 
@@ -162,7 +163,7 @@ def _compute_ohc_cpu(layers, thetao, weights, area):
     """
     ohc = []
     for layer in range(len(layers)):
-        ohc_layer = _sum_red.reduce(
+        ohc_layer = da.nansum(
             _multiply_array(thetao, weights[layer]),
             axis=1
         )
@@ -170,7 +171,7 @@ def _compute_ohc_cpu(layers, thetao, weights, area):
         ohc1d_total = []
         for i, basin in enumerate(area):
             ohc_basin = _multiply_array(ohc_layer, area[basin])
-            ohc1d = np.sum(ohc_basin, axis=(1, 2))
+            ohc1d = da.nansum(ohc_basin, axis=(1, 2))
             ohc1d_total.append(ohc1d)
 
     return ohc, ohc1d_total
@@ -259,8 +260,6 @@ def _calculate_weight_numba_cuda(min_depth, max_depth, e3t, depth, mask):
     weight: float32
         Masked array containing the weights for a given layer.
     """
-    if not mask:
-        return 0
     top = depth
     bottom = top + e3t
     if bottom < min_depth or top > max_depth:
@@ -270,6 +269,8 @@ def _calculate_weight_numba_cuda(min_depth, max_depth, e3t, depth, mask):
         top = min_depth
     if bottom > max_depth:
         bottom = max_depth
+    if not mask:
+        return np.nan
     return (bottom - top) * 1020 * 4000
 
 
@@ -361,8 +362,6 @@ def _calculate_weight_numba(min_depth, max_depth, e3t, depth, mask):
     weight: float32
         Masked array containing the weights for a given layer.
     """
-    if not mask:
-        return 0
     top = depth
     bottom = top + e3t
     if bottom < min_depth or top > max_depth:
@@ -372,6 +371,8 @@ def _calculate_weight_numba(min_depth, max_depth, e3t, depth, mask):
         top = min_depth
     if bottom > max_depth:
         bottom = max_depth
+    if not mask:
+        return np.nan
     return (bottom - top) * 1020 * 4000
 
 
diff --git a/diagonals/regmean.py b/diagonals/regmean.py
index efce77b9fa102a2a5aa050bcd148beb5c0e5c8c8..03a7802f2e8339effcfa69b3815eb82ddc3c7c03 100644
--- a/diagonals/regmean.py
+++ b/diagonals/regmean.py
@@ -1,5 +1,6 @@
 import logging
 import numpy as np
+import dask.array as da
 
 from numba import vectorize
 
@@ -111,14 +112,14 @@ def _compute_regmean_2d_cpu(var, basins, area):
     regmean_total: float32
         List containing regional mean for variable var.
     """
-    times = var.shape[0]
    regmean_total = {}
     for basin, mask in basins.items():
         weights = _compute_weights_2d(mask, area)
-        regmean = np.empty(times)
-        for t in range(times):
-            regmean[t] = np.ma.average(
-                var[t, :, :], axis=(0, 1), weights=np.squeeze(weights))
+        regmean = da.ma.average(
+            var,
+            axis=(1, 2),
+            weights=da.broadcast_to(weights, var.shape)
+        )
         regmean_total[basin] = regmean
     return regmean_total
 
@@ -142,14 +143,14 @@ def _compute_regmean_3d_cpu(var, basins, volume):
     regmean_total: float32
         List containing the regional mean for variable var.
     """
-    times = var.shape[0]
     regmean_total = {}
     for basin, mask in basins.items():
         weights = _compute_weights_3d(mask, volume)
-        regmean = np.empty(times)
-        for t in range(times):
-            regmean[t] = np.ma.average(
-                var[t, :, :, :], axis=(0, 1, 2), weights=np.squeeze(weights))
+        regmean = da.ma.average(
+            var,
+            axis=(1, 2, 3),
+            weights=da.broadcast_to(weights, var.shape)
+        )
         regmean_total[basin] = regmean
     return regmean_total
 
@@ -174,19 +175,14 @@ def _compute_regmean_levels_cpu(var, basins, volume):
     regmean_total: float32
         List containing regional mean at every depth level for variable var.
     """
-    times = var.shape[0]
-    levs = var.shape[1]
     regmean_total = {}
     for basin, mask in basins.items():
-        regmean = np.empty((times, levs))
-        w = _compute_weights_3d(mask, volume)
-        for time in range(times):
-            for lev in range(levs):
-                regmean[time, lev] = np.ma.average(
-                    var[time, lev, :, :],
-                    axis=(0, 1),
-                    weights=np.squeeze(w)[lev, :, :]
-                )
+        weights = _compute_weights_3d(mask, volume)
+        regmean = da.ma.average(
+            var,
+            axis=(2, 3),
+            weights=da.broadcast_to(weights, var.shape)
+        )
         regmean_total[basin] = regmean
     return regmean_total
 
diff --git a/diagonals/regsum.py b/diagonals/regsum.py
index d4ea3e950ee26230330fff22895a0e6dde97d375..d105ad3c8b8eaaed6ddbfab86224d10dbb208737 100644
--- a/diagonals/regsum.py
+++ b/diagonals/regsum.py
@@ -1,9 +1,10 @@
 import logging
-import numpy as np
 from numba import vectorize
 
 import diagonals
 
+import dask.array as da
+
 logger = logging.getLogger(__name__)
 
 __all__ = ['compute_regsum_2d', 'compute_regsum_3d', 'compute_regsum_levels']
@@ -83,7 +84,7 @@ def _compute_regsum_2d_cpu(var, basins, area):
     regsum = {}
     for basin, mask in basins.items():
         weighted_var = _weigh_var_2d(var, mask, area)
-        regsum[basin] = np.sum(weighted_var, axis=(1, 2))
+        regsum[basin] = da.sum(weighted_var, axis=(1, 2))
     return regsum
 
 
@@ -109,7 +110,7 @@ def _compute_regsum_3d_cpu(var, basins, volume, tmask):
     regsum = {}
     for basin, mask in basins.items():
         weighted_var = _weigh_var_3d(var, mask, volume, tmask)
-        regsum[basin] = np.sum(weighted_var, axis=(1, 2, 3))
+        regsum[basin] = da.sum(weighted_var, axis=(1, 2, 3))
     return regsum
 
 
@@ -134,7 +135,7 @@ def compute_regsum_levels(var, basins, volume, tmask):
     regsum = {}
     for basin, mask in basins.items():
         weighted_var = _weigh_var_3d(var, mask, volume, tmask)
-        regsum[basin] = np.sum(weighted_var, axis=(2, 3))
+        regsum[basin] = da.sum(weighted_var, axis=(2, 3))
     return regsum
 
 
diff --git a/diagonals/zonmean.py b/diagonals/zonmean.py
index 51833351d69ead5fbd46008c3a773e287fc688bb..24dc42f633debc99120149a4246fddcec213e091 100644
--- a/diagonals/zonmean.py
+++ b/diagonals/zonmean.py
@@ -148,8 +148,8 @@ def _zonal_mean_cpu(variable, weight, latitude):
             if weight[i, j] == 0:
                 continue
             bin_value = int(round(latitude[i, j]) + 90)
-            weights[bin_value] += weight[i, j]
-            total[bin_value] += variable[i, j] * weight[i, j]
+            weights[bin_value-1] += weight[i, j]
+            total[bin_value-1] += variable[i, j] * weight[i, j]
     return total / weights
 
 
diff --git a/setup.py b/setup.py
index 2d084008dad0da81273c741911f8338d85652e6f..59082e67038596c102db7e2e04c2ea7b890aab14 100644
--- a/setup.py
+++ b/setup.py
@@ -15,7 +15,7 @@ REQUIREMENTS = {
     ],
     'install': [
         'netCDF4',
-        'numba',
+        'numba==0.52',
         'numpy',
         'six',
     ],
@@ -36,7 +36,7 @@ REQUIREMENTS = {
 
 
 setup(name='diagonals',
-      version='0.3.1',
+      version='0.3.4',
       description='Compute diagnostics targeting the CPU or the GPU',
       url='https://earth.bsc.es/gitlab/es/diagonals',
       author='BSC-CNS Earth Sciences Department',
diff --git a/test/unit/test_ohc.py b/test/unit/test_ohc.py
index a7641dfd58804a2580ef7babd3b8132130f90779..fd885cd6a13defa0fce955af070b7cdf98d27a01 100644
--- a/test/unit/test_ohc.py
+++ b/test/unit/test_ohc.py
@@ -23,7 +23,7 @@ class TestOhc(unittest.TestCase):
             cell_height=np.array(((1., 2.), (4., 3.)), dtype=np.float32),
             cell_top_depth=np.array(((0., 0.), (1., 2.)), dtype=np.float32)
         )
-        self.assertTrue(np.all(weights == np.zeros((2, 2), dtype=np.float32)))
+        self.assertTrue(np.all(np.isnan(weights)))
 
     def test_weights_mul_layers(self):
         weights = ohc.get_weights(