Newer
Older
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
# coding=utf-8
"""Compute the indices for oceanic basins"""
from bscearth.utils.log import Log
import iris
import iris.analysis
from earthdiagnostics.diagnostic import Diagnostic
from earthdiagnostics.modelingrealm import ModelingRealms
from earthdiagnostics.utils import Utils, TempFile
class Indices(Diagnostic):
    """
    Compute climate indices (AMV, IPO, IOD) from basin-mean SST

    :created: March 2012
    :last modified: June 2016

    :param data_manager: data management object
    :type data_manager: DataManager
    :param startdate: startdate
    :type startdate: str
    :param member: member number
    :type member: int
    :param chunk: chunk's number
    :type chunk: int
    """

    alias = 'indices'
    "Diagnostic alias for the configuration file"

    def __init__(self, data_manager, startdate, member, chunk):
        Diagnostic.__init__(self, data_manager)
        self.startdate = startdate
        self.member = member
        self.chunk = chunk
        # index name -> computed data array
        self.results = {}
        # index name -> human-readable description of the boxes used
        self.region_metadata = {}
        # index name -> declared output chunk (filled by _declare_var)
        self.generated = {}
        # Handle to the requested 'tosmean' input; set in request_data()
        self.variable_file = None

    def __str__(self):
        return 'Indices Startdate: {0.startdate} Member: {0.member} ' \
               'Chunk: {0.chunk}'.format(self)

    def __hash__(self):
        # Hash must be consistent with __eq__, which compares exactly the
        # fields rendered by __str__.
        return hash(str(self))

    def __eq__(self, other):
        if self._different_type(other):
            return False
        return (
            self.startdate == other.startdate and
            self.member == other.member and self.chunk == other.chunk
        )

    @classmethod
    def generate_jobs(cls, diags, options):
        """
        Create a job for each chunk to compute the diagnostic

        :param diags: Diagnostics manager class
        :type diags: Diags
        :param options: None
        :type options: list[str]
        :return: one job per (startdate, member, chunk)
        :rtype: list[Indices]
        """
        return [
            Indices(diags.data_manager, startdate, member, chunk)
            for startdate, member, chunk
            in diags.config.experiment.get_chunk_list()
        ]

    def request_data(self):
        """Request data required by the diagnostic"""
        self.variable_file = self.request_chunk(
            ModelingRealms.ocean, 'tosmean',
            self.startdate, self.member, self.chunk)

    def declare_data_generated(self):
        """Declare data to be generated by the diagnostic"""
        self._declare_var('amv')
        self._declare_var('ipo')
        self._declare_var('iod')

    def _declare_var(self, var_name):
        # Register one output chunk per index variable
        self.generated[var_name] = self.declare_chunk(
            ModelingRealms.ocean, var_name,
            self.startdate, self.member, self.chunk)

    def compute(self):
        """Run the diagnostic"""
        tosmean = iris.load_cube(self.variable_file.local_file)
        data_regions = set(tosmean.coord('region').points)
        # Each index needs the basin means of a specific set of regions;
        # compute it only when all of them are present, otherwise log and
        # skip that index.
        indices = (
            ('AMV', ['AMV_North_Atlantic', 'AMV_trend'],
             self.compute_amv),
            ('IPO', ['Pacific_TPI1', 'Pacific_TPI2', 'Pacific_TPI3'],
             self.compute_ipo),
            ('IOD', ['Indian_dipole_east', 'Indian_dipole_west'],
             self.compute_iod),
        )
        for name, regions, compute_index in indices:
            if set(regions).issubset(data_regions):
                data = {
                    region: tosmean.extract(iris.Constraint(region=region))
                    for region in regions
                }
                compute_index(data)
            else:
                Log.info('Input data does not contain the basins required '
                         'to compute the {0} index. Skipping {0} '
                         'computations.'.format(name))
        self.save()

    def compute_amv(self, data):
        """AMV: North Atlantic box mean minus the trend box mean"""
        self.results['amv'] = (data['AMV_North_Atlantic'].data -
                               data['AMV_trend'].data)
        self.region_metadata['amv'] = (
            'AMV_North_Atlantic Box (lat: [0, 60], lon:[-80, 0]), '
            'AMV_trend Box (lat: [-60, 60], lon: [-180, 180])')

    def compute_ipo(self, data):
        """IPO: TPI2 box mean minus the average of TPI1 and TPI3"""
        self.results['ipo'] = data['Pacific_TPI2'].data - 0.5*(
            data['Pacific_TPI1'].data + data['Pacific_TPI3'].data
        )
        self.region_metadata['ipo'] = (
            'Pacific_TPI1 Box ( (lat: [25, 45], lon:[140, 180]), '
            '(lat: [25, 45], lon:[-180, -145]) ) '
            'Pacific_TPI2 Box ( (lat: [-10, 10], lon:[170, 180]), '
            '(lat: [-10, 10], lon:[-180, -90]) ) '
            'Pacific_TPI3 Box ( (lat: [-50, -15], lon:[150, 180]), '
            '(lat: [-50, -15], lon:[-180, -160]) )'
        )

    def compute_iod(self, data):
        """IOD: west Indian dipole box mean minus the east box mean"""
        self.results['iod'] = (data['Indian_dipole_west'].data -
                               data['Indian_dipole_east'].data)
        self.region_metadata['iod'] = (
            'Indian_dipole_west Box (lat: [-10, 10], lon:[50,70]) '
            'Indian_dipole_east Box (lat: [-10, 0], lon:[90, 110])'
        )

    def save(self):
        """Write each computed index to its own NetCDF file and register it

        Copies the time coordinate from the input file, stores the index as
        a (time,) variable, and hands the temporary file to the data
        manager. Both NetCDF handlers are always closed, even on error
        (the original leaked the source handler on every iteration).
        """
        for var, result in self.results.items():
            temp = TempFile.get()
            handler_source = Utils.open_cdf(self.variable_file.local_file)
            try:
                handler_temp = Utils.open_cdf(temp, 'w')
                try:
                    Utils.copy_variable(
                        handler_source, handler_temp, 'time', True, True)
                    var_res = handler_temp.createVariable(
                        '{0}'.format(var), float, ('time',))
                    var_res[...] = result[...]
                    var_res.units = 'degC'
                    var_res.comment = \
                        '{var} index computed at {region}'.format(
                            var=var,
                            region=self.region_metadata[var]
                        )
                finally:
                    handler_temp.close()
            finally:
                handler_source.close()
            self.generated[var].set_local_file(temp, diagnostic=self)