diff --git a/VERSION b/VERSION index b68884dd019aa160f491c3a61fcaa49d92f8d244..187366829bf28bb4094ebffa20e3e1959623e9de 100644 --- a/VERSION +++ b/VERSION @@ -1,2 +1,2 @@ -3.0.0b28 +3.0.0b40 diff --git a/diags.conf b/diags.conf index c096fcbddc64d2c7b29786814d10fe1001e1d101..38fc56a033b358f423e998b9165224335ae184cc 100644 --- a/diags.conf +++ b/diags.conf @@ -9,7 +9,6 @@ DATA_DIR = /esnas:/esarchive DATA_TYPE = exp # CMORization type to use. Important also for THREDDS as it affects variable name conventions. # Options: SPECS (default), PRIMAVERA, CMIP6 -DATA_CONVENTION = SPECS # Path to NEMO's mask and grid files needed for CDFTools CON_FILES = /esnas/autosubmit/con_files/ @@ -20,9 +19,9 @@ DIAGS = # DIAGS = OHC # Frequency of the data you want to use by default. Some diagnostics do not use this value: i.e. monmean always stores # its results at monthly frequency (obvious) and has a parameter to specify input's frequency. -FREQUENCY = 6hr +FREQUENCY = mon # Path to CDFTOOLS binaries -CDFTOOLS_PATH = +CDFTOOLS_PATH = ~jvegas/CDFTOOLS/bin # If true, copies the mesh files regardless of presence in scratch dir RESTORE_MESHES = False # Limits the maximum amount of threads used. Default: 0 (no limitation, one per virtual core available) @@ -33,10 +32,11 @@ MAX_CORES = 1 FORCE = False # If true, CMORizes ocean files. Default = True OCEAN_FILES = True +FILTER_FILES = # If true, CMORizes atmosphere files. Default = True -ATMOSPHERE_FILES = True +ATMOSPHERE_FILES = False # You can specify the variable to cmorize, in the way domain:var domain:var2 domain2:var -VARIABLE_LIST = ocean:tos +VARIABLE_LIST = # Variables to be CMORized from the grib atmospheric files, separated by comma. 
# You can also specify the levels to extract using the following syntax @@ -68,11 +68,10 @@ SERVER_URL = https://earth.bsc.es/thredds [EXPERIMENT] # Experiments parameters as defined in CMOR standard -INSTITUTE = IC3 -MODEL = EC-EARTH3 -NAME = windstress +INSTITUTE = BSC +MODEL = EC-EARTH # Model version: Available versions -MODEL_VERSION =Ec2.3_O1L46 +MODEL_VERSION =Ec3.2_O1L75 # Atmospheric output timestep in hours ATMOS_TIMESTEP = 6 # Ocean output timestep in hours @@ -86,12 +85,12 @@ OCEAN_TIMESTEP = 6 # if 2, fc00 # CHUNK_SIZE is the size of each data file, given in months # CHUNKS is the number of chunks. You can specify less chunks than present on the experiment -EXPID = a07o -STARTDATES = 20000201 20000501 20010201 20010501 20020201 20020501 20030201 20030501 20040201 20040501 20050201 20050501 20060201 20060501 20070201 20070501 20080201 20080501 20090201 20090501 -MEMBERS = 0 1 2 3 4 5 6 7 +EXPID = a0c2 +STARTDATES = 19900101 +MEMBERS = 0 MEMBER_DIGITS = 1 -CHUNK_SIZE = 7 -CHUNKS = 1 +CHUNK_SIZE = 12 +CHUNKS = 2 # CHUNKS = 1 diff --git a/doc/source/codedoc/general.rst b/doc/source/codedoc/general.rst index 5dc0a023ef3af8bc8399795ab95ff68ee29632bd..d7b2f88b41fc2cd0b539687ea8ece99701ff4fcc 100644 --- a/doc/source/codedoc/general.rst +++ b/doc/source/codedoc/general.rst @@ -19,6 +19,13 @@ earthdiagnostics.general.relink :show-inheritance: :members: +earthdiagnostics.general.relinkall +---------------------------------- +.. automodule:: earthdiagnostics.general.relinkall + :show-inheritance: + :members: + + earthdiagnostics.general.rewrite -------------------------------- .. automodule:: earthdiagnostics.general.rewrite diff --git a/doc/source/conf.py b/doc/source/conf.py index fdfc92c2d14744fd11f505435717d0d2914991b4..33a724e873d0e579779fcefcf15c954f643fe18f 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -64,7 +64,7 @@ copyright = u'2016, BSC-CNS Earth Sciences Department' # The short X.Y version. 
version = '3.0b' # The full version, including alpha/beta/rc tags. -release = '3.0.0b28' +release = '3.0.0b40' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -187,7 +187,7 @@ html_static_path = ['_static'] #html_file_suffix = None # Output file base name for HTML help builder. -htmlhelp_basename = 'EarthDiagnosticsdoc' +htmlhelp_basename = 'EarthDiagnosticsd' # -- Options for LaTeX output --------------------------------------------- diff --git a/doc/source/config_file.rst b/doc/source/config_file.rst new file mode 100644 index 0000000000000000000000000000000000000000..b4bd369813afe34a9155bd45422413ac5b959272 --- /dev/null +++ b/doc/source/config_file.rst @@ -0,0 +1,233 @@ +Configuration file options +========================== + +This section contains the list and explanation of all the options that are available in the configuration file. Use +it as a reference while preparing your configuration file. Each subsection will refer to the matching section from the +config file. Each subsection's explanation may itself be divided for the sake of clarity, but these further divisions +have nothing to do with the config file syntax itself. + +DIAGNOSTICS +----------- + +This section contains the general configuration for the diagnostics. The explanation has been divided into two subsections: the first +one will cover all the mandatory options that you must specify in every configuration, while the second will cover all +the optional configurations. + +Mandatory configurations +~~~~~~~~~~~~~~~~~~~~~~~~ + +* SCRATCH_DIR: + Temporary folder for the calculations. Final results will never be stored here. + +* DATA_DIR: + ':' separated list of folders to look for data in. It will look for files in the path $DATA_FOLDER/$EXPID and + $DATA_FOLDER/$DATA_TYPE/$MODEL/$EXPID + +* CON_FILES: + Folder containing mask and mesh files for the dataset. + +* FREQUENCY: + Default data frequency to be used by the diagnostics. 
Some diagnostics can override this configuration or even + ignore it completely. + + +Optional configurations +~~~~~~~~~~~~~~~~~~~~~~~ + +* DATA_ADAPTOR + This is used to choose the mechanism for storing and retrieving data. Options are CMOR (for our own experiments) or + THREDDS (for anything else). Default value is CMOR + +* DATA_TYPE + Type of the dataset to use. It can be exp, obs or recon. Default is exp. + +* DATA_CONVENTION + Convention to use for file paths and names and variable naming among other things. Can be SPECS, PRIMAVERA or CMIP6. + Default is SPECS. + +* CDFTOOLS_PATH + Path to the folder containing CDFTOOLS executables. By default it is empty, so CDFTOOLS binaries must be added to the + system path. + +* MAX_CORES + Maximum number of cores to use. By default the diagnostics will use all cores available to them. It is not + necessary when launching through a scheduler, as Earthdiagnostics can detect how many cores the scheduler has + allocated to it. + +EXPERIMENT +---------- + +This section contains options related to the experiment's definition or configuration. + +* MODEL + Name of the model used for the experiment. + +* MODEL_VERSION + Model version. Used to get the correct mask and mesh files + +* ATMOS_TIMESTEP + Time between outputs from the atmosphere. This is not the model simulation timestep! + +* OCEAN_TIMESTEP + Time between outputs from the ocean. This is not the model simulation timestep! + +* ATMOS_GRID + Atmospheric grid definition. Will be used as a default target for interpolation diagnostics. + +* INSTITUTE + Institute that made the experiment, observation or reconstruction + +* EXPID + Unique identifier for the experiment + +* NAME + Experiment's name. By default it is the EXPID. + +* STARTDATES + Startdates to run as a space separated list + +* MEMBERS + Members to run as a space separated integer list + +* MEMBER_DIGITS + Number of minimum digits to compose the member name. By default it is 1. 
For example, for member 1 member name + will be fc1 if MEMBER_DIGITS is 1 or fc01 if MEMBER_DIGITS is 2 + +* CHUNK_SIZE + Length of the chunks in months + +* CHUNKS + Number of chunks to run + +* CALENDAR + Calendar to use for date calculation. All calendars supported by Autosubmit are available. Default is 'standard' + +CMOR +---- + +In this section, you can control how the cmorization process will work. All options belonging to this section are optional. + +Cmorization options +~~~~~~~~~~~~~~~~~~~ + +These options control when and which variables will be cmorized. + +* FORCE + If True, launches the cmorization, regardless of existence of the extracted files or the package containing the + online-cmorized ones. If False, only the non-present chunks will be cmorized. Default value is False + +* FORCE_UNTAR + Unpacks the online-cmorized files regardless of existence of extracted files. If FORCE is True, this parameter has + no effect. If False, only the non-present chunks will be unpacked. Default value is False. + +* FILTER_FILES + Only cmorize original files containing any of the given strings. This is a space separated list. Default is the + empty string. + +* OCEAN_FILES + Boolean flag to activate or deactivate NEMO files cmorization. Default is True. + +* ATMOSPHERE_FILES + Boolean flag to activate or deactivate IFS files cmorization. Default is True. + +* USE_GRIB + Boolean flag to activate or deactivate GRIB files cmorization for the atmosphere. If activated and no GRIB files are present, + it will cmorize using the MMA files instead (as if it was set to False). Default is True. + +* CHUNKS + Space separated list of chunks to be cmorized. If not provided, all chunks are cmorized + +* VARIABLE_LIST + Space separated list of variables to cmorize. Variables must be specified as domain:var_name. 
If no one is specified, + all the variables will be cmorized + +Grib variables extraction +************************* + +These three options are used to configure the variables to be CMORized from the grib atmospheric files. +They must be specified using the IFS code in a list separated by commas. + +You can also specify the levels to extract using one of the following syntaxes: + +* VARIABLE_CODE +* VARIABLE_CODE:LEVEL, +* VARIABLE_CODE:LEVEL_1-LEVEL_2-...-LEVEL_N +* VARIABLE_CODE:MIN_LEVEL:MAX_LEVEL:STEP + +Some examples to clarify it further: +* Variable with code 129 at level 30000: 129:30000 +* Variable with code 129 at levels 30000, 40000 and 60000: 129:30000-40000-60000 +* Variable with code 129 at levels between 30000 and 60000 with 10000 intervals: + 129:30000:60000:10000 equivalent to 129:30000-40000-50000-60000 + +* ATMOS_HOURLY_VARS + Configuration of variables to be extracted on an hourly basis + +* ATMOS_DAILY_VARS + Configuration of variables to be extracted on a daily basis + +* ATMOS_MONTHLY_VARS + Configuration of variables to be extracted on a monthly basis + +Metadata options +~~~~~~~~~~~~~~~~ +All the options in this subsection will serve just to add the given values to the homonymous attributes in the +cmorized files. + +* ASSOCIATED_EXPERIMENT + Default value is 'to be filled' + +* ASSOCIATED_MODEL + Default value is 'to be filled' + +* INITIALIZATION_DESCRIPTION + Default value is 'to be filled' + +* INITIALIZATION_METHOD + Default value is '1' + +* PHYSICS_DESCRIPTION + Default value is 'to be filled' + +* PHYSICS_VERSION + Default value is '1' + +* SOURCE + Default value is 'to be filled' + + +THREDDS +------- + +For now, there is only one option for the THREDDS server configuration. + +* SERVER_URL + THREDDS server URL + + +ALIAS +----- + +This config file section is different from all the others because it does not contain a set of configurations. 
Instead, +in this section the user can define a set of aliases to be able to launch its most used configurations with ease. To do +this, the user must add an option with named after the desired alias and assign to it the configuration or configurations to launch +when this ALIAS is invoked. See the next example: + +.. code-block:: ini + + ALIAS_NAME = diag,opt1,opt2 diag,opt1new,opt2 + +In this case, the user has defined a new alias 'ALIAS' that can be used launch two times the diagnostic 'diag', +the first with the options 'opt1' and 'opt2' and the second replacing 'opt1' with 'opt1new'. + +In this example, configuring the DIAGS as + +.. code-block:: ini + + DIAGS = ALIAS_NAME + +will be identical to + +.. code-block:: ini + + DIAGS = diag,opt1,opt2 diag,opt1new,opt2 diff --git a/doc/source/diagnostic_list.rst b/doc/source/diagnostic_list.rst index 2087ccd24b66cbc5e4342fac29baa889ac960fcb..74c5b46a99b9958f9d4f00038ec2d413cfb35007 100644 --- a/doc/source/diagnostic_list.rst +++ b/doc/source/diagnostic_list.rst @@ -1,8 +1,10 @@ +# coding=utf-8 + Diagnostic list =============== In this section you have a list of the available diagnostics, with a small description of each one and a link to -the full documentation. To see what options are available for each diagnostic, see generate_jobs' documentation. +the full documentation. To see what options are available for each diagnostic, see generate_jobs documentation. Remember that diagnostics are specified separated by spaces while options are given separated by commas: @@ -10,102 +12,543 @@ Remember that diagnostics are specified separated by spaces while options are gi DIAGS = diag1 diag2,option1,option2 diag3 - General ------- -- att: - Writes a global attributte to all the netCDF files. - See :class:`~earthdiagnostics.general.attribute.Attribute` -- monmean: - Calculates the monthly mean of the given variable. 
- See :class:`~earthdiagnostics.general.monthlymean.MonthlyMean` - -- relink: - Regenerates the links created in the monthly_mean, daily_mean, folders. - See :class:`~earthdiagnostics.general.relink.Relink` - -- rewrite: - Just rewrites the CMOR output of a given variable. Useful to correct metadata or variable units. - See :class:`~earthdiagnostics.general.rewrite.Rewrite` - -- scale: - Scales a given variable using a given scale factor and offset. Useful to correct erros on the data. + +The diagnostics from this section are of general use and can be used with any variable you may have. Most of them are +meant to help you to solve usual issues that you may have with the data: incorrect metadata, scaled up or down variables, +links missing. This section also contains the diagnostic used to calculate the monthly means. + +att +~~~~ + +Writes a global attributte to all the netCDF files for a given variable. +See :class:`~earthdiagnostics.general.attribute.Attribute` + +Options: +******** + +1. Variable: + Variable name + +2. Domain: + Variable domain + +3. Attributte name: + Attributte to write + +4. Attribute value: + Atrribute's new value. Replace ',' with '&;' and ' ' with '&.' to avoid parsing errors when processing the diags + +5. Grid = '': + Variable grid. Only required in case that you want to use interpolated data. + +monmean +~~~~~~~ +Calculates the monthly mean for a given variable. See :class:`~earthdiagnostics.general.monthlymean.MonthlyMean` + +.. warning:: + + This diagnostic does not use the frequency configuration from the config file. You must specify the original + frequency when calling it. Otherwise, it will always try to use daily data. + +Options: +******** + +1. Variable: + Variable name + +2. Domain: + Variable domain + +3. Original frequency = daily: + Original frequency to use + +4. Grid = '': + Variable grid. Only required in case that you want to use interpolated data. 
+ +relink +~~~~~~ + +Regenerates the links created in the monthly_mean, daily_mean, etc folders for a given varible. +See :class:`~earthdiagnostics.general.relink.Relink` + +Options: +******** + +1. Variable: + Variable name + +2. Domain: + Variable domain + +3. Move old = + True: If True, any data founded in the target directory will be moved to another folder + (called FOLDER_NAME_old) instead of deleted. + +4. Grid = '': + Variable grid. Only required in case that you want to use interpolated data. + + +relinkall +~~~~~~~~~ + +Regenerates the links created in the monthly_mean, daily_mean, etc folders for all variables +See :class:`~earthdiagnostics.general.relinkall.RelinkAll` + +Options: +******** + +This diagnostic has no options + +rewrite: +~~~~~~~~ + +Just rewrites the CMOR output of a given variable. Useful to correct metadata or variable units. +See :class:`~earthdiagnostics.general.rewrite.Rewrite` + +Options: +******** + +1. Variable: + Variable name + +2. Domain: + Variable domain + +3. Grid = '': + Variable grid. Only required in case that you want to use interpolated data. + + +scale +~~~~~ + +Scales a given variable using a given scale factor and offset (NEW_VALUE = OLD_VALUE * scale + offset). Useful to +correct errors on the data. See :class:`~earthdiagnostics.general.scale.Scale` +Options: +******** + +1. Variable: + Variable name + +2. Domain: + Variable domain + +3. Scale value: + Scale factor for the variable + +4. Offset value: + Value to add to the original value after scaling + +5. Grid = '': + Variable grid. Only required in case that you want to use interpolated data. + +6. Min limit = NaN: + If there is any value below this threshold, scale will not be applied + +7. Max limit = NaN: + If there is any value above this threshold, scale will not be applied + Ocean ----- -- areamoc: - Compute an Atlantic MOC index. See :class:`~earthdiagnostics.ocean.areamoc.AreaMoc` -- averagesection: - Compute an average of a given zone. 
The variable MUST be in a regular grid - See :class:`~earthdiagnostics.ocean.averagesection.AverageSection` +The diagnostics from this section are meant to be used with NEMO variables. Some of them will compute new variables +while others just calculate means or sections for variables in the ORCA grid. The interpolation diagnostics are also +included here as they are usually used with variables in the ORCA grid. + +areamoc +~~~~~~~ + +Compute an Atlantic MOC index by averaging the meridional overturning +in a latitude band between 1km and 2km or any other index averaging the meridional overturning in +a given basin and a given domain. See :class:`~earthdiagnostics.ocean.areamoc.AreaMoc` + +.. warning:: + The MOC for the given basin must be calculated previously. Usually, it will suffice to call the 'moc' diagnostic + earlier in the DIAGS list. + +Options: +******** + +1. Min latitude: + Minimum latitude to compute + +2. Max latitude: + Maximum latitude to compute + +3. Min depth: + Minimum depth (in levels) + +4. Max depth: + Maximum depth (in levels) + +5. Basin = 'Global': + Basin to calculate the diagnostic on. + + +averagesection +~~~~~~~~~~~~~~ + +Compute an average of a given zone. The variable MUST be in a regular grid +See :class:`~earthdiagnostics.ocean.averagesection.AverageSection` + +Options: +******** + +1. Variable: + Variable to average + +2. Min longitude: + Minimum longitude to compute + +3. Max longitude: + Maximum longitude to compute + +4. Min latitude: + Minimum latitude to compute + +5. Max latitude: + Maximum latitude to compute + +6. Domain = ocean: + Variable domain + +convectionsites +~~~~~~~~~~~~~~~ + +Compute the intensity of convection in the four main convection sites. +See :class:`~earthdiagnostics.ocean.convectionsites.ConvectionSites` + +Options: +******** + +This diagnostic has no options + +cutsection +~~~~~~~~~~ + +Cuts a meridional or zonal section. 
See :class:`~earthdiagnostics.ocean.cutsection.CutSection` + +Options: +******** + +1. Variable: + Variable to cut the section on + +2. Zonal: + If True, calculates a zonal section. If False, it will be a meridional one + +3. Value: + Reference value for the section + +4. Domain = ocean: + Variable's domain + + +gyres +~~~~~ + +Compute the intensity of the subtropical and subpolar gyres. See :class:`~earthdiagnostics.ocean.gyres.Gyres` + +Options: +******** -- convectionsites: - Compute the intensity of convection in the four main convection sites. - See :class:`~earthdiagnostics.ocean.convectionsites.ConvectionSites` +This diagnostic has no options -- cutsection: - Cuts a meridional or zonal section. See :class:`~earthdiagnostics.ocean.cutsection.CutSection` +heatcontent +~~~~~~~~~~~ -- gyres: - Compute the intensity of the subtropical and subpolar gyres. See :class:`~earthdiagnostics.ocean.gyres.Gyres` +Compute the total and mean ocean heat content. See :class:`~earthdiagnostics.ocean.heatcontent.HeatContent` -- heatcontent: - Compute the total ocean heat content. See :class:`~earthdiagnostics.ocean.heatcontent.HeatContent` +Options: +******** -- heatcontentlayer: - Point-wise Ocean Heat Content in a specified ocean thickness. - See :class:`~earthdiagnostics.ocean.heatcontentlayer.HeatContentLayer` +1. Basin + Basin to calculate the heat content one -- interpolate: - 3-dimensional conservative interpolation to the regular atmospheric grid. - It can also be used for 2D (i,j) variables. See :class:`~earthdiagnostics.ocean.interpolate.Interpolate` +2. Mixed layer: + If 1, reduces the compuation to the mixed layer. If -1, excludes the mixed layer from the computations. + If 0, no effect. -- interpolateCDO: - Bilinear interpolation to a given grid using CDO. See :class:`~earthdiagnostics.ocean.interpolatecdo.InterpolateCDO` +3. Min depth: + Minimum depth for the calculation in levels. 
If 0, whole depth is used -- maxmoc: - Compute an Atlantic MOC index by finding the maximum of the annual mean meridional overturning in a - latitude / depth region See :class:`~earthdiagnostics.ocean.maxmoc.MaxMoc` +4. Max depth: + Maximum depth for the calculation in levels -- mixedlayerheatcontent: - Compute mixed layer heat content. - See :class:`~earthdiagnostics.ocean.mixedlayerheatcontent.MixedLayerHeatContent` +heatcontentlayer +~~~~~~~~~~~~~~~~ -- mixedlayersaltcontent: - Compute mixed layer salt content. See - :class:`~earthdiagnostics.ocean.mixedlayersaltcontent.MixedLayerSaltContent` +Point-wise Ocean Heat Content in a specified ocean thickness. +See :class:`~earthdiagnostics.ocean.heatcontentlayer.HeatContentLayer` -- moc: - Compute the MOC for oceanic basins. See :class:`~earthdiagnostics.ocean.moc.Moc` +Options: +******** -- psi: - Compute the barotropic stream function. See :class:`~earthdiagnostics.ocean.psi.Psi` +3. Min depth: + Minimum depth for the calculation in meteres -- siasiesiv: - Compute the sea ice extent , area and volume in both hemispheres or a specified region. - See :class:`~earthdiagnostics.ocean.siasiesiv.Siasiesiv` +4. Max depth: + Maximum depth for the calculation in meters -- verticalmean: - Chooses vertical level in ocean, or vertically averages between 2 or more ocean levels. - See :class:`~earthdiagnostics.ocean.verticalmean.VerticalMean` +5. Basin = 'Global': + Basin to calculate the heat content on. -- verticalmeanmeters: - Averages vertically any given variable. - See :class:`~earthdiagnostics.ocean.verticalmeanmeters.VerticalMeanMeters` + options_available = (DiagnosticIntOption('min_depth'), + DiagnosticIntOption('max_depth'), + DiagnosticBasinOption('basin', Basins.Global)) + +interpolate +~~~~~~~~~~~ + +3-dimensional conservative interpolation to the regular atmospheric grid. +It can also be used for 2D (i,j) variables. See :class:`~earthdiagnostics.ocean.interpolate.Interpolate` + +.. 
warning:: + This interpolation requires the pre-generated weights that can be found in '/esnas/autosubmit/con_files/weights'. + Make sure that they are available for your configuration. + +Options: +******** + +1. Target grid: + New grid for the data + +2. Variable: + Variable to interpolate + +3. Domain = ocean: + Variable's domain + +4. Invert latitude: + If True, inverts the latitude in the output file. + +interpolateCDO +~~~~~~~~~~~~~~ + +Bilinear interpolation to a given grid using CDO. See :class:`~earthdiagnostics.ocean.interpolatecdo.InterpolateCDO` + +.. warning:: + This interpolation is non-conservative, so treat its output with care. It has the advantage that does not require the + pre-generated weights so it can be used when the 'interp' diagnostic is not available. + +Options: +******** + +1. Variable: + variable to interpolate + +2. Target grid: + Variable domain + +3. Domain = ocean: + Variable's domain + +4. Mask oceans = True: + If True, replaces the values in the ocean by NaN. You must only set it to false if, for some reason, you are + interpolating an atmospheric or land variable that is stored in the NEMO grid (yes, this can happen, i.e. with tas). + + +maxmoc +~~~~~~ + +Compute an Atlantic MOC index by finding the maximum of the annual mean meridional overturning in a +latitude / depth region. Output from this diagnostic will be always in yearly frequency. +See :class:`~earthdiagnostics.ocean.maxmoc.MaxMoc` + +.. warning:: + The MOC for the given basin must be calculated previously. Usually, it will suffice to call the 'moc' diagnostic + earlier in the DIAGS list. + +.. warning:: + This diagnostic can only be computed for full years. It will discard incomplete years and only compute the index in + those with the full 12 months available. + +Options: +******** + +1. Min latitude: + Minimum latitude to compute + +2. Max latitude: + Maximum latitude to compute + +3. Min depth: + Minimum depth (in levels) + +4. 
Max depth: + Maximum depth (in levels) + +5. Basin = 'Global': + Basin to calculate the diagnostic on. + +mixedlayerheatcontent +~~~~~~~~~~~~~~~~~~~~~ + +Compute mixed layer heat content. +See :class:`~earthdiagnostics.ocean.mixedlayerheatcontent.MixedLayerHeatContent` + +Options: +******** + +This diagnostic has no options + +mixedlayersaltcontent +~~~~~~~~~~~~~~~~~~~~~ + +Compute mixed layer salt content. See :class:`~earthdiagnostics.ocean.mixedlayersaltcontent.MixedLayerSaltContent` + +Options: +******** + +This diagnostic has no options + +moc +~~~~ + +Compute the MOC for oceanic basins. Required for 'areamoc' and 'maxmoc' See :class:`~earthdiagnostics.ocean.moc.Moc` + +Options: +******** + +This diagnostic has no options + +psi +~~~~ + +Compute the barotropic stream function. See :class:`~earthdiagnostics.ocean.psi.Psi` + +Options: +******** + +This diagnostic has no options + +regmean +~~~~~~~ + +Compute an average of a given zone using cdfmean from CDFTOOLS +See :class:`~earthdiagnostics.ocean.regionmean.RegionMean` + +.. warning:: + This diagnostic is a recent addition and needs more testing to be reliable + +Options: +******** + +1. Domain: + Variable domain + +2. Variable: + Variable to average + +3. Grid: + NEMO grid used to store the variable: T, U, V ... + +4. Basin = Global: + Basin to compute + +5. Save 3d = False: + If True, it also stores the average per level + +6. Min depth: + Minimum depth to compute in levels. If -1, average from the surface + +7. Max depth: + Maximum depth to compute in levels. If -1, average to the bottom + + + +siasiesiv +~~~~~~~~~ + +Compute the sea ice extent , area and volume in both hemispheres or a specified region. +See :class:`~earthdiagnostics.ocean.siasiesiv.Siasiesiv` + +Options: +******** + +1. Basin = 'Global': + Basin to restrict the computation to. + +verticalmean +~~~~~~~~~~~~ + +Chooses vertical level in ocean, or vertically averages between 2 or more ocean levels. 
+See :class:`~earthdiagnostics.ocean.verticalmean.VerticalMean` + +Options: +******** + +1. Variable: + Variable to average + +2. Min depth = -1: + Minimum level to compute. If -1, average from the surface + +3. Max depth: + Maximum level to compute. If -1, average to the bottom + + +verticalmeanmeters +~~~~~~~~~~~~~~~~~~ + +Averages vertically any given variable. +See :class:`~earthdiagnostics.ocean.verticalmeanmeters.VerticalMeanMeters` + +Options: +******** + +1. Variable: + Variable to average + +2. Min depth = -1: + Minimum depth to compute in meters. If -1, average from the surface + +3. Max depth: + Maximum depth to compute in meters. If -1, average to the bottom Statistics ---------- -- climpercent: - Calculates the specified climatological percentile of a given variable. - See :class:`~earthdiagnostics.statistics.climatologicalpercentile.ClimatologicalPercentile` +climpercent +~~~~~~~~~~~ + +Calculates the specified climatological percentile of a given variable. +See :class:`~earthdiagnostics.statistics.climatologicalpercentile.ClimatologicalPercentile` + +Options: +******** + +1. Domain: + Variable's domain + +2. Variable: + Variable to compute diagnostic on + +3. Leadtimes: + Leadtimes to compute + +4. Bins: + Number of bins to use to discretize the variable + +monpercent +~~~~~~~~~~ + +Calculates the specified monthly percentile of a given variable. +See :class:`~earthdiagnostics.statistics.monthlypercentile.MonthlyPercentile` + +Options: +******** +1. Domain: + Variable's domain -- monpercent: - Calculates the specified monthly percentile of a given variable. - See :class:`~earthdiagnostics.statistics.monthlypercentile.MonthlyPercentile` +2. Variable: + Variable to compute diagnostic on +3. 
Percentiles: + List of requested percentiles ('-' separated) diff --git a/doc/source/index.rst b/doc/source/index.rst index c7858dc9f19a612427cff53bb5a6be5e26fc9152..af80a81d839e7576465d416b928020bcaea78874 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -10,6 +10,7 @@ Welcome to Earth Diagnostics's documentation! :maxdepth: 3 tutorial + config_file diagnostic_list tips errors diff --git a/doc/source/tips.rst b/doc/source/tips.rst index 0cd7de979daf07ea96ad15066099f5d485b315fd..b9f886619a5d7e32063ea7f951a7d3aa4ff8ea72 100644 --- a/doc/source/tips.rst +++ b/doc/source/tips.rst @@ -21,5 +21,10 @@ NEMO files Unlike the bash version of the ocean diagnostics, this program keeps the NEMO files in the scratch folder so you can launch different configurations for the same experiment with reduced start time. You will need to remove the experiment's -folder in the scratch directory at the end of the experiment to avoid wasting resources. +folder in the scratch directory at the end of the experiment to avoid wasting resources. To do this, just use +.. code-block:: bash + + earthdiags -f PATH_TO_CONF --clean + +If you plan to run the earthdiagnostics only once, you can add this line after the execution \ No newline at end of file diff --git a/doc/source/tutorial.rst b/doc/source/tutorial.rst index 136c7bc307c5f4a0a2d513c89896392922ef4627..c2b99900d72bea39a2fe0f65ac5a2ccf4811c0f6 100644 --- a/doc/source/tutorial.rst +++ b/doc/source/tutorial.rst @@ -11,18 +11,27 @@ From now on this tutorial will guide you through all the process from installati Installation ------------ -For now, you only have one option: download the diagnostics directly from BSC-ES's Gitlab: +If you have access to the BSC-ES machines, you don't need to install it. Just use the available module: + +.. code-block:: + + module load EarthDiagnostics + + +In case that you need a custom installation for development or can not use the BSC-ES machines, +install it from BSC-ES GitLab repository: .. 
code-block:: sh - git clone https://earth.bsc.es/gitlab/es/ocean_diagnostics.git + pip install git+https://earth.bsc.es/gitlab/es/ocean_diagnostics.git You will also need -* CDO version 1.6.9 (other versions could work, but this is the one we use) +* CDO version 1.7.2 (other versions could work, but this is the one we use) * NCO version 4.5.4 or newer -* Python 2.7 or newer (but no 3.x) with Autosubmit, CDO and NCO packages, among others. A virtual environment with all requisites fullfilled is available at /shared/earth/ClimatePrediction/EarthDiagnostics -* Access to CDFTOOLS_3.0 executables for BSC-ES. At this point, those are located at /shared/earth/ClimatePrediction/CDFTOOLS_CMOR/bin. +* Python 2.7 or newer (but no 3.x) with bscearth.utils, CDO and NCO packages, among others. +* Access to CDFTOOLS_3.0 executables for BSC-ES. The source code is available on Github (https://github.com/jvegasbsc/CDFTOOLS) and it can be compiled with CMake + Creating a config file ---------------------- diff --git a/earthdiagnostics/EarthDiagnostics.pdf b/earthdiagnostics/EarthDiagnostics.pdf index c81cd0eb00aa2b2eb28d562c0bf5843075455420..cfc8af858302bbaee43ef698e7be55de1fca2018 100644 Binary files a/earthdiagnostics/EarthDiagnostics.pdf and b/earthdiagnostics/EarthDiagnostics.pdf differ diff --git a/earthdiagnostics/cdftools.py b/earthdiagnostics/cdftools.py index 96368d7a88fdc17628f948d3ce2a878c1a5f53c5..792853ba2100f58f53db5961ef57ef7cbda5758f 100644 --- a/earthdiagnostics/cdftools.py +++ b/earthdiagnostics/cdftools.py @@ -1,7 +1,7 @@ # coding=utf-8 from earthdiagnostics.utils import Utils import os -from autosubmit.config.log import Log +from bscearth.utils.log import Log class CDFTools(object): @@ -16,7 +16,7 @@ class CDFTools(object): self.path = path # noinspection PyShadowingBuiltins - def run(self, command, input, output=None, options=None, log_level=Log.INFO): + def run(self, command, input, output=None, options=None, log_level=Log.INFO, input_option=None): """ Runs one 
of the CDFTools @@ -35,6 +35,8 @@ class CDFTools(object): line = [os.path.join(self.path, command)] self._check_command_existence(line[0]) + if input_option: + line.append(input_option) self._check_input(command, input, line) if options: if isinstance(options, basestring): diff --git a/earthdiagnostics/cmor_tables/cmip6 b/earthdiagnostics/cmor_tables/cmip6 index 8bae68e85e2dfa6ecd71bccb94479344d3acf75c..8415b26f6dda7b699501c6963a0ec6cb155eb1ab 160000 --- a/earthdiagnostics/cmor_tables/cmip6 +++ b/earthdiagnostics/cmor_tables/cmip6 @@ -1 +1 @@ -Subproject commit 8bae68e85e2dfa6ecd71bccb94479344d3acf75c +Subproject commit 8415b26f6dda7b699501c6963a0ec6cb155eb1ab diff --git a/earthdiagnostics/cmor_tables/default.csv b/earthdiagnostics/cmor_tables/default.csv index fc2742efab4bc0e096eccfd8683b659f2f838646..811422d90c4e40f3db21315432063028d98ec89c 100644 --- a/earthdiagnostics/cmor_tables/default.csv +++ b/earthdiagnostics/cmor_tables/default.csv @@ -295,5 +295,46 @@ rsdo,rsds,downwelling_shortwave_flux_in_sea_water,Downwelling Shortwave Radiatio wo,wo,sea_water_upward_velocity,Sea Water Upward Velocity ,ocean,,,,,, w2o,wosq,square_of_sea_water_upward_velocity,Square of Sea Water Upward Velocity ,ocean,,,,,, difvho,difvho,ocean_vertical_heat_diffusivity,Ocean Vertical Heat Diffusivity,ocean,,,,,, -vovematr,wmo,upward_ocean_mass_transport,Upward Ocean Mass Transport ,ocean,,,,,, +vovematr,wmo,upward_ocean_mass_transport,Upward Ocean Mass Transport,ocean,,,,,, qtr_ice,qtr,shortwave_flux_transmitted_through_ice,Shortwave Flux Transmitted Through The Ice,seaIce,,,,,, +poc,poc,small_organic_carbon_concentration,Small organic carbon Concentration,ocnBgchem,,,,,, +nanophy,nanophy,nanopthyoplankton_concentration,(Nano)Phytoplankton Concentration,ocnBgchem,,,,,, +dsi,dsi,diatoms_silicate_concentration,Diatoms Silicate Concentration,ocnBgchem,,,,,, +goc,goc,big_organic_carbon_concentration,Big organic carbon Concentration,ocnBgchem,,,,,, 
+sfe,sfe,small_iron_particles_concentration,Small iron particles Concentration,ocnBgchem,,,,,, +nfe,nfe,nano_iron_concentration,Nano iron Concentration,ocnBgchem,,,,,, +nchl,nchl,nano_chlorophyl_concentration,Nano chlorophyl Concentration,ocnBgchem,,,,,, +pno3tot,pno3tot,global_mean_nitrate_concentration,Global mean nitrate concentration,ocnBgchem,,,,,, +psiltot,psiltot,global_mean_silicate_concentration,Global mean silicate concentration,ocnBgchem,,,,,, +palktot,palktot,global_mean_alkalinity_concentration,Global mean alkalinity concentration,ocnBgchem,,,,,, +pfertot,pfertot,global_mean_iron_concentration,Global mean iron concentration,ocnBgchem,,,,,, +tcflx,tcflx,total_flux_carbon_out_of_the_ocean,total Flux of Carbon out of the ocean,ocnBgchem,,,,,, +tcflxcum,tcflxcum,cumulative_total_flux_of_carbon_out_of_the_ocean,cumulative total Flux of Carbon out of the ocean,ocnBgchem,,,,,, +c-export,c-export,total_carbon_export_at_100m,total Carbon export at 100m,ocnBgchem,,,,,, +tintpp,tintpp,global_total_integrated_primary_production,global total integrated primary production,ocnBgchem,,,,,, +tnfix,tnfix,global_total_nitrogen_fixation,global total nitrogen fixation,ocnBgchem,,,,,, +tdenit,tdenit,total_denitrification,Total denitrification,ocnBgchem,,,,,, +inttpp,inttpp,total_primary_production_of_phyto,Total Primary production of phyto,ocnBgchem,,,,,, +intppnew,intppnew,new_primary_production_of_phyto,New Primary production of phyto,ocnBgchem,,,,,, +intppphy,intppphy,vertically_integrated_primary_production_by_nanophy,Vertically integrated primary production by nanophy,ocnBgchem,,,,,, +ppphy,ppphy,primary_production_of_nanooplakton,Primary production of nanooplakton,ocnBgchem,,,,,, +intpbcal,intpbcal,vertically_integrated_of_calcite_productdic_fluxion,Vertically integrated of calcite productDIC fluxion,ocnBgchem,,,,,, +cflx,cflx,dic_flux,DIC flux,ocnBgchem,,,,,, +remin,remin,oxic_remineralization_of_om,Oxic remineralization of OM,ocnBgchem,,,,,, 
+denit,denit,anoxic_remineralization_of_om,Anoxic remineralization of OM,ocnBgchem,,,,,, +nfix,nfix,nitrogen_fixation,Nitrogen fixation,ocnBgchem,,,,,, +sdenit,sdenit,nitrate_reduction_in_the_sediments,Nitrate reduction in the sediments,ocnBgchem,,,,,, +par,par,photosynthetically_available_radiation,photosynthetically Available Radiation,ocnBgchem,,,,,, +lnnut,lnnut,nutrient_limitation_term_in_nanophyto,Nutrient limitation term in Nanophyto,ocnBgchem,,,,,, +ldnut,ldnut,nutrient_limitation_term_in_diatoms,Nutrient limitation term in Diatoms,ocnBgchem,,,,,, +lnfe,lnfe,iron_limitation_term_in_nanophyoto,Iron limitation term in Nanophyoto,ocnBgchem,,,,,, +lnlight,lnlight,light_limitation_term_in_nanophyto,Light limitation term in Nanophyto,ocnBgchem,,,,,, +ldlight,ldlight,light_limitation_term_in_diatoms,Light limitation term in Diatoms,ocnBgchem,,,,,, +graz1,graz1,grazing_by_microzooplankton,Grazing by microzooplankton,ocnBgchem,,,,,, +graz2,graz2,grazing_by_mesozooplankto_,Grazing by mesozooplankton,ocnBgchem,,,,,, +mumax,mumax,maximum_growth_rate,Maximum growth rate,ocnBgchem,,,,,, +mun,mun,realized_growth_rate_for_nanophyto,Realized growth rate for nanophyto,ocnBgchem,,,,,, +mud,mud,realized_growth_rate_for_diatomes,Realized growth rate for diatomes,ocnBgchem,,,,,, +ppnewn,ppnewn,new_primary_production_of_nanophyto,New Primary production of nanophyto,ocnBgchem,,,,,, +ppnewd,ppnewd,new_primary_production_of_diatoms,New Primary production of diatoms,ocnBgchem,,,,,, +dic,dic,disolved_inorganic_carbon,Disolved Inorganic Carbon,ocnBgchem,,,,,, diff --git a/earthdiagnostics/cmorizer.py b/earthdiagnostics/cmorizer.py index 9ed85f87ed304c0bd5e89004a6a6d222b1f85de9..d704f87f52ad5dad1fc3aa065d157d4caff710bf 100644 --- a/earthdiagnostics/cmorizer.py +++ b/earthdiagnostics/cmorizer.py @@ -7,12 +7,13 @@ import os from datetime import datetime import pygrib -from autosubmit.config.log import Log -from autosubmit.date.chunk_date_lib import parse_date, chunk_end_date, 
previous_day, date2str, add_months +from bscearth.utils.log import Log +from bscearth.utils.date import parse_date, chunk_end_date, previous_day, date2str, add_months from earthdiagnostics.frequency import Frequency, Frequencies from earthdiagnostics.modelingrealm import ModelingRealms from earthdiagnostics.utils import TempFile, Utils +from earthdiagnostics.variable import VariableManager class Cmorizer(object): @@ -35,6 +36,7 @@ class Cmorizer(object): 'time_counter_bounds', 'ncatice', 'nav_lat_grid_V', 'nav_lat_grid_U', 'nav_lat_grid_T', 'nav_lon_grid_V', 'nav_lon_grid_U', 'nav_lon_grid_T', 'depth', 'depth_2', 'depth_3', 'depth_4', + 'depth_bnds', 'depth_2_bnds', 'depth_3_bnds', 'depth_4_bnds', 'mlev', 'hyai', 'hybi', 'hyam', 'hybm') ALT_COORD_NAMES = {'time_counter': 'time', 'time_counter_bnds': 'time_bnds', 'time_counter_bounds': 'time_bnds', @@ -51,7 +53,7 @@ class Cmorizer(object): self.original_files_path = os.path.join(self.config.data_dir, self.experiment.expid, 'original_files', self.startdate, self.member_str, 'outputs') self.atmos_timestep = None - self.cmor_scratch = os.path.join(self.config.scratch_dir, 'CMOR', self.startdate, self.member_str) + self.cmor_scratch = str(os.path.join(self.config.scratch_dir, 'CMOR', self.startdate, self.member_str)) def cmorize_ocean(self): """ @@ -70,6 +72,7 @@ class Cmorizer(object): tar_folder = os.path.join(self.original_files_path, '{0}*'.format(prefix)) tar_files = glob.glob(tar_folder) tar_files.sort() + count = 1 for tarfile in tar_files: if not self.cmorization_required(self.get_chunk(os.path.basename(tarfile)), ModelingRealms.ocean): @@ -86,27 +89,53 @@ class Cmorizer(object): Log.error('Could not CMORize oceanic file {0}: {1}', count, ex) count += 1 + def _filter_files(self, file_list): + if not self.cmor.filter_files: + return file_list + filtered = list() + filters = self.cmor.filter_files.split(' ') + for filename in file_list: + if any(f in filename for f in filters): + filtered.append(filename) + else: 
+ os.remove(filename) + if len(filtered) == 0: + Log.warning('Filters {0} do not match any of the files', filters) + return filtered + def _cmorize_nc_files(self): - for filename in glob.glob(os.path.join(self.cmor_scratch, '*.nc')): + nc_files = glob.glob(os.path.join(self.cmor_scratch, '*.nc')) + for filename in nc_files: self._cmorize_nc_file(filename) + self._clean_cmor_scratch() def _correct_fluxes(self): - fluxes_vars = ("prsn", "rss", "rls", "rsscs", "rsds", "rlds", "hfss", 'hfls') + fluxes_vars = [self.data_manager.variable_list.get_variable(cmor_var, True).short_name + for cmor_var in ("prsn", "rss", "rls", "rsscs", "rsds", "rlds", "hfss", 'hfls')] + for filename in glob.glob(os.path.join(self.cmor_scratch, '*.nc')): handler = Utils.openCdf(filename) for varname in handler.variables.keys(): cmor_var = self.data_manager.variable_list.get_variable(varname, True) if cmor_var is None or cmor_var.short_name not in fluxes_vars: continue - handler.variables[varname][:] = handler.variables[varname][:] / self.experiment.atmos_timestep * 3600 + handler.variables[varname][:] = handler.variables[varname][:] / (self.experiment.atmos_timestep * 3600) handler.close() def _unpack_tar_file(self, tarfile): - if os.path.exists(self.cmor_scratch): - shutil.rmtree(self.cmor_scratch) + self._clean_cmor_scratch() os.makedirs(self.cmor_scratch) Utils.untar((tarfile,), self.cmor_scratch) - Utils.unzip(glob.glob(os.path.join(self.cmor_scratch, '*.gz'))) + zip_files = glob.glob(os.path.join(self.cmor_scratch, '*.gz')) + for zip_file in self._filter_files(zip_files): + try: + Utils.unzip(zip_file) + except Utils.UnzipException as ex: + Log.error('File {0} could not be unzipped: {1}', tarfile, ex) + + def _clean_cmor_scratch(self): + if os.path.exists(self.cmor_scratch): + shutil.rmtree(self.cmor_scratch) def _merge_mma_files(self, tarfile): temp = TempFile.get() @@ -173,8 +202,8 @@ class Cmorizer(object): os.path.exists(self.get_original_grib_path(chunk_start, 'SH')): if 
self.cmorization_required(chunk, ModelingRealms.atmos): - chunk_end = chunk_end_date(chunk_start, self.experiment.chunk_size, 'month', 'standard') - chunk_end = previous_day(chunk_end, 'standard') + chunk_end = chunk_end_date(chunk_start, self.experiment.chunk_size, 'month', self.experiment.calendar) + chunk_end = previous_day(chunk_end, self.experiment.calendar) Log.info('CMORizing chunk {0}-{1}', date2str(chunk_start), date2str(chunk_end)) try: for grid in ('SH', 'GG'): @@ -186,12 +215,12 @@ class Cmorizer(object): except Exception as ex: Log.error('Can not cmorize GRIB file for chunk {0}-{1}: {2}', date2str(chunk_start), date2str(chunk_end), ex) - chunk_start = chunk_end_date(chunk_start, self.experiment.chunk_size, 'month', 'standard') + chunk_start = chunk_end_date(chunk_start, self.experiment.chunk_size, 'month', self.experiment.calendar) chunk += 1 def cmorize_grib_file(self, chunk_end, chunk_start, grid): for month in range(0, self.experiment.chunk_size): - current_date = add_months(chunk_start, month, 'standard') + current_date = add_months(chunk_start, month, self.experiment.calendar) original_gribfile = self.get_original_grib_path(current_date, grid) Log.info('Processing month {1}', grid, date2str(current_date)) gribfile = self.get_scratch_grib_path(current_date, grid) @@ -200,27 +229,10 @@ class Cmorizer(object): Utils.copy_file(original_gribfile, gribfile) self._obtain_atmos_timestep(gribfile) + full_file = self._get_monthly_grib(current_date, gribfile, grid) + self._unpack_grib(full_file, gribfile, grid) - prev_gribfile = self.get_scratch_grib_path(add_months(current_date, -1, 'standard'), grid) - if os.path.exists(prev_gribfile): - self._merge_grib_files(current_date, prev_gribfile, gribfile) - full_file = 'ICM' - else: - full_file = gribfile - - Log.info('Unpacking... 
') - # remap on regular Gauss grid - if grid == 'SH': - Utils.cdo.splitparam(input='-sp2gpl {0}'.format(full_file), output=gribfile + '_', - options='-f nc4') - else: - Utils.cdo.splitparam(input=full_file, output=gribfile + '_', options='-R -f nc4') - # total precipitation (remove negative values) - Utils.cdo.setcode(228, input='-setmisstoc,0 -setvrange,0,Inf -add ' - '{0}_{{142,143}}.128.nc'.format(gribfile), - output='{0}_228.128.nc'.format(gribfile)) - Utils.remove_file('ICM') - next_gribfile = self.get_original_grib_path(add_months(current_date, 1, 'standard'), grid) + next_gribfile = self.get_original_grib_path(add_months(current_date, 1, self.experiment.calendar), grid) if not os.path.exists(next_gribfile): os.remove(gribfile) @@ -241,6 +253,29 @@ class Cmorizer(object): self._merge_and_cmorize_atmos(chunk_start, chunk_end, grid, '{0}hr'.format(self.atmos_timestep)) + def _unpack_grib(self, full_file, gribfile, grid): + Log.info('Unpacking... ') + # remap on regular Gauss grid + if grid == 'SH': + Utils.cdo.splitparam(input='-sp2gpl {0}'.format(full_file), output=gribfile + '_', + options='-f nc4') + else: + Utils.cdo.splitparam(input=full_file, output=gribfile + '_', options='-R -f nc4') + # total precipitation (remove negative values) + Utils.cdo.setcode(228, input='-setmisstoc,0 -setvrange,0,Inf -add ' + '{0}_{{142,143}}.128.nc'.format(gribfile), + output='{0}_228.128.nc'.format(gribfile)) + Utils.remove_file('ICM') + + def _get_monthly_grib(self, current_date, gribfile, grid): + prev_gribfile = self.get_scratch_grib_path(add_months(current_date, -1, self.experiment.calendar), grid) + if os.path.exists(prev_gribfile): + self._merge_grib_files(current_date, prev_gribfile, gribfile) + full_file = 'ICM' + else: + full_file = gribfile + return full_file + def get_scratch_grib_path(self, current_date, grid): return os.path.join(self.config.scratch_dir, self._get_grib_filename(grid, current_date)) @@ -312,9 +347,9 @@ class Cmorizer(object): 
parse_date(file_parts[1]) frequency = Frequency('m') except ValueError: - frequency = Frequency(file_parts[1][1]) + frequency = Frequency(file_parts[1]) else: - frequency = Frequency(file_parts[1][1]) + frequency = Frequency(file_parts[1]) return frequency def _contains_requested_variables(self, filename): @@ -334,13 +369,14 @@ class Cmorizer(object): :param variable: variable's name :type variable: str """ - temp = TempFile.get() - alias, var_cmor = self.data_manager.variable_list.get_variable_and_alias(variable) + alias, var_cmor = VariableManager().get_variable_and_alias(variable) if var_cmor is None: return + if not self.cmor.cmorize(var_cmor): return - frequency = Frequency.parse(frequency) + + temp = TempFile.get() Utils.nco.ncks(input=file_path, output=temp, options='-v {0}'.format(variable)) self._rename_level_variables(temp, var_cmor) @@ -361,6 +397,7 @@ class Cmorizer(object): self.data_manager.send_file(temp, var_cmor.domain, var_cmor.short_name, self.startdate, self.member, frequency=frequency, rename_var=variable, date_str=date_str, region=region, move_old=True, grid=alias.grid, cmorized=True) + Log.info('Variable {0.domain}:{0.short_name} processed', var_cmor) def get_date_str(self, file_path): file_parts = os.path.basename(file_path).split('_') @@ -382,7 +419,7 @@ class Cmorizer(object): current_date = parse_date(self.startdate) chunk = 1 while current_date < chunk_start: - current_date = chunk_end_date(current_date, self.experiment.chunk_size, 'month', 'standard') + current_date = chunk_end_date(current_date, self.experiment.chunk_size, 'month', self.experiment.calendar) chunk += 1 if current_date != chunk_start: @@ -409,12 +446,6 @@ class Cmorizer(object): if var_cmor.domain == ModelingRealms.atmos: Utils.rename_variables(temp, {'depth': 'plev'}, False, True) - @staticmethod - def translate_frequency(frequency): - if frequency == 'h': - frequency = '6hr' - return Frequency(frequency) - @staticmethod def _merge_grib_files(current_month, 
prev_gribfile, gribfile): Log.info('Merging data from different files...') @@ -508,12 +539,12 @@ class Cmorizer(object): '{0}_*_{1}.nc'.format(self._get_grib_filename(grid, chunk_start), frequency))) for first_file in files: shutil.move(first_file, merged_file) - current_month = add_months(chunk_start, 1, 'standard') + current_month = add_months(chunk_start, 1, self.experiment.calendar) while current_month < chunk_end: month_file = first_file.replace('+{0}.grb'.format(date2str(chunk_start)[:-2]), '+{0}.grb'.format(date2str(current_month)[:-2])) Utils.concat_variables(month_file, merged_file, True) - current_month = add_months(current_month, 1, 'standard') + current_month = add_months(current_month, 1, self.experiment.calendar) self._cmorize_nc_file(merged_file) @@ -583,7 +614,11 @@ class Cmorizer(object): return len(gribfiles) > 0 def cmorization_required(self, chunk, domain): - return self.config.cmor.force or not self.data_manager.is_cmorized(self.startdate, self.member, chunk, domain) + if not self.config.cmor.chunk_cmorization_requested(chunk): + return False + if self.config.cmor.force: + return True + return not self.data_manager.is_cmorized(self.startdate, self.member, chunk, domain) class CMORException(Exception): diff --git a/earthdiagnostics/cmormanager.py b/earthdiagnostics/cmormanager.py index 86259cc40e068aecd45608bd16894bde7cb533d8..dcfed271f3e1607756959351bbc8d949d81fd21a 100644 --- a/earthdiagnostics/cmormanager.py +++ b/earthdiagnostics/cmormanager.py @@ -1,11 +1,10 @@ # coding=utf-8 import glob -import shutil from datetime import datetime import os -from autosubmit.config.log import Log -from autosubmit.date.chunk_date_lib import parse_date, chunk_start_date, chunk_end_date, previous_day +from bscearth.utils.log import Log +from bscearth.utils.date import parse_date, chunk_start_date, chunk_end_date, previous_day from earthdiagnostics.cmorizer import Cmorizer from earthdiagnostics.datamanager import DataManager, NetCDFFile @@ -32,10 +31,14 @@ 
class CMORManager(DataManager): if os.path.isdir(os.path.join(data_folder, self.experiment.expid)): self.config.data_dir = data_folder break + test_folder = os.path.join(data_folder, self.experiment.model.lower().replace('-', '')) + if os.path.isdir(os.path.join(test_folder, self.experiment.expid)): + self.config.data_dir = test_folder + break - data_folder = os.path.join(data_folder, self.config.data_type, experiment_folder) - if os.path.isdir(os.path.join(data_folder, self.experiment.expid)): - self.config.data_dir = data_folder + test_folder = os.path.join(data_folder, self.config.data_type, experiment_folder) + if os.path.isdir(os.path.join(test_folder, self.experiment.expid)): + self.config.data_dir = test_folder break if not self.config.data_dir: @@ -44,7 +47,8 @@ class CMORManager(DataManager): def file_exists(self, domain, var, startdate, member, chunk, grid=None, box=None, frequency=None, vartype=VariableType.MEAN): - filepath = self.get_file_path(startdate, member, domain, var, chunk, frequency, box, grid, None, None) + cmor_var = self.variable_list.get_variable(var) + filepath = self.get_file_path(startdate, member, domain, var, cmor_var, chunk, frequency, grid, None, None) # noinspection PyBroadException try: @@ -78,14 +82,16 @@ class CMORManager(DataManager): :return: path to the copy created on the scratch folder :rtype: str """ - filepath = self.get_file_path(startdate, member, domain, var, chunk, frequency, box, grid, None, None) + cmor_var = self.variable_list.get_variable(var) + var = self._get_final_var_name(box, var) + filepath = self.get_file_path(startdate, member, domain, var, cmor_var, chunk, frequency, grid, None, None) temp_path = TempFile.get() Utils.copy_file(filepath, temp_path) return temp_path - def get_file_path(self, startdate, member, domain, var, chunk, frequency, - box=None, grid=None, year=None, date_str=None): + def get_file_path(self, startdate, member, domain, var, cmor_var, chunk, frequency, + grid=None, year=None, 
date_str=None): """ Returns the path to a concrete file :param startdate: file's startdate @@ -100,8 +106,6 @@ class CMORManager(DataManager): :type chunk: int :param frequency: file's frequency :type frequency: Frequency - :param box: file's box - :type box: Box :param grid: file's grid :type grid: str|NoneType :param year: file's year @@ -113,16 +117,15 @@ class CMORManager(DataManager): """ if not frequency: frequency = self.config.frequency - var = self._get_final_var_name(box, var) folder_path = self._get_full_cmor_folder_path(startdate, member, domain, var, frequency, grid) - file_name = self._get_cmor_file_name(startdate, member, domain, var, frequency, chunk, year, date_str, grid) + file_name = self._get_cmor_file_name(startdate, member, domain, var, cmor_var, frequency, + chunk, year, date_str, grid) filepath = os.path.join(folder_path, file_name) return filepath - def _get_cmor_file_name(self, startdate, member, domain, var, frequency, chunk, year, date_str, grid): - cmor_var = self.variable_list.get_variable(var) + def _get_cmor_file_name(self, startdate, member, domain, var, cmor_var, frequency, chunk, year, date_str, grid, ): if cmor_var is None: cmor_table = domain.get_table(frequency, self.config.data_convention) else: @@ -146,22 +149,18 @@ class CMORManager(DataManager): if self.config.data_convention == 'specs': - file_name = '{0}_{1}_{2}_{3}_S{4}_r{5}i1p1{6}'.format(var, - cmor_table.name, - self.experiment.model, - self.experiment.experiment_name, - startdate, - member + 1, - time_bound) + file_name = '{0}_{1}_{2}_{3}_S{4}_{5}{6}'.format(var, cmor_table.name, self.experiment.model, + self.experiment.experiment_name, startdate, + self._get_member_str(member), time_bound) elif self.config.data_convention in ('primavera', 'cmip6'): - file_name = '{0}_{1}_{2}_{3}_S{4}-r{5}i1p1_{6}{7}'.format(var, - cmor_table.name, - self.experiment.experiment_name, - self.experiment.model, - startdate, - member + 1, - grid, - time_bound) + if grid: + grid = 
'_{0}'.format(grid) + else: + grid = '' + + file_name = '{0}_{1}_{2}_{3}_S{4}-{5}{6}{7}'.format(var, cmor_table.name, self.experiment.experiment_name, + self.experiment.model, startdate, + self._get_member_str(member), grid, time_bound) else: raise Exception('Data convention {0} not supported'.format(self.config.data_convention)) return file_name @@ -170,20 +169,21 @@ class CMORManager(DataManager): folder_path = os.path.join(self._get_startdate_path(startdate), str(frequency), domain.name, var) if grid: folder_path = os.path.join(folder_path, grid) - folder_path = os.path.join(folder_path, 'r{0}i1p1'.format(member + 1)) + folder_path = os.path.join(folder_path, self._get_member_str(member)) return folder_path def _get_chunk_time_bounds(self, startdate, chunk): start = parse_date(startdate) - chunk_start = chunk_start_date(start, chunk, self.experiment.chunk_size, 'month', 'standard') - chunk_end = chunk_end_date(chunk_start, self.experiment.chunk_size, 'month', 'standard') - chunk_end = previous_day(chunk_end, 'standard') + chunk_start = chunk_start_date(start, chunk, self.experiment.chunk_size, 'month', self.experiment.calendar) + chunk_end = chunk_end_date(chunk_start, self.experiment.chunk_size, 'month', self.experiment.calendar) + chunk_end = previous_day(chunk_end, self.experiment.calendar) time_bound = "{0:04}{1:02}-{2:04}{3:02}".format(chunk_start.year, chunk_start.month, chunk_end.year, chunk_end.month) return time_bound - def link_file(self, domain, var, startdate, member, chunk=None, grid=None, box=None, + def link_file(self, domain, var, cmor_var, startdate, member, chunk=None, grid=None, frequency=None, year=None, date_str=None, move_old=False, vartype=VariableType.MEAN): + """ Creates the link of a given file from the CMOR repository. 
@@ -203,8 +203,6 @@ class CMORManager(DataManager): :type chunk: int :param grid: file's grid (only needed if it is not the original) :type grid: str - :param box: file's box (only needed to retrieve sections or averages) - :type box: Box :param frequency: file's frequency (only needed if it is different from the default) :type frequency: Frequency :param vartype: Variable type (mean, statistic) @@ -212,12 +210,11 @@ class CMORManager(DataManager): :return: path to the copy created on the scratch folder :rtype: str """ - var = self._get_final_var_name(box, var) if not frequency: frequency = self.config.frequency - filepath = self.get_file_path(startdate, member, domain, var, chunk, frequency, grid=grid, year=str(year), - date_str=date_str) + filepath = self.get_file_path(startdate, member, domain, var, cmor_var, chunk, frequency, + grid=grid, year=str(year), date_str=date_str) self._create_link(domain, filepath, frequency, var, grid, move_old, vartype) def send_file(self, filetosend, domain, var, startdate, member, chunk=None, grid=None, region=None, @@ -263,21 +260,25 @@ class CMORManager(DataManager): :param vartype: Variable type (mean, statistic) :type vartype: VariableType """ - original_var = var - cmor_var = self.variable_list.get_variable(original_var) - var = self._get_final_var_name(box, var) - if rename_var and rename_var != var: - Utils.rename_variable(filetosend, rename_var, var) - elif original_var != var: - Utils.rename_variable(filetosend, original_var, var) + if rename_var: + original_name = rename_var + else: + original_name = var + + cmor_var = self.variable_list.get_variable(var) + final_name = self._get_final_var_name(box, var) + + if final_name != original_name: + Utils.rename_variable(filetosend, original_name, final_name) if not frequency: frequency = self.config.frequency - filepath = self.get_file_path(startdate, member, domain, var, chunk, frequency, None, - grid, year, date_str) - netcdf_file = NetCDFFile(filepath, filetosend, domain, 
var, cmor_var, self.config.data_convention, region) + filepath = self.get_file_path(startdate, member, domain, final_name, cmor_var, chunk, frequency, grid, year, + date_str) + netcdf_file = NetCDFFile(filepath, filetosend, domain, final_name, cmor_var, self.config.data_convention, + region) netcdf_file.frequency = frequency if diagnostic: netcdf_file.add_diagnostic_history(diagnostic) @@ -288,7 +289,7 @@ class CMORManager(DataManager): 'using the CMORManager') netcdf_file.send() - self._create_link(domain, filepath, frequency, var, grid, move_old, vartype) + self._create_link(domain, filepath, frequency, final_name, grid, move_old, vartype) def get_year(self, domain, var, startdate, member, year, grid=None, box=None): """ @@ -325,7 +326,27 @@ class CMORManager(DataManager): @staticmethod def _select_data_of_given_year(data_file, year): temp2 = TempFile.get() - Utils.cdo.selyear(str(year), input=data_file, output=temp2) + handler = Utils.openCdf(data_file) + times = Utils.get_datetime_from_netcdf(handler) + x = 0 + first_index = None + last_index = None + while x < times.size: + if times[x].year == year: + first_index = x + break + else: + x += 1 + + while x < times.size: + if times[x].year != year: + last_index = x + break + else: + x += 1 + if last_index is None: + last_index = times.size + Utils.nco.ncks(input=data_file, output=temp2, options=['-d time,{0},{1}'.format(first_index, last_index - 1)]) return temp2 @staticmethod @@ -369,7 +390,8 @@ class CMORManager(DataManager): domain.name) if os.path.isdir(domain_path): for var in os.listdir(domain_path): - var_path = self.get_file_path(startdate, member, domain, var, chunk, Frequency(freq)) + cmor_var = self.variable_list.get_variable(var, True) + var_path = self.get_file_path(startdate, member, domain, var, cmor_var, chunk, Frequency(freq)) if os.path.isfile(var_path): return True return False @@ -381,7 +403,7 @@ class CMORManager(DataManager): cmorizer = Cmorizer(self, startdate, member) 
cmorizer.cmorize_ocean() cmorizer.cmorize_atmos() - Log.result('CMORized startdate {0} member {1}! Ellpased time: {2}\n\n', startdate, member_str, + Log.result('CMORized startdate {0} member {1}! Elapsed time: {2}\n\n', startdate, member_str, datetime.now() - start_time) def _unpack_cmor_files(self, startdate, member): @@ -405,18 +427,22 @@ class CMORManager(DataManager): filepaths = self._get_transferred_cmor_data_filepaths(startdate, member, chunk, 'tar.gz') if len(filepaths) > 0: - Log.info('Unzipping cmorized data for {0} {1} {2}...', startdate, member, chunk) - Utils.unzip(filepaths, True) + if self.config.cmor.chunk_cmorization_requested(chunk): + Log.info('Unzipping cmorized data for {0} {1} {2}...', startdate, member, chunk) + Utils.unzip(filepaths, True) + else: + return True if not os.path.exists(self.cmor_path): os.mkdir(self.cmor_path) filepaths = self._get_transferred_cmor_data_filepaths(startdate, member, chunk, 'tar') if len(filepaths) > 0: - Log.info('Unpacking cmorized data for {0} {1} {2}...', startdate, member, chunk) - Utils.untar(filepaths, self.cmor_path) - self._correct_paths(startdate) - self._create_links(startdate) + if self.config.cmor.chunk_cmorization_requested(chunk): + Log.info('Unpacking cmorized data for {0} {1} {2}...', startdate, member, chunk) + Utils.untar(filepaths, self.cmor_path) + self._correct_paths(startdate, member) + self.create_links(startdate, member) return True return False @@ -434,11 +460,11 @@ class CMORManager(DataManager): filepaths += glob.glob(os.path.join(tar_original_files, 'outputs', file_name)) return filepaths - def _correct_paths(self, startdate): + def _correct_paths(self, startdate, member): self._remove_extra_output_folder() - self._fix_model_as_experiment_error(startdate) + self._fix_model_as_experiment_error(startdate, member) - def _fix_model_as_experiment_error(self, startdate): + def _fix_model_as_experiment_error(self, startdate, member): if self.experiment.experiment_name != 
self.experiment.model: bad_path = os.path.join(self.cmor_path, self.experiment.institute, self.experiment.model, self.experiment.model) @@ -446,6 +472,10 @@ class CMORManager(DataManager): for (dirpath, dirnames, filenames) in os.walk(bad_path, False): for filename in filenames: + if '_S{0}_'.format(startdate) in filename: + continue + if self._get_member_str(member) in filename: + continue filepath = os.path.join(dirpath, filename) good = filepath.replace('_{0}_output_'.format(self.experiment.model), '_{0}_{1}_S{2}_'.format(self.experiment.model, @@ -464,25 +494,14 @@ class CMORManager(DataManager): bad_path = os.path.join(self.cmor_path, 'output') if os.path.exists(bad_path): Log.debug('Moving CMOR files out of the output folder') - CMORManager.copytree(bad_path, self.cmor_path) - shutil.rmtree(bad_path) + Utils.move_tree(bad_path, self.cmor_path) Log.debug('Done') - @staticmethod - def copytree(source, destiny): - if not os.path.exists(destiny): - os.makedirs(destiny) - shutil.copystat(source, destiny) - lst = os.listdir(source) - for item in lst: - item_source = os.path.join(source, item) - item_destiny = os.path.join(destiny, item) - if os.path.isdir(item_source): - CMORManager.copytree(item_source, item_destiny) - else: - shutil.copy2(item_source, item_destiny) - - def _create_links(self, startdate): + def create_links(self, startdate, member=None): + if member: + member_str = self._get_member_str(member) + else: + member_str = None Log.info('Creating links for CMOR files ({0})', startdate) path = self._get_startdate_path(startdate) for freq in os.listdir(path): @@ -490,6 +509,8 @@ class CMORManager(DataManager): for domain in os.listdir(os.path.join(path, freq)): for var in os.listdir(os.path.join(path, freq, domain)): for member in os.listdir(os.path.join(path, freq, domain, var)): + if member_str != member: + continue for name in os.listdir(os.path.join(path, freq, domain, var, member)): filepath = os.path.join(path, freq, domain, var, member, name) if 
os.path.isfile(filepath): @@ -512,4 +533,6 @@ class CMORManager(DataManager): return os.path.join(self.config.data_dir, self.experiment.expid, 'cmorfiles', self.experiment.institute, self.experiment.model, self.experiment.experiment_name, 'S' + startdate) + def _get_member_str(self, member): + return 'r{0}i1p1'.format(member + 1) diff --git a/earthdiagnostics/config.py b/earthdiagnostics/config.py index 2b9c736f963c9105f7fa2c229db0f016ff06a936..8e062a9b814d75a84578bd70bda234705dc5c12d 100644 --- a/earthdiagnostics/config.py +++ b/earthdiagnostics/config.py @@ -1,13 +1,12 @@ # coding=utf-8 import os -from autosubmit.config.log import Log -from autosubmit.date.chunk_date_lib import parse_date, chunk_start_date, chunk_end_date, date2str +from bscearth.utils.log import Log +from bscearth.utils.date import parse_date, chunk_start_date, chunk_end_date, date2str +from bscearth.utils.config_parser import ConfigParser from earthdiagnostics.frequency import Frequency, Frequencies -from earthdiagnostics.parser import Parser from earthdiagnostics.variable import VariableManager -from utils import Utils class Config(object): @@ -19,31 +18,32 @@ class Config(object): """ def __init__(self, path): - parser = Parser() + parser = ConfigParser() parser.optionxform = str parser.read(path) # Read diags config - self.data_adaptor = parser.get_option('DIAGNOSTICS', 'DATA_ADAPTOR', 'CMOR').upper() + self.data_adaptor = parser.get_choice_option('DIAGNOSTICS', 'DATA_ADAPTOR', ('CMOR', 'THREDDS'), 'CMOR') "Scratch folder path" - self.scratch_dir = Utils.expand_path(parser.get_option('DIAGNOSTICS', 'SCRATCH_DIR')) + self.scratch_dir = parser.get_path_option('DIAGNOSTICS', 'SCRATCH_DIR') "Scratch folder path" - self.data_dir = Utils.expand_path(parser.get_option('DIAGNOSTICS', 'DATA_DIR')) + self.scratch_masks = parser.get_path_option('DIAGNOSTICS', 'SCRATCH_MASKS', '') + "Common scratch folder for masks" + self.data_dir = parser.get_path_option('DIAGNOSTICS', 'DATA_DIR') "Root data folder 
path" - self.data_type = Utils.expand_path(parser.get_option('DIAGNOSTICS', 'DATA_TYPE', 'exp')).lower() + self.data_type = parser.get_choice_option('DIAGNOSTICS', 'DATA_TYPE', ('exp', 'obs', 'recon'), 'exp') "Data type (experiment, observation or reconstruction)" - if self.data_type not in ('exp', 'obs', 'recon'): - raise Exception('Data type must be exp, obs or recon') - self.con_files = Utils.expand_path(parser.get_option('DIAGNOSTICS', 'CON_FILES')) + self.con_files = parser.get_path_option('DIAGNOSTICS', 'CON_FILES') "Mask and meshes folder path" - self.data_convention = parser.get_option('DIAGNOSTICS', 'DATA_CONVENTION', 'SPECS').lower() + self.data_convention = parser.get_choice_option('DIAGNOSTICS', 'DATA_CONVENTION', + ('specs', 'primavera', 'cmip6'), 'specs', ignore_case=True) self._diags = parser.get_option('DIAGNOSTICS', 'DIAGS') self.frequency = Frequency(parser.get_option('DIAGNOSTICS', 'FREQUENCY')) "Default data frequency to be used by the diagnostics" - self.cdftools_path = Utils.expand_path(parser.get_option('DIAGNOSTICS', 'CDFTOOLS_PATH')) + self.cdftools_path = parser.get_path_option('DIAGNOSTICS', 'CDFTOOLS_PATH', '') "Path to CDFTOOLS executables" - self.max_cores = parser.get_int_option('DIAGNOSTICS', 'MAX_CORES', 100000) + self.max_cores = parser.get_int_option('DIAGNOSTICS', 'MAX_CORES', 0) "Maximum number of cores to use" self.restore_meshes = parser.get_bool_option('DIAGNOSTICS', 'RESTORE_MESHES', False) "If True, forces the tool to copy all the mesh and mask files for the model, regardless of existence" @@ -59,7 +59,7 @@ class Config(object): self._aliases = dict() if parser.has_section('ALIAS'): for option in parser.options('ALIAS'): - self._aliases[option.lower()] = parser.get_option('ALIAS', option).lower().split() + self._aliases[option.lower()] = parser.get_list_option('ALIAS', option) Log.debug('Preparing command list') commands = self._diags.split() self._real_commands = list() @@ -92,9 +92,11 @@ class CMORConfig(object): def 
__init__(self, parser): self.force = parser.get_bool_option('CMOR', 'FORCE', False) self.force_untar = parser.get_bool_option('CMOR', 'FORCE_UNTAR', False) + self.filter_files = parser.get_option('CMOR', 'FILTER_FILES', '') self.ocean = parser.get_bool_option('CMOR', 'OCEAN_FILES', True) self.atmosphere = parser.get_bool_option('CMOR', 'ATMOSPHERE_FILES', True) self.use_grib = parser.get_bool_option('CMOR', 'USE_GRIB', True) + self._chunks = parser.get_int_list_option('CMOR', 'CHUNKS') self.associated_experiment = parser.get_option('CMOR', 'ASSOCIATED_EXPERIMENT', 'to be filled') self.associated_model = parser.get_option('CMOR', 'ASSOCIATED_MODEL', 'to be filled') self.initialization_description = parser.get_option('CMOR', 'INITIALIZATION_DESCRIPTION', 'to be filled') @@ -102,8 +104,6 @@ class CMORConfig(object): self.physics_description = parser.get_option('CMOR', 'PHYSICS_DESCRIPTION', 'to be filled') self.physics_version = parser.get_option('CMOR', 'PHYSICS_VERSION', '1') self.source = parser.get_option('CMOR', 'SOURCE', 'to be filled') - self.add_name = parser.get_bool_option('CMOR', 'ADD_NAME') - self.add_startdate = parser.get_bool_option('CMOR', 'ADD_STARTDATE') vars_string = parser.get_option('CMOR', 'VARIABLE_LIST', '') if vars_string: @@ -140,6 +140,11 @@ class CMORConfig(object): return False + def chunk_cmorization_requested(self, chunk): + if len(self._chunks) == 0: + return True + return chunk in self._chunks + @staticmethod def _parse_variables(raw_string): variables = dict() @@ -197,27 +202,17 @@ class ExperimentConfig(object): self.expid = parser.get_option('EXPERIMENT', 'EXPID') self.experiment_name = parser.get_option('EXPERIMENT', 'NAME', self.expid) - members = list() - for member in parser.get_option('EXPERIMENT', 'MEMBERS').split(): - members.append(int(member)) - - member_digits = parser.get_int_option('EXPERIMENT', 'MEMBER_DIGITS', 1) - startdates = parser.get_option('EXPERIMENT', 'STARTDATES').split() - chunk_size = 
parser.get_int_option('EXPERIMENT', 'CHUNK_SIZE') - chunks = parser.get_int_option('EXPERIMENT', 'CHUNKS') - calendar = parser.get_option('EXPERIMENT', 'CALENDAR', 'standard') + self.members = parser.get_int_list_option('EXPERIMENT', 'MEMBERS') + self.member_digits = parser.get_int_option('EXPERIMENT', 'MEMBER_DIGITS', 1) + self.startdates = parser.get_option('EXPERIMENT', 'STARTDATES').split() + self.chunk_size = parser.get_int_option('EXPERIMENT', 'CHUNK_SIZE') + self.num_chunks = parser.get_int_option('EXPERIMENT', 'CHUNKS') + self.calendar = parser.get_option('EXPERIMENT', 'CALENDAR', 'standard') self.model = parser.get_option('EXPERIMENT', 'MODEL') self.atmos_timestep = parser.get_int_option('EXPERIMENT', 'ATMOS_TIMESTEP', 6) self.ocean_timestep = parser.get_int_option('EXPERIMENT', 'OCEAN_TIMESTEP', 6) - self.model_version = parser.get_option('EXPERIMENT', 'MODEL_VERSION') - self.atmos_grid = parser.get_option('EXPERIMENT', 'ATMOS_GRID') - - self.startdates = startdates - self.members = members - self.num_chunks = chunks - self.chunk_size = chunk_size - self.member_digits = member_digits - self.calendar = calendar + self.model_version = parser.get_option('EXPERIMENT', 'MODEL_VERSION', '') + self.atmos_grid = parser.get_option('EXPERIMENT', 'ATMOS_GRID', '') def get_chunk_list(self): """ diff --git a/earthdiagnostics/datamanager.py b/earthdiagnostics/datamanager.py index 3e6f6f3d628adfeabfd8a5992f575f726dac3aba..9e4c7a369053812f086c63ceba58c5ce1579b9f3 100644 --- a/earthdiagnostics/datamanager.py +++ b/earthdiagnostics/datamanager.py @@ -178,51 +178,57 @@ class DataManager(object): vargrid_folder = self.get_varfolder(domain, var, grid) self.lock.acquire() - if grid == 'original': - link_path = os.path.join(self.config.data_dir, self.experiment.expid, freq_str, variable_folder) - if os.path.islink(link_path): - link_path = os.path.join(self.config.data_dir, self.experiment.expid, freq_str, vargrid_folder) + try: + if grid == 'original': + link_path = 
os.path.join(self.config.data_dir, self.experiment.expid, freq_str, variable_folder) + if os.path.islink(link_path): + link_path = os.path.join(self.config.data_dir, self.experiment.expid, freq_str, vargrid_folder) - Utils.create_folder_tree(link_path) - else: - link_path = os.path.join(self.config.data_dir, self.experiment.expid, freq_str, vargrid_folder) - Utils.create_folder_tree(link_path) - default_path = os.path.join(self.config.data_dir, self.experiment.expid, freq_str, variable_folder) - original_path = os.path.join(self.config.data_dir, self.experiment.expid, freq_str, - vargrid_folder.replace('-{0}_f'.format(grid), '-original_f')) - - if os.path.islink(default_path): - os.remove(default_path) - elif os.path.isdir(default_path): - shutil.move(default_path, original_path) - os.symlink(link_path, default_path) - - if move_old and link_path not in self._checked_vars: - self._checked_vars.append(link_path) - old_path = os.path.join(self.config.data_dir, self.experiment.expid, freq_str, - 'old_{0}_f{1}h'.format(var, self.experiment.atmos_timestep)) - regex = re.compile(var + '_[0-9]{6,8}\.nc') - for filename in os.listdir(link_path): - if regex.match(filename): - Utils.create_folder_tree(old_path) - Utils.move_file(os.path.join(link_path, filename), - os.path.join(old_path, filename)) - - link_path = os.path.join(link_path, os.path.basename(filepath)) - if os.path.lexists(link_path): - os.remove(link_path) - if not os.path.exists(filepath): + Utils.create_folder_tree(link_path) + else: + link_path = os.path.join(self.config.data_dir, self.experiment.expid, freq_str, vargrid_folder) + Utils.create_folder_tree(link_path) + default_path = os.path.join(self.config.data_dir, self.experiment.expid, freq_str, variable_folder) + original_path = os.path.join(self.config.data_dir, self.experiment.expid, freq_str, + vargrid_folder.replace('-{0}_f'.format(grid), '-original_f')) + + if os.path.islink(default_path): + os.remove(default_path) + elif 
os.path.isdir(default_path): + shutil.move(default_path, original_path) + os.symlink(link_path, default_path) + + if move_old and link_path not in self._checked_vars: + self._checked_vars.append(link_path) + old_path = os.path.join(self.config.data_dir, self.experiment.expid, freq_str, + 'old_{0}_f{1}h'.format(var, self.experiment.atmos_timestep)) + regex = re.compile(var + '_[0-9]{6,8}\.nc') + for filename in os.listdir(link_path): + if regex.match(filename): + Utils.create_folder_tree(old_path) + Utils.move_file(os.path.join(link_path, filename), + os.path.join(old_path, filename)) + + link_path = os.path.join(link_path, os.path.basename(filepath)) + if os.path.lexists(link_path): + os.remove(link_path) + if not os.path.exists(filepath): + raise ValueError('Original file {0} does not exists'.format(filepath)) + if not os.path.isdir(os.path.dirname(link_path)): + Utils.create_folder_tree(os.path.dirname(link_path)) + os.symlink(filepath, link_path) + except: + raise + finally: self.lock.release() - raise ValueError('Original file {0} does not exists'.format(filepath)) - os.symlink(filepath, link_path) - self.lock.release() # Overridable methods (not mandatory) - def link_file(self, domain, var, startdate, member, chunk=None, grid=None, box=None, + def link_file(self, domain, var, cmor_var, startdate, member, chunk=None, grid=None, frequency=None, year=None, date_str=None, move_old=False, vartype=VariableType.MEAN): """ Creates the link of a given file from the CMOR repository. 
+ :param cmor_var: :param move_old: :param date_str: :param year: if frequency is yearly, this parameter is used to give the corresponding year @@ -239,8 +245,6 @@ class DataManager(object): :type chunk: int :param grid: file's grid (only needed if it is not the original) :type grid: str - :param box: file's box (only needed to retrieve sections or averages) - :type box: Box :param frequency: file's frequency (only needed if it is different from the default) :type frequency: str :param vartype: Variable type (mean, statistic) @@ -285,20 +289,20 @@ class NetCDFFile(object): def send(self): Utils.convert2netcdf4(self.local_file) - if self.region: - self._prepare_region() - - if self.cmor_var: - self._correct_metadata() + self._correct_metadata() + self._prepare_region() self._rename_coordinate_variables() Utils.move_file(self.local_file, self.remote_file) def _prepare_region(self): + if not self.region: + return if not os.path.exists(self.remote_file): self._add_region_dimension_to_var() else: self._update_var_with_region_data() + self._correct_metadata() Utils.nco.ncks(input=self.local_file, output=self.local_file, options='-O --fix_rec_dmn region') def _update_var_with_region_data(self): @@ -337,6 +341,8 @@ class NetCDFFile(object): Utils.rename_variable(self.local_file, 'new_var', self.var) def _correct_metadata(self): + if not self.cmor_var: + return handler = Utils.openCdf(self.local_file) var_handler = handler.variables[self.var] self._fix_variable_name(var_handler) diff --git a/earthdiagnostics/diagnostic.py b/earthdiagnostics/diagnostic.py index 86787556c32cc91a34b34b2e39436305c737b7f9..3548451d0e7e039979b8c3db6426c84d5f48b27b 100644 --- a/earthdiagnostics/diagnostic.py +++ b/earthdiagnostics/diagnostic.py @@ -1,8 +1,9 @@ # coding=utf-8 -from earthdiagnostics.constants import Basins +from earthdiagnostics.constants import Basins, Basin from earthdiagnostics.frequency import Frequency from earthdiagnostics.variable_type import VariableType -from 
earthdiagnostics.modelingrealm import ModelingRealm +from earthdiagnostics.modelingrealm import ModelingRealms +from earthdiagnostics.variable import VariableManager class Diagnostic(object): @@ -82,6 +83,8 @@ class Diagnostic(object): :type vartype: VariableType :return: """ + if isinstance(region, Basin): + region = region.fullname self.data_manager.send_file(filetosend, domain, var, startdate, member, chunk, grid, region, box, rename_var, frequency, year, date_str, move_old, diagnostic=self, vartype=vartype) @@ -174,16 +177,38 @@ class DiagnosticIntOption(DiagnosticOption): class DiagnosticListIntOption(DiagnosticOption): + + def __init__(self, name, default_value=None, min_limit=None, max_limit=None): + super(DiagnosticListIntOption, self).__init__(name, default_value) + self.min_limit = min_limit + self.max_limit = max_limit + def parse(self, option_value): option_value = self.check_default(option_value) if isinstance(option_value, tuple) or isinstance(option_value, list): return option_value - return [int(i) for i in option_value.split('-')] + values = [int(i) for i in option_value.split('-')] + for value in values: + if self.min_limit is not None and value < self.min_limit: + raise DiagnosticOptionError('Value {0} is lower than minimum ({1})'.format(value, self.min_limit)) + if self.max_limit is not None and value > self.max_limit: + raise DiagnosticOptionError('Value {0} is higher than maximum ({1})'.format(value, self.max_limit)) + + return values + + +class DiagnosticVariableOption(DiagnosticOption): + def parse(self, option_value): + option_value = self.check_default(option_value) + real_name = VariableManager().get_variable(option_value, False) + if real_name is None: + return option_value + return real_name.short_name class DiagnosticDomainOption(DiagnosticOption): def parse(self, option_value): - return ModelingRealm.parse(self.check_default(option_value)) + return ModelingRealms.parse(self.check_default(option_value)) class 
DiagnosticFrequencyOption(DiagnosticOption): diff --git a/earthdiagnostics/earthdiags.py b/earthdiagnostics/earthdiags.py index 990b9213b9d12db697d297cf5d58f5db4098ef65..909854b407267b07922fc868b7d32e4979d57f5b 100755 --- a/earthdiagnostics/earthdiags.py +++ b/earthdiagnostics/earthdiags.py @@ -9,7 +9,8 @@ import pkg_resources import netCDF4 import operator import os -from autosubmit.date.chunk_date_lib import * +from bscearth.utils.date import * +import bscearth.utils.path from earthdiagnostics.config import Config from earthdiagnostics.cmormanager import CMORManager @@ -21,6 +22,7 @@ from earthdiagnostics.ocean import * from earthdiagnostics.general import * from earthdiagnostics.statistics import * from earthdiagnostics.variable import VariableManager +from earthdiagnostics.diagnostic import DiagnosticOptionError class EarthDiags(object): @@ -46,7 +48,7 @@ class EarthDiags(object): version = pkg_resources.require("earthdiagnostics")[0].version def __init__(self, config_file): - Log.debug('Initialising Diags') + Log.info('Initialising Earth Diagnostics Version {0}', EarthDiags.version) self.config = Config(config_file) TempFile.scratch_folder = self.config.scratch_dir @@ -100,24 +102,28 @@ class EarthDiags(object): if Log.console_handler.level <= Log.DEBUG: Utils.cdo.debug = True - Utils.nco.debug = True + Utils.nco.debug = False # This is due to a bug in nco. 
Must change when it's solved if args.logfilepath: - Log.set_file(Utils.expand_path(args.logfilepath)) + Log.set_file(bscearth.utils.path.expand_path(args.logfilepath)) - config_file_path = Utils.expand_path(args.configfile) + config_file_path = bscearth.utils.path.expand_path(args.configfile) if not os.path.isfile(config_file_path): Log.critical('Configuration file {0} can not be found', config_file_path) return False + try: + diags = EarthDiags(config_file_path) + if args.clean: + result = diags.clean() + elif args.report: + result = diags.report() + else: + result = diags.run() + except Exception: + raise + finally: + TempFile.clean() - diags = EarthDiags(config_file_path) - if args.clean: - result = diags.clean() - elif args.report: - result = diags.report() - else: - result = diags.run() - TempFile.clean() return result def _create_dic_variables(self): @@ -150,10 +156,13 @@ class EarthDiags(object): # Run diagnostics Log.info('Running diagnostics') list_jobs = self.prepare_job_list() + self._failed_jobs = [] time = datetime.datetime.now() Log.info("Starting to compute at {0}", time) - self.threads = min(Utils.available_cpu_count(), self.config.max_cores) + self.threads = Utils.available_cpu_count() + if 0 < self.config.max_cores < self.threads: + self.threads = self.config.max_cores Log.info('Using {0} threads', self.threads) threads = list() for num_thread in range(0, self.threads): @@ -168,9 +177,10 @@ class EarthDiags(object): TempFile.clean() finish_time = datetime.datetime.now() Log.result("Diagnostics finished at {0}", finish_time) - Log.result("Time ellapsed: {0}\n", finish_time - time) + Log.result("Elapsed time: {0}\n", finish_time - time) + self.print_errors() self.print_stats() - return self.had_errors + return not self.had_errors def _prepare_data_manager(self): if self.config.data_adaptor == 'CMOR': @@ -192,6 +202,16 @@ class EarthDiags(object): for diag, time in sorted(total.items(), key=operator.itemgetter(1)): Log.info('{0:23} {1:}', 
diag.__name__, time) + def print_errors(self): + if len(self._failed_jobs) == 0: + return + self.had_errors = True + Log.error('Failed jobs') + Log.error('-----------') + for job in self._failed_jobs: + Log.error(str(job)) + Log.info('') + def prepare_job_list(self): list_jobs = Queue.Queue() for fulldiag in self.config.get_commands(): @@ -200,11 +220,16 @@ class EarthDiags(object): diag_class = Diagnostic.get_diagnostic(diag_options[0]) if diag_class: - for job in diag_class.generate_jobs(self, diag_options): - list_jobs.put(job) - continue + try: + for job in diag_class.generate_jobs(self, diag_options): + list_jobs.put(job) + continue + except DiagnosticOptionError as ex: + Log.error('Can not configure diagnostic {0}: {1}', diag_options[0], ex) + self.had_errors = True else: Log.error('{0} is not an available diagnostic', diag_options[0]) + self.had_errors = True return list_jobs @staticmethod @@ -223,6 +248,7 @@ class EarthDiags(object): Diagnostic.register(MonthlyMean) Diagnostic.register(Rewrite) Diagnostic.register(Relink) + Diagnostic.register(RelinkAll) Diagnostic.register(Scale) Diagnostic.register(Attribute) @@ -245,6 +271,8 @@ class EarthDiags(object): Diagnostic.register(MixedLayerHeatContent) Diagnostic.register(HeatContentLayer) Diagnostic.register(HeatContent) + Diagnostic.register(RegionMean) + Diagnostic.register(Rotation) def clean(self): Log.info('Removing scratch folder...') @@ -338,9 +366,7 @@ class EarthDiags(object): else: Log.result('Thread {0} finished after running successfully {1} of {2} tasks', numthread, count, count + len(failed_jobs)) - self.had_errors = False - for job in failed_jobs: - Log.error('Job {0} could not be run', job) + self._failed_jobs += failed_jobs return def _prepare_mesh_files(self): @@ -348,35 +374,58 @@ class EarthDiags(object): con_files = self.config.con_files model_version = self.config.experiment.model_version restore_meshes = self.config.restore_meshes - - self._copy_file(os.path.join(con_files, 
'mesh_mask_nemo.{0}.nc'.format(model_version)), 'mesh_hgr.nc', - restore_meshes) - self._link_file('mesh_hgr.nc', 'mesh_zgr.nc') - self._link_file('mesh_hgr.nc', 'mask.nc') - - self._copy_file(os.path.join(con_files, 'new_maskglo.{0}.nc'.format(model_version)), 'new_maskglo.nc', - restore_meshes) - self._copy_file(os.path.join(con_files, 'mask.regions.{0}.nc'.format(model_version)), - 'mask_regions.nc', restore_meshes) - self._copy_file(os.path.join(con_files, 'mask.regions.3d.{0}.nc'.format(model_version)), - 'mask_regions.3d.nc', restore_meshes) + mesh_mask = 'mesh_mask_nemo.{0}.nc'.format(model_version) + new_mask_glo = 'new_maskglo.{0}.nc'.format(model_version) + mask_regions = 'mask.regions.{0}.nc'.format(model_version) + mask_regions_3d = 'mask.regions.3d.{0}.nc'.format(model_version) + if self.config.scratch_masks: + Utils.create_folder_tree(self.config.scratch_masks) + + if self._copy_file(os.path.join(con_files, mesh_mask), os.path.join(self.config.scratch_masks, mesh_mask), + restore_meshes): + self._link_file(os.path.join(self.config.scratch_masks, mesh_mask), 'mesh_hgr.nc') + self._link_file(os.path.join(self.config.scratch_masks, mesh_mask), 'mesh_zgr.nc') + self._link_file(os.path.join(self.config.scratch_masks, mesh_mask), 'mask.nc') + + if self._copy_file(os.path.join(con_files, new_mask_glo), + os.path.join(self.config.scratch_masks, new_mask_glo), restore_meshes): + self._link_file(os.path.join(self.config.scratch_masks, new_mask_glo), 'new_maskglo.nc') + + if self._copy_file(os.path.join(con_files, mask_regions), + os.path.join(self.config.scratch_masks, mask_regions), restore_meshes): + self._link_file(os.path.join(self.config.scratch_masks, mask_regions), 'mask_regions.nc') + + if self._copy_file(os.path.join(con_files, mask_regions_3d), + os.path.join(self.config.scratch_masks, mask_regions_3d), restore_meshes): + self._link_file(os.path.join(self.config.scratch_masks, mask_regions_3d), 'mask_regions.3d.nc') + else: + 
self._copy_file(os.path.join(con_files, mesh_mask), 'mesh_hgr.nc', restore_meshes) + self._link_file('mesh_hgr.nc', 'mesh_zgr.nc') + self._link_file('mesh_hgr.nc', 'mask.nc') + self._copy_file(os.path.join(con_files, new_mask_glo), 'new_maskglo.nc', + restore_meshes) + self._copy_file(os.path.join(con_files, mask_regions), + 'mask_regions.nc', restore_meshes) + self._copy_file(os.path.join(con_files, mask_regions_3d), + 'mask_regions.3d.nc', restore_meshes) Log.result('Mesh files ready!') def _copy_file(self, source, destiny, force): if not os.path.exists(source): Log.user_warning('File {0} is not available for {1}', destiny, self.config.experiment.model_version) - return + return False if not force and os.path.exists(destiny): if os.stat(source).st_size == os.stat(destiny).st_size: Log.info('File {0} already exists', destiny) - return + return True Log.info('Creating file {0}', destiny) shutil.copy(source, destiny) Log.info('File {0} ready', destiny) - Utils.rename_variables('mesh_hgr.nc', self.dic_variables, False, True) + Utils.rename_variables(destiny, self.dic_variables, False, True) + return True def _link_file(self, source, destiny): if not os.path.exists(source): diff --git a/earthdiagnostics/frequency.py b/earthdiagnostics/frequency.py index 12e1cbe42c9ce4b570b4339af650ac999094e0ce..ff8b7e21e0e8bfbd7b24c54f19577eb9d33a46b3 100644 --- a/earthdiagnostics/frequency.py +++ b/earthdiagnostics/frequency.py @@ -6,12 +6,13 @@ class Frequency(object): _recognized = {'f': 'fx', 'fx': 'fx', 'fixed': 'fx', 'c': 'clim', 'clim': 'clim', 'climatology': 'clim', 'monclim': 'clim', '1hrclimmon': 'clim', + 'dec': 'dec', 'decadal': 'dec', 'y': 'year', 'yr': 'year', 'year': 'year', 'yearly': 'year', - 'm': 'mon', 'mon': 'mon', 'monthly': 'mon', - 'd': 'day', 'daily': 'day', 'day': 'day', - '6': '6hr', '6hr': '6hr', '6_hourly': '6hr', '6hourly': '6hr', '6 hourly': '6hr', - '3': '3hr', '3hr': '3hr', '3_hourly': '3hr', '3hourly': '3hr', '3 hourly': '3hr', - '1': '1hr', 'hr': 
'1hr', 'hourly': '1hr', '1hr': '1hr', '1 hourly': '1hr', + 'm': 'mon', '1m': 'mon', 'mon': 'mon', 'monthly': 'mon', + 'd': 'day', '1d': 'day', 'daily': 'day', 'day': 'day', + '6': '6hr', '6h': '6hr', '6hr': '6hr', '6_hourly': '6hr', '6hourly': '6hr', '6 hourly': '6hr', + '3': '3hr', '3h': '3hr', '3hr': '3hr', '3_hourly': '3hr', '3hourly': '3hr', '3 hourly': '3hr', + '1': '1hr', 'hr': '1hr', '1h': '1hr', 'hourly': '1hr', '1hr': '1hr', '1 hourly': '1hr', 'subhr': 'subhr'} def __init__(self, freq): @@ -19,7 +20,7 @@ class Frequency(object): try: self.frequency = Frequency._recognized[freq] except KeyError: - raise Exception('Frequency {0} not supported'.format(freq)) + raise ValueError('Frequency {0} not supported'.format(freq)) def __eq__(self, other): return self.frequency == other.frequency diff --git a/earthdiagnostics/general/__init__.py b/earthdiagnostics/general/__init__.py index 1a3cf92a4f96319537cb0b137f3e494e76081b01..1c321cb46d27751ec57e05fd65a68a0128890e2e 100644 --- a/earthdiagnostics/general/__init__.py +++ b/earthdiagnostics/general/__init__.py @@ -4,3 +4,4 @@ from earthdiagnostics.general.rewrite import Rewrite from earthdiagnostics.general.relink import Relink from earthdiagnostics.general.scale import Scale from earthdiagnostics.general.attribute import Attribute +from earthdiagnostics.general.relinkall import RelinkAll diff --git a/earthdiagnostics/general/attribute.py b/earthdiagnostics/general/attribute.py index 4b0bc129de0f3eb69478b5930b1b7fd5c64ec28b..178dae42a8d974ff7034f21fa939bc471b9cddb5 100644 --- a/earthdiagnostics/general/attribute.py +++ b/earthdiagnostics/general/attribute.py @@ -1,5 +1,6 @@ # coding=utf-8 -from earthdiagnostics.diagnostic import Diagnostic, DiagnosticOption, DiagnosticComplexStrOption, DiagnosticDomainOption +from earthdiagnostics.diagnostic import Diagnostic, DiagnosticOption, DiagnosticComplexStrOption, \ + DiagnosticDomainOption, DiagnosticVariableOption from earthdiagnostics.utils import Utils from 
earthdiagnostics.modelingrealm import ModelingRealm @@ -64,16 +65,16 @@ class Attribute(Diagnostic): :return: """ - options_available = (DiagnosticOption('variable'), + options_available = (DiagnosticVariableOption('variable'), DiagnosticDomainOption('domain'), DiagnosticOption('name'), DiagnosticComplexStrOption('value'), - DiagnosticOption('grid')) + DiagnosticOption('grid', '')) options = cls.process_options(options, options_available) job_list = list() for startdate, member, chunk in diags.config.experiment.get_chunk_list(): job_list.append(Attribute(diags.data_manager, startdate, member, chunk, - options['domain'], options['variable'], options['grid'], options['grid'], + options['domain'], options['variable'], options['grid'], options['name'], options['value'])) return job_list diff --git a/earthdiagnostics/general/monthlymean.py b/earthdiagnostics/general/monthlymean.py index 5c2e5ece56cd8fbf02f90646b189bf8584f4958f..9b084dd23f5839f02ecacb81e71a3df2f29388dd 100644 --- a/earthdiagnostics/general/monthlymean.py +++ b/earthdiagnostics/general/monthlymean.py @@ -1,7 +1,8 @@ # coding=utf-8 import os -from earthdiagnostics.diagnostic import Diagnostic, DiagnosticOption, DiagnosticDomainOption, DiagnosticFrequencyOption +from earthdiagnostics.diagnostic import Diagnostic, DiagnosticOption, DiagnosticDomainOption, \ + DiagnosticFrequencyOption, DiagnosticVariableOption from earthdiagnostics.frequency import Frequencies from earthdiagnostics.utils import Utils, TempFile from earthdiagnostics.modelingrealm import ModelingRealm @@ -67,7 +68,7 @@ class MonthlyMean(Diagnostic): :return: """ - options_available = (DiagnosticOption('variable'), + options_available = (DiagnosticVariableOption('variable'), DiagnosticDomainOption('domain'), DiagnosticFrequencyOption('frequency', Frequencies.daily), DiagnosticOption('grid', '')) diff --git a/earthdiagnostics/general/relink.py b/earthdiagnostics/general/relink.py index 
f12764b20e2ad2da13ece731156005ca96364516..ecfd024748649c6ec9ca19020b3384a05267fe68 100644 --- a/earthdiagnostics/general/relink.py +++ b/earthdiagnostics/general/relink.py @@ -1,6 +1,8 @@ # coding=utf-8 -from earthdiagnostics.diagnostic import Diagnostic, DiagnosticOption, DiagnosticDomainOption, DiagnosticBoolOption +from earthdiagnostics.diagnostic import Diagnostic, DiagnosticOption, DiagnosticDomainOption, DiagnosticBoolOption, \ + DiagnosticVariableOption from earthdiagnostics.modelingrealm import ModelingRealm +from earthdiagnostics.variable import VariableManager class Relink(Diagnostic): @@ -30,7 +32,7 @@ class Relink(Diagnostic): alias = 'relink' "Diagnostic alias for the configuration file" - def __init__(self, data_manager, startdate, member, chunk, domain, variable, move_old): + def __init__(self, data_manager, startdate, member, chunk, domain, variable, move_old, grid): Diagnostic.__init__(self, data_manager) self.startdate = startdate self.member = member @@ -38,15 +40,17 @@ class Relink(Diagnostic): self.variable = variable self.domain = domain self.move_old = move_old + self.grid = grid def __str__(self): return 'Relink output Startdate: {0} Member: {1} Chunk: {2} Move old: {5} ' \ - 'Variable: {3}:{4}'.format(self.startdate, self.member, self.chunk, self.domain, self.variable, - self.move_old) + 'Variable: {3}:{4} Grid: {6}'.format(self.startdate, self.member, self.chunk, self.domain, self.variable, + self.move_old, self.grid) def __eq__(self, other): return self.startdate == other.startdate and self.member == other.member and self.chunk == other.chunk and \ - self.domain == other.domain and self.variable == other.variable + self.domain == other.domain and self.variable == other.variable and self.move_old == other.move_old and \ + self.grid == other.grid @classmethod def generate_jobs(cls, diags, options): @@ -59,20 +63,22 @@ class Relink(Diagnostic): :type options: list[str] :return: """ - options_available = (DiagnosticOption('variable'), + 
options_available = (DiagnosticVariableOption('variable'), DiagnosticDomainOption('domain'), - DiagnosticBoolOption('move_old', True)) + DiagnosticBoolOption('move_old', True), + DiagnosticOption('grid', '')) options = cls.process_options(options, options_available) job_list = list() for startdate, member, chunk in diags.config.experiment.get_chunk_list(): job_list.append(Relink(diags.data_manager, startdate, member, chunk, - options['domain'], options['variable'], options['move_old'])) + options['domain'], options['variable'], options['move_old'], options['grid'])) return job_list def compute(self): """ Runs the diagnostic """ - self.data_manager.link_file(self.domain, self.variable, self.startdate, self.member, self.chunk, - move_old=self.move_old) + self.data_manager.link_file(self.domain, self.variable, VariableManager().get_variable(self.variable), + self.startdate, self.member, self.chunk, + move_old=self.move_old, grid=self.grid) diff --git a/earthdiagnostics/general/relinkall.py b/earthdiagnostics/general/relinkall.py new file mode 100644 index 0000000000000000000000000000000000000000..6ee2d226a763d3fb63260130ddb2b3c6d72363af --- /dev/null +++ b/earthdiagnostics/general/relinkall.py @@ -0,0 +1,55 @@ +# coding=utf-8 +from earthdiagnostics.diagnostic import Diagnostic + + +class RelinkAll(Diagnostic): + """ + Recreates the links for the variable specified + + :original author: Javier Vegas-Regidor + + :created: September 2016 + + :param data_manager: data management object + :type data_manager: DataManager + :param startdate: startdate + :type startdate: str + """ + + alias = 'relinkall' + "Diagnostic alias for the configuration file" + + def __init__(self, data_manager, startdate): + Diagnostic.__init__(self, data_manager) + self.startdate = startdate + + def __str__(self): + return 'Relink all output Startdate: {0}'.format(self.startdate) + + def __eq__(self, other): + return self.startdate == other.startdate + + @classmethod + def generate_jobs(cls, diags, 
options): + """ + Creates a job for each chunk to compute the diagnostic + + :param diags: Diagnostics manager class + :type diags: Diags + :param options: variable, domain, move_old=False + :type options: list[str] + :return: + """ + if len(options) > 1: + raise Exception('The Relink All diagnostic has no options') + job_list = list() + for startdate in diags.config.experiment.startdates: + job_list.append(RelinkAll(diags.data_manager, startdate)) + return job_list + + def compute(self): + """ + Runs the diagnostic + """ + self.data_manager.create_links(self.startdate) + diff --git a/earthdiagnostics/general/rewrite.py b/earthdiagnostics/general/rewrite.py index ab6b87c7146f350b4633cdf31b5ad953d33e2275..6b82a1dd1b5d9df39d6dd59a580f24be224c7ea7 100644 --- a/earthdiagnostics/general/rewrite.py +++ b/earthdiagnostics/general/rewrite.py @@ -1,5 +1,5 @@ # coding=utf-8 -from earthdiagnostics.diagnostic import Diagnostic, DiagnosticOption, DiagnosticDomainOption +from earthdiagnostics.diagnostic import Diagnostic, DiagnosticOption, DiagnosticDomainOption, DiagnosticVariableOption from earthdiagnostics.modelingrealm import ModelingRealm @@ -58,7 +58,7 @@ class Rewrite(Diagnostic): :type options: list[str] :return: """ - options_available = (DiagnosticOption('variable'), + options_available = (DiagnosticVariableOption('variable'), DiagnosticDomainOption('domain'), DiagnosticOption('grid', '')) options = cls.process_options(options, options_available) diff --git a/earthdiagnostics/general/scale.py b/earthdiagnostics/general/scale.py index ceaac656cedcac6ee8c7b17f6881a471eea72b58..1605df2ef6bd810ef3608363bf9a373b75a36a99 100644 --- a/earthdiagnostics/general/scale.py +++ b/earthdiagnostics/general/scale.py @@ -1,5 +1,6 @@ # coding=utf-8 -from earthdiagnostics.diagnostic import Diagnostic, DiagnosticOption, DiagnosticFloatOption, DiagnosticDomainOption +from earthdiagnostics.diagnostic import Diagnostic, DiagnosticOption, DiagnosticFloatOption, DiagnosticDomainOption, \ + 
DiagnosticVariableOption from earthdiagnostics.utils import Utils from earthdiagnostics.modelingrealm import ModelingRealm import numpy as np @@ -70,10 +71,10 @@ class Scale(Diagnostic): :type options: list[str] :return: """ - options_available = (DiagnosticFloatOption('value'), - DiagnosticFloatOption('offset'), + options_available = (DiagnosticVariableOption('variable'), DiagnosticDomainOption('domain'), - DiagnosticOption('variable'), + DiagnosticFloatOption('value'), + DiagnosticFloatOption('offset'), DiagnosticOption('grid', ''), DiagnosticFloatOption('min_limit', float('nan')), DiagnosticFloatOption('max_limit', float('nan'))) @@ -102,9 +103,9 @@ class Scale(Diagnostic): grid=self.grid) def _check_limits(self): - if not math.isnan(self.min_limit) and np.amin(self.original_values) < self.min_limit: + if not math.isnan(self.min_limit) and (self.original_values < self.min_limit).any(): return False - if not math.isnan(self.max_limit) is not None and np.amax(self.original_values) > self.max_limit: + if not math.isnan(self.max_limit) and (self.original_values > self.max_limit).any(): return False return True diff --git a/earthdiagnostics/modelingrealm.py b/earthdiagnostics/modelingrealm.py index 3a970eee675dcc735145737b6cb1c903e4c80b5f..acc5646c9c4b8b999dd200725f83e1e4bbf7ac90 100644 --- a/earthdiagnostics/modelingrealm.py +++ b/earthdiagnostics/modelingrealm.py @@ -4,12 +4,6 @@ from earthdiagnostics.frequency import Frequencies class ModelingRealm(object): - @staticmethod - def parse(domain_name): - if isinstance(domain_name, ModelingRealm): - return domain_name - return ModelingRealm(domain_name) - def __init__(self, domain_name): domain_name = domain_name.lower() if domain_name == 'seaice': @@ -23,7 +17,7 @@ class ModelingRealm(object): elif domain_name in ['ocean', 'atmos', 'land', 'aerosol']: self.name = domain_name else: - raise ValueError('Domain {0} not recognized!'.format(domain_name)) + raise ValueError('Modelling realm {0} not 
recognized!'.format(domain_name)) def __eq__(self, other): return other.__class__ == ModelingRealm and self.name == other.name @@ -74,3 +68,27 @@ class ModelingRealms(object): atmosChem = ModelingRealm('atmosChem') ocnBgchem = ModelingRealm('ocnBgchem') + @classmethod + def parse(cls, modelling_realm): + """ + Return the modelling realm matching the given name. If the parameter is already a ModelingRealm instance, it + is returned directly. This behaviour is intended to facilitate the development of methods that can accept + either a name or a ModelingRealm instance to characterize the realm. + + :param modelling_realm: modelling realm name or instance + :type modelling_realm: str | ModelingRealm + :return: modelling realm instance corresponding to the given name + :rtype: ModelingRealm + """ + if isinstance(modelling_realm, ModelingRealm): + return modelling_realm + for name in cls.__dict__.keys(): + if name.startswith('_'): + continue + # noinspection PyCallByClass + value = cls.__getattribute__(cls, name) + if isinstance(value, ModelingRealm): + if modelling_realm.lower() in [value.name.lower()]: + return value + raise ValueError('Modelling realm {0} not recognized!'.format(modelling_realm)) + diff --git a/earthdiagnostics/ocean/__init__.py b/earthdiagnostics/ocean/__init__.py index 0628fb68cbf6a22162783c241bd460736c9a3138..14f2bd65898c5f46465845ec408688e9f1d7bacd 100644 --- a/earthdiagnostics/ocean/__init__.py +++ b/earthdiagnostics/ocean/__init__.py @@ -19,3 +19,5 @@ from earthdiagnostics.ocean.mixedlayersaltcontent import MixedLayerSaltContent from earthdiagnostics.ocean.siasiesiv import Siasiesiv from earthdiagnostics.ocean.heatcontentlayer import HeatContentLayer from earthdiagnostics.ocean.mixedlayerheatcontent import MixedLayerHeatContent +from earthdiagnostics.ocean.regionmean import RegionMean +from earthdiagnostics.ocean.rotation import Rotation diff --git a/earthdiagnostics/ocean/areamoc.py b/earthdiagnostics/ocean/areamoc.py index 
d2ea66ef1699f48bc4912eef7fdedadec62b3be2..e65fe622669435ca5f30abd12ec9e025be99cde0 100644 --- a/earthdiagnostics/ocean/areamoc.py +++ b/earthdiagnostics/ocean/areamoc.py @@ -54,8 +54,9 @@ class AreaMoc(Diagnostic): self.basin == other.basin and self.box == other.box def __str__(self): - return 'Area MOC Startdate: {0} Member: {1} Chunk: {2} Box: {3}'.format(self.startdate, self.member, - self.chunk, self.box) + return 'Area MOC Startdate: {0} Member: {1} Chunk: {2} Box: {3} Basin: {4}'.format(self.startdate, self.member, + self.chunk, self.box, + self.basin) @classmethod def generate_jobs(cls, diags, options): @@ -112,9 +113,8 @@ class AreaMoc(Diagnostic): if len(basin_index) == 0: raise Exception('Basin {0} not defined in file') basin_index = basin_index[0][0] - nco.ncks(input=temp, output=temp, options='-O -d basin,{0}'.format(basin_index)) - # To remove basin dimension - nco.ncwa(input=temp, output=temp, options='-O -a basin') + # To select basin and remove dimension + nco.ncwa(input=temp, output=temp, options='-O -d basin,{0} -a basin'.format(basin_index)) source = Utils.openCdf(temp) destiny = Utils.openCdf(temp2, 'w') diff --git a/earthdiagnostics/ocean/averagesection.py b/earthdiagnostics/ocean/averagesection.py index c4ddfb4132c6d2918ad3a12d6a987df2ba05c759..96c1162f83f243269ba9985065366a39ef0ce923 100644 --- a/earthdiagnostics/ocean/averagesection.py +++ b/earthdiagnostics/ocean/averagesection.py @@ -1,7 +1,8 @@ # coding=utf-8 import os from earthdiagnostics.box import Box -from earthdiagnostics.diagnostic import Diagnostic, DiagnosticIntOption, DiagnosticOption, DiagnosticDomainOption +from earthdiagnostics.diagnostic import Diagnostic, DiagnosticIntOption, DiagnosticDomainOption, \ + DiagnosticVariableOption from earthdiagnostics.utils import Utils, TempFile from earthdiagnostics.modelingrealm import ModelingRealm, ModelingRealms @@ -64,7 +65,7 @@ class AverageSection(Diagnostic): :type options: list[str] :return: """ - options_available = 
(DiagnosticOption('variable'), + options_available = (DiagnosticVariableOption('variable'), DiagnosticIntOption('min_lon'), DiagnosticIntOption('max_lon'), DiagnosticIntOption('min_lat'), diff --git a/earthdiagnostics/ocean/convectionsites.py b/earthdiagnostics/ocean/convectionsites.py index ca140c2233fc82e7ee73ed2ca84472361584704b..e2bcddf69181c540b0332eafbb7cd2b75f4fabcd 100644 --- a/earthdiagnostics/ocean/convectionsites.py +++ b/earthdiagnostics/ocean/convectionsites.py @@ -1,6 +1,6 @@ # coding=utf-8 import numpy as np -from autosubmit.config.log import Log +from bscearth.utils.log import Log from earthdiagnostics.diagnostic import Diagnostic from earthdiagnostics.utils import Utils, TempFile from earthdiagnostics.constants import Models diff --git a/earthdiagnostics/ocean/cutsection.py b/earthdiagnostics/ocean/cutsection.py index 899c89ccae5d703a3d46fcc043833d1978145d4e..282c0f5dded4d31b541400565439dda866d4f27b 100644 --- a/earthdiagnostics/ocean/cutsection.py +++ b/earthdiagnostics/ocean/cutsection.py @@ -1,9 +1,9 @@ # coding=utf-8 import numpy as np -from autosubmit.config.log import Log +from bscearth.utils.log import Log -from earthdiagnostics.diagnostic import Diagnostic, DiagnosticOption, DiagnosticBoolOption, DiagnosticIntOption, \ - DiagnosticDomainOption +from earthdiagnostics.diagnostic import Diagnostic, DiagnosticBoolOption, DiagnosticIntOption, \ + DiagnosticDomainOption, DiagnosticVariableOption from earthdiagnostics.box import Box from earthdiagnostics.utils import Utils from earthdiagnostics.modelingrealm import ModelingRealms @@ -71,7 +71,7 @@ class CutSection(Diagnostic): :type options: list[str] :return: """ - options_available = (DiagnosticOption('variable'), + options_available = (DiagnosticVariableOption('variable'), DiagnosticBoolOption('zonal'), DiagnosticIntOption('value'), DiagnosticDomainOption('domain', ModelingRealms.ocean)) diff --git a/earthdiagnostics/ocean/gyres.py b/earthdiagnostics/ocean/gyres.py index 
c7eb720f6dc659ba78a983ca844e57d80938b7f1..adecfb0ca1a8ec2ea4e7c6285aa006b7cd5d90b8 100644 --- a/earthdiagnostics/ocean/gyres.py +++ b/earthdiagnostics/ocean/gyres.py @@ -1,6 +1,6 @@ # coding=utf-8 import numpy as np -from autosubmit.config.log import Log +from bscearth.utils.log import Log from earthdiagnostics.constants import Models from earthdiagnostics.diagnostic import Diagnostic diff --git a/earthdiagnostics/ocean/heatcontent.py b/earthdiagnostics/ocean/heatcontent.py index a82a4da64bca7383255f8e859a11ff9929fcafd0..c67555828cf9f87e88258080a5e7104a5ac1c466 100644 --- a/earthdiagnostics/ocean/heatcontent.py +++ b/earthdiagnostics/ocean/heatcontent.py @@ -1,8 +1,6 @@ # coding=utf-8 import shutil -from autosubmit.config.log import Log - from earthdiagnostics import cdftools from earthdiagnostics.constants import Basins from earthdiagnostics.utils import Utils, TempFile @@ -98,12 +96,14 @@ class HeatContent(Diagnostic): nco.ncks(input=mlotst_file, output=temperature_file, options='-A -v mlotst') para = list() - para.append('0') - para.append('0') - para.append('0') - para.append('0') - para.append(self.box.min_depth) - para.append(self.box.max_depth) + if self.box.min_depth != 0: + para.append('-zoom') + para.append(0) + para.append(0) + para.append(0) + para.append(0) + para.append(self.box.min_depth) + para.append(self.box.max_depth) if self.mxloption != 0: para.append('-mxloption') para.append(str(self.mxloption)) @@ -113,13 +113,15 @@ class HeatContent(Diagnostic): raise Exception('Basin {0} is not defined on mask_regions.nc'.format(self.basin.fullname)) handler.close() - para.append('-maskfile') + para.append('-M') para.append('mask_regions.3d.nc') - para.append('-mask') para.append(self.basin.fullname) - shell_output = cdftools.run('cdfheatc', options=para, input=temperature_file) + temp2 = TempFile.get() + + cdftools.run('cdfheatc', options=para, input=temperature_file, output=temp2, input_option='-f') + results = Utils.openCdf(temp2) heatcsum_temp = 
TempFile.get() heatcvmean_temp = TempFile.get() nco.ncks(input=temperature_file, output=heatcsum_temp, options='-O -v time') @@ -130,36 +132,19 @@ class HeatContent(Diagnostic): thc.standard_name = "integral_of_sea_water_potential_temperature_expressed_as_heat_content" thc.long_name = "Total heat content" thc.units = "J" + thc[:] = results.variables['heatc3d'][:, 0, 0] + heatcsum_handler.close() heatcvmean_handler = Utils.openCdf(heatcvmean_temp) uhc = heatcvmean_handler.createVariable('heatcvmean', float, 'time') uhc.standard_name = "integral_of_sea_water_potential_temperature_expressed_as_heat_content" uhc.long_name = "Heat content per unit volume" uhc.units = "J*m^-3" - - time = 0 - # noinspection PyUnboundLocalVariable - for lines in shell_output: - if not lines: - continue - - for line in lines.split('\n'): - line = line.lstrip() - if line.startswith("Heat Content at level"): - Log.info(line) - elif line.startswith("Total Heat content/volume"): - Log.user_warning(line) - uhc[time] = line[line.index(':') + 1: line.index('Joules')] - time += 1 - if line.startswith("Total Heat content "): - Log.result(line) - thc[time] = line[line.index(':') + 1: line.index('Joules')] - elif line.startswith('TIME : '): - Log.info(line) - - heatcsum_handler.close() + uhc[:] = results.variables['heatc3dpervol'][:, 0, 0] heatcvmean_handler.close() + results.close() + if self.box.min_depth == 0: # For cdftools, this is all levels box_save = None @@ -168,7 +153,7 @@ class HeatContent(Diagnostic): Utils.setminmax(heatcsum_temp, 'heatcsum') self.send_file(heatcsum_temp, ModelingRealms.ocean, 'heatcsum', self.startdate, self.member, self.chunk, - box=box_save, region=self.basin.fullname, rename_var='heatcsum') + box=box_save, region=self.basin.fullname) Utils.setminmax(heatcvmean_temp, 'heatcvmean') self.send_file(heatcvmean_temp, ModelingRealms.ocean, 'heatcvmean', self.startdate, self.member, self.chunk, - box=box_save, region=self.basin.fullname, rename_var='heatcvmean') + 
box=box_save, region=self.basin.fullname) diff --git a/earthdiagnostics/ocean/interpolate.py b/earthdiagnostics/ocean/interpolate.py index e1f0ff48297cbd07d572e4ccaf02e3b571ab317b..123447c4a9d03b4106959f09af614ea73033c501 100644 --- a/earthdiagnostics/ocean/interpolate.py +++ b/earthdiagnostics/ocean/interpolate.py @@ -3,8 +3,10 @@ import shutil import threading import os -from autosubmit.config.log import Log -from earthdiagnostics.diagnostic import Diagnostic, DiagnosticOption, DiagnosticDomainOption, DiagnosticBoolOption +from bscearth.utils.log import Log +from earthdiagnostics.diagnostic import Diagnostic, DiagnosticOption, DiagnosticDomainOption, DiagnosticBoolOption, \ + DiagnosticVariableOption + from earthdiagnostics.utils import Utils, TempFile from earthdiagnostics.modelingrealm import ModelingRealms @@ -80,7 +82,7 @@ class Interpolate(Diagnostic): :return: """ options_available = (DiagnosticOption('target_grid'), - DiagnosticOption('variable'), + DiagnosticVariableOption('variable'), DiagnosticDomainOption('domain', ModelingRealms.ocean), DiagnosticBoolOption('invert_lat', False)) options = cls.process_options(options, options_available) diff --git a/earthdiagnostics/ocean/interpolatecdo.py b/earthdiagnostics/ocean/interpolatecdo.py index 68a04f5817dac95412b05bf64a16bdfaab5ce3d9..404e4aa520e852808c22ee850cc597a809ea960c 100644 --- a/earthdiagnostics/ocean/interpolatecdo.py +++ b/earthdiagnostics/ocean/interpolatecdo.py @@ -1,6 +1,7 @@ # coding=utf-8 from earthdiagnostics.constants import Basins -from earthdiagnostics.diagnostic import Diagnostic, DiagnosticOption, DiagnosticDomainOption +from earthdiagnostics.diagnostic import Diagnostic, DiagnosticOption, DiagnosticDomainOption, DiagnosticBoolOption, \ + DiagnosticVariableOption from earthdiagnostics.utils import Utils, TempFile import numpy as np @@ -35,7 +36,8 @@ class InterpolateCDO(Diagnostic): alias = 'interpcdo' "Diagnostic alias for the configuration file" - def __init__(self, data_manager, 
startdate, member, chunk, domain, variable, target_grid, model_version): + def __init__(self, data_manager, startdate, member, chunk, domain, variable, target_grid, model_version, + mask_oceans): Diagnostic.__init__(self, data_manager) self.startdate = startdate self.member = member @@ -47,6 +49,7 @@ class InterpolateCDO(Diagnostic): self.generated_vars = [variable] self.tempTemplate = '' self.grid = target_grid + self.mask_oceans = mask_oceans def __eq__(self, other): return self.startdate == other.startdate and self.member == other.member and self.chunk == other.chunk and \ @@ -54,7 +57,7 @@ class InterpolateCDO(Diagnostic): self.variable == other.variable and self.grid == other.grid def __str__(self): - return 'Interpolate Startdate: {0} Member: {1} Chunk: {2} ' \ + return 'Interpolate with CDO Startdate: {0} Member: {1} Chunk: {2} ' \ 'Variable: {3}:{4} Target grid: {5} ' \ 'Model: {6}' .format(self.startdate, self.member, self.chunk, self.domain, self.variable, self.grid, self.model_version) @@ -70,26 +73,27 @@ class InterpolateCDO(Diagnostic): :type options: list[str] :return: """ - options_available = (DiagnosticOption('variable'), + options_available = (DiagnosticVariableOption('variable'), DiagnosticOption('target_grid', diags.config.experiment.atmos_grid.lower()), - DiagnosticDomainOption('domain', ModelingRealms.ocean)) + DiagnosticDomainOption('domain', ModelingRealms.ocean), + DiagnosticBoolOption('mask_oceans', True)) options = cls.process_options(options, options_available) target_grid = cls._translate_ifs_grids_to_cdo_names(options['target_grid']) job_list = list() for startdate, member, chunk in diags.config.experiment.get_chunk_list(): job_list.append(InterpolateCDO(diags.data_manager, startdate, member, chunk, options['domain'], options['variable'], target_grid, - diags.config.experiment.model_version)) + diags.config.experiment.model_version, options['mask_oceans'])) return job_list @classmethod def _translate_ifs_grids_to_cdo_names(cls, 
target_grid): if target_grid.upper().startswith('T159L'): - target_grid = 't159grid' + target_grid = 't106grid' if target_grid.upper().startswith('T255L'): - target_grid = 't255grid' + target_grid = 't170grid' if target_grid.upper().startswith('T511L'): - target_grid = 't511grid' + target_grid = 't340grid' return target_grid def compute(self): @@ -97,13 +101,14 @@ class InterpolateCDO(Diagnostic): Runs the diagnostic """ variable_file = self.data_manager.get_file(self.domain, self.variable, self.startdate, self.member, self.chunk) - handler = Utils.openCdf(variable_file) - var = handler.variables[self.variable] - mask = Utils.get_mask(Basins.Global).astype(float) - mask[mask == 0] = np.nan - var[:] = mask * var[:] - handler.close() + if self.mask_oceans: + handler = Utils.openCdf(variable_file) + var = handler.variables[self.variable] + mask = Utils.get_mask(Basins.Global).astype(float) + mask[mask == 0] = np.nan + var[:] = mask * var[:] + handler.close() cdo = Utils.cdo temp = TempFile.get() diff --git a/earthdiagnostics/ocean/maxmoc.py b/earthdiagnostics/ocean/maxmoc.py index 18f27417203fdfcb9ba029da3940f373a54e4fb3..2717d413a9875703e443d0869f4e860ff29fbcf4 100644 --- a/earthdiagnostics/ocean/maxmoc.py +++ b/earthdiagnostics/ocean/maxmoc.py @@ -2,13 +2,14 @@ import netCDF4 import numpy as np import os -from autosubmit.config.log import Log +from bscearth.utils.log import Log from earthdiagnostics.constants import Basins from earthdiagnostics.box import Box -from earthdiagnostics.diagnostic import Diagnostic, DiagnosticIntOption, DiagnosticBasinOption +from earthdiagnostics.diagnostic import Diagnostic, DiagnosticBasinOption, DiagnosticFloatOption from earthdiagnostics.frequency import Frequencies from earthdiagnostics.utils import Utils from earthdiagnostics.modelingrealm import ModelingRealms +from earthdiagnostics.variable_type import VariableType class MaxMoc(Diagnostic): @@ -67,13 +68,13 @@ class MaxMoc(Diagnostic): :type options: list[str] :return: """ - 
options_available = (DiagnosticIntOption('min_lat'), - DiagnosticIntOption('max_lat'), - DiagnosticIntOption('min_depth'), - DiagnosticIntOption('max_depth'), + options_available = (DiagnosticFloatOption('min_lat'), + DiagnosticFloatOption('max_lat'), + DiagnosticFloatOption('min_depth'), + DiagnosticFloatOption('max_depth'), DiagnosticBasinOption('basin', Basins.Global)) options = cls.process_options(options, options_available) - box = Box() + box = Box(True) box.min_lat = options['min_lat'] box.max_lat = options['max_lat'] box.min_depth = options['min_depth'] @@ -156,7 +157,7 @@ class MaxMoc(Diagnostic): var[0] = maximum handler.close() self.send_file(temp, ModelingRealms.ocean, 'vsftmyzmax', self.startdate, self.member, box=self.box, - frequency=Frequencies.yearly, year=self.year) + frequency=Frequencies.yearly, year=self.year, vartype=VariableType.STATISTIC) handler = self._create_output_file(temp) var = handler.createVariable('vsftmyzmaxlat', float, ('time',)) @@ -167,7 +168,7 @@ class MaxMoc(Diagnostic): var[0] = max_lat handler.close() self.send_file(temp, ModelingRealms.ocean, 'vsftmyzmax', self.startdate, self.member, box=self.box, - frequency=Frequencies.yearly, year=self.year) + frequency=Frequencies.yearly, year=self.year, vartype=VariableType.STATISTIC) handler = self._create_output_file(temp) var = handler.createVariable('vsftmyzmaxlev', float, ('time',)) @@ -178,7 +179,7 @@ class MaxMoc(Diagnostic): var[0] = max_lev handler.close() self.send_file(temp, ModelingRealms.ocean, 'vsftmyzmax', self.startdate, self.member, box=self.box, - frequency=Frequencies.yearly, year=self.year) + frequency=Frequencies.yearly, year=self.year, vartype=VariableType.STATISTIC) handler = self._create_output_file(temp) var = handler.createVariable('vsftmyzmin', float, ('time',)) @@ -189,7 +190,7 @@ class MaxMoc(Diagnostic): var[0] = minimum handler.close() self.send_file(temp, ModelingRealms.ocean, 'vsftmyzmax', self.startdate, self.member, box=self.box, - 
frequency=Frequencies.yearly, year=self.year) + frequency=Frequencies.yearly, year=self.year, vartype=VariableType.STATISTIC) handler = self._create_output_file(temp) var = handler.createVariable('vsftmyzminlat', float, ('time',)) @@ -200,7 +201,7 @@ class MaxMoc(Diagnostic): var[0] = min_lat handler.close() self.send_file(temp, ModelingRealms.ocean, 'vsftmyzmax', self.startdate, self.member, box=self.box, - frequency=Frequencies.yearly, year=self.year) + frequency=Frequencies.yearly, year=self.year, vartype=VariableType.STATISTIC) handler = self._create_output_file(temp) var = handler.createVariable('vsftmyzminlev', float, ('time',)) @@ -211,7 +212,7 @@ class MaxMoc(Diagnostic): var[0] = min_lev handler.close() self.send_file(temp, ModelingRealms.ocean, 'vsftmyzmax', self.startdate, self.member, box=self.box, - frequency=Frequencies.yearly, year=self.year) + frequency=Frequencies.yearly, year=self.year, vartype=VariableType.STATISTIC) def _create_output_file(self, temp): handler = netCDF4.Dataset(temp, 'w') diff --git a/earthdiagnostics/ocean/moc.py b/earthdiagnostics/ocean/moc.py index ee070c19e9bb6b06af376a650700649aa79a7cf8..52bbc018ab9d3d0d93cf2bebc30d8faab02f32ec 100644 --- a/earthdiagnostics/ocean/moc.py +++ b/earthdiagnostics/ocean/moc.py @@ -1,6 +1,6 @@ # coding=utf-8 import numpy as np -from autosubmit.config.log import Log +from bscearth.utils.log import Log from earthdiagnostics import cdftools from earthdiagnostics.constants import Basins @@ -80,26 +80,31 @@ class Moc(Diagnostic): Log.debug('Reformatting variables') handler = Utils.openCdf(temp) - handler.createDimension('basin', 5) + basins_list = [Basins.Global.fullname] + if 'zomsfatl' in handler.variables: + basins_list += [Basins.Atlantic.fullname, Basins.Pacific.fullname, Basins.IndoPacific.fullname, + Basins.Indian.fullname] + + handler.createDimension('basin', len(basins_list)) handler.createVariable('basin', str, 'basin') - handler.variables['basin'][:] = np.array([Basins.Global.fullname, 
Basins.Atlantic.fullname, - Basins.Pacific.fullname, Basins.IndoPacific.fullname, - Basins.Indian.fullname], dtype=object) + handler.variables['basin'][:] = np.array(basins_list, dtype=object) example = handler.variables['zomsfglo'] # noinspection PyProtectedMember moc = handler.createVariable('vsftmyz', example.datatype, ('time', 'lev', 'i', 'j', 'basin'), fill_value=example._FillValue) - moc.units = example.units + moc.units = Utils.convert_to_ASCII_if_possible(example.units) moc.add_offset = example.add_offset moc.scale_factor = example.scale_factor moc[:, :, :, :, 0] = handler.variables['zomsfglo'][:] - moc[:, :, :, :, 1] = handler.variables['zomsfatl'][:] - moc[:, :, :, :, 2] = handler.variables['zomsfpac'][:] - moc[:, :, :, :, 3] = handler.variables['zomsfinp'][:] - moc[:, :, :, :, 4] = handler.variables['zomsfind'][:] + + if 'zomsfatl' in handler.variables: + moc[:, :, :, :, 1] = handler.variables['zomsfatl'][:] + moc[:, :, :, :, 2] = handler.variables['zomsfpac'][:] + moc[:, :, :, :, 3] = handler.variables['zomsfinp'][:] + moc[:, :, :, :, 4] = handler.variables['zomsfind'][:] handler.close() diff --git a/earthdiagnostics/ocean/regionmean.py b/earthdiagnostics/ocean/regionmean.py new file mode 100644 index 0000000000000000000000000000000000000000..99734b8b5e7cac5872990a12c561fea7a1bd3f7f --- /dev/null +++ b/earthdiagnostics/ocean/regionmean.py @@ -0,0 +1,118 @@ +# coding=utf-8 +from earthdiagnostics import cdftools +from earthdiagnostics.box import Box +from earthdiagnostics.constants import Basins +from earthdiagnostics.diagnostic import Diagnostic, DiagnosticOption, DiagnosticIntOption, DiagnosticDomainOption, \ + DiagnosticBoolOption, DiagnosticBasinOption, DiagnosticVariableOption +from earthdiagnostics.utils import Utils, TempFile +from earthdiagnostics.modelingrealm import ModelingRealms + + +class RegionMean(Diagnostic): + """ + Chooses vertical level in ocean, or vertically averages between + 2 or more ocean levels + + :original author: Javier 
Vegas-Regidor + + :created: January 2017 + + :param data_manager: data management object + :type data_manager: DataManager + :param startdate: startdate + :type startdate: str + :param member: member number + :type member: int + :param chunk: chunk's number + :type chunk: int + :param variable: variable to average + :type variable: str + :param box: box used to restrict the vertical mean + :type box: Box + """ + + alias = 'regmean' + "Diagnostic alias for the configuration file" + + def __init__(self, data_manager, startdate, member, chunk, domain, variable, grid, box, save3d, basin): + Diagnostic.__init__(self, data_manager) + self.startdate = startdate + self.member = member + self.chunk = chunk + self.domain = domain + self.variable = variable + self.grid = grid.upper() + self.box = box + self.save3d = save3d + self.basin = basin + self.required_vars = [variable] + self.generated_vars = [variable + 'vmean'] + + def __eq__(self, other): + return self.startdate == other.startdate and self.member == other.member and self.chunk == other.chunk and \ + self.box == other.box and self.variable == other.variable + + def __str__(self): + return 'Vertical mean Startdate: {0} Member: {1} Chunk: {2} Variable: {3} ' \ + 'Box: {4}'.format(self.startdate, self.member, self.chunk, self.variable, self.box) + + @classmethod + def generate_jobs(cls, diags, options): + """ + Creates a job for each chunk to compute the diagnostic + + :param diags: Diagnostics manager class + :type diags: Diags + :param options: variable, minimum depth (level), maximum depth (level) + :type options: list[str] + :return: + """ + options_available = (DiagnosticDomainOption('domain'), + DiagnosticVariableOption('variable'), + DiagnosticOption('grid'), + DiagnosticBasinOption('basin', Basins.Global), + DiagnosticBoolOption('save3D', False), + DiagnosticIntOption('min_depth', 0), + DiagnosticIntOption('max_depth', 0)) + options = cls.process_options(options, options_available) + + box = Box() + 
box.min_depth = options['min_depth'] + box.max_depth = options['max_depth'] + + job_list = list() + for startdate, member, chunk in diags.config.experiment.get_chunk_list(): + job_list.append(RegionMean(diags.data_manager, startdate, member, chunk, + options['domain'], options['variable'], options['grid'], box, options['save3D'], + options['basin'])) + return job_list + + def compute(self): + """ + Runs the diagnostic + """ + temp = TempFile.get() + variable_file = self.data_manager.get_file(self.domain, self.variable, self.startdate, self.member, self.chunk) + + cdfmean_options = [self.variable, self.grid, 0, 0, 0, 0, self.box.min_depth, self.box.max_depth] + if self.basin != Basins.Global: + cdfmean_options.append('-M') + cdfmean_options.append('mask_regions.3d.nc') + cdfmean_options.append(self.basin.shortname) + + cdftools.run('cdfmean', input=variable_file, output=temp, options=cdfmean_options) + Utils.setminmax(temp, 'mean_{0}'.format(self.variable)) + + if self.box.min_depth == 0: + # For cdftools, this is all levels + box_save = None + else: + box_save = self.box + + self.send_file(temp, ModelingRealms.ocean, self.variable + 'mean', self.startdate, self.member, self.chunk, + box=box_save, rename_var='mean_{0}'.format(self.variable), region=self.basin) + if self.save3d: + Utils.setminmax(temp, 'mean_3D{0}'.format(self.variable)) + self.send_file(temp, ModelingRealms.ocean, self.variable + '3dmean', self.startdate, self.member, + self.chunk, box=box_save, rename_var='mean_3D{0}'.format(self.variable), region=self.basin) + diff --git a/earthdiagnostics/ocean/rotation.py b/earthdiagnostics/ocean/rotation.py new file mode 100644 index 0000000000000000000000000000000000000000..70525d60c686db2c5676cc52b55329760df16c8f --- /dev/null +++ b/earthdiagnostics/ocean/rotation.py @@ -0,0 +1,115 @@ +# coding=utf-8 +from bscearth.utils.log import Log +import os + +from earthdiagnostics.diagnostic import Diagnostic, DiagnosticOption, DiagnosticDomainOption, 
DiagnosticVariableOption +from earthdiagnostics.utils import Utils, TempFile +from earthdiagnostics.modelingrealm import ModelingRealms + + +class Rotation(Diagnostic): + """ + Cuts a meridional or zonal section + + :original author: Virginie Guemas + :contributor: Javier Vegas-Regidor + + :created: September 2012 + :last modified: June 2016 + + :param data_manager: data management object + :type data_manager: DataManager + :param startdate: startdate + :type startdate: str + :param member: member number + :type member: int + :param chunk: chunk's number + :type chunk: int + :param variable: variable's name + :type variable: str + :param domain: variable's domain + :type domain: Domain + """ + + alias = 'rotate' + "Diagnostic alias for the configuration file" + + def __init__(self, data_manager, startdate, member, chunk, domain, variableu, variablev, executable): + Diagnostic.__init__(self, data_manager) + self.startdate = startdate + self.member = member + self.chunk = chunk + self.variableu = variableu + self.variablev = variablev + self.domain = domain + self.executable = executable + + def __eq__(self, other): + return self.startdate == other.startdate and self.member == other.member and self.chunk == other.chunk and \ + self.domain == other.domain and self.variableu == other.variableu and self.variablev == other.variablev + + def __str__(self): + return 'Rotate variables Startdate: {0} Member: {1} Chunk: {2} Variables: {3}:{4} , ' \ + '{3}:{5}'.format(self.startdate, self.member, self.chunk, self.domain, self.variableu, + self.variablev) + + @classmethod + def generate_jobs(cls, diags, options): + """ + Creates a job for each chunk to compute the diagnostic + + :param diags: Diagnostics manager class + :type diags: Diags + :param options: variable, zonal, value, domain=ocean + :type options: list[str] + :return: + """ + options_available = (DiagnosticVariableOption('variableu'), + DiagnosticVariableOption('variablev'), + DiagnosticDomainOption('domain', 
ModelingRealms.ocean), + DiagnosticOption('executable', + '/home/Earth/jvegas/pyCharm/cfutools/interpolation/rotateUVorca')) + options = cls.process_options(options, options_available) + + job_list = list() + for startdate, member, chunk in diags.config.experiment.get_chunk_list(): + job_list.append(Rotation(diags.data_manager, startdate, member, chunk, + options['domain'], options['variableu'], options['variablev'], + options['executable'])) + return job_list + + def compute(self): + """ + Runs the diagnostic + """ + ufile = self.data_manager.get_file(self.domain, self.variableu, self.startdate, self.member, self.chunk) + vfile = self.data_manager.get_file(self.domain, self.variablev, self.startdate, self.member, self.chunk) + + urotated = TempFile.get() + vrotated = TempFile.get() + + namelist_file = TempFile.get(suffix='') + rotate_namelist = open(namelist_file, 'w') + rotate_namelist.write("&nam_rotUV\n") + rotate_namelist.write(" Ufilein = {0}\n".format(ufile)) + rotate_namelist.write(" Uvarin = {0}\n".format(self.variableu)) + rotate_namelist.write(" Vfilein = {0}\n".format(vfile)) + rotate_namelist.write(" Vvarin = {0}\n".format(self.variablev)) + rotate_namelist.write(" meshmask = mask.nc\n") + rotate_namelist.write(" Ufileout = {0}\n".format(urotated)) + rotate_namelist.write(" Vfileout = {0}\n".format(vrotated)) + + rotate_namelist.writelines("/\n") + rotate_namelist.close() + + Utils.execute_shell_command('{0} {1}'.format(self.executable, namelist_file), Log.INFO) + + os.remove(ufile) + os.remove(vfile) + + self.send_file(urotated, self.domain, self.variableu, self.startdate, self.member, self.chunk, grid='rotated') + self.send_file(vrotated, self.domain, self.variablev, self.startdate, self.member, self.chunk, grid='rotated') + + + + diff --git a/earthdiagnostics/ocean/verticalmean.py b/earthdiagnostics/ocean/verticalmean.py index 4564bb0aae96cd23d279796624a7b2802bfec0b4..693aa080cdc2fe780f8406886b41930b76072b9f 100644 --- 
a/earthdiagnostics/ocean/verticalmean.py +++ b/earthdiagnostics/ocean/verticalmean.py @@ -1,7 +1,7 @@ # coding=utf-8 from earthdiagnostics import cdftools from earthdiagnostics.box import Box -from earthdiagnostics.diagnostic import Diagnostic, DiagnosticOption, DiagnosticIntOption +from earthdiagnostics.diagnostic import Diagnostic, DiagnosticIntOption, DiagnosticVariableOption from earthdiagnostics.utils import Utils, TempFile from earthdiagnostics.modelingrealm import ModelingRealms @@ -64,7 +64,7 @@ class VerticalMean(Diagnostic): :type options: list[str] :return: """ - options_available = (DiagnosticOption('variable'), + options_available = (DiagnosticVariableOption('variable'), DiagnosticIntOption('min_depth', -1), DiagnosticIntOption('max_depth', -1)) options = cls.process_options(options, options_available) diff --git a/earthdiagnostics/ocean/verticalmeanmeters.py b/earthdiagnostics/ocean/verticalmeanmeters.py index 3f280356c7e5e3f46e3b199b887cdd337357c1d1..7fbe1e423a7d1278ede9496034f023f5bcc46cb0 100644 --- a/earthdiagnostics/ocean/verticalmeanmeters.py +++ b/earthdiagnostics/ocean/verticalmeanmeters.py @@ -1,7 +1,8 @@ # coding=utf-8 from earthdiagnostics import cdftools from earthdiagnostics.box import Box -from earthdiagnostics.diagnostic import Diagnostic, DiagnosticOption, DiagnosticFloatOption +from earthdiagnostics.diagnostic import Diagnostic, DiagnosticFloatOption, DiagnosticDomainOption, \ + DiagnosticVariableOption from earthdiagnostics.utils import Utils, TempFile from earthdiagnostics.modelingrealm import ModelingRealms @@ -34,11 +35,12 @@ class VerticalMeanMeters(Diagnostic): alias = 'vertmeanmeters' "Diagnostic alias for the configuration file" - def __init__(self, data_manager, startdate, member, chunk, variable, box): + def __init__(self, data_manager, startdate, member, chunk, domain, variable, box): Diagnostic.__init__(self, data_manager) self.startdate = startdate self.member = member self.chunk = chunk + self.domain = domain 
self.variable = variable self.box = box self.required_vars = [variable] @@ -49,8 +51,8 @@ class VerticalMeanMeters(Diagnostic): self.box == other.box and self.variable == other.variable def __str__(self): - return 'Vertical mean meters Startdate: {0} Member: {1} Chunk: {2} Variable: {3} ' \ - 'Box: {4}'.format(self.startdate, self.member, self.chunk, self.variable, self.box) + return 'Vertical mean meters Startdate: {0} Member: {1} Chunk: {2} Variable: {3}:{4} ' \ + 'Box: {5}'.format(self.startdate, self.member, self.chunk, self.domain, self.variable, self.box) @classmethod def generate_jobs(cls, diags, options): @@ -63,9 +65,10 @@ class VerticalMeanMeters(Diagnostic): :type options: list[str] :return: """ - options_available = (DiagnosticOption('variable'), + options_available = (DiagnosticVariableOption('variable'), DiagnosticFloatOption('min_depth', -1), - DiagnosticFloatOption('max_depth', -1)) + DiagnosticFloatOption('max_depth', -1), + DiagnosticDomainOption('domain', ModelingRealms.ocean)) options = cls.process_options(options, options_available) box = Box(True) @@ -76,7 +79,8 @@ class VerticalMeanMeters(Diagnostic): job_list = list() for startdate, member, chunk in diags.config.experiment.get_chunk_list(): - job_list.append(VerticalMeanMeters(diags.data_manager, startdate, member, chunk, options['variable'], box)) + job_list.append(VerticalMeanMeters(diags.data_manager, startdate, member, chunk, + options['domain'], options['variable'], box)) return job_list def compute(self): @@ -84,7 +88,7 @@ class VerticalMeanMeters(Diagnostic): Runs the diagnostic """ temp = TempFile.get() - variable_file = self.data_manager.get_file(ModelingRealms.ocean, self.variable, self.startdate, self.member, + variable_file = self.data_manager.get_file(self.domain, self.variable, self.startdate, self.member, self.chunk) handler = Utils.openCdf(variable_file) @@ -102,5 +106,5 @@ class VerticalMeanMeters(Diagnostic): cdftools.run('cdfvertmean', input=variable_file, output=temp, 
options=[self.variable, 'T', lev_min, lev_max, '-debug']) Utils.setminmax(temp, '{0}_vert_mean'.format(self.variable)) - self.send_file(temp, ModelingRealms.ocean, self.variable + 'vmean', self.startdate, self.member, self.chunk, + self.send_file(temp, self.domain, self.variable + 'vmean', self.startdate, self.member, self.chunk, box=self.box, rename_var='{0}_vert_mean'.format(self.variable)) diff --git a/earthdiagnostics/parser.py b/earthdiagnostics/parser.py deleted file mode 100644 index 25678218ab32edc15b99dc27a4d61dc3c12d8a90..0000000000000000000000000000000000000000 --- a/earthdiagnostics/parser.py +++ /dev/null @@ -1,213 +0,0 @@ -# coding=utf-8 -from ConfigParser import SafeConfigParser -from autosubmit.config.log import Log -import re - - -# noinspection PyClassicStyleClass -class Parser(SafeConfigParser): - """ - Class to manage the config file. It add options to manage default values and to convert strings to the - desired types (int, bool, list ...) - """ - - def get_option(self, section, option, default=''): - """ - Gets an option - - :param section: section that contains the option - :type section: str - :param option: option to get - :type option: str - :param default: value to be returned if option is not present - :type default: object - :return: option value - :rtype: str - """ - if self.has_option(section, option): - return self.get(section, option) - else: - return default - - def get_list_option(self, section, option, default=list(), separator=' '): - """ - Gets a list option - - :param section: section that contains the option - :type section: str - :param option: option to get - :type option: str - :param default: value to be returned if option is not present - :type default: object - :param separator: separator used to split the list - :type separator: str - :return: option value - :rtype: list - """ - if self.has_option(section, option): - return self.get(section, option).split(separator) - else: - return default - - def 
get_bool_option(self, section, option, default=True): - """ - Gets a boolean option - - :param section: section that contains the option - :type section: str - :param option: option to get - :type option: str - :param default: value to be returned if option is not present - :type default: bool - :return: option value - :rtype: bool - """ - if self.has_option(section, option): - return self.get(section, option).lower().strip() == 'true' - else: - return default - - def get_int_option(self, section, option, default=0): - """ - Gets an integer option - - :param section: section that contains the option - :type section: str - :param option: option to get - :type option: str - :param default: value to be returned if option is not present - :type default: int - :return: option value - :rtype: int - """ - if self.has_option(section, option): - return int(self.get(section, option)) - else: - return default - - def get_float_option(self, section, option, default=0.0): - """ - Gets a float option - - :param section: section that contains the option - :type section: str - :param option: option to get - :type option: str - :param default: value to be returned if option is not present - :type default: float - :return: option value - :rtype: float - """ - if self.has_option(section, option): - return float(self.get(section, option)) - else: - return default - - def check_exists(self, section, option): - """ - Checks if an option exists - - :param section: section that contains the option - :type section: str - :param option: option to check - :type option: str - :return: True if option exists, False otherwise - :rtype: bool - """ - if self.has_option(section, option): - return True - else: - Log.error('Option {0} in section {1} not found'.format(option, section)) - return False - - def check_is_boolean(self, section, option, must_exist): - """ - Checks if an option is a boolean value - - :param section: section that contains the option - :type section: str - :param option: 
option to check - :type option: str - :param must_exist: if True, option must exist - :type must_exist: bool - :return: True if option value is boolean, False otherwise - :rtype: bool - """ - if must_exist and not self.check_exists(section, option): - Log.error('Option {0} in section {1} must exist'.format(option, section)) - return False - if self.get_option(section, option, 'false').lower() not in ['false', 'true']: - Log.error('Option {0} in section {1} must be true or false'.format(option, section)) - return False - return True - - def check_is_choice(self, section, option, must_exist, choices): - """ - Checks if an option is a valid choice in given self - - :param section: section that contains the option - :type section: str - :param option: option to check - :type option: str - :param must_exist: if True, option must exist - :type must_exist: bool - :param choices: valid choices - :type choices: list - :return: True if option value is a valid choice, False otherwise - :rtype: bool - """ - if must_exist and not self.check_exists(section, option): - return False - value = self.get_option(section, option, choices[0]) - if value not in choices: - Log.error('Value {2} in option {0} in section {1} is not a valid choice'.format(option, section, value)) - return False - return True - - def check_is_int(self, section, option, must_exist): - """ - Checks if an option is an integer value - - :param section: section that contains the option - :type section: str - :param option: option to check - :type option: str - :param must_exist: if True, option must exist - :type must_exist: bool - :return: True if option value is integer, False otherwise - :rtype: bool - """ - if must_exist and not self.check_exists(section, option): - return False - value = self.get_option(section, option, '1') - try: - int(value) - except ValueError: - Log.error('Option {0} in section {1} is not valid an integer'.format(option, section)) - return False - return True - - def check_regex(self, 
section, option, must_exist, regex): - """ - Checks if an option complies with a regular expression - - :param section: section that contains the option - :type section: str - :param option: option to check - :type option: str - :param must_exist: if True, option must exist - :type must_exist: bool - :param regex: regular expression to check - :type regex: str - :return: True if option complies with regex, False otherwise - :rtype: bool - """ - if must_exist and not self.check_exists(section, option): - return False - prog = re.compile(regex) - value = self.get_option(section, option, '1') - if not prog.match(value): - Log.error('Option {0} in section {1} is not valid: {2}'.format(option, section, value)) - return False - return True - diff --git a/earthdiagnostics/statistics/climatologicalpercentile.py b/earthdiagnostics/statistics/climatologicalpercentile.py index 33fe83d967c36795be04cb9ec7d2c2786480f12b..dd4d6acb0c5813415903a84af1e1e6a8187d80ef 100644 --- a/earthdiagnostics/statistics/climatologicalpercentile.py +++ b/earthdiagnostics/statistics/climatologicalpercentile.py @@ -1,8 +1,8 @@ # coding=utf-8 -from autosubmit.config.log import Log +from bscearth.utils.log import Log -from earthdiagnostics.diagnostic import Diagnostic, DiagnosticOption, DiagnosticDomainOption, DiagnosticListIntOption, \ - DiagnosticIntOption +from earthdiagnostics.diagnostic import Diagnostic, DiagnosticVariableOption, DiagnosticDomainOption, \ + DiagnosticListIntOption, DiagnosticIntOption from earthdiagnostics.frequency import Frequencies from earthdiagnostics.utils import Utils, TempFile from earthdiagnostics.variable_type import VariableType @@ -51,8 +51,8 @@ class ClimatologicalPercentile(Diagnostic): return self.domain == other.domain and self.variable == other.variable and self.leadtimes == other.leadtimes def __str__(self): - return 'Climatological percentile Variable: {0}:{1} Leadtimes: {2}'.format(self.domain, self.variable, - self.leadtimes) + return 'Climatological 
percentile Variable: {0}:{1} Leadtimes: {2} ' \ + 'Bins: {3}'.format(self.domain, self.variable, self.leadtimes, self.num_bins) @classmethod def generate_jobs(cls, diags, options): @@ -66,7 +66,7 @@ class ClimatologicalPercentile(Diagnostic): :return: """ options_available = (DiagnosticDomainOption('domain'), - DiagnosticOption('variable'), + DiagnosticVariableOption('variable'), DiagnosticListIntOption('leadtimes'), DiagnosticIntOption('bins', 2000)) options = cls.process_options(options, options_available) diff --git a/earthdiagnostics/statistics/monthlypercentile.py b/earthdiagnostics/statistics/monthlypercentile.py index 45b7652c16da2689003c8e71dd8caa378a2ee37a..eab4ea0260b1d752792a5b315643643974bffdf0 100644 --- a/earthdiagnostics/statistics/monthlypercentile.py +++ b/earthdiagnostics/statistics/monthlypercentile.py @@ -1,9 +1,9 @@ # coding=utf-8 import shutil -from autosubmit.config.log import Log +from bscearth.utils.log import Log -from earthdiagnostics.diagnostic import Diagnostic, DiagnosticOption, DiagnosticDomainOption, DiagnosticIntOption +from earthdiagnostics.diagnostic import Diagnostic, DiagnosticOption, DiagnosticDomainOption, DiagnosticListIntOption from earthdiagnostics.frequency import Frequencies from earthdiagnostics.utils import Utils, TempFile from earthdiagnostics.variable_type import VariableType @@ -29,23 +29,24 @@ class MonthlyPercentile(Diagnostic): alias = 'monpercent' "Diagnostic alias for the configuration file" - def __init__(self, data_manager, startdate, member, chunk, variable, domain, percentile): + def __init__(self, data_manager, startdate, member, chunk, domain, variable, percentiles): Diagnostic.__init__(self, data_manager) self.startdate = startdate self.member = member self.chunk = chunk self.variable = variable self.domain = domain - self.percentile = percentile + self.percentiles = percentiles def __eq__(self, other): return self.startdate == other.startdate and self.member == other.member and self.chunk == other.chunk 
and \ - self.domain == other.domain and self.variable == other.variable and self.percentile == other.percentile + self.domain == other.domain and self.variable == other.variable and self.percentiles == other.percentiles def __str__(self): - return 'Monthly percentile {0} Startdate: {0} Member: {1} Chunk: {2} ' \ - 'Variable: {3}:{4} Percentile: {5}'.format(self.startdate, self.member, self.chunk, - self.domain, self.variable, self.percentile) + return 'Monthly percentile Startdate: {0} Member: {1} Chunk: {2} ' \ + 'Variable: {3}:{4} Percentiles: {5}'.format(self.startdate, self.member, self.chunk, + self.domain, self.variable, + ', '.join(str(i) for i in self.percentiles)) @classmethod def generate_jobs(cls, diags, options): @@ -60,13 +61,13 @@ class MonthlyPercentile(Diagnostic): """ options_available = (DiagnosticOption('domain'), DiagnosticDomainOption('variable'), - DiagnosticIntOption('percentile', None, 0, 100)) + DiagnosticListIntOption('percentiles', None, 0, 100)) options = cls.process_options(options, options_available) job_list = list() for startdate, member, chunk in diags.config.experiment.get_chunk_list(): job_list.append(MonthlyPercentile(diags.data_manager, startdate, member, chunk, - options['variable'], options['domain'], options['percentile'])) + options['variable'], options['domain'], options['percentiles'])) return job_list def compute(self): @@ -108,12 +109,13 @@ class MonthlyPercentile(Diagnostic): monmax_file = TempFile.get() Utils.cdo.monmax(input=variable_file, output=monmax_file) - Log.debug('Computing percentile') - Utils.cdo.monpctl(str(self.percentile), input=[variable_file, monmin_file, monmax_file], output=temp) - Utils.rename_variable(temp, 'lev', 'ensemble', False, True) - self.send_file(temp, self.domain, '{0}_q{1}'.format(self.variable, self.percentile), self.startdate, - self.member, self.chunk, frequency=Frequencies.monthly, rename_var=self.variable, - vartype=VariableType.STATISTIC) + for percentile in self.percentiles: + 
Log.debug('Computing percentile {0}', percentile) + Utils.cdo.monpctl(str(percentile), input=[variable_file, monmin_file, monmax_file], output=temp) + Utils.rename_variable(temp, 'lev', 'ensemble', False, True) + self.send_file(temp, self.domain, '{0}_q{1}'.format(self.variable, percentile), self.startdate, + self.member, self.chunk, frequency=Frequencies.monthly, rename_var=self.variable, + vartype=VariableType.STATISTIC) diff --git a/earthdiagnostics/threddsmanager.py b/earthdiagnostics/threddsmanager.py index de75cd022e101d2dd1b2eef996399811872eeb62..ce7ebe14dce0066939edd61aa310a244b4e79d82 100644 --- a/earthdiagnostics/threddsmanager.py +++ b/earthdiagnostics/threddsmanager.py @@ -1,6 +1,6 @@ # coding=utf-8 import os -from autosubmit.date.chunk_date_lib import parse_date, add_months, chunk_start_date, chunk_end_date +from bscearth.utils.date import parse_date, add_months, chunk_start_date, chunk_end_date from earthdiagnostics.datamanager import DataManager, NetCDFFile from earthdiagnostics.utils import TempFile, Utils @@ -37,15 +37,15 @@ class THREDDSManager(DataManager): aggregation_path = self.get_var_url(variable, startdate, frequency, None, vartype) startdate = parse_date(startdate) start_chunk = chunk_start_date(startdate, self.experiment.num_chunks, self.experiment.chunk_size, - 'month', 'standard') - end_chunk = chunk_end_date(start_chunk, self.experiment.chunk_size, 'month', 'standard') + 'month', self.experiment.calendar) + end_chunk = chunk_end_date(start_chunk, self.experiment.chunk_size, 'month', self.experiment.calendar) thredds_subset = THREDDSSubset(aggregation_path, variable, startdate, end_chunk).get_url() - selected_months = ','.join([str(add_months(startdate, i, 'standard').month) for i in leadtimes]) + selected_months = ','.join([str(add_months(startdate, i, self.experiment.calendar).month) for i in leadtimes]) temp = TempFile.get() if self.config.data_type == 'exp': select_months = '-selmonth,{0} {1}'.format(selected_months, thredds_subset) 
- selected_years = ','.join([str(add_months(startdate, i, 'standard').year) for i in leadtimes]) + selected_years = ','.join([str(add_months(startdate, i, self.experiment.calendar).year) for i in leadtimes]) Utils.cdo.selyear(selected_years, input=select_months, output=temp) else: Utils.cdo.selmonth(selected_months, input=thredds_subset, output=temp) @@ -79,8 +79,9 @@ class THREDDSManager(DataManager): """ aggregation_path = self.get_var_url(var, startdate, frequency, box, vartype) - start_chunk = chunk_start_date(parse_date(startdate), chunk, self.experiment.chunk_size, 'month', 'standard') - end_chunk = chunk_end_date(start_chunk, self.experiment.chunk_size, 'month', 'standard') + start_chunk = chunk_start_date(parse_date(startdate), chunk, self.experiment.chunk_size, 'month', + self.experiment.calendar) + end_chunk = chunk_end_date(start_chunk, self.experiment.chunk_size, 'month', self.experiment.calendar) thredds_subset = THREDDSSubset(aggregation_path, var, start_chunk, end_chunk) return thredds_subset.check() @@ -113,8 +114,9 @@ class THREDDSManager(DataManager): """ aggregation_path = self.get_var_url(var, startdate, frequency, box, vartype) - start_chunk = chunk_start_date(parse_date(startdate), chunk, self.experiment.chunk_size, 'month', 'standard') - end_chunk = chunk_end_date(start_chunk, self.experiment.chunk_size, 'month', 'standard') + start_chunk = chunk_start_date(parse_date(startdate), chunk, self.experiment.chunk_size, 'month', + self.experiment.calendar) + end_chunk = chunk_end_date(start_chunk, self.experiment.chunk_size, 'month', self.experiment.calendar) thredds_subset = THREDDSSubset(aggregation_path, var, start_chunk, end_chunk) return thredds_subset.download() @@ -210,7 +212,7 @@ class THREDDSManager(DataManager): var = self._get_final_var_name(box, var) folder_path = self._get_folder_path(frequency, domain, var, grid, vartype) - file_name = self._get_file_name(startdate, var) + file_name = self._get_file_name(var, startdate) filepath = 
os.path.join(folder_path, file_name) return filepath @@ -275,12 +277,12 @@ class THREDDSManager(DataManager): full_path = os.path.join(self.server_url, 'dodsC', self.config.data_type, self.experiment.institute, self.experiment.model, frequency.folder_name(vartype)) if self.config.data_type == 'exp': - full_path = os.path.join(full_path, var, self._get_file_name(startdate, var)) + full_path = os.path.join(full_path, var, self._get_file_name(var, startdate)) else: - full_path = os.path.join(full_path, self._get_file_name(None, var)) + full_path = os.path.join(full_path, self._get_file_name(var, None)) return full_path - def _get_file_name(self, startdate, var): + def _get_file_name(self, var, startdate): if startdate: if self.config.data_type != 'exp': startdate = startdate[0:6] @@ -288,7 +290,7 @@ class THREDDSManager(DataManager): else: return '{0}.nc'.format(var) - def link_file(self, domain, var, startdate, member, chunk=None, grid=None, box=None, + def link_file(self, domain, var, cmor_var, startdate, member, chunk=None, grid=None, frequency=None, year=None, date_str=None, move_old=False, vartype=VariableType.MEAN): """ Creates the link of a given file from the CMOR repository. 
@@ -309,8 +311,6 @@ class THREDDSManager(DataManager): :type chunk: int :param grid: file's grid (only needed if it is not the original) :type grid: str - :param box: file's box (only needed to retrieve sections or averages) - :type box: Box :param frequency: file's frequency (only needed if it is different from the default) :type frequency: str :param vartype: Variable type (mean, statistic) diff --git a/earthdiagnostics/utils.py b/earthdiagnostics/utils.py index f8248ad19ce3e28413f806d66d100c4ac99b32de..67697b06007c8a45b328a9206cfdc70403debd18 100644 --- a/earthdiagnostics/utils.py +++ b/earthdiagnostics/utils.py @@ -2,13 +2,14 @@ import hashlib import shutil import subprocess +import tarfile import netCDF4 import numpy as np import os import re import tempfile -from autosubmit.config.log import Log +from bscearth.utils.log import Log from cdo import Cdo, CDOException from nco import Nco @@ -245,6 +246,25 @@ class Utils(object): if os.path.isfile(path): os.remove(path) + @staticmethod + def copy_tree(source, destiny): + if not os.path.exists(destiny): + os.makedirs(destiny) + shutil.copystat(source, destiny) + lst = os.listdir(source) + for item in lst: + item_source = os.path.join(source, item) + item_destiny = os.path.join(destiny, item) + if os.path.isdir(item_source): + Utils.copy_tree(item_source, item_destiny) + else: + shutil.copy2(item_source, item_destiny) + + @staticmethod + def move_tree(source, destiny): + Utils.copy_tree(source, destiny) + shutil.rmtree(source) + @staticmethod def get_file_hash(filepath): """ @@ -426,7 +446,8 @@ class Utils(object): translated_dimensions = Utils._translate(source.variables[variable].dimensions, new_names) if not set(translated_dimensions).issubset(destiny.dimensions): if not add_dimensions: - raise Exception('Variable {0} can not be added because dimensions does not match'.format(variable)) + raise Exception('Variable {0} can not be added because dimensions does not match: ' + '{1} {2}'.format(variable, 
translated_dimensions, destiny.dimensions)) for dimension in source.variables[variable].dimensions: Utils.copy_dimension(source, destiny, dimension, must_exist, new_names) if new_name in destiny.variables.keys(): @@ -510,16 +531,6 @@ class Utils(object): shutil.copy(source, destiny) Utils.convert2netcdf4(destiny) - @staticmethod - def expand_path(path): - """ - Expands character ~ and system variables on the given path - :param path: path to expand - :type path: str - :return: path after the expansion - """ - return os.path.expandvars(os.path.expanduser(path)) - class ExecutionError(Exception): """ Exception to raise when a command execution fails @@ -544,12 +555,13 @@ class Utils(object): :type path: str """ if not os.path.exists(path): - # This can be a race condition # noinspection PyBroadException try: os.makedirs(path) - except Exception: - pass + except: + # Here we can have a race condition. Let's check again for existence and rethrow if still not exists + if not os.path.isdir(path): + raise @staticmethod def untar(files, destiny_path): @@ -562,17 +574,28 @@ class Utils(object): """ for filepath in files: Log.debug('Unpacking {0}', filepath) - Utils.execute_shell_command('tar -xvf {0} -C {1}'.format(filepath, destiny_path)) + tar = tarfile.open(filepath) + for file_compressed in tar.getmembers(): + if file_compressed.isdir(): + if os.path.isdir(os.path.join(destiny_path, file_compressed.name)): + continue + else: + if os.path.exists(os.path.join(destiny_path, file_compressed.name)): + os.remove(os.path.join(destiny_path, file_compressed.name)) + tar.extract(file_compressed, destiny_path) + tar.close() @staticmethod def unzip(files, force=False): """ Unzip a list of files :param files: files to unzip - :type files: list + :type files: list | str :param force: if True, it will overwrite unzipped files :type force: bool """ + if isinstance(files, basestring): + files = [files] for filepath in files: Log.debug('Unzipping {0}', filepath) if force: @@ -633,6 
+656,7 @@ class TempFile(object): path = os.path.join(TempFile.scratch_folder, filename) else: fd, path = tempfile.mkstemp(dir=TempFile.scratch_folder, prefix=TempFile.prefix, suffix=suffix) + path = str(path) os.close(fd) if clean: diff --git a/earthdiagnostics/variable.py b/earthdiagnostics/variable.py index 30ae1154e28e7ccc54fe402ab1a3a4d1658c8033..0ad73fba648335ef21902226dbdfb5cad7779349 100644 --- a/earthdiagnostics/variable.py +++ b/earthdiagnostics/variable.py @@ -4,11 +4,11 @@ import json import openpyxl import os -from autosubmit.config.log import Log +from bscearth.utils.log import Log from earthdiagnostics.constants import Basins from earthdiagnostics.frequency import Frequency -from earthdiagnostics.modelingrealm import ModelingRealm +from earthdiagnostics.modelingrealm import ModelingRealms class VariableJsonException(Exception): @@ -144,6 +144,7 @@ class VariableManager(object): def _load_json_variables(self, json_data, table): for short_name in json_data.keys(): + short_name = str.strip(str(short_name)) if short_name.lower() in self._dict_variables: self._dict_variables[short_name.lower()].tables.append(table) continue @@ -176,6 +177,7 @@ class VariableManager(object): cmor_vars = [] for alias in aliases: + alias = str.strip(alias) if alias.lower() in self._dict_variables: cmor_vars.append(self._dict_variables[alias.lower()]) if len(cmor_vars) == 0: @@ -263,13 +265,7 @@ class VariableManager(object): if value is None: value = '' modelling_realm = value.split(' ') - if len(modelling_realm) > 1: - Log.warning('Multiple modeling realms assigned to variable {0}: {1}. 
' - 'We wil use first ({1[0]}) as modelling realm'.format(var.short_name, modelling_realm)) - if not modelling_realm[0]: - Log.warning('Variable {0} has no modeling realm defined'.format(var.short_name)) - else: - var.domain = ModelingRealm(modelling_realm[0]) + var.get_modelling_realm(modelling_realm) def _load_missing_defaults(self): self._load_file('default', True) @@ -301,30 +297,46 @@ class Variable(object): def parse_json(self, json_var, key): if 'out_name' in json_var: - self.short_name = json_var['out_name'] + self.short_name = json_var['out_name'].strip() else: raise VariableJsonException('Variable has no out name defined'.format(key)) - self.standard_name = json_var['standard_name'] - self.long_name = json_var['long_name'] + self.standard_name = json_var['standard_name'].strip() + self.long_name = json_var['long_name'].strip() domain = json_var['modeling_realm'].split(' ') - if len(domain) > 1: - Log.warning('Multiple modeling realms assigned to variable {0}: {1}. ' - 'We wil use first ({1[0]}) as domain'.format(self.short_name, domain)) - if not domain[0]: + self.domain = self.get_modelling_realm(domain) + + self.valid_min = json_var['valid_min'].strip() + self.valid_max = json_var['valid_max'].strip() + self.units = json_var['units'].strip() + + def get_modelling_realm(self, domains): + if len(domains) > 1: + Log.warning('Multiple modeling realms assigned to variable {0}: {1}. 
', self, domains) + parsed = [] + for domain in domains: + parsed.append(ModelingRealms.parse(domain)) + + selected = self._select_most_specific(parsed) + if selected: + Log.warning('We will use {0} as it is the most specific', selected) + return selected + + Log.warning('We will use {0} as it is the first on the list and there is no one that is more specific', + parsed[0]) + return parsed[0] + + if not domains[0]: Log.warning('Variable {0} has no modeling realm defined'.format(self.short_name)) + return None else: - self.domain = ModelingRealm(domain[0]) - - self.valid_min = json_var['valid_min'] - self.valid_max = json_var['valid_max'] - self.units = json_var['units'] + return ModelingRealms.parse(domains[0]) def parse_csv(self, var_line): self.short_name = var_line[1].strip() self.standard_name = var_line[2].strip() self.long_name = var_line[3].strip() - self.domain = ModelingRealm(var_line[4].strip()) + self.domain = ModelingRealms.parse(var_line[4].strip()) self.basin = Basins.parse(var_line[5]) self.units = var_line[6].strip() self.valid_min = var_line[7].strip() @@ -341,6 +353,22 @@ class Variable(object): table_name = self.domain.get_table_name(frequency, data_convention) return CMORTable(table_name, frequency, 'December 2013') + def _select_most_specific(self, parsed): + parsed = set(parsed) + if {ModelingRealms.land, ModelingRealms.landIce} == parsed: + return ModelingRealms.landIce + + if {ModelingRealms.seaIce, ModelingRealms.ocean} == parsed: + return ModelingRealms.seaIce + + if {ModelingRealms.atmos, ModelingRealms.atmosChem} == parsed: + return ModelingRealms.atmosChem + + if {ModelingRealms.ocean, ModelingRealms.ocnBgchem} == parsed: + return ModelingRealms.ocnBgchem + + return None + class VariableAlias(object): """ diff --git a/earthdiagnostics/variable_alias/cmip6.csv b/earthdiagnostics/variable_alias/cmip6.csv index 6aee91f502a827957a7411b8657e52e8d0344f37..abe2ddb8e24c1648546db92daa70e001e9a22359 100644 --- 
a/earthdiagnostics/variable_alias/cmip6.csv +++ b/earthdiagnostics/variable_alias/cmip6.csv @@ -1,3 +1,71 @@ Aliases,Shortname,Basin,Grid iiceconc:soicecov:ileadfra,siconc,, -ci,siconc,,ifs +alk,talk,, +oxygen,o2,, +calcite,calc,, +po4,po4,, +poc,poc,, +silicate,si,, +nanophy,nanophy,, +microzoo,zmicro,, +doc,dissoc,, +diaphy,phydiat,, +mesozoo,zmeso,, +dsi,dsi,, +dissfe,dfe,, +bfe,bfe,, +goc,goc,, +sfe,sfe,, +dfe,dfe,, +micrzoo,zmicro,, +nfe,nfe,, +nchl,nchl,, +dchl,chldiat,, +nitrate,no3,, +ammonium,nh4,, +pno3tot,pno3tot,, +psiltot,psiltot,, +palktot,palktot,, +pfertot,pfertot,, +tcflx,tcflx,, +tcflxcum,tcflxcum,, +c-export,c-export,, +tintpp,tintpp,, +tnfix,tnfix,, +tdenit,tdenit,, +intppnew:inttppnew,intppnew,, +inttpbfe,pbfe,, +intdic,intdic,, +o2min,o2min,, +zo2min,zo2min,, +intnfix,intpn2,, +intppphy,intppphy,, +intppphy2,intppdiat,, +ppphy ,ppphy ,, +ppphy2 ,pdi,, +intpp:inttpp,intpp,, +intpbfe,intpbfe,, +intpbsi,intpbsi,, +intpbcal,intpbcal,, +cflx,cflx,, +remin,remin,, +denit,denit,, +nfix,nfix,, +sdenit,sdenit,, +dpco2,dpco2,, +epc100,epc100,, +expc,expc,, +par,par,, +lnnut,lnnut,, +ldnut,ldnut,, +lnfe,lnfe,, +ldfe,limfediat,, +lnlight,lnlight,, +ldlight,ldlight,, +graz1,graz1,, +graz2,graz2,, +mumax,mumax,, +mun,mun,, +mud,mud,, +ppnewn,ppnewn,, +ppnewd,ppnewd,, diff --git a/earthdiagnostics/variable_alias/default.csv b/earthdiagnostics/variable_alias/default.csv index 24683fe3fed92fba78ad1a7f9daedabc3db6ae72..993ca7b55e0a042ffc144871681902e50f22fb72 100644 --- a/earthdiagnostics/variable_alias/default.csv +++ b/earthdiagnostics/variable_alias/default.csv @@ -170,7 +170,7 @@ iice_hsd:snthicat,sndcat,, isnoheco,snheco,, sd,snld,, smlt,snm,, -isnowthi,snthic,, +isnowthi,snld,, sbgvoltot,snvolga,, snvolu,snvolu,, vosaline:mean_3Dsosaline,so,, @@ -291,4 +291,4 @@ wo,wo,, w2o,wosq,, difvho,difvho,, vovematr,wmo,, -qtr_ice,qtr,, +qtr_ice,qtr,, \ No newline at end of file diff --git a/earthdiagnostics/variable_alias/primavera.csv 
b/earthdiagnostics/variable_alias/primavera.csv index 23d010ad7e7045cdcef49588202a4b1d8d3ea231..ff7b4128f33dffa4e17f822e7aac909be0b07b0c 100644 --- a/earthdiagnostics/variable_alias/primavera.csv +++ b/earthdiagnostics/variable_alias/primavera.csv @@ -1,3 +1,2 @@ Aliases,Shortname,Basin,Grid iiceconc:siconc:soicecov:ileadfra,siconc,, -ci,siconc,,ifs \ No newline at end of file diff --git a/earthdiagnostics/variable_alias/specs.csv b/earthdiagnostics/variable_alias/specs.csv index 1aebaab3c330db12114aedb4ac2198d944e6692a..07aef840283521d855e3b6e9c4fad60f400ed56d 100644 --- a/earthdiagnostics/variable_alias/specs.csv +++ b/earthdiagnostics/variable_alias/specs.csv @@ -1,3 +1,4 @@ Aliases,Shortname,Basin,Grid -siconc:soicecov,sic,, -ci,sic,,ifs \ No newline at end of file +iiceconc:siconc:soicecov:ileadfra,sic,, +ci,sic,,ifs +es,sbl,, \ No newline at end of file diff --git a/launch_diags.sh b/launch_diags.sh index cdeaff799e92e8363c03ca386781417ffc1b43e1..bbce66f32d8831fcd94074842312462e1c02089e 100755 --- a/launch_diags.sh +++ b/launch_diags.sh @@ -15,10 +15,10 @@ PATH_TO_VIRTUALENV=~jvegas/virtualenvs/diags/bin module purge module load NCO/4.5.4-foss-2015a module load CDO/1.7.2-foss-2015a -module load CDFTOOLS/3.0a2-foss-2015a +module load CDFTOOLS/3.0a5-foss-2015a source ${PATH_TO_VIRTUALENV}/activate export PYTHONPATH=${PATH_TO_DIAGNOSTICS}:${PYTHONPATH} cd ${PATH_TO_DIAGNOSTICS}/earthdiagnostics/ -./earthdiags.py -lc DEBUG -f ${PATH_TO_CONF_FILE} +./earthdiags.py -f ${PATH_TO_CONF_FILE} diff --git a/model_diags.conf b/model_diags.conf new file mode 100644 index 0000000000000000000000000000000000000000..72bf0d20ad2e367d6c472c85447578f4d75d4b0b --- /dev/null +++ b/model_diags.conf @@ -0,0 +1,165 @@ +[DIAGNOSTICS] + +# The next few configurations are mandatory + +# Temporary folder for the calculations. Final results will never be stored here. +SCRATCH_DIR = /scratch/Earth/$USER + +# ':' separated list of folders to look for data in. 
It will look for file in the path $DATA_FOLDER/$EXPID and +# $DATA_FOLDER/$DATA_TYPE/$MODEL/$EXPID +DATA_DIR = /esnas:/esarchive + +# Folder containing mask and mesh files for the dataset. +CON_FILES = /esnas/autosubmit/con_files/ + +# Default data frequency to be used by the diagnostics. Some diagnostics can override this configuration or even +# ignore it completely. +FREQUENCY = mon + +# All the other configurations in this section are optional + +# Type of the dataset to use. It can be exp, obs or recon. Default is exp. +DATA_TYPE = exp + +# This is used to choose the mechanism for storing and retrieving data. Options are CMOR (for our own experiments) or +# THREDDS (for anything else). Default value is CMOR +DATA_ADAPTOR = CMOR + + + +* DATA_CONVENTION + Convention to use for file paths and names and variable naming among other things. Can be SPECS, PRIMAVERA or CMIP6. + Default is SPECS. + +* CDFTOOLS_PATH + Path to the folder containing CDFTOOLS executables. By default is empty, so CDFTOOLS binaries must be added to the + system path. + +* MAX_CORES + Maximum number of cores to use. By default the diagnostics will use all cores available to them. It is not + necessary when launching through a scheduler, as Earthdiagnostics can detect how many cores the scheduler has + allocated to it. + + +# Data adaptor type: CMOR (for our experiments), THREDDS (for other experiments) + + +# Root path for the cmorized data to use + +# Specify if your data is from an experiment (exp), observation (obs) or reconstructions (recon) + +# CMORization type to use. Important also for THREDDS as it affects variable name conventions. +# Options: SPECS (default), PRIMAVERA, CMIP6 + +# Path to NEMO's mask and grid files needed for CDFTools + +# Diagnostics to run, space separated. You must provide for each one the name and the parameters (comma separated) or +# an alias defined in the ALIAS section (see more below). 
If you are using the diagnostics just to CMORize, leave it +# empty +DIAGS = +# DIAGS = OHC +# Frequency of the data you want to use by default. Some diagnostics do not use this value: i.e. monmean always stores +# its results at monthly frequency (obvious) and has a parameter to specify input's frequency. + +# Path to CDFTOOLS binaries +CDFTOOLS_PATH = ~jvegas/CDFTOOLS/bin +# If true, copies the mesh files regardless of presence in scratch dir +RESTORE_MESHES = False +# Limits the maximum amount of threads used. Default: 0 (no limitation, one per virtual core available) +MAX_CORES = 1 + +[CMOR] +# If true, recreates CMOR files regardless of presence. Default = False +FORCE = False +# If true, CMORizes ocean files. Default = True +OCEAN_FILES = True +FILTER_FILES = +# If true, CMORizes atmosphere files. Default = True +ATMOSPHERE_FILES = False +# You can specify the variable to cmorize, in the way domain:var domain:var2 domain2:var +VARIABLE_LIST = + +# Variables to be CMORized from the grib atmospheric files, separated by comma. 
+# You can also specify the levels to extract using the following syntax +# VARIABLE_CODE, VARIABLE_CODE:LEVEL, VARIABLE_CODE:LEVEL1-LEVEL2, VARIABLE_CODE:MIN_LEVEL:MAX_LEVEL:STEP +# Examples: +# Variable with code 129 at level 30000: 129:30000 +# Variable with code 129 at levels 30000, 40000 and 60000: 129:30000-40000-60000 +# Variable with code 129 at levels between 30000 and 600000 with 10000 intervals: +# 129:30000:60000:10000 equivalent to 129:30000-40000-50000-60000 + +# Hourly vars +ATMOS_HOURLY_VARS = 129:30000:90000:5000, 130, 131:30000:90000:5000, 132:30000:90000:5000, 151, 167, 168, 164, 165, 166 +# Daily vars +ATMOS_DAILY_VARS = 167, 165, 166, 151, 164, 168, 169, 177, 179, 228, 201, 202, 130:85000 +# Monthly vars +ATMOS_MONTHLY_VARS = 167, 201, 202, 165, 166, 151, 144, 228, 205, 182, 164, 146, 147, 176, 169, 177, 175, 212, 141, 180, 181, 179, 168, 243, 129:5000-20000-50000-85000, 130:5000-20000-50000-85000, 131:5000-20000-50000-85000, 132:5000-20000-50000-85000, 133:5000-20000-50000-85000 + +# The next bunch of parameters are used to provide metadata for the CMOR files +# ASSOCIATED_EXPERIMENT = +# INITIALIZATION_METHOD = 1 +# INITIALIZATION_DESCRIPTION = ocean: ECMWF system4, ice: DFS4.3 , atmosphere: +# PHYSICS_VERSION = 1 +# PHYSICS_DESCRIPTION = +# ASSOCIATED_MODEL = +# SOURCE = 'EC-Earthv2.3.0, ocean: Nemo3.1, ifs31r1, lim2 + +[THREDDS] +SERVER_URL = https://earth.bsc.es/thredds + +[EXPERIMENT] +# Experiments parameters as defined in CMOR standard +INSTITUTE = BSC +MODEL = EC-EARTH +# Model version: Available versions +MODEL_VERSION =Ec3.2_O1L75 +# Atmospheric output timestep in hours +ATMOS_TIMESTEP = 6 +# Ocean output timestep in hours +OCEAN_TIMESTEP = 6 + +# For those who use Autosubmit, this will be easy +# EXPID is the unique identifier of the experiment. 
+# STARTDATES is the list of start dates +# MEMBERS is the list of members of your experiment (only the numbers, the fc will be added by the tool) +# MEMBER_DIGITS is the minimum number of digits to use for the member name: if 1 the name for member 0 will be fc0, +# if 2, fc00 +# CHUNK_SIZE is the size of each data file, given in months +# CHUNKS is the number of chunks. You can specify less chunks than present on the experiment +EXPID = a0c2 +STARTDATES = 19900101 +MEMBERS = 0 +MEMBER_DIGITS = 1 +CHUNK_SIZE = 12 +CHUNKS = 2 +# CHUNKS = 1 + + +# This ALIAS section is a bit different +# Inside this, you can provide alias for frequent diagnostics calls. +# By default, there are some of the diagnostics available at the previous version. +# You can define an alias for one or moraa90a1ee diagnostic calls + +[ALIAS] +MAX_MOC = mocmax,38,50,500,2000 mocmax,40,40,0,10000 +AREA_MOC = mocarea,40,55,1000,2000,atl mocarea,30,40,1000,2000,atl +STC = mocarea,0,25,0,200,Pac mocarea,-25,0,0,200,Pac mocarea,0,25,0,200,Atl mocarea,-25,0,0,200,Atl +HEAT_SAL_MXL = mlotstsc mlotsthc +LMSALC = vertmeanmeters,so,300,5400 +USALC = vertmeanmeters,so,0,300 +OHC = ohc,glob,0,1,10 +XOHC = ohc,glob,1,0,0 +LOHC = ohc,glob,0,23,46 +MOHC = ohc,glob,0,18,22 +UOHC = ohc,glob,0,1,17 +OHC_SPECIFIED_LAYER = ohclayer,0,300 ohclayer,300,800 +3DTEMP = interp,thetao +3DSAL = interp,so +TSEC_AVE190-220E =avgsection,thetao,190,220,-90,90 +SSEC_AVE190-220E =avgsection,so,190,220,-90,90 +VERT_SSECTIONS = cutsection,so,Z,0 cutsection,so,Z,45 cutsection,so,Z,-45 cutsection,so,M,-30 cutsection,so,M,180 cutsection,so,M,80 +VERT_TSECTIONS = cutsection,thetao,Z,0 cutsection,thetao,Z,45 cutsection,thetao,Z,-45 cutsection,thetao,M,-30 cutsection,thetao,M,180 cutsection,thetao,M,80 +SIASIESIV = siasiesiv,glob + + + diff --git a/patch.bash b/patch.bash new file mode 100755 index 0000000000000000000000000000000000000000..60e7b8f885c9e7462391aee55c33bd030ec2869c --- /dev/null +++ b/patch.bash @@ -0,0 +1,18 @@ 
+#!/bin/bash +set -o nounset +set -o errexit + +exp=a0fj +sd=19580101 +root=esarchive +mod=nemo + +if [ -d /${root}/exp/${mod}/${exp}/original_files ] +then + echo "folder already exists" + exit +fi + +mkdir -p /${root}/exp/${mod}/${exp}/original_files +mv /${root}/exp/${mod}/${exp}/${sd} /${root}/exp/${mod}/${exp}/original_files +ln -sf /${root}/exp/${mod}/${exp}/original_files/${sd} /${root}/exp/${mod}/${exp}/${sd} diff --git a/setup.py b/setup.py index bd67a0b34a4472916733eeeac2848b16cd2cd7da..05db1e101d54e9155da08211a379259768373834 100644 --- a/setup.py +++ b/setup.py @@ -24,8 +24,8 @@ setup( author_email='javier.vegas@bsc.es', url='http://www.bsc.es/projects/earthscience/autosubmit/', keywords=['climate', 'weather', 'diagnostic'], - install_requires=['numpy', 'netCDF4', 'autosubmit', 'cdo', 'pygrib', 'nco', 'cfunits>=1.1.4', 'coverage', 'pyproj', - 'openpyxl'], + install_requires=['numpy', 'netCDF4', 'bscearth.utils', 'cdo', 'pygrib', 'nco', 'cfunits>=1.1.4', 'coverage', + 'pyproj', 'openpyxl', 'mock'], packages=find_packages(), include_package_data=True, scripts=['bin/earthdiags'] diff --git a/test/unit/__init__.py b/test/unit/__init__.py index 3c07e90196c414109ffe592d381dccdc45d463ff..caa995421069086fb1ec536dd3868931ac30e48e 100644 --- a/test/unit/__init__.py +++ b/test/unit/__init__.py @@ -11,6 +11,7 @@ from test_areamoc import TestAreaMoc from test_averagesection import TestAverageSection from test_cutsection import TestCutSection from test_convectionsites import TestConvectionSites +from test_frequency import TestFrequency from test_gyres import TestGyres from test_heatcontent import TestHeatContent from test_heatcontentlayer import TestHeatContentLayer @@ -19,8 +20,13 @@ from test_maxmoc import TestMaxMoc from test_mixedlayerheatcontent import TestMixedLayerHeatContent from test_mixedlayersaltcontent import TestMixedLayerSaltContent from test_moc import TestMoc +from test_modelling_realm import TestModellingRealms, TestModellingRealm from test_siasiesiv 
import TestSiasiesiv from test_verticalmean import TestVerticalMean from test_verticalmeanmeters import TestVerticalMeanMeters from test_monthlymean import TestMonthlyMean from test_rewrite import TestRewrite +from test_variable_type import TestVariableType +from test_monthlypercentile import TestMonthlyPercentile +from test_climatologicalpercentile import TestClimatologicalPercentile +from test_variable import TestCMORTable, TestVariableAlias diff --git a/test/unit/test_areamoc.py b/test/unit/test_areamoc.py index f4ddf699dc690767ed161511060f67c535ec0aa3..15d4bcd750f752c2989c7f259fb45b201f1d3d58 100644 --- a/test/unit/test_areamoc.py +++ b/test/unit/test_areamoc.py @@ -23,20 +23,21 @@ class TestAreaMoc(TestCase): self.psi = AreaMoc(self.data_manager, '20000101', 1, 1, Basins.Antarctic, self.box) def test_generate_jobs(self): - jobs = AreaMoc.generate_jobs(self.diags, ['psi', '0', '0', '0', '0']) + jobs = AreaMoc.generate_jobs(self.diags, ['diagnostic', '0', '0', '0', '0']) self.assertEqual(len(jobs), 2) self.assertEqual(jobs[0], AreaMoc(self.data_manager, '20010101', 0, 0, Basins.Global, self.box)) self.assertEqual(jobs[1], AreaMoc(self.data_manager, '20010101', 0, 1, Basins.Global, self.box)) - jobs = AreaMoc.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', 'atl']) + jobs = AreaMoc.generate_jobs(self.diags, ['diagnostic', '0', '0', '0', '0', 'atl']) self.assertEqual(len(jobs), 2) self.assertEqual(jobs[0], AreaMoc(self.data_manager, '20010101', 0, 0, Basins.Atlantic, self.box)) self.assertEqual(jobs[1], AreaMoc(self.data_manager, '20010101', 0, 1, Basins.Atlantic, self.box)) with self.assertRaises(Exception): - AreaMoc.generate_jobs(self.diags, ['psi']) + AreaMoc.generate_jobs(self.diags, ['diagnostic']) with self.assertRaises(Exception): - AreaMoc.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', '0', '0']) + AreaMoc.generate_jobs(self.diags, ['diagnostic', '0', '0', '0', '0', '0', '0']) def test_str(self): - self.assertEquals(str(self.psi), 'Area MOC 
Startdate: 20000101 Member: 1 Chunk: 1 Box: 0N0') + self.assertEquals(str(self.psi), 'Area MOC Startdate: 20000101 Member: 1 Chunk: 1 Box: 0N0 ' + 'Basin: Antarctic_Ocean') diff --git a/test/unit/test_averagesection.py b/test/unit/test_averagesection.py index 78554e5e495ab6b28a7d5c90e7d7537628abf1b7..7a454c4a97c2b13ccfe099169a9dfc12d2548b79 100644 --- a/test/unit/test_averagesection.py +++ b/test/unit/test_averagesection.py @@ -24,14 +24,14 @@ class TestAverageSection(TestCase): self.psi = AverageSection(self.data_manager, '20000101', 1, 1, ModelingRealms.ocean, 'var', self.box) def test_generate_jobs(self): - jobs = AverageSection.generate_jobs(self.diags, ['psi', 'var', '0', '0', '0', '0']) + jobs = AverageSection.generate_jobs(self.diags, ['diagnostic', 'var', '0', '0', '0', '0']) self.assertEqual(len(jobs), 2) self.assertEqual(jobs[0], AverageSection(self.data_manager, '20010101', 0, 0, ModelingRealms.ocean, 'var', self.box)) self.assertEqual(jobs[1], AverageSection(self.data_manager, '20010101', 0, 1, ModelingRealms.ocean, 'var', self.box)) - jobs = AverageSection.generate_jobs(self.diags, ['psi', 'var', '0', '0', '0', '0', 'ocean']) + jobs = AverageSection.generate_jobs(self.diags, ['diagnostic', 'var', '0', '0', '0', '0', 'ocean']) self.assertEqual(len(jobs), 2) self.assertEqual(jobs[0], AverageSection(self.data_manager, '20010101', 0, 0, ModelingRealms.ocean, 'var', self.box)) @@ -39,9 +39,9 @@ class TestAverageSection(TestCase): self.box)) with self.assertRaises(Exception): - AverageSection.generate_jobs(self.diags, ['psi']) + AverageSection.generate_jobs(self.diags, ['diagnostic']) with self.assertRaises(Exception): - AverageSection.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', '0', '0', '0']) + AverageSection.generate_jobs(self.diags, ['diagnostic', '0', '0', '0', '0', '0', '0', '0']) def test_str(self): self.assertEquals(str(self.psi), 'Average section Startdate: 20000101 Member: 1 Chunk: 1 Box: 0N0E ' diff --git 
a/test/unit/test_climatologicalpercentile.py b/test/unit/test_climatologicalpercentile.py new file mode 100644 index 0000000000000000000000000000000000000000..95afc38bb081bf3cdee76a6f64521c688817158d --- /dev/null +++ b/test/unit/test_climatologicalpercentile.py @@ -0,0 +1,35 @@ +# coding=utf-8 +from unittest import TestCase + +from earthdiagnostics.statistics.climatologicalpercentile import ClimatologicalPercentile +from mock import Mock + +from earthdiagnostics.modelingrealm import ModelingRealms + + +class TestClimatologicalPercentile(TestCase): + + def setUp(self): + self.data_manager = Mock() + self.data_manager.variable_list.get_variable.return_value = None + + self.diags = Mock() + self.diags.data_manager = self.data_manager + + self.diagnostic = ClimatologicalPercentile(self.data_manager, ModelingRealms.ocean, 'var', + [10, 90], 1000, self.diags.config.experiment) + + def test_generate_jobs(self): + jobs = ClimatologicalPercentile.generate_jobs(self.diags, ['climpercent', 'ocean', 'var', '1-2', '1000']) + self.assertEqual(len(jobs), 1) + self.assertEqual(jobs[0], ClimatologicalPercentile(self.data_manager, ModelingRealms.ocean, 'var', [1, 2], + 1000, self.diags.config.experiment)) + + with self.assertRaises(Exception): + ClimatologicalPercentile.generate_jobs(self.diags, ['climpercent']) + with self.assertRaises(Exception): + ClimatologicalPercentile.generate_jobs(self.diags, ['climpercent', '0', '0', '0', '0', '0', '0', '0']) + + def test_str(self): + self.assertEquals(str(self.diagnostic), 'Climatological percentile Variable: ocean:var Leadtimes: [10, 90] ' + 'Bins: 1000') diff --git a/test/unit/test_constants.py b/test/unit/test_constants.py index f8010d53a4dbbe42a506dde27c501ed5f0e076f2..720478187ac7ca5c8d7cb60b918f6e13498db779 100644 --- a/test/unit/test_constants.py +++ b/test/unit/test_constants.py @@ -29,3 +29,6 @@ class TestBasin(TestCase): self.assertFalse(Basin('bas', 'OtherBasin') == self.basin) self.assertFalse(Basin('otbas', 'Basin') == 
self.basin) self.assertFalse(Basin('otbas', 'OtherBasin') == self.basin) + + def test__str__(self): + self.assertEquals(str(self.basin), 'Basin') diff --git a/test/unit/test_convectionsites.py b/test/unit/test_convectionsites.py index b6a7e543ba60da2d741734a6d1eeaeff95e1bad6..63710d054e6a4f937cb9c22e8e78d659c80f3c73 100644 --- a/test/unit/test_convectionsites.py +++ b/test/unit/test_convectionsites.py @@ -16,13 +16,13 @@ class TestConvectionSites(TestCase): self.psi = ConvectionSites(self.data_manager, '20000101', 1, 1, 'model_version') def test_generate_jobs(self): - jobs = ConvectionSites.generate_jobs(self.diags, ['psi']) + jobs = ConvectionSites.generate_jobs(self.diags, ['diagnostic']) self.assertEqual(len(jobs), 2) self.assertEqual(jobs[0], ConvectionSites(self.data_manager, '20010101', 0, 0, 'model_version')) self.assertEqual(jobs[1], ConvectionSites(self.data_manager, '20010101', 0, 1, 'model_version')) with self.assertRaises(Exception): - ConvectionSites.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', '0', '0', '0']) + ConvectionSites.generate_jobs(self.diags, ['diagnostic', '0', '0', '0', '0', '0', '0', '0']) def test_str(self): self.assertEquals(str(self.psi), 'Convection sites Startdate: 20000101 Member: 1 Chunk: 1') diff --git a/test/unit/test_cutsection.py b/test/unit/test_cutsection.py index 8cfb3cdb4e4f6ffd56e58e064522d01c7784d7ec..170d04265c6a55a8816deb494416f04d3237abaa 100644 --- a/test/unit/test_cutsection.py +++ b/test/unit/test_cutsection.py @@ -24,14 +24,14 @@ class TestCutSection(TestCase): self.psi = CutSection(self.data_manager, '20000101', 1, 1, ModelingRealms.atmos, 'var', True, 0) def test_generate_jobs(self): - jobs = CutSection.generate_jobs(self.diags, ['psi', 'var', 'true', '10']) + jobs = CutSection.generate_jobs(self.diags, ['diagnostic', 'var', 'true', '10']) self.assertEqual(len(jobs), 2) self.assertEqual(jobs[0], CutSection(self.data_manager, '20010101', 0, 0, ModelingRealms.ocean, 'var', True, 10)) 
self.assertEqual(jobs[1], CutSection(self.data_manager, '20010101', 0, 1, ModelingRealms.ocean, 'var', True, 10)) - jobs = CutSection.generate_jobs(self.diags, ['psi', 'var', 'false', '0', 'atmos']) + jobs = CutSection.generate_jobs(self.diags, ['diagnostic', 'var', 'false', '0', 'atmos']) self.assertEqual(len(jobs), 2) self.assertEqual(jobs[0], CutSection(self.data_manager, '20010101', 0, 0, ModelingRealms.atmos, 'var', False, 0)) @@ -39,9 +39,9 @@ class TestCutSection(TestCase): False, 0)) with self.assertRaises(Exception): - CutSection.generate_jobs(self.diags, ['psi']) + CutSection.generate_jobs(self.diags, ['diagnostic']) with self.assertRaises(Exception): - CutSection.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', '0', '0', '0']) + CutSection.generate_jobs(self.diags, ['diagnostic', '0', '0', '0', '0', '0', '0', '0']) def test_str(self): self.assertEquals(str(self.psi), 'Cut section Startdate: 20000101 Member: 1 Chunk: 1 Variable: atmos:var ' diff --git a/test/unit/test_earthdiags.py b/test/unit/test_earthdiags.py new file mode 100644 index 0000000000000000000000000000000000000000..5dc657b81b4e6af7b491b7b037a1f9012e23a75d --- /dev/null +++ b/test/unit/test_earthdiags.py @@ -0,0 +1,8 @@ +# coding=utf-8 +from unittest import TestCase + +from earthdiagnostics.earthdiags import EarthDiags + + +class TestEarthDiags(TestCase): + pass diff --git a/test/unit/test_frequency.py b/test/unit/test_frequency.py new file mode 100644 index 0000000000000000000000000000000000000000..845dfe6ad336a4923da92ec4fd80663a65541076 --- /dev/null +++ b/test/unit/test_frequency.py @@ -0,0 +1,32 @@ +# coding=utf-8 +from unittest import TestCase + +from earthdiagnostics.frequency import Frequency +from earthdiagnostics.variable_type import VariableType + + +class TestFrequency(TestCase): + + def test_not_supported(self): + with self.assertRaises(ValueError): + Frequency('badfreq') + + def test_get_monthly_mean(self): + self.assertEqual(Frequency('m').folder_name(VariableType.MEAN), 
'monthly_mean') + + def test_get_monthly_stats(self): + self.assertEqual(Frequency('m').folder_name(VariableType.STATISTIC), 'monthly_statistics') + + def test_get_daily_mean(self): + self.assertEqual(Frequency('d').folder_name(VariableType.MEAN), 'daily_mean') + + def test_get_daily_stats(self): + self.assertEqual(Frequency('d').folder_name(VariableType.STATISTIC), 'daily_statistics') + + def test_get_6hourlymean(self): + self.assertEqual(Frequency('6hr').folder_name(VariableType.STATISTIC), '6hourly') + + def test_get_climatology(self): + self.assertEqual(Frequency('clim').folder_name(VariableType.STATISTIC), 'clim') + self.assertEqual(Frequency('clim').folder_name(VariableType.MEAN), 'clim') + diff --git a/test/unit/test_gyres.py b/test/unit/test_gyres.py index 77f3987258eca4cee0a494197090db777a2d7061..becc4e7d331754aff23baae244d82154252eaa6e 100644 --- a/test/unit/test_gyres.py +++ b/test/unit/test_gyres.py @@ -17,13 +17,13 @@ class TestGyres(TestCase): self.gyres = Gyres(self.data_manager, '20000101', 1, 1, 'model_version') def test_generate_jobs(self): - jobs = Gyres.generate_jobs(self.diags, ['psi']) + jobs = Gyres.generate_jobs(self.diags, ['diagnostic']) self.assertEqual(len(jobs), 2) self.assertEqual(jobs[0], Gyres(self.data_manager, '20010101', 0, 0, 'model_version')) self.assertEqual(jobs[1], Gyres(self.data_manager, '20010101', 0, 1, 'model_version')) with self.assertRaises(Exception): - Gyres.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', '0', '0', '0']) + Gyres.generate_jobs(self.diags, ['diagnostic', '0', '0', '0', '0', '0', '0', '0']) def test_str(self): self.assertEquals(str(self.gyres), 'Gyres Startdate: 20000101 Member: 1 Chunk: 1') diff --git a/test/unit/test_heatcontent.py b/test/unit/test_heatcontent.py index 32c6900afd86ced0e63d464bb0b0fab4f0ef2f4e..8452eb598d1cf4e7d62a5e7c0f3a2f1922d505fc 100644 --- a/test/unit/test_heatcontent.py +++ b/test/unit/test_heatcontent.py @@ -16,24 +16,24 @@ class TestHeatContent(TestCase): 
self.diags.model_version = 'model_version' self.diags.config.experiment.get_chunk_list.return_value = (('20010101', 0, 0), ('20010101', 0, 1)) - self.box = Box(True) + self.box = Box(False) self.box.min_depth = 0 self.box.max_depth = 100 self.heat_content = HeatContent(self.data_manager, '20000101', 1, 1, Basins.Global, 1, self.box) def test_generate_jobs(self): - jobs = HeatContent.generate_jobs(self.diags, ['psi', 'atl', '-1', '0', '100']) + jobs = HeatContent.generate_jobs(self.diags, ['diagnostic', 'atl', '-1', '0', '100']) self.assertEqual(len(jobs), 2) self.assertEqual(jobs[0], HeatContent(self.data_manager, '20010101', 0, 0, Basins.Atlantic, -1, self.box)) self.assertEqual(jobs[1], HeatContent(self.data_manager, '20010101', 0, 1, Basins.Atlantic, -1, self.box)) with self.assertRaises(Exception): - HeatContent.generate_jobs(self.diags, ['psi']) + HeatContent.generate_jobs(self.diags, ['diagnostic']) with self.assertRaises(Exception): - HeatContent.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', '0', '0', '0']) + HeatContent.generate_jobs(self.diags, ['diagnostic', '0', '0', '0', '0', '0', '0', '0']) def test_str(self): self.assertEquals(str(self.heat_content), 'Heat content Startdate: 20000101 Member: 1 Chunk: 1 Mixed layer: 1 ' - 'Box: 0m-100m Basin: Global_Ocean') + 'Box: 0-100 Basin: Global_Ocean') diff --git a/test/unit/test_maxmoc.py b/test/unit/test_maxmoc.py index 99c2fca6da18126d818ec305728a900ef5d4bb1b..35117a673907aa68ea13978ae39111e75bdba608 100644 --- a/test/unit/test_maxmoc.py +++ b/test/unit/test_maxmoc.py @@ -27,25 +27,25 @@ class TestMaxMoc(TestCase): self.diags.config.experiment.members = (0,) self.diags.config.experiment.get_full_years.return_value = (2000, 2001) - jobs = MaxMoc.generate_jobs(self.diags, ['psi', '0', '0', '0', '0']) + jobs = MaxMoc.generate_jobs(self.diags, ['diagnostic', '0', '0', '0', '0']) self.assertEqual(len(jobs), 2) self.assertEqual(jobs[0], MaxMoc(self.data_manager, '20010101', 0, 2000, Basins.Global, 
self.box)) self.assertEqual(jobs[1], MaxMoc(self.data_manager, '20010101', 0, 2001, Basins.Global, self.box)) - jobs = MaxMoc.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', 'atl']) + jobs = MaxMoc.generate_jobs(self.diags, ['diagnostic', '0', '0', '0', '0', 'atl']) self.assertEqual(len(jobs), 2) self.assertEqual(jobs[0], MaxMoc(self.data_manager, '20010101', 0, 2000, Basins.Atlantic, self.box)) self.assertEqual(jobs[1], MaxMoc(self.data_manager, '20010101', 0, 2001, Basins.Atlantic, self.box)) self.diags.config.experiment.get_full_years.return_value = list() - jobs = MaxMoc.generate_jobs(self.diags, ['psi', '0', '0', '0', '0']) + jobs = MaxMoc.generate_jobs(self.diags, ['diagnostic', '0', '0', '0', '0']) self.assertEqual(len(jobs), 0) with self.assertRaises(Exception): - MaxMoc.generate_jobs(self.diags, ['psi']) + MaxMoc.generate_jobs(self.diags, ['diagnostic']) with self.assertRaises(Exception): - MaxMoc.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', '0', '0', '0']) + MaxMoc.generate_jobs(self.diags, ['diagnostic', '0', '0', '0', '0', '0', '0', '0']) def test_str(self): self.assertEquals(str(self.maxmoc), 'Max moc Startdate: 20000101 Member: 1 Year: 2000 ' diff --git a/test/unit/test_mixedlayerheatcontent.py b/test/unit/test_mixedlayerheatcontent.py index bf7bff95ff38b46ee998d78d4ce005217a9085d9..b7cf564bc9cdfabe924327ea0602ea3bc6d84478 100644 --- a/test/unit/test_mixedlayerheatcontent.py +++ b/test/unit/test_mixedlayerheatcontent.py @@ -17,13 +17,13 @@ class TestMixedLayerHeatContent(TestCase): self.mixed = MixedLayerHeatContent(self.data_manager, '20000101', 1, 1) def test_generate_jobs(self): - jobs = MixedLayerHeatContent.generate_jobs(self.diags, ['psi']) + jobs = MixedLayerHeatContent.generate_jobs(self.diags, ['diagnostic']) self.assertEqual(len(jobs), 2) self.assertEqual(jobs[0], MixedLayerHeatContent(self.data_manager, '20010101', 0, 0)) self.assertEqual(jobs[1], MixedLayerHeatContent(self.data_manager, '20010101', 0, 1)) with 
self.assertRaises(Exception): - MixedLayerHeatContent.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', '0', '0', '0']) + MixedLayerHeatContent.generate_jobs(self.diags, ['diagnostic', '0', '0', '0', '0', '0', '0', '0']) def test_str(self): self.assertEquals(str(self.mixed), 'Mixed layer heat content Startdate: 20000101 Member: 1 Chunk: 1') diff --git a/test/unit/test_mixedlayersaltcontent.py b/test/unit/test_mixedlayersaltcontent.py index 38074f0f481675b968dd331f8e5b65a08a4c5794..7aa42f6691eff2d80c4290b1fa5d1505794543d2 100644 --- a/test/unit/test_mixedlayersaltcontent.py +++ b/test/unit/test_mixedlayersaltcontent.py @@ -17,13 +17,13 @@ class TestMixedLayerSaltContent(TestCase): self.mixed = MixedLayerSaltContent(self.data_manager, '20000101', 1, 1) def test_generate_jobs(self): - jobs = MixedLayerSaltContent.generate_jobs(self.diags, ['psi']) + jobs = MixedLayerSaltContent.generate_jobs(self.diags, ['diagnostic']) self.assertEqual(len(jobs), 2) self.assertEqual(jobs[0], MixedLayerSaltContent(self.data_manager, '20010101', 0, 0)) self.assertEqual(jobs[1], MixedLayerSaltContent(self.data_manager, '20010101', 0, 1)) with self.assertRaises(Exception): - MixedLayerSaltContent.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', '0', '0', '0']) + MixedLayerSaltContent.generate_jobs(self.diags, ['diagnostic', '0', '0', '0', '0', '0', '0', '0']) def test_str(self): self.assertEquals(str(self.mixed), 'Mixed layer salt content Startdate: 20000101 Member: 1 Chunk: 1') diff --git a/test/unit/test_moc.py b/test/unit/test_moc.py index 1a143035edfc3e841d3b9907bfa21ddd2214bf90..0c05f8c4fe7dd3632cc5b662aacda78c95dbfe95 100644 --- a/test/unit/test_moc.py +++ b/test/unit/test_moc.py @@ -17,13 +17,13 @@ class TestMoc(TestCase): self.mixed = Moc(self.data_manager, '20000101', 1, 1) def test_generate_jobs(self): - jobs = Moc.generate_jobs(self.diags, ['psi']) + jobs = Moc.generate_jobs(self.diags, ['diagnostic']) self.assertEqual(len(jobs), 2) self.assertEqual(jobs[0], 
Moc(self.data_manager, '20010101', 0, 0)) self.assertEqual(jobs[1], Moc(self.data_manager, '20010101', 0, 1)) with self.assertRaises(Exception): - Moc.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', '0', '0', '0']) + Moc.generate_jobs(self.diags, ['diagnostic', '0', '0', '0', '0', '0', '0', '0']) def test_str(self): self.assertEquals(str(self.mixed), 'MOC Startdate: 20000101 Member: 1 Chunk: 1') diff --git a/test/unit/test_modelling_realm.py b/test/unit/test_modelling_realm.py new file mode 100644 index 0000000000000000000000000000000000000000..2d44e6a6ddb03a17835afd1f3b5bb1bc881623f0 --- /dev/null +++ b/test/unit/test_modelling_realm.py @@ -0,0 +1,52 @@ +# coding=utf-8 +from unittest import TestCase + +from earthdiagnostics.frequency import Frequencies +from earthdiagnostics.modelingrealm import ModelingRealm, ModelingRealms + + +class TestModellingRealms(TestCase): + + def test_parse(self): + self.assertEquals(ModelingRealms.parse('atmos'), ModelingRealms.atmos) + self.assertEquals(ModelingRealms.parse('atmoschem'), ModelingRealms.atmosChem) + self.assertEquals(ModelingRealms.parse('atmoSChem'), ModelingRealms.atmosChem) + with self.assertRaises(ValueError): + ModelingRealms.parse('badrealm') + + +class TestModellingRealm(TestCase): + + def setUp(self): + self.basin = ModelingRealm('ocean') + + def test_constructor_fail_on_bad_realm(self): + with self.assertRaises(ValueError): + ModelingRealm('badrealm') + + def test_comparison(self): + self.assertEqual(ModelingRealm('ocean'), self.basin) + self.assertNotEqual(ModelingRealm('OCEAN'), self.basin) + self.assertNotEqual(ModelingRealm('atmos'), self.basin) + + def test_get_omon(self): + self.assertEqual(self.basin.get_table_name(Frequencies.monthly, 'specs'), 'Omon') + + def test_get_oimon(self): + self.assertEqual(ModelingRealm('seaIce').get_table_name(Frequencies.monthly, 'specs'), 'OImon') + + def test_get_simon(self): + self.assertEqual(ModelingRealm('seaIce').get_table_name(Frequencies.monthly, 'cmip6'), 
'SImon') + + def test_get_limon(self): + self.assertEqual(ModelingRealm('landIce').get_table_name(Frequencies.monthly, 'specs'), 'LImon') + + def test_get_day(self): + self.assertEqual(ModelingRealm('atmos').get_table_name(Frequencies.daily, 'specs'), 'day') + + def test_get_6hrplev(self): + self.assertEqual(ModelingRealm('atmos').get_table_name(Frequencies.six_hourly, 'specs'), '6hrPlev') + + + + diff --git a/test/unit/test_monthlymean.py b/test/unit/test_monthlymean.py index 91e42a5387c9cf2df305f0f31321aa52caa3cd5d..e2165f5dee2f7bb65259926f1c946ea3801af080 100644 --- a/test/unit/test_monthlymean.py +++ b/test/unit/test_monthlymean.py @@ -26,21 +26,21 @@ class TestMonthlyMean(TestCase): def test_generate_jobs(self): - jobs = MonthlyMean.generate_jobs(self.diags, ['psi', 'var', 'ocean']) + jobs = MonthlyMean.generate_jobs(self.diags, ['diagnostic', 'var', 'ocean']) self.assertEqual(len(jobs), 2) self.assertEqual(jobs[0], MonthlyMean(self.data_manager, '20010101', 0, 0, ModelingRealms.ocean, 'var', Frequencies.daily, '')) self.assertEqual(jobs[1], MonthlyMean(self.data_manager, '20010101', 0, 1, ModelingRealms.ocean, 'var', Frequencies.daily, '')) - jobs = MonthlyMean.generate_jobs(self.diags, ['psi', 'var', 'atmos', 'monthly']) + jobs = MonthlyMean.generate_jobs(self.diags, ['diagnostic', 'var', 'atmos', 'monthly']) self.assertEqual(len(jobs), 2) self.assertEqual(jobs[0], MonthlyMean(self.data_manager, '20010101', 0, 0, ModelingRealms.atmos, 'var', Frequencies.monthly, '')) self.assertEqual(jobs[1], MonthlyMean(self.data_manager, '20010101', 0, 1, ModelingRealms.atmos, 'var', Frequencies.monthly, '')) - jobs = MonthlyMean.generate_jobs(self.diags, ['psi', 'var', 'seaice', 'mon', 'grid']) + jobs = MonthlyMean.generate_jobs(self.diags, ['diagnostic', 'var', 'seaice', 'mon', 'grid']) self.assertEqual(len(jobs), 2) self.assertEqual(jobs[0], MonthlyMean(self.data_manager, '20010101', 0, 0, ModelingRealms.seaIce, 'var', Frequencies.monthly, 'grid')) @@ -48,10 +48,10 @@ 
class TestMonthlyMean(TestCase): Frequencies.monthly, 'grid')) with self.assertRaises(Exception): - MonthlyMean.generate_jobs(self.diags, ['psi']) + MonthlyMean.generate_jobs(self.diags, ['diagnostic']) with self.assertRaises(Exception): - MonthlyMean.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', '0', '0', '0']) + MonthlyMean.generate_jobs(self.diags, ['diagnostic', '0', '0', '0', '0', '0', '0', '0']) def test_str(self): self.assertEquals(str(self.mixed), 'Calculate monthly mean Startdate: 20000101 Member: 1 Chunk: 1 ' diff --git a/test/unit/test_monthlypercentile.py b/test/unit/test_monthlypercentile.py new file mode 100644 index 0000000000000000000000000000000000000000..4b9bbfb09ed3b03bb0aa2b4cd68a373972c844a7 --- /dev/null +++ b/test/unit/test_monthlypercentile.py @@ -0,0 +1,41 @@ +# coding=utf-8 +from unittest import TestCase + +from earthdiagnostics.box import Box +from earthdiagnostics.statistics.monthlypercentile import MonthlyPercentile +from mock import Mock + +from earthdiagnostics.modelingrealm import ModelingRealms + + +class TestMonthlyPercentile(TestCase): + + def setUp(self): + self.data_manager = Mock() + self.diags = Mock() + + self.box = Box() + self.box.min_lat = 0 + self.box.max_lat = 0 + self.box.min_lon = 0 + self.box.max_lon = 0 + + self.diags.config.experiment.get_chunk_list.return_value = (('20010101', 0, 0), ('20010101', 0, 1)) + self.diagnostic = MonthlyPercentile(self.data_manager, '20000101', 1, 1, ModelingRealms.ocean, 'var', [10, 90]) + + def test_generate_jobs(self): + jobs = MonthlyPercentile.generate_jobs(self.diags, ['monpercent', 'var', 'ocean', '10-90']) + self.assertEqual(len(jobs), 2) + self.assertEqual(jobs[0], MonthlyPercentile(self.data_manager, '20010101', 0, 0, ModelingRealms.ocean, 'var', + [10, 90])) + self.assertEqual(jobs[1], MonthlyPercentile(self.data_manager, '20010101', 0, 1, ModelingRealms.ocean, 'var', + [10, 90])) + + with self.assertRaises(Exception): + MonthlyPercentile.generate_jobs(self.diags, 
['monpercent']) + with self.assertRaises(Exception): + MonthlyPercentile.generate_jobs(self.diags, ['monpercent', '0', '0', '0', '0', '0', '0', '0']) + + def test_str(self): + self.assertEquals(str(self.diagnostic), 'Monthly percentile Startdate: 20000101 Member: 1 Chunk: 1 ' + 'Variable: ocean:var Percentiles: 10, 90') diff --git a/test/unit/test_psi.py b/test/unit/test_psi.py index 3099fa82bd3178f7dec2327623e59ec06e7b326e..019e9339ca281274d59ba15b07bf190ee9b457da 100644 --- a/test/unit/test_psi.py +++ b/test/unit/test_psi.py @@ -13,13 +13,13 @@ class TestPsi(TestCase): self.psi = Psi(self.data_manager, '20000101', 1, 1) def test_generate_jobs(self): - jobs = Psi.generate_jobs(self.diags, ['psi']) + jobs = Psi.generate_jobs(self.diags, ['diagnostic']) self.assertEqual(len(jobs), 2) self.assertEqual(jobs[0], Psi(self.data_manager, '20010101', 0, 0)) self.assertEqual(jobs[1], Psi(self.data_manager, '20010101', 0, 1)) with self.assertRaises(Exception): - Psi.generate_jobs(self.diags, ['psi', 'badoption']) + Psi.generate_jobs(self.diags, ['diagnostic', 'badoption']) def test_str(self): self.assertEquals(str(self.psi), 'PSI Startdate: 20000101 Member: 1 Chunk: 1') diff --git a/test/unit/test_rewrite.py b/test/unit/test_rewrite.py index f125947f4c2f82c3c15eb5d0b0cb5e8360419f56..25380fcd81a752a85e679ff93787aa2ec4d0bf77 100644 --- a/test/unit/test_rewrite.py +++ b/test/unit/test_rewrite.py @@ -25,21 +25,21 @@ class TestRewrite(TestCase): def test_generate_jobs(self): - jobs = Rewrite.generate_jobs(self.diags, ['psi', 'var', 'atmos']) + jobs = Rewrite.generate_jobs(self.diags, ['diagnostic', 'var', 'atmos']) self.assertEqual(len(jobs), 2) self.assertEqual(jobs[0], Rewrite(self.data_manager, '20010101', 0, 0, ModelingRealms.atmos, 'var', 'original')) self.assertEqual(jobs[1], Rewrite(self.data_manager, '20010101', 0, 1, ModelingRealms.atmos, 'var', 'original')) - jobs = Rewrite.generate_jobs(self.diags, ['psi', 'var', 'ocean', 'grid']) + jobs = 
Rewrite.generate_jobs(self.diags, ['diagnostic', 'var', 'ocean', 'grid']) self.assertEqual(len(jobs), 2) self.assertEqual(jobs[0], Rewrite(self.data_manager, '20010101', 0, 0, ModelingRealms.ocean, 'var', 'grid')) self.assertEqual(jobs[1], Rewrite(self.data_manager, '20010101', 0, 1, ModelingRealms.ocean, 'var', 'grid')) with self.assertRaises(Exception): - Rewrite.generate_jobs(self.diags, ['psi']) + Rewrite.generate_jobs(self.diags, ['diagnostic']) with self.assertRaises(Exception): - Rewrite.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', '0', '0', '0']) + Rewrite.generate_jobs(self.diags, ['diagnostic', '0', '0', '0', '0', '0', '0', '0']) def test_str(self): self.assertEquals(str(self.mixed), 'Rewrite output Startdate: 20000101 Member: 1 Chunk: 1 ' diff --git a/test/unit/test_variable.py b/test/unit/test_variable.py index 7f3e0b8472157c518f517af25cbafdb1a4812bf5..07199ab822962e8d8c43a6685dddc30ddb4ff96f 100644 --- a/test/unit/test_variable.py +++ b/test/unit/test_variable.py @@ -1,29 +1,27 @@ # coding=utf-8 -# from unittest import TestCase -# -# from earthdiagnostics.variable import Variable -# from earthdiagnostics.modelingrealm import ModelingRealms - - -# class TestVariable(TestCase): -# -# def test__init__(self): -# variable = Variable('alias:alias2,name,standard_name,long_name,ocean,basin,units,' -# 'valid_min,valid_max,grid'.split(',')) -# self.assertEqual(variable.short_name, 'name') -# self.assertEqual(variable.standard_name, 'standard_name') -# self.assertEqual(variable.long_name, 'long_name') -# self.assertEqual(variable.domain, Domains.ocean) -# self.assertEqual(variable.basin, None) -# self.assertEqual(variable.units, 'units') -# self.assertEqual(variable.valid_min, 'valid_min') -# self.assertEqual(variable.valid_max, 'valid_max') -# self.assertEqual(variable.grid, 'grid') -# -# def test_get_variable(self): -# Variable._dict_variables = dict() -# variable = Variable('alias:alias2,name,standard_name,long_name,atmos,basin,units,valid_min,' -# 
'valid_max,grid'.split(',')) -# Variable._dict_variables['var'] = variable -# self.assertIs(Variable.get_variable('var'), variable) -# self.assertIsNone(Variable.get_variable('novar')) + +from mock import Mock +from unittest import TestCase +from earthdiagnostics.variable import CMORTable, VariableAlias + + +class TestCMORTable(TestCase): + + def setUp(self): + self.frequency = Mock() + + def test_str(self): + self.assertEquals(str(CMORTable('name', 'm', 'Month YEAR')), 'name') + + +class TestVariableAlias(TestCase): + + def test_str(self): + alias = VariableAlias('alias') + self.assertEquals(str(alias), 'alias') + alias.basin = 'basin' + self.assertEquals(str(alias), 'alias Basin: basin') + alias.grid = 'grid' + self.assertEquals(str(alias), 'alias Basin: basin Grid: grid') + + diff --git a/test/unit/test_variable_type.py b/test/unit/test_variable_type.py new file mode 100644 index 0000000000000000000000000000000000000000..28dd44f56c34599ce1c9d293d6a1a430e71709d7 --- /dev/null +++ b/test/unit/test_variable_type.py @@ -0,0 +1,20 @@ +# coding=utf-8 +from unittest import TestCase + +from earthdiagnostics.variable_type import VariableType + + +class TestVariableType(TestCase): + + def test_mean(self): + self.assertEqual(VariableType.to_str(VariableType.MEAN), 'mean') + + def test_statistics(self): + self.assertEqual(VariableType.to_str(VariableType.STATISTIC), 'statistics') + + def test_bad_one(self): + with self.assertRaises(ValueError): + VariableType.to_str('bad type') + + + diff --git a/test/unit/test_verticalmean.py b/test/unit/test_verticalmean.py index dc2d32ae8d45f5160f453acdf7263412c53eda90..59d0fb501a35cabcba02d01cdc189d3dc2a302be 100644 --- a/test/unit/test_verticalmean.py +++ b/test/unit/test_verticalmean.py @@ -22,28 +22,28 @@ class TestVerticalMean(TestCase): self.mixed = VerticalMean(self.data_manager, '20000101', 1, 1, 'var', self.box) def test_generate_jobs(self): - jobs = VerticalMean.generate_jobs(self.diags, ['psi', 'var', '0', '100']) + jobs = 
VerticalMean.generate_jobs(self.diags, ['diagnostic', 'var', '0', '100']) self.assertEqual(len(jobs), 2) self.assertEqual(jobs[0], VerticalMean(self.data_manager, '20010101', 0, 0, 'var', self.box)) self.assertEqual(jobs[1], VerticalMean(self.data_manager, '20010101', 0, 1, 'var', self.box)) - jobs = VerticalMean.generate_jobs(self.diags, ['psi', 'var', '0']) + jobs = VerticalMean.generate_jobs(self.diags, ['diagnostic', 'var', '0']) box = Box() box.min_depth = 0 self.assertEqual(len(jobs), 2) self.assertEqual(jobs[0], VerticalMean(self.data_manager, '20010101', 0, 0, 'var', box)) self.assertEqual(jobs[1], VerticalMean(self.data_manager, '20010101', 0, 1, 'var', box)) - jobs = VerticalMean.generate_jobs(self.diags, ['psi', 'var']) + jobs = VerticalMean.generate_jobs(self.diags, ['diagnostic', 'var']) self.assertEqual(len(jobs), 2) self.assertEqual(jobs[0], VerticalMean(self.data_manager, '20010101', 0, 0, 'var', Box())) self.assertEqual(jobs[1], VerticalMean(self.data_manager, '20010101', 0, 1, 'var', Box())) with self.assertRaises(Exception): - VerticalMean.generate_jobs(self.diags, ['psi']) + VerticalMean.generate_jobs(self.diags, ['diagnostic']) with self.assertRaises(Exception): - VerticalMean.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', '0', '0', '0']) + VerticalMean.generate_jobs(self.diags, ['diagnostic', '0', '0', '0', '0', '0', '0', '0']) def test_str(self): self.assertEquals(str(self.mixed), 'Vertical mean Startdate: 20000101 Member: 1 Chunk: 1 Variable: var ' diff --git a/test/unit/test_verticalmeanmeters.py b/test/unit/test_verticalmeanmeters.py index 20599cd56fe64c3f4e817a60c396cf167164fc59..ecfb3da3f5d9b88f2cd5c249b457d70b26563b43 100644 --- a/test/unit/test_verticalmeanmeters.py +++ b/test/unit/test_verticalmeanmeters.py @@ -3,6 +3,7 @@ from unittest import TestCase from earthdiagnostics.box import Box from earthdiagnostics.ocean.verticalmeanmeters import VerticalMeanMeters +from earthdiagnostics.modelingrealm import ModelingRealms from mock 
import Mock @@ -19,31 +20,37 @@ class TestVerticalMeanMeters(TestCase): self.box.min_depth = 0 self.box.max_depth = 100 - self.mixed = VerticalMeanMeters(self.data_manager, '20000101', 1, 1, 'var', self.box) + self.mixed = VerticalMeanMeters(self.data_manager, '20000101', 1, 1, ModelingRealms.ocean, 'var', self.box) def test_generate_jobs(self): - jobs = VerticalMeanMeters.generate_jobs(self.diags, ['psi', 'var', '0', '100']) + jobs = VerticalMeanMeters.generate_jobs(self.diags, ['diagnostic', 'var', '0', '100']) self.assertEqual(len(jobs), 2) - self.assertEqual(jobs[0], VerticalMeanMeters(self.data_manager, '20010101', 0, 0, 'var', self.box)) - self.assertEqual(jobs[1], VerticalMeanMeters(self.data_manager, '20010101', 0, 1, 'var', self.box)) + self.assertEqual(jobs[0], VerticalMeanMeters(self.data_manager, '20010101', 0, 0, ModelingRealms.ocean, 'var', + self.box)) + self.assertEqual(jobs[1], VerticalMeanMeters(self.data_manager, '20010101', 0, 1, ModelingRealms.ocean, 'var', + self.box)) - jobs = VerticalMeanMeters.generate_jobs(self.diags, ['psi', 'var', '0']) + jobs = VerticalMeanMeters.generate_jobs(self.diags, ['diagnostic', 'var', '0']) box = Box(True) box.min_depth = 0 self.assertEqual(len(jobs), 2) - self.assertEqual(jobs[0], VerticalMeanMeters(self.data_manager, '20010101', 0, 0, 'var', box)) - self.assertEqual(jobs[1], VerticalMeanMeters(self.data_manager, '20010101', 0, 1, 'var', box)) + self.assertEqual(jobs[0], VerticalMeanMeters(self.data_manager, '20010101', 0, 0, 'var', ModelingRealms.ocean, + box)) + self.assertEqual(jobs[1], VerticalMeanMeters(self.data_manager, '20010101', 0, 1, 'var', ModelingRealms.ocean, + box)) - jobs = VerticalMeanMeters.generate_jobs(self.diags, ['psi', 'var']) + jobs = VerticalMeanMeters.generate_jobs(self.diags, ['diagnostic', 'var']) self.assertEqual(len(jobs), 2) - self.assertEqual(jobs[0], VerticalMeanMeters(self.data_manager, '20010101', 0, 0, 'var', Box(True))) - self.assertEqual(jobs[1], 
VerticalMeanMeters(self.data_manager, '20010101', 0, 1, 'var', Box(True))) + self.assertEqual(jobs[0], VerticalMeanMeters(self.data_manager, '20010101', 0, 0, 'var', ModelingRealms.ocean, + Box(True))) + self.assertEqual(jobs[1], VerticalMeanMeters(self.data_manager, '20010101', 0, 1, 'var', ModelingRealms.ocean, + Box(True))) with self.assertRaises(Exception): - VerticalMeanMeters.generate_jobs(self.diags, ['psi']) + VerticalMeanMeters.generate_jobs(self.diags, ['diagnostic']) with self.assertRaises(Exception): - VerticalMeanMeters.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', '0', '0', '0']) + VerticalMeanMeters.generate_jobs(self.diags, ['diagnostic', '0', '0', '0', '0', '0', '0', '0']) def test_str(self): self.assertEquals(str(self.mixed), 'Vertical mean meters Startdate: 20000101 Member: 1 Chunk: 1 Variable: var '