diff --git a/.gitignore b/.gitignore
index 3ed3e8b76854f6dc0cb8f79e1661c956572a7584..c583596b75d541f47c68b9ca0c3513c10efc4e10 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,7 @@
-.*.sw*
-.*.log*
+*.pyc
+.idea/*
+doc/build/*
+*.err
+*.out
+.coverage
+htmlcov
\ No newline at end of file
diff --git a/CHANGES b/CHANGES
new file mode 100644
index 0000000000000000000000000000000000000000..11b4c2eec83862f710ff1381f0c310d7660d08ec
--- /dev/null
+++ b/CHANGES
@@ -0,0 +1,5 @@
+3.0.0
+    Complete rewrite in Python
+    Update CDFTools from version 2.1 to 3.0
+    Adoption of CMOR standard
+
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000000000000000000000000000000000000..d3fa2e05d9aed9cd9ca4d233a33903d126df4437
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,6 @@
+include earthdiagnostics/*.csv
+include earthdiagnostics/*.so
+include diags.conf
+include README
+include VERSION
+include earthdiagnostics/EarthDiagnostics.pdf
\ No newline at end of file
diff --git a/README b/README
index 805fa9b23f4afc511dc2e2c4d85fae0adddd9715..f247d009952abfbc0a536addd6730abcd6f5f8a6 100644
--- a/README
+++ b/README
@@ -1,101 +1,12 @@
-To use the ocean diagnostic tools available here and to be able to create new diagnostics,
-you need first to add the following line to your .bashrc:
-PATH=/cfu/software/cdftools2.1/:$PATH
+This tool is a set of diagnostics used at the BSC-ES department for NEMO and EC-EARTH model postprocessing.
+It is based on CDO, NCO and CDFTOOLS 3.0. For CDFTOOLS, a custom build is required.
-#Launching instructions:
+FULL DOCUMENTATION AND HOW TOs
+==============================
-./ocean_pp.bash $path_to_your_file/config_file-ocean_pp.bash
-- You can copy and modify the config_file to any directory if you indicate the good path when launching ocean_pp.bash
-- ocean_pp.bash, README and example of config_file-ocean_pp.bash can be downloaded from git repository: git clone https://dev.cfu.local/cfutools.git
+Check the Earth Diagnostics documentation in PDF format in EarthDiagnostics.pdf, also available in this folder.
-#
+CONTACT
+=======
-####################### DIAGNOSTICS LIST #######################################
-'siasiesiv' : sea ice area + sea ice extent + sea ice volume
-'moc' : Atlantic + Pacific + Indian + Indo-Pacific + Global
- meridional overturning streamfunction
-'max_moc' : Yearly maximum of the Atlantic meridional overturning
- streamfunction between 38N-50N and 500-3000m and
- at 40N.
- Beware that the max_moc diagnostics can not be computed
- if you don't process complete years, as it is a diagnostic
- computed from complete years.
-'area_moc' : Averaged Atlantic meridional overturning streamfunction - between 40N-55N and 1km-2km and between 30N-40N and 1km-2km -'convection' : indices of convective activity in the Labrador, extended Irminger, - GIN and Wedell seas computed as the maximum in mixed layer depth - based on the potential density criteria -'stc' : Subtropical cell strength, computed as the averaged Atlantic - and Pacific overturning streamfunction in 0-25N/S, 0-200m -'ext_raw_ice' : raw ice varibales to be extracted from input files - (called "ice" before february 2013 update)) -'ext_raw_oce' : raw ocean varibales to be extracted from input files - (called "sstsssmld" before february 2013 update)) -'heat_sal_mxl' : mixed layer heat and salt content (potential density criteria) -'psi' : barotropic streamfunction -'gyres' : indices of the barotropic gyres computed as the maximum absolute - barotropic streamfunction in the North Atlantic and North Pacific - subpolar and subtropical gyres and in the South Atlantic, South - Pacific and Indian subtropical gyres and indice of the Antarctic - Circumpolar Current strength -'usalc' : upper layer salt content 0-300m -'temp_lev' : temperature, vertical mean between dif levels (2D) -'sal_lev' : salinity, vertical mean between dif levels (2D) -'lmsalc' : middle + lower layer salt content 300m-bottom -'ohc' : global total ocean heat content -'ohc_specified_layer' : Pointwise ocean heat content in a specified ocean thickness (at present 0-300 m) -'uohc' : global upper (0-350m) ocean heat content -'mohc' : global middle (350-800m) ocean heat content -'lohc' : global lower (800m-bottom) ocean heat content -'xohc' : global mixed layer ocean heat content -'TSec_ave190-220E' : meridional temperature section averaged between 190E and 220E -'SSec_ave190-220E' : meridional salinity section averaged between 190E and 220E (added in february 2013 update) -'vert_Tsections' : zonal temperature sections along 45S, 0N, 45N, and - meridional temperature sections along 30W, 80E, 180E -'vert_Ssections' : zonal salinity sections along 45S, 0N, 45N, and - meridional salinity sections along 30W, 80E, 180E (added in february 2013 update) -'3dtemp' : 3-dimensional temperature interpolated horizontally onto the - atmospheric grid -'3dsal' : 3-dimensional salinity interpolated horizontally onto the - atmospheric grid (added in february 2013 update) -'NAtlohc' : North Atlantic (10-65N) total ocean heat content -'xNAtlohc' : North Atlantic (10-65N) mixed layer ocean heat content -'uNAtlohc' : North Atlantic (10-65N) upper (0-350m) ocean heat content -'mNAtlohc' : North Atlantic (10-65N) middle (350-800m) ocean heat content -'lNAtlohc' : North Atlantic (10-65N) lower (800m-bottom) ocean heat content -'NPacohc' : North Pacific (10-70N) ocean heat content -'xNPacohc' : North Pacific (10-70N) mixed layer ocean heat content -'uNPacohc' : North Pacific (10-70N) upper (0-350m) ocean heat content -'mNPacohc' : North Pacific (10-70N) middle (350-800m) ocean heat content -'lNPacohc' : North Pacific (10-70N) lower (800m-bottom) ocean heat content -'TAtlohc' : Tropical Atlantic (30S-30N) ocean heat content -'xTAtlohc' : Tropical Atlantic (30S-30N) mixed layer ocean heat content -'uTAtlohc' : Tropical Atlantic (30S-30N) upper (0-350m) ocean heat content -'mTAtlohc' : Tropical Atlantic (30S-30N) middle (350-800m) ocean heat - content -'lTAtlohc' : Tropical Atlantic (30S-30N) lower (800m-bottom) ocean heat - content -'TPacohc' : Tropical Pacific (30S-30N) ocean heat content -'xTPacohc' : Tropical Pacific 
(30S-30N) mixed layer ocean heat content -'uTPacohc' : Tropical Pacific (30S-30N) upper (0-350m) ocean heat content -'mTPacohc' : Tropical Pacific (30S-30N) middle (350-800m) ocean heat content -'lTPacohc' : Tropical Pacific (30S-30N) lower (800m-bottom) ocean heat - content -'TIndohc' : Tropical Indian (30S-30N) ocean heat content -'xTIndohc' : Tropical Indian (30S-30N) mixed layer ocean heat content -'uTIndohc' : Tropical Indian (30S-30N) upper (0-350m) ocean heat content -'mTIndohc' : Tropical Indian (30S-30N) middle (350-800m) ocean heat content -'lTIndohc' : Tropical Indian (30S-30N) lower (800m-bottom) ocean heat - content -'Antaohc' : Antarctic (90-60S) ocean heat content -'xAntaohc' : Antarctic (90-60S) mixed layer ocean heat content -'uAntaohc' : Antarctic (90-60S) upper (0-350m) ocean heat content -'mAntaohc' : Antarctic (90-60S) middle (350-800m) ocean heat content -'lAntaohc' : Antarctic (90-60S) lower (800m-bottom) ocean heat content -'Arctohc' : Arctic (65-90N) ocean heat content -'xArctohc' : Arctic (65-90N) mixed layer ocean heat content -'uArctohc' : Arctic (65-90N) upper (0-350m) ocean heat content -'mArctohc' : Arctic (65-90N) middle (350-800m) ocean heat content -'lArctohc' : Arctic (65-90N) lower (800m-bottom) ocean heat content -'temp_lev' : vertical mean of ocean temp (weighted) between level1 and level2 (in numbers, not in meters), specified in the config file -'sal_lev' : vertical mean of ocean sal (weighted) between level1 and level2 (in numbers, not in meters), specified in the config file +For any doubts or suggestions, contact javier.vegas@bsc.es \ No newline at end of file diff --git a/VERSION b/VERSION new file mode 100644 index 0000000000000000000000000000000000000000..13d22bb6857324e2a20bd9229c94eb193a70db43 --- /dev/null +++ b/VERSION @@ -0,0 +1 @@ +3.0.0b18 diff --git a/bin/earthdiags b/bin/earthdiags new file mode 100644 index 0000000000000000000000000000000000000000..6ac2607276ec2558b7179ce10e942048f989218c --- /dev/null +++ b/bin/earthdiags @@ -0,0 +1,26 @@ +#!/usr/bin/env python +# coding=utf-8 + +"""Script for launching Earth Diagnostics""" +import os +import sys + +scriptdir = os.path.abspath(os.path.dirname(sys.argv[0])) +assert sys.path[0] == scriptdir +sys.path[0] = os.path.normpath(os.path.join(scriptdir, os.pardir)) + +# noinspection PyUnresolvedReferences,PyPep8 +from earthdiagnostics.earthdiags import EarthDiags + + +# noinspection PyProtectedMember +def main(): + """ + Entry point for the Earth Diagnostics + """ + if not EarthDiags.parse_args(): + os._exit(1) + os._exit(0) + +if __name__ == "__main__": + main() diff --git a/common_ocean_post.txt b/common_ocean_post.txt deleted file mode 100644 index 0f468dc3a66c0d5bd44bd7ff249791ac996a3b43..0000000000000000000000000000000000000000 --- a/common_ocean_post.txt +++ /dev/null @@ -1,1345 +0,0 @@ -############################################################################### -# This file gathers a set of bash functions that rely on cdftools to # -# # -# reduce_mmo # -# get_diagsMMO # -# get_nemovar # -# get_glorys # -# clean_diagsMMO # -# vertmeansal # -# heat_sal_mxl # -# ohc_specified_layer # -# moc # -# convection # -# psi # -# gyres # -# area_moc # -# max_moc # -# siasiesiv # -# ohc # -# cutsection # -# interp3d # -# setminmax # -# concat # -# gather_memb # -# vertmeanvar # -# # -# Those functions would never have seen the day without Hui Du, # -# usually referred to as Super-Hui. 
# -# # -# He made a crucial work to develop what ended up below in the functions # -# that computes the sea ice extent, sea ice area, ocean heat content and # -# meridional overturning streamfunction. # -# Especially, he developped new options from the cdftools sources to be # -# able to compute the heat content in different basins. # -# # -#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ -# You want to make available a new diagnostic ? @ -# @ -# 1) Get an MMO tar files from any experiment on esnas @ -# 2) Write a bash function that works on a grid_T or grid_U or grid_V or @ -# grid_W file or a combination from this MMO file @ -# --> You can test your function by defining the CON_FILES and NEMOVERSION @ -# variables and by sourcing the current file, the meshmasks will be @ -# available after sourcing, remember to source again after any @ -# modification of your function @ -# --> Your function should work on input files of any resolution @ -# ORCA1/ORCA025/ORCA2 @ -# --> Your function should work on input files of any time length @ -# --> The output file should contain a proper time axis that you can copy @ -# from your input file @ -# --> The output file should be at most a 3d field including the time @ -# dimension @ -# 3) Write a short description of your function, and add its name to the @ -# list above @ -# 4) Go the the ocean_pp.sh script to add a call to your function @ -# @ -# Any doubt ---> vguemas@ic3.cat @ -#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ -# Link constant file for co # -############################################################################### - -cp -f ${CON_FILES}/mesh_mask_nemo.${NEMOVERSION}.nc mesh_hgr.nc -cp -f ${CON_FILES}/mesh_mask_nemo.${NEMOVERSION}.nc mesh_zgr.nc -cp -f ${CON_FILES}/mesh_mask_nemo.${NEMOVERSION}.nc mask.nc -cp -f ${CON_FILES}/new_maskglo.${NEMOVERSION}.nc new_maskglo.nc - -if [ -e ${CON_FILES}/mask.regions.${NEMOVERSION}.nc ] ; then -cp ${CON_FILES}/mask.regions.${NEMOVERSION}.nc mask_regions.nc -fi - -if [[ ! 
-f mask.nc ]] ; then - echo "No configuration files for cdftools" - exit -fi - -############################################################################### -# Reduced number of variables in diag files to save disk space # -# # -# $1 : input grid_T file name # -# $2 : input icemod file name # -# $3 : suffix of output files with nc extension # -# # -# Created in February 2012 Author : vguemas@ic3.cat # -# May 2014 : Compatibility with PA changes to oce output - Virginie # -############################################################################### - -function reduce_mmo { -ncks -O -v sosstsst,sosaline,somixhgt,somxl010,sossheig $1 oce_${3} -typeset var lstvars=`cdo showvar $2` -if [[ ${lstvars/ileadfra} != ${lstvars} ]] ; then - ncks -O -v isnowthi,iicethic,ileadfra,iicetemp $2 ice_${3} -else - ncks -O -v isnowthi,iicethic,iiceconc,iicetemp $2 ice_${3} -fi -ncks -O -v votemper $1 t3d_${3} -} -############################################################################### -# Copy diags or MMO files from esnas # -# # -# $1 : starting date # -# $2 : expid # -# $3 : member # -# $4 : starting leadtime # -# $5 : end leadtime # -# $6 : chunk length in month # -# $7 : nemo/ecearth # -# $8 : diags/MMO # -# $9 : storage frequency (daily/monthly) # -# $10 : list of files extracted from the original tarballs -# # -# Created in May 2012 Author : vguemas@ic3.cat # -# Option 10: June 2013 isabel.andreu-burillo@ic3.cat -############################################################################### - -function get_diagsMMO { -typeset var yyyy0=`echo $1|cut -c1-4` -typeset var mm0=`echo $1|cut -c5-6` -if [ -z "${10}" ] ; then - typeset var lstypes="grid_T grid_U grid_V grid_W icemod" -else - typeset var lstypes=${10} -fi - -typeset var jt -typeset var year1 -typeset var year2 -typeset var mon1 -typeset var mon2 -for jt in $(seq $4 $6 $5) ; do - year1=$(($yyyy0+(10#$mm0+$jt-2)/12)) - mon1=$(((10#$mm0+$jt-2)%12+1)) - year2=$(($yyyy0+(10#$mm0+$jt+$6-3)/12)) - mon2=$(((10#$mm0+$jt+$6-3)%12+1)) - cp /esnas/exp/$7/$2/$1/fc$3/outputs/$8_$2_$1_fc$3_${year1}$(printf "%02d" $mon1)01-${year2}$(printf "%02d" $mon2)*.tar . 
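# A worked example of the leadtime-to-calendar arithmetic above (values are
# illustrative): for start date 19901101 (yyyy0=1990, mm0=11) and chunk start
# leadtime jt=4, year1 = 1990 + (11+4-2)/12 = 1991 and mon1 = (11+4-2)%12 + 1 = 2,
# so the tarball fetched covers 199102 onwards; jt=1 maps back to 199011, the
# start month itself. The 10# prefix forces base-10 arithmetic so that months
# 08 and 09 are not misread as invalid octal literals.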
- if [[ "$8" == "MMO" ]]; then - for filetype in $lstypes; do - tar --wildcards -xvf $8_$2_$1_fc$3_${year1}$(printf "%02d" $mon1)01-${year2}$(printf "%02d" $mon2)*.tar "*${freqkeep}*${filetype}*" - done - else - tar --wildcards -xvf $8_$2_$1_fc$3_${year1}$(printf "%02d" $mon1)01-${year2}$(printf "%02d" $mon2)*.tar - fi - rm -f $8_$2_$1_fc$3_${year1}$(printf "%02d" $mon1)01-${year2}$(printf "%02d" $mon2)*.tar - if [[ `ls *.gz` != '' ]] ; then gunzip -f *.gz ; fi -done - -typeset var listroots -case $8 in - 'diags' ) - listroots="t3d heat_sal_mxl ice moc psi sal_0-300m sal_300-5400m" - if [[ `ls sstsimld*` != '' ]] ; then - listroots=$listroots" sstsimld" - elif [[ `ls sstsssmld*` != '' ]] ; then - listroots=$listroots" sstsssmld" - else - listroots=$listroots" oce" - fi - ;; - 'MMO' ) listroots=$lstypes ;; -esac -case $9 in - 'daily') freqexcl1='1m' ; freqexcl2='MM' ;; - 'monthly' ) freqexcl1='1d' ; freqexcl2='DD' ;; - *) freqexcl1='1d' ; freqexcl2='DD' ;; -esac - -function concat_startdate { - typeset var root - typeset var lstfiles - for root in ${listroots[@]} ; do - if [[ `ls *${root}*` != '' ]] && [[ `ls *${root}*` != ${root}_1m.nc ]] && [[ `ls *${root}* | grep -v "${freqexcl1}" | grep -v "${freqexcl2}"` != '' ]] ; then - if [[ "$8" == "MMO" ]] ; then - lstfiles=`ls *${root}* | grep -v "${root}_$2_$1_fc" | grep -v "${freqexcl1}" | grep -v "${freqexcl2}" | grep -v "km" ` - else - lstfiles=`ls ${root}* | grep -v "${root}_$2_$1_fc" | grep -v "${freqexcl1}" | grep -v "${freqexcl2}" | grep -v "km" ` - fi - if [[ ! -z ${lstfiles} ]] ; then - file1=`echo "${lstfiles}" | tail -n 1` - cdo_version=`cdo -V &> ff ; grep Climate ff | cut -d \ -f 5` - rm ff - #test on cdo version: if >1.5.6, remove valid_min/max attributes to avoid values out of that range to be replaced by NaN - if [[ "$cdo_version" = "`echo -e "$cdo_version\n1.5.6" | sort -V | head -n1`" ]] ; then - if [[ $root == 'grid_T' || $root == 't3d' ]] ; then - for file in $lstfiles ; do - ncatted -O -a valid_max,votemper,d,, $file $file - ncatted -O -a valid_min,votemper,d,, $file $file - done - fi - if [[ $root == 'heat_sal_mxl' ]] ; then - for file in $lstfiles ; do - ncatted -O -a valid_max,somxlheatc,d,, $file $file - ncatted -O -a valid_min,somxlheatc,d,, $file $file - done - fi - fi - - - outfile=${root}_$2_$1_fc$3_$(($yyyy0+(10#$mm0+$4-2)/12))$(printf "%02d" $(((10#$mm0+$4-2)%12+1)))_${year2}$(printf "%02d" $mon2).nc - typeset var lstvars=`cdo showvar $file1` - if [[ ${lstvars/iicenflx} != ${lstvars} ]] ; then for file in $lstfiles ; do ncks -O -x -v iicenflx $file $file ; done ; fi - cdo mergetime $lstfiles ${outfile} - timevar=`ncdump -h ${outfile} | grep UNLIMITED | awk '{print $1}'` - if [[ $timevar == 'time_counter' ]] ; then ncrename -v time_counter,time -d time_counter,time ${outfile} ; fi - if [[ $root == 'moc' ]] ; then - lstdims=`ncdump -h ${outfile} | awk /dimensions:/,/variables:/ | grep -v dimensions: | grep -v variables: | awk '{print $1}'` - if [[ ${lstdims/gsize} != ${lstdims} ]] ; then - ncrename -d gsize,y ${outfile} - fi - lenx=`ncdump -h ${outfile} | grep 'x =' | head -n 1 | awk '{print $3}'` - if [[ $lenx > 1 ]] ; then - if [[ ${lstvars/nav_lon} != ${lstvars} ]] ; then - ncks -O -x -v nav_lon,nav_lat ${outfile} ${outfile} - fi - ncrename -d x,y ${outfile} - fi - ncks -A -v nav_lon,nav_lat `echo $lstfiles | awk '{print $1}' ` ${outfile} - fi - rm -f $lstfiles - if [[ $root == 'sstsimld' || $root == 'sstsssmld' ]] ; then mv ${outfile} oce_$2_$1_fc$3_$(($yyyy0+(10#$mm0+$4-2)/12))$(printf "%02d" 
$(((10#$mm0+$4-2)%12+1)))_${year2}$(printf "%02d" $mon2).nc ; fi - fi - fi - done -} - -concat_startdate $1 $2 $3 $4 $5 $6 $7 $8 - -# These lines aim at concatenating the daily means as well and computing the monthly means from these daily means -if [[ $9 == 'monthly' ]] ; then - freqexcl1='1m' ; freqexcl2='MM' - for root in ${listroots[@]} ; do - outfile=${root}_$2_$1_fc$3_$(($yyyy0+(10#$mm0+$4-2)/12))$(printf "%02d" $(((10#$mm0+$4-2)%12+1)))_${year2}$(printf "%02d" $mon2).nc - if [[ -e $outfile ]] ; then - mv $outfile ${root}_1m.nc - fi - done - concat_startdate $1 $2 $3 $4 $5 $6 $7 $8 - for root in ${listroots[@]} ; do - outfile=${root}_$2_$1_fc$3_$(($yyyy0+(10#$mm0+$4-2)/12))$(printf "%02d" $(((10#$mm0+$4-2)%12+1)))_${year2}$(printf "%02d" $mon2).nc - if [[ -e $outfile ]] ; then - cdo monmean $outfile ${root}_daily2monthly.nc - rm -f $outfile - if [[ -e ${root}_1m.nc ]] ; then - mv ${root}_1m.nc $outfile - ncks -A ${root}_daily2monthly.nc $outfile - rm -f ${root}_daily2monthly.nc - else - mv ${root}_daily2monthly.nc $outfile - fi - else - if [[ -e ${root}_1m.nc ]] ; then - mv ${root}_1m.nc $outfile - fi - fi - done -fi - -rm -f *${freqexcl1}* *${freqexcl2}* - -} -############################################################################### -# Copy NEMOVAR files from esnas # -# # -# $1 : expid # -# $2 : member # -# $3 : start year # -# $4 : end year # -# $5 : start month # -# $6 : end month # -# $7 : list of files extracted from the original tarballs -# # -# Created in May 2012 Author : vguemas@ic3.cat # -# Modified: June 2013 isabel.andreu-burillo@ic3.cat # -############################################################################### - -function get_nemovar { - -if [ -z "$5" ] ; then - typeset var moni=9 -else - typeset var moni=$5 -fi - -if [ -z "$5" ] ; then - typeset var monf=8 -else - typeset var monf=$6 -fi - -typeset var path -typeset var yearf -case $1 in - 'nemovar_s4') path=/esnas/exp/ECMWF/NEMOVAR_S4/outputs/fc$2/s4 ;; - 'nemovar_combine') path=/esnas/exp/ECMWF/NEMOVAR_COMBINE/outputs/opa0/fa9p_1m ;; -esac -typeset var year -typeset var mon -for year in $(seq $3 $4) ; do - case $year in - $3) mona=${moni} ;; - *) mona=1 ;; - esac - case $year in - $4) monb=${monf} ;; - *) monb=12 ;; - esac - for mon in $(seq $mona $monb); do - cp ${path}_fc$2_${year}$(printf "%02d" $mon)*.gz . - done -done -gunzip -f *.gz - -typeset var listroots=${7} -typeset var root -typeset var lstfiles -typeset var ntimes -typeset var jt -for root in ${listroots[@]} ; do - lstfiles=`ls *fc${2}*${root}* | grep -v "${root}_$1_195709_fc$2_${3}09_${4}$(printf "%02d" $monb).nc"` - ncrcat -O -x -v vorbiasp $lstfiles tmp_${root}.nc - cdo settaxis,${3}-$(printf "%02d" $moni)-15,12:00,1mon tmp_${root}.nc ${root}_$1_19570901_fc$2_${3}$(printf "%02d" $moni)_${4}$(printf "%02d" $monf).nc - rm -f $lstfiles tmp_${root}.nc -done -} -############################################################################### -# Copy GLORYS files from esnas # -# # -# $1 : start year # -# $2 : end year # -# $3 : start month # -# $4 : end month # -# # -# Created in June 2013 Author : vguemas@ic3.cat # -############################################################################### - -function get_glorys { -typeset var path=/esnas/exp/MERCATOR/GLORYS2V1/outputs/ORCA1L46 #ORCA025L75_glorys -typeset var lstfiles="" -for year in $(seq $1 $2) ; do - cp ${path}/vosaline_${year}.nc . - cp ${path}/votemper_${year}.nc . 
- ncks -A vosaline_${year}.nc votemper_${year}.nc - rm -f vosaline_${year}.nc - lstfiles=${lstfiles}" "votemper_${year}.nc -done -cdo cat ${lstfiles} tmp.nc -cdo settaxis,${1}-01-15,12:00,1mon tmp.nc tmp2.nc -cdo seldate,${1}-$(printf "%02d" $3)-00,${2}-$(printf "%02d" $4)-31 tmp2.nc grid_T_glorys2v1_19930101_fc0_${1}$(printf "%02d" $3)_${2}$(printf "%02d" $4).nc -rm -f ${lstfiles} tmp.nc tmp2.nc -ncks -O -x -v nav_lon,nav_lat,x_2,y_2 grid_T_glorys2v1_19930101_fc0_${1}$(printf "%02d" $3)_${2}$(printf "%02d" $4).nc grid_T_glorys2v1_19930101_fc0_${1}$(printf "%02d" $3)_${2}$(printf "%02d" $4).nc -ncks -A -v nav_lon,nav_lat mesh_hgr.nc grid_T_glorys2v1_19930101_fc0_${1}$(printf "%02d" $3)_${2}$(printf "%02d" $4).nc -} -############################################################################### -# Clean diags or MMO files after postprocessing # -# # -# $1 : starting date # -# $2 : expid # -# $3 : member # -# $4 : starting leadtime # -# $5 : end leadtime # -# $6 : diags/MMO # -# $7 : list of files extracted from the original tarballs -# # -# Created in May 2012 Author : vguemas@ic3.cat # -# Modified: June 2013 isabel.andreu-burillo@ic3.cat # -############################################################################### - -function clean_diagsMMO { -typeset var yyyy0=`echo $1|cut -c1-4` -typeset var mm0=`echo $1|cut -c5-6` -typeset var year1=$(($yyyy0+(10#$mm0+$4-2)/12)) -typeset var year2=$(($yyyy0+(10#$mm0+$5-2)/12)) -typeset var mon1=$(((10#$mm0+$4-2)%12+1)) -typeset var mon2=$(((10#$mm0+$5-2)%12+1)) - -typeset var listroots - case $6 in - 'diags' ) listroots="t3d" ;; - 'MMO' ) - if [ -z "${7}" ] ; then - listroots="grid_T grid_U grid_V grid_W icemod" - else - listroots=${7} - fi - ;; - esac -typeset var root -typeset var lstfiles -for root in ${listroots[@]} ; do - rm -f ${root}_$2_$1_fc$3_${year1}$(printf "%02d" $mon1)_${year2}$(printf "%02d" $mon2).nc -done -} -############################################################################### -# Vertically averaged salt content # -# # -# $1 : input grid_T file name # -# $2 : upper depth of the layer (in meters) # -# $3 : lower depth of the layer (in meters) # -# $4 : output file name (=> 2D) # -# # -# Created in February 2012 Author : vguemas@ic3.cat # -############################################################################### - -function vertmeansal { -cdo_version=`cdo -V &> ff ; grep Climate ff | cut -d \ -f 5` -rm ff -typeset var ntime=`cdo ntime $1` -typeset var list="" -typeset var jt -for jt in $(seq 1 $ntime); do - ncks -O -d time,$((jt-1)) $1 intvertmeansal.nc - - #test on cdo version: if >1.5.6, remove valid_min/max attributes to avoid values out of that range to be replaced by NaN - if [[ "$cdo_version" > "`echo -e "$cdo_version\n1.5.6" | sort -V | head -n1`" ]] ; then - ncatted -O -a valid_max,deptht,d,, intvertmeansal.nc - ncatted -O -a valid_min,deptht,d,, intvertmeansal.nc - fi - cdfvertmean intvertmeansal.nc vosaline T $2 $3 - ncrename -O -v sovertmean,vertmeansal -d time_counter,time -v time_counter,time vertmean.nc - mv vertmean.nc outputvertmeansal_$jt.nc - list=$list" "outputvertmeansal_$jt.nc - rm -f intvertmeansal.nc -# #test on cdo version: if >1.5.6, remove valid_min/max attributes to avoid values out of that range to be replaced by NaN -# if [[ "$cdo_version" = "`echo -e "$cdo_version\n1.5.6" | sort -V | head -n1`" ]] ; then -# ncatted -O -a valid_max,vertmeansal,d,, outputvertmeansal_$jt.nc outputvertmeansal_$jt.nc -# ncatted -O -a valid_min,vertmeansal,d,, outputvertmeansal_$jt.nc outputvertmeansal_$jt.nc 
-# fi -done -cdo cat $list $4 -ncks -A -v time $1 $4 -rm -f $list -setminmax $4 vertmeansal -} -############################################################################### -# Compute mixed layer heat and salt content # -# # -# $1 : input grid_T file name # -# $2 : output file name (=> 2D x-y ) # -# # -# Created in February 2012 Author : vguemas@ic3.cat # -################################################################################ - -function heat_sal_mxl { -typeset var ntime=`cdo ntime $1` -typeset var list="" -typeset var jt -typeset var lstvars=`cdo showvar $1` -for jt in $(seq 1 $ntime); do - ncks -O -d time,$((jt-1)) $1 intheat_sal_mxl.nc - if [[ ${lstvars/somxl010} == ${lstvars} ]] ; then - cdfmxl intheat_sal_mxl.nc mxl.nc - ncrename -d time_counter,time mxl.nc - ncks -A mxl.nc intheat_sal_mxl.nc - rm -f mxl.nc - fi - cdfmxlheatc intheat_sal_mxl.nc - if [[ $lstvars != ${lstvars/vosaline} ]] ; then - cdfmxlsaltc intheat_sal_mxl.nc - ncks -A mxlsaltc.nc mxlheatc.nc - rm -f mxlsaltc.nc - fi - mv mxlheatc.nc outputintheat_sal_mxl_$jt.nc - timevar=`ncdump -h outputintheat_sal_mxl_$jt.nc | grep UNLIMITED | awk '{print $1}'` - if [[ $timevar == 'time_counter' ]] ; then ncrename -v time_counter,time -d time_counter,time outputintheat_sal_mxl_$jt.nc ; fi - list=$list" "outputintheat_sal_mxl_$jt.nc - rm -f intheat_sal_mxl.nc -done -cdo cat $list $2 -ncks -A -v time $1 $2 -rm -f $list -setminmax $2 somxlheatc -if [[ $lstvars != ${lstvars/vosaline} ]] ; then setminmax $2 somxlsaltc ; fi -} -############################################################################### -# Pointwise Ocean Heat Content in a specified ocean thickness # -# (J/m-2) -# # -# $1 : input grid_T file name # -# $2 : upper depth of the layer (in meters) # -# $3 : lower depth of the layer (in meters) # -# $4 : output file name (=> 2D x-y ) # -# # -# Created in June 2012 Author : isabel.andreu-burillo@ic3.cat # -# May 2014 - Virginie Guemas - Way around the bc that does not work on moore # -############################################################################### - -function ohc_specified_layer { -typeset var ntime=`cdo ntime $1` -typeset var list="" -typeset var jt -ncap2 -v -O -s "heatc_sl=tmask*e3t" mesh_zgr.nc e3t_file.nc -ncrename -d t,time -d z,deptht e3t_file.nc -for jt in $(seq 1 $ntime); do - cdo seltimestep,$jt $1 intohc_slayer.nc - ncks -O -v votemper intohc_slayer.nc intmeantem.nc - ncrename -v votemper,heatc_sl intmeantem.nc #to be commented - cdo mul intmeantem.nc e3t_file.nc heatc_sl_out.nc -#? 
ncks -A -m -v nav_lon,nav_lat $1 heatc_sl_out.nc - # extract the data between the two given depths --> heatc_sl_top.nc - ncks -O -d deptht,$2,$3 heatc_sl_out.nc heatc_sl_top.nc - #perform the integration of ohc down to that level (main contribution) - ncap2 -O -s 'heatc_sl=heatc_sl.total($deptht)' heatc_sl_top.nc heatc_sl_top.nc - # now extract a few levels below, to compute the residual ohc - # lower_bnd=`echo "$3 + 200.0" | bc` -> does not work on new moore - # strip out the .* from $3: - stripped=`echo ${3/.*}` - # addition with float returned: - lower_bnd=`echo $(printf "%f" $(( $stripped + 200)))` - ncks -O -d deptht,$3,$lower_bnd heatc_sl_out.nc heatc_sl_bottom.nc - # obtain the weight for the extra level containing the 300 m - # deptht in the gridT files is positive - # weight = (300.0 - depth_top)/(depth_bottom - depth_top) - # and add the thickness down to 300 m in the next layer - ncpdq -a '-deptht' heatc_sl_top.nc heatc_sl_top_invert.nc - ncks -O -d deptht,0,0,1 heatc_sl_top_invert.nc level_above.nc - ncks -O -d deptht,0,0,1 heatc_sl_bottom.nc level_below.nc - ## Here, add the residual contribution, before adding it to the main contribution - ncrename -v deptht,layerthcknss level_below.nc - ncrename -v deptht,layerthcknss level_above.nc - ncbo -A --op_typ=sub -v layerthcknss level_below.nc level_above.nc depth_diff_lay.nc - ncrename -v layerthcknss,heatc_sl depth_diff_lay.nc - ncap2 -s "heatc_sl=($3 - layerthcknss)" level_above.nc depth_diff_sublay.nc - ncbo --op_typ=/ -v heatc_sl depth_diff_sublay.nc depth_diff_lay.nc factor.nc - ncrename -v heatc_sl,factor factor.nc #to be commented - ncks -A -v factor factor.nc level_below.nc - rm -f depth_diff_sublay.nc depth_diff_lay.nc - ncap2 -O -s "heatc_sl=(factor * heatc_sl)" level_below.nc level_below.nc - ncwa -O -a deptht level_below.nc level_below.nc - ncbo --op_typ=+ -v heatc_sl heatc_sl_top.nc level_below.nc total_heatc_sl.nc - ncap2 -s "heatc_sl=1020.0*4000*heatc_sl" total_heatc_sl.nc heatc_sl_$jt.nc - list=$list" "heatc_sl_$jt.nc - rm -f depth_diff_lay.nc depth_diff_sublay.nc - rm -f heatc_sl_out.nc heatc_sl_top.nc heatc_sl_top_invert.nc heatc_sl_bottom.nc - rm -f level_above.nc level_below.nc - rm -f intohc_slayer.nc intmeantem.nc vertmean.nc total_heatc_sl.nc - rm -f factor.nc -done -cdo cat $list $4 -ncks -A -v time $1 $4 -rm -f $list -rm -f e3t_file.nc -setminmax $4 heatc_sl -} -############################################################################### -# Compute the MOC for oceanic basins # -# # -# $1 : input grid_V file name # -# $2 : output file name (=> 2D, depth-y) # -# # -# Created in March 2012 Author : vguemas@ic3.cat # -############################################################################### - -function moc { -typeset var ntime=`cdo ntime $1` -typeset var list="" -typeset var jt -for jt in $(seq 1 $ntime); do - cdo seltimestep,$jt $1 intmoc.nc - cdfmoc intmoc.nc - ncwa -O -a x moc.nc outmoc_$jt.nc - ncks -O -x -v nav_lon,nav_lat outmoc_$jt.nc outmoc_$jt.nc - timevar=`ncdump -h outmoc_$jt.nc | grep UNLIMITED | awk '{print $1}'` - if [[ $timevar == 'time_counter' ]] ; then ncrename -v time_counter,time -d time_counter,time outmoc_$jt.nc ; fi - list=$list" "outmoc_$jt.nc - rm -f intmoc.nc moc.nc -done -cdo cat $list $2 -lstdims=`ncdump -h $2 | awk /dimensions:/,/variables:/ | grep -v dimensions: | grep -v variables: | awk '{print $1}'` -if [[ ${lstdims/gsize} != ${lstdims} ]] ; then - ncrename -d gsize,y $2 -fi -ncks -A -v nav_lon,nav_lat $1 $2 -ncks -A -v time $1 $2 -rm -f $list -} 
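# Usage sketch for the moc function above (and the area_moc helper defined
# further below). Paths and the NEMOVERSION value are illustrative and must
# match your setup; as the header of this file explains, the functions are
# meant to be used after defining CON_FILES and NEMOVERSION and sourcing
# this file, so that the mesh and mask files get linked:
#
#   export CON_FILES=/esnas/autosubmit/con_files   # assumed location
#   export NEMOVERSION=Ec3.0_O1L46
#   source ./common_ocean_post.txt
#   moc grid_V_input.nc moc_out.nc                 # depth-latitude overturning
#   area_moc moc_out.nc 40 55 moc_40N55N.nc        # average over 40N-55N, 1-2km
#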
-############################################################################### -# # -# Compute the intensity of convection in the four main convection sites # -# # -# $1 : input oce file name containing somxl010 # -# $2 : input grid # -# $3 : output file name (=> index) # -# # -# Created in October 2013 Author : vguemas@ic3.cat # -############################################################################### - -function convection { -case $2 in - 'Ec2.3_O1L42'|'Ec3.0_O1L46'|'N3.2_O1L42'|'N3.3_O1L46'|'nemovar_O1L42') - A1=225;A2=245;A3=215;A4=255; - B1=245;B2=290;B3=215;B4=245; - C1=260;C2=310;C3=245;C4=291; - D1=225;D2=280;D3=1;D4=50;; - - 'Ec3.0_O25L46'|'Ec3.0_O25L75'|'glorys2v1_O25L75') - stop"Option convection not available yet for this configuration" - ;; -esac - -cdo fldmax -selindexbox,${A1},${A2},${A3},${A4} $1 Labrador.nc -ncrename -v somxl010,Labrador Labrador.nc -ncks -O -v Labrador Labrador.nc convection.nc -rm -f Labrador.nc - -cdo fldmax -selindexbox,${B1},${B2},${B3},${B4} $1 Irminger.nc -ncrename -v somxl010,Irminger Irminger.nc -ncks -A -v Irminger Irminger.nc convection.nc -rm -f Irminger.nc - -cdo fldmax -selindexbox,${C1},${C2},${C3},${C4} $1 GIN.nc -ncrename -v somxl010,GIN GIN.nc -ncks -A -v GIN GIN.nc convection.nc -rm -f GIN.nc - -cdo fldmax -selindexbox,${D1},${D2},${D3},${D4} $1 Wedell.nc -ncrename -v somxl010,Wedell Wedell.nc -ncks -A -v Wedell Wedell.nc convection.nc -rm -f Wedell.nc - -mv convection.nc $3 -} -############################################################################### -# # -# Compute the barotropic stream function # -# # -# $1 : input grid_U file name # -# $2 : input grid_V file name # -# $3 : output file name without nc extension (=> 2D x-y) # -# # -# Created in March 2012 Author : vguemas@ic3.cat # -############################################################################### - -function psi { -typeset var ntime=`cdo ntime $1` -typeset var list="" -typeset var jt -for jt in $(seq 1 $ntime); do - cdo seltimestep,$jt $1 intU.nc - cdo seltimestep,$jt $2 intV.nc - cdfpsi intU.nc intV.nc - mv psi.nc psi_U.nc - cdfpsi intU.nc intV.nc V - mv psi.nc psi_V.nc - ncea psi_U.nc psi_V.nc psi_${jt}.nc - timevar=`ncdump -h psi_$jt.nc | grep UNLIMITED | awk '{print $1}'` - if [[ $timevar == 'time_counter' ]] ; then ncrename -v time_counter,time -d time_counter,time psi_$jt.nc ; fi - list=$list" "psi_$jt.nc - rm -f intU.nc intV.nc psi_U.nc psi_V.nc -done -cdo cat $list ${3} -ncks -A -v time $1 ${3} -rm -f $list -} -############################################################################### -# # -# Compute the intensity of the subtropical and subpolar gyres # -# # -# $1 : input psi file name # -# $2 : input grid # -# $3 : output file name ( => index ) # -# # -# Created in October 2013 Author : vguemas@ic3.cat # -############################################################################### - -function gyres { -case $2 in - 'Ec2.3_O1L42'|'Ec3.0_O1L46'|'N3.2_O1L42'|'N3.3_O1L46'|'nemovar_O1L42') - A1=230;A2=275;A3=215;A4=245; - B1=70;B2=145;B3=195;B4=235; - C1=45;C2=175;C3=165;C4=220; - D1=195;D2=275;D3=175;D4=225; - E1=70;E2=205;E3=120;E4=145; - F1=235;F2=300;F3=120;F4=145; - G1=320;G2=30;G3=110;G4=180; - H1=1;H2=361;H3=1;H4=65;; - - 'Ec3.0_O25L46'|'Ec3.0_O25L75'|'glorys2v1_O25L75') - stop"Option gyres not available yet for this configuration" - ;; -esac - -cdo fldmin -selindexbox,${A1},${A2},${A3},${A4} $1 subpolar_NAtl.nc -ncrename -v sobarstf,subpolNAtl subpolar_NAtl.nc -cdo mulc,-1 subpolar_NAtl.nc gyres.nc -rm -f subpolar_NAtl.nc - -cdo fldmin 
-selindexbox,${B1},${B2},${B3},${B4} $1 subpolar_NPac.nc -ncrename -v sobarstf,subpolNPac subpolar_NPac.nc -cdo mulc,-1 subpolar_NPac.nc tmp.nc -ncks -A tmp.nc gyres.nc -rm -f subpolar_NPac.nc tmp.nc - -cdo fldmax -selindexbox,${C1},${C2},${C3},${C4} $1 subtrop_NPac.nc -ncrename -v sobarstf,subtropNPac subtrop_NPac.nc -ncks -A subtrop_NPac.nc gyres.nc -rm -f subtrop_NPac.nc - -cdo fldmax -selindexbox,${E1},${E2},${E3},${E4} $1 subtrop_SPac.nc -ncrename -v sobarstf,subtropSPac subtrop_SPac.nc -ncks -A subtrop_SPac.nc gyres.nc -rm -f subtrop_SPac.nc - -cdo fldmax -selindexbox,${D1},${D2},${D3},${D4} $1 subtrop_NAtl.nc -ncrename -v sobarstf,subtropNAtl subtrop_NAtl.nc -ncks -A subtrop_NAtl.nc gyres.nc -rm -f subtrop_NAtl.nc - -cdo fldmax -selindexbox,${F1},${F2},${F3},${F4} $1 subtrop_SAtl.nc -ncrename -v sobarstf,subtropSAtl subtrop_SAtl.nc -ncks -A subtrop_SAtl.nc gyres.nc -rm -f subtrop_SAtl.nc - -cdo fldmax -selindexbox,${G1},${G2},${G3},${G4} $1 subtrop_Ind.nc -ncrename -v sobarstf,subtropInd subtrop_Ind.nc -ncks -A subtrop_Ind.nc gyres.nc -rm -f subtrop_Ind.nc - -cdo fldmax -selindexbox,${H1},${H2},${H3},${H4} $1 ACC.nc -ncrename -v sobarstf,ACC ACC.nc -ncks -A ACC.nc gyres.nc -rm -f ACC.nc - -mv gyres.nc $3 - -} -############################################################################### -# # -# Compute an Atlantic MOC index by averaging the meridional overturning # -# in a latitude band between 1km and 2km # -# or any other index averaging the meridional overturning in # -# a given basin and a given domain # -# # -# $1 : input moc file name # -# $2 : latitude min # -# $3 : latitude max # -# $4 : output file name ( => index ) # -# $5 : depth min (default : 1km) # -# $6 : depth max (default : 2km) # -# $7 : basin (default : zomsfatl) # -# # -# Created in March 2012 Author : vguemas@ic3.cat # -############################################################################### - -function area_moc { -if [ -z "$5" ] ; then - typeset var depmin=-1000.0 -else - typeset var depmin=-$5 -fi -if [ -z "$6" ] ; then - typeset var depmax=-2000.0 -else - typeset var depmax=-$6 -fi -if [ -z "$7" ] ; then - typeset var basin=zomsfatl -else - typeset var basin=$7 -fi -lstdims=`ncdump -h $1 | awk /dimensions:/,/variables:/ | grep -v dimensions: | grep -v variables: | awk '{print $1}'` -if [[ ${lstdims/x} != ${lstdims} ]] ; then - ncwa -O -a x $1 tmpmoc.nc -else - cp $1 tmpmoc.nc -fi -ncrename -O -d y,lat -v nav_lat,lat tmpmoc.nc tmpmoc.nc -ncks -O -v $basin,time,depthw,lat tmpmoc.nc tmpmoc.nc -ncks -O -d lat,$2,$3 -d depthw,${depmax},${depmin} tmpmoc.nc area_moc.nc -cdo vertmean area_moc.nc area_ave_moc.nc -ncap -O -s "coslat[lat]=cos(lat[lat]*3.141592657/180.0)" area_ave_moc.nc area_ave_moc2.nc -ncwa -w coslat -a lat area_ave_moc2.nc area_ave_moc3.nc -ncks -O -v $basin,time area_ave_moc3.nc $4 -rm -f tmpmoc.nc area_moc.nc area_ave_moc2.nc area_ave_moc3.nc -if [[ $4 != area_ave_moc.nc ]] ; then - rm -f area_ave_moc.nc -fi -} -############################################################################### -# # -# Compute an Atlantic MOC index by finding the maximum of the annual # -# mean meridional overturning in a latitude / depth region # -# # -# $1 : input moc file name # -# $2 : latitude min # -# $3 : latitude max # -# $4 : depth mean # -# $5 : depth max # -# $6 : output file name ( => index ) # -# # -# Created in March 2012 Author : vguemas@ic3.cat # -############################################################################### - -function max_moc { -if [ ! 
-f $6 ] ; then - ncecat -h $1 tmpmoc1.nc - lstdims=`ncdump -h tmpmoc1.nc | awk /dimensions:/,/variables:/ | grep -v dimensions: | grep -v variables: | awk '{print $1}'` - if [[ ${lstdims/x} != ${lstdims} ]] ; then - ncwa -O -a x tmpmoc1.nc tmpmoc1.nc - fi - ncrename -d record,x tmpmoc1.nc - ncpdq -O -h -a time,x tmpmoc1.nc tmpmoc1.nc - ncpdq -O -h -a depthw,x tmpmoc1.nc tmpmoc1.nc - ncpdq -O -h -a y,x tmpmoc1.nc tmpmoc1.nc - cdo yearmean tmpmoc1.nc tmpmoc.nc - typeset var ntime=`cdo ntime tmpmoc.nc` - typeset var list="" - for jt in $(seq 1 $ntime) ; do - cdo seltimestep,$jt tmpmoc.nc tmpmoc2.nc - cdfmaxmoc tmpmoc2.nc atl $2 $3 $4 $5 - mv maxmoc.nc maxmoc_$jt.nc - timevar=`ncdump -h maxmoc_$jt.nc | grep UNLIMITED | awk '{print $1}'` - if [[ $timevar == 'time_counter' ]] ; then ncrename -v time_counter,time -d time_counter,time maxmoc_$jt.nc ; fi - list=${list}" "maxmoc_$jt.nc - rm -f tmpmoc2.nc - done - cdo cat $list $6 - ncks -A -v time tmpmoc.nc $6 - rm -f $list tmpmoc.nc tmpmoc1.nc -fi -} -############################################################################### -# # -# Compute the sea ice extent (1000km2), area (1000km2), volume (km3) # -# and mean thickness (m) in both hemispheres or a specified region. # -# # -# $1 : input ice file name # -# $2 : output file name ( => index ) # -# $3 : region of interest (if empty default is global) # -# # -# Created in April 2012 Author : vguemas@ic3.cat # -# Modified in June 2014 Author : neven.fuckar@ic3.cat # -# # -# Computation of the properties in various selected regions according to # -# mask.regions.${NEMOVERSION}.nc (mask_regions.nc) is based on modification # -# of mask.regions.ORCA1.noverticalinfo.Matt.nc from Matthieu Chevallier. # -# # -############################################################################### - -function siasiesiv { -cp ${CON_FILES}/ice_template.nc toto_N.nc -cp ${CON_FILES}/ice_template.nc toto_S.nc -case ${NEMOVERSION} in - 'Ec3.0_O1L46'|'Ec3.0_O25L46'|'Ec3.0_O25L75') for var in `cdo showvar $1 | head -n 1` -do -[[ $var = "ice_pres" || $var = "iiceconc" ]] && ncrename -v $var,ileadfra $1 -done;; -#'Ec3.0_O1L46'|'Ec3.0_O25L46') ncrename -v ice_pres,ileadfra $1 ;; -#'Ec3.0_O1L46'|'Ec3.0_O25L46') ncrename -v iiceconc,ileadfra $1 ;; -esac - -typeset var ntime=`cdo ntime $1` -typeset var list1="" -typeset var list2="" -typeset var jt - -if [ ! 
-z "$3" ] ; then - mv mask.nc mask_tmp.nc - mv mask_regions.nc mask.nc - ncrename -h -v $3,tmask mask.nc -fi - -for jt in $(seq 1 $ntime) ; do - cdo seltimestep,$jt $1 tmpice.nc - cdficediags tmpice.nc>ice.txt - for d in N S;do - ncdump toto_${d}.nc > ice_template.cdl - sia=`grep ${d}Area ice.txt |awk '{print $4}'` - sie=`grep ${d}Exnsidc ice.txt|awk '{print $4}'` - siv=`grep ${d}Volume ice.txt|awk '{print $4}'` - sed -e "s/sia =.*/sia = $sia ;/" ice_template.cdl > ice_template2.cdl - sed -e "s/sie =.*/sie = $sie ;/" ice_template2.cdl > ice_template3.cdl - sed -e "s/siv =.*/siv = $siv ;/" ice_template3.cdl > ice_template.cdl - ncgen -o ice_${d}_${jt}.nc ice_template.cdl - rm -f ice_template.cdl ice_template2.cdl ice_template3.cdl - done - list1=$list1" "ice_N_${jt}.nc - list2=$list2" "ice_S_${jt}.nc - rm -f ice.txt tmpice.nc icediags.nc -done -cdo cat $list1 ice_N_${2} -cdo cat $list2 ice_S_${2} -ncks -A -v time $1 ice_N_${2} -ncks -A -v time $1 ice_S_${2} -rm -f $list1 $list2 toto_N.nc toto_S.nc - -for d in N S;do - ncatted -O -a units,sia,m,c,1000km2 ice_${d}_${2} - ncatted -O -a units,sie,m,c,1000km2 ice_${d}_${2} - - ncks -O -v siv ice_${d}_${2} siv_${d}_${2}1 - ncks -O -v sia ice_${d}_${2} sia_${d}_${2}1 - ncrename -h -v sia,siv sia_${d}_${2}1 - ncbo -O --op_typ=dvd siv_${d}_${2}1 sia_${d}_${2}1 sit_${d}_${2} - ncatted -O -a standard_name,siv,m,c,Mean_sea_ice_thickness sit_${d}_${2} - ncatted -O -a long_name,siv,m,c,"Mean sea ice thickness" sit_${d}_${2} - ncatted -O -a units,siv,m,c,m sit_${d}_${2} - cdo ltc,100 sit_${d}_${2} sit_${d}_${2}1 - cdo ifthenelse sit_${d}_${2}1 sit_${d}_${2} sit_${d}_${2}1 sit_${d}_${2}2 - ncrename -h -v siv,sit sit_${d}_${2}2 - ncks -A sit_${d}_${2}2 ice_${d}_${2} - - rm siv_${d}_${2}1 sia_${d}_${2}1 sit_${d}_${2} sit_${d}_${2}1 sit_${d}_${2}2 -done - -setminmax ice_N_${2} sia sie siv sit -setminmax ice_S_${2} sia sie siv sit - -if [ ! 
-z "$3" ] ; then - ncrename -h -v tmask,$3 mask.nc - mv mask.nc mask_regions.nc - mv mask_tmp.nc mask.nc -fi - -} -############################################################################### -# # -# Compute the total ocean heat extent # -# # -# $1 : input temperature file name # -# $2 : output file name ( => 2D x-y ) # -# $3 : basin (NAtl, NPac, TAtl, TPac, TInd, Anta, Arct, Glob) Default : Glob # -# $4 = 0 if $3 = Glob # -# $4 : mixed layer (1=only, 0=included, -1=without) Default : 0 # -# $5 : upper level of the layer (optional) Default : top # -# $6 : lower level of the layer (optional) Default : bottom # -# # -# Created in May 2012 Author : vguemas@ic3.cat # -############################################################################### -module load CDO/1.5.3-foss-2015a -function ohc { -cp ${CON_FILES}/depth.${NEMOVERSION}.txt depth.txt -# -# Input arguments -# -if [ -z "$3" ] ; then - typeset var basin='Glob' -else - typeset var basin=$3 -fi -if [ -z "$4" ] ; then - typeset var mxl=0 -else - typeset var mxl=$4 -fi -if [ -z "$5" ] ; then - typeset var up=1 -else - typeset var up=$5 -fi -if [ -z "$6" ] ; then - typeset var down=`cat depth.txt | wc -l` -else - typeset var down=$6 -fi - -if [[ ${up} -eq 1 ]] ; then - typeset var depmin=0 -else - typeset var depmin=`cat depth.txt |head -n ${up} |tail -n 1 | awk '{print $2}' | awk '{printf "%.0f",$1}'` -fi -typeset var depmax=`cat depth.txt |head -n ${down} |tail -n 1 | awk '{print $2}' | awk '{printf "%.0f",$1}'` - -cp ${CON_FILES}/heatc_template.nc template_heatc.nc -ncdump template_heatc.nc > template_heatc.cdl -# -# Define some parameters -# -typeset var para -typeset var output -typeset var nlev=`cat depth.txt | wc -l` -if [[ ! -z "$depmin" && ! -z "$depmax" ]] ; then - if [[ $depmin != 0 || ${down} != ${nlev} && ${down} != 0 ]] ; then - output=${depmin}-${depmax}'_' - fi -fi - -case $basin in - 'NAtl') para="atl $mxl 0 0 10 65"; output='NAtl_10N65N_'${output} ;; - 'TAtl') para="atl $mxl 0 0 -30 30" ; output='TAtl_30S30N_'${output} ;; - 'NPac') para="pac $mxl 0 0 10 70" ; output='NPac_10N70N_'${output} ;; - 'TPac') para="pac $mxl 0 0 -30 30" ; output='TPac_30S30N_'${output} ;; - 'Arct') para="atl $mxl 0 0 65 90" ; output='Arc_65N90N_'${output} ;; - 'Anta') para="all $mxl 0 0 -90 -60" ; output='Ant_90S60S_'${output} ;; - 'TInd') para="ind $mxl 0 0 -30 30" ; output='TInd_30S30N_'${output} ;; - 'Glob') para="all $mxl 0 0 0 0" ;; -esac - -case $mxl in - 1) output='mxl_'${output} ;; - -1) output='nonmxl_'${output} ;; -esac -# -# Compute ohc -# -typeset var lstvars=`cdo showvar $1` -typeset var ntime=`cdo ntime $1` -typeset var list="" -typeset var jt -for jt in $(seq 1 $ntime) ; do - cdo seltimestep,$jt $1 tmpohc.nc - lstdims=`ncdump -h tmpohc.nc | awk /dimensions:/,/variables:/ | grep -v dimensions: | grep -v variables: | awk '{print $1}'` - if [[ ${lstdims/x_2} != ${lstdims} ]] ; then - if [[ ${lstdims/x} != ${lstdims} ]] ; then - ncwa -O -a x tmpohc.nc tmpohc.nc - fi - ncrename -d x_2,x tmpohc.nc - fi - if [[ ${lstvars/somxl010} != ${lstvars} ]] ; then - ncks -O -v somxl010 tmpohc.nc mxl.nc - else - cdfmxl tmpohc.nc mxl.nc - fi - cdfheatc-cfu tmpohc.nc $para $up $down > tmp.log -echo $para -echo $up -echo $down - cat tmp.log - thc=`cat tmp.log | grep "Total Heat content :" | awk '{print $5}'`; - uhc=`cat tmp.log | grep "Total Heat content/volume" | awk '{print $5}'`; - sed -e "s/thc =.*/thc = $thc ;/" template_heatc.cdl > template_heatc2.cdl - sed -e "s/uhc =.*/uhc = $uhc ;/" template_heatc2.cdl > template_heatc.cdl - ncgen -o 
heatc_${jt}.nc template_heatc.cdl - rm -f template_heatc2.cdl tmpohc.nc mxl.nc tmp.log - list=$list" "heatc_${jt}.nc -done -cdo cat $list ${output}$2 -ncks -h -A -v time $1 ${output}$2 -rm -f $list template_heatc.nc template_heatc.cdl depth.txt -setminmax ${output}$2 thc uhc -} -############################################################################### -# # -# Cut a meridional or zonal section # -# # -# # -# $1 : input file # -# $2 : input var # -# $3 : Z/M (zonal / meridional section) # -# $4 : lat/lon # -# $5 : output file ( => 2D ) # -# # -# Created in September 2012 Author : vguemas@ic3.cat # -# # -############################################################################### - -function cutsection { - typeset var ntime=`cdo ntime $1` - typeset var nx=`ncdump -h $1|grep 'x = '|head -n 1|cut -f3 -d" "` - typeset var ny=`ncdump -h $1|grep 'y = '|head -n 1|cut -f3 -d" "` - typeset var nz=`ncdump -h $1|grep 'depth'|head -n 1|cut -f3 -d" "` -cat>section.R<max(lon)) {exactpos=exactpos-360} - } - # Collect the indexes defining the section - listi=array(dim=switch('$3','Z'=$nx-2,'M'=$ny-1)) - listj=array(dim=switch('$3','Z'=$nx-2,'M'=$ny-1)) - for (jpt in 1:length(listi)) { - vect=switch('$3','Z'=lat[jpt,],'M'=lon[,jpt+1]) - if (min(abs(vect-exactpos))<(2*360./$nx)) { - pos=sort(abs(vect-exactpos),index.return=T)\$ix[1] - listi[jpt]=switch('$3','Z'=jpt+1,'M'=pos) - listj[jpt]=switch('$3','Z'=pos,'M'=jpt) - } - } - listi=listi[is.na(listi)==F] - listj=listj[is.na(listj)==F] - print(listi) - print(listj) - # Select variable at those indexes - fnc1=open.ncdf('$1') - varout=array(dim=c(length(listi),$nz,$ntime)) - for (jt in 1:$ntime) { - varin=get.var.ncdf(fnc1,'$2',start=c(1,1,1,jt),count=c($nx,$ny,$nz,1)) - varin[which(mask<0.5)]=1e20 - for (jpt in 1:length(listi)) { - varout[jpt,,jt]=varin[listi[jpt],listj[jpt],] - } - } - close.ncdf(fnc1) - # Write the output - wtime=dim.def.ncdf("time","",seq(1,$ntime),unlim=TRUE) - dimout=array(dim=length(listi)) - for (jpt in 1:length(listi)) { - dimout[jpt]=switch('$3','Z'=lon[listi[jpt],listj[jpt]],'M'=lat[listi[jpt],listj[jpt]]) - } - wsec=switch('$3','Z'=dim.def.ncdf("lon","",dimout),'M'=dim.def.ncdf("lat","",dimout)) - wdep=dim.def.ncdf("deptht","",depth) - wvar=var.def.ncdf("$2","",list(wsec,wdep,wtime),1e20) - fnc2=create.ncdf('$5',wvar) - put.var.ncdf(fnc2,wvar,varout) - close.ncdf(fnc2) -EOF1 -R CMD BATCH section.R -ncks -h -A -v time $1 $5 -} -############################################################################### -# # -# 3-dimensional conservative interpolation to the regular atmospheric grid # -# # -# $1 : input file # -# $2 : input var # -# $3 : output file ( => 3D ) # -# # -# Created in November 2012 Author : vguemas@ic3.cat # -# # -############################################################################### - -function interp3d { - typeset var nz=`ncdump -h $1|grep 'deptht'|head -n 1|cut -f3 -d" "` - [[ ! -f scrip_use ]] && ln -sf /shared/earth/software/scripts/interpolation/scrip_use scrip_use - for lev in $(seq 1 $nz) ; do - ncks -O -d deptht,$((lev-1)) -v $2 $1 tmp_${lev}.nc - ncwa -O -h -a deptht tmp_${lev}.nc tmp_${lev}.nc - [[ ! -f rmp_${NEMOVERSION}_to_regular_lev${lev}.nc ]] && ln -sf /esnas/autosubmit/con_files/weigths/${NEMOVERSION}/rmp_${NEMOVERSION}_to_*_lev${lev}.nc rmp_${NEMOVERSION}_to_regular_lev${lev}.nc - cat > scrip_use_in < 2D field ) # -# # -# Created in February 2012 Author : vguemas@ic3.cat # -# Modified (more generic, -# i.e. 
for any input var) in December 2014 # -# Author : eleftheria.exarchou@ic3.cat # -############################################################################### - -function vertmeanvar { - typeset var ntime=`cdo ntime $1` - typeset var list="" - typeset var jt -for jt in $(seq 1 $ntime); do - ncks -O -d time,$((jt-1)) $1 a1 - # The oras4 data do not have gdepth data in their mask, but only gdept_0, so: - if grep -q nemovar_s4 $1 ; then - l1=$(( $3 % 6 )) - l2=$(( $4 % 6 )) - ll1=$(( $3 / 6 + 1 )) - ll2=$(( $4 / 6 + 1 )) - lev1=`echo $(cdo output -selvar,gdept_0 mesh_zgr.nc | sed -n ${ll1}p | awk '{ print $'$l1' }')` - lev2=`echo $(cdo output -selvar,gdept_0 mesh_zgr.nc | sed -n ${ll2}p | awk '{ print $'$l2' }')` - else - l1=$(($3+1)) - l2=$(($4+1)) - lev1=`echo $(cdo info -seltimestep,1 -selvar,gdept mesh_zgr.nc | sed -n ${l1}p | awk '{ print $10 }')` - lev2=`echo $(cdo info -seltimestep,1 -selvar,gdept mesh_zgr.nc | sed -n ${l2}p | awk '{ print $10 }')` - fi - cdfvertmean a1 $2 T $lev1 $lev2 - rm -f a1 - ncrename -O -v sovertmean,vertmean -d time_counter,time -v time_counter,time vertmean.nc - mv vertmean.nc outputvertmean_$jt.nc - list=$list" "outputvertmean_$jt.nc - rm -f intvertmean.nc a? -# #test on cdo version: if >1.5.6, remove valid_min/max attributes to avoid values out of that range to be replaced by NaN - ncatted -O -a valid_max,vertmean,d,, outputvertmean_$jt.nc outputvertmean_$jt.nc - ncatted -O -a valid_min,vertmean,d,, outputvertmean_$jt.nc outputvertmean_$jt.nc - ncatted -O -a standard_name,time,a,c,time outputvertmean_$jt.nc outputvertmean_$jt.nc - ncatted -O -a units,time,o,c,'seconds since 1993-05-01 00:00:00' outputvertmean_$jt.nc outputvertmean_$jt.nc - ncatted -O -a long_name,time,a,c,'Time axis' outputvertmean_$jt.nc outputvertmean_$jt.nc -done -cdo cat $list $5 -ncks -A -v time $1 $5 -rm -f $list -setminmax $5 vertmean -ncrename -v vertmean,$2 $5 -#typeset var level=`echo $(cdo info -selvar,$2 -setctomiss,0 $5 | sed -n 2p | awk '{ print $7 }')` -#typeset var lev=`echo $(cdo info -seltimestep,1 -selvar,$2 -setctomiss,0 $1 | grep $level | awk '{ print $1 }')` -lev=$3 -echo $lev -cp $5 tmp_${lev}.nc -## Here we interpolate horizontally onto a regular grid - [[ ! -f rmp_${NEMOVERSION}_to_regular_lev${lev}.nc ]] && ln -sf /esnas/autosubmit/con_files/weigths/${NEMOVERSION}/rmp_${NEMOVERSION}_to_*_lev${lev}.nc rmp_${NEMOVERSION}_to_regular_lev${lev}.nc - [[ ! 
-f scrip_use ]] && ln -sf /shared/earth/software/scripts/interpolation/scrip_use scrip_use
+ cat > scrip_use_in <' where <target> is one of"
+	@echo "  html       to make standalone HTML files"
+	@echo "  dirhtml    to make HTML files named index.html in directories"
+	@echo "  singlehtml to make a single large HTML file"
+	@echo "  pickle     to make pickle files"
+	@echo "  json       to make JSON files"
+	@echo "  htmlhelp   to make HTML files and a HTML help project"
+	@echo "  qthelp     to make HTML files and a qthelp project"
+	@echo "  devhelp    to make HTML files and a Devhelp project"
+	@echo "  epub       to make an epub"
+	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
+	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
+	@echo "  text       to make text files"
+	@echo "  man        to make manual pages"
+	@echo "  texinfo    to make Texinfo files"
+	@echo "  info       to make Texinfo files and run them through makeinfo"
+	@echo "  gettext    to make PO message catalogs"
+	@echo "  changes    to make an overview of all changed/added/deprecated items"
+	@echo "  xml        to make Docutils-native XML files"
+	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
+	@echo "  linkcheck  to check all external links for integrity"
+	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+	rm -rf $(BUILDDIR)/*
+
+html:
+	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+	@echo
+	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+	@echo
+	@echo "Build finished; now you can process the pickle files."
+
+json:
+	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+	@echo
+	@echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+	@echo
+	@echo "Build finished; now you can run HTML Help Workshop with the" \
+	      ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+	@echo
+	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
+	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/EarthDiagnostics.qhcp"
+	@echo "To view the help file:"
+	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/EarthDiagnostics.qhc"
+
+devhelp:
+	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+	@echo
+	@echo "Build finished."
+	@echo "To view the help file:"
+	@echo "# mkdir -p $$HOME/.local/share/devhelp/EarthDiagnostics"
+	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/EarthDiagnostics"
+	@echo "# devhelp"
+
+epub:
+	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+	@echo
+	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo
+	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+	@echo "Run \`make' in that directory to run these through (pdf)latex" \
+	      "(use \`make latexpdf' here to do that automatically)."
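+# For example, to rebuild this documentation locally (assuming Sphinx is
+# installed), one would typically run, from the doc/ directory:
+#
+#   make html        # HTML tree in $(BUILDDIR)/html
+#   make latexpdf    # PDF build, presumably how the EarthDiagnostics.pdf
+#                    # shipped via MANIFEST.in is regenerated
+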
+ +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +latexpdfja: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through platex and dvipdfmx..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." + +xml: + $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml + @echo + @echo "Build finished. The XML files are in $(BUILDDIR)/xml." + +pseudoxml: + $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml + @echo + @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." diff --git a/doc/source/codedoc/earthdiagnostics.rst b/doc/source/codedoc/earthdiagnostics.rst new file mode 100644 index 0000000000000000000000000000000000000000..0bd7d0319695a2f2fb0c2e18fcdcd72d70a1258a --- /dev/null +++ b/doc/source/codedoc/earthdiagnostics.rst @@ -0,0 +1,87 @@ +earthdiagnostics +================ + +earthdiagnostics.box +-------------------- +.. automodule:: earthdiagnostics.box + :show-inheritance: + :inherited-members: + :members: + +earthdiagnostics.cdftools +------------------------- +.. automodule:: earthdiagnostics.cdftools + :show-inheritance: + :inherited-members: + :members: + +earthdiagnostics.cmorizer +------------------------- +.. automodule:: earthdiagnostics.cmorizer + :show-inheritance: + :inherited-members: + :members: + +earthdiagnostics.cmormanager +---------------------------- +.. automodule:: earthdiagnostics.cmormanager + :show-inheritance: + :inherited-members: + :members: + +earthdiagnostics.config +----------------------- +.. automodule:: earthdiagnostics.config + :show-inheritance: + :inherited-members: + :members: + +earthdiagnostics.constants +-------------------------- +.. 
automodule:: earthdiagnostics.constants
+   :show-inheritance:
+   :inherited-members:
+   :members:
+
+earthdiagnostics.datamanager
+----------------------------
+.. automodule:: earthdiagnostics.datamanager
+   :show-inheritance:
+   :inherited-members:
+   :members:
+
+
+earthdiagnostics.diagnostic
+---------------------------
+.. automodule:: earthdiagnostics.diagnostic
+   :show-inheritance:
+   :inherited-members:
+   :members:
+
+earthdiagnostics.earthdiags
+---------------------------
+.. automodule:: earthdiagnostics.earthdiags
+   :show-inheritance:
+   :inherited-members:
+   :members:
+
+earthdiagnostics.parser
+-----------------------
+.. automodule:: earthdiagnostics.parser
+   :show-inheritance:
+   :inherited-members:
+   :members:
+
+earthdiagnostics.utils
+----------------------
+.. automodule:: earthdiagnostics.utils
+   :show-inheritance:
+   :inherited-members:
+   :members:
+
+earthdiagnostics.variable
+-------------------------
+.. automodule:: earthdiagnostics.variable
+   :show-inheritance:
+   :inherited-members:
+   :members:
diff --git a/doc/source/codedoc/general.rst b/doc/source/codedoc/general.rst
new file mode 100644
index 0000000000000000000000000000000000000000..a64c615047630e06e2ccd5fc01467c10f932a399
--- /dev/null
+++ b/doc/source/codedoc/general.rst
@@ -0,0 +1,20 @@
+earthdiagnostics.general
+========================
+
+earthdiagnostics.general.monthlymean
+------------------------------------
+.. automodule:: earthdiagnostics.general.monthlymean
+   :show-inheritance:
+   :members:
+
+earthdiagnostics.general.relink
+-------------------------------
+.. automodule:: earthdiagnostics.general.relink
+   :show-inheritance:
+   :members:
+
+earthdiagnostics.general.rewrite
+--------------------------------
+.. automodule:: earthdiagnostics.general.rewrite
+   :show-inheritance:
+   :members:
diff --git a/doc/source/codedoc/main.rst b/doc/source/codedoc/main.rst
new file mode 100644
index 0000000000000000000000000000000000000000..16072848955a410b3f76a83883c7f392ad142a29
--- /dev/null
+++ b/doc/source/codedoc/main.rst
@@ -0,0 +1,10 @@
+********************
+Module documentation
+********************
+
+.. toctree::
+   :titlesonly:
+
+   earthdiagnostics
+   general
+   ocean
\ No newline at end of file
diff --git a/doc/source/codedoc/ocean.rst b/doc/source/codedoc/ocean.rst
new file mode 100644
index 0000000000000000000000000000000000000000..2da23cabf3372ccd784b4fb0980f812d4663cd7c
--- /dev/null
+++ b/doc/source/codedoc/ocean.rst
@@ -0,0 +1,104 @@
+earthdiagnostics.ocean
+======================
+
+earthdiagnostics.ocean.areamoc
+------------------------------
+.. automodule:: earthdiagnostics.ocean.areamoc
+   :show-inheritance:
+   :members:
+
+earthdiagnostics.ocean.averagesection
+-------------------------------------
+.. automodule:: earthdiagnostics.ocean.averagesection
+   :show-inheritance:
+   :members:
+
+earthdiagnostics.ocean.convectionsites
+--------------------------------------
+.. automodule:: earthdiagnostics.ocean.convectionsites
+   :show-inheritance:
+   :members:
+
+earthdiagnostics.ocean.cutsection
+---------------------------------
+.. automodule:: earthdiagnostics.ocean.cutsection
+   :show-inheritance:
+   :members:
+
+earthdiagnostics.ocean.gyres
+----------------------------
+.. automodule:: earthdiagnostics.ocean.gyres
+   :show-inheritance:
+   :members:
+
+earthdiagnostics.ocean.heatcontent
+----------------------------------
+.. automodule:: earthdiagnostics.ocean.heatcontent
+   :show-inheritance:
+   :members:
+
+earthdiagnostics.ocean.heatcontentlayer
+---------------------------------------
+..
automodule:: earthdiagnostics.ocean.heatcontentlayer + :show-inheritance: + :members: + +earthdiagnostics.ocean.interpolate +---------------------------------- +.. automodule:: earthdiagnostics.ocean.interpolate + :show-inheritance: + :members: + +earthdiagnostics.ocean.interpolatecdo +------------------------------------- +.. automodule:: earthdiagnostics.ocean.interpolatecdo + :show-inheritance: + :members: + +earthdiagnostics.ocean.maxmoc +----------------------------- +.. automodule:: earthdiagnostics.ocean.maxmoc + :show-inheritance: + :members: + +earthdiagnostics.ocean.mixedlayerheatcontent +-------------------------------------------- +.. automodule:: earthdiagnostics.ocean.mixedlayerheatcontent + :show-inheritance: + :members: + +earthdiagnostics.ocean.mixedlayersaltcontent +-------------------------------------------- +.. automodule:: earthdiagnostics.ocean.mixedlayersaltcontent + :show-inheritance: + :members: + +earthdiagnostics.ocean.moc +-------------------------- +.. automodule:: earthdiagnostics.ocean.moc + :show-inheritance: + :members: + +earthdiagnostics.ocean.psi +-------------------------- +.. automodule:: earthdiagnostics.ocean.psi + :show-inheritance: + :members: + +earthdiagnostics.ocean.siasiesiv +-------------------------------- +.. automodule:: earthdiagnostics.ocean.siasiesiv + :show-inheritance: + :members: + +earthdiagnostics.ocean.verticalmean +----------------------------------- +.. automodule:: earthdiagnostics.ocean.verticalmean + :show-inheritance: + :members: + +earthdiagnostics.ocean.verticalmeanmeters +----------------------------------------- +.. automodule:: earthdiagnostics.ocean.verticalmeanmeters + :show-inheritance: + :members: diff --git a/doc/source/conf.py b/doc/source/conf.py new file mode 100644 index 0000000000000000000000000000000000000000..27204143eaa8e5e6db6d572b037fe51a7139f86d --- /dev/null +++ b/doc/source/conf.py @@ -0,0 +1,343 @@ +# -*- coding: utf-8 -*- +# +# Earth Diagnostics documentation build configuration file, created by +# sphinx-quickstart on Fri May 13 12:40:01 2016. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# sys.path.insert(0, os.path.abspath('.')) +sys.path.insert(0, os.path.abspath('../..')) +print os.path.abspath('../..') + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.doctest', + 'sphinx.ext.intersphinx', + 'sphinx.ext.todo', + 'sphinx.ext.coverage', + 'sphinx.ext.pngmath', + 'sphinx.ext.ifconfig', + 'sphinx.ext.viewcode', +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix of source filenames. +source_suffix = '.rst' + +# The encoding of source files. 
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'Earth Diagnostics'
+copyright = u'2016, BSC-CNS Earth Sciences Department'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = '3.0b'
+# The full version, including alpha/beta/rc tags.
+release = '3.0.0b18'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = []
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+show_authors = True
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+#keep_warnings = False
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages.  See the documentation for
+# a list of builtin themes.
+html_theme = 'default'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further.  For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents.  If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar.  Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+#html_extra_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it.  The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'EarthDiagnosticsdoc'
+
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+# The paper size ('letterpaper' or 'a4paper').
+#'papersize': 'letterpaper',
+
+# The font size ('10pt', '11pt' or '12pt').
+#'pointsize': '10pt',
+
+# Additional stuff for the LaTeX preamble.
+#'preamble': '',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+#  author, documentclass [howto, manual, or own class]).
+latex_documents = [
+  ('index', 'EarthDiagnostics.tex', u'Earth Diagnostics Documentation',
+   u'BSC-CNS Earth Sciences Department', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+    ('index', 'earthdiagnostics', u'Earth Diagnostics Documentation',
+     [u'BSC-CNS Earth Sciences Department'], 1)
+]
+
+# If true, show URL addresses after external links.
+#man_show_urls = False
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+#  dir menu entry, description, category)
+texinfo_documents = [
+  ('index', 'EarthDiagnostics', u'Earth Diagnostics Documentation',
+   u'BSC-CNS Earth Sciences Department', 'EarthDiagnostics', 'One line description of project.',
+   'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+#texinfo_appendices = []
+
+# If false, no module index is generated.
+#texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#texinfo_show_urls = 'footnote'
+
+# If true, do not generate a @detailmenu in the "Top" node's menu.
+#texinfo_no_detailmenu = False
+
+
+# -- Options for Epub output ----------------------------------------------
+
+# Bibliographic Dublin Core info.
+epub_title = u'Earth Diagnostics'
+epub_author = u'BSC-CNS Earth Sciences Department'
+epub_publisher = u'BSC-CNS Earth Sciences Department'
+epub_copyright = u'2016, BSC-CNS Earth Sciences Department'
+
+# The basename for the epub file. It defaults to the project name.
+#epub_basename = u'Earth Diagnostics'
+
+# The HTML theme for the epub output. Since the default themes are not optimized
+# for small screen space, using the same theme for HTML and epub output is
+# usually not wise. This defaults to 'epub', a theme designed to save visual
+# space.
+#epub_theme = 'epub'
+
+# The language of the text. It defaults to the language option
+# or en if the language is not set.
+#epub_language = ''
+
+# The scheme of the identifier. Typical schemes are ISBN or URL.
+#epub_scheme = ''
+
+# The unique identifier of the text. This can be a ISBN number
+# or the project homepage.
+#epub_identifier = ''
+
+# A unique identification for the text.
+#epub_uid = ''
+
+# A tuple containing the cover image and cover page html template filenames.
+#epub_cover = ()
+
+# A sequence of (type, uri, title) tuples for the guide element of content.opf.
+#epub_guide = ()
+
+# HTML files that should be inserted before the pages created by sphinx.
+# The format is a list of tuples containing the path and title.
+#epub_pre_files = []
+
+# HTML files that should be inserted after the pages created by sphinx.
+# The format is a list of tuples containing the path and title.
+#epub_post_files = []
+
+# A list of files that should not be packed into the epub file.
+epub_exclude_files = ['search.html']
+
+# The depth of the table of contents in toc.ncx.
+#epub_tocdepth = 3
+
+# Allow duplicate toc entries.
+#epub_tocdup = True
+
+# Choose between 'default' and 'includehidden'.
+#epub_tocscope = 'default'
+
+# Fix unsupported image types using the PIL.
+#epub_fix_images = False
+
+# Scale large images.
+#epub_max_image_width = 0
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#epub_show_urls = 'inline'
+
+# If false, no index is generated.
+#epub_use_index = True
+
+
+# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
diff --git a/doc/source/developers.rst b/doc/source/developers.rst
new file mode 100644
index 0000000000000000000000000000000000000000..0a259ae202cfeb7a4d8c3b0c87827c5baa59a722
--- /dev/null
+++ b/doc/source/developers.rst
@@ -0,0 +1,60 @@
+Developer's guide
+=================
+
+The tool provides a set of useful diagnostics, but a lot more can be required at any time.
+If you miss something and are able to develop it, you are more than welcome to collaborate. Even if you can not
+develop it yourself, please let us know what you want.
+
+The first step is to go to the GitLab page for the project ( https://earth.bsc.es/gitlab/es/ocean_diagnostics/ )
+and open a new issue. Be sure that the title is self-explanatory and give a detailed description of what you want.
+Please, be very explicit about what you want, to avoid misunderstandings.
+
+.. hint::
+
+    If, reading your description, you feel that you are treating the developers as idiots, you are doing it perfectly.
+
+Don't forget to add the relevant tags. At this stage you will have to choose between 'enhancement', if you are
+proposing an improvement on a currently available feature, or 'new feature' in any other case.
+
+Now, if you are thinking of developing it yourself, please refer to the BSC-ES Git strategy ( wiki_link_when_available ).
+If you have any doubts, or just want help to start the development, contact javier.vegas@bsc.es.
+
+
+Developing a diagnostic
+-----------------------
+
+For new diagnostics development, we have some advice to give (a minimal skeleton follows the list):
+
+    * Do not worry about performance at first, just create a version that works. Developers can help you to
+      optimize it later.
+
+    * There is nothing wrong with doing some common preparations in the generate_jobs of the diagnostic.
+
+    * Parallelization is achieved by running multiple diagnostics at a time. You don't need to implement it at
+      diagnostic level.
+
+    * Use the smallest time frame for your diagnostic: if you can work at chunk level, do not ask for full year data.
+
+    * Prefer NCO over CDO, you will have fewer problems when versions change.
+
+    * Ask for help as soon as you get stuck.
+
+    * Always use the methods in Utils instead of writing your own code.
+
+    * Use meaningful variable names. If you are using short names just to write less, please switch to an editor with
+      autocompletion!
+
+    * Do not modify the mesh and mask files, another diagnostic can be using them at the same time.
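+
+As a starting point, here is a minimal sketch of a diagnostic class. It is only a sketch: it assumes that diagnostics
+inherit from the Diagnostic class and parse their options in generate_jobs, as described above, while every other name
+in it is hypothetical. Use the existing diagnostics in earthdiagnostics.general and earthdiagnostics.ocean as the
+authoritative reference.
+
+.. code-block:: python
+
+    # A hypothetical diagnostic: names and signatures are illustrative only
+    from earthdiagnostics.diagnostic import Diagnostic
+
+    class MyDiagnostic(Diagnostic):
+
+        alias = 'mydiag'  # hypothetical name to use in the DIAGS option
+
+        def __init__(self, data_manager, startdate, member, chunk, variable):
+            # the real base class constructor may take different arguments
+            Diagnostic.__init__(self, data_manager)
+            self.startdate = startdate
+            self.member = member
+            self.chunk = chunk
+            self.variable = variable
+
+        @classmethod
+        def generate_jobs(cls, diags, options):
+            # options holds the comma-separated values from the DIAGS line
+            # (assuming options[0] is the diagnostic alias itself); create one
+            # job per startdate, member and chunk so the tool can run them in
+            # parallel (diags.expid_chunks is a hypothetical helper)
+            jobs = list()
+            variable = options[1]
+            for startdate, member, chunk in diags.expid_chunks():
+                jobs.append(cls(diags.data_manager, startdate, member, chunk, variable))
+            return jobs
+
+        def compute(self):
+            # do the actual work here, preferring the methods in Utils
+            pass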
diff --git a/doc/source/diagnostic_list.rst b/doc/source/diagnostic_list.rst
new file mode 100644
index 0000000000000000000000000000000000000000..7539fc3ccd36dc0e5c19bc4de9955b6146991fd1
--- /dev/null
+++ b/doc/source/diagnostic_list.rst
@@ -0,0 +1,93 @@
+Diagnostic list
+===============
+
+In this section you have a list of the available diagnostics, with a small description of each one and a link to
+the full documentation. To see the options available for each diagnostic, check the documentation of its
+generate_jobs method.
+
+Remember that diagnostics are separated by spaces while their options are separated by commas:
+
+.. code-block:: ini
+
+    DIAGS = diag1 diag2,option1,option2 diag3
+
+
+General
+-------
+
+- monmean:
+  Calculates the monthly mean of the given variable.
+  See :class:`~earthdiagnostics.general.monthlymean.MonthlyMean`
+
+- relink:
+  Regenerates the links created in the monthly_mean and daily_mean folders.
+  See :class:`~earthdiagnostics.general.relink.Relink`
+
+- rewrite:
+  Just rewrites the CMOR output of a given variable. Useful to correct metadata or variable units.
+  See :class:`~earthdiagnostics.general.rewrite.Rewrite`
+
+Ocean
+-----
+- areamoc:
+  Compute an Atlantic MOC index. See :class:`~earthdiagnostics.ocean.areamoc.AreaMoc`
+
+- averagesection:
+  Compute an average of a given zone. The variable MUST be on a regular grid.
+  See :class:`~earthdiagnostics.ocean.averagesection.AverageSection`
+
+- convectionsites:
+  Compute the intensity of convection in the four main convection sites.
+  See :class:`~earthdiagnostics.ocean.convectionsites.ConvectionSites`
+
+- cutsection:
+  Cuts a meridional or zonal section. See :class:`~earthdiagnostics.ocean.cutsection.CutSection`
+
+- gyres:
+  Compute the intensity of the subtropical and subpolar gyres. See :class:`~earthdiagnostics.ocean.gyres.Gyres`
+
+- heatcontent:
+  Compute the total ocean heat content. See :class:`~earthdiagnostics.ocean.heatcontent.HeatContent`
+
+- heatcontentlayer:
+  Point-wise ocean heat content in a specified ocean thickness.
+  See :class:`~earthdiagnostics.ocean.heatcontentlayer.HeatContentLayer`
+
+- interpolate:
+  3-dimensional conservative interpolation to the regular atmospheric grid.
+  It can also be used for 2D (i,j) variables. See :class:`~earthdiagnostics.ocean.interpolate.Interpolate`
+
+- interpolateCDO:
+  Bilinear interpolation to a given grid using CDO. See :class:`~earthdiagnostics.ocean.interpolatecdo.InterpolateCDO`
+
+- maxmoc:
+  Compute an Atlantic MOC index by finding the maximum of the annual mean meridional overturning in a
+  latitude / depth region. See :class:`~earthdiagnostics.ocean.maxmoc.MaxMoc`
+
+- mixedlayerheatcontent:
+  Compute mixed layer heat content.
+  See :class:`~earthdiagnostics.ocean.mixedlayerheatcontent.MixedLayerHeatContent`
+
+- mixedlayersaltcontent:
+  Compute mixed layer salt content.
+  See :class:`~earthdiagnostics.ocean.mixedlayersaltcontent.MixedLayerSaltContent`
+
+- moc:
+  Compute the MOC for oceanic basins. See :class:`~earthdiagnostics.ocean.moc.Moc`
+
+- psi:
+  Compute the barotropic stream function. See :class:`~earthdiagnostics.ocean.psi.Psi`
+
+- siasiesiv:
+  Compute the sea ice extent, area and volume in both hemispheres or a specified region.
+  See :class:`~earthdiagnostics.ocean.siasiesiv.Siasiesiv`
+
+- verticalmean:
+  Chooses a vertical level in the ocean, or vertically averages between two or more ocean levels.
+  See :class:`~earthdiagnostics.ocean.verticalmean.VerticalMean`
+
+- verticalmeanmeters:
+  Averages any given variable vertically.
+  See :class:`~earthdiagnostics.ocean.verticalmeanmeters.VerticalMeanMeters`
diff --git a/doc/source/errors.rst b/doc/source/errors.rst
new file mode 100644
index 0000000000000000000000000000000000000000..d71948c1cf2c596167e3f9993418a3e30b123c37
--- /dev/null
+++ b/doc/source/errors.rst
@@ -0,0 +1,38 @@
+What to do if you have an error
+===============================
+
+Sometimes the diagnostics may crash and you will not know why. This section gives you a procedure to follow before
+reporting the issue. It is intended to solve some common problems or, at least, to help you create good issue
+reports. Remember: a good issue report reduces the time required to solve it!
+
+.. hint::
+
+    Please, read the error message carefully. Most of the time it will point you to the problem's source and
+    sometimes even hint at how to solve it by yourself. If this is not the case, or if you find the message obscure
+    even when it was helpful, please contact the developers so it can be improved in further versions.
+
+Try these simple steps BEFORE reporting an issue:
+
+* Clean the scratch folder
+* Update to the latest compatible tag: maybe your issue is already solved in it
+* If you get the error for the first chunk of a given diagnostic, change the number of chunks to 1
+* Call the diags with the -lc DEBUG -log log.txt options
+
+Now, you have two options: if everything is fine, the error was probably due to some corrupted files or some unstable
+machine state. Nevertheless, try running the diagnostic with -lc DEBUG -log log.txt for all the chunks. If everything
+is fine, that's all.
+
+If you experience the same problem again, go to the GitLab portal and look into the open issues
+( https://earth.bsc.es/gitlab/es/ocean_diagnostics/issues ). If you find your issue or a very similar one, use it to
+report your problems. If you can not find an open one that suits your problem, create a new one and explain what is
+happening to you.
In any case, it will be very useful if you can attach your diags.conf and log.txt files and specify
+the machine you were using.
+
+After that, it's just a matter of waiting for the developers to do their work and answering the questions they may
+have. Please, be patient.
+
+.. caution::
+
+    Of course, there is a third option: you keep experiencing an error that appears randomly on some executions but you
+    are not able to reproduce it in a consistent manner. Report it and attach as many logs and configuration files as
+    you have, along with the date and time of the errors.
\ No newline at end of file
diff --git a/doc/source/faq.rst b/doc/source/faq.rst
new file mode 100644
index 0000000000000000000000000000000000000000..656a2a6684c7b7f7fee490ea4e98437343356591
--- /dev/null
+++ b/doc/source/faq.rst
@@ -0,0 +1,4 @@
+Frequently Asked Questions
+==========================
+
+Here you will find the answers to the most common questions. For the moment, there is nothing to see here...
\ No newline at end of file
diff --git a/doc/source/index.rst b/doc/source/index.rst
new file mode 100644
index 0000000000000000000000000000000000000000..c7858dc9f19a612427cff53bb5a6be5e26fc9152
--- /dev/null
+++ b/doc/source/index.rst
@@ -0,0 +1,18 @@
+.. Earth Diagnostics documentation master file, created by
+   sphinx-quickstart on Fri May 13 12:40:01 2016.
+   You can adapt this file completely to your liking, but it should at least
+   contain the root `toctree` directive.
+
+Welcome to Earth Diagnostics' documentation!
+=============================================
+
+.. toctree::
+   :maxdepth: 3
+
+   tutorial
+   diagnostic_list
+   tips
+   errors
+   developers
+   faq
+   codedoc/main
diff --git a/doc/source/tips.rst b/doc/source/tips.rst
new file mode 100644
index 0000000000000000000000000000000000000000..0cd7de979daf07ea96ad15066099f5d485b315fd
--- /dev/null
+++ b/doc/source/tips.rst
@@ -0,0 +1,25 @@
+Tips and tricks
+===============
+
+Working with ORCA1
+------------------
+
+If you plan to run diagnostics at the ORCA1 resolution, be aware that your workstation is more than capable of
+running them. At this resolution, memory and CPU consumption is low enough to let you keep using the machine while
+they run, especially if you reserve a couple of cores for other uses.
+
+Configuring core usage
+----------------------
+
+By default, the Earth Diagnostics creates one thread per available core for the execution. If you are using a queueing
+system, the diagnostics will always use the number of cores that you reserved. If you are running outside a queueing
+system, the diagnostics will try to use all the cores on the machine. To avoid this, add the MAX_CORES parameter to the
+DIAGNOSTICS section inside the diags.conf file that you are using.
+
+NEMO files
+----------
+
+Unlike the bash version of the ocean diagnostics, this program keeps the NEMO files in the scratch folder, so you can
+launch different configurations for the same experiment with a reduced start time. You will need to remove the
+experiment's folder in the scratch directory at the end of the experiment to avoid wasting resources.
+
diff --git a/doc/source/tutorial.rst b/doc/source/tutorial.rst
new file mode 100644
index 0000000000000000000000000000000000000000..136c7bc307c5f4a0a2d513c89896392922ef4627
--- /dev/null
+++ b/doc/source/tutorial.rst
@@ -0,0 +1,67 @@
+Tutorial
+========
+
+So, you are planning to use the Earth Diagnostics but don't know how to use them? This is the place to go.
+From now on, this tutorial will guide you through the whole process, from installation to running.
+
+.. hint::
+
+    If you have any problem with this tutorial, please report it so it can be corrected.
+    A lot of people will benefit from it.
+
+Installation
+------------
+
+For now, you only have one option: download the diagnostics directly from BSC-ES's Gitlab:
+
+.. code-block:: sh
+
+    git clone https://earth.bsc.es/gitlab/es/ocean_diagnostics.git
+
+You will also need
+
+* CDO version 1.6.9 (other versions could work, but this is the one we use)
+* NCO version 4.5.4 or newer
+* Python 2.7 or newer (but not 3.x) with the Autosubmit, CDO and NCO packages, among others. A virtual environment
+  with all requirements fulfilled is available at /shared/earth/ClimatePrediction/EarthDiagnostics
+* Access to the CDFTOOLS_3.0 executables for BSC-ES. At this point, those are located at
+  /shared/earth/ClimatePrediction/CDFTOOLS_CMOR/bin.
+
+Creating a config file
+----------------------
+
+Go to the folder where you installed the EarthDiagnostics. You will see a folder called earthdiagnostics,
+and, inside it, a diags.conf file that can be used as a model for your config file. Create a copy of it wherever it
+suits you.
+
+Now open your brand new copy with your preferred text editor. The file contains comments explaining each
+one of its options, so read it carefully and edit whatever you need. Don't worry about the DIAGS option, we will
+talk about it next.
+
+After this, you need to choose the diagnostics you want to run. For a simple test, we recommend the monmean
+diagnostic, which computes monthly means from daily data: it can be used with any variable, its parameters are quite
+intuitive and it is relatively fast to compute. If your experiment does not have daily data, you can use any other
+diagnostic. Check the next section for a list of the available diagnostics and choose whichever suits you best.
+From now on, we will assume that you are going to run the monmean diagnostic.
+
+.. hint::
+
+    For old Ocean Diagnostics users: you can use most of the old names as aliases to launch one or multiple
+    diagnostics. Check the ALIAS section of the diags.conf to see which ones are available.
+
+First, choose a variable that has daily data. Then replace the DIAGS option with the next one, where $VARIABLE
+represents the variable's name and $DOMAIN its domain (atmos, ocean, seaice, landice...):
+
+.. code-block:: sh
+
+    DIAGS = monmean,$VARIABLE,$DOMAIN
+
+Prepare the run script
+----------------------
+
+Once you have configured your experiment, you can execute any diagnostic with the provided launch_diags.sh script.
+Create a copy and change the variables PATH_TO_CONF_FILE and PATH_TO_DIAGNOSTICS so they point to your conf file and
+your installation folder.
+
+Now, execute the script (or submit it to bsceslogin01, it has the correct header) and... that's it!
+You will find your results directly on the storage, and a folder for the temporary files in the scratch directory,
+named after the EXPID.
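+
+As a recap, here is a minimal sketch of the relevant options in your config file. It assumes that, like the MAX_CORES
+parameter described in the tips section, the DIAGS option lives in the DIAGNOSTICS section of diags.conf, and the
+values below are hypothetical examples only:
+
+.. code-block:: ini
+
+    [DIAGNOSTICS]
+    # monthly means of the daily near-surface air temperature
+    DIAGS = monmean,tas,atmos
+    # optional: limit the diagnostics to four parallel threads
+    MAX_CORES = 4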
diff --git a/earthdiagnostics/EarthDiagnostics.pdf b/earthdiagnostics/EarthDiagnostics.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..87355f4c78cde080d0dc20b23341c2da73ba26de
Binary files /dev/null and b/earthdiagnostics/EarthDiagnostics.pdf differ
diff --git a/earthdiagnostics/__init__.py b/earthdiagnostics/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..75244852b819646f7172bbc4187109cad3850719
--- /dev/null
+++ b/earthdiagnostics/__init__.py
@@ -0,0 +1,14 @@
+# coding=utf-8
+"""
+Module containing the Earth Diagnostics.
+"""
+from cdo import Cdo
+from nco import Nco
+from earthdiagnostics.cdftools import CDFTools
+import os
+
+cdo = Cdo()
+nco = Nco()
+cdftools = CDFTools('/home/Earth/jvegas/CDFTOOLS_3.0/bin')
+DEVNULL = open(os.devnull, 'wb')
+
diff --git a/earthdiagnostics/box.py b/earthdiagnostics/box.py
new file mode 100644
index 0000000000000000000000000000000000000000..68b24bca4beeb0f9826fecadfb7e0b5cf0c5d4c3
--- /dev/null
+++ b/earthdiagnostics/box.py
@@ -0,0 +1,162 @@
+# coding=utf-8
+class Box(object):
+    """
+    Represents a box in the 3D space. Also allows easy conversion from the coordinate values to meaningful string
+    representations
+    """
+    def __init__(self, depth_in_meters=False):
+        self.depth_in_meters = depth_in_meters
+        """
+        If True, treats the depth as if it is given in meters. If False, as it is given in levels
+        :rtype: bool
+        """
+        self._max_lat = None
+        self._min_lat = None
+        self._max_lon = None
+        self._min_lon = None
+        self.max_depth = None
+        """
+        Maximum depth
+        :rtype: float
+        """
+        self.min_depth = None
+        """
+        Minimum depth
+        :rtype: float
+        """
+
+    def __eq__(self, other):
+        return self.depth_in_meters == other.depth_in_meters and self.max_lat == other.max_lat and \
+            self.min_lat == other.min_lat and self.max_lon == other.max_lon and self.min_lon == other.min_lon and \
+            self.max_depth == other.max_depth and self.min_depth == other.min_depth
+
+    def __str__(self):
+        return self.get_lat_str() + self.get_lon_str() + self.get_depth_str()
+
+    @property
+    def max_lat(self):
+        """
+        Maximum latitude
+        :rtype: float
+        """
+        return self._max_lat
+
+    @max_lat.setter
+    def max_lat(self, value):
+        if value > 90 or value < -90:
+            raise ValueError('{0} is not a valid latitude. Must be between -90 and 90'.format(value))
+        self._max_lat = value
+
+    @property
+    def min_lat(self):
+        """
+        Minimum latitude
+        :rtype: float
+        """
+        return self._min_lat
+
+    @min_lat.setter
+    def min_lat(self, value):
+        if value > 90 or value < -90:
+            raise ValueError('{0} is not a valid latitude. Must be between -90 and 90'.format(value))
+        self._min_lat = value
+
+    @property
+    def max_lon(self):
+        """
+        Maximum longitude
+        :rtype: float
+        """
+        return self._max_lon
+
+    @max_lon.setter
+    def max_lon(self, value):
+        if value >= 360 or value <= -360:
+            raise ValueError('{0} is not a valid longitude. Must be between -360 and 360'.format(value))
+        self._max_lon = value
+
+    @property
+    def min_lon(self):
+        """
+        Minimum longitude
+        :rtype: float
+        """
+        return self._min_lon
+
+    @min_lon.setter
+    def min_lon(self, value):
+        if value >= 360 or value <= -360:
+            raise ValueError('{0} is not a valid longitude. Must be between -360 and 360'.format(value))
+        self._min_lon = value
+
+    def get_lat_str(self):
+        """
+        Gets a string representation of the latitude in the format XX{N/S}.
+        If min_lat is different from max_lat, it concatenates the two values
+        :return: string representation for latitude
+        :rtype: str
+        """
+        if self.max_lat is None or self.min_lat is None:
+            return ''
+        if self.min_lat < 0:
+            direction = 'S'
+        else:
+            direction = 'N'
+
+        string = str(abs(self.min_lat)) + direction
+
+        if self.max_lat != self.min_lat:
+            if self.max_lat < 0:
+                direction = 'S'
+            else:
+                direction = 'N'
+            string += str(abs(self.max_lat)) + direction
+
+        return string
+
+    def get_lon_str(self):
+        """
+        Gets a string representation of the longitude in the format XX{E/W}.
+        If min_lon is different from max_lon, it concatenates the two values
+        :return: string representation for longitude
+        :rtype: str
+        """
+        if self.max_lon is None or self.min_lon is None:
+            return ''
+        if self.min_lon < 0:
+            direction = 'W'
+        else:
+            direction = 'E'
+
+        string = str(abs(self.min_lon)) + direction
+
+        if self.max_lon != self.min_lon:
+            if self.max_lon < 0:
+                direction = 'W'
+            else:
+                direction = 'E'
+            string += str(abs(self.max_lon)) + direction
+        return string
+
+    def get_depth_str(self):
+        """
+        Gets a string representation of depth. For depth expressed in meters, it adds the character 'm' to the end.
+        If min_depth is different from max_depth, it concatenates the two values
+        :return: string representation for depth
+        :rtype: str
+        """
+        if self.max_depth is None or self.min_depth is None:
+            return ''
+
+        if self.depth_in_meters:
+            suffix = 'm'
+        else:
+            suffix = ''
+
+        string = str(abs(self.min_depth)) + suffix
+
+        if self.min_depth != self.max_depth:
+            string += '-' + str(abs(self.max_depth)) + suffix
+        return string
+
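A quick usage sketch for the Box class above (illustrative only, not part of the commit; the values are hypothetical
and the snippet uses Python 2 syntax, matching the module):

.. code-block:: python

    from earthdiagnostics.box import Box

    box = Box(depth_in_meters=True)
    box.min_lat, box.max_lat = 30, 40    # 30N to 40N
    box.min_lon, box.max_lon = -30, -10  # 30W to 10W
    box.min_depth, box.max_depth = 0, 300
    print str(box)  # prints '30N40N30W10W0m-300m'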
diff --git a/earthdiagnostics/cdftools.py b/earthdiagnostics/cdftools.py
new file mode 100644
index 0000000000000000000000000000000000000000..baa5623a24b0a95d201470a86956f7e105288729
--- /dev/null
+++ b/earthdiagnostics/cdftools.py
@@ -0,0 +1,66 @@
+# coding=utf-8
+from earthdiagnostics.utils import Utils
+import os
+from autosubmit.config.log import Log
+
+
+class CDFTools(object):
+    """
+    Class to run CDFTools executables
+
+    :param path: path to CDFTOOLS binaries
+    :type path: str
+    """
+
+    def __init__(self, path=''):
+        self.path = path
+
+    # noinspection PyShadowingBuiltins
+    def run(self, command, input, output=None, options=None, log_level=Log.INFO):
+        """
+        Runs one of the CDFTools
+
+        :param command: executable to run
+        :type command: str | iterable
+        :param input: input file
+        :type input: str
+        :param output: output file. Not all tools support this parameter
+        :type output: str
+        :param options: options for the tool
+        :type options: str | list[str] | Tuple[str]
+        :param log_level: log level at which the output of the cdftool command will be added
+        :type log_level: int
+        """
+
+        line = [os.path.join(self.path, command)]
+        if self.path and not os.path.exists(line[0]):
+            raise ValueError('Error executing {0}\n Command does not exist in {1}'.format(command, self.path))
+
+        if input:
+            if isinstance(input, basestring):
+                line.append(input)
+                if not os.path.exists(input):
+                    raise ValueError('Error executing {0}\n Input file {1} does not exist'.format(command, input))
+            else:
+                for element in input:
+                    line.append(element)
+                    if not os.path.exists(element):
+                        raise ValueError('Error executing {0}\n Input file {1} does not exist'.format(command,
+                                                                                                      element))
+        if options:
+            if isinstance(options, basestring):
+                options = options.split()
+            for option in options:
+                line.append(str(option))
+        if output:
+            if input == output:
+                raise ValueError('Input and output file can not be the same on CDFTools')
+            line.append('-o')
+            line.append(output)
+        Log.debug('Executing {0}', ' '.join(line))
+        shell_output = Utils.execute_shell_command(line, log_level)
+
+        if output:
+            if not os.path.exists(output):
+                raise Exception('Error executing {0}\n Output file not created'.format(' '.join(line)))
+
+        return shell_output
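A short usage sketch for CDFTools.run (illustrative only: the tool name, file names and option are hypothetical
examples, not a prescription; the cdftools instance is the one created in earthdiagnostics/__init__.py):

.. code-block:: python

    from earthdiagnostics import cdftools

    # run a CDFTOOLS binary with one (existing) input file, an explicit
    # output file and an extra command-line option
    cdftools.run('cdfmoc', 'example_input.nc', output='example_moc.nc', options='-full')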
diff --git a/earthdiagnostics/cdftoolspython.so b/earthdiagnostics/cdftoolspython.so
new file mode 100755
index 0000000000000000000000000000000000000000..2d3916295e882a4723b0fdda32ceab0ed0d26105
Binary files /dev/null and b/earthdiagnostics/cdftoolspython.so differ
diff --git a/earthdiagnostics/cmor_table.csv b/earthdiagnostics/cmor_table.csv
new file mode 100644
index 0000000000000000000000000000000000000000..1449bbcafb14c7c17afd010366b25c790f0ba505
--- /dev/null
+++ b/earthdiagnostics/cmor_table.csv
@@ -0,0 +1,300 @@
+Variable,Shortname,Name,Long name,Domain,Basin,Units,Valid min,Valid max,Grid
+iiceages:siage:iice_otd,ageice,age_of_sea_ice,Age of sea ice,seaIce,,,,,
+al,al,surface_albedo,Albedo,atmos,,,,,
+bgfrcsal,bgfrcsal,change_over_time_in_heat_content_from_forcing,Change over time in salt content from forcing,ocean,,,,,
+bgfrctem,bgfrctem,change_over_time_in_heat_content_from_forcing,Change over time in heat content from forcing,ocean,,,,,
+bgfrcvol,bgfrcvol,change_over_time_in_volume_from_forcing,Change over time in volume from forcing,ocean,,,,,
+bgheatco,bgheatco,change_over_time_in_heat_content,Change over time in sea water heat content,ocean,,,,,
+bgsaline,bgsaline,change_over_time_in_sea_water_practical_salinity,Change over time in sea water salinity,ocean,,,,,
+bgsaltco,bgsaltco,change_over_time_in_salt_content,Change over time in sea water salt content,ocean,,,,,
+bgtemper,bgtemper,change_over_time_in_sea_water_potential_temperature,Change over time in sea water potential temperature,ocean,,,,,
+bgvole3t,bgvole3t,change_over_time_in_volume_variation,Change over time in volume variation (e3t),ocean,,,,,
+bgvolssh,bgvolssh,change_over_time_in_sea_surface_height,Change over time in sea surface height,ocean,,,,,
+bld,bld,boundary_layer_dissipation,Boundary layer dissipation,atmos,,,,,
+iicebome:iocewflx,bmelt,tendency_of_sea_ice_amount_due_to_basal_melting,Rate of melt at sea ice base,seaIce,,,,,
+sobowlin,bowlin,bowl_index,Bowl index,ocean,,,,,
+cc,cl,cloud_area_fraction_in_atmosphere_layer,Cloud area fraction,atmos,,,,,
+hcc,clh,high_cloud_area_fraction,High cloud fraction,atmos,,,,,
+lcc,cll,low_cloud_area_fraction,Low cloud fraction,atmos,,,,,
+mcc,clm,medium_cloud_area_fraction,Medium cloud fraction,atmos,,,,,
+ciwc,cli,mass_fraction_of_cloud_ice_in_air,Mass fraction of cloud ice,atmos,,,,, +tcc,clt,cloud_area_fraction,Total cloud fraction,atmos,,,,, +clwc,clw,mass_fraction_of_cloud_liquid_water_in_air,Mass fraction of cloud liquid water,atmos,,,,, +tcw,clwvi,atmosphere_cloud_condensed_water_content,Condensed water path,atmos,,,,, +iicedive:sidive,divice,Strain Rate Divergence of Sea Ice,Divergence_of_sea_ice_velocity,seaIce,,,,, +e,evspsbl,water_evaporation_flux,Evaporation,atmos,,,,, +fal,fal,forecast_albedo,Forecast albedo,atmos,,,,, +sowaflep,fatmosocean,atmosphere_ocean_water_flux,Atmos=>ocean net freshwater,ocean,,,,, +sowaflcd,fdilution,dilution_water_flux,Concentration/dilution water flux,ocean,,,,, +sophtldf,fhbasindif,northward_ocean_heat_transport_due_to_diffusion,Northward ocean heat transport due to diffusion,ocean,,,,, +iowaflup,ficeocean,ice_ocean_water_flux,Ice=>ocean net freshwater,ocean,,,,, +sorunoff,friver,water_flux_into_sea_water_from_rivers,Water flux into sea water from rivers ,ocean,,,,, +sowaflup,fupward,upward_water_flux,Net upward water flux,ocean,,,,, +gwd,gwd,gravity_wave_dissipation,Gravity wave dissipation,atmos,,,,, +ibgheatco,hcicega,global mean ice heat content,Global mean ice heat content,seaIce,,,,, +sbgheatco,hcsnga,global mean snow heat content,Global mean snow heat content,seaIce,,,,, +heatc,heatc,integral_of_sea_water_potential_temperature_wrt_depth_expressed_as_heat_content,Heat content vertically integrated,ocean,,,,, +sohtatl,hfbasin,northward_ocean_heat_transport,Northward ocean heat transport,ocean,Atl,,,, +sohtind,hfbasin,northward_ocean_heat_transport,Northward ocean heat transport,ocean,Ind,,,, +sohtipc,hfbasin,northward_ocean_heat_transport,Northward ocean heat transport,ocean,IndPac,,,, +sohtpac,hfbasin,northward_ocean_heat_transport,Northward ocean heat transport,ocean,Pac,,,, +sophtadv,hfbasinadv,northward_ocean_heat_transport_due_to_advection,Northward ocean heat transport due to advection ,ocean,,,,, +sophteiv,hfbasinba,northward_ocean_heat_transport_due_to_bolus_advection,Northward ocean heat transport due to bolus advection ,ocean,,,,, +qt_oce:sohefldo:qt,hfds,surface_downward_heat_flux_in_sea_water,Downward heat flux at sea water surface,ocean,,,,, +slhf,hfls,surface_upward_latent_heat_flux,Surface upward latent heat flux,atmos,,,,, +sshf,hfss,surface_upward_sensible_heat_flux,Surface upward sensible heat flux,atmos,,,,, +sophtove,htovovrt,northward_ocean_heat_transport_due_to_overturning,Northward ocean heat transport due to overturning ,ocean,,,,, +q,hus,specific_humidity,Specific humidity,atmos,,,,, +soicealb,ialb,sea_ice_albedo,Sea ice albedo,seaIce,,,,, +ibgfrcsfx,ibgfrcsfx,global_mean_forcing_salt,Global mean forcing salt (sfx),seaIce,,,,, +ibgfrcvol,ibgfrcvol,globa_mean_forcing_volume,Global mean forcing volume (emp),seaIce,,,,, +ibghfxbog,ibghfxbog,heat_fluxes_causing_bottom_ice_growth,Heat fluxes causing bottom ice growth,seaIce,,,,, +ibghfxbom,ibghfxbom,heat_fluxes_causing_bottom_ice_melt,Heat fluxes causing bottom ice melt,seaIce,,,,, +ibghfxdhc,ibghfxdhc,Heat_content_variation_in_snow_and_ice,Heat content variation in snow and ice,seaIce,,,,, +ibghfxdif,ibghfxdif,heat_fluxes_causing_ice temperature_change,Heat fluxes causing ice temperature change,seaIce,,,,, +ibghfxdyn,ibghfxdyn,heat_fluxes_from_ice-ocean_exchange_during_dynamic,Heat fluxes from ice-ocean exchange during dynamic,seaIce,,,,, +ibghfxin,ibghfxin,total_heat_fluxes_at_the_ice_surface,Total heat fluxes at the ice surface,seaIce,,,,, 
+ibghfxopw,ibghfxopw,heat_fluxes_causing_open_water_ice_formation,Heat fluxes causing open water ice formation,seaIce,,,,, +ibghfxout,ibghfxout,non_solar_heat_fluxes_received_by_the_ocean,Non solar heat fluxes received by the ocean,seaIce,,,,, +ibghfxres,ibghfxres,heat_fluxes_from_ice-ocean_exchange_during_resultant,Heat fluxes from ice-ocean exchange during resultant,seaIce,,,,, +ibghfxsnw,ibghfxsnw,heat_fluxes_from_snow-ocean_exchange,Heat fluxes from snow-ocean exchange,seaIce,,,,, +ibghfxspr,ibghfxspr,Heat_content_of_snow_precip,Heat content of snow precip,seaIce,,,,, +ibghfxsub,ibghfxsub,heat_fluxes_from_sublimation,Heat fluxes from sublimation,seaIce,,,,, +ibghfxsum,ibghfxsum,heat_fluxes_causing_surface_ice_melt,Heat fluxes causing surface ice melt,seaIce,,,,, +ibghfxthd,ibghfxthd,heat_fluxes_from_ice-ocean_exchange_during_thermo,Heat fluxes from ice-ocean exchange during thermo,seaIce,,,,, +ibgsfxbog,ibgsfxbogga,salt_flux_thermo,Global mean salt flux (thermo),seaIce,,,,, +ibgsfxbom,ibgsfxbomga,salt_flux_bottom_melt,Global mean salt flux (bottom melt),seaIce,,,,, +ibgsfxbri,ibgsfxbriga,salt_flux_brines,Global mean salt flux (brines),seaIce,,,,, +ibgsfxdyn,ibgsfxdynga,salt_flux_dynamic,Global mean salt flux (dynamic),seaIce,,,,, +ibgsfx,ibgsfxga,salt_flux,Global mean salt flux (total),seaIce,,,,, +ibgsfxopw,ibgsfxopwga,salt_flux_open_waters,Global mean salt flux (open water),seaIce,,,,, +ibgsfxres,ibgsfxresga,salt_flux_resultant,Global mean salt flux (resultant),seaIce,,,,, +ibgsfxsni,ibgsfxsniga,salt_flux_snow_ice_growth,Global mean salt flux (snow-ice growth),seaIce,,,,, +ibgsfxsum,ibgsfxsumga,salt_flux_surface_melt,Global mean salt flux (surface melt),seaIce,,,,, +ibgvfxbog,ibgvfxbogga,volume_flux_bottom_growth,Global mean volume flux (bottom growth),seaIce,,,,, +ibgvfxbom,ibgvfxbomga,volume_flux_bottom_melt,Global mean volume flux (bottom melt),seaIce,,,,, +ibgvfxdyn,ibgvfxdynga,volume_flux_dynamic_growth,Global mean volume flux (dynamic growth),seaIce,,,,, +ibgvfx,ibgvfxga,volume_flux_emp,Global mean volume flux (emp),seaIce,,,,, +ibgvfxopw,ibgvfxopwga,volume_flux_open_water_growth,Global mean volume flux (open water growth),seaIce,,,,, +ibgvfxres,ibgvfxresga,volume_flux_resultant,Global mean volume flux (resultant),seaIce,,,,, +ibgvfxsni,ibgvfxsniga,volume_flux_snow_ice_growth,Global mean volume flux (snow-ice growth),seaIce,,,,, +ibgvfxsnw,ibgvfxsnwga,volume_flux_snow_melt,Global mean volume flux (snow melt),seaIce,,,,, +ibgvfxspr,ibgvfxsprga,snheco,Global mean volume flux (snow precip),seaIce,,,,, +ibgvfxsub,ibgvfxsubga,volume_flux_snow_sublimation,Global mean volume flux (snow sublimation),seaIce,,,,, +ibgvfxsum,ibgvfxsumga,volume_flux_surface_melt,Global mean volume flux (surface melt),seaIce,,,,, +ibgvolgrm,ibgvolgrm,global_mean_ice_growth+melt_volume,Global mean ice growth+melt volume,seaIce,,,,, +ibrinvol,ibrinvol,brine_volume,Brine volume,seaIce,,,,, +sibricat,ibrinvolcat,brine_volume_in_categories,Brine volume for categories,seaIce,,,,, +iicebopr,iicebopr,daily_bottom_thermo_ice_production,Daily bottom thermo ice production,seaIce,,,,, +iicecolf,iicecolf,frazil_ice_collection_thickness,Frazil ice collection thickness,seaIce,,,,, +iicedypr,iicedypr,daily_dynamic_ice_production,Daily dynamic ice production,seaIce,,,,, +iice_etd,iiceetd,brine_volume_distribution,Brine volume distribution,seaIce,,,,, +iicelapr,iicelapr,daily_lateral_thermo_ice_production,Daily lateral thermo ice prod.,seaIce,,,,, +iicenflx,iicenflx,nonsolar_flux_ice_ocean_surface,Non-solar flux at ice/ocean 
surface,seaIce,,,,, +iicesflx,iicesflx,solar_flux_ice_ocean_surface,Solar flux at ice/ocean surface,seaIce,,,,, +iiceshea,iiceshea,shear,Shear,seaIce,,,,, +iicesipr,iicesipr,daily_snowice_ice_production,Daily snowice ice production,seaIce,,,,, +iicfsbri,iicfsbri,brine_salt_flux,Fsbri - brine salt flux,seaIce,,,,, +iicfseqv,iicfseqv,equivalent_FW_salt_flux,Fseqv - equivalent fw salt flux,seaIce,,,,, +ioceflxb,ioceflxb,oceanic_flux_ar_ice_base,Oceanic flux at the ice base,seaIce,,,,, +iocehebr,iocehebr,heat_flux_due_to_brine_release,Heat flux due to brine release,seaIce,,,,, +iocesafl,iocesafl,salt_flux_ocean_surface,Salt flux at ocean surface,seaIce,,,,, +iocesflx,iocesflx,solar_fux_ocean_surface,Solar flux at ocean surface,seaIce,,,,, +iocetflx,iocetflx,total_flux_ocean_surface,Total flux at ocean surface,seaIce,,,,, +iocwnsfl,iocwnsfl,nonsolar_flux_ocean_surface,Non-solar flux at ocean surface,seaIce,,,,, +isstempe,isstempe,sea_surface_temperature,Sea surface temperature,seaIce,,K,,, +scmastot,masso,sea_water_mass,Sea water mass ,ocean,,,,, +mldkz5,mldkz5,ocean_mixed_layer_thickness_defined_by_vertical_tracer_diffusivity,Turbocline depth (kz = 5e-4),ocean,,,,, +somxl010:mldr10_1,mlotst,ocean_mixed_layer_thickness_defined_by_sigma_t,Ocean mixed layer thickness defined by sigma T ,ocean,,,,, +swvl1,mrlsl1,moisture_content_of_soil_layer_1, Water content of soil layer 1,land,,,,, +swvl2,mrlsl2,moisture_content_of_soil_layer_2, Water content of soil layer 2,land,,,,, +swvl3,mrlsl3,moisture_content_of_soil_layer_3, Water content of soil layer 3,land,,,,, +swvl4,mrlsl4,moisture_content_of_soil_layer_4, Water content of soil layer 4,land,,,,, +ro,mrro,runoff_flux,Total runoff,atmos,,,,, +tp:precip,pr,precipitation_flux,Precipitation,atmos,,,,, +cp,prc,convective_precipitation_flux,Convective precipitation,atmos,,,,, +lsp,prs,stratiform_precipitation_flux,Stratiform precipitation,atmos,,,,, +isnowpre,prsn,snowfall_flux,Surface snowfall rate into the sea ice portion of the grid cell,seaIce,,,,, +sf:snowpre,prsn,snowfall_flux,Snowfall flux,atmos,,,,, +tcwv,prw,atmosphere_water_vapor_content,Water vapor path,atmos,,,,, +msl,psl,air_pressure_at_sea_level,Sea level pressure,atmos,,,,, +qns_ice,qnsice,non_solar_heat_flux_at_ice_surface,Non-solar heat flux at ice surface: sum over categories,seaIce,,,,, +qt_ice,qtice,surface_downward_heat_flux_in_air,Surface downward heat flux in air,seaIce,,,,, +strd,rlds,surface_downwelling_longwave_flux_in_air,Surface downwelling longwave radiation,atmos,,,,, +strc:str,rls,surface_longwave_flux_in_air,Surface longwave radiation,atmos,,,,, +ttr,rlut,toa_outgoing_longwave_flux,Toa outgoing longwave radiation,atmos,,,,, +ttrc,rlutcs,toa_outgoing_longwave_flux_assuming_clear_sky,"Top net thermal radiation, clear sky",atmos,,,,, +ssrd,rsds,surface_downwelling_shortwave_flux_in_air,Surface downwelling shortwave radiation,atmos,,,,, +tsr,rsdt,toa_incoming_shortwave_flux,Toa incident shortwave radiation,atmos,,,,, +soshfldo,rsntds,net_downward_shortwave_flux_at_sea_water_surface,Net downward shortwave radiation at sea water surface ,ocean,,,,, +ssr,rss,surface_shortwave_flux_in_air,Surface shortwave radiation,atmos,,,,, +ssrc,rsscs,surface_shortwave_flux_in_air_assuming_clear_sky,Surface clear-sky shortwave radiation,atmos,,,,, +tsrc,rsut,toa_outgoing_shortwave_flux,Toa outgoing shortwave radiation,atmos,,,,, +saltc,saltc,salt_content_vertically_integrated,Salt content vertically integrated,ocean,,,,, +es,sbl,surface_snow_and_ice_sublimation_flux,Surface snow and ice 
sublimation flux,landIce,,,,, +sosalflx,sfs,salt_flux_surface,Surface salt flux,ocean,,,,, +si,si,solar_insolation,Solar insolation,atmos,,,,, +NArea,siarean,sea_ice_area,Total area of sea ice in the northern hemisphere,seaIce,,10^6 km2,,, +SArea,siareas,sea_ice_area,Total area of sea ice in the southern hemisphere,seaIce,,10^6 km2,,, +iiceconc:siconc:soicecov:ileadfra:ci,sic,sea_ice_area_fraction,Sea Ice Area Fraction,seaIce,,%,,, +ci,sic,sea_ice_area_fraction,Sea Ice Area Fraction,seaIce,,%,,,ifs +iice_itd:siconc_cat:siconcat,siccat,ice_area_in_categories,Ice area in categories,seaIce,,,,, +ibgarea,sicga,sea_ice_content,Global mean sea ice content,seaIce,,,,, +NExnsidc,siextentn,sea_ice_extent,Total area of all northern-hemisphere grid cells that are covered by at least 15 % areal fraction of sea ice,seaIce,,10^6 km2,,, +SExnsidc,siextents,sea_ice_extent,Total area of all southern-hemisphere grid cells that are covered by at least 15 % areal fraction of sea ice,seaIce,,10^6 km2,,, +iiceprod,sigr,ice_production,Ice production,seaIce,,,,, +iiceheco,siheco,integral_of_sea_ice_temperature_wrt_depth_expressed_as_heat_content,Sea ice heat content,seaIce,,,,, +ibgsaltco,sisaltcga,global mean ice salt content,Global mean ice salt content,seaIce,,,,, +iicethic:sithic,sit,sea_ice_thickness,Sea Ice Thickness,seaIce,,m,,, +iice_hid:sithic_cat:sithicat,sitcat,ice_thicknesss_in_categories,Ice thickness in categories,seaIce,,,,, +iicetemp,sitemp,ice_temperature,Mean ice temperature,seaIce,,K,,, +ibgtemper,sitempga,sea_ice_temperature,Global mean sea ice temperature,seaIce,,K,,, +iicevelo:sivelo,sivelo,ice_velocity,Ice velocity,seaIce,,,,, +iicevelu:sivelu,sivelu,ice_velocity_u,Ice velocity u,seaIce,,,,, +iicevelv:sivelv,sivelv,ice_velocity_v,Ice velocity v,seaIce,,,,, +ibgvoltot,sivolga,sea_ice_volume,Global mean sea ice volume,seaIce,,,,, +sivoln:NVolume,sivoln,sea_ice_volume,Total volume of sea ice in the northern hemisphere,seaIce,,10^3 km3,,, +sivols:SVolume,sivols,sea_ice_volume,Total volume of sea ice in the southern hemisphere,seaIce,,10^3 km3,,, +sivolu,sivolu,sea_ice_volume_per_unit_gridcell_area,Sea ice volume per gridcell area unit,seaIce,,,,, +sostatl,sltbasin,northward_ocean_salt_transport,Northward ocean salt transport,ocean,,,,, +sostind,sltbasin,northward_ocean_salt_transport,Northward ocean salt transport,ocean,,,,, +sostipc,sltbasin,northward_ocean_salt_transport,Northward ocean salt transport,ocean,,,,, +sostpac,sltbasin,northward_ocean_salt_transport,Northward ocean salt transport,ocean,,,,, +sopstadv,sltbasinadv,northward_ocean_salt_transport_due_to_advection,Northward ocean salt transport due to advection ,ocean,,,,, +sopsteiv,sltbasinba,northward_ocean_salt_transport_due_to_bolus_advection,Northward ocean salt transport due to bolus advection ,ocean,,,,, +sopstldf,sltbasindif,northward_ocean_salt_transport_due_to_diffusion,Northward ocean salt transport due to diffusion,ocean,,,,, +sltnortha,sltnortha,northward_ocean_salt_transport,Atlantic northward ocean salt transport,ocean,,,,, +sopstove,sltovovrt,northward_ocean_salt_transport_due_to_overturning,Northward ocean salt transport due to overturning ,ocean,,,,, +zosalatl,sltzmean,zonal_mean_salinity,Zonal mean salinity,ocean,Atl,psu,,, +zosalglo,sltzmean,zonal_mean_salinity,Zonal mean salinity,ocean,Glob,psu,,, +zosalind,sltzmean,zonal_mean_salinity,Zonal mean salinity,ocean,Ind,psu,,, +zosalipc,sltzmean,zonal_mean_salinity,Zonal mean salinity,ocean,IndPac,psu,,, +zosalpac,sltzmean,zonal_mean_salinity,Zonal mean 
salinity,ocean,Pac,psu,,, +asn,snal,snow_albedo,Snow albedo,landIce,,,,, +iice_hsd:snthicat,sndcat,snow_thickness_in_categories,Snow thickness in in categories,seaIce,,,,, +isnoheco,snheco,snow_heat_content,Snow total heat content,seaIce,,,,, +sd,snld,lwe_thickness_of_surface_snow_amount,Snow depth,atmos,,,,, +smlt,snm,surface_snow_melt_flux,Surface snow melt,landIce,,,,, +isnowthi,snthic,surface_snow_thickness,Surface snow thickness,seaIce,,,,, +sbgvoltot,snvolga,snow_volume,Global mean snow volume,seaIce,,,,, +snvolu,snvolu,snow_volume_per_unit_gridcell_area,Snow volume per gridcell area unit,seaIce,,,,, +vosaline:mean_3Dsosaline,so,sea_water_salinity,Sea water salinity,ocean,,psu,,, +scsaltot,soga,sea_water_salinity,Global mean sea water salinity ,ocean,,psu,,, +hfnortha,sohtatl,northward_ocean_heat_transport,Atlantic northward ocean heat transport,ocean,,,,, +soleaeiw,soleaeiw,eddy_induced_velocity_coefficient,Eddy induced vel. coeff. at w-point,ocean,,,,, +soleahtw,soleahtw,lateral_eddy_diffusivity,Lateral eddy diffusivity,ocean,,,,, +somixhgt,somixhgt,mixing_layer_depth_turbocline,Mixing layer depth (turbocline),ocean,,,,, +sosaline:isssalin:mean_sosaline,sos,sea_surface_salinity,Sea surface salinity ,ocean,,psu,,, +sothedep,sothedep,thermocline_depth,Thermocline depth (max dt/dz),ocean,,,,, +src,src,skin_reservoir_content,Skin reservoir content,land,,,,, +zosrfatl,srfzmean,zonal_mean_surface,Zonal mean surface,ocean,Atl,,,, +zosrfglo,srfzmean,zonal_mean_surface,Zonal mean surface,ocean,Glob,,,, +zosrfind,srfzmean,zonal_mean_surface,Zonal mean surface,ocean,Ind,,,, +zosrfipc,srfzmean,zonal_mean_surface,Zonal mean surface,ocean,IndPac,,,, +zosrfpac,srfzmean,zonal_mean_surface,Zonal mean surface,ocean,Pac,,,, +rsn,srho,snow_density,Snow density,landIce,,,,, +iicesali:iice_std,ssi,sea_ice_salinity,Sea ice salinity,seaIce,,psu,,, +salincat,ssicat,sea_ice_salinity_in_categories,Sea-ice bulk salinity for categories,seaIce,,psu,,, +ibgsaline,ssiga,sea_ice_salinity,Global mean sea ice salinity ,seaIce,,psu,,, +iicestre,streng,compressive_strength_of_sea_ice,Compressive sea ice strength,seaIce,,,,, +so20chgt,t20d,depth_of_isosurface_of_sea_water_potential_temperature,,ocean,,,,, +t,ta,air_temperature,Air temperature,atmos,,K,,, +t2m,tas,air_temperature,Near-surface air temperature,atmos,,K,,, +mx2t,tasmax,air_temperature,Daily maximum near-surface air temperature,atmos,,K,,, +mn2t,tasmin,air_temperature,Daily minimum near-surface air temperature,atmos,,K,,, +ewss,tauu,surface_downward_eastward_stress,Surface downward eastward wind stress,atmos,,,,, +utau_ice:iocestru:iicestru,strairx,surface_downward_x_stress,X-Component of Atmospheric Stress On Sea Ice,seaIce,,N m-2,,, +sozotaux,tauuo,surface_downward_x_stress,Surface downward x stress ,ocean,,,,, +nsss,tauv,surface_downward_northward_stress,Surface downward northward wind stress,atmos,,,,, +vtau_ice:iocestrv:iicestrv,strairy,surface_downward_y_stress,Y-Component of Atmospheric Stress On Sea Ice,seaIce,,N m-2,,, +sozotauy:sometauy,tauvo,surface_downward_y_stress,Surface downward y stress ,ocean,,,,, +d2m,tdps,dew_point_temperature,2m dewpoint temperature,atmos,,K,,, +votemper:mean_3Dsosstsst,thetao,sea_water_potential_temperature,Sea water potential temperature,ocean,,K,,, +sctemtot,thetaoga,sea_water_potential_temperature,Global average sea water potential temperature ,ocean,,K,,, +iicesume,tmelt,tendency_of_sea_ice_amount_due_to_surface_melting,Rate of melt at upper surface of sea ice,seaIce,,,,, 
+sosstsst:mean_sosstsst,tos,sea_surface_temperature,Sea surface temperature ,ocean,,K,,, +sstk,tos,sea_surface_temperature,Sea surface temperature ,ocean,,K,,,ifs +tossq,tossq,square_of_sea_surface_temperature,Square of sea surface temperature ,ocean,,K2,,, +zotematl,toszmean,zonal_mean_temperature,Zonal mean temperature,ocean,Atl,K,,, +zotemglo,toszmean,zonal_mean_temperature,Zonal mean temperature,ocean,Glob,K,,, +zotemind,toszmean,zonal_mean_temperature,Zonal mean temperature,ocean,Ind,K,,, +zotemipc,toszmean,zonal_mean_temperature,Zonal mean temperature,ocean,IndPac,K,,, +zotempac,toszmean,zonal_mean_temperature,Zonal mean temperature,ocean,Pac,K,,, +skt,ts,surface_temperature,Surface temperature,atmos,,K,,, +iicesurt:soicetem:sistem,tsice,surface_temperature,Surface temperature of sea ice,seaIce,,K,,, +istl1,tsice,surface_temperature,Surface temperature of ice,landIce,,K,,, +stl1,tsl1,soil_temperature_level_1,Temperature of soil level 1,land,,,,, +stl2,tsl2,soil_temperature_level_2,Temperature of soil level 2,land,,,,, +stl3,tsl3,soil_temperature_level_3,Temperature of soil level 3,land,,,,, +stl4,tsl4,soil_temperature_level_4,Temperature of soil level 4,land,,,,, +tsn,tsn,temperature_in_surface_snow,Snow internal temperature,landIce,,,,, +u,ua,eastward_wind,U velocity,atmos,,,,, +u10m,uas,eastward_wind,Eastward near-surface wind,atmos,,,,, +vozocrtx,uo,sea_water_x_velocity,Sea water x velocity,ocean,,,,, +v,va,northward_wind,V velocity,atmos,,,,, +v10m,vas,northward_wind,Northward near-surface wind,atmos,,,,, +vomecrty,vo,sea_water_y_velocity,Sea water y velocity,ocean,,,,, +voddmavs,voddmavs,salt_vertical_eddy_diffusivity,Salt vertical eddy diffusivity,ocean,,,,, +vozoeivu,voeivu,sea_water_x_EIV_current,Zonal eiv current,ocean,,,,, +vomeeivv,voeivv,sea_water_y_EIV_current,Meridional eiv current,ocean,,,,, +voveeivw,voeivz,sea_water_z_EIV_current,Vertical eiv current,ocean,,,,, +scvoltot,volo,sea_water_volume,Sea water volume ,ocean,,,,, +votkeavm,votkeavm,vertical_eddy_viscosity,Vertical eddy viscosity,ocean,,,,, +votkeavt,votkeavt,vertical_eddy_diffusivity,Vertical eddy diffusivity,ocean,,,,, +votkeevd,votkeevd,enhanced_vertical_diffusivity,Enhanced vertical diffusivity,ocean,,,,, +votkeevm,votkeevm,enhanced_vertical_viscosity,Enhanced vertical viscosity,ocean,,,,, +sobarstf,vsftbarot,ocean_barotropic_volume_streamfunction,Ocean barotropic volume streamfunction ,ocean,,,,, +zomsfatl,vsftmyz,ocean_meridional_overturning_volume_streamfunction,Ocean meridional overturning volume streamfunction ,ocean,Atl,,,, +zomsfglo,vsftmyz,ocean_meridional_overturning_volume_streamfunction,Ocean meridional overturning volume streamfunction ,ocean,Glob,,,, +zomsfind,vsftmyz,ocean_meridional_overturning_volume_streamfunction,Ocean meridional overturning volume streamfunction ,ocean,Ind,,,, +zomsfipc:zomsfinp,vsftmyz,ocean_meridional_overturning_volume_streamfunction,Ocean meridional overturning volume streamfunction ,ocean,IndPac,,,, +zomsfpac,vsftmyz,ocean_meridional_overturning_volume_streamfunction,Ocean meridional overturning volume streamfunction ,ocean,Pac,,,, +zomsfeiv,vsftmyzba,ocean_meridional_overturning_mass_streamfunction_due_to_bolus_advection,Ocean meridional overturning volume streamfunction due to bolus advection ,ocean,,,,, +w,wa,vertical_velocity,Vertical velocity,atmos,,,,, +z,zg,geopotential_height,Geopotential height,atmos,,,,, +vovecrtz,zo,sea_water_z_velocity,Sea water z velocity,ocean,,,,, +sossheigh:sossheig:mean_sossheig,zos,sea_surface_height_above_geoid,Sea surface height 
above geoid,ocean,,,,,
+scsshtot,zosga,global_average_sea_level_change,Global average sea level change,ocean,,,,,
+scsshste,zossga,global_average_steric_sea_level_change,Global average steric sea level change,ocean,,,,,
+zossq,zossq,square_of_sea_surface_height_above_geoid,Square of sea surface height above geoid,ocean,,,,,
+scsshtst,zostoga,global_average_thermosteric_sea_level_change,Global average thermosteric sea level change,ocean,,,,,
+heatc,ohc,ocean_heat_content,Ocean heat content,ocean,,J,,,
+ohcsum,ohcsum,total_ocean_heat_content,Total Ocean heat content,ocean,,J,,,
+ohcvmean,ohcvmean,average_ocean_heat_content,Average Ocean heat content,ocean,,J m-3,,,
+ohc,ohc,ocean_heat_content,Ocean heat content,ocean,,J,,,
+transix,transix,sea_ice_x_transport,X-Component of Sea Ice Mass Transport,seaIce,,kg s-1,,,
+transiy,transiy,sea_ice_y_transport,Y-Component of Sea Ice Mass Transport,seaIce,,kg s-1,,,
+windsp,sfcWind,wind_speed,Near-Surface Wind Speed,atmos,,,,,
+vsfsit,vsfsit,virtual_salt_flux_into_sea_water_due_to_sea_ice_thermodynamics,Virtual Salt Flux into Sea Water due to Sea Ice Thermodynamics,ocean,,,,,
+sfdsi,sfdsi,downward_sea_ice_basal_salt_flux,Downward Sea Ice Basal Salt Flux,ocean,,,,,
+hfsithermds,hfsithermds,heat_flux_into_sea_water_due_to_sea_ice_thermodynamics,Heat Flux into Sea Water due to Sea Ice Thermodynamics,ocean,,,,,
+u2o,uosq,square_of_sea_water_x_velocity,Square of Sea Water X Velocity,ocean,,,,,
+v2o,vosq,square_of_sea_water_y_velocity,Square of Sea Water Y Velocity,ocean,,,,,
+vozomatr,umo,ocean_mass_x_transport,Ocean Mass X Transport,ocean,,,,,
+vomematr,vmo,ocean_mass_y_transport,Ocean Mass Y Transport,ocean,,,,,
+sozohetr,hfx,ocean_heat_x_transport,Ocean Heat X Transport,ocean,,,,,
+somehetr,hfy,ocean_heat_y_transport,Ocean Heat Y Transport,ocean,,,,,
+uto,uothetao,product_of_xward_sea_water_velocity_and_temperature,Product of X-ward Sea Water Velocity and Temperature,ocean,,,,,
+vto,vothetao,product_of_yward_sea_water_velocity_and_temperature,Product of Y-ward Sea Water Velocity and Temperature,ocean,,,,,
+uso,uoso,product_of_xward_sea_water_velocity_and_salinity,Product of X-ward Sea Water Velocity and Salinity,ocean,,,,,
+vso,voso,product_of_yward_sea_water_velocity_and_salinity,Product of Y-ward Sea Water Velocity and Salinity,ocean,,,,,
+wfo,wfo,water_flux_into_sea_water,Water Flux into Sea Water,ocean,,,,,
+emp_oce,evsmpr,evap_minus_precip_over_sea_water,Evap minus Precip over ocean,ocean,,,,,
+emp_ice,evsmpr,evap_minus_precip_over_sea_ice,Evap minus Precip over ice,seaIce,,,,,
+qsr_oce,rsntds,net_downward_shortwave_flux_at_sea_water_surface,Net Downward Shortwave Radiation at Sea Water Surface,ocean,,,,,
+qns_oce,rlds,surface_net_downward_longwave_flux,Surface Net Downward Longwave Radiation,ocean,,,,,
+qsr_ice,rsdssi,surface_downwelling_shortwave_flux_in_air,Downwelling Shortwave over Sea Ice,seaIce,,,,,
+qns_ice,rldssi,surface_downwelling_longwave_flux_in_air,Downwelling Long Wave over Sea Ice,seaIce,,,,,
+sfx,sfx,downward_salt_flux,Downward Salt Flux,ocean,,,,,
+taum,taum,surface_downward_stress_module,Surface Downward Stress Module,ocean,,,,,
+zfull,zfull,depth_below_geoid,Depth Below Geoid of Ocean Layer,ocean,,,,,
+zhalf,zhalf,depth_below_geoid,Depth Below Geoid of Ocean Layer,ocean,,,,,
+pbo,pbo,sea_water_pressure_at_sea_floor,Sea Water Pressure at Sea Floor,ocean,,,,,
+thkcello,thkcello,cell_thickness,Cell Thickness,ocean,,,,,
+ficeberg,ficeberg,water_flux_into_sea_water_from_icebergs,Water Flux into Sea Water From Icebergs,ocean,,,,,
+rsdo,rsds,downwelling_shortwave_flux_in_sea_water,Downwelling Shortwave Radiation in Sea Water ,ocean,,,,, +wo,wo,sea_water_upward_velocity,Sea Water Upward Velocity ,ocean,,,,, +w2o,wosq,square_of_sea_water_upward_velocity,Square of Sea Water Upward Velocity ,ocean,,,,, +difvho,difvho,ocean_vertical_heat_diffusivity,Ocean Vertical Heat Diffusivity,ocean,,,,, +vovematr,wmo,upward_ocean_mass_transport,Upward Ocean Mass Transport ,ocean,,,,, +qtr_ice,qtr,shortwave_flux_transmitted_through_ice,Shortwave Flux Transmitted Through The Ice,seaIce,,,,, diff --git a/earthdiagnostics/cmorizer.py b/earthdiagnostics/cmorizer.py new file mode 100644 index 0000000000000000000000000000000000000000..dc8d9db1c1d79fcf9390e7a686102ad347154583 --- /dev/null +++ b/earthdiagnostics/cmorizer.py @@ -0,0 +1,518 @@ +# coding=utf-8 +import glob +import shutil +import uuid + +import os +from datetime import datetime + +import pygrib +from autosubmit.config.log import Log +from autosubmit.date.chunk_date_lib import parse_date, chunk_end_date, previous_day, date2str, add_months + +from earthdiagnostics.variable import Variable, Domains +from earthdiagnostics.utils import TempFile, Utils + + +class Cmorizer(object): + """ + Class to manage CMORization + + :param data_manager: experiment's data manager + :type data_manager: DataManager + :param startdate: startdate to cmorize + :type startdate: str + :param member: member to cmorize + :type member: int + + """ + + NON_DATA_VARIABLES = ('lon', 'lat', 'time', 'time_bnds', 'leadtime', 'lev', 'icethi', + 'deptht', 'depthu', 'depthw', 'depthv', 'time_centered', 'time_centered_bounds', + 'deptht_bounds', 'depthu_bounds', 'depthv_bounds', 'depthw_bounds', + 'deptht_bnds', 'depthu_bnds', 'depthv_bnds', 'depthw_bnds', + 'time_counter_bounds', 'ncatice', 'nav_lat_grid_V', 'nav_lat_grid_U', + 'nav_lat_grid_T', 'nav_lon_grid_V', 'nav_lon_grid_U', 'nav_lon_grid_T', + 'depth', 'depth_2', 'depth_3', 'depth_4', + 'mlev', 'hyai', 'hybi', 'hyam', 'hybm') + + ALT_COORD_NAMES = {'time_counter': 'time', 'time_counter_bnds': 'time_bnds', 'time_counter_bounds': 'time_bnds', + 'tbnds': 'bnds', 'nav_lat': 'lat', 'nav_lon': 'lon', 'x': 'i', 'y': 'j'} + + def __init__(self, data_manager, startdate, member): + self.data_manager = data_manager + self.startdate = startdate + self.member = member + self.config = data_manager.config + self.experiment = self.config.experiment + self.cmor = self.config.cmor + self.member_str = self.experiment.get_member_str(member) + self.original_files_path = os.path.join(self.config.data_dir, self.experiment.expid, 'original_files', + self.startdate, self.member_str, 'outputs') + self.atmos_timestep = None + self.cmor_scratch = os.path.join(self.config.scratch_dir, 'CMOR') + + def cmorize_ocean(self): + """ + CMORizes ocean files from MMO files + :return: + """ + if not self.cmor.ocean: + return + self._cmorize_ocean_files('MMO') + self._cmorize_ocean_files('PPO') + self._cmorize_ocean_files('diags') + + def _cmorize_ocean_files(self, prefix): + tar_folder = os.path.join(self.original_files_path, '{0}*'.format(prefix)) + tar_files = glob.glob(tar_folder) + tar_files.sort() + count = 1 + for tarfile in tar_files: + Log.info('Unpacking oceanic file {0}/{1}'.format(count, len(tar_files))) + self._unpack_tar_file(tarfile) + self._cmorize_nc_files() + Log.result('Oceanic file {0}/{1} finished'.format(count, len(tar_files))) + count += 1 + + def _cmorize_nc_files(self): + for filename in glob.glob(os.path.join(self.cmor_scratch, '*.nc')): + self._cmorize_nc_file(filename) + 
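+    # Each MMO/PPO/diags tarball is unpacked into the scratch CMOR folder and any
+    # gzipped members are expanded there before the netCDF files inside are
+    # cmorized one by one.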
+    def _unpack_tar_file(self, tarfile):
+        if os.path.exists(self.cmor_scratch):
+            shutil.rmtree(self.cmor_scratch)
+        os.makedirs(self.cmor_scratch)
+        Utils.untar((tarfile,), self.cmor_scratch)
+        Utils.unzip(glob.glob(os.path.join(self.cmor_scratch, '*.gz')))
+
+    def _merge_mma_files(self, tarfile):
+        temp = TempFile.get()
+        for filename in glob.glob(os.path.join(self.cmor_scratch, 'MMA_*_SH_*.nc')):
+            Utils.cdo.sp2gpl(options='-O', input=filename, output=temp)
+            shutil.move(temp, filename)
+        sh_files = glob.glob(os.path.join(self.cmor_scratch, 'MMA_*_SH_*.nc'))
+        Utils.cdo.mergetime(input=sh_files, output=os.path.join(self.cmor_scratch, 'sh.nc'))
+        gg_files = glob.glob(os.path.join(self.cmor_scratch, 'MMA_*_GG_*.nc'))
+        Utils.cdo.mergetime(input=gg_files, output=os.path.join(self.cmor_scratch, 'gg.nc'))
+        for filename in sh_files + gg_files:
+            os.remove(filename)
+        Utils.nco.ncks(input=os.path.join(self.cmor_scratch, 'sh.nc'),
+                       output=os.path.join(self.cmor_scratch, 'gg.nc'), options='-A')
+        os.remove(os.path.join(self.cmor_scratch, 'sh.nc'))
+        tar_startdate = tarfile[0:-4].split('_')[5].split('-')
+        new_name = 'MMA_1m_{0[0]}_{0[1]}.nc'.format(tar_startdate)
+        shutil.move(os.path.join(self.cmor_scratch, 'gg.nc'), os.path.join(self.cmor_scratch, new_name))
+
+    def cmorize_atmos(self):
+        """
+        CMORizes atmospheric data, from grib or MMA files
+        :return:
+        """
+        if not self.cmor.atmosphere:
+            return
+
+        if self.cmor.use_grib and self.gribfiles_available():
+            self._cmorize_grib_files()
+        else:
+            self._cmorize_mma_files()
+
+    def _cmorize_mma_files(self):
+        tar_files = glob.glob(os.path.join(self.original_files_path, 'MMA*'))
+        tar_files.sort()
+        count = 1
+        for tarfile in tar_files:
+            Log.info('Unpacking atmospheric file {0}/{1}'.format(count, len(tar_files)))
+            self._unpack_tar_file(tarfile)
+            self._merge_mma_files(tarfile)
+            self._cmorize_nc_files()
+            Log.result('Atmospheric file {0}/{1} finished'.format(count, len(tar_files)))
+            count += 1
+
+    def _cmorize_grib_files(self):
+        count = 1
+        chunk_start = parse_date(self.startdate)
+
+        while os.path.exists(self.get_original_grib_path(chunk_start, 'GG')) or \
+                os.path.exists(self.get_original_grib_path(chunk_start, 'SH')):
+
+            chunk_end = chunk_end_date(chunk_start, self.experiment.chunk_size, 'month', 'standard')
+            chunk_end = previous_day(chunk_end, 'standard')
+            Log.info('CMORizing chunk {0}-{1}', date2str(chunk_start), date2str(chunk_end))
+            for grid in ('SH', 'GG'):
+                Log.info('Processing {0} variables', grid)
+
+                if not os.path.exists(self.get_original_grib_path(chunk_start, grid)):
+                    continue
+                self.cmorize_grib_file(chunk_end, chunk_start, count, grid)
+            chunk_start = chunk_end_date(chunk_start, self.experiment.chunk_size, 'month', 'standard')
+
+    def cmorize_grib_file(self, chunk_end, chunk_start, count, grid):
+        for month in range(0, self.experiment.chunk_size):
+            current_date = add_months(chunk_start, month, 'standard')
+            original_gribfile = self.get_original_grib_path(current_date, grid)
+            Log.info('Processing month {0}', date2str(current_date))
+            gribfile = self.get_scratch_grib_path(current_date, grid)
+            if not os.path.isfile(gribfile):
+                Log.info('Copying file...')
+                Utils.copy_file(original_gribfile, gribfile)
+
+            self._obtain_atmos_timestep(gribfile)
+
+            prev_gribfile = self.get_scratch_grib_path(add_months(current_date, -1, 'standard'), grid)
+            if os.path.exists(prev_gribfile):
+                self._merge_grib_files(current_date, prev_gribfile, gribfile)
+                full_file = 'ICM'
+            else:
+                full_file = gribfile
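+            # If last month's grib file is still on scratch, _merge_grib_files
+            # splices its records for the current month into 'ICM', so the
+            # -shifttime corrections applied later in _ungrib_vars still find
+            # data at the month boundary.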
+ + Log.info('Unpacking... ') + # remap on regular Gauss grid + if grid == 'SH': + Utils.cdo.splitparam(input='-sp2gpl {0}'.format(full_file), output=gribfile + '_', + options='-f nc4') + else: + Utils.cdo.splitparam(input=full_file, output=gribfile + '_', options='-R -f nc4') + # total precipitation (remove negative values) + Utils.cdo.setcode(228, input='-setmisstoc,0 -setvrange,0,Inf -add ' + '{0}_{{142,143}}.128.nc'.format(gribfile), + output='{0}_228.128.nc'.format(gribfile)) + Utils.remove_file('ICM') + next_gribfile = self.get_original_grib_path(add_months(current_date, 1, 'standard'), grid) + + if not os.path.exists(next_gribfile): + os.remove(gribfile) + + cdo_reftime = parse_date(self.startdate).strftime('%Y-%m-%d,00:00') + + self._ungrib_vars(cdo_reftime, gribfile, current_date.month, '{0}hr'.format(self.atmos_timestep)) + self._ungrib_vars(cdo_reftime, gribfile, current_date.month, '1d') + self._ungrib_vars(cdo_reftime, gribfile, current_date.month, '1m') + + for splited_file in glob.glob('{0}_*.128.nc'.format(gribfile)): + os.remove(splited_file) + + Log.result('Month {0}, {1} variables finished', date2str(current_date), grid) + count += 1 + self._merge_and_cmorize_atmos(chunk_start, chunk_end, grid, '1m') + self._merge_and_cmorize_atmos(chunk_start, chunk_end, grid, '1d') + self._merge_and_cmorize_atmos(chunk_start, chunk_end, grid, + '{0}hr'.format(self.atmos_timestep)) + + def get_scratch_grib_path(self, current_date, grid): + return os.path.join(self.config.scratch_dir, self._get_grib_filename(grid, current_date)) + + def _obtain_atmos_timestep(self, gribfile): + if self.atmos_timestep is None: + self.atmos_timestep = self._get_atmos_timestep(gribfile) + + def get_original_grib_path(self, current_date, grid): + return os.path.join(self.original_files_path, + self._get_grib_filename(grid, current_date)) + + def _get_grib_filename(self, grid, month): + return 'ICM{0}{1}+{2}.grb'.format(grid, self.experiment.expid, date2str(month)[:-2]) + + def _get_atmos_timestep(self, gribfile): + Log.info('Getting timestep...') + grib_handler = pygrib.open(gribfile) + mes1 = grib_handler.message(1) + mes2 = grib_handler.readline() + while mes2.analDate == mes1.analDate: + mes2 = grib_handler.readline() + atmos_timestep = mes2.analDate - mes1.analDate + atmos_timestep = int(atmos_timestep.total_seconds() / 3600) + self.experiment.atmos_timestep = atmos_timestep + grib_handler.close() + return atmos_timestep + + def _cmorize_nc_file(self, filename): + Log.info('Processing file {0}', filename) + + if not self._contains_requested_variables(filename): + os.remove(filename) + return + + Utils.convert2netcdf4(filename) + frequency = self._get_nc_file_frequency(filename) + Utils.rename_variables(filename, Cmorizer.ALT_COORD_NAMES, False, True) + self._add_common_attributes(filename, frequency) + self._update_time_variables(filename) + + handler = Utils.openCdf(filename) + Log.info('Splitting file {0}', filename) + for variable in handler.variables.keys(): + if variable in Cmorizer.NON_DATA_VARIABLES: + continue + self.extract_variable(filename, handler, frequency, variable) + Log.result('File {0} cmorized!', filename) + handler.close() + os.remove(filename) + + def _get_nc_file_frequency(self, filename): + file_parts = os.path.basename(filename).split('_') + if self.experiment.expid in [file_parts[1], file_parts[2]]: + frequency = 'm' + elif self.experiment.expid == file_parts[0]: + try: + parse_date(file_parts[1]) + frequency = 'm' + except ValueError: + frequency = file_parts[1][1].lower() + 
+        else:
+            frequency = file_parts[1][1].lower()
+        return frequency
+
+    def _contains_requested_variables(self, filename):
+        variables = Utils.get_file_variables(filename)
+        return self.cmor.any_required(variables)
+
+    def extract_variable(self, file_path, handler, frequency, variable):
+        """
+        Extracts a variable from a file and creates the CMOR file
+
+        :param file_path: path to the file
+        :type file_path: str
+        :param handler: netCDF4 handler for the file
+        :type handler: netCDF4.Dataset
+        :param frequency: variable's frequency
+        :type frequency: str
+        :param variable: variable's name
+        :type variable: str
+        """
+        temp = TempFile.get()
+        file_parts = os.path.basename(file_path).split('_')
+        var_cmor = Variable.get_variable(variable)
+        if var_cmor is None:
+            return
+        if not self.cmor.cmorize(var_cmor):
+            return
+        frequency = self.translate_frequency(frequency)
+        Utils.nco.ncks(input=file_path, output=temp, options='-v {0}'.format(variable))
+        self._rename_level_variables(temp, var_cmor)
+
+        self._add_coordinate_variables(handler, temp)
+
+        if var_cmor.basin is None:
+            region = None
+        else:
+            region = var_cmor.basin.fullname
+
+        if file_parts[0] == self.experiment.expid or file_parts[0].startswith('ORCA') or \
+                file_parts[0] in ('MMA', 'MMO'):
+            # Model output
+            date_str = '{0}-{1}'.format(file_parts[2][0:6], file_parts[3][0:6])
+        elif file_parts[1] == self.experiment.expid:
+            # Files generated by the old version of the diagnostics
+            date_str = '{0}-{1}'.format(file_parts[4][0:6], file_parts[5][0:6])
+        else:
+            Log.error('Variable {0} can not be cmorized. Original filename does not match a recognized pattern',
+                      var_cmor.short_name)
+            raise CMORException('Variable {0}:{1} can not be cmorized. Original filename does not match a recognized '
+                                'pattern'.format(var_cmor.domain, var_cmor.short_name))
+
+        self.data_manager.send_file(temp, var_cmor.domain, var_cmor.short_name, self.startdate, self.member,
+                                    frequency=frequency, rename_var=variable, date_str=date_str, region=region,
+                                    move_old=True, grid=var_cmor.grid, cmorized=True)
+
+    @staticmethod
+    def _add_coordinate_variables(handler, temp):
+        handler_cmor = Utils.openCdf(temp)
+        Utils.copy_variable(handler, handler_cmor, 'lon', False)
+        Utils.copy_variable(handler, handler_cmor, 'lat', False)
+        if 'time' in handler_cmor.dimensions.keys():
+            Utils.copy_variable(handler, handler_cmor, 'leadtime', False)
+        handler_cmor.close()
+
+    @staticmethod
+    def _rename_level_variables(temp, var_cmor):
+        if var_cmor.domain == Domains.ocean:
+            Utils.rename_variables(temp, {'deptht': 'lev', 'depthu': 'lev', 'depthw': 'lev', 'depthv': 'lev',
+                                          'depth': 'lev'}, False, True)
+        if var_cmor.domain in [Domains.landIce, Domains.land]:
+            Utils.rename_variables(temp, {'depth': 'sdepth', 'depth_2': 'sdepth', 'depth_3': 'sdepth',
+                                          'depth_4': 'sdepth'}, False, True)
+        if var_cmor.domain == Domains.atmos:
+            Utils.rename_variables(temp, {'depth': 'plev'}, False, True)
+
+    @staticmethod
+    def translate_frequency(frequency):
+        if frequency == 'd':
+            frequency = 'day'
+        elif frequency == 'm':
+            frequency = 'mon'
+        elif frequency == 'h':
+            frequency = '6hr'
+        else:
+            raise Exception('Frequency {0} not supported'.format(frequency))
+        return frequency
+
+    @staticmethod
+    def _merge_grib_files(current_month, prev_gribfile, gribfile):
+        Log.info('Merging data from different files...')
+        fd = open('rules_files', 'w')
+        fd.write('if (dataDate >= {0.year}{0.month:02}01) {{ write ; }}\n'.format(current_month))
+        fd.close()
+        # get first timestep for each month from previous file (if possible)
+        if os.path.exists('ICM'):
+            os.remove('ICM')
+        Utils.execute_shell_command('grib_filter -o ICM rules_files '
+                                    '{0} {1}'.format(os.path.basename(prev_gribfile),
+                                                     os.path.basename(gribfile)))
+        os.remove('rules_files')
+        Utils.remove_file(prev_gribfile)
+
+    def _ungrib_vars(self, cdo_reftime, gribfile, month, frequency):
+        Log.info('Preparing {0} variables'.format(frequency))
+        var_codes = self.config.cmor.get_variables(frequency)
+        for var_code in var_codes:
+            if not os.path.exists('{0}_{1}.128.nc'.format(gribfile, var_code)):
+                continue
+            new_units = None
+
+            cdo_operator = '-selmon,{0}'.format(month)
+            if frequency in ('month', 'monthly', 'mon', '1m'):
+                if var_code == 201:
+                    cdo_operator = "-monmean -daymax {0}".format(cdo_operator)
+                elif var_code == 202:
+                    cdo_operator = "-monmean -daymin {0}".format(cdo_operator)
+                else:
+                    cdo_operator = "-monmean {0} ".format(cdo_operator)
+            if frequency in ('day', 'daily', '1d'):
+                if var_code == 201:
+                    cdo_operator = "-daymax {0} ".format(cdo_operator)
+                elif var_code == 202:
+                    cdo_operator = "-daymin {0} ".format(cdo_operator)
+                else:
+                    cdo_operator = "-daymean {0} ".format(cdo_operator)
+
+            if var_code in (144, 146, 147, 169, 175, 176, 177, 179, 180, 181, 182, 201, 202, 205, 212, 228):
+                cdo_operator = '{0} -shifttime,-{1}hours'.format(cdo_operator, self.experiment.atmos_timestep)
+
+            if var_code == 129:
+                # geopotential
+                new_units = "m"
+                cdo_operator = "-divc,9.81 {0}".format(cdo_operator)
+            elif var_code in (146, 147, 169, 175, 176, 177, 179, 212):
+                # radiation
+                new_units = "W m-2"
+                cdo_operator = "-divc,{0} {1}".format(self.experiment.atmos_timestep * 3600, cdo_operator)
+            elif var_code in (180, 181):
+                # momentum flux
+                new_units = "N m-2"
+                cdo_operator = "-divc,{0} {1}".format(self.experiment.atmos_timestep * 3600, cdo_operator)
+            elif var_code in (144, 182, 205, 228):
+                # precipitation/evaporation/runoff
+                new_units = "kg m-2 s-1"
+                cdo_operator = "-mulc,1000 -divc,{0} {1}".format(self.experiment.atmos_timestep * 3600, cdo_operator)
+
+            levels = self.config.cmor.get_levels(frequency, var_code)
+            if levels:
+                cdo_operator = "{0} -sellevel,{1}".format(cdo_operator, levels)
+
+            Utils.execute_shell_command('cdo -t ecmwf setreftime,{0} '
+                                        '{1} {2}_{3}.128.nc '
+                                        '{2}_{3}_{4}.nc'.format(cdo_reftime, cdo_operator,
+                                                                gribfile, var_code, frequency))
+            h_var_file = '{0}_{1}_{2}.nc'.format(gribfile, var_code, frequency)
+
+            handler = Utils.openCdf(h_var_file)
+            if new_units:
+                for var in handler.variables.values():
+                    if 'code' in var.ncattrs() and var.code == var_code:
+                        var.units = new_units
+                        break
+
+            var_name = None
+            for key in handler.variables.keys():
+                if key + '_2' in handler.variables and key not in handler.dimensions:
+                    var_name = key
+            handler.close()
+
+            if var_name is not None:
+                Utils.nco.ncks(input=h_var_file, output=h_var_file,
+                               options='-O -v {0}'.format(var_name))
+
+    def _merge_and_cmorize_atmos(self, chunk_start, chunk_end, grid, frequency):
+        merged_file = 'MMA_{0}_{1}_{2}_{3}.nc'.format(frequency, date2str(chunk_start), date2str(chunk_end), grid)
+        files = glob.glob(os.path.join(self.config.scratch_dir,
+                                       '{0}_*_{1}.nc'.format(self._get_grib_filename(grid, chunk_start), frequency)))
+        for first_file in files:
+            shutil.move(first_file, merged_file)
+            current_month = add_months(chunk_start, 1, 'standard')
+            while current_month < chunk_end:
+                month_file = first_file.replace('+{0}.grb'.format(date2str(chunk_start)[:-2]),
'+{0}.grb'.format(date2str(current_month)[:-2])) + Utils.concat_variables(month_file, merged_file, True) + current_month = add_months(current_month, 1, 'standard') + + self._cmorize_nc_file(merged_file) + + def _update_time_variables(self, filename): + handler = Utils.openCdf(filename) + time_var = handler.variables['time'] + if "time_bnds" in handler.variables: + time_var.bounds = "time_bnds" + handler.variables['time_bnds'].units = time_var.units + handler.close() + temp = TempFile.get() + Utils.cdo.setreftime('1850-01-01,00:00:00,days', input=filename, output=temp) + Utils.move_file(temp, filename) + + self._set_leadtime_var(filename) + + def _set_leadtime_var(self, filename): + handler = Utils.openCdf(filename) + if 'leadtime' in handler.variables: + var = handler.variables['leadtime'] + else: + var = handler.createVariable('leadtime', float, 'time') + var.units = "days" + var.long_name = "Time elapsed since the start of the forecast" + var.standard_name = "forecast_period" + leadtime = (Utils.get_datetime_from_netcdf(handler) - parse_date(self.startdate)) + for lt in range(0, leadtime.shape[0]): + var[lt] = leadtime[lt].days + handler.close() + + def _add_common_attributes(self, filename, frequency): + cmor = self.config.cmor + experiment = self.config.experiment + handler = Utils.openCdf(filename) + handler.associated_experiment = cmor.associated_experiment + handler.batch = '{0}{1}'.format(experiment.institute, datetime.now().strftime('%Y-%m-%d(T%H:%M:%SZ)')) + handler.contact = 'Pierre-Antoine Bretonnière, pierre-antoine.bretonniere@bsc.es , ' \ + 'Javier Vegas-Regidor, javier.vegas@bsc.es ' + handler.Conventions = 'CF-1.6' + handler.creation_date = datetime.now().strftime('%Y-%m-%d(T%H:%M:%SZ)') + handler.experiment_id = experiment.experiment_name + handler.forecast_reference_time = parse_date(self.startdate).strftime('%Y-%m-%d(T%H:%M:%SZ)') + if frequency == 'd': + handler.frequency = 'day' + elif frequency == 'm': + handler.frequency = 'mon' + handler.institute_id = experiment.institute + handler.institution = experiment.institute + handler.initialization_method = cmor.initialization_method + handler.initialization_description = cmor.initialization_description + handler.physics_version = cmor.physics_version + handler.physics_description = cmor.physics_description + handler.model_id = experiment.model + handler.associated_model = cmor.associated_model + handler.project_id = 'SPECS' + handler.realization = str(self.member + 1) + handler.source = cmor.source + handler.startdate = 'S{0}'.format(self.startdate) + handler.tracking_id = str(uuid.uuid1()) + handler.title = "{0} model output prepared for SPECS {1}".format(experiment.model, experiment.experiment_name) + handler.close() + + def gribfiles_available(self): + grb_path = os.path.join(self.original_files_path, '*.grb') + gribfiles = glob.glob(grb_path) + return len(gribfiles) > 0 + + +class CMORException(Exception): + pass diff --git a/earthdiagnostics/cmormanager.py b/earthdiagnostics/cmormanager.py new file mode 100644 index 0000000000000000000000000000000000000000..9d2601837475b64e9fbf51467ac4682a93486110 --- /dev/null +++ b/earthdiagnostics/cmormanager.py @@ -0,0 +1,413 @@ +# coding=utf-8 +import glob +from datetime import datetime + +import os +from autosubmit.config.log import Log +from autosubmit.date.chunk_date_lib import parse_date, chunk_start_date, chunk_end_date, previous_day + +from earthdiagnostics.cmorizer import Cmorizer +from earthdiagnostics.datamanager import DataManager, NetCDFFile +from 
earthdiagnostics.utils import TempFile, Utils +from earthdiagnostics.variable import Variable + + +class CMORManager(DataManager): + """ + Data manager class for CMORized experiments + """ + def __init__(self, config): + super(CMORManager, self).__init__(config) + data_folders = self.config.data_dir.split(':') + self.config.data_dir = None + for data_folder in data_folders: + if os.path.isdir(os.path.join(data_folder, self.experiment.expid)): + self.config.data_dir = data_folder + break + + if not self.config.data_dir: + raise Exception('Can not find model data') + + def get_file(self, domain, var, startdate, member, chunk, grid=None, box=None, frequency=None): + """ + Copies a given file from the CMOR repository to the scratch folder and returns the path to the scratch's copy + + :param domain: CMOR domain + :type domain: Domain + :param var: variable name + :type var: str + :param startdate: file's startdate + :type startdate: str + :param member: file's member + :type member: int + :param chunk: file's chunk + :type chunk: int + :param grid: file's grid (only needed if it is not the original) + :type grid: str|NoneType + :param box: file's box (only needed to retrieve sections or averages) + :type box: Box + :param frequency: file's frequency (only needed if it is different from the default) + :type frequency: str + :return: path to the copy created on the scratch folder + :rtype: str + """ + filepath = self.get_file_path(startdate, member, domain, var, chunk, frequency, box, grid, None, None) + + temp_path = TempFile.get() + Utils.copy_file(filepath, temp_path) + return temp_path + + def get_file_path(self, startdate, member, domain, var, chunk, frequency, + box=None, grid=None, year=None, date_str=None): + """ + Returns the path to a concrete file + :param startdate: file's startdate + :type startdate: str + :param member: file's member + :type member: int + :param domain: file's domain + :type domain: Domain + :param var: file's var + :type var: str + :param chunk: file's chunk + :type chunk: int + :param frequency: file's frequency + :type frequency: str + :param box: file's box + :type box: Box + :param grid: file's grid + :type grid: str + :param year: file's year + :type year: int|str + :param date_str: date string to add directly. 
Overrides year or chunk configurations
+        :type date_str: str
+        :return: path to the file
+        :rtype: str
+        """
+        if not frequency:
+            frequency = self.config.frequency
+        var = self._get_final_var_name(box, var)
+
+        folder_path = self._get_full_cmor_folder_path(startdate, member, domain, var, frequency, grid)
+        file_name = self._get_cmor_file_name(startdate, member, domain, var, frequency, chunk, year, date_str)
+
+        filepath = os.path.join(folder_path, file_name)
+        return filepath
+
+    def _get_cmor_file_name(self, startdate, member, domain, var, frequency, chunk, year, date_str):
+        domain_abbreviation = domain.get_table_name(frequency)
+        if chunk is not None:
+            time_bound = self._get_chunk_time_bounds(startdate, chunk)
+        elif year:
+            if frequency != 'yr':
+                raise ValueError('Year may be provided instead of chunk only if frequency is "yr"')
+            time_bound = str(year)
+        elif date_str:
+            time_bound = date_str
+        else:
+            raise ValueError('Chunk, year and date_str can not be None at the same time')
+        file_name = '{0}_{1}_{2}_{3}_S{4}_r{5}i1p1_{6}.nc'.format(var, domain_abbreviation, self.experiment.model,
+                                                                  self.experiment.experiment_name, startdate,
+                                                                  member + 1,
+                                                                  time_bound)
+        return file_name
+
+    def _get_full_cmor_folder_path(self, startdate, member, domain, var, frequency, grid):
+        folder_path = os.path.join(self._get_startdate_path(startdate), frequency, domain, var)
+        if grid:
+            folder_path = os.path.join(folder_path, grid)
+        folder_path = os.path.join(folder_path, 'r{0}i1p1'.format(member + 1))
+        return folder_path
+
+    def _get_chunk_time_bounds(self, startdate, chunk):
+        start = parse_date(startdate)
+        chunk_start = chunk_start_date(start, chunk, self.experiment.chunk_size, 'month', 'standard')
+        chunk_end = chunk_end_date(chunk_start, self.experiment.chunk_size, 'month', 'standard')
+        chunk_end = previous_day(chunk_end, 'standard')
+        time_bound = "{0:04}{1:02}-{2:04}{3:02}".format(chunk_start.year, chunk_start.month, chunk_end.year,
+                                                        chunk_end.month)
+        return time_bound
+
+    def link_file(self, domain, var, startdate, member, chunk=None, grid=None, box=None,
+                  frequency=None, year=None, date_str=None, move_old=False):
+        """
+        Creates the link of a given file from the CMOR repository.
+
+        :param move_old: if true, moves files following older conventions that may be found on the links folder
+        :type move_old: bool
+        :param date_str: exact date_str used in the cmorized file
+        :type date_str: str
+        :param year: if frequency is yearly, this parameter is used to give the corresponding year
+        :type year: int
+        :param domain: CMOR domain
+        :type domain: Domain
+        :param var: variable name
+        :type var: str
+        :param startdate: file's startdate
+        :type startdate: str
+        :param member: file's member
+        :type member: int
+        :param chunk: file's chunk
+        :type chunk: int
+        :param grid: file's grid (only needed if it is not the original)
+        :type grid: str
+        :param box: file's box (only needed to retrieve sections or averages)
+        :type box: Box
+        :param frequency: file's frequency (only needed if it is different from the default)
+        :type frequency: str
+        """
+        var = self._get_final_var_name(box, var)
+
+        if not frequency:
+            frequency = self.config.frequency
+        filepath = self.get_file_path(startdate, member, domain, var, chunk, frequency,
+                                      grid=grid, year=year, date_str=date_str)
+        self._create_link(domain, filepath, frequency, var, grid, move_old)
+
+    def send_file(self, filetosend, domain, var, startdate, member, chunk=None, grid=None, region=None,
+                  box=None, rename_var=None, frequency=None, year=None, date_str=None, move_old=False,
+                  diagnostic=None, cmorized=False):
+        """
+        Copies a given file to the CMOR repository. It also automatically converts to netCDF 4 if needed and can merge
+        with already existing ones as needed
+
+        :param move_old: if true, moves files following older conventions that may be found on the links folder
+        :type move_old: bool
+        :param date_str: exact date_str to use in the cmorized file
+        :type date_str: str
+        :param year: if frequency is yearly, this parameter is used to give the corresponding year
+        :type year: int
+        :param rename_var: if exists, the given variable will be renamed to the one given by var
+        :type rename_var: str
+        :param filetosend: path to the file to send to the CMOR repository
+        :type filetosend: str
+        :param region: specifies the region represented by the file. If it is defined, the data will be appended to the
+        CMOR repository as a new region in the file or will overwrite if region was already present
+        :type region: str
+        :param domain: CMOR domain
+        :type domain: Domain
+        :param var: variable name
+        :type var: str
+        :param startdate: file's startdate
+        :type startdate: str
+        :param member: file's member
+        :type member: int
+        :param chunk: file's chunk
+        :type chunk: int
+        :param grid: file's grid (only needed if it is not the original)
+        :type grid: str
+        :param box: file's box (only needed to retrieve sections or averages)
+        :type box: Box
+        :param frequency: file's frequency (only needed if it is different from the default)
+        :type frequency: str
+        :param diagnostic: diagnostic used to generate the file
+        :type diagnostic: Diagnostic
+        :param cmorized: flag to indicate if file was generated in cmorization process
+        :type cmorized: bool
+        """
+        original_var = var
+        cmor_var = Variable.get_variable(var)
+        var = self._get_final_var_name(box, var)
+
+        if rename_var and rename_var != var:
+            Utils.rename_variable(filetosend, rename_var, var)
+        elif original_var != var:
+            Utils.rename_variable(filetosend, original_var, var)
+
+        if not frequency:
+            frequency = self.config.frequency
+
+        filepath = self.get_file_path(startdate, member, domain, var, chunk, frequency, box,
+                                      grid, year, date_str)
+        netcdf_file = NetCDFFile(filepath, filetosend, domain, var, cmor_var)
+        if diagnostic:
+            netcdf_file.add_diagnostic_history(diagnostic)
+        elif cmorized:
+            netcdf_file.add_cmorization_history()
+        else:
+            raise ValueError('You must provide a diagnostic or set cmorized to true to store data '
+                             'using the CMORManager')
+        netcdf_file.send()
+
+        self._create_link(domain, filepath, frequency, var, grid, move_old)
+
+    def get_year(self, domain, var, startdate, member, year, grid=None, box=None):
+        """
+        Get a file containing all the data for one year for one variable
+        :param domain: variable's domain
+        :type domain: str
+        :param var: variable's name
+        :type var: str
+        :param startdate: startdate to retrieve
+        :type startdate: str
+        :param member: member to retrieve
+        :type member: int
+        :param year: year to retrieve
+        :type year: int
+        :param grid: variable's grid
+        :type grid: str
+        :param box: variable's box
+        :type box: Box
+        :return: path to the file containing the selected year of data
+        """
+
+        chunk_files = list()
+        for chunk in self.experiment.get_year_chunks(startdate, year):
+            chunk_files.append(self.get_file(domain, var, startdate, member, chunk, grid=grid, box=box))
+
+        if len(chunk_files) > 1:
+            temp = self._merge_chunk_files(chunk_files)
+        else:
+            temp = chunk_files[0]
+        temp2 = self._select_data_of_given_year(temp, year)
+        os.remove(temp)
+        return temp2
+
+    @staticmethod
+    def _select_data_of_given_year(data_file, year):
+        temp2 = TempFile.get()
+        Utils.cdo.selyear(str(year), input=data_file, output=temp2)
+        return temp2
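+
+    # get_year example (hypothetical values): get_year('ocean', 'tos', '20000101',
+    # 0, 2001) fetches every chunk overlapping 2001, concatenates them with ncrcat
+    # when there is more than one, and trims the result with 'cdo selyear,2001'.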
+    @staticmethod
+    def _merge_chunk_files(chunk_files):
+        temp = TempFile.get()
+        Utils.nco.ncrcat(input=' '.join(chunk_files), output=temp)
+        for chunk_file in chunk_files:
+            os.remove(chunk_file)
+        return temp
+
+    # noinspection PyPep8Naming
+    def prepare(self):
+        """
+        Prepares the data to be used by the diagnostic.
+
+        If CMOR data is not created, it shows a warning and exits. In the future, an automatic cmorization procedure
+        will be launched
+
+        If CMOR data is available but packed, the procedure will unpack it.
+
+        :return:
+        """
+        # Check if cmorized and convert if not
+
+        for startdate, member in self.experiment.get_member_list():
+            if not self.config.cmor.force and not self.config.cmor.force_untar and self._is_cmorized(startdate, member):
+                continue
+            if not self._unpack_cmor_files(startdate, member):
+                self._cmorize_member(startdate, member)
+
+    def _is_cmorized(self, startdate, member):
+        startdate_path = self._get_startdate_path(startdate)
+        if not os.path.exists(startdate_path):
+            return False
+        for freq in os.listdir(startdate_path):
+            freq_path = os.path.join(startdate_path, freq)
+            for domain in os.listdir(freq_path):
+                domain_path = os.path.join(freq_path, domain)
+                for var in os.listdir(domain_path):
+                    member_path = os.path.join(domain_path, var, 'r{0}i1p1'.format(member + 1))
+                    if os.path.exists(member_path):
+                        return True
+        return False
+
+    def _cmorize_member(self, startdate, member):
+        start_time = datetime.now()
+        member_str = self.experiment.get_member_str(member)
+        Log.info('CMORizing startdate {0} member {1}. Starting at {2}', startdate, member_str, start_time)
+        cmorizer = Cmorizer(self, startdate, member)
+        cmorizer.cmorize_ocean()
+        cmorizer.cmorize_atmos()
+        Log.result('CMORized startdate {0} member {1} in {2}!\n\n', startdate, member_str,
+                   datetime.now() - start_time)
+
+    def _unpack_cmor_files(self, startdate, member):
+        if self.config.cmor.force:
+            return False
+        filepaths = self._get_transferred_cmor_data_filepaths(startdate, member, 'tar.gz')
+        if len(filepaths) > 0:
+            Log.info('Unzipping cmorized data...')
+            Utils.unzip(filepaths, True)
+
+        if not os.path.exists(self.cmor_path):
+            os.mkdir(self.cmor_path)
+
+        filepaths = self._get_transferred_cmor_data_filepaths(startdate, member, 'tar')
+        if len(filepaths) > 0:
+            Log.info('Unpacking cmorized data...')
+            Utils.untar(filepaths, self.cmor_path)
+            self._correct_paths(startdate)
+            self._create_links(startdate)
+            return True
+        return False
+
+    def _get_transferred_cmor_data_filepaths(self, startdate, member, extension):
+        tar_path = os.path.join(self.config.data_dir, self.experiment.expid, 'original_files', 'cmorfiles')
+        tar_original_files = os.path.join(self.config.data_dir, 'original_files', self.experiment.expid,
+                                          'cmorfiles')
+        file_name = 'CMOR?_{0}_{1}_{2}_*.{3}'.format(self.experiment.expid, startdate,
+                                                     self.experiment.get_member_str(member), extension)
+        filepaths = glob.glob(os.path.join(tar_path, file_name))
+        filepaths += glob.glob(os.path.join(tar_path, 'outputs', file_name))
+        filepaths += glob.glob(os.path.join(tar_original_files, file_name))
+        filepaths += glob.glob(os.path.join(tar_original_files, 'outputs', file_name))
+        return filepaths
+
+    def _correct_paths(self, startdate):
+        self._remove_extra_output_folder()
+        self._fix_model_as_experiment_error(startdate)
+
+    def _fix_model_as_experiment_error(self, startdate):
+        if self.experiment.experiment_name != self.experiment.model:
+            bad_path = os.path.join(self.cmor_path, self.experiment.institute, self.experiment.model,
+                                    self.experiment.model)
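+            # Hypothetical example: a file unpacked under
+            # cmorfiles/<institute>/<model>/<model>/... (the model name doubling
+            # as experiment name) gets '_<model>_output_' rewritten to
+            # '_<model>_<experiment_name>_S<startdate>_' and is moved to the
+            # matching cmorfiles/<institute>/<model>/<experiment_name>/... path.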
Log.debug('Correcting double model appearance')
+            for (dirpath, dirnames, filenames) in os.walk(bad_path, False):
+
+                for filename in filenames:
+                    filepath = os.path.join(dirpath, filename)
+                    good = filepath.replace('_{0}_output_'.format(self.experiment.model),
+                                            '_{0}_{1}_S{2}_'.format(self.experiment.model,
+                                                                    self.experiment.experiment_name,
+                                                                    startdate))
+
+                    good = good.replace('/{0}/{0}'.format(self.experiment.model),
+                                        '/{0}/{1}'.format(self.experiment.model,
+                                                          self.experiment.experiment_name))
+
+                    Utils.move_file(filepath, good)
+                os.rmdir(dirpath)
+            Log.debug('Done')
+
+    def _remove_extra_output_folder(self):
+        bad_path = os.path.join(self.cmor_path, 'output', self.experiment.institute)
+        if os.path.exists(bad_path):
+            Log.debug('Moving CMOR files out of the output folder')
+            Utils.execute_shell_command(['mv', bad_path, os.path.join(bad_path, '..', '..')])
+            os.rmdir(os.path.join(self.cmor_path, 'output'))
+            Log.debug('Done')
+
+    def _create_links(self, startdate):
+        Log.info('Creating links for CMOR files')
+        path = self._get_startdate_path(startdate)
+        for freq in os.listdir(path):
+            for domain in os.listdir(os.path.join(path, freq)):
+                for var in os.listdir(os.path.join(path, freq, domain)):
+                    for member in os.listdir(os.path.join(path, freq, domain, var)):
+                        for name in os.listdir(os.path.join(path, freq, domain, var, member)):
+                            filepath = os.path.join(path, freq, domain, var, member, name)
+                            if os.path.isfile(filepath):
+                                self._create_link(domain, filepath, freq, var, "", False)
+                            else:
+                                for filename in os.listdir(filepath):
+                                    self._create_link(domain, os.path.join(filepath, filename), freq, var, "", False)
+        Log.result('Links for CMOR files created')
+
+    def _get_startdate_path(self, startdate):
+        """
+        Returns the path to the startdate's CMOR folder
+        :param startdate: target startdate
+        :type startdate: str
+        :return: path to the startdate's CMOR folder
+        :rtype: str
+        """
+        return os.path.join(self.config.data_dir, self.experiment.expid, 'cmorfiles', self.experiment.institute,
+                            self.experiment.model, self.experiment.experiment_name, 'S' + startdate)
diff --git a/earthdiagnostics/config.py b/earthdiagnostics/config.py
new file mode 100644
index 0000000000000000000000000000000000000000..206e12a6fc1cc8e1d9e6ad528fadb21ae9dcab7a
--- /dev/null
+++ b/earthdiagnostics/config.py
@@ -0,0 +1,296 @@
+# coding=utf-8
+import os
+
+from autosubmit.config.log import Log
+from autosubmit.date.chunk_date_lib import parse_date, chunk_start_date, chunk_end_date
+
+from earthdiagnostics.parser import Parser
+from earthdiagnostics.variable import Variable
+from earthdiagnostics.utils import Utils
+
+
+class Config(object):
+    """
+    Class to read and manage the configuration
+
+    :param path: path to the conf file
+    :type path: str
+    """
+
+    def __init__(self, path):
+        parser = Parser()
+        parser.optionxform = str
+        parser.read(path)
+
+        # Read diags config
+        self.data_adaptor = parser.get_option('DIAGNOSTICS', 'DATA_ADAPTOR', 'CMOR').upper()
+        "Data adaptor to use"
+        self.scratch_dir = Utils.expand_path(parser.get_option('DIAGNOSTICS', 'SCRATCH_DIR'))
+        "Scratch folder path"
+        self.data_dir = Utils.expand_path(parser.get_option('DIAGNOSTICS', 'DATA_DIR'))
+        "Root data folder path"
+        self.con_files = Utils.expand_path(parser.get_option('DIAGNOSTICS', 'CON_FILES'))
+        "Mask and meshes folder path"
+        self._diags = parser.get_option('DIAGNOSTICS', 'DIAGS')
+        self.frequency = parser.get_option('DIAGNOSTICS', 'FREQUENCY')
+        "Default data frequency to be used by the diagnostics"
+        self.cdftools_path = Utils.expand_path(parser.get_option('DIAGNOSTICS', 'CDFTOOLS_PATH'))
+        "Path to CDFTOOLS executables"
+        self.max_cores = parser.get_int_option('DIAGNOSTICS', 'MAX_CORES', 100000)
+        "Maximum number of cores to use"
+        self.restore_meshes = parser.get_bool_option('DIAGNOSTICS', 'RESTORE_MESHES', False)
+        "If True, forces the tool to copy all the mesh and mask files for the model, regardless of existence"
+
+        # Read experiment config
+        self.experiment = ExperimentConfig(parser)
+        """
+        Configuration related to the experiment
+
+        :rtype: ExperimentConfig
+        """
+        # Read aliases
+        self._aliases = dict()
+        if parser.has_section('ALIAS'):
+            for option in parser.options('ALIAS'):
+                self._aliases[option.lower()] = parser.get_option('ALIAS', option).lower().split()
+        Log.debug('Preparing command list')
+        commands = self._diags.split()
+        self._real_commands = list()
+        for command in commands:
+            if command.lower() in self._aliases:
+                added_commands = self._aliases[command.lower()]
+                Log.info('Changing alias {0} for {1}', command, ' '.join(added_commands))
+                for add_command in added_commands:
+                    self._real_commands.append(add_command)
+            else:
+                self._real_commands.append(command)
+        Log.debug('Command list ready')
+
+        self.scratch_dir = os.path.join(self.scratch_dir, 'diags', self.experiment.expid)
+
+        self.cmor = CMORConfig(parser)
+        self.thredds = THREDDSConfig(parser)
+
+    def get_commands(self):
+        """
+        Returns the list of commands after replacing the alias
+        :return: full list of commands
+        :rtype: list(str)
+        """
+        return self._real_commands
+
+
+class CMORConfig(object):
+
+    def __init__(self, parser):
+        self.force = parser.get_bool_option('CMOR', 'FORCE', False)
+        self.force_untar = parser.get_bool_option('CMOR', 'FORCE_UNTAR', False)
+        self.ocean = parser.get_bool_option('CMOR', 'OCEAN_FILES', True)
+        self.atmosphere = parser.get_bool_option('CMOR', 'ATMOSPHERE_FILES', True)
+        self.use_grib = parser.get_bool_option('CMOR', 'USE_GRIB', True)
+        self.associated_experiment = parser.get_option('CMOR', 'ASSOCIATED_EXPERIMENT', 'to be filled')
+        self.associated_model = parser.get_option('CMOR', 'ASSOCIATED_MODEL', 'to be filled')
+        self.initialization_description = parser.get_option('CMOR', 'INITIALIZATION_DESCRIPTION', 'to be filled')
+        self.initialization_method = parser.get_option('CMOR', 'INITIALIZATION_METHOD', '1')
+        self.physics_description = parser.get_option('CMOR', 'PHYSICS_DESCRIPTION', 'to be filled')
+        self.physics_version = parser.get_option('CMOR', 'PHYSICS_VERSION', '1')
+        self.source = parser.get_option('CMOR', 'SOURCE', 'to be filled')
+        self.add_name = parser.get_bool_option('CMOR', 'ADD_NAME')
+        self.add_startdate = parser.get_bool_option('CMOR', 'ADD_STARTDATE')
+
+        vars_string = parser.get_option('CMOR', 'VARIABLE_LIST', '')
+        if vars_string:
+            self._variable_list = list()
+            for domain_var in vars_string.split(' '):
+                self._variable_list.append(domain_var.lower())
+        else:
+            self._variable_list = None
+
+        self._var_hourly = CMORConfig._parse_variables(parser.get_option('CMOR', 'ATMOS_HOURLY_VARS', ''))
+        self._var_daily = CMORConfig._parse_variables(parser.get_option('CMOR', 'ATMOS_DAILY_VARS', ''))
+        self._var_monthly = CMORConfig._parse_variables(parser.get_option('CMOR', 'ATMOS_MONTHLY_VARS', ''))
+
+    def cmorize(self, var_cmor):
+        """
+        Checks if var_cmor is on the variable list
+
+        :param var_cmor: CMOR variable object
+        :type var_cmor: Variable
+        :return:
+        """
+        if self._variable_list is None:
+            return True
+        if not var_cmor:
+            return False
+        return '{0}:{1}'.format(var_cmor.domain, var_cmor.short_name).lower() in self._variable_list
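+
+    # cmorize() example (hypothetical values): with VARIABLE_LIST = 'ocean:tos seaice:sit'
+    # in the [CMOR] section, only those two domain:short_name pairs are accepted
+    # and any other variable found in the input files is skipped.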
+    def any_required(self, variables):
+        if self._variable_list is None:
+            return True
+        for var in variables:
+            if self.cmorize(Variable.get_variable(var, silent=True)):
+                return True
+
+        return False
+
+    @staticmethod
+    def _parse_variables(raw_string):
+        variables = dict()
+        if raw_string:
+            splitted = raw_string.split(',')
+            for var_section in splitted:
+                splitted_var = var_section.split(':')
+                if len(splitted_var) == 1:
+                    levels = None
+                else:
+                    levels = ','.join(map(str, CMORConfig._parse_levels(splitted_var[1:])))
+                variables[int(splitted_var[0])] = levels
+        return variables
+
+    @staticmethod
+    def _parse_levels(levels_splitted):
+        if len(levels_splitted) == 1:
+            return map(int, levels_splitted[0].split('-'))
+        start = int(levels_splitted[0])
+        end = int(levels_splitted[1])
+        if len(levels_splitted) == 3:
+            step = int(levels_splitted[2])
+        else:
+            step = 1
+        return range(start, end, step)
+
+    def get_variables(self, frequency):
+        if frequency in ('hour', 'hourly') or frequency[1:] == 'hr':
+            return self._var_hourly
+        elif frequency in ('day', 'daily', '1d'):
+            return self._var_daily
+        elif frequency in ('month', 'monthly', 'mon', '1m'):
+            return self._var_monthly
+        raise Exception('Frequency not recognized: {0}'.format(frequency))
+
+    def get_levels(self, frequency, variable):
+        return self.get_variables(frequency)[variable]
+
+
+class THREDDSConfig(object):
+    def __init__(self, parser):
+        self.server_url = parser.get_option('THREDDS', 'SERVER_URL', '')
+
+
+class ExperimentConfig(object):
+    """
+    Encapsulates all chunk related tasks
+
+    :param parser: parser for the config file
+    :type parser: Parser
+    """
+
+    def __init__(self, parser):
+        self.institute = parser.get_option('EXPERIMENT', 'INSTITUTE')
+        self.expid = parser.get_option('EXPERIMENT', 'EXPID')
+        self.experiment_name = parser.get_option('EXPERIMENT', 'NAME', self.expid)
+
+        members = list()
+        for member in parser.get_option('EXPERIMENT', 'MEMBERS').split():
+            members.append(int(member))
+
+        member_digits = parser.get_int_option('EXPERIMENT', 'MEMBER_DIGITS', 1)
+        startdates = parser.get_option('EXPERIMENT', 'STARTDATES').split()
+        chunk_size = parser.get_int_option('EXPERIMENT', 'CHUNK_SIZE')
+        chunks = parser.get_int_option('EXPERIMENT', 'CHUNKS')
+        calendar = parser.get_option('EXPERIMENT', 'CALENDAR', 'standard')
+        self.model = parser.get_option('EXPERIMENT', 'MODEL')
+        self.atmos_timestep = parser.get_int_option('EXPERIMENT', 'ATMOS_TIMESTEP', 6)
+        self.ocean_timestep = parser.get_int_option('EXPERIMENT', 'OCEAN_TIMESTEP', 6)
+        self.model_version = parser.get_option('EXPERIMENT', 'MODEL_VERSION')
+        self.atmos_grid = parser.get_option('EXPERIMENT', 'ATMOS_GRID')
+
+        self.startdates = startdates
+        self.members = members
+        self.num_chunks = chunks
+        self.chunk_size = chunk_size
+        self.member_digits = member_digits
+        self.calendar = calendar
+
+    def get_chunk_list(self):
+        """
+        Return a list with all the chunks
+        :return: List containing tuples of startdate, member and chunk
+        :rtype: list[tuple[str, int, int]]
+        """
+        chunk_list = list()
+        for startdate in self.startdates:
+            for member in self.members:
+                for chunk in range(1, self.num_chunks + 1):
+                    chunk_list.append((startdate, member, chunk))
+        return chunk_list
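+
+    # get_chunk_list example (hypothetical values): startdates=['19900101'],
+    # members=[0, 1] and 2 chunks yield [('19900101', 0, 1), ('19900101', 0, 2),
+    # ('19900101', 1, 1), ('19900101', 1, 2)].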
for member in self.members: + member_list.append((startdate, member)) + return member_list + + def get_year_chunks(self, startdate, year): + """ + Get the list of chunks containing timesteps from the given year + :param startdate: startdate to use + :type startdate: str + :param year: reference year + :type year: int + :return: list of chunks containing data from the given year + :rtype: list[int] + """ + date = parse_date(startdate) + chunks = list() + for chunk in range(1, self.num_chunks + 1): + chunk_start = chunk_start_date(date, chunk, self.chunk_size, 'month', self.calendar) + if chunk_start.year > year: + break + elif chunk_start.year == year or chunk_end_date(chunk_start, self.chunk_size, 'month', + self.calendar).year == year: + chunks.append(chunk) + + return chunks + + def get_full_years(self, startdate): + """ + Returns the list of full years that are in the given startdate + :param startdate: startdate to use + :type startdate: str + :return: list of full years + :rtype: list[int] + """ + chunks_per_year = 12 / self.chunk_size + date = parse_date(startdate) + first_january = 0 + first_year = date.year + if date.month != 1: + month = date.month + first_year += 1 + while month + self.chunk_size < 12: + month += self.chunk_size + first_january += 1 + + years = list() + for chunk in range(first_january, self.num_chunks - chunks_per_year, chunks_per_year): + years.append(first_year) + first_year += 1 + return years + + def get_member_str(self, member): + """ + Returns the member name for a given member number. + :param member: member's number + :type member: int + :return: member's name + :rtype: str + """ + return 'fc{0}'.format(str(member).zfill(self.member_digits)) + diff --git a/earthdiagnostics/constants.py b/earthdiagnostics/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..b8b63d31e525b90b6a732dced2f9e1daedad7349 --- /dev/null +++ b/earthdiagnostics/constants.py @@ -0,0 +1,249 @@ +# coding=utf-8 +""" +Contains the enumeration-like classes used by the diagnostics +""" +from earthdiagnostics.box import Box + + +class Basin(object): + """ + Class representing a given basin + + :param shortname: sfull basin's name + :type shortname: str + :param fullname: full basin's name + :type fullname: str + :param box: box defining the basin + :type box: Box + """ + + def __init__(self, shortname, fullname, box=None): + self._shortname = shortname + self._fullname = fullname + if box is None: + box = Box() + + self.box = box + """ + Box representing the basin + """ + + def __eq__(self, other): + if self.shortname != other.shortname or self.fullname != other.fullname: + return False + return True + + @property + def shortname(self): + """ + Basin's short name + :rtype: str + """ + return self._shortname + + @property + def fullname(self): + """ + Basin's full name + :rtype: str + """ + return self._fullname + + +class Basins(object): + """ + Predefined basins + """ + Global = Basin('glob', 'Global_Ocean') + """ Global ocean """ + + Atlantic = Basin('Atl', 'Atlantic_Ocean') + """ Atlantic ocean """ + NorthAtlantic = Basin('NAtl', 'North_Atlantic_Ocean') + """ North Atlantic Ocean """ + TropicalAtlantic = Basin('TAtl', 'Tropical_Atlantic_Ocean') + """ Tropical Atlantic Ocean """ + + Pacific = Basin('Pac', 'Pacific_Ocean') + """ Pacific Ocean """ + NorthPacific = Basin('NPac', 'North_Pacific_Ocean') + """ North Pacific Ocean """ + TropicalPacific = Basin('TPac', 'Tropical_Pacific_Ocean') + """ Tropical Pacific Ocean """ + IndoPacific = Basin('IndPac', 
'Indo_Pacific_Ocean') + """ Indo Pacific Ocean """ + + Indian = Basin('Ind', 'Indian_Ocean') + """ Indian Ocean """ + TropicalIndian = Basin('TInd', 'Tropical_Indian_Ocean') + """ Tropical Indian Ocean """ + + Antarctic = Basin('Anta', 'Antarctic_Ocean') + """ Antarctic Ocean """ + AntarcticAtlantic = Basin('AntaAtl', 'Antarctic_Atlantic_Sector') + """ Antarctic Ocean Atlantic Sector """ + AntarcticIndian = Basin('AntaInd', 'Antarctic_Indian_Sector') + """ Antarctic Ocean Indian Sector""" + + Arctic = Basin('Arct', 'Arctic_Ocean') + """ Arctic Ocean """ + ArcticNorthAtlantic = Basin('ArctNAtl', 'Arctic_Ocean_North_Atlantic') + """ Arctic Ocean North Atlantic""" + ArcticMarginalSeas = Basin('ArctMarg', 'Arctic_Marginal_Seas') + """ Arctic Ocean """ + + # ArctOcn + + Baffin = Basin('Baffin', 'Baffin') + " Baffin " + Baffin_Bay = Basin('BaffBay', 'Baffin_Bay') + " Baffin_Bay " + Baltic_Sea = Basin('Baltic', 'Baltic_Sea') + " Baltic_Sea " + BarKara = Basin('BarKara', 'BarKara') + " BarKara " + Barents_Sea = Basin('Barents', 'Barents_Sea') + " Barents_Sea " + Beaufort_Chukchi_Sea = Basin('BeaufortChukchi', 'Beaufort_Chukchi_Sea') + " Beaufort_Chukchi_Sea " + Beaufort_Sea = Basin('Beaufort', 'Beaufort_Sea') + " Beaufort_Sea " + Bellingshausen_Sea = Basin('Bellingshausen_', 'Bellingshausen_Sea') + " Bellingshausen_Sea " + Bering = Basin('Bering', 'Bering') + " Bering " + Bering_Strait = Basin('BeringStr', 'Bering_Strait') + " Bering_Strait " + CanArch = Basin('CanArch', 'CanArch') + " CanArch " + Canadian_Waters = Basin('Canadian', 'Canadian_Waters') + " Canadian_Waters " + Caspian_Sea = Basin('Caspian', 'Caspian_Sea') + " Caspian_Sea " + Central_Arctic = Basin('CArct', 'Central_Arctic') + " Central_Arctic " + Chukchi_Sea = Basin('Chukchi', 'Chukchi_Sea') + " Chukchi_Sea " + East_Siberian_Sea = Basin('ESiberian', 'East_Siberian_Sea') + " East_Siberian_Sea " + Eastern_Central_Arctic = Basin('ECArct', 'Eastern_Central_Arctic') + " Eastern_Central_Arctic " + Fram_Strait = Basin('Fram', 'Fram_Strait') + " Fram_Strait " + Global_Ocean = Basin('Global', 'Global_Ocean') + " Global_Ocean " + Greenland_Sea = Basin('Greenland', 'Greenland_Sea') + " Greenland_Sea " + Grnland = Basin('Grnland', 'Grnland') + " Grnland " + Hudson = Basin('Hudson', 'Hudson') + " Hudson " + Icelandic_Sea = Basin('Iceland', 'Icelandic_Sea') + " Icelandic_Sea " + Kara_Gate_Strait = Basin('KaraGate', 'Kara_Gate_Strait') + " Kara_Gate_Strait " + Kara_Sea = Basin('Kara', 'Kara') + " Kara_Sea " + Labrador_Sea = Basin('Labrador', 'Labrador') + " Labrador_Sea " + Laptev_East_Siberian_Chukchi_Seas = Basin('LaptevESiberianChukchi', 'Laptev_East_Siberian_Chukchi_Seas') + " Laptev_East_Siberian_Chukchi_Seas " + Laptev_East_Siberian_Seas = Basin('LaptevESiberian', 'Laptev_East_Siberian_Seas') + " Laptev_East_Siberian_Seas " + Laptev_Sea = Basin('Laptev', 'Laptev_Sea') + " Laptev_Sea " + Lincoln_Sea = Basin('Lincoln', 'Lincoln_Sea') + " Lincoln_Sea " + Mediterranean_Sea = Basin('Medit', 'Mediterranean_Sea') + " Mediterranean_Sea " + Nares_Strait = Basin('Nares', 'Nares_Strait') + " Nares_Strait " + Nordic_Barents_Seas = Basin('NordicBarents', 'Nordic_Barents_Seas') + " Nordic_Barents_Seas " + Nordic_Seas = Basin('Nordic', 'Nordic_Seas') + " Nordic_Seas " + NorthWest_Passage = Basin('NWPass', 'NorthWest_Passage') + " NorthWest_Passage " + North_Atlantic_Arctic = Basin('North_Atlantic_Arctic', 'North_Atlantic_Arctic') + " North_Atlantic_Arctic " + North_Hemisphere_Ocean = Basin('NHem', 'North_Hemisphere_Ocean') + " North_Hemisphere_Ocean " + 
Norwegian_Sea = Basin('Norwe', 'Norwegian_Sea')
+    " Norwegian_Sea "
+    Okhotsk = Basin('Okhotsk', 'Okhotsk')
+    " Okhotsk "
+    OpenOcean = Basin('OpenOcean', 'OpenOcean')
+    " OpenOcean "
+    Ross_Sea = Basin('Ross', 'Ross_Sea')
+    " Ross_Sea "
+    Serreze_Arctic = Basin('SerArc', 'Serreze_Arctic')
+    " Serreze_Arctic "
+    Southern_Hemisphere = Basin('SHem', 'Southern_Hemisphere')
+    " Southern_Hemisphere "
+    StLawr = Basin('StLawr', 'StLawr')
+    " StLawr "
+    Subpolar_Gyre = Basin('Subpolar', 'Subpolar_Gyre')
+    " Subpolar_Gyre "
+    TotalArc = Basin('TotalArc', 'TotalArc')
+    " TotalArc "
+    Vilkitsky_Strait = Basin('Vilkitsky', 'Vilkitsky_Strait')
+    " Vilkitsky_Strait "
+    Weddell_Sea = Basin('Weddell', 'Weddell_Sea')
+    " Weddell_Sea "
+    Western_Central_Arctic = Basin('Western_Central_Arctic', 'Western_Central_Arctic')
+    " Western_Central_Arctic "
+
+    @classmethod
+    def parse(cls, basin):
+        """
+        Return the basin matching the given name. If the parameter basin is a Basin instance, directly returns the same
+        instance. This behaviour is intended to facilitate the development of methods that can either accept a name
+        or a Basin instance to characterize the basin.
+
+        :param basin: basin name or basin instance
+        :type basin: str | Basin
+        :return: basin instance corresponding to the basin name
+        :rtype: Basin
+        """
+        if isinstance(basin, Basin):
+            return basin
+        for name in cls.__dict__.keys():
+            if name.startswith('_'):
+                continue
+            # noinspection PyCallByClass
+            value = cls.__getattribute__(cls, name)
+            if isinstance(value, Basin):
+                if basin.lower() in [value.shortname.lower(), value.fullname.lower()]:
+                    return value
+        return None
+
+
+class Models(object):
+    """
+    Predefined models
+    """
+
+    ECEARTH_2_3_O1L42 = 'Ec2.3_O1L42'
+    """ EC-Earth 2.3 ORCA1 L42 """
+    ECEARTH_3_0_O1L46 = 'Ec3.0_O1L46'
+    """ EC-Earth 3 ORCA1 L46 """
+    ECEARTH_3_0_O25L46 = 'Ec3.0_O25L46'
+    """ EC-Earth 3 ORCA0.25 L46 """
+    ECEARTH_3_0_O25L75 = 'Ec3.0_O25L75'
+    """ EC-Earth 3 ORCA0.25 L75 """
+    ECEARTH_3_1_O25L75 = 'Ec3.1_O25L75'
+    """ EC-Earth 3.1 ORCA0.25 L75 """
+    ECEARTH_3_2_O1L75 = 'Ec3.2_O1L75'
+    """ EC-Earth 3.2 ORCA1 L75 """
+
+    NEMO_3_2_O1L42 = 'N3.2_O1L42'
+    """ NEMO 3.2 ORCA1 L42 """
+    NEMO_3_3_O1L46 = 'N3.3_O1L46'
+    """ NEMO 3.3 ORCA1 L46 """
+    NEMO_3_6_O1L75 = 'N3.6_O1L75'
+    """ NEMO 3.6 ORCA1 L75 """
+
+    NEMOVAR_O1L42 = 'nemovar_O1L42'
+    """ NEMOVAR ORCA1 L42 """
+    GLORYS2_V1_O25L75 = 'glorys2v1_O25L75'
+    """ GLORYS2v1 ORCA0.25 L75 """
diff --git a/earthdiagnostics/conversions.csv b/earthdiagnostics/conversions.csv
new file mode 100644
index 0000000000000000000000000000000000000000..5e07878874b74395c3bc95e46fb58e881711279e
--- /dev/null
+++ b/earthdiagnostics/conversions.csv
@@ -0,0 +1,8 @@
+original,converted,factor,offset
+C,K,1,273.15
+degC,K,1,273.15
+m,km,1000,0
+m2,km2,1.00E+006,0
+m3,km3,1.00E+009,0
+"[0,1]",%,100,0
+1e-3,psu,1,0
diff --git a/earthdiagnostics/datamanager.py b/earthdiagnostics/datamanager.py
new file mode 100644
index 0000000000000000000000000000000000000000..403af5446e1087d14f2e98f4f9b347a971e4fb74
--- /dev/null
+++ b/earthdiagnostics/datamanager.py
@@ -0,0 +1,525 @@
+# coding: utf-8
+import csv
+import shutil
+import threading
+from datetime import datetime
+
+import numpy as np
+import os
+import re
+from cfunits import Units
+
+from earthdiagnostics.utils import Utils, TempFile
+from earthdiagnostics.variable import Variable, Domains
+
+
+class DataManager(object):
+    """
+    Class to manage the data repositories
+
+    :param config: configuration for the current execution
+    :type config: Config
+    """
+    def __init__(self, config):
+        
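# The constructor stores the configuration and loads the CMOR variable
+        # definitions and the unit-conversion table once per process.
+        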
self.config = config + self.experiment = config.experiment + self._checked_vars = list() + Variable.load_variables() + UnitConversion.load_conversions() + self.lock = threading.Lock() + self.cmor_path = os.path.join(self.config.data_dir, self.experiment.expid, 'cmorfiles') + + def get_file(self, domain, var, startdate, member, chunk, grid=None, box=None, frequency=None): + """ + Copies a given file from the CMOR repository to the scratch folder and returns the path to the scratch's copy + + :param domain: CMOR domain + :type domain: Domain + :param var: variable name + :type var: str + :param startdate: file's startdate + :type startdate: str + :param member: file's member + :type member: int + :param chunk: file's chunk + :type chunk: int + :param grid: file's grid (only needed if it is not the original) + :type grid: str + :param box: file's box (only needed to retrieve sections or averages) + :type box: Box + :param frequency: file's frequency (only needed if it is different from the default) + :type frequency: str + :return: path to the copy created on the scratch folder + :rtype: str + """ + raise NotImplementedError() + + def send_file(self, filetosend, domain, var, startdate, member, chunk=None, grid=None, region=None, + box=None, rename_var=None, frequency=None, year=None, date_str=None, move_old=False, + diagnostic=None, cmorized=False): + """ + Copies a given file to the CMOR repository. It also automatically converts to netCDF 4 if needed and can merge + with already existing ones as needed + + :param move_old: if true, moves files following older conventions that may be found on the links folder + :type move_old: bool + :param date_str: exact date_str to use in the cmorized file + :type: str + :param year: if frequency is yearly, this parameter is used to give the corresponding year + :type year: int + :param rename_var: if exists, the given variable will be renamed to the one given by var + :type rename_var: str + :param filetosend: path to the file to send to the CMOR repository + :type filetosend: str + :param region: specifies the region represented by the file. 
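(For example, a label such as 'labrador', as written by the convection sites diagnostic.) 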
If it is defined, the data will be appended to the
+            CMOR repository as a new region in the file or will overwrite it if the region was already present
+        :type region: str
+        :param domain: CMOR domain
+        :type domain: Domain
+        :param var: variable name
+        :type var: str
+        :param startdate: file's startdate
+        :type startdate: str
+        :param member: file's member
+        :type member: int
+        :param chunk: file's chunk
+        :type chunk: int
+        :param grid: file's grid (only needed if it is not the original)
+        :type grid: str
+        :param box: file's box (only needed to retrieve sections or averages)
+        :type box: Box
+        :param frequency: file's frequency (only needed if it is different from the default)
+        :type frequency: str
+        :param diagnostic: diagnostic used to generate the file
+        :type diagnostic: Diagnostic
+        :param cmorized: flag to indicate if the file was generated by the cmorization process
+        :type cmorized: bool
+        """
+        raise NotImplementedError()
+
+    def get_year(self, domain, var, startdate, member, year, grid=None, box=None):
+        """
+        Get a file containing all the data for one year for one variable
+        :param domain: variable's domain
+        :type domain: Domain
+        :param var: variable's name
+        :type var: str
+        :param startdate: startdate to retrieve
+        :type startdate: str
+        :param member: member to retrieve
+        :type member: int
+        :param year: year to retrieve
+        :type year: int
+        :param grid: variable's grid
+        :type grid: str
+        :param box: variable's box
+        :type box: Box
+        :return: path to the file containing the whole year of data
+        :rtype: str
+        """
+        raise NotImplementedError()
+
+    @staticmethod
+    def _get_final_var_name(box, var):
+        if box:
+            var += box.get_lon_str() + box.get_lat_str() + box.get_depth_str()
+        return var
+
+    def get_varfolder(self, domain, var, grid=None):
+        if grid:
+            var = '{0}-{1}'.format(var, grid)
+
+        if domain in [Domains.ocean, Domains.seaIce]:
+            return '{0}_f{1}h'.format(var, self.experiment.ocean_timestep)
+        else:
+            return '{0}_f{1}h'.format(var, self.experiment.atmos_timestep)
+
+    def _create_link(self, domain, filepath, frequency, var, grid, move_old):
+        freq_str = self.frequency_folder_name(frequency)
+
+        if not grid:
+            grid = 'original'
+
+        variable_folder = self.get_varfolder(domain, var)
+        vargrid_folder = self.get_varfolder(domain, var, grid)
+
+        if grid == 'original':
+            link_path = os.path.join(self.config.data_dir, self.experiment.expid, freq_str, variable_folder)
+            if os.path.islink(link_path):
+                link_path = os.path.join(self.config.data_dir, self.experiment.expid, freq_str, vargrid_folder)
+
+            Utils.create_folder_tree(link_path)
+        else:
+            link_path = os.path.join(self.config.data_dir, self.experiment.expid, freq_str, vargrid_folder)
+            Utils.create_folder_tree(link_path)
+            default_path = os.path.join(self.config.data_dir, self.experiment.expid, freq_str, variable_folder)
+            original_path = os.path.join(self.config.data_dir, self.experiment.expid, freq_str,
+                                         vargrid_folder.replace('-{0}_f'.format(grid), '-original_f'))
+            if os.path.islink(default_path):
+                os.remove(default_path)
+            elif os.path.isdir(default_path):
+                shutil.move(default_path, original_path)
+            os.symlink(link_path, default_path)
+
+        if move_old:
+            if self.lock.acquire(False):
+                if link_path not in self._checked_vars:
+                    self._checked_vars.append(link_path)
+                    self.lock.release()
+                    old_path = os.path.join(self.config.data_dir, self.experiment.expid, freq_str,
+                                            'old_{0}_f{1}h'.format(var, self.experiment.atmos_timestep))
+                    regex = re.compile(var + r'_[0-9]{6,8}\.nc')
+                    for filename in os.listdir(link_path):
+                        if regex.match(filename):
+                            Utils.create_folder_tree(old_path)
+                            
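# Relocate files that still follow the old naming convention
+                            # so they cannot collide with the links about to be created
+                            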
Utils.move_file(os.path.join(link_path, filename), + os.path.join(old_path, filename)) + + link_path = os.path.join(link_path, os.path.basename(filepath)) + if os.path.lexists(link_path): + os.remove(link_path) + if not os.path.exists(filepath): + raise ValueError('Original file {0} does not exists'.format(filepath)) + os.symlink(filepath, link_path) + + @staticmethod + def frequency_folder_name(frequency): + if frequency in ('d', 'daily', 'day'): + freq_str = 'daily_mean' + elif frequency.endswith('hr'): + freq_str = frequency[:-2] + 'hourly' + else: + freq_str = 'monthly_mean' + return freq_str + + # Overridable methods (not mandatory) + def link_file(self, domain, var, startdate, member, chunk=None, grid=None, box=None, + frequency=None, year=None, date_str=None, move_old=False): + """ + Creates the link of a given file from the CMOR repository. + + :param move_old: + :param date_str: + :param year: if frequency is yearly, this parameter is used to give the corresponding year + :type year: int + :param domain: CMOR domain + :type domain: Domain + :param var: variable name + :type var: str + :param startdate: file's startdate + :type startdate: str + :param member: file's member + :type member: int + :param chunk: file's chunk + :type chunk: int + :param grid: file's grid (only needed if it is not the original) + :type grid: str + :param box: file's box (only needed to retrieve sections or averages) + :type box: Box + :param frequency: file's frequency (only needed if it is different from the default) + :type frequency: str + :return: path to the copy created on the scratch folder + :rtype: str + """ + pass + + def prepare(self): + """ + Prepares the data to be used by the diagnostic. + :return: + """ + pass + + +class NetCDFFile(object): + """ + Class to manage netCDF file and pr + + :param remote_file: + :type remote_file: str + :param local_file: + :type local_file: str + :param domain: + :type domain: Domain + :param var: + :type var: str + :param cmor_var: + :type cmor_var: Variable + """ + def __init__(self, remote_file, local_file, domain, var, cmor_var): + self.remote_file = remote_file + self.local_file = local_file + self.domain = domain + self.var = var + self.cmor_var = cmor_var + self.region = None + self.frequency = None + + def send(self): + Utils.convert2netcdf4(self.local_file) + if self.region: + self._prepare_region() + + if self.cmor_var: + self._correct_metadata() + self._rename_coordinate_variables() + + Utils.move_file(self.local_file, self.remote_file) + + def _prepare_region(self): + if not os.path.exists(self.remote_file): + self._add_region_dimension_to_var() + else: + self._update_var_with_region_data() + Utils.nco.ncks(input=self.local_file, output=self.local_file, options='-O --fix_rec_dmn region') + + def _update_var_with_region_data(self): + temp = TempFile.get() + shutil.copyfile(self.remote_file, temp) + Utils.nco.ncks(input=temp, output=temp, options='-O --mk_rec_dmn region') + handler = Utils.openCdf(temp) + handler_send = Utils.openCdf(self.local_file) + value = handler_send.variables[self.var][:] + var_region = handler.variables['region'] + basin_index = np.where(var_region[:] == self.region) + if len(basin_index[0]) == 0: + var_region[var_region.shape[0]] = self.region + basin_index = var_region.shape[0] - 1 + + else: + basin_index = basin_index[0][0] + handler.variables[self.var][..., basin_index] = value + handler.close() + handler_send.close() + Utils.move_file(temp, self.local_file) + + def _add_region_dimension_to_var(self): + handler = 
Utils.openCdf(self.local_file) + handler.createDimension('region') + var_region = handler.createVariable('region', str, 'region') + var_region[0] = self.region + original_var = handler.variables[self.var] + new_var = handler.createVariable('new_var', original_var.datatype, + original_var.dimensions + ('region',)) + new_var.setncatts({k: original_var.getncattr(k) for k in original_var.ncattrs()}) + value = original_var[:] + new_var[..., 0] = value + handler.close() + Utils.nco.ncks(input=self.local_file, output=self.local_file, options='-O -x -v {0}'.format(self.var)) + Utils.rename_variable(self.local_file, 'new_var', self.var) + + def _correct_metadata(self): + handler = Utils.openCdf(self.local_file) + var_handler = handler.variables[self.var] + self._fix_variable_name(var_handler) + handler.modeling_realm = self.cmor_var.domain.name + handler.table_id = 'Table {0} (December 2013)'.format(self.cmor_var.domain.get_table_name(self.frequency)) + if self.cmor_var.units: + self._fix_units(var_handler) + handler.sync() + self._fix_coordinate_variables_metadata(handler) + var_type = var_handler.dtype + handler.close() + self._fix_values_metadata(var_type) + + def _fix_variable_name(self, var_handler): + var_handler.standard_name = self.cmor_var.standard_name + var_handler.long_name = self.cmor_var.long_name + var_handler.short_name = self.cmor_var.short_name + + def _fix_values_metadata(self, var_type): + if self.cmor_var.valid_min != '': + valid_min = '-a valid_min, {0}, o, {1}, "{2}" '.format(self.var, var_type.char, self.cmor_var.valid_min) + else: + valid_min = '' + if self.cmor_var.valid_max != '': + valid_max = '-a valid_max, {0}, o, {1}, "{2}" '.format(self.var, var_type.char, self.cmor_var.valid_max) + else: + valid_max = '' + Utils.nco.ncatted(input=self.local_file, output=self.local_file, + options='-O -a _FillValue,{0},o,{1},"1.e20" ' + '-a missingValue,{0},o,{1},"1.e20" {2}{3}'.format(self.var, var_type.char, + valid_min, valid_max)) + + def _fix_coordinate_variables_metadata(self, handler): + if 'lev' in handler.variables: + handler.variables['lev'].short_name = 'lev' + if self.domain == Domains.ocean: + handler.variables['lev'].standard_name = 'depth' + if 'lon' in handler.variables: + handler.variables['lon'].short_name = 'lon' + handler.variables['lon'].standard_name = 'longitude' + if 'lat' in handler.variables: + handler.variables['lat'].short_name = 'lat' + handler.variables['lat'].standard_name = 'latitude' + + def _fix_units(self, var_handler): + if 'units' not in var_handler.ncattrs(): + return + if var_handler.units == 'PSU': + var_handler.units = 'psu' + if var_handler.units == 'C' and self.cmor_var.units == 'K': + var_handler.units = 'deg_C' + if self.cmor_var.units != var_handler.units: + self._convert_units(var_handler) + var_handler.units = self.cmor_var.units + + def _convert_units(self, var_handler): + try: + self._convert_using_cfunits(var_handler) + except ValueError: + factor, offset = UnitConversion.get_conversion_factor_offset(var_handler.units, + self.cmor_var.units) + var_handler[:] = var_handler[:] * factor + offset + if 'valid_min' in var_handler.ncattrs(): + var_handler.valid_min = float(var_handler.valid_min) * factor + offset + if 'valid_max' in var_handler.ncattrs(): + var_handler.valid_max = float(var_handler.valid_max) * factor + offset + + def _convert_using_cfunits(self, var_handler): + new_unit = Units(self.cmor_var.units) + old_unit = Units(var_handler.units) + var_handler[:] = Units.conform(var_handler[:], old_unit, new_unit, inplace=True) + if 
'valid_min' in var_handler.ncattrs(): + var_handler.valid_min = Units.conform(float(var_handler.valid_min), old_unit, new_unit, + inplace=True) + if 'valid_max' in var_handler.ncattrs(): + var_handler.valid_max = Units.conform(float(var_handler.valid_max), old_unit, new_unit, + inplace=True) + + def _rename_coordinate_variables(self): + variables = dict() + variables['x'] = 'i' + variables['y'] = 'j' + variables['nav_lat_grid_V'] = 'lat' + variables['nav_lon_grid_V'] = 'lon' + variables['nav_lat_grid_U'] = 'lat' + variables['nav_lon_grid_U'] = 'lon' + variables['nav_lat_grid_T'] = 'lat' + variables['nav_lon_grid_T'] = 'lon' + Utils.rename_variables(self.local_file, variables, False, True) + + def add_diagnostic_history(self, diagnostic): + from earthdiagnostics.earthdiags import EarthDiags + history_line = 'Diagnostic {1} calculated with EarthDiagnostics version {0}'.format(EarthDiags.version, + diagnostic) + self._add_history_line(history_line) + + def add_cmorization_history(self): + from earthdiagnostics.earthdiags import EarthDiags + history_line = 'CMORized with Earthdiagnostics version {0}'.format(EarthDiags.version) + self._add_history_line(history_line) + + def _add_history_line(self, history_line): + utc_datetime = 'UTC ' + datetime.utcnow().isoformat() + history_line = '{0}: {1};'.format(utc_datetime, history_line) + + handler = Utils.openCdf(self.local_file) + try: + history_line = handler.history + history_line + except AttributeError: + history_line = history_line + handler.history = self.maybe_encode(history_line) + handler.close() + + @staticmethod + def maybe_encode(string, encoding='ascii'): + try: + return string.encode(encoding) + except UnicodeEncodeError: + return string + + +class UnitConversion(object): + """ + Class to manage unit conversions + """ + _dict_conversions = None + + @classmethod + def load_conversions(cls): + """ + Load conversions from the configuration file + """ + cls._dict_conversions = dict() + with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'conversions.csv'), 'rb') as csvfile: + reader = csv.reader(csvfile, dialect='excel') + for line in reader: + if line[0] == 'original': + continue + cls.add_conversion(UnitConversion(line[0], line[1], line[2], line[3])) + + @classmethod + def add_conversion(cls, conversion): + """ + Adds a conversion to the dictionary + + :param conversion: conversion to add + :type conversion: UnitConversion + """ + cls._dict_conversions[(conversion.source, conversion.destiny)] = conversion + + def __init__(self, source, destiny, factor, offset): + self.source = source + self.destiny = destiny + self.factor = float(factor) + self.offset = float(offset) + + @classmethod + def get_conversion_factor_offset(cls, input_units, output_units): + """ + Gets the conversion factor and offset for two units . 
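For example, converting 'degC' to 'K' (see conversions.csv) yields factor=1 and offset=273.15, and scaled unit strings such as '10^3 m' are also understood. 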
The conversion has to be done in the following way:
+        converted = original * factor + offset
+
+        :param input_units: original units
+        :type input_units: str
+        :param output_units: destination units
+        :type output_units: str
+        :return: factor and offset
+        :rtype: [float, float]
+        """
+        units = input_units.split()
+        if len(units) == 1:
+            scale_unit = 1
+            unit = units[0]
+        else:
+            if '^' in units[0]:
+                values = units[0].split('^')
+                scale_unit = pow(int(values[0]), int(values[1]))
+            else:
+                scale_unit = float(units[0])
+            unit = units[1]
+
+        units = output_units.split()
+        if len(units) == 1:
+            scale_new_unit = 1
+            new_unit = units[0]
+        else:
+            if '^' in units[0]:
+                values = units[0].split('^')
+                scale_new_unit = pow(int(values[0]), int(values[1]))
+            else:
+                scale_new_unit = float(units[0])
+            new_unit = units[1]
+
+        factor, offset = UnitConversion._get_factor(new_unit, unit)
+        if factor is None:
+            return None, None
+        factor = factor * scale_unit / float(scale_new_unit)
+        offset /= float(scale_new_unit)
+
+        return factor, offset
+
+    @classmethod
+    def _get_factor(cls, new_unit, unit):
+        # The table stores each conversion in one direction only (by convention, the one
+        # with a factor greater than 1); the inverse conversion is derived from it here
+        if unit == new_unit:
+            return 1, 0
+        elif (unit, new_unit) in cls._dict_conversions:
+            conversion = cls._dict_conversions[(unit, new_unit)]
+            return conversion.factor, conversion.offset
+        elif (new_unit, unit) in cls._dict_conversions:
+            conversion = cls._dict_conversions[(new_unit, unit)]
+            # Inverting converted = original * factor + offset gives
+            # original = converted / factor - offset / factor
+            return 1 / conversion.factor, -conversion.offset / conversion.factor
+        else:
+            return None, None
+
diff --git a/earthdiagnostics/diagnostic.py b/earthdiagnostics/diagnostic.py
new file mode 100644
index 0000000000000000000000000000000000000000..af71dea66e4b25be3a672f8c119ef640cb518e4c
--- /dev/null
+++ b/earthdiagnostics/diagnostic.py
@@ -0,0 +1,106 @@
+# coding=utf-8
+class Diagnostic(object):
+    """
+    Base class for the diagnostics. Provides a common interface for them and also
+    has a mechanism that allows diagnostic retrieval by name.
+
+    :param data_manager: data manager that will be used to store and retrieve the necessary data
+    :type data_manager: DataManager
+    """
+
+    alias = None
+    """
+    Alias to call the diagnostic. Must be overridden in the derived classes
+    """
+    _diag_list = dict()
+
+    def __init__(self, data_manager):
+        self.data_manager = data_manager
+        self.required_vars = []
+        self.generated_vars = []
+        self.can_run_multiple_instances = True
+
+    def __repr__(self):
+        return str(self)
+
+    @staticmethod
+    def register(cls):
+        """
+        Register a new diagnostic using the given alias. It must be called using the derived class.
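+
+        Example (with a hypothetical derived class)::
+
+            class MyDiagnostic(Diagnostic):
+                alias = 'mydiag'
+
+            Diagnostic.register(MyDiagnostic)
+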
+ :param cls: diagnostic class to register + :type cls: Diagnostic + """ + if not issubclass(cls, Diagnostic): + raise ValueError('Class {0} must be derived from Diagnostic'.format(cls)) + if cls.alias is None: + raise ValueError('Diagnostic class {0} must have defined an alias'.format(cls)) + Diagnostic._diag_list[cls.alias] = cls + + # noinspection PyProtectedMember + @staticmethod + def get_diagnostic(name): + """ + Return the class for a diagnostic given its name + + :param name: diagnostic alias + :type name: str + :return: the selected Diagnostic class, None if name can not be found + :rtype: Diagnostic + """ + if name in Diagnostic._diag_list.keys(): + return Diagnostic._diag_list[name] + return None + + def send_file(self, filetosend, domain, var, startdate, member, chunk=None, grid=None, region=None, + box=None, rename_var=None, frequency=None, year=None, date_str=None, move_old=False): + """ + + :param filetosend: + :param domain: + :type domain: Domain + :param var: + :param startdate: + :param member: + :param chunk: + :param grid: + :param region: + :param box: + :param rename_var: + :param frequency: + :param year: + :param date_str: + :param move_old: + :return: + """ + self.data_manager.send_file(filetosend, domain, var, startdate, member, chunk, grid, region, + box, rename_var, frequency, year, date_str, move_old, diagnostic=self) + + def compute(self): + """ + Calculates the diagnostic and stores the output + + Must be implemented by derived classes + """ + raise NotImplementedError("Class must override compute method") + + @classmethod + def generate_jobs(cls, diags, options): + """ + Generate the instances of the diagnostics that will be run by the manager + + Must be implemented by derived classes. + + :param diags: diagnostics manager + :type diags: Diags + :param options: list of strings containing the options passed to the diagnostic + :type options: list[str] + :return: + """ + raise NotImplementedError("Class must override generate_jobs class method") + + def __str__(self): + """ + Must be implemented by derived classes + :return: + """ + return 'Developer must override base class __str__ method' diff --git a/earthdiagnostics/earthdiags.py b/earthdiagnostics/earthdiags.py new file mode 100755 index 0000000000000000000000000000000000000000..d496ac21ab98aa21cbd5fa02fabd2670376c9381 --- /dev/null +++ b/earthdiagnostics/earthdiags.py @@ -0,0 +1,329 @@ +#!/usr/bin/env python +# coding=utf-8 + +import Queue +import argparse +import shutil +import threading +import pkg_resources + +import netCDF4 +import operator +import os +from autosubmit.date.chunk_date_lib import * + +from config import Config +from earthdiagnostics.cmormanager import CMORManager +from earthdiagnostics.threddsmanager import THREDDSManager +from earthdiagnostics import cdftools +from earthdiagnostics.utils import TempFile +from earthdiagnostics.diagnostic import Diagnostic +from earthdiagnostics.ocean import * +from earthdiagnostics.general import * +from earthdiagnostics.statistics import * +from ocean import ConvectionSites, Gyres, Psi, MaxMoc, AreaMoc, Moc, VerticalMean, VerticalMeanMeters, Interpolate, \ + AverageSection, CutSection, MixedLayerSaltContent, Siasiesiv +from utils import Utils + + +class EarthDiags(object): + """ + Launcher class for the diagnostics + + :param config_file: path to the configuration file + :type config_file: str + """ + # Get the version number from the relevant file. 
If not found, it is taken from the installed earthdiagnostics package
+    scriptdir = os.path.abspath(os.path.dirname(__file__))
+    if not os.path.exists(os.path.join(scriptdir, 'VERSION')):
+        scriptdir = os.path.join(scriptdir, os.path.pardir)
+
+    version_path = os.path.join(scriptdir, 'VERSION')
+    readme_path = os.path.join(scriptdir, 'README')
+    changes_path = os.path.join(scriptdir, 'CHANGES')
+    documentation_path = os.path.join(scriptdir, 'EarthDiagnostics.pdf')
+    if os.path.isfile(version_path):
+        with open(version_path) as f:
+            version = f.read().strip()
+    else:
+        version = pkg_resources.require("earthdiagnostics")[0].version
+
+    def __init__(self, config_file):
+        Log.debug('Initialising Diags')
+        self.config = Config(config_file)
+
+        TempFile.scratch_folder = self.config.scratch_dir
+        cdftools.path = self.config.cdftools_path
+        self._create_dic_variables()
+        self.time = dict()
+        self.data_manager = None
+        self.threads = None
+        Log.debug('Diags ready')
+        Log.info('Running diags for experiment {0}, startdates {1}, members {2}', self.config.experiment.expid,
+                 self.config.experiment.startdates, self.config.experiment.members)
+
+    @staticmethod
+    def parse_args():
+        """
+        Entry point for the Earth Diagnostics. For more detailed documentation, use the -h option
+        """
+        parser = argparse.ArgumentParser(description='Main executable for Earth Diagnostics.')
+        parser.add_argument('-v', '--version', action='version', version=EarthDiags.version,
+                            help="returns Earth Diagnostics' version number and exits")
+        parser.add_argument('--doc', action='store_true',
+                            help="opens the documentation and exits")
+        parser.add_argument('--clean', action='store_true',
+                            help="cleans the scratch folder and exits")
+        parser.add_argument('-lf', '--logfile', choices=('EVERYTHING', 'DEBUG', 'INFO', 'RESULT', 'USER_WARNING',
+                                                         'WARNING', 'ERROR', 'CRITICAL', 'NO_LOG'),
+                            default='DEBUG', type=str,
+                            help="sets the log level for the log file")
+        parser.add_argument('-lc', '--logconsole', choices=('EVERYTHING', 'DEBUG', 'INFO', 'RESULT', 'USER_WARNING',
+                                                            'WARNING', 'ERROR', 'CRITICAL', 'NO_LOG'),
+                            default='INFO', type=str,
+                            help="sets the log level for the console")
+
+        parser.add_argument('-log', '--logfilepath', default=None, type=str)
+
+        parser.add_argument('-f', '--configfile', default='diags.conf', type=str)
+
+        args = parser.parse_args()
+        if args.doc:
+            Log.info('Opening documentation...')
+            doc_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'EarthDiagnostics.pdf')
+            Utils.execute_shell_command(('xdg-open', doc_path))
+            Log.result('Documentation opened!')
+            return True
+        Log.set_console_level(args.logconsole)
+        Log.set_file_level(args.logfile)
+
+        if Log.console_handler.level <= Log.DEBUG:
+            Utils.cdo.debug = True
+            Utils.nco.debug = True
+
+        if args.logfilepath:
+            Log.set_file(Utils.expand_path(args.logfilepath))
+
+        config_file_path = Utils.expand_path(args.configfile)
+        if not os.path.isfile(config_file_path):
+            Log.critical('Configuration file {0} cannot be found', config_file_path)
+            return False
+
+        diags = EarthDiags(config_file_path)
+        if args.clean:
+            diags.clean()
+        else:
+            diags.run()
+        TempFile.clean()
+        return True
+
+    def _create_dic_variables(self):
+        self.dic_variables = dict()
+        self.dic_variables['x'] = 'i'
+        self.dic_variables['y'] = 'j'
+        self.dic_variables['z'] = 'lev'
+        self.dic_variables['nav_lon'] = 'lon'
+        self.dic_variables['nav_lat'] = 'lat'
+        self.dic_variables['nav_lev'] = 'lev'
+        self.dic_variables['time_counter'] = 'time'
+        self.dic_variables['t'] = 'time'
+
+    def run(self):
+        """
+        Run the diagnostics
+        """
+        
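# Overall flow: prepare the mesh and mask files, register the available
+        # diagnostics, build the data manager and compute the resulting job list
+        # in parallel worker threads.
+        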
Log.debug('Using netCDF version {0}', netCDF4.getlibversion())
+        if not os.path.exists(self.config.scratch_dir):
+            os.makedirs(self.config.scratch_dir)
+        os.chdir(self.config.scratch_dir)
+
+        self._prepare_mesh_files()
+
+        self._register_diagnostics()
+
+        if self.config.data_adaptor == 'CMOR':
+            self.data_manager = CMORManager(self.config)
+        elif self.config.data_adaptor == 'THREDDS':
+            self.data_manager = THREDDSManager(self.config)
+        self.data_manager.prepare()
+
+        # Run diagnostics
+        Log.info('Running diagnostics')
+        list_jobs = self.prepare_job_list()
+
+        time = datetime.datetime.now()
+        Log.info("Starting to compute at {0}", time)
+        self.threads = min(Utils.available_cpu_count(), self.config.max_cores)
+        Log.info('Using {0} threads', self.threads)
+        threads = list()
+        for num_thread in range(0, self.threads):
+            self.time[num_thread] = dict()
+            t = threading.Thread(target=EarthDiags._run_jobs, args=(self, list_jobs, num_thread))
+            threads.append(t)
+            t.start()
+
+        for t in threads:
+            t.join()
+
+        TempFile.clean()
+        finish_time = datetime.datetime.now()
+        Log.result("Diagnostics finished at {0}", finish_time)
+        Log.result("Time elapsed: {0}\n", finish_time - time)
+        self.print_stats()
+
+    def print_stats(self):
+        Log.info('Time consumed by each diagnostic class')
+        Log.info('--------------------------------------')
+        total = dict()
+        for num_thread in range(0, self.threads):
+            for key, value in self.time[num_thread].items():
+                if key in total:
+                    total[key] += value
+                else:
+                    total[key] = value
+        for diag, time in sorted(total.items(), key=operator.itemgetter(1)):
+            Log.info('{0:23} {1:}', diag.__name__, time)
+
+    def prepare_job_list(self):
+        list_jobs = Queue.Queue()
+        for fulldiag in self.config.get_commands():
+            Log.info("Adding {0} to diagnostic list", fulldiag)
+            diag_options = fulldiag.split(',')
+
+            diag_class = Diagnostic.get_diagnostic(diag_options[0])
+            if diag_class:
+                for job in diag_class.generate_jobs(self, diag_options):
+                    list_jobs.put(job)
+                continue
+            else:
+                Log.error('{0} is not an available diagnostic', diag_options[0])
+        return list_jobs
+
+    @staticmethod
+    def _register_diagnostics():
+        Diagnostic.register(MixedLayerSaltContent)
+        Diagnostic.register(Siasiesiv)
+        Diagnostic.register(VerticalMean)
+        Diagnostic.register(VerticalMeanMeters)
+        Diagnostic.register(Interpolate)
+        Diagnostic.register(InterpolateCDO)
+        Diagnostic.register(Moc)
+        Diagnostic.register(AreaMoc)
+        Diagnostic.register(MaxMoc)
+        Diagnostic.register(Psi)
+        Diagnostic.register(Gyres)
+        Diagnostic.register(ConvectionSites)
+        Diagnostic.register(CutSection)
+        Diagnostic.register(AverageSection)
+        Diagnostic.register(MixedLayerHeatContent)
+        Diagnostic.register(HeatContentLayer)
+        Diagnostic.register(HeatContent)
+        Diagnostic.register(MonthlyMean)
+        Diagnostic.register(Rewrite)
+        Diagnostic.register(Relink)
+
+        Diagnostic.register(MonthlyPercentil)
+
+    def clean(self):
+        Log.info('Removing scratch folder...')
+        if os.path.exists(self.config.scratch_dir):
+            shutil.rmtree(self.config.scratch_dir)
+        Log.result('Scratch folder removed')
+
+    def _run_jobs(self, queue, numthread):
+        def _run_job(current_job, retrials=1):
+            while retrials >= 0:
+                try:
+                    Log.info('Starting {0}', current_job)
+                    time = datetime.datetime.now()
+                    current_job.compute()
+                    time = datetime.datetime.now() - time
+                    if type(current_job) in self.time[numthread]:
+                        self.time[numthread][type(current_job)] += time
+                    else:
+                        self.time[numthread][type(current_job)] = time
+                    Log.result('Finished {0}', current_job)
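+                    # Success: runtime has been accumulated per diagnostic class for print_stats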
+ return True + except Exception as ex: + retrials -= 1 + Log.error('Job {0} failed: {1}', job, ex) + return False + count = 0 + failed_jobs = list() + + while not queue.empty(): + try: + job = queue.get(timeout=1) + if _run_job(job): + count += 1 + else: + failed_jobs.append(str(job)) + queue.task_done() + except Queue.Empty: + continue + + if len(failed_jobs) == 0: + Log.result('Thread {0} finished after taking care of {1} tasks', numthread, count) + else: + Log.result('Thread {0} finished after running successfully {1} of {2} tasks', numthread, count, + count + len(failed_jobs)) + for job in failed_jobs: + Log.error('Job {0} could not be run', job) + return + + def _prepare_mesh_files(self): + Log.info('Copying mesh files') + con_files = self.config.con_files + model_version = self.config.experiment.model_version + restore_meshes = self.config.restore_meshes + + self._copy_file(os.path.join(con_files, 'mesh_mask_nemo.{0}.nc'.format(model_version)), 'mesh_hgr.nc', + restore_meshes) + self._link_file('mesh_hgr.nc', 'mesh_zgr.nc') + self._link_file('mesh_hgr.nc', 'mask.nc') + + self._copy_file(os.path.join(con_files, 'new_maskglo.{0}.nc'.format(model_version)), 'new_maskglo.nc', + restore_meshes) + self._copy_file(os.path.join(con_files, 'mask.regions.{0}.nc'.format(model_version)), + 'mask_regions.nc', restore_meshes) + self._copy_file(os.path.join(con_files, 'mask.regions.3d.{0}.nc'.format(model_version)), + 'mask_regions.3d.nc', restore_meshes) + + Log.result('Mesh files ready!') + + def _copy_file(self, source, destiny, force): + if not os.path.exists(source): + Log.user_warning('File {0} is not available for {1}', destiny, self.config.experiment.model_version) + return + + if not force and os.path.exists(destiny): + if os.stat(source).st_size == os.stat(destiny).st_size: + Log.info('File {0} already exists', destiny) + return + + Log.info('Creating file {0}', destiny) + shutil.copy(source, destiny) + Log.info('File {0} ready', destiny) + Utils.rename_variables('mesh_hgr.nc', self.dic_variables, False, True) + + def _link_file(self, source, destiny): + if not os.path.exists(source): + Log.user_warning('File {0} is not available for {1}', destiny, self.config.experiment.model_version) + return + + if os.path.lexists(destiny): + os.remove(destiny) + + os.symlink(source, destiny) + Log.info('File {0} ready', destiny) + + +def main(): + EarthDiags.parse_args() + + +if __name__ == "__main__": + main() diff --git a/earthdiagnostics/general/__init__.py b/earthdiagnostics/general/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..c53fb33f877c8f475a81aa5f996ea1bf873dc172 --- /dev/null +++ b/earthdiagnostics/general/__init__.py @@ -0,0 +1,4 @@ +# coding=utf-8 +from earthdiagnostics.general.monthlymean import MonthlyMean +from earthdiagnostics.general.rewrite import Rewrite +from earthdiagnostics.general.relink import Relink diff --git a/earthdiagnostics/general/monthlymean.py b/earthdiagnostics/general/monthlymean.py new file mode 100644 index 0000000000000000000000000000000000000000..38224e857dca5b2da7f5293f014d9f2d39be6c63 --- /dev/null +++ b/earthdiagnostics/general/monthlymean.py @@ -0,0 +1,102 @@ +# coding=utf-8 + +import os +from earthdiagnostics.diagnostic import Diagnostic +from earthdiagnostics.utils import Utils, TempFile +from earthdiagnostics.variable import Domain + + +class MonthlyMean(Diagnostic): + """ + Calculates monthly mean for a given variable + + :original author: Javier Vegas-Regidor + + :created: July 2016 + + :param data_manager: data 
management object + :type data_manager: DataManager + :param startdate: startdate + :type startdate: str + :param member: member number + :type member: int + :param chunk: chunk's number + :type chunk: int + :param variable: variable's name + :type variable: str + :param domain: variable's domain + :type domain: Domain + :param frequency: original frequency + :type frequency: str + :param grid: original data grid + :type grid: str + """ + + alias = 'monmean' + "Diagnostic alias for the configuration file" + + def __init__(self, data_manager, startdate, member, chunk, domain, variable, frequency, grid): + Diagnostic.__init__(self, data_manager) + self.startdate = startdate + self.member = member + self.chunk = chunk + self.variable = variable + self.domain = domain + self.frequency = frequency + self.grid = grid + + def __str__(self): + return 'Calculate monthly mean Startdate: {0} Member: {1} Chunk: {2} ' \ + 'Variable: {3}:{4}'.format(self.startdate, self.member, self.chunk, self.domain, self.variable) + + def __eq__(self, other): + return self.startdate == other.startdate and self.member == other.member and self.chunk == other.chunk and \ + self.domain == other.domain and self.variable == other.variable and self.frequency == other.frequency and \ + self.grid == other.grid + + @classmethod + def generate_jobs(cls, diags, options): + """ + Creates a job for each chunk to compute the diagnostic + + :param diags: Diagnostics manager class + :type diags: Diags + :param options: variable, domain, frequency=day, grid='' + :type options: list[str] + :return: + """ + num_options = len(options) - 1 + if num_options < 2: + raise Exception('You must specify the variable and domain to average monthly') + if num_options > 4: + raise Exception('You must specify between 2 and 4 parameters for the monthly mean diagnostic') + variable = options[1] + domain = Domain(options[2]) + if num_options >= 3: + frequency = options[3] + else: + frequency = 'day' + if num_options >= 4: + grid = options[4] + else: + grid = '' + + job_list = list() + for startdate, member, chunk in diags.config.experiment.get_chunk_list(): + job_list.append(MonthlyMean(diags.data_manager, startdate, member, chunk, domain, variable, + frequency, grid)) + return job_list + + def compute(self): + """ + Runs the diagnostic + """ + temp = TempFile.get() + variable_file = self.data_manager.get_file(self.domain, self.variable, self.startdate, self.member, self.chunk, + frequency=self.frequency, grid=self.grid) + Utils.cdo.monmean(input=variable_file, output=temp, options='-O') + os.remove(variable_file) + + self.send_file(temp, self.domain, self.variable, self.startdate, self.member, self.chunk, + frequency='mon', grid=self.grid) + diff --git a/earthdiagnostics/general/relink.py b/earthdiagnostics/general/relink.py new file mode 100644 index 0000000000000000000000000000000000000000..3009f20e1c9cfd8116668617406537de9e3354ee --- /dev/null +++ b/earthdiagnostics/general/relink.py @@ -0,0 +1,84 @@ +# coding=utf-8 +from earthdiagnostics.diagnostic import Diagnostic +from earthdiagnostics.variable import Domain + + +class Relink(Diagnostic): + """ + Recreates the links for the variable specified + + :original author: Javier Vegas-Regidor + + :created: September 2016 + + :param data_manager: data management object + :type data_manager: DataManager + :param startdate: startdate + :type startdate: str + :param member: member number + :type member: int + :param chunk: chunk's number + :type chunk: int + :param variable: variable's name + :type 
variable: str
+    :param domain: variable's domain
+    :type domain: Domain
+    :param move_old: if true, looks for files following the old convention and moves them to avoid collisions
+    :type move_old: bool
+    """
+
+    alias = 'relink'
+    "Diagnostic alias for the configuration file"
+
+    def __init__(self, data_manager, startdate, member, chunk, domain, variable, move_old):
+        Diagnostic.__init__(self, data_manager)
+        self.startdate = startdate
+        self.member = member
+        self.chunk = chunk
+        self.variable = variable
+        self.domain = domain
+        self.move_old = move_old
+
+    def __str__(self):
+        return 'Relink output Startdate: {0} Member: {1} Chunk: {2} Move old: {5} ' \
+               'Variable: {3}:{4}'.format(self.startdate, self.member, self.chunk, self.domain, self.variable,
+                                          self.move_old)
+
+    def __eq__(self, other):
+        return self.startdate == other.startdate and self.member == other.member and self.chunk == other.chunk and \
+            self.domain == other.domain and self.variable == other.variable
+
+    @classmethod
+    def generate_jobs(cls, diags, options):
+        """
+        Creates a job for each chunk to compute the diagnostic
+
+        :param diags: Diagnostics manager class
+        :type diags: Diags
+        :param options: variable, domain, move_old=True
+        :type options: list[str]
+        :return:
+        """
+        num_options = len(options) - 1
+        if num_options < 2:
+            raise Exception('You must specify the variable and domain to link')
+        if num_options > 3:
+            raise Exception('You must specify between 2 and 3 parameters for the relink diagnostic')
+        variable = options[1]
+        domain = Domain(options[2])
+        if num_options >= 3:
+            # bool() returns True for any non-empty string, so the text must be compared explicitly
+            move_old = options[3].lower() == 'true'
+        else:
+            move_old = True
+        job_list = list()
+        for startdate, member, chunk in diags.config.experiment.get_chunk_list():
+            job_list.append(Relink(diags.data_manager, startdate, member, chunk, domain, variable, move_old))
+        return job_list
+
+    def compute(self):
+        """
+        Runs the diagnostic
+        """
+        self.data_manager.link_file(self.domain, self.variable, self.startdate, self.member, self.chunk,
+                                    move_old=self.move_old)
+
diff --git a/earthdiagnostics/general/rewrite.py b/earthdiagnostics/general/rewrite.py
new file mode 100644
index 0000000000000000000000000000000000000000..ba89b5e8454a45fb733fd477126e61d6229da8dd
--- /dev/null
+++ b/earthdiagnostics/general/rewrite.py
@@ -0,0 +1,84 @@
+# coding=utf-8
+from earthdiagnostics.diagnostic import Diagnostic
+from earthdiagnostics.variable import Domain
+
+
+class Rewrite(Diagnostic):
+    """
+    Rewrites files without doing any calculations.
+    Can be useful to convert units or to correct wrong metadata.
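+
+    Usage (diagnostic option string as parsed by generate_jobs; the variable and domain shown are illustrative)::
+
+        rewrite,tos,ocean
+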
+    :original author: Javier Vegas-Regidor
+
+    :created: July 2016
+
+    :param data_manager: data management object
+    :type data_manager: DataManager
+    :param startdate: startdate
+    :type startdate: str
+    :param member: member number
+    :type member: int
+    :param chunk: chunk's number
+    :type chunk: int
+    :param variable: variable's name
+    :type variable: str
+    :param domain: variable's domain
+    :type domain: Domain
+    :param grid: variable's grid
+    :type grid: str
+    """
+
+    alias = 'rewrite'
+    "Diagnostic alias for the configuration file"
+
+    def __init__(self, data_manager, startdate, member, chunk, domain, variable, grid):
+        Diagnostic.__init__(self, data_manager)
+        self.startdate = startdate
+        self.member = member
+        self.chunk = chunk
+        self.variable = variable
+        self.domain = domain
+        self.grid = grid
+
+    def __str__(self):
+        return 'Rewrite output Startdate: {0} Member: {1} Chunk: {2} ' \
+               'Variable: {3}:{4}'.format(self.startdate, self.member, self.chunk, self.domain, self.variable)
+
+    def __eq__(self, other):
+        return self.startdate == other.startdate and self.member == other.member and self.chunk == other.chunk and \
+            self.domain == other.domain and self.variable == other.variable
+
+    @classmethod
+    def generate_jobs(cls, diags, options):
+        """
+        Creates a job for each chunk to compute the diagnostic
+
+        :param diags: Diagnostics manager class
+        :type diags: Diags
+        :param options: variable, domain, grid
+        :type options: list[str]
+        :return:
+        """
+        num_options = len(options) - 1
+        if num_options < 2:
+            raise Exception('You must specify the variable and domain to rewrite')
+        if num_options > 3:
+            raise Exception('You must specify between 2 and 3 parameters for the rewrite diagnostic')
+        variable = options[1]
+        domain = Domain(options[2])
+        if num_options >= 3:
+            grid = options[3]
+        else:
+            grid = None
+        job_list = list()
+        for startdate, member, chunk in diags.config.experiment.get_chunk_list():
+            job_list.append(Rewrite(diags.data_manager, startdate, member, chunk, domain, variable, grid))
+        return job_list
+
+    def compute(self):
+        """
+        Runs the diagnostic
+        """
+        variable_file = self.data_manager.get_file(self.domain, self.variable, self.startdate, self.member, self.chunk,
+                                                   grid=self.grid)
+        self.send_file(variable_file, self.domain, self.variable, self.startdate, self.member, self.chunk,
+                       grid=self.grid)
+
diff --git a/earthdiagnostics/ocean/__init__.py b/earthdiagnostics/ocean/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..0628fb68cbf6a22162783c241bd460736c9a3138
--- /dev/null
+++ b/earthdiagnostics/ocean/__init__.py
@@ -0,0 +1,21 @@
+# coding=utf-8
+"""
+Module containing the diagnostics related to the ocean output
+"""
+from earthdiagnostics.ocean.heatcontent import HeatContent
+from earthdiagnostics.ocean.moc import Moc
+from earthdiagnostics.ocean.areamoc import AreaMoc
+from earthdiagnostics.ocean.maxmoc import MaxMoc
+from earthdiagnostics.ocean.psi import Psi
+from earthdiagnostics.ocean.gyres import Gyres
+from earthdiagnostics.ocean.convectionsites import ConvectionSites
+from earthdiagnostics.ocean.cutsection import CutSection
+from earthdiagnostics.ocean.averagesection import AverageSection
+from earthdiagnostics.ocean.interpolate import Interpolate
+from earthdiagnostics.ocean.interpolatecdo import InterpolateCDO
+from earthdiagnostics.ocean.verticalmeanmeters import VerticalMeanMeters
+from earthdiagnostics.ocean.verticalmean import VerticalMean
+from earthdiagnostics.ocean.mixedlayersaltcontent import 
MixedLayerSaltContent +from earthdiagnostics.ocean.siasiesiv import Siasiesiv +from earthdiagnostics.ocean.heatcontentlayer import HeatContentLayer +from earthdiagnostics.ocean.mixedlayerheatcontent import MixedLayerHeatContent diff --git a/earthdiagnostics/ocean/areamoc.py b/earthdiagnostics/ocean/areamoc.py new file mode 100644 index 0000000000000000000000000000000000000000..3942b7f3b3b37a27372b3143d466fb9f7f2d74ab --- /dev/null +++ b/earthdiagnostics/ocean/areamoc.py @@ -0,0 +1,152 @@ +# coding=utf-8 +import numpy as np +from earthdiagnostics.constants import Basins +from earthdiagnostics.diagnostic import Diagnostic +from earthdiagnostics.box import Box +from earthdiagnostics.utils import Utils, TempFile +import os + +from earthdiagnostics.variable import Domains + + +class AreaMoc(Diagnostic): + """ + Compute an Atlantic MOC index by averaging the meridional overturning + in a latitude band between 1km and 2km + or any other index averaging the meridional overturning in + a given basin and a given domain + + :original author: Virginie Guemas + :contributor: Javier Vegas-Regidor + + :created: March 2012 + :last modified: June 2016 + + :param data_manager: data management object + :type data_manager: DataManager + :param startdate: startdate + :type startdate: str + :param member: member number + :type member: int + :param chunk: chunk's number + :type chunk: int + :param basin: basin to compute + :type basin: Basin + :param box: box to compute + :type box: Box + """ + + alias = 'mocarea' + "Diagnostic alias for the configuration file" + + def __init__(self, data_manager, startdate, member, chunk, basin, box): + Diagnostic.__init__(self, data_manager) + self.basin = basin + self.startdate = startdate + self.member = member + self.chunk = chunk + self.required_vars = ['vo'] + self.generated_vars = ['vsftmyz'] + self.box = box + + def __eq__(self, other): + return self.startdate == other.startdate and self.member == other.member and self.chunk == other.chunk and \ + self.basin == other.basin and self.box == other.box + + def __str__(self): + return 'Area MOC Startdate: {0} Member: {1} Chunk: {2} Box: {3}'.format(self.startdate, self.member, + self.chunk, self.box) + + @classmethod + def generate_jobs(cls, diags, options): + """ + Creates a job for each chunk to compute the diagnostic + + :param diags: Diagnostics manager class + :type diags: Diags + :param options: minimum latitude, maximum latitude, minimum depth, maximum depth, basin=Global + :type options: list[str] + :return: + """ + num_options = len(options) - 1 + if num_options < 4: + raise Exception('You must specify the box to use') + if num_options > 5: + raise Exception('You must specify between 4 and 5 parameters for area moc diagnostic') + box = Box() + box.min_lat = int(options[1]) + box.max_lat = int(options[2]) + box.min_depth = int(options[3]) + box.max_depth = int(options[4]) + if num_options > 4: + basin = Basins.parse(options[5]) + else: + basin = Basins.Global + + job_list = list() + for startdate, member, chunk in diags.config.experiment.get_chunk_list(): + job_list.append(AreaMoc(diags.data_manager, startdate, member, chunk, basin, box)) + return job_list + + def compute(self): + """ + Runs the diagnostic + """ + nco = Utils.nco + cdo = Utils.cdo + temp2 = TempFile.get() + + temp = self.data_manager.get_file('ocean', 'vsftmyz', self.startdate, self.member, self.chunk) + + handler = Utils.openCdf(temp) + if 'i' in handler.dimensions: + handler.close() + nco.ncwa(input=temp, output=temp, options='-O -a i') + handler = 
Utils.openCdf(temp)
+
+        basin_index = np.where(handler.variables['basin'][:] == self.basin.fullname)
+        lat_values = handler.variables['lat'][:]
+        lat_type = handler.variables['lat'].dtype
+        lat_units = handler.variables['lat'].units
+        lat_long_name = handler.variables['lat'].long_name
+
+        handler.close()
+
+        if len(basin_index[0]) == 0:
+            raise Exception('Basin {0} not defined in file'.format(self.basin.fullname))
+        basin_index = basin_index[0][0]
+        nco.ncks(input=temp, output=temp, options='-O -d basin,{0}'.format(basin_index))
+        # To remove basin dimension
+        nco.ncwa(input=temp, output=temp, options='-O -a basin')
+
+        source = Utils.openCdf(temp)
+        destiny = Utils.openCdf(temp2, 'w')
+
+        Utils.copy_dimension(source, destiny, 'time')
+        Utils.copy_dimension(source, destiny, 'lev')
+        Utils.copy_dimension(source, destiny, 'j', new_names={'j': 'lat'})
+
+        lat_variable = destiny.createVariable('lat', lat_type, 'lat')
+        lat_variable[:] = lat_values[:]
+        lat_variable.units = lat_units
+        lat_variable.long_name = lat_long_name
+
+        Utils.copy_variable(source, destiny, 'lev')
+        Utils.copy_variable(source, destiny, 'time')
+        Utils.copy_variable(source, destiny, 'vsftmyz', new_names={'j': 'lat'})
+
+        source.close()
+        destiny.close()
+
+        nco.ncks(input=temp2, output=temp,
+                 options='-O -d lev,{0:.1f},{1:.1f} -d lat,{2:.1f},{3:.1f}'.format(self.box.min_depth,
+                                                                                   self.box.max_depth,
+                                                                                   self.box.min_lat,
+                                                                                   self.box.max_lat))
+
+        cdo.vertmean(input=temp, output=temp2)
+        os.remove(temp)
+        nco.ncap2(input=temp2, output=temp2, options='-O -s "coslat[lat]=cos(lat[lat]*3.14159265/180.0)"')
+        nco.ncwa(input=temp2, output=temp2, options='-w coslat -a lat')
+        nco.ncks(input=temp2, output=temp2, options='-O -v vsftmyz,time')
+        self.send_file(temp2, Domains.ocean, 'vsftmyz', self.startdate, self.member, self.chunk, box=self.box)
diff --git a/earthdiagnostics/ocean/averagesection.py b/earthdiagnostics/ocean/averagesection.py
new file mode 100644
index 0000000000000000000000000000000000000000..50961df6910496634cb2c8bdb602d532db7385e3
--- /dev/null
+++ b/earthdiagnostics/ocean/averagesection.py
@@ -0,0 +1,102 @@
+# coding=utf-8
+import os
+from earthdiagnostics.box import Box
+from earthdiagnostics.diagnostic import Diagnostic
+from earthdiagnostics.utils import Utils, TempFile
+from earthdiagnostics.variable import Domain
+from earthdiagnostics.variable import Domains
+
+
+class AverageSection(Diagnostic):
+    """
+    Compute an average of a given zone. The variable MUST be on a regular grid.
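+
+    Usage (option string as parsed by generate_jobs; the variable shown is illustrative)::
+
+        avgsection,tos,-30,30,-30,30[,ocean]
+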
+    :original author: Virginie Guemas
+    :contributor: Javier Vegas-Regidor
+
+    :created: March 2012
+    :last modified: June 2016
+
+    :param data_manager: data management object
+    :type data_manager: DataManager
+    :param startdate: startdate
+    :type startdate: str
+    :param member: member number
+    :type member: int
+    :param chunk: chunk's number
+    :type chunk: int
+    :param variable: variable's name
+    :type variable: str
+    :param domain: variable's domain
+    :type domain: Domain
+    :param box: box to use for the average
+    :type box: Box
+
+    """
+
+    alias = 'avgsection'
+    "Diagnostic alias for the configuration file"
+
+    def __init__(self, data_manager, startdate, member, chunk, domain, variable, box):
+        Diagnostic.__init__(self, data_manager)
+        self.startdate = startdate
+        self.member = member
+        self.chunk = chunk
+        self.variable = variable
+        self.domain = domain
+        self.box = box
+
+    def __eq__(self, other):
+        return self.startdate == other.startdate and self.member == other.member and self.chunk == other.chunk and \
+            self.domain == other.domain and self.variable == other.variable and self.box == other.box
+
+    def __str__(self):
+        return 'Average section Startdate: {0} Member: {1} Chunk: {2} Box: {3} ' \
+               'Variable: {4}:{5}'.format(self.startdate, self.member, self.chunk, self.box, self.domain,
+                                          self.variable)
+
+    @classmethod
+    def generate_jobs(cls, diags, options):
+        """
+        Creates a job for each chunk to compute the diagnostic
+
+        :param diags: Diagnostics manager class
+        :type diags: Diags
+        :param options: variable, minimum longitude, maximum longitude, minimum latitude, maximum latitude,
+                        domain=ocean
+        :type options: list[str]
+        :return:
+        """
+        num_options = len(options) - 1
+        if num_options < 5:
+            raise Exception('You must specify the variable and the box to average')
+        if num_options > 6:
+            raise Exception('You must specify between 5 and 6 parameters for the section average diagnostic')
+        variable = options[1]
+        box = Box()
+        box.min_lon = int(options[2])
+        box.max_lon = int(options[3])
+        box.min_lat = int(options[4])
+        box.max_lat = int(options[5])
+        if num_options >= 6:
+            domain = Domain(options[6])
+        else:
+            domain = Domains.ocean
+
+        job_list = list()
+        for startdate, member, chunk in diags.config.experiment.get_chunk_list():
+            job_list.append(AverageSection(diags.data_manager, startdate, member, chunk, domain, variable, box))
+        return job_list
+
+    def compute(self):
+        """
+        Runs the diagnostic
+        """
+        temp = TempFile.get()
+        variable_file = self.data_manager.get_file(self.domain, self.variable, self.startdate, self.member, self.chunk,
+                                                   grid='regular')
+        Utils.cdo.zonmean(input='-sellonlatbox,{0},{1},{2},{3} {4}'.format(self.box.min_lon, self.box.max_lon,
+                                                                           self.box.min_lat, self.box.max_lat,
+                                                                           variable_file),
+                          output=temp)
+        os.remove(variable_file)
+        self.send_file(temp, self.domain, self.variable + 'mean', self.startdate, self.member, self.chunk,
+                       box=self.box, grid='regular')
diff --git a/earthdiagnostics/ocean/convectionsites.py b/earthdiagnostics/ocean/convectionsites.py
new file mode 100644
index 0000000000000000000000000000000000000000..3ff6a65b1837b3a13f068ea36637ff6efa1abb0c
--- /dev/null
+++ b/earthdiagnostics/ocean/convectionsites.py
@@ -0,0 +1,122 @@
+# coding=utf-8
+import numpy as np
+from autosubmit.config.log import Log
+from earthdiagnostics.diagnostic import Diagnostic
+from earthdiagnostics.utils import Utils, TempFile
+from earthdiagnostics.constants import Models
+
+
+class ConvectionSites(Diagnostic):
+    """
+    Compute the intensity of convection in the four main convection sites.
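+
+    This diagnostic takes no options; it is requested in the configuration simply as 'convection'.
+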
+    :original author: Virginie Guemas
+    :contributor: Javier Vegas-Regidor
+
+    :created: October 2013
+    :last modified: June 2016
+
+    :param data_manager: data management object
+    :type data_manager: DataManager
+    :param startdate: startdate
+    :type startdate: str
+    :param member: member number
+    :type member: int
+    :param chunk: chunk's number
+    :type chunk: int
+    :param model_version: model version
+    :type model_version: str
+    """
+
+    alias = 'convection'
+    "Diagnostic alias for the configuration file"
+
+    def __init__(self, data_manager, startdate, member, chunk, model_version):
+        Diagnostic.__init__(self, data_manager)
+        self.startdate = startdate
+        self.member = member
+        self.chunk = chunk
+        self.model_version = model_version
+        self.required_vars = ['mlotst']
+        self.generated_vars = ['site']
+        self.mlotst_handler = None
+
+    def __str__(self):
+        return 'Convection sites Startdate: {0} Member: {1} Chunk: {2}'.format(self.startdate, self.member, self.chunk)
+
+    def __eq__(self, other):
+        return self.startdate == other.startdate and self.member == other.member and self.chunk == other.chunk and \
+            self.model_version == other.model_version
+
+    @classmethod
+    def generate_jobs(cls, diags, options):
+        """
+        Creates a job for each chunk to compute the diagnostic
+
+        :param diags: Diagnostics manager class
+        :type diags: Diags
+        :param options: None
+        :type options: list[str]
+        :return:
+        """
+        if len(options) > 1:
+            raise Exception('The convection sites diagnostic has no options')
+        job_list = list()
+        for startdate, member, chunk in diags.config.experiment.get_chunk_list():
+            job_list.append(ConvectionSites(diags.data_manager, startdate, member, chunk,
+                                            diags.config.experiment.model_version))
+        return job_list
+
+    def compute(self):
+        """
+        Runs the diagnostic
+        """
+        if self.model_version in [Models.ECEARTH_2_3_O1L42, Models.ECEARTH_3_0_O1L46,
+                                  Models.NEMO_3_2_O1L42, Models.NEMO_3_3_O1L46,
+                                  Models.NEMOVAR_O1L42]:
+            labrador = [225, 245, 215, 255]
+            irminger = [245, 290, 215, 245]
+            gin = [260, 310, 245, 291]
+            weddell = [225, 280, 1, 50]
+
+        elif self.model_version in [Models.ECEARTH_3_0_O25L46, Models.ECEARTH_3_0_O25L75,
+                                    Models.GLORYS2_V1_O25L75]:
+            raise Exception("Option convection not available yet for {0}".format(self.model_version))
+        else:
+            raise Exception("Input grid {0} not recognized".format(self.model_version))
+
+        mlotst_file = self.data_manager.get_file('ocean', 'mlotst', self.startdate, self.member, self.chunk)
+        output = TempFile.get()
+
+        self.mlotst_handler = Utils.openCdf(mlotst_file)
+        handler = Utils.openCdf(output, 'w')
+        handler.createDimension('time', self.mlotst_handler.variables['time'].shape[0])
+        handler.createDimension('region', 4)
+        Utils.copy_variable(self.mlotst_handler, handler, 'time')
+        var_region = handler.createVariable('region', str, 'region')
+        var_site = handler.createVariable('site', 'f', ('time', 'region'))
+        var_site.short_name = 'site'
+        var_site.long_name = 'convection sites'
+        var_site.units = 'm'
+
+        var_region[0] = 'labrador'
+        var_site[:, 0] = self._convection_site(labrador)
+
+        var_region[1] = 'irminger'
+        var_site[:, 1] = self._convection_site(irminger)
+
+        var_region[2] = 'gin'
+        var_site[:, 2] = self._convection_site(gin)
+
+        var_region[3] = 'weddell'
+        var_site[:, 3] = self._convection_site(weddell)
+
+        self.mlotst_handler.close()
+        handler.close()
+
+        self.send_file(output, 'ocean', 'site', self.startdate, self.member, self.chunk)
+        Log.info('Finished convection sites for startdate 
+
+    def _convection_site(self, site):
+        return np.max(self.mlotst_handler.variables['mlotst'][:, site[2] - 1:site[3] - 1, site[0] - 1:site[1] - 1],
+                      (1, 2))
diff --git a/earthdiagnostics/ocean/cutsection.py b/earthdiagnostics/ocean/cutsection.py
new file mode 100644
index 0000000000000000000000000000000000000000..95701ab806f7106acbc81932ccf1d44d3a287986
--- /dev/null
+++ b/earthdiagnostics/ocean/cutsection.py
@@ -0,0 +1,189 @@
+# coding=utf-8
+import numpy as np
+from autosubmit.config.log import Log
+
+from earthdiagnostics.diagnostic import Diagnostic
+from earthdiagnostics.box import Box
+from earthdiagnostics.utils import Utils
+from earthdiagnostics.variable import Domain
+from earthdiagnostics.variable import Domains
+
+
+class CutSection(Diagnostic):
+    """
+    Cuts a meridional or zonal section
+
+    :original author: Virginie Guemas
+    :contributor: Javier Vegas-Regidor
+
+    :created: September 2012
+    :last modified: June 2016
+
+    :param data_manager: data management object
+    :type data_manager: DataManager
+    :param startdate: startdate
+    :type startdate: str
+    :param member: member number
+    :type member: int
+    :param chunk: chunk's number
+    :type chunk: int
+    :param variable: variable's name
+    :type variable: str
+    :param domain: variable's domain
+    :type domain: Domain
+    :param zonal: specifies if section is zonal or meridional
+    :type zonal: bool
+    :param value: value of the section's coordinate
+    :type value: int
+    """
+
+    alias = 'cutsection'
+    "Diagnostic alias for the configuration file"
+
+    def __init__(self, data_manager, startdate, member, chunk, domain, variable, zonal, value):
+        Diagnostic.__init__(self, data_manager)
+        self.startdate = startdate
+        self.member = member
+        self.chunk = chunk
+        self.variable = variable
+        self.domain = domain
+        self.zonal = zonal
+        self.value = value
+
+    def __eq__(self, other):
+        return self.startdate == other.startdate and self.member == other.member and self.chunk == other.chunk and \
+            self.domain == other.domain and self.variable == other.variable and self.zonal == other.zonal and \
+            self.value == other.value
+
+    def __str__(self):
+        return 'Cut section Startdate: {0} Member: {1} Chunk: {2} Variable: {3}:{4} ' \
+               'Zonal: {5} Value: {6}'.format(self.startdate, self.member, self.chunk, self.domain, self.variable,
+                                              self.zonal, self.value)
+
+    @classmethod
+    def generate_jobs(cls, diags, options):
+        """
+        Creates a job for each chunk to compute the diagnostic
+
+        :param diags: Diagnostics manager class
+        :type diags: Diags
+        :param options: variable, zonal, value, domain=ocean
+        :type options: list[str]
+        :return:
+        """
+        num_options = len(options) - 1
+        if num_options < 3:
+            raise Exception('You must specify the variable, coordinate and coordinate value')
+        if num_options > 4:
+            raise Exception('You must specify between 3 and 4 parameters for the cut section diagnostic')
+        variable = options[1]
+        zonal = options[2].lower() == 'true'
+        value = int(options[3])
+        if num_options >= 4:
+            domain = Domain(options[4])
+        else:
+            domain = Domains.ocean
+
+        job_list = list()
+        for startdate, member, chunk in diags.config.experiment.get_chunk_list():
+            job_list.append(CutSection(diags.data_manager, startdate, member, chunk, domain, variable, zonal, value))
+        return job_list
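+
+    # Editor's note -- usage sketch (hypothetical values, assuming the
+    # comma-separated option syntax parsed above): 'cutsection,thetao,true,0'
+    # cuts a zonal section of thetao along the equator, while
+    # 'cutsection,so,false,-30' cuts a meridional section of so along 30W,
+    # both on the default ocean domain.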
+
+    def compute(self):
+        """
+        Runs the diagnostic
+        """
+        nco = Utils.nco
+
+        handler = Utils.openCdf('mesh_hgr.nc')
+        dimi = handler.dimensions['i'].size
+        dimj = handler.dimensions['j'].size
+        dimlev = handler.dimensions['lev'].size
+
+        lon = handler.variables['lon'][:]
+        lat = handler.variables['lat'][:]
+        handler.close()
+
+        handler = Utils.openCdf('mask.nc')
+        mask_lev = handler.variables['tmask'][:]
+        mask_lev = mask_lev.astype(float)
+        # noinspection PyTypeChecker
+        np.place(mask_lev, mask_lev == 0, [1e20])
+        handler.close()
+
+        # Latitude / longitude of the zonal / meridional section
+        exactpos = self.value
+        if not self.zonal:
+            while exactpos < np.min(lon):
+                exactpos += 360
+            while exactpos > np.max(lon):
+                exactpos -= 360
+            size = dimj
+        else:
+            size = dimi
+
+        # Collect the indexes defining the section
+
+        listi = np.empty(size, dtype=int)
+        listj = np.empty(size, dtype=int)
+
+        for jpt in range(0, size):
+            if not self.zonal:
+                vector = lon[jpt, :]
+            else:
+                vector = lat[:, jpt]
+            distance = abs(vector - exactpos)
+            pos = np.where(distance == min(distance))
+            if not self.zonal:
+                listi[jpt] = pos[0][0]
+                listj[jpt] = jpt
+            else:
+                listi[jpt] = jpt
+                listj[jpt] = pos[0][0]
+
+        temp = self.data_manager.get_file(self.domain, self.variable, self.startdate, self.member, self.chunk)
+
+        handler = Utils.openCdf(temp)
+        dimtime = handler.dimensions['time'].size
+        var_array = handler.variables[self.variable][:]
+        # Keep the dtype before closing: the handler can not be queried afterwards
+        var_dtype = handler.variables[self.variable].dtype
+        handler.close()
+
+        var = np.empty([dimtime, dimlev, size], dtype=var_dtype)
+        new_coord = np.empty(size, dtype=float)
+        if self.zonal:
+            old_coord = lon
+        else:
+            old_coord = lat
+
+        for jpt in range(0, size):
+            var[:, :, jpt] = np.maximum(var_array[:, :, listj[jpt], listi[jpt]],
+                                        mask_lev[:, :, listj[jpt], listi[jpt]])
+            new_coord[jpt] = old_coord[listj[jpt], listi[jpt]]
+
+        nco.ncks(input=temp, output=temp, options='-O -v lev,time')
+
+        handler = Utils.openCdf(temp)
+        if not self.zonal:
+            handler.createDimension('lat', size)
+            coord_var = handler.createVariable('lat', float, 'lat')
+            file_var = handler.createVariable(self.variable, float, ('time', 'lev', 'lat'))
+        else:
+            handler.createDimension('lon', size)
+            coord_var = handler.createVariable('lon', float, 'lon')
+            file_var = handler.createVariable(self.variable, float, ('time', 'lev', 'lon'))
+        coord_var[:] = new_coord[:]
+        file_var[:] = var[:]
+        file_var.missing_value = 1e20
+        handler.close()
+
+        box = Box()
+        # A zonal section is cut at a fixed latitude, a meridional one at a fixed longitude
+        if self.zonal:
+            box.max_lat = self.value
+            box.min_lat = self.value
+        else:
+            box.max_lon = self.value
+            box.min_lon = self.value
+
+        self.send_file(temp, self.domain, self.variable, self.startdate, self.member, self.chunk, box=box)
+        Log.info('Finished cut section for startdate {0}, member {1}, chunk {2}',
+                 self.startdate, self.member, self.chunk)
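+
+# Editor's note -- the section follows the model grid: for each point of the cut
+# the nearest neighbour to the requested coordinate is selected, e.g. for a
+# meridional section (sketch, names taken from compute above):
+#
+#     >>> distance = abs(lon[jpt, :] - exactpos)
+#     >>> listi[jpt] = np.where(distance == min(distance))[0][0]
+#
+# so the extracted line is exact on the grid, not a geodesic.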
diff --git a/earthdiagnostics/ocean/gyres.py b/earthdiagnostics/ocean/gyres.py
new file mode 100644
index 0000000000000000000000000000000000000000..93bdb6076718f9ecd5077bff3d6ac7bdc586e831
--- /dev/null
+++ b/earthdiagnostics/ocean/gyres.py
@@ -0,0 +1,171 @@
+# coding=utf-8
+import numpy as np
+from autosubmit.config.log import Log
+
+from earthdiagnostics.constants import Models
+from earthdiagnostics.diagnostic import Diagnostic
+from earthdiagnostics.utils import Utils, TempFile
+
+
+class Gyres(Diagnostic):
+    """
+    Compute the intensity of the subtropical and subpolar gyres
+
+    :original author: Virginie Guemas
+    :contributor: Javier Vegas-Regidor
+
+    :created: October 2013
+    :last modified: June 2016
+
+    :param data_manager: data management object
+    :type data_manager: DataManager
+    :param startdate: startdate
+    :type startdate: str
+    :param member: member number
+    :type member: int
+    :param chunk: chunk's number
+    :type chunk: int
+    :param model_version: model version
+    :type model_version: str
+    """
+
+    alias = 'gyres'
+    "Diagnostic alias for the configuration file"
+
+    def __init__(self, data_manager, startdate, member, chunk, model_version):
+        Diagnostic.__init__(self, data_manager)
+        self.startdate = startdate
+        self.member = member
+        self.chunk = chunk
+        self.model_version = model_version
+        self.required_vars = ['vsftbarot']
+        self.generated_vars = ['gyres']
+        self.var_vsftbarot = None
+
+    def __eq__(self, other):
+        return self.startdate == other.startdate and self.member == other.member and self.chunk == other.chunk and \
+            self.model_version == other.model_version
+
+    def __str__(self):
+        return 'Gyres Startdate: {0} Member: {1} Chunk: {2}'.format(self.startdate, self.member, self.chunk)
+
+    @classmethod
+    def generate_jobs(cls, diags, options):
+        """
+        Creates a job for each chunk to compute the diagnostic
+
+        :param diags: Diagnostics manager class
+        :type diags: Diags
+        :param options: None
+        :type options: list[str]
+        :return:
+        """
+        if len(options) > 1:
+            raise Exception('The gyres diagnostic has no options')
+        job_list = list()
+        for startdate, member, chunk in diags.config.experiment.get_chunk_list():
+            job_list.append(Gyres(diags.data_manager, startdate, member, chunk, diags.model_version))
+        return job_list
+
+    # noinspection PyPep8Naming
+    def compute(self):
+        """
+        Runs the diagnostic
+        """
+        if self.model_version in [Models.ECEARTH_2_3_O1L42, Models.ECEARTH_3_0_O1L46,
+                                  Models.NEMO_3_2_O1L42, Models.NEMO_3_3_O1L46,
+                                  Models.NEMOVAR_O1L42]:
+
+            subpolNAtl = [230, 275, 215, 245]
+            subpolNPac = [70, 145, 195, 235]
+            subtropNPac = [45, 175, 165, 220]
+            subtropNAtl = [195, 275, 175, 225]
+            subtropSPac = [70, 205, 120, 145]
+            subtropSAtl = [235, 300, 120, 145]
+            subtropInd = [320, 30, 110, 180]
+            ACC = [1, 361, 1, 65]
+
+        elif self.model_version in [Models.ECEARTH_3_0_O25L46, Models.ECEARTH_3_0_O25L75,
+                                    Models.GLORYS2_V1_O25L75]:
+            raise Exception("Option gyres not available yet for {0}".format(self.model_version))
+        else:
+            raise Exception("Input grid {0} not recognized".format(self.model_version))
+
+        output = TempFile.get()
+        vsftbarot_file = self.data_manager.get_file('ocean', 'vsftbarot', self.startdate, self.member, self.chunk)
+
+        handler_original = Utils.openCdf(vsftbarot_file)
+        self.var_vsftbarot = handler_original.variables['vsftbarot']
+        handler = Utils.openCdf(output, 'w')
+        handler.createDimension('time', handler_original.variables['time'].shape[0])
+        handler.createDimension('region', 8)
+        Utils.copy_variable(handler_original, handler, 'time')
+        var_region = handler.createVariable('region', str, 'region')
+
+        var_gyre = handler.createVariable('gyre', 'f', ('time', 'region'), fill_value=0.0)
+        var_gyre.valid_max = 2e8
+        var_gyre.valid_min = 0.0
+        var_gyre.short_name = 'gyre'
+        var_gyre.long_name = 'gyre'
+        var_gyre.units = 'm^3/s'
+
+        var_region[0] = 'subpolNAtl'
+        var_gyre[:, 0] = self._gyre(subpolNAtl, True)
+        Log.debug('subpolNAtl: {0}', var_gyre[:, 0])
+
+        var_region[1] = 'subpolNPac'
+        var_gyre[:, 1] = self._gyre(subpolNPac, True)
+        Log.debug('subpolNPac: {0}', var_gyre[:, 1])
+
+        var_region[2] = 'subtropNPac'
+        var_gyre[:, 2] = self._gyre(subtropNPac)
+        Log.debug('subtropNPac: {0}', var_gyre[:, 2])
+
+        var_region[3] = 'subtropSPac'
+        var_gyre[:, 3] = self._gyre(subtropSPac)
+        Log.debug('subtropSPac: {0}', var_gyre[:, 3])
+
+        var_region[4] = 'subtropNAtl'
+        var_gyre[:, 4] = self._gyre(subtropNAtl)
+        Log.debug('subtropNAtl: {0}', var_gyre[:, 4])
+
+        var_region[5] = 'subtropSAtl'
+        var_gyre[:, 5] = 
self._gyre(subtropSAtl) + Log.debug('subtropSAtl: {0}', var_gyre[:, 5]) + + var_region[6] = 'subtropInd' + var_gyre[:, 6] = self._gyre(subtropInd) + Log.debug('subtropInd: {0}', var_gyre[:, 6]) + + var_region[7] = 'ACC' + var_gyre[:, 7] = self._gyre(ACC) + Log.debug('ACC: {0}', var_gyre[:, 7]) + + handler.close() + handler_original.close() + self.send_file(output, 'ocean', 'gyre', self.startdate, self.member, self.chunk) + Log.info('Finished gyres for startdate {0}, member {1}, chunk {2}', self.startdate, self.member, self.chunk) + + def _gyre(self, site, invert=False): + if invert: + return np.min(self._extract_section(site), (1, 2)) * -1 + else: + return np.max(self._extract_section(site), (1, 2)) + + def _extract_section(self, site): + if site[2] <= site[3]: + if site[0] <= site[1]: + return self.var_vsftbarot[:, site[2] - 1:site[3] - 1, site[0] - 1:site[1] - 1] + else: + return np.concatenate((self.var_vsftbarot[:, site[2] - 1:site[3] - 1, site[0] - 1:], + self.var_vsftbarot[:, site[2] - 1:site[3] - 1, :site[1] - 1]), axis=2) + + else: + if site[0] <= site[1]: + return np.concatenate((self.var_vsftbarot[:, site[2] - 1:, site[0] - 1: site[1] - 1], + self.var_vsftbarot[:, :site[3] - 1, site[0] - 1: site[1] - 1]), axis=1) + else: + temp = np.concatenate((self.var_vsftbarot[:, site[2] - 1:, :], + self.var_vsftbarot[:, :site[3] - 1, :]), axis=1) + return np.concatenate((temp[:, :, site[0] - 1:], + temp[:, :, :site[1] - 1]), axis=2) diff --git a/earthdiagnostics/ocean/heatcontent.py b/earthdiagnostics/ocean/heatcontent.py new file mode 100644 index 0000000000000000000000000000000000000000..618d54c73dbcd3eca39d8343e86c6a07529adefa --- /dev/null +++ b/earthdiagnostics/ocean/heatcontent.py @@ -0,0 +1,173 @@ +# coding=utf-8 +import shutil + +from autosubmit.config.log import Log + +from earthdiagnostics import cdftools +from earthdiagnostics.constants import Basins +from earthdiagnostics.utils import Utils, TempFile +from earthdiagnostics.diagnostic import Diagnostic +from earthdiagnostics.box import Box +from earthdiagnostics.variable import Domains + + +class HeatContent(Diagnostic): + """ + Compute the total ocean heat content + + :original author: Virginie Guemas + :contributor: Javier Vegas-Regidor + + :created: May 2012 + :last modified: June 2016 + + :param data_manager: data management object + :type data_manager: DataManager + :param startdate: startdate + :type startdate: str + :param member: member number + :type member: int + :param chunk: chunk's number + :type chunk: int + :param mixed_layer: If 1, restricts calculation to the mixed layer, if -1 exclude it. 
If 0, no effect + :type mixed_layer: int + :param box: box to use for the average + :type box: Box + + """ + + alias = 'ohc' + "Diagnostic alias for the configuration file" + + def __init__(self, data_manager, startdate, member, chunk, basin, mixed_layer, box): + Diagnostic.__init__(self, data_manager) + self.startdate = startdate + self.member = member + self.chunk = chunk + self.basin = basin + self.mxloption = mixed_layer + self.box = box + self.required_vars = ['so', 'mlotst'] + self.generated_vars = ['scvertsum'] + + def __eq__(self, other): + return self.startdate == other.startdate and self.member == other.member and self.chunk == other.chunk and \ + self.box == other.box and self.basin == other.basin and self.mxloption == other.mxloption + + def __str__(self): + return 'Heat content Startdate: {0} Member: {1} Chunk: {2} Mixed layer: {3} Box: {4} ' \ + 'Basin: {5}'.format(self.startdate, self.member, self.chunk, self.mxloption, self.box, + self.basin.fullname) + + @classmethod + def generate_jobs(cls, diags, options): + """ + Creates a job for each chunk to compute the diagnostic + + :param diags: Diagnostics manager class + :type diags: Diags + :param options: basin, mixed layer option (1 to only compute at the mixed layer, -1 to exclude it, 0 to ignore), + minimum depth, maximum depth + :type options: list[str] + :return: + """ + num_options = len(options) - 1 + if num_options < 4: + raise Exception('You must specify the basin, mixed layer option and minimum and maximum depth to use') + if num_options > 4: + raise Exception('You must specify 4 parameters for the heat content diagnostic') + basin = Basins.parse(options[1]) + mixed_layer = int(options[2]) + box = Box() + box.min_depth = int(options[3]) + box.max_depth = int(options[4]) + job_list = list() + for startdate, member, chunk in diags.config.experiment.get_chunk_list(): + job_list.append(HeatContent(diags.data_manager, startdate, member, chunk, basin, mixed_layer, box)) + return job_list + + def compute(self): + """ + Runs the diagnostic + """ + nco = Utils.nco + temperature_file = self.data_manager.get_file('ocean', 'thetao', self.startdate, self.member, self.chunk) + if self.mxloption != 0: + mlotst_file = self.data_manager.get_file('ocean', 'mlotst', self.startdate, self.member, self.chunk) + nco.ncks(input=mlotst_file, output=temperature_file, options='-A -v mlotst') + + para = list() + para.append('0') + para.append('0') + para.append('0') + para.append('0') + para.append(self.box.min_depth) + para.append(self.box.max_depth) + if self.mxloption != 0: + para.append('-mxloption') + para.append(str(self.mxloption)) + if self.basin != Basins.Global: + handler = Utils.openCdf('mask_regions.3d.nc') + if self.basin.fullname not in handler.variables: + raise Exception('Basin {0} is not defined on mask_regions.nc'.format(self.basin.fullname)) + + handler.close() + para.append('-maskfile') + para.append('mask_regions.3d.nc') + para.append('-mask') + para.append(self.basin.fullname) + + shell_output = cdftools.run('cdfheatc', options=para, input=temperature_file) + + ohcsum_temp = TempFile.get() + ohcvmean_temp = TempFile.get() + nco.ncks(input=temperature_file, output=ohcsum_temp, options='-O -v time') + shutil.copy(ohcsum_temp, ohcvmean_temp) + + ohcsum_handler = Utils.openCdf(ohcsum_temp) + thc = ohcsum_handler.createVariable('ohcsum', float, 'time') + thc.standard_name = "integral_of_sea_water_potential_temperature_expressed_as_heat_content" + thc.long_name = "Total heat content" + thc.units = "J" + + ohcvmean_handler = 
Utils.openCdf(ohcvmean_temp) + uhc = ohcvmean_handler.createVariable('ohcvmean', float, 'time') + uhc.standard_name = "integral_of_sea_water_potential_temperature_expressed_as_heat_content" + uhc.long_name = "Heat content per unit volume" + uhc.units = "J*m^-3" + + time = 0 + # noinspection PyUnboundLocalVariable + for lines in shell_output: + if not lines: + continue + + for line in lines.split('\n'): + line = line.lstrip() + if line.startswith("Heat Content at level"): + Log.info(line) + elif line.startswith("Total Heat content/volume"): + Log.user_warning(line) + uhc[time] = line[line.index(':') + 1: line.index('Joules')] + time += 1 + if line.startswith("Total Heat content "): + Log.result(line) + thc[time] = line[line.index(':') + 1: line.index('Joules')] + elif line.startswith('TIME : '): + Log.info(line) + + ohcsum_handler.close() + ohcvmean_handler.close() + + if self.box.min_depth == 0: + # For cdftools, this is all levels + box_save = None + else: + box_save = self.box + + Utils.setminmax(ohcsum_temp, 'ohcsum') + self.send_file(ohcsum_temp, Domains.ocean, 'ohcsum', self.startdate, self.member, self.chunk, + box=box_save, region=self.basin.fullname, rename_var='ohcsum') + Utils.setminmax(ohcvmean_temp, 'ohcvmean') + self.send_file(ohcvmean_temp, Domains.ocean, 'ohcvmean', self.startdate, self.member, self.chunk, + box=box_save, region=self.basin.fullname, rename_var='ohcvmean') diff --git a/earthdiagnostics/ocean/heatcontentlayer.py b/earthdiagnostics/ocean/heatcontentlayer.py new file mode 100644 index 0000000000000000000000000000000000000000..65f86cb16e64bbc3cc77bb460d64bb688fb39c3f --- /dev/null +++ b/earthdiagnostics/ocean/heatcontentlayer.py @@ -0,0 +1,181 @@ +# coding=utf-8 +import numpy as np + +from earthdiagnostics.constants import Basins +from earthdiagnostics.box import Box +from earthdiagnostics.diagnostic import Diagnostic +from earthdiagnostics.utils import Utils, TempFile +from earthdiagnostics.variable import Domains + + +class HeatContentLayer(Diagnostic): + """ + Point-wise Ocean Heat Content in a specified ocean thickness (J/m-2) + + :original author: Isabel Andreu Burillo + :contributor: Virginie Guemas + :contributor: Eleftheria Exarchou + :contributor: Javier Vegas-Regidor + + :created: June 2012 + :last modified: June 2016 + + :param data_manager: data management object + :type data_manager: DataManager + :param startdate: startdate + :type startdate: str + :param member: member number + :type member: int + :param chunk: chunk's number + :type chunk: int + :param box: box to use for the calculations + :type box: Box + """ + + alias = 'ohclayer' + "Diagnostic alias for the configuration file" + + def __init__(self, data_manager, startdate, member, chunk, box, weight, min_level, max_level): + Diagnostic.__init__(self, data_manager) + self.startdate = startdate + self.member = member + self.chunk = chunk + self.box = box + self.weight = weight + self.min_level = min_level + self.max_level = max_level + self.required_vars = ['so', 'mlotst'] + self.generated_vars = ['scvertsum'] + + def __str__(self): + return 'Heat content layer Startdate: {0} Member: {1} Chunk: {2} Box: {3}'.format(self.startdate, self.member, + self.chunk, self.box) + + @classmethod + def generate_jobs(cls, diags, options): + """ + Creates a job for each chunk to compute the diagnostic + + :param diags: Diagnostics manager class + :type diags: Diags + :param options: minimum depth, maximum depth, basin=Global + :type options: list[str] + """ + num_options = len(options) - 1 + if num_options < 
2:
+            raise Exception('You must specify the minimum and maximum depth to use')
+        if num_options > 3:
+            raise Exception('You must specify between 2 and 3 parameters for the heat content layer diagnostic')
+        box = Box(True)
+        box.min_depth = int(options[1])
+        box.max_depth = int(options[2])
+        if len(options) > 3:
+            basin = Basins.parse(options[3])
+        else:
+            basin = Basins.Global
+        job_list = list()
+
+        handler = Utils.openCdf('mesh_zgr.nc')
+        mask = Utils.get_mask(basin)
+
+        if 'e3t' in handler.variables:
+            mask = handler.variables['e3t'][:] * mask
+        elif 'e3t_0' in handler.variables:
+            mask = handler.variables['e3t_0'][:] * mask
+        else:
+            raise Exception('e3t variable can not be found')
+
+        if 'gdepw' in handler.variables:
+            depth = handler.variables['gdepw'][:]
+        elif 'gdepw_0' in handler.variables:
+            depth = handler.variables['gdepw_0'][:]
+        else:
+            raise Exception('gdepw variable can not be found')
+
+        while len(depth.shape) < 4:
+            depth = np.expand_dims(depth, -1)
+        handler.close()
+
+        def calculate_weight(array):
+            """
+            Calculates the weight of each level for the given layer
+            :param array:
+            :return:
+            """
+            level = 0
+
+            while array[level + 1] <= box.min_depth:
+                array[level] = 0
+                level += 1
+                if level == array.size - 1:
+                    array[level] = 0
+                    return array
+
+            if array[level] != box.min_depth:
+                weight_value = (array[level + 1] - box.min_depth) / (array[level + 1] - array[level])
+                array[level] = weight_value
+                level += 1
+                if level == array.size - 1:
+                    array[level] = 0
+                    return array
+
+            while array[level + 1] <= box.max_depth:
+                array[level] = 1
+                level += 1
+                if level == array.size - 1:
+                    array[level] = 0
+                    return array
+
+            if array[level] != box.max_depth:
+                weight_value = (box.max_depth - array[level]) / (array[level + 1] - array[level])
+                array[level] = weight_value
+                level += 1
+                if level == array.size - 1:
+                    array[level] = 0
+                    return array
+
+            array[level:] = 0
+            return array
+
+        # rho (1020 kg m-3) times cp (4000 J kg-1 K-1) turns the weighted temperature sum into heat content
+        weight = mask * np.apply_along_axis(calculate_weight, 1, depth) * 1020 * 4000
+
+        # Now we will reduce to the levels with any weight != 0 to avoid loading too much data into memory
+        levels = weight.shape[1]
+        min_level = 0
+        while min_level < levels and not weight[:, min_level, :].any():
+            min_level += 1
+        max_level = min_level
+        while max_level < (levels - 1) and weight[:, max_level + 1, :].any():
+            max_level += 1
+        # max_level is the last level with a non-zero weight, so the slice must go one past it
+        weight = weight[:, min_level:max_level + 1, :]
+
+        for startdate, member, chunk in diags.config.experiment.get_chunk_list():
+            job_list.append(HeatContentLayer(diags.data_manager, startdate, member, chunk, box,
+                                             weight, min_level, max_level))
+        return job_list
+
+    def compute(self):
+        """
+        Runs the diagnostic
+        """
+        nco = Utils.nco
+        results = TempFile.get()
+
+        thetao_file = self.data_manager.get_file(Domains.ocean, 'thetao', self.startdate, self.member, self.chunk)
+
+        handler = Utils.openCdf(thetao_file)
+        heatc_sl = np.sum(handler.variables['thetao'][:, self.min_level:self.max_level + 1, :] * self.weight, 1)
+        handler.sync()
+        handler.renameVariable('thetao', 'heatc_sl')
+        handler.close()
+
+        nco.ncks(input=thetao_file, output=results, options='-O -v lon,lat,time')
+        Utils.rename_variables(results, {'x': 'i', 'y': 'j'}, False, True)
+        handler_results = Utils.openCdf(results)
+        handler_results.createVariable('ohc', float, ('time', 'j', 'i'), fill_value=1.e20)
+        handler_results.sync()
+        handler_results.variables['ohc'][:] = heatc_sl
+        handler_results.close()
+
+        Utils.setminmax(results, 'ohc')
+        self.send_file(results, Domains.ocean, 'ohc', self.startdate, self.member, self.chunk, box=self.box)
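+
+# Editor's note -- a worked sketch of calculate_weight above (hypothetical depths):
+# with box.min_depth = 0, box.max_depth = 300 and level interfaces
+# gdepw = [0, 100, 250, 500], the returned column is [1, 1, 0.2, 0]: levels fully
+# inside the layer get weight 1 and the level straddling 300 m gets the fraction
+# (300 - 250) / (500 - 250) = 0.2 of its thickness.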
diff --git a/earthdiagnostics/ocean/interpolate.py b/earthdiagnostics/ocean/interpolate.py
new file mode 100644
index 0000000000000000000000000000000000000000..353a14be53258cc985eb995f170ff71ab46d57dc
--- /dev/null
+++ b/earthdiagnostics/ocean/interpolate.py
@@ -0,0 +1,186 @@
+# coding=utf-8
+import shutil
+import threading
+
+import os
+from autosubmit.config.log import Log
+from earthdiagnostics.diagnostic import Diagnostic
+from earthdiagnostics.utils import Utils, TempFile
+from earthdiagnostics.variable import Domain, Domains
+
+
+class Interpolate(Diagnostic):
+    """
+    3-dimensional conservative interpolation to the regular atmospheric grid.
+    It can also be used for 2D (i,j) variables
+
+    :original author: Virginie Guemas
+    :contributor: Javier Vegas-Regidor
+
+    :created: November 2012
+    :last modified: June 2016
+
+    :param data_manager: data management object
+    :type data_manager: DataManager
+    :param startdate: startdate
+    :type startdate: str
+    :param member: member number
+    :type member: int
+    :param chunk: chunk's number
+    :type chunk: int
+    :param variable: variable's name
+    :type variable: str
+    :param domain: variable's domain
+    :type domain: Domain
+    :param model_version: model version
+    :type model_version: str
+    """
+
+    alias = 'interp'
+    "Diagnostic alias for the configuration file"
+
+    lock = threading.Lock()
+
+    def __init__(self, data_manager, startdate, member, chunk, domain, variable, target_grid, model_version,
+                 invert_lat):
+        Diagnostic.__init__(self, data_manager)
+        self.startdate = startdate
+        self.member = member
+        self.chunk = chunk
+        self.variable = variable
+        self.domain = domain
+        self.model_version = model_version
+        self.required_vars = [variable]
+        self.generated_vars = [variable]
+        self.tempTemplate = ''
+        self.grid = target_grid
+        self.invert_latitude = invert_lat
+
+    def __eq__(self, other):
+        return self.startdate == other.startdate and self.member == other.member and self.chunk == other.chunk and \
+            self.model_version == other.model_version and self.domain == other.domain and \
+            self.variable == other.variable and self.grid == other.grid and \
+            self.invert_latitude == other.invert_latitude
+
+    def __str__(self):
+        return 'Interpolate Startdate: {0} Member: {1} Chunk: {2} ' \
+               'Variable: {3}:{4} Target grid: {5} Invert lat: {6} ' \
+               'Model: {7}'.format(self.startdate, self.member, self.chunk, self.domain, self.variable, self.grid,
+                                   self.invert_latitude, self.model_version)
+
+    @classmethod
+    def generate_jobs(cls, diags, options):
+        """
+        Creates a job for each chunk to compute the diagnostic
+
+        :param diags: Diagnostics manager class
+        :type diags: Diags
+        :param options: target_grid, variable, domain=ocean, invert_lat=false
+        :type options: list[str]
+        :return:
+        """
+        num_options = len(options) - 1
+        if num_options < 2:
+            raise Exception('You must specify the grid and variable to interpolate')
+        if num_options > 4:
+            raise Exception('You must specify between 2 and 4 parameters for the interpolation diagnostic')
+        target_grid = options[1]
+        variable = options[2]
+        if num_options >= 3:
+            domain = Domain(options[3])
+        else:
+            domain = Domains.ocean
+        if num_options >= 4:
+            # bool() of any non-empty string is True, so the flag must be compared against 'true'
+            invert_lat = options[4].lower() == 'true'
+        else:
+            invert_lat = False
+
+        job_list = list()
+        for startdate, member, chunk in diags.config.experiment.get_chunk_list():
+            job_list.append(
+                Interpolate(diags.data_manager, startdate, member, chunk, domain, variable, target_grid,
+                            diags.config.experiment.model_version, invert_lat))
+        return job_list
+
+    def compute(self):
+        """
+        Runs 
the diagnostic + """ + variable_file = self.data_manager.get_file(self.domain, self.variable, self.startdate, self.member, self.chunk) + Utils.rename_variables(variable_file, {'i': 'x', 'j': 'y'}, must_exist=False, rename_dimension=True) + cdo = Utils.cdo + nco = Utils.nco + handler = Utils.openCdf(variable_file) + if 'lev' in handler.dimensions: + num_levels = handler.dimensions['lev'].size + has_levels = True + else: + num_levels = 1 + has_levels = False + handler.close() + for lev in range(0, num_levels): + self._interpolate_level(lev, has_levels, variable_file) + + temp = TempFile.get() + if has_levels: + Interpolate.lock.acquire() + nco.ncrcat(input=self._get_level_file(0), output=temp, + options="-n {0},2,1 -v '{1}'".format(num_levels, self.variable)) + Interpolate.lock.release() + + else: + Utils.move_file(self._get_level_file(0), temp) + + handler = Utils.openCdf(temp) + handler.renameDimension('record', 'lev') + handler.close() + nco.ncpdq(input=temp, output=temp, options='-O -h -a time,lev') + + if has_levels: + nco.ncks(input=variable_file, output=temp, options='-A -v lev') + for lev in range(0, num_levels): + os.remove(self._get_level_file(lev)) + temp2 = TempFile.get() + cdo.setgrid('t106grid', input=temp, output=temp2) + os.remove(temp) + if self.invert_latitude: + cdo.invertlatdata(input=temp2, output=temp) + shutil.move(temp, temp2) + if not has_levels: + nco.ncks(input=temp2, output=temp2, options='-O -v {0},lat,lon,time'.format(self.variable)) + + self.send_file(temp2, self.domain, self.variable, self.startdate, self.member, self.chunk, grid=self.grid) + + def _get_level_file(self, lev): + if not self.tempTemplate: + self.tempTemplate = TempFile.get(suffix='_01.nc') + # self.tempTemplate = 'temp_01.nc' + return self.tempTemplate.replace('_01.nc', '_{0:02d}.nc'.format(lev + 1)) + + def _interpolate_level(self, lev, has_levels, input_file): + nco = Utils.nco + temp = TempFile.get() + if has_levels: + nco.ncks(input=input_file, output=temp, options='-O -d lev,{0} -v {1},lat,lon'.format(lev, self.variable)) + nco.ncwa(input=temp, output=temp, options='-O -h -a lev') + else: + shutil.copy(input_file, temp) + namelist_file = TempFile.get(suffix='') + scrip_use_in = open(namelist_file, 'w') + scrip_use_in.writelines("&remap_inputs\n") + scrip_use_in.writelines(" remap_wgt = '/esnas/autosubmit/con_files/" + "weigths/{0}/rmp_{0}_to_{1}_lev{2}.nc'\n".format(self.model_version, self.grid, + lev + 1)) + scrip_use_in.writelines(" infile = '{0}'\n".format(temp)) + scrip_use_in.writelines(" invertlat = FALSE\n") + scrip_use_in.writelines(" var = '{0}'\n".format(self.variable)) + scrip_use_in.writelines(" fromregular = FALSE\n") + scrip_use_in.writelines(" outfile = '{0}'\n".format(temp)) + scrip_use_in.writelines("/\n") + scrip_use_in.close() + Utils.execute_shell_command('/home/Earth/jvegas/pyCharm/cfutools/interpolation/scrip_use ' + '{0}'.format(namelist_file), Log.DEBUG) + os.remove(namelist_file) + nco.ncecat(input=temp, output=temp, options="-O -h") + shutil.move(temp, self._get_level_file(lev)) + Log.debug("Level {0} ready", lev) diff --git a/earthdiagnostics/ocean/interpolatecdo.py b/earthdiagnostics/ocean/interpolatecdo.py new file mode 100644 index 0000000000000000000000000000000000000000..a80e8da834d2800dc8af9cf29601aded87b8fa95 --- /dev/null +++ b/earthdiagnostics/ocean/interpolatecdo.py @@ -0,0 +1,130 @@ +# coding=utf-8 +from earthdiagnostics.constants import Basins +from earthdiagnostics.diagnostic import Diagnostic +from earthdiagnostics.utils import Utils, TempFile 
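+# Editor's note -- a minimal sketch of the IFS-to-CDO grid name translation
+# implemented by InterpolateCDO._translate_ifs_grids_to_cdo_names below
+# (mappings taken from that method):
+#
+#     >>> InterpolateCDO._translate_ifs_grids_to_cdo_names('T255L91')
+#     't170'
+#
+# IFS truncation names are mapped onto the Gaussian grid names CDO accepts as
+# remapping targets; any other value is passed to CDO unchanged.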
+import numpy as np
+
+from earthdiagnostics.variable import Domain
+from earthdiagnostics.variable import Domains
+
+
+class InterpolateCDO(Diagnostic):
+    """
+    3-dimensional bilinear interpolation to the regular atmospheric grid using CDO.
+    It can also be used for 2D (i,j) variables
+
+    :original author: Javier Vegas-Regidor
+
+    :created: October 2016
+
+    :param data_manager: data management object
+    :type data_manager: DataManager
+    :param startdate: startdate
+    :type startdate: str
+    :param member: member number
+    :type member: int
+    :param chunk: chunk's number
+    :type chunk: int
+    :param variable: variable's name
+    :type variable: str
+    :param domain: variable's domain
+    :type domain: Domain
+    :param model_version: model version
+    :type model_version: str
+    """
+
+    alias = 'interpcdo'
+    "Diagnostic alias for the configuration file"
+
+    def __init__(self, data_manager, startdate, member, chunk, domain, variable, target_grid, model_version):
+        Diagnostic.__init__(self, data_manager)
+        self.startdate = startdate
+        self.member = member
+        self.chunk = chunk
+        self.variable = variable
+        self.domain = domain
+        self.model_version = model_version
+        self.required_vars = [variable]
+        self.generated_vars = [variable]
+        self.tempTemplate = ''
+        self.grid = target_grid
+
+    def __eq__(self, other):
+        return self.startdate == other.startdate and self.member == other.member and self.chunk == other.chunk and \
+            self.model_version == other.model_version and self.domain == other.domain and \
+            self.variable == other.variable and self.grid == other.grid
+
+    def __str__(self):
+        return 'Interpolate Startdate: {0} Member: {1} Chunk: {2} ' \
+               'Variable: {3}:{4} Target grid: {5} ' \
+               'Model: {6}'.format(self.startdate, self.member, self.chunk, self.domain, self.variable, self.grid,
+                                   self.model_version)
+
+    @classmethod
+    def generate_jobs(cls, diags, options):
+        """
+        Creates a job for each chunk to compute the diagnostic
+
+        :param diags: Diagnostics manager class
+        :type diags: Diags
+        :param options: variable, target_grid=atmospheric grid of the experiment, domain=ocean
+        :type options: list[str]
+        :return:
+        """
+        num_options = len(options) - 1
+        if num_options < 1:
+            raise Exception('You must specify the variable to interpolate')
+        if num_options > 3:
+            raise Exception('You must specify between 1 and 3 parameters for the interpolation with CDO diagnostic')
+        variable = options[1]
+
+        if num_options >= 2:
+            target_grid = options[2]
+        else:
+            target_grid = diags.config.experiment.atmos_grid.lower()
+
+        target_grid = cls._translate_ifs_grids_to_cdo_names(target_grid)
+
+        if num_options >= 3:
+            domain = Domain(options[3])
+        else:
+            domain = Domains.ocean
+        job_list = list()
+        for startdate, member, chunk in diags.config.experiment.get_chunk_list():
+            job_list.append(
+                InterpolateCDO(diags.data_manager, startdate, member, chunk, domain, variable, target_grid,
+                               diags.config.experiment.model_version))
+        return job_list
+
+    @classmethod
+    def _translate_ifs_grids_to_cdo_names(cls, target_grid):
+        if target_grid.startswith('T159L'):
+            target_grid = 't106'
+        if target_grid.startswith('T255L'):
+            target_grid = 't170'
+        if target_grid.startswith('T511L'):
+            target_grid = 't340'
+        return target_grid
+
+    def compute(self):
+        """
+        Runs the diagnostic
+        """
+        variable_file = self.data_manager.get_file(self.domain, self.variable, self.startdate, self.member, self.chunk)
+        handler = Utils.openCdf(variable_file)
+        var = handler.variables[self.variable]
+
+        mask = Utils.get_mask(Basins.Global).astype(float)
+        mask[mask == 0] = np.nan
+        var[:] = mask * var[:]
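+        # Editor's note: multiplying by the NaN-masked land-sea mask above keeps
+        # CDO's bilinear remapping from blending land values into coastal sea
+        # points; target cells fed only by land neighbours come out as missing
+        # instead of taking a spurious value.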
+        handler.close()
+
+        cdo = Utils.cdo
+        temp = TempFile.get()
+        cdo.remapbil(self.grid, input=variable_file, output=temp)
+        Utils.setminmax(temp, self.variable)
+        self.send_file(temp, self.domain, self.variable, self.startdate, self.member, self.chunk, grid=self.grid)
diff --git a/earthdiagnostics/ocean/maxmoc.py b/earthdiagnostics/ocean/maxmoc.py
new file mode 100644
index 0000000000000000000000000000000000000000..fe212894032b88ac6f379b51d114e2f04311ae52
--- /dev/null
+++ b/earthdiagnostics/ocean/maxmoc.py
@@ -0,0 +1,225 @@
+# coding=utf-8
+import netCDF4
+import numpy as np
+import os
+from autosubmit.config.log import Log
+from earthdiagnostics.constants import Basins
+from earthdiagnostics.box import Box
+from earthdiagnostics.diagnostic import Diagnostic
+from earthdiagnostics.utils import Utils
+from earthdiagnostics.variable import Domains
+
+
+class MaxMoc(Diagnostic):
+    """
+    Compute an Atlantic MOC index by finding the maximum of the annual
+    mean meridional overturning in a latitude / depth region
+
+    :original author: Virginie Guemas
+    :contributor: Javier Vegas-Regidor
+
+    :created: March 2012
+    :last modified: June 2016
+
+    :param data_manager: data management object
+    :type data_manager: DataManager
+    :param startdate: startdate
+    :type startdate: str
+    :param member: member number
+    :type member: int
+    :param year: year to compute
+    :type year: int
+    :param basin: basin to compute
+    :type basin: Basin
+    :param box: box to compute
+    :type box: Box
+    """
+
+    alias = 'mocmax'
+    "Diagnostic alias for the configuration file"
+
+    def __init__(self, data_manager, startdate, member, year, basin, box):
+        Diagnostic.__init__(self, data_manager)
+        self.basin = basin
+        self.startdate = startdate
+        self.member = member
+        self.year = year
+        self.required_vars = ['vo']
+        self.generated_vars = ['vsftmyz']
+        self.box = box
+
+    def __str__(self):
+        return 'Max moc Startdate: {0} Member: {1} Year: {2} Box: {3} ' \
+               'Basin: {4}'.format(self.startdate, self.member, self.year, self.box, self.basin.fullname)
+
+    def __eq__(self, other):
+        return self.startdate == other.startdate and self.member == other.member and self.year == other.year and \
+            self.box == other.box and self.basin == other.basin
+
+    @classmethod
+    def generate_jobs(cls, diags, options):
+        """
+        Creates a job for each complete year to compute the diagnostic
+
+        :param diags: Diagnostics manager class
+        :type diags: Diags
+        :param options: minimum latitude, maximum latitude, minimum depth, maximum depth, basin=global
+        :type options: list[str]
+        :return:
+        """
+        num_options = len(options) - 1
+        if num_options < 4:
+            raise Exception('You must specify the box to use')
+        if num_options > 5:
+            raise Exception('You must specify between 4 and 5 parameters for the max moc diagnostic')
+        box = Box()
+        box.min_lat = int(options[1])
+        box.max_lat = int(options[2])
+        box.min_depth = int(options[3])
+        box.max_depth = int(options[4])
+        if num_options > 4:
+            basin = Basins.parse(options[5])
+        else:
+            basin = Basins.Global
+
+        job_list = list()
+        for startdate in diags.startdates:
+            for member in diags.members:
+                years = diags.config.experiment.get_full_years(startdate)
+                if len(years) == 0:
+                    Log.user_warning('No complete years are available with the given configuration. '
+                                     'MaxMoc can not be computed')
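+                # Editor's note -- usage sketch (hypothetical values, assuming the
+                # comma-separated option syntax parsed above): an entry such as
+                #
+                #     mocmax,30,60,500,3000
+                #
+                # queues one job per complete year of each startdate/member pair,
+                # searching 30N-60N and 500-3000 m; a fifth option would select a
+                # basin other than the global default.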
+                for year in years:
+                    job_list.append(MaxMoc(diags.data_manager, startdate, member, year, basin, box))
+        return job_list
+
+    def compute(self):
+        """
+        Runs the diagnostic
+        """
+        nco = Utils.nco
+
+        temp = self.data_manager.get_year(Domains.ocean, 'vsftmyz', self.startdate, self.member, self.year)
+
+        handler = Utils.openCdf(temp)
+        if 'i' in handler.dimensions:
+            handler.close()
+            nco.ncwa(input=temp, output=temp, options='-O -a i')
+        else:
+            handler.close()
+        handler = Utils.openCdf(temp)
+        basin_index = np.where(handler.variables['basin'][:] == self.basin.fullname)
+        # np.where returns a tuple of index arrays, so the emptiness check must look inside it
+        if len(basin_index[0]) == 0:
+            raise Exception("Basin {1} is not defined in {0}".format(temp, self.basin.fullname))
+        basin_index = basin_index[0][0]
+
+        lev = handler.variables['lev'][:]
+        lat = handler.variables['lat'][:]
+
+        if self.box.min_lat == self.box.max_lat:
+            lat_inds = ((np.abs(lat - self.box.min_lat)).argmin(),)
+        else:
+            lat_inds = np.where((lat > self.box.min_lat) & (lat < self.box.max_lat))[0]
+
+        if self.box.min_depth == self.box.max_depth:
+            lev_inds = ((np.abs(lev - self.box.min_depth)).argmin(),)
+        else:
+            lev_inds = np.where((lev > self.box.min_depth) & (lev < self.box.max_depth))[0]
+
+        Log.info('Computing year {0}', str(self.year))
+        moc = handler.variables['vsftmyz'][:, lev_inds, lat_inds, basin_index]
+        handler.close()
+        os.remove(temp)
+
+        moc = np.mean(moc, 0)
+
+        maximum = np.amax(moc)
+        max_index = np.unravel_index(np.argmax(moc), moc.shape)
+        # noinspection PyUnresolvedReferences
+        max_lev = lev[lev_inds[max_index[0]]]
+        # noinspection PyUnresolvedReferences
+        max_lat = lat[lat_inds[max_index[1]]]
+
+        minimum = np.amin(moc)
+        minimum_index = np.unravel_index(np.argmin(moc), moc.shape)
+        # noinspection PyUnresolvedReferences
+        min_lev = lev[lev_inds[minimum_index[0]]]
+        # noinspection PyUnresolvedReferences
+        min_lat = lat[lat_inds[minimum_index[1]]]
+
+        Log.info('Maximum {0} Sv, latitude {1} depth {2} m', maximum, max_lat, max_lev)
+        Log.info('Minimum {0} Sv, latitude {1} depth {2} m', minimum, min_lat, min_lev)
+
+        handler = self._create_output_file(temp)
+        var = handler.createVariable('vsftmyzmax', float, ('time',))
+        var.long_name = 'Maximum_Overturning'
+        var.units = 'Sverdrup'
+        var.valid_min = -1000.
+        var.valid_max = 1000.
+        var[0] = maximum
+        handler.close()
+        self.send_file(temp, Domains.ocean, 'vsftmyzmax', self.startdate, self.member, box=self.box,
+                       frequency='yr', year=self.year)
+
+        handler = self._create_output_file(temp)
+        var = handler.createVariable('vsftmyzmaxlat', float, ('time',))
+        var.long_name = 'Latitude_of_Maximum_Overturning'
+        var.units = 'Degrees'
+        var.valid_min = -90.
+        var.valid_max = 90.
+        var[0] = max_lat
+        handler.close()
+        self.send_file(temp, Domains.ocean, 'vsftmyzmaxlat', self.startdate, self.member, box=self.box,
+                       frequency='yr', year=self.year)
+
+        handler = self._create_output_file(temp)
+        var = handler.createVariable('vsftmyzmaxlev', float, ('time',))
+        var.long_name = 'Depth_of_Maximum_Overturning'
+        var.units = 'Meters'
+        var.valid_min = 0.
+        var.valid_max = 10000.
+        var[0] = max_lev
+        handler.close()
+        self.send_file(temp, Domains.ocean, 'vsftmyzmaxlev', self.startdate, self.member, box=self.box,
+                       frequency='yr', year=self.year)
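+        # The three remaining scalars (minimum and its latitude / depth) follow the
+        # same pattern: a fresh single-value time series from _create_output_file,
+        # each sent under its own variable name.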
+
+        handler = self._create_output_file(temp)
+        var = handler.createVariable('vsftmyzmin', float, ('time',))
+        var.long_name = 'Minimum_Overturning'
+        var.units = 'Sverdrup'
+        var.valid_min = -1000.
+        var.valid_max = 1000.
+        var[0] = minimum
+        handler.close()
+        self.send_file(temp, Domains.ocean, 'vsftmyzmin', self.startdate, self.member, box=self.box,
+                       frequency='yr', year=self.year)
+
+        handler = self._create_output_file(temp)
+        var = handler.createVariable('vsftmyzminlat', float, ('time',))
+        var.long_name = 'Latitude_of_Minimum_Overturning'
+        var.units = 'Degrees'
+        var.valid_min = -90.
+        var.valid_max = 90.
+        var[0] = min_lat
+        handler.close()
+        self.send_file(temp, Domains.ocean, 'vsftmyzminlat', self.startdate, self.member, box=self.box,
+                       frequency='yr', year=self.year)
+
+        handler = self._create_output_file(temp)
+        var = handler.createVariable('vsftmyzminlev', float, ('time',))
+        var.long_name = 'Depth_of_Minimum_Overturning'
+        var.units = 'Meters'
+        var.valid_min = 0.
+        var.valid_max = 10000.
+        var[0] = min_lev
+        handler.close()
+        self.send_file(temp, Domains.ocean, 'vsftmyzminlev', self.startdate, self.member, box=self.box,
+                       frequency='yr', year=self.year)
+
+    def _create_output_file(self, temp):
+        handler = netCDF4.Dataset(temp, 'w')
+        handler.createDimension('time')
+
+        time = handler.createVariable('time', 'i2', ('time',))
+        time.calendar = 'gregorian'
+        time.units = 'days since January 1, {0}'.format(self.year)
+        return handler
diff --git a/earthdiagnostics/ocean/mixedlayerheatcontent.py b/earthdiagnostics/ocean/mixedlayerheatcontent.py
new file mode 100644
index 0000000000000000000000000000000000000000..f5af3f53dfe51fa1cc792da24b76b2f5e0a3180d
--- /dev/null
+++ b/earthdiagnostics/ocean/mixedlayerheatcontent.py
@@ -0,0 +1,82 @@
+# coding=utf-8
+import os
+
+from earthdiagnostics.diagnostic import Diagnostic
+from earthdiagnostics import cdftools
+from earthdiagnostics.utils import Utils, TempFile
+from earthdiagnostics.variable import Domains
+
+
+class MixedLayerHeatContent(Diagnostic):
+    """
+    Compute mixed layer heat content
+
+    :original author: Virginie Guemas
+    :contributor: Javier Vegas-Regidor
+
+    :created: February 2012
+    :last modified: June 2016
+
+    :param data_manager: data management object
+    :type data_manager: DataManager
+    :param startdate: startdate
+    :type startdate: str
+    :param member: member number
+    :type member: int
+    :param chunk: chunk's number
+    :type chunk: int
+    """
+
+    alias = 'mlotsthc'
+    "Diagnostic alias for the configuration file"
+
+    def __init__(self, data_manager, startdate, member, chunk):
+        Diagnostic.__init__(self, data_manager)
+        self.startdate = startdate
+        self.member = member
+        self.chunk = chunk
+        self.required_vars = ['thetao', 'mlotst']
+        self.generated_vars = ['ohcvsumlotst']
+
+    def __eq__(self, other):
+        return self.startdate == other.startdate and self.member == other.member and self.chunk == other.chunk
+
+    def __str__(self):
+        return 'Mixed layer heat content Startdate: {0} Member: {1} Chunk: {2}'.format(self.startdate, self.member,
+                                                                                       self.chunk)
+
+    @classmethod
+    def generate_jobs(cls, diags, options):
+        """
+        Creates a job for each chunk to compute the diagnostic
+
+        :param diags: Diagnostics manager class
+        :type diags: Diags
+        :param options: None
+        :type options: list[str]
+        :return:
+        """
+        if len(options) > 1:
+            raise Exception('The mixed layer ocean heat content diagnostic has no options')
+        job_list = list()
+        for startdate, member, chunk in diags.config.experiment.get_chunk_list():
+            job_list.append(MixedLayerHeatContent(diags.data_manager, startdate, member, chunk))
+        return job_list
+
+    def compute(self):
+        """
+        Runs the diagnostic
+        """
+        temperature_file = self.data_manager.get_file(Domains.ocean, 'thetao', self.startdate, self.member, 
self.chunk) + mlotst_file = self.data_manager.get_file(Domains.ocean, 'mlotst', self.startdate, self.member, self.chunk) + + Utils.nco.ncks(input=mlotst_file, output=temperature_file, options='-A -v mlotst') + + temp = TempFile.get() + cdftools.run('cdfmxlheatc', input=temperature_file, output=temp) + + os.remove(temperature_file) + + Utils.rename_variables(temp, {'x': 'i', 'y': 'j', 'somxlheatc': 'ohcvsumlotst'}, False, True) + Utils.setminmax(temp, 'ohcvsumlotst') + self.send_file(temp, Domains.ocean, 'ohcvsumlotst', self.startdate, self.member, self.chunk) diff --git a/earthdiagnostics/ocean/mixedlayersaltcontent.py b/earthdiagnostics/ocean/mixedlayersaltcontent.py new file mode 100644 index 0000000000000000000000000000000000000000..2fda1b3f990f05559dd8041b2353bc00e70d36a1 --- /dev/null +++ b/earthdiagnostics/ocean/mixedlayersaltcontent.py @@ -0,0 +1,79 @@ +# coding=utf-8 +import os +from earthdiagnostics import cdftools +from earthdiagnostics.diagnostic import Diagnostic +from earthdiagnostics.utils import Utils, TempFile +from earthdiagnostics.variable import Domains + + +class MixedLayerSaltContent(Diagnostic): + """ + Compute mixed layer salt content + + :original author: Virginie Guemas + :contributor: Javier Vegas-Regidor + + :created: February 2012 + :last modified: June 2016 + + :param data_manager: data management object + :type data_manager: DataManager + :param startdate: startdate + :type startdate: str + :param member: member number + :type member: int + :param chunk: chunk's number + :type chunk: int + """ + alias = 'mlotstsc' + "Diagnostic alias for the configuration file" + + def __init__(self, data_manager, startdate, member, chunk): + Diagnostic.__init__(self, data_manager) + self.startdate = startdate + self.member = member + self.chunk = chunk + self.required_vars = ['so', 'mlotst'] + self.generated_vars = ['scvertsum'] + + def __str__(self): + return 'Mixed layer salt content Startdate: {0} Member: {1} Chunk: {2}'.format(self.startdate, self.member, + self.chunk) + + def __eq__(self, other): + return self.startdate == other.startdate and self.member == other.member and self.chunk == other.chunk + + @classmethod + def generate_jobs(cls, diags, options): + """ + Creates a job for each chunk to compute the diagnostic + + :param diags: Diagnostics manager class + :type diags: Diags + :param options: None + :type options: list[str] + :return: + """ + if len(options) > 1: + raise Exception('The mixed layer salt content diagnostic has no options') + job_list = list() + for startdate, member, chunk in diags.config.experiment.get_chunk_list(): + job_list.append(MixedLayerSaltContent(diags.data_manager, startdate, member, chunk)) + return job_list + + def compute(self): + """ + Runs the diagnostic + """ + salinity_file = self.data_manager.get_file(Domains.ocean, 'so', self.startdate, self.member, self.chunk) + mlotst_file = self.data_manager.get_file(Domains.ocean, 'mlotst', self.startdate, self.member, self.chunk) + + Utils.nco.ncks(input=mlotst_file, output=salinity_file, options='-A -v mlotst') + + temp = TempFile.get() + cdftools.run('cdfmxlsaltc', input=salinity_file, output=temp) + os.remove(salinity_file) + + Utils.rename_variables(temp, {'x': 'i', 'y': 'j', 'somxlsaltc': 'scvsummlotst'}, False, True) + Utils.setminmax(temp, 'scvsummlotst') + self.send_file(temp, Domains.ocean, 'scvsummlotst', self.startdate, self.member, self.chunk) diff --git a/earthdiagnostics/ocean/moc.py b/earthdiagnostics/ocean/moc.py new file mode 100644 index 
0000000000000000000000000000000000000000..459651d2acde77d03e10bb54f3ca808ace62e224 --- /dev/null +++ b/earthdiagnostics/ocean/moc.py @@ -0,0 +1,110 @@ +# coding=utf-8 +import numpy as np +from autosubmit.config.log import Log + +from earthdiagnostics import cdftools +from earthdiagnostics.constants import Basins +from earthdiagnostics.diagnostic import Diagnostic +from earthdiagnostics.utils import Utils, TempFile +from earthdiagnostics.variable import Domains + + +class Moc(Diagnostic): + """ + Compute the MOC for oceanic basins + + :original author: Virginie Guemas + :contributor: Javier Vegas-Regidor + + :created: March 2012 + :last modified: June 2016 + + :param data_manager: data management object + :type data_manager: DataManager + :param startdate: startdate + :type startdate: str + :param member: member number + :type member: int + :param chunk: chunk's number + :type chunk: int + """ + + alias = 'moc' + "Diagnostic alias for the configuration file" + + def __init__(self, data_manager, startdate, member, chunk): + Diagnostic.__init__(self, data_manager) + self.startdate = startdate + self.member = member + self.chunk = chunk + self.required_vars = ['vo'] + self.generated_vars = ['vsftmyz'] + + def __str__(self): + return 'MOC Startdate: {0} Member: {1} Chunk: {2}'.format(self.startdate, self.member, self.chunk) + + def __eq__(self, other): + return self.startdate == other.startdate and self.member == other.member and self.chunk == other.chunk + + @classmethod + def generate_jobs(cls, diags, options): + """ + Creates a job for each chunk to compute the diagnostic + + :param diags: Diagnostics manager class + :type diags: Diags + :param options: None + :type options: list[str] + :return: + """ + if len(options) > 1: + raise Exception('The MOC diagnostic has no options') + job_list = list() + for startdate, member, chunk in diags.config.experiment.get_chunk_list(): + job_list.append(Moc(diags.data_manager, startdate, member, chunk)) + return job_list + + def compute(self): + """ + Runs the diagnostic + """ + temp = TempFile.get() + + input_file = self.data_manager.get_file(Domains.ocean, 'vo', self.startdate, self.member, self.chunk) + + Log.debug('Computing MOC') + cdftools.run('cdfmoc', input=input_file, output=temp) + Utils.nco.ncks(input=input_file, output=temp, options='-A -v lev') + Utils.convert2netcdf4(temp) + + Log.debug('Reformatting variables') + handler = Utils.openCdf(temp) + + handler.createDimension('basin', 5) + handler.createVariable('basin', str, 'basin') + handler.variables['basin'][:] = np.array([Basins.Global.fullname, Basins.Atlantic.fullname, + Basins.Pacific.fullname, Basins.IndoPacific.fullname, + Basins.Indian.fullname], dtype=object) + example = handler.variables['zomsfglo'] + # noinspection PyProtectedMember + moc = handler.createVariable('vsftmyz', example.datatype, + ('time', 'lev', 'i', 'j', 'basin'), + fill_value=example._FillValue) + + moc.units = example.units + moc.add_offset = example.add_offset + moc.scale_factor = example.scale_factor + + moc[:, :, :, :, 0] = handler.variables['zomsfglo'][:] + moc[:, :, :, :, 1] = handler.variables['zomsfatl'][:] + moc[:, :, :, :, 2] = handler.variables['zomsfpac'][:] + moc[:, :, :, :, 3] = handler.variables['zomsfinp'][:] + moc[:, :, :, :, 4] = handler.variables['zomsfind'][:] + + handler.close() + + Utils.nco.ncks(input=temp, output=temp, + options='-O -x -v zomsfglo,zomsfatl,zomsfpac,zomsfinp,zomsfind,zomsfinp0') + Utils.setminmax(temp, 'vsftmyz') + + self.send_file(temp, Domains.ocean, 'vsftmyz', 
self.startdate, self.member, self.chunk) diff --git a/earthdiagnostics/ocean/psi.py b/earthdiagnostics/ocean/psi.py new file mode 100644 index 0000000000000000000000000000000000000000..0cabb6799188ed229faaf87d0a635a91d37285a1 --- /dev/null +++ b/earthdiagnostics/ocean/psi.py @@ -0,0 +1,73 @@ +# coding=utf-8 +from earthdiagnostics import cdftools +from earthdiagnostics.diagnostic import Diagnostic +from earthdiagnostics.utils import Utils, TempFile +from earthdiagnostics.variable import Domains + + +class Psi(Diagnostic): + """ + Compute the barotropic stream function + + :original author: Virginie Guemas + :contributor: Javier Vegas-Regidor + + :created: March 2012 + :last modified: June 2016 + + :param data_manager: data management object + :type data_manager: DataManager + :param startdate: startdate + :type startdate: str + :param member: member number + :type member: int + :param chunk: chunk's number + :type chunk: int + """ + + alias = 'psi' + "Diagnostic alias for the configuration file" + + def __init__(self, data_manager, startdate, member, chunk): + Diagnostic.__init__(self, data_manager) + self.startdate = startdate + self.member = member + self.chunk = chunk + self.required_vars = ['vo', 'uo'] + self.generated_vars = ['vsftbarot'] + + def __eq__(self, other): + return self.startdate == other.startdate and self.member == other.member and self.chunk == other.chunk + + def __str__(self): + return 'PSI Startdate: {0} Member: {1} Chunk: {2}'.format(self.startdate, self.member, self.chunk) + + @classmethod + def generate_jobs(cls, diags, options): + """ + Creates a job for each chunk to compute the diagnostic + + :param diags: Diagnostics manager class + :type diags: Diags + :param options: None + :type options: list[str] + :return: + """ + if len(options) > 1: + raise Exception('The PSI diagnostic has no options') + job_list = list() + for startdate, member, chunk in diags.config.experiment.get_chunk_list(): + job_list.append(Psi(diags.data_manager, startdate, member, chunk)) + return job_list + + def compute(self): + """ + Runs the diagnostic + """ + temp = TempFile.get() + input_file_u = self.data_manager.get_file(Domains.ocean, 'uo', self.startdate, self.member, self.chunk) + input_file_v = self.data_manager.get_file(Domains.ocean, 'vo', self.startdate, self.member, self.chunk) + cdftools.run('cdfpsi', input=[input_file_u, input_file_v], output=temp, options='-mean -mask') + Utils.rename_variable(temp, 'sobarstf', 'vsftbarot') + Utils.setminmax(temp, 'vsftbarot') + self.send_file(temp, Domains.ocean, 'vsftbarot', self.startdate, self.member, self.chunk) diff --git a/earthdiagnostics/ocean/siasiesiv.py b/earthdiagnostics/ocean/siasiesiv.py new file mode 100644 index 0000000000000000000000000000000000000000..6e46264e3930d3f7179d9241b8e9b107e54de4d1 --- /dev/null +++ b/earthdiagnostics/ocean/siasiesiv.py @@ -0,0 +1,152 @@ +# coding=utf-8 +import netCDF4 +import os +from earthdiagnostics.constants import Basins +from earthdiagnostics.diagnostic import Diagnostic +from earthdiagnostics.utils import Utils, TempFile +import earthdiagnostics.cdftoolspython as cdftoolspython +import numpy as np + +from earthdiagnostics.variable import Domains + + +class Siasiesiv(Diagnostic): + """ + Compute the sea ice extent , area and volume in both hemispheres or a specified region. 
+ + + :original author: Virginie Guemas + :contributor: Neven Fuckar + :contributor: Ruben Cruz + :contributor: Javier Vegas-Regidor + + :created: April 2012 + :last modified: June 2016 + + """ + alias = 'siasiesiv' + "Diagnostic alias for the configuration file" + + e1t = None + e2t = None + gphit = None + + def __init__(self, data_manager, startdate, member, chunk, basin, mask): + """ + :param data_manager: data management object + :type data_manager: DataManager + :param startdate: startdate + :type startdate: str + :param member: member number + :type member: int + :param chunk: chunk's number + :type chunk: int + :param mask: mask to use + :type mask: numpy.array + """ + Diagnostic.__init__(self, data_manager) + self.basin = basin + self.startdate = startdate + self.member = member + self.chunk = chunk + self.mask = mask + self.required_vars = ['sit', 'sic'] + self.generated_vars = ['siextents', 'sivols', 'siareas', 'siextentn', 'sivoln', 'siarean'] + + def __str__(self): + return 'Siasiesiv Startdate: {0} Member: {1} Chunk: {2} Basin: {3}'.format(self.startdate, self.member, + self.chunk, self.basin.fullname) + + @classmethod + def generate_jobs(cls, diags, options): + """ + Creates a job for each chunk to compute the diagnostic + + :param diags: Diagnostics manager class + :type diags: Diags + :param options: basin + :type options: list[str] + :return: + """ + if len(options) != 2: + raise Exception('You must specify the basin for the siasiesiv diagnostic (and nothing else)') + basin = Basins.parse(options[1]) + + mask = Utils.get_mask(basin) + + job_list = list() + for startdate, member, chunk in diags.config.experiment.get_chunk_list(): + job_list.append(Siasiesiv(diags.data_manager, startdate, member, chunk, basin, mask)) + mesh_handler = Utils.openCdf('mesh_hgr.nc') + Siasiesiv.e1t = np.asfortranarray(mesh_handler.variables['e1t'][0, :]) + Siasiesiv.e2t = np.asfortranarray(mesh_handler.variables['e2t'][0, :]) + Siasiesiv.gphit = np.asfortranarray(mesh_handler.variables['gphit'][0, :]) + mesh_handler.close() + + return job_list + + def compute(self): + """ + Runs the diagnostic + """ + sit_file = self.data_manager.get_file(Domains.seaIce, 'sit', self.startdate, self.member, self.chunk) + sit_handler = Utils.openCdf(sit_file) + sit = np.asfortranarray(sit_handler.variables['sit'][:]) + timesteps = sit_handler.dimensions['time'].size + sit_handler.close() + + sic_file = self.data_manager.get_file(Domains.seaIce, 'sic', self.startdate, self.member, self.chunk) + sic_handler = Utils.openCdf(sic_file) + sic = np.asfortranarray(sic_handler.variables['sic'][:]) + sic_handler.close() + + result = np.empty((8, timesteps)) + for t in range(0, timesteps): + try: + + result[:, t] = cdftoolspython.icediag.icediags(Siasiesiv.e1t, Siasiesiv.e2t, self.mask, + Siasiesiv.gphit, sit[t, :], sic[t, :]) + except Exception as ex: + print ex + + self.send_file(self._extract_variable_and_rename(sit_file, result[4, :], 'sivols', '10^9 m3'), + Domains.seaIce, 'sivols', self.startdate, self.member, self.chunk, region=self.basin.fullname) + self.send_file(self._extract_variable_and_rename(sit_file, result[5, :], 'siareas', '10^9 m2'), + Domains.seaIce, 'siareas', self.startdate, self.member, self.chunk, region=self.basin.fullname) + self.send_file(self._extract_variable_and_rename(sit_file, result[7, :], 'siextents', '10^9 m2'), + Domains.seaIce, 'siextents', self.startdate, self.member, self.chunk, region=self.basin.fullname) + + self.send_file(self._extract_variable_and_rename(sit_file, result[0, :], 
'sivoln', '10^9 m3'), + Domains.seaIce, 'sivoln', self.startdate, self.member, self.chunk, region=self.basin.fullname) + self.send_file(self._extract_variable_and_rename(sit_file, result[1, :], 'siarean', '10^9 m2'), + Domains.seaIce, 'siarean', self.startdate, self.member, self.chunk, region=self.basin.fullname) + self.send_file(self._extract_variable_and_rename(sit_file, result[3, :], 'siextentn', '10^9 m2'), + Domains.seaIce, 'siextentn', self.startdate, self.member, self.chunk, region=self.basin.fullname) + + @staticmethod + def _extract_variable_and_rename(reference_file, values, cmor_name, units): + temp = TempFile.get() + reference_handler = Utils.openCdf(reference_file) + # remove the empty placeholder created by TempFile.get() before creating the new dataset + os.remove(temp) + handler = netCDF4.Dataset(temp, 'w') + + # Create dimensions + handler.createDimension('time') + handler.createDimension('bnds', 2) + + # Copy time variables + Utils.copy_variable(reference_handler, handler, 'time') + Utils.copy_variable(reference_handler, handler, 'time_bnds') + Utils.copy_variable(reference_handler, handler, 'leadtime') + reference_handler.close() + + new_var = handler.createVariable(cmor_name, float, 'time', fill_value=1.0e20) + new_var.units = units + new_var.short_name = cmor_name + new_var.valid_min = 0.0 + new_var.valid_max = np.max(values) + new_var[:] = values + handler.close() + return temp + diff --git a/earthdiagnostics/ocean/verticalmean.py b/earthdiagnostics/ocean/verticalmean.py new file mode 100644 index 0000000000000000000000000000000000000000..7783ab245f5ac14dec5a62f6fdbb8de87c5238d6 --- /dev/null +++ b/earthdiagnostics/ocean/verticalmean.py @@ -0,0 +1,110 @@ +# coding=utf-8 +from earthdiagnostics import cdftools +from earthdiagnostics.box import Box +from earthdiagnostics.diagnostic import Diagnostic +from earthdiagnostics.utils import Utils, TempFile +from earthdiagnostics.variable import Domains + + +class VerticalMean(Diagnostic): + """ + Chooses a vertical level in the ocean, or vertically averages between + two or more ocean levels + + :original author: Virginie Guemas + :contributor: Eleftheria Exarchou + :contributor: Javier Vegas-Regidor + + :created: February 2012 + :last modified: June 2016 + + :param data_manager: data management object + :type data_manager: DataManager + :param startdate: startdate + :type startdate: str + :param member: member number + :type member: int + :param chunk: chunk's number + :type chunk: int + :param variable: variable to average + :type variable: str + :param box: box used to restrict the vertical mean + :type box: Box + """ + + alias = 'vertmean' + "Diagnostic alias for the configuration file" + + def __init__(self, data_manager, startdate, member, chunk, variable, box): + Diagnostic.__init__(self, data_manager) + self.startdate = startdate + self.member = member + self.chunk = chunk + self.variable = variable + self.box = box + self.required_vars = [variable] + self.generated_vars = [variable + 'vmean'] + + def __eq__(self, other): + return self.startdate == other.startdate and self.member == other.member and self.chunk == other.chunk and \ + self.box == other.box and self.variable == other.variable + + def __str__(self): + return 'Vertical mean Startdate: {0} Member: {1} Chunk: {2} Variable: {3} ' \ + 'Box: {4}'.format(self.startdate, self.member, self.chunk, self.variable, self.box) + + @classmethod + def generate_jobs(cls, diags, options): + """ + Creates a job for each chunk to compute the diagnostic + + :param diags: Diagnostics manager class + :type diags: Diags + :param options: variable, minimum depth (level),
maximum depth (level) + :type options: list[str] + :return: + """ + num_options = len(options) - 1 + if num_options < 1: + raise Exception('You must specify the variable to average vertically') + if num_options > 3: + raise Exception('You must specify between one and three parameters for the vertical mean') + variable = options[1] + + box = Box() + if num_options >= 2: + box.min_depth = float(options[2]) + if num_options >= 3: + box.max_depth = float(options[3]) + + job_list = list() + for startdate, member, chunk in diags.config.experiment.get_chunk_list(): + job_list.append(VerticalMean(diags.data_manager, startdate, member, chunk, + variable, box)) + return job_list + + def compute(self): + """ + Runs the diagnostic + """ + temp = TempFile.get() + variable_file = self.data_manager.get_file(Domains.ocean, self.variable, self.startdate, self.member, self.chunk) + + handler = Utils.openCdf(variable_file) + if self.box.min_depth is None: + lev_min = handler.variables['lev'][0] + else: + lev_min = self.box.min_depth + + if self.box.max_depth is None: + lev_max = handler.variables['lev'][-1] + else: + lev_max = self.box.max_depth + handler.close() + + cdftools.run('cdfvertmean', input=variable_file, output=temp, options=[self.variable, 'T', lev_min, lev_max, + '-debug']) + Utils.setminmax(temp, '{0}_vert_mean'.format(self.variable)) + self.send_file(temp, Domains.ocean, self.variable + 'vmean', self.startdate, self.member, self.chunk, + box=self.box, rename_var='{0}_vert_mean'.format(self.variable)) + diff --git a/earthdiagnostics/ocean/verticalmeanmeters.py b/earthdiagnostics/ocean/verticalmeanmeters.py new file mode 100644 index 0000000000000000000000000000000000000000..5d43196b262926a32c0da3f6911e662d5cebe943 --- /dev/null +++ b/earthdiagnostics/ocean/verticalmeanmeters.py @@ -0,0 +1,106 @@ +# coding=utf-8 +from earthdiagnostics import cdftools +from earthdiagnostics.box import Box +from earthdiagnostics.diagnostic import Diagnostic +from earthdiagnostics.utils import Utils, TempFile +from earthdiagnostics.variable import Domains + + +class VerticalMeanMeters(Diagnostic): + """ + Averages any given variable vertically, with depth limits given in meters + + :original author: Virginie Guemas + :contributor: Javier Vegas-Regidor + + :created: February 2012 + :last modified: June 2016 + + :param data_manager: data management object + :type data_manager: DataManager + :param startdate: startdate + :type startdate: str + :param member: member number + :type member: int + :param chunk: chunk's number + :type chunk: int + :param variable: variable to average + :type variable: str + :param box: box used to restrict the vertical mean + :type box: Box + + """ + + alias = 'vertmeanmeters' + "Diagnostic alias for the configuration file" + + def __init__(self, data_manager, startdate, member, chunk, variable, box): + Diagnostic.__init__(self, data_manager) + self.startdate = startdate + self.member = member + self.chunk = chunk + self.variable = variable + self.box = box + self.required_vars = [variable] + self.generated_vars = [variable + 'vmean'] + + def __eq__(self, other): + return self.startdate == other.startdate and self.member == other.member and self.chunk == other.chunk and \ + self.box == other.box and self.variable == other.variable + + def __str__(self): + return 'Vertical mean meters Startdate: {0} Member: {1} Chunk: {2} Variable: {3} ' \ + 'Box: {4}'.format(self.startdate, self.member, self.chunk, self.variable, self.box) + + @classmethod + def generate_jobs(cls, diags, options): + """ + Creates a job for each chunk to compute
the diagnostic + + :param diags: Diagnostics manager class + :type diags: Diags + :param options: variable, minimum depth (meters), maximum depth (meters) + :type options: list[str] + :return: + """ + num_options = len(options) - 1 + if num_options < 1: + raise Exception('You must specify the variable to average vertically') + if num_options > 3: + raise Exception('You must specify between one and three parameters for the vertical mean') + variable = options[1] + box = Box(True) + if num_options >= 2: + box.min_depth = float(options[2]) + if num_options >= 3: + box.max_depth = float(options[3]) + + job_list = list() + for startdate, member, chunk in diags.config.experiment.get_chunk_list(): + job_list.append(VerticalMeanMeters(diags.data_manager, startdate, member, chunk, variable, box)) + return job_list + + def compute(self): + """ + Runs the diagnostic + """ + temp = TempFile.get() + variable_file = self.data_manager.get_file(Domains.ocean, self.variable, self.startdate, self.member, self.chunk) + + handler = Utils.openCdf(variable_file) + if self.box.min_depth is None: + lev_min = handler.variables['lev'][0] + else: + lev_min = self.box.min_depth + + if self.box.max_depth is None: + lev_max = handler.variables['lev'][-1] + else: + lev_max = self.box.max_depth + handler.close() + + cdftools.run('cdfvertmean', input=variable_file, output=temp, options=[self.variable, 'T', lev_min, lev_max, + '-debug']) + Utils.setminmax(temp, '{0}_vert_mean'.format(self.variable)) + self.send_file(temp, Domains.ocean, self.variable + 'vmean', self.startdate, self.member, self.chunk, + box=self.box, rename_var='{0}_vert_mean'.format(self.variable))
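Both vertical means parse the same positional options; the only difference is whether the optional depth bounds are model levels (vertmean) or meters (vertmeanmeters). A sketch with illustrative values (diags is assumed to be a configured diagnostics manager):

    # options[0] is the alias, options[1] the variable, options[2:4] the optional depth bounds
    VerticalMean.generate_jobs(diags, ['vertmean', 'thetao', '1', '10'])
    VerticalMeanMeters.generate_jobs(diags, ['vertmeanmeters', 'thetao', '0', '300'])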
+ """ + + def get_option(self, section, option, default=''): + """ + Gets an option + + :param section: section that contains the option + :type section: str + :param option: option to get + :type option: str + :param default: value to be returned if option is not present + :type default: object + :return: option value + :rtype: str + """ + if self.has_option(section, option): + return self.get(section, option) + else: + return default + + def get_list_option(self, section, option, default=list(), separator=' '): + """ + Gets a list option + + :param section: section that contains the option + :type section: str + :param option: option to get + :type option: str + :param default: value to be returned if option is not present + :type default: object + :param separator: separator used to split the list + :type separator: str + :return: option value + :rtype: list + """ + if self.has_option(section, option): + return self.get(section, option).split(separator) + else: + return default + + def get_bool_option(self, section, option, default=True): + """ + Gets a boolean option + + :param section: section that contains the option + :type section: str + :param option: option to get + :type option: str + :param default: value to be returned if option is not present + :type default: bool + :return: option value + :rtype: bool + """ + if self.has_option(section, option): + return self.get(section, option).lower().strip() == 'true' + else: + return default + + def get_int_option(self, section, option, default=0): + """ + Gets an integer option + + :param section: section that contains the option + :type section: str + :param option: option to get + :type option: str + :param default: value to be returned if option is not present + :type default: int + :return: option value + :rtype: int + """ + if self.has_option(section, option): + return int(self.get(section, option)) + else: + return default + + def get_float_option(self, section, option, default=0.0): + """ + Gets a float option + + :param section: section that contains the option + :type section: str + :param option: option to get + :type option: str + :param default: value to be returned if option is not present + :type default: float + :return: option value + :rtype: float + """ + if self.has_option(section, option): + return float(self.get(section, option)) + else: + return default + + def check_exists(self, section, option): + """ + Checks if an option exists + + :param section: section that contains the option + :type section: str + :param option: option to check + :type option: str + :return: True if option exists, False otherwise + :rtype: bool + """ + if self.has_option(section, option): + return True + else: + Log.error('Option {0} in section {1} not found'.format(option, section)) + return False + + def check_is_boolean(self, section, option, must_exist): + """ + Checks if an option is a boolean value + + :param section: section that contains the option + :type section: str + :param option: option to check + :type option: str + :param must_exist: if True, option must exist + :type must_exist: bool + :return: True if option value is boolean, False otherwise + :rtype: bool + """ + if must_exist and not self.check_exists(section, option): + Log.error('Option {0} in section {1} must exist'.format(option, section)) + return False + if self.get_option(section, option, 'false').lower() not in ['false', 'true']: + Log.error('Option {0} in section {1} must be true or false'.format(option, section)) + return False + return True + + def 
check_is_choice(self, section, option, must_exist, choices): + """ + Checks if an option is a valid choice in the given choices + + :param section: section that contains the option + :type section: str + :param option: option to check + :type option: str + :param must_exist: if True, option must exist + :type must_exist: bool + :param choices: valid choices + :type choices: list + :return: True if option value is a valid choice, False otherwise + :rtype: bool + """ + if must_exist and not self.check_exists(section, option): + return False + value = self.get_option(section, option, choices[0]) + if value not in choices: + Log.error('Value {2} in option {0} in section {1} is not a valid choice'.format(option, section, value)) + return False + return True + + def check_is_int(self, section, option, must_exist): + """ + Checks if an option is an integer value + + :param section: section that contains the option + :type section: str + :param option: option to check + :type option: str + :param must_exist: if True, option must exist + :type must_exist: bool + :return: True if option value is integer, False otherwise + :rtype: bool + """ + if must_exist and not self.check_exists(section, option): + return False + value = self.get_option(section, option, '1') + try: + int(value) + except ValueError: + Log.error('Option {0} in section {1} is not a valid integer'.format(option, section)) + return False + return True + + def check_regex(self, section, option, must_exist, regex): + """ + Checks if an option complies with a regular expression + + :param section: section that contains the option + :type section: str + :param option: option to check + :type option: str + :param must_exist: if True, option must exist + :type must_exist: bool + :param regex: regular expression to check + :type regex: str + :return: True if option complies with regex, False otherwise + :rtype: bool + """ + if must_exist and not self.check_exists(section, option): + return False + prog = re.compile(regex) + value = self.get_option(section, option, '1') + if not prog.match(value): + Log.error('Option {0} in section {1} is not valid: {2}'.format(option, section, value)) + return False + return True +
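A minimal usage sketch for the Parser helpers (section and option names here are illustrative, not taken from a real diags.conf):

    parser = Parser()
    parser.read('diags.conf')
    scratch = parser.get_option('DIAGNOSTICS', 'SCRATCH_DIR', '/scratch')
    diags_to_run = parser.get_list_option('DIAGNOSTICS', 'DIAGS')  # split on spaces
    restore = parser.get_bool_option('DIAGNOSTICS', 'RESTORE_MESHES', False)
    cores = parser.get_int_option('DIAGNOSTICS', 'MAX_CORES', 4)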
diff --git a/earthdiagnostics/statistics/__init__.py b/earthdiagnostics/statistics/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..12da83d8521731f45d332a23f5cd3b6545fc9b17 --- /dev/null +++ b/earthdiagnostics/statistics/__init__.py @@ -0,0 +1,2 @@ +# coding=utf-8 +from monthly_percentiles import MonthlyPercentil diff --git a/earthdiagnostics/statistics/monthly_percentiles.py b/earthdiagnostics/statistics/monthly_percentiles.py new file mode 100644 index 0000000000000000000000000000000000000000..7698983ac0b48dd0347bf0f828df84cb52bfed6c --- /dev/null +++ b/earthdiagnostics/statistics/monthly_percentiles.py @@ -0,0 +1,91 @@ +# coding=utf-8 +from earthdiagnostics.diagnostic import Diagnostic +from earthdiagnostics.utils import Utils, TempFile +from earthdiagnostics.variable import Domain + + +class MonthlyPercentil(Diagnostic): + """ + Computes the given monthly percentile of a variable + + :original author: Virginie Guemas + :contributor: Eleftheria Exarchou + :contributor: Javier Vegas-Regidor + + :created: February 2012 + :last modified: June 2016 + + :param data_manager: data management object + :type data_manager: DataManager + :param startdate: startdate + :type startdate: str + :param member: member number + :type member: int + :param chunk: chunk's number + :type chunk: int + :param variable: variable to compute the percentile of + :type variable: str + :param domain: variable's CMOR domain + :type domain: Domain + :param percentile: percentile to compute, in the interval [0, 100] + :type percentile: int + """ + + alias = 'monpercent' + "Diagnostic alias for the configuration file" + + def __init__(self, data_manager, startdate, member, chunk, variable, domain, percentile): + Diagnostic.__init__(self, data_manager) + self.startdate = startdate + self.member = member + self.chunk = chunk + self.variable = variable + self.domain = domain + self.percentile = percentile + + def __eq__(self, other): + return self.startdate == other.startdate and self.member == other.member and self.chunk == other.chunk and \ + self.domain == other.domain and self.variable == other.variable and self.percentile == other.percentile + + def __str__(self): + return 'Monthly percentile {5} Startdate: {0} Member: {1} Chunk: {2} ' \ + 'Variable: {3}:{4}'.format(self.startdate, self.member, self.chunk, self.domain, self.variable, + self.percentile) + + @classmethod + def generate_jobs(cls, diags, options): + """ + Creates a job for each chunk to compute the diagnostic + + :param diags: Diagnostics manager class + :type diags: Diags + :param options: domain, variable, percentile number + :type options: list[str] + :return: + """ + num_options = len(options) - 1 + if num_options < 3: + raise Exception('You must specify the domain and variable to compute the percentile of, ' + 'and the percentile you want') + if num_options > 3: + raise Exception('The monthly percentile diagnostic accepts exactly three parameters: ' + 'domain, variable and percentile') + + domain = Domain(options[1]) + variable = options[2] + percentile = int(options[3]) + if percentile < 0 or percentile > 100: + raise Exception('Percentile value must be in the interval [0,100]') + + job_list = list() + for startdate, member, chunk in diags.config.experiment.get_chunk_list(): + job_list.append(MonthlyPercentil(diags.data_manager, startdate, member, chunk, + variable, domain, percentile)) + return job_list + + def compute(self): + """ + Runs the diagnostic + """ + variable_file = self.data_manager.get_file(self.domain, self.variable, self.startdate, self.member, self.chunk) + temp = TempFile.get() + # cdo monpctl needs the monthly minimum and maximum as second and third inputs + Utils.cdo.monpctl(str(self.percentile), input=[variable_file, '-monmin ' + variable_file, + '-monmax ' + variable_file], output=temp) + self.send_file(temp, self.domain, '{0}p{1}'.format(self.variable, self.percentile), self.startdate, self.member, + self.chunk, frequency='mon') +
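As a sketch, the monpercent diagnostic expects exactly three positional options after the alias: domain, variable and percentile (values here are illustrative; diags is assumed to be a configured diagnostics manager):

    # 90th percentile of tos, computed per month over the ocean domain
    jobs = MonthlyPercentil.generate_jobs(diags, ['monpercent', 'ocean', 'tos', '90'])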
diff --git a/earthdiagnostics/threddsmanager.py b/earthdiagnostics/threddsmanager.py new file mode 100644 index 0000000000000000000000000000000000000000..d044f91f1c69ae39c5b89b2d9b219d4a93be5745 --- /dev/null +++ b/earthdiagnostics/threddsmanager.py @@ -0,0 +1,221 @@ +# coding=utf-8 +import os +from earthdiagnostics.datamanager import DataManager, NetCDFFile +from earthdiagnostics.utils import TempFile, Utils +import urllib + +from earthdiagnostics.variable import Variable + + +class THREDDSManager(DataManager): + """ + Data manager class for CMORized experiments + """ + def __init__(self, config): + super(THREDDSManager, self).__init__(config) + self.server_url = config.thredds.server_url + data_folders = self.config.data_dir.split(':') + self.config.data_dir = None + for data_folder in data_folders: + if os.path.isdir(os.path.join(data_folder, self.experiment.institute.lower(), + self.experiment.model.lower())): + self.config.data_dir = data_folder + break + + if not self.config.data_dir: + raise Exception('Can not find model data') + + def get_file(self, domain, var, startdate, member, chunk, grid=None, box=None, frequency=None): + """ + Downloads a given file from the THREDDS server to the scratch folder and returns the path to the local copy + + :param domain: CMOR domain + :type domain: str + :param var: variable name + :type var: str + :param startdate: file's startdate + :type startdate: str + :param member: file's member + :type member: int + :param chunk: file's chunk + :type chunk: int + :param grid: file's grid (only needed if it is not the original) + :type grid: str + :param box: file's box (only needed to retrieve sections or averages) + :type box: Box + :param frequency: file's frequency (only needed if it is different from the default) + :type frequency: str + :return: path to the copy created on the scratch folder + :rtype: str + """ + if not frequency: + frequency = self.config.frequency + aggregation_path = self.get_var_url(var, startdate, frequency, box) + temp = TempFile.get() + urllib.urlretrieve(aggregation_path, temp) + if not Utils.check_netcdf_file(temp): + raise THREDDSError('Can not retrieve {0} from server'.format(aggregation_path)) + return temp + + def send_file(self, filetosend, domain, var, startdate, member, chunk=None, grid=None, region=None, box=None, + rename_var=None, frequency=None, year=None, date_str=None, move_old=False, + diagnostic=None, cmorized=False): + """ + Copies a given file to the CMOR repository. It also automatically converts to netCDF 4 if needed and can merge + with already existing ones as needed + + :param move_old: if true, moves files following older conventions that may be found on the links folder + :type move_old: bool + :param date_str: exact date_str to use in the cmorized file + :type date_str: str + :param year: if frequency is yearly, this parameter is used to give the corresponding year + :type year: int + :param rename_var: if exists, the given variable will be renamed to the one given by var + :type rename_var: str + :param filetosend: path to the file to send to the CMOR repository + :type filetosend: str + :param region: specifies the region represented by the file.
If it is defined, the data will be appended to the + CMOR repository as a new region in the file or will overwrite if region was already present + :type region: str + :param domain: CMOR domain + :type domain: Domain + :param var: variable name + :type var: str + :param startdate: file's startdate + :type startdate: str + :param member: file's member + :type member: int + :param chunk: file's chunk + :type chunk: int + :param grid: file's grid (only needed if it is not the original) + :type grid: str + :param box: file's box (only needed to retrieve sections or averages) + :type box: Box + :param frequency: file's frequency (only needed if it is different from the default) + :type frequency: str + :param diagnostic: diagnostic used to generate the file + :type diagnostic: Diagnostic + :param cmorized: flag to indicate if file was generated in cmorization process + :type cmorized: bool + + """ + if cmorized: + raise ValueError('cmorized is not supported in THREDDS manager') + original_var = var + cmor_var = Variable.get_variable(var) + var = self._get_final_var_name(box, var) + + if rename_var and rename_var != var: + Utils.rename_variable(filetosend, rename_var, var) + elif original_var != var: + Utils.rename_variable(filetosend, original_var, var) + + if not frequency: + frequency = self.config.frequency + + filepath = self.get_file_path(startdate, domain, var, frequency, box, + grid) + netcdf_file = NetCDFFile(filepath, filetosend, domain, var, cmor_var) + if diagnostic: + netcdf_file.add_diagnostic_history(diagnostic) + else: + raise ValueError('You must provide a diagnostic to store data using the THREDDSmanager') + netcdf_file.send() + + def get_file_path(self, startdate, domain, var, frequency, + box=None, grid=None): + """ + Returns the path to a concrete file + :param startdate: file's startdate + :type startdate: str + :param domain: file's domain + :type domain: str + :param var: file's var + :type var: str + :param frequency: file's frequency + :type frequency: str + :param box: file's box + :type box: Box + :param grid: file's grid + :type grid: str + :return: path to the file + :rtype: str + """ + if not frequency: + frequency = self.config.frequency + var = self._get_final_var_name(box, var) + + folder_path = self._get_folder_path(frequency, domain, var, grid) + file_name = '{0}_{1}.nc'.format(var, startdate) + + filepath = os.path.join(folder_path, file_name) + return filepath + + def _get_folder_path(self, frequency, domain, variable, grid): + folder_path = os.path.join(self.config.data_dir, + self.experiment.institute.lower(), + self.experiment.model.lower(), + self.frequency_folder_name(frequency), + self.get_varfolder(domain, variable, grid)) + return folder_path + + def get_year(self, domain, var, startdate, member, year, grid=None, box=None): + """ + Gets a file containing all the data for one year for one variable + :param domain: variable's domain + :type domain: str + :param var: variable's name + :type var: str + :param startdate: startdate to retrieve + :type startdate: str + :param member: member to retrieve + :type member: int + :param year: year to retrieve + :type year: int + :param grid: variable's grid + :type grid: str + :param box: variable's box + :type box: Box + :return: + """ + + def get_var_url(self, var, startdate, frequency, box): + var = self._get_final_var_name(box, var) + return os.path.join(self.server_url, 'fileServer', 'exp', self.experiment.institute, + self.experiment.model, self.frequency_folder_name(frequency), + var,
'{0}_{1}.nc'.format(var, startdate)) + + def link_file(self, domain, var, startdate, member, chunk=None, grid=None, box=None, + frequency=None, year=None, date_str=None, move_old=False): + """ + Creates the link of a given file from the CMOR repository. + + :param move_old: + :param date_str: + :param year: if frequency is yearly, this parameter is used to give the corresponding year + :type year: int + :param domain: CMOR domain + :type domain: str + :param var: variable name + :type var: str + :param startdate: file's startdate + :type startdate: str + :param member: file's member + :type member: int + :param chunk: file's chunk + :type chunk: int + :param grid: file's grid (only needed if it is not the original) + :type grid: str + :param box: file's box (only needed to retrieve sections or averages) + :type box: Box + :param frequency: file's frequency (only needed if it is different from the default) + :type frequency: str + """ + # THREDDSManager does not require links + pass + + +class THREDDSError(Exception): + pass
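For reference, the aggregation URL built by get_var_url above follows this shape (values are illustrative; frequency_folder_name comes from the DataManager base class):

    # server_url/fileServer/exp/{institute}/{model}/{frequency folder}/{var}/{var}_{startdate}.nc
    # e.g. http://server/thredds/fileServer/exp/institute/model/monthly_mean/tos/tos_19931101.nc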
diff --git a/earthdiagnostics/utils.py b/earthdiagnostics/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..440c4c30c67d1cde271e43b09e66a474ce7b7d8d --- /dev/null +++ b/earthdiagnostics/utils.py @@ -0,0 +1,635 @@ +# coding=utf-8 +import hashlib +import shutil +import subprocess + +import netCDF4 +import numpy as np +import os +import re +import tempfile +from autosubmit.config.log import Log +from cdo import Cdo, CDOException +from nco import Nco + +from earthdiagnostics.constants import Basins + + +class Utils(object): + """ + Container class for miscellaneous utility methods + """ + + nco = Nco() + """An instance of Nco class ready to be used""" + cdo = Cdo() + """An instance of Cdo class ready to be used""" + + @staticmethod + def get_mask(basin): + """ + Returns a numpy array containing the mask for the given basin + + :param basin: basin to retrieve + :type basin: Basin + :return: mask + :rtype: numpy.array + """ + basin = Basins.parse(basin) + if basin != Basins.Global: + mask_handler = Utils.openCdf('mask_regions.nc') + mask = mask_handler.variables[basin.fullname][:, 0, :] + mask_handler.close() + else: + mask_handler = Utils.openCdf('mask.nc') + mask = np.asfortranarray(mask_handler.variables['tmask'][0, 0, :]) + mask_handler.close() + return mask + + @staticmethod + def setminmax(filename, variable_list): + """ + Sets the valid_max and valid_min values to the current max and min values on the file + :param filename: path to file + :type filename: str + :param variable_list: list of variables in which valid_min and valid_max will be set + :type variable_list: str | list + """ + if isinstance(variable_list, basestring): + variable_list = variable_list.split() + + Log.info('Getting max and min values for {0}', ' '.join(variable_list)) + + handler = Utils.openCdf(filename) + for variable in variable_list: + var = handler.variables[variable] + values = [np.max(var), np.min(var)] + Utils.nco.ncatted(input=filename, output=filename, + options='-h -a valid_max,{0},m,f,{1}'.format(variable, values[0])) + Utils.nco.ncatted(input=filename, output=filename, + options='-h -a valid_min,{0},m,f,{1}'.format(variable, values[1])) + handler.close() + + @staticmethod + def rename_variable(filepath, old_name, new_name, must_exist=True, rename_dimension=False): + """ + Renames a variable in a NetCDF file + :param filepath: path to file + :type filepath: str + :param old_name: variable's name to change + :type old_name: str + :param new_name: new name + :type new_name: str + :param must_exist: if True, the function will raise an exception if the variable name does not exist + :type must_exist: bool + :param rename_dimension: if True, also rename dimensions with the same name + :type rename_dimension: bool + """ + Utils.rename_variables(filepath, {old_name: new_name}, must_exist, rename_dimension) + + @staticmethod + def rename_variables(filepath, dic_names, must_exist=True, rename_dimension=False): + """ + Renames multiple variables in a NetCDF file + :param filepath: path to file + :type filepath: str + :param dic_names: dictionary containing old names as keys and new names as values + :type dic_names: dict + :param must_exist: if True, the function will raise an exception if the variable name does not exist + :type must_exist: bool + :param rename_dimension: if True, also rename dimensions with the same name + :type rename_dimension: bool + """ + for old, new in dic_names.iteritems(): + if old == new: + raise ValueError('{0} original name is the same as the new one'.format(old)) + handler = Utils.openCdf(filepath) + + original_names = set(handler.variables.keys()).union(handler.dimensions.keys()) + if not any(x in original_names for x in dic_names): + handler.close() + if must_exist: + raise Exception("Variables {0} do not exist in file {1}".format(','.join(dic_names.keys()), filepath)) + return + handler.close() + + temp = TempFile.get() + shutil.copyfile(filepath, temp) + + handler = Utils.openCdf(temp) + error = False + + try: + Utils._rename_vars_directly(dic_names, filepath, handler, must_exist, rename_dimension) + except RuntimeError: + error = True + + handler.close() + + if not Utils.check_netcdf_file(temp): + error = True + + if error: + Utils._rename_vars_by_creating_new_file(dic_names, filepath, temp) + + Utils.move_file(temp, filepath) + + @staticmethod + def check_netcdf_file(filepath): + """Checks if a file is a valid netCDF file by asking CDO to list its variables""" + try: + Utils.cdo.showvar(input=filepath) + except CDOException: + return False + return True + + @staticmethod + def get_file_variables(filename): + """Returns the names of all the variables in a netCDF file""" + handler = Utils.openCdf(filename) + variables = handler.variables.keys() + handler.close() + return variables + + @staticmethod + def _rename_vars_by_creating_new_file(dic_names, filepath, temp): + Log.debug('Using secondary rename method for netCDF') + original_handler = Utils.openCdf(filepath) + new_handler = Utils.openCdf(temp, 'w') + for attribute in original_handler.ncattrs(): + setattr(new_handler, attribute, getattr(original_handler, attribute)) + for dimension in original_handler.dimensions.keys(): + Utils.copy_dimension(original_handler, new_handler, dimension, new_names=dic_names) + for variable in original_handler.variables.keys(): + Utils.copy_variable(original_handler, new_handler, variable, new_names=dic_names) + original_handler.close() + new_handler.close() + + @staticmethod + def _rename_vars_directly(dic_names, filepath, handler, must_exist, rename_dimension): + for old_name, new_name in dic_names.items(): + if rename_dimension: + if old_name in handler.dimensions: + handler.renameDimension(old_name, new_name) + elif must_exist: + raise Exception("Dimension {0} does not exist in file {1}".format(old_name, filepath)) + + if old_name in handler.variables: + if new_name not in handler.variables: + handler.renameVariable(old_name, new_name) + elif must_exist: + raise Exception("Variable {0} does not exist in file {1}".format(old_name, filepath)) + handler.sync()
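A usage sketch for the rename helpers (file and variable names are illustrative):

    # Rename a single variable, failing if it is missing
    Utils.rename_variable('psi.nc', 'sobarstf', 'vsftbarot')
    # Batch form; also rename matching dimensions and tolerate missing names
    Utils.rename_variables('grid.nc', {'x': 'i', 'y': 'j'}, must_exist=False, rename_dimension=True)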
+ @staticmethod + def copy_file(source, destiny): + """ + Copies a file from source to destiny, creating dirs if necessary + + :param source: path to source + :type source: str + :param destiny: path to destiny + :type destiny: str + """ + dirname_path = os.path.dirname(destiny) + if dirname_path and not os.path.exists(dirname_path): + try: + os.makedirs(dirname_path) + except OSError as ex: + # This can be due to a race condition. If the directory already exists, we don't have to do anything + if not os.path.exists(dirname_path): + raise ex + hash_destiny = None + hash_original = Utils.get_file_hash(source) + + retrials = 5 + while hash_original != hash_destiny: + if retrials == 0: + raise Exception('Can not copy {0} to {1}'.format(source, destiny)) + shutil.copyfile(source, destiny) + hash_destiny = Utils.get_file_hash(destiny) + retrials -= 1 + + @staticmethod + def move_file(source, destiny): + """ + Moves a file from source to destiny, creating dirs if necessary + + :param source: path to source + :type source: str + :param destiny: path to destiny + :type destiny: str + """ + Utils.copy_file(source, destiny) + os.remove(source) + + @staticmethod + def remove_file(path): + """ + Removes a file, checking first if it exists + + :param path: path to file + :type path: str + """ + if os.path.isfile(path): + os.remove(path) + + @staticmethod + def get_file_hash(filepath): + """ + Returns the MD5 hash for the given filepath + :param filepath: path to the file to compute hash on + :type filepath:str + :return: file's MD5 hash + :rtype: str + """ + blocksize = 65536 + hasher = hashlib.md5() + with open(filepath, 'rb') as afile: + buf = afile.read(blocksize) + while len(buf) > 0: + hasher.update(buf) + buf = afile.read(blocksize) + return hasher.hexdigest() + + @staticmethod + def execute_shell_command(command, log_level=Log.DEBUG): + """ + Executes a shell command + :param command: command to execute + :type command: str | list + :param log_level: log level to use for command output + :type log_level: int + :return: command output + :rtype: list + """ + if isinstance(command, basestring): + command = command.split() + process = subprocess.Popen(command, stdout=subprocess.PIPE) + output = list() + communicate = process.communicate() + for line in communicate: + if not line: + continue + if log_level != Log.NO_LOG: + Log.log.log(log_level, line) + output.append(line) + if process.returncode != 0: + raise Utils.ExecutionError('Error executing {0}\n Return code: {1}'.format(' '.join(command), + process.returncode)) + return output + + _cpu_count = None + + @staticmethod + def available_cpu_count(): + """ + Number of available virtual or physical CPUs on this system + """ + if Utils._cpu_count is None: + try: + m = re.search(r'(?m)^Cpus_allowed:\s*(.*)$', + open('/proc/self/status').read()) + if m: + res = bin(int(m.group(1).replace(',', ''), 16)).count('1') + if res > 0: + Utils._cpu_count = res + except IOError: + try: + import multiprocessing + Utils._cpu_count = multiprocessing.cpu_count() + return Utils._cpu_count + except (ImportError, NotImplementedError): + Utils._cpu_count = -1 + return Utils._cpu_count + + @staticmethod + def convert2netcdf4(filetoconvert): + """ + Checks if a file is in netCDF4 format and converts to netCDF4 if not + + :param filetoconvert: file to convert + :type filetoconvert: str + """ + + if Utils._is_compressed_netcdf4(filetoconvert): + return + + Log.debug('Reformatting to netCDF-4') + temp = TempFile.get() + Utils.execute_shell_command(["nccopy", "-4", "-d4", "-s", filetoconvert, temp]) +
shutil.move(temp, filetoconvert) + + @classmethod + def _is_compressed_netcdf4(cls, filetoconvert): + is_compressed = True + handler = Utils.openCdf(filetoconvert) + if handler.file_format != 'NETCDF4': + is_compressed = False + else: + ncdump_result = Utils.execute_shell_command('ncdump -hs {0}'.format(filetoconvert), Log.NO_LOG) + ncdump_result = ncdump_result[0].replace('\t', '').split('\n') + for var in handler.variables: + if not '{0}:_DeflateLevel = 4 ;'.format(var) in ncdump_result: + is_compressed = False + break + if not '{0}:_Shuffle = "true" ;'.format(var) in ncdump_result: + is_compressed = False + break + + handler.close() + return is_compressed + + # noinspection PyPep8Naming + @staticmethod + def openCdf(filepath, mode='a'): + """ + Opens a netCDF file and returns a handler to it + + :param filepath: path to the file + :type filepath: str + :param mode: mode to open the file. By default, a (append) + :type mode: str + :return: handler to the file + :rtype: netCDF4.Dataset + """ + return netCDF4.Dataset(filepath, mode) + + @staticmethod + def get_datetime_from_netcdf(handler, time_variable='time'): + """ + Gets a datetime array from a netCDF file + + :param handler: file to read + :type handler: netCDF4.Dataset + :param time_variable: variable to read, by default 'time' + :type time_variable: str + :return: Datetime numpy array created from the values stored at the netCDF file + :rtype: np.array + """ + var_time = handler.variables[time_variable] + nctime = var_time[:] # get values + units = var_time.units + + try: + cal_temps = var_time.calendar + except AttributeError: + cal_temps = u"standard" + return netCDF4.num2date(nctime, units=units, calendar=cal_temps) + + @staticmethod + def copy_variable(source, destiny, variable, must_exist=True, add_dimensions=False, new_names=None): + """ + Copies the given variable from source to destiny + + :param add_dimensions: if it's true, dimensions required by the variable will be automatically added to the + file.
It will also add the dimension variable + :type add_dimensions: bool + :param source: origin file + :type source: netCDF4.Dataset + :param destiny: destiny file + :type destiny: netCDF4.Dataset + :param variable: variable to copy + :type variable: str + :param must_exist: if false, does not raise an error if the variable does not exist + :type must_exist: bool + :param new_names: dictionary containing variables to rename and new name as key-value pairs + :type new_names: dict + :return: + """ + if not must_exist and variable not in source.variables.keys(): + return + + if not new_names: + new_names = dict() + if variable in new_names: + new_name = new_names[variable] + else: + new_name = variable + + if new_name in destiny.variables.keys(): + return + + translated_dimensions = Utils._translate(source.variables[variable].dimensions, new_names) + if not set(translated_dimensions).issubset(destiny.dimensions): + if not add_dimensions: + raise Exception('Variable {0} can not be added because its dimensions do not match'.format(variable)) + for dimension in source.variables[variable].dimensions: + Utils.copy_dimension(source, destiny, dimension, must_exist, new_names) + if new_name in destiny.variables.keys(): + # Just in case the variable we are copying matches a dimension name + return + original_var = source.variables[variable] + new_var = destiny.createVariable(new_name, original_var.datatype, translated_dimensions) + new_var.setncatts({k: original_var.getncattr(k) for k in original_var.ncattrs()}) + new_var[:] = original_var[:] + + @staticmethod + def copy_dimension(source, destiny, dimension, must_exist=True, new_names=None): + """ + Copies the given dimension from source to destiny, including dimension variables if present + + :param new_names: dictionary containing variables to rename and new name as key-value pairs + :type new_names: dict + :param source: origin file + :type source: netCDF4.Dataset + :param destiny: destiny file + :type destiny: netCDF4.Dataset + :param dimension: dimension to copy + :type dimension: str + :param must_exist: if false, does not raise an error if the dimension does not exist + :type must_exist: bool + + :return: + """ + if not must_exist and dimension not in source.dimensions.keys(): + return + if not new_names: + new_names = dict() + if dimension in new_names: + new_name = new_names[dimension] + else: + new_name = dimension + if new_name in destiny.dimensions.keys(): + return + if not new_name: + new_name = dimension + destiny.createDimension(new_name, source.dimensions[dimension].size) + if dimension in source.variables: + Utils.copy_variable(source, destiny, dimension, new_names=new_names) + + @staticmethod + def concat_variables(source, destiny, remove_source=False): + """ + Adds the variables from one netCDF file to another + :param source: path to source file + :type source: str + :param destiny: path to destiny file + :type destiny: str + :param remove_source: if True, removes source file + :type remove_source: bool + """ + if os.path.exists(destiny): + handler_total = Utils.openCdf(destiny) + handler_variable = Utils.openCdf(source) + concatenated = dict() + for var in handler_variable.variables: + if var not in handler_total.variables: + Utils.copy_variable(handler_variable, handler_total, var, add_dimensions=True) + else: + variable = handler_variable.variables[var] + if 'time' not in variable.dimensions: + continue + concatenated[var] = np.concatenate((handler_total.variables[var][:], variable[:]), + axis=variable.dimensions.index('time')) + + for var, array in
concatenated.iteritems(): + handler_total.variables[var][:] = array + handler_total.close() + handler_variable.close() + if remove_source: + os.remove(source) + else: + if remove_source: + Utils.move_file(source, destiny) + else: + shutil.copy(source, destiny) + Utils.convert2netcdf4(destiny) + + @staticmethod + def expand_path(path): + """ + Expands character ~ and system variables on the given path + :param path: path to expand + :type path: str + :return: path after the expansion + """ + return os.path.expandvars(os.path.expanduser(path)) + + class ExecutionError(Exception): + """ + Exception to raise when a command execution fails + """ + pass + + @classmethod + def _translate(cls, dimensions, new_names): + translated = list() + for dim in dimensions: + if dim in new_names: + translated.append(new_names[dim]) + else: + translated.append(dim) + return translated + + @staticmethod + def create_folder_tree(path): + """ + Creates a folder and all its needed parent directories. + :param path: folder's path + :type path: str + """ + if not os.path.exists(path): + # This can be a race condition + # noinspection PyBroadException + try: + os.makedirs(path) + except Exception: + pass + + @staticmethod + def untar(files, destiny_path): + """ + Untars files to a given destination + :param files: files to extract + :type files: list[Any] | Tuple[Any] + :param destiny_path: path to destination folder + :type destiny_path: str + """ + for filepath in files: + Log.debug('Unpacking {0}', filepath) + Utils.execute_shell_command('tar -xvf {0} -C {1}'.format(filepath, destiny_path)) + + @staticmethod + def unzip(files, force=False): + """ + Unzips a list of files + :param files: files to unzip + :type files: list + :param force: if True, it will overwrite unzipped files + :type force: bool + """ + for filepath in files: + Log.debug('Unzipping {0}', filepath) + if force: + option = ' -f' + else: + option = '' + try: + Utils.execute_shell_command('gunzip{1} {0}'.format(filepath, option)) + except Exception as ex: + raise Utils.UnzipException('Can not unzip {0}: {1}'.format(filepath, ex)) + + class UnzipException(Exception): + """ + Exception raised when unzip fails + """ + pass + + +class TempFile(object): + """ + Class to manage temporary files + """ + + autoclean = True + """ + If True, new temporary files are added to the list for future cleaning + """ + files = list() + """ + List of files to clean automatically + """ + scratch_folder = '' + """ + Scratch folder to create temporary files on it + """ + prefix = 'temp' + """ + Prefix for temporary filenames + """ + + @staticmethod + def get(filename=None, clean=None, suffix='.nc'): + """ + Gets a new temporary filename, storing it for automated cleaning + + :param suffix: suffix for the temporary filename + :param filename: if it is not none, the function will use this filename instead of a random one + :type filename: str + :param clean: if true, stores filename for cleaning + :type clean: bool + :return: path to the temporary file + :rtype: str + """ + if clean is None: + clean = TempFile.autoclean + + if filename: + path = os.path.join(TempFile.scratch_folder, filename) + else: + fd, path = tempfile.mkstemp(dir=TempFile.scratch_folder, prefix=TempFile.prefix, suffix=suffix) + os.close(fd) + + if clean: + TempFile.files.append(path) + + return path + + @staticmethod + def clean(): + """ + Removes all temporary files created with TempFile until now + """ + for temp_file in TempFile.files: + if os.path.exists(temp_file): + os.remove(temp_file) + TempFile.files = list()
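A sketch of the TempFile life cycle (paths are illustrative):

    TempFile.scratch_folder = '/scratch/user/tmp'
    temp = TempFile.get()  # random *.nc path inside the scratch folder, registered for cleaning
    # ... write results to temp, then hand it to send_file or Utils.move_file ...
    TempFile.clean()  # removes every temporary file registered so far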
diff --git a/earthdiagnostics/variable.py b/earthdiagnostics/variable.py new file mode 100644 index 0000000000000000000000000000000000000000..01b8c55efc73a05cc0673842a1885004c0c83556 --- /dev/null +++ b/earthdiagnostics/variable.py @@ -0,0 +1,114 @@ +# coding=utf-8 +import csv + +import os +from autosubmit.config.log import Log + +from earthdiagnostics.constants import Basins + + +class Variable(object): + """ + Class to characterize a CMOR variable. It also contains the static method to match the original + name with the standard name. Requires cmor_table.csv to work. + """ + _dict_variables = None + + def __init__(self, line): + self.short_name = line[1].strip() + self.standard_name = line[2].strip() + self.long_name = line[3].strip() + self.domain = Domain(line[4].strip()) + self.basin = Basins.parse(line[5]) + self.units = line[6].strip() + self.valid_min = line[7].strip() + self.valid_max = line[8].strip() + self.grid = line[9].strip() + + @classmethod + def get_variable(cls, original_name, silent=False): + """ + Returns the cmor variable instance given a variable name + + :param original_name: original variable's name + :type original_name: str + :param silent: if True, omits log warning when variable is not found + :type silent: bool + :return: CMOR variable + :rtype: Variable + """ + try: + return cls._dict_variables[original_name.lower()] + except KeyError: + if not silent: + Log.warning('Variable {0} is not defined in the CMOR table. Please add it'.format(original_name)) + return None + + @classmethod + def load_variables(cls): + """ + Loads the cmor_table.csv and creates the variables dictionary + """ + Variable._dict_variables = dict() + with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'cmor_table.csv'), 'rb') as csvfile: + reader = csv.reader(csvfile, dialect='excel') + for line in reader: + if line[0] == 'Variable': + continue + + var = Variable(line) + if not var.short_name: + continue + for old_name in line[0].split(':'): + Variable._dict_variables[old_name.lower()] = var + Variable._dict_variables[var.short_name.lower()] = var + + +class Domain(object): + + def __init__(self, domain_name): + domain_name = domain_name.lower() + if domain_name == 'seaice': + self.name = 'seaIce' + elif domain_name == 'landice': + self.name = 'landIce' + elif domain_name in ['ocean', 'atmos', 'land']: + self.name = domain_name + else: + raise ValueError('Domain {0} not recognized!'.format(domain_name)) + + def __eq__(self, other): + return other.__class__ == Domain and self.name == other.name + + def __str__(self): + return self.name + + def get_table_name(self, frequency): + """ + Returns the table name for a domain-frequency pair + :param frequency: variable's frequency + :type frequency: str + :return: variable's table name + :rtype: str + """ + if frequency == 'mon': + if self.name == 'seaIce': + table_name = 'OImon' + elif self.name == 'landIce': + table_name = 'LImon' + else: + table_name = self.name[0].upper() + 'mon' + elif frequency == '6hr': + table_name = '6hrPlev' + else: + table_name = 'day' + return table_name + + +class Domains(object): + seaIce = Domain('seaice') + ocean = Domain('ocean') + landIce = Domain('landIce') + atmos = Domain('atmos') + land = Domain('land') + diff --git a/launch_diags.sh b/launch_diags.sh new file mode 100755 index 0000000000000000000000000000000000000000..561d2df2da477693aefcdf11b2be135f3c52a5a6 --- /dev/null +++ b/launch_diags.sh @@ -0,0 +1,24 @@ +#!/usr/bin/env bash + +#SBATCH -n 1 +#SBATCH -w gustafson +#SBATCH --time
72:00:00 +#SBATCH --error=job.%J.err +#SBATCH --output=job.%J.out + +set -xv + +PATH_TO_CONF_FILE=~/earthdiagnostics/diags.conf +PATH_TO_DIAGNOSTICS=~/earthdiagnostics +PATH_TO_VIRTUALENV=/shared/earth/ClimatePrediction/EarthDiagnostics/bin + +module purge +module load NCO/4.5.4-foss-2015a +module load CDO/1.6.9-foss-2015a +module load CDFTOOLS/3.0a1-foss-2015a + +source ${PATH_TO_VIRTUALENV}/activate + +export PYTHONPATH=${PATH_TO_DIAGNOSTICS}:${PYTHONPATH} +cd ${PATH_TO_DIAGNOSTICS}/earthdiagnostics/ +./earthdiags.py -lc DEBUG -f ${PATH_TO_CONF_FILE} diff --git a/ocean_pp.bash b/ocean_pp.bash deleted file mode 100755 index ddabc08c7125d2c700bfc8c49706b3939c9f15a3..0000000000000000000000000000000000000000 --- a/ocean_pp.bash +++ /dev/null @@ -1,646 +0,0 @@ -#!/bin/bash -set -evx - -#module load CDFTOOLS/2.1-foss-2015a CDO NCO - -function delete { -at now +7 days << EOF -rm -rf $WORKDIR -EOF -} - - -msg='Your experiment crashed! Your workdir \($WORKDIR\) will be kept for one week from now and then deleted' -trap "echo $msg ; delete ; exit" SIGINT SIGHUP SIGTERM SIGSEGV SIGKILL EXIT - -################################# -#### User Defined Funtions #### -################################# - - - - -# check if args are ok and read options in config_file - - if [ $# -ne 1 ] ; then - echo - echo "USAGE: config_file " - echo "For example: ./ocean_pp.new.bash /home/Earth/$user/es_git/ocean_diagnostics/config_file " - echo - exit 1 - fi - -config_file=$1 -. ${config_file} - -list_files='grid_T' -if [[ ${listpost[@]} =~ "psi" ]] ; then - echo "The list of diags require grid_U" - list_files=$(echo ${list_files} grid_U) -fi - -if [[ ${listpost[@]} =~ "moc" ]] ; then - echo "The list of diags require grid_V" - list_files=$(echo ${list_files} grid_V) -fi -if [[ ${listpost[@]} =~ "ice" ]] || [[ ${listpost[@]} =~ "siasiesiv" ]]; then - echo "The list of diags contains ice" - list_files=$(echo ${list_files} icemod) -fi - -############################################################################### -# -# moc needs to be computed before max_moc and area_moc -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -if [[ ${listpost[@]##*moc*} != ${listpost[@]} ]] || [[ ${listpost[@]##*stc*} != ${listpost[@]} ]] ; then - if [[ ${listpost[@]#moc} != ${listpost[@]:1} ]] ; then - listpost=( 'moc' "${listpost[@]#moc}" ) - fi -fi -# -# psi needs to be computed before gyres -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -if [[ ${listpost[@]##*gyres*} != ${listpost[@]} ]] ; then - if [[ ${listpost[@]#psi} != ${listpost[@]:1} ]] ; then - listpost=( 'psi' "${listpost[@]#psi}" ) - fi -fi -# -# oce raw outputs need to be extracted before convection option -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -if [[ ${listpost[@]##*convection*} != ${listpost[@]} ]] ; then - if [[ ${listpost[@]#ext_raw_oce} != ${listpost[@]:1} ]] ; then - listpost=( 'ext_raw_oce' "${listpost[@]#ext_raw_oce}" ) - fi -fi -# -# 3d interpolation required before average T sections over longitudes -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -if [[ ${listpost[@]##TSec_ave*} != ${listpost[@]} ]] ; then - if [[ ${listpost[@]#3dtemp} != ${listpost[@]:1} ]] ; then - listpost=( '3dtemp' "${listpost[@]#3dtemp}" ) - warning_T=.true. - fi -fi -if [[ ${listpost[@]##SSec_ave*} != ${listpost[@]} ]] ; then - if [[ ${listpost[@]#3dsal} != ${listpost[@]:1} ]] ; then - listpost=( '3dsal' "${listpost[@]#3dsal}" ) - warning_S=.true. 
- fi -fi -# @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ -# You have created a function ? If your new diagnostic relies on an already -# existing diagnotics, you might need similar lignes to the above ones -# Any doubt ---> vguemas@ic3.cat -# @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ -# -# Preparing WORKDIR and set of available functions -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -export WORKDIR=/scratch/Earth/${USER}/tmp/post_ocean/$$ -mkdir -p $WORKDIR -cd $WORKDIR -source $PATHCOMMONOCEANDIAG/common_ocean_post.txt -# -# Interval of lead months be post-processed -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -case $expid in - 'nemovar_s4'|'nemovar_combine') moni=09 ; syeari=1957 ; syearf=1957 ; insdate=1 ; typeoutput='MMO' ; NEMOVERSION='nemovar_O1L42' ;; - 'glorys2v1') moni=01 ; syeari=1993 ; syearf=1993 ; insdate=1 ; typeoutput='MMO' ;; -esac -case $expid in - 'nemovar_s4') rootout='/esnas/exp/ECMWF/NEMOVAR_S4/monthly_mean' ;; - 'nemovar_combine') rootout='/esnas/exp/ECMWF/NEMOVAR_COMBINE/monthly_mean' ;; - 'glorys2v1') rootout='/esnas/exp/MERCATOR/GLORYS2V1/monthly_mean';; -esac -if [[ ${listpost[@]##max_moc} != ${listpost[@]} ]] || [[ -z "$ltimef" ]] || [[ -z "$ltime0" ]] ; then - if [[ ! -z "$year0" ]] && [[ ! -z "$yearf" ]] ; then - ltime0=$(((${year0}-${syeari})*12+1)) - ltimef=$(((${yearf}-${syeari}+1-(10#$moni+10)/12)*12)) - fi -fi -mon0=$(( (10#$moni+$ltime0-2)%12+1 )) -monf=$(( (10#$moni+$ltimef-2)%12+1 )) -# -# Check on the consistency between the chunk length and the leadtimes -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -if [[ $((($ltimef-$ltime0+1)%$chunklen)) != 0 || $((($ltime0-1)%$chunklen)) != 0 ]] ; then - echo "This a safety stop because we think you might have made a mistake in your configuration file" - echo "Unless you have run your experiment with a variable chunk length, you should have" - echo "a number of leadtimes to post-process that is a multiple of the chunken and the first" - echo "leadtime should be right after the end of a chunk" - echo "If you have run your experiment with a variable chunk length, please remove l.85-93 of ocean_pp.bash" - exit 1 -fi -# -# Loop on start dates -# ~~~~~~~~~~~~~~~~~~~~~ -if [[ $intsdate -eq 0 ]] ; then intsdate=1 ; fi # if only one start date, user might set -# intsdates to 0 which leads to an infinite loop below -for ((yeari=$syeari;yeari<=$syearf;yeari=$(($yeari+intsdate)))) ; do - # - # Interval of years to be post-processed - # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - year0=$(($yeari+(10#$moni+$ltime0-2)/12)) - yearf=$(($yeari+(10#$moni+$ltimef-2)/12)) - - for memb in ${listmemb[@]} ; do - # - # Fetching the files on esnas - # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - case $expid in - 'nemovar_s4'|'nemovar_combine') get_nemovar ${expid} ${memb} ${year0} ${yearf} ${mon0} ${monf} "${list_files}" - ;; - 'glorys2v1') get_glorys ${year0} ${yearf} ${mon0} ${monf} ;; - *) freqout=${rootout:${#rootout}-12} ; freqout=${freqout/_mean} ; freqout=${freqout/*\/} - get_diagsMMO ${yeari}${moni}01 ${expid} ${memb} $ltime0 $ltimef $chunklen $mod $typeoutput $freqout "${list_files}" - esac - # - # Ready for the post-processing - # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - for post in ${listpost[@]} ; do - - case $post in -# @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ -# You have created a function ? 
Enter its call right here under the flag chosen -# Remember to consider both 'MMO' and 'diags' cases -# Any doubt ---> vguemas@ic3.cat -# @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ - 'ext_raw_oce') - if [[ $typeoutput == 'MMO' ]] ; then - lstvars=`cdo showvar grid_T_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc` - if [[ $raw_vars_ocean == '' ]] ; then - lstext=`echo $lstvars | sed s/\ /,/g` - else - if [[ $raw_vars_ocean == 'default' ]] ; then - lstextvar=( 'sosstsst' 'sosaline' 'somixhgt' 'somxl010' ) - lstext='' - for varex in ${lstextvar[@]} ; do - if [[ ${lstvars/${varex}/} != ${lstvars} ]] ; then - lstext=`echo ${lstext} ${varex}|sed s/\ /,/g` - fi - done - else - lstext=`echo ${raw_vars_ocean[@]} |sed s/\ /,/g` - fi - fi - if [ -z "$lstext" ] ; then - echo "The list of variables you wish to extract is not in your outputs" - exit 1 - else - ncks -O -v ${lstext} grid_T_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc oce_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc -# ncks -O -v ${lstext[@]} grid_T_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc oce_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - fi - fi - ;; - - 'ext_raw_ice') - if [[ $typeoutput == 'MMO' ]] ; then - lstvars=`cdo showvar icemod_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc` - if [[ $raw_vars_ice == '' ]] ; then - lstext=`echo $lstvars | sed s/\ /,/g` - else - if [[ $raw_vars_ice == 'default' ]] ; then - lstextvar=( 'isnowthi' 'iicethic' 'ileadfra' 'iicetemp' 'ice_pres' ) - lstext='' - for varex in ${lstextvar[@]} ; do - if [[ ${lstvars/${varex}/} != ${lstvars} ]] ; then - lstext=`echo ${lstext} ${varex}|sed s/\ /,/g` - fi - done - else - lstext=`echo $raw_vars_ice |sed s/\ /,/g` - fi - fi - if [ -z "$lstext" ] ; then - echo "The list of variables you wish to extract is not in your outputs" - exit 1 - else - ncks -O -v ${lstext} icemod_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc ice_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - fi - fi - ;; - - 'heat_sal_mxl') - if [[ $typeoutput == 'MMO' ]] ; then - if [ ! -f heat_sal_mxl_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc ] ; then - heat_sal_mxl grid_T_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc heat_sal_mxl_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - fi - fi - ;; - - 'psi') - if [[ $typeoutput == 'MMO' ]] ; then - if [ ! 
-f psi_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc ] ; then - psi grid_U_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc grid_V_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc psi_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - fi - fi - ;; - - 'gyres') - gyres psi_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc $NEMOVERSION gyres_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - ;; - - 'lmsalc') - if [[ $typeoutput == 'MMO' ]] ; then - if [ ! -f sal_300-5400m_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc ] ; then - vertmeansal grid_T_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc 300 5400 sal_300-5400m_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - fi - fi - ;; - - 'usalc') - if [[ $typeoutput == 'MMO' ]] ; then - if [ ! -f sal_0-300m_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc ] ; then - vertmeansal grid_T_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc 0 300 sal_0-300m_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - fi - fi - ;; - - - 'temp_lev') - if [[ $typeoutput == 'MMO' ]] ; then - if [ ! -f temp_lev${level1}-${level2}_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc ] ; then - vertmeanvar grid_T_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc votemper $level1 $level2 temp_lev${level1}-${level2}_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - fi - fi - ;; - - - 'sal_lev') - if [[ $typeoutput == 'MMO' ]] ; then - if [ ! -f sal_lev${level1}-${level2}_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc ] ; then - vertmeanvar grid_T_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc vosaline $level1 $level2 sal_lev${level1}-${level2}_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - fi - fi - ;; - - 'ohc_specified_layer') - if [ ! 
-f ohc_2d_avg_0-300m_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc ];then - case $typeoutput in - 'MMO' ) pref='grid_T' ;; - 'diags') pref='t3d' ;; - esac - ohc_specified_layer ${pref}_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc 0.0 300.0 ohc_2d_avg_0-300m_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - ohc_specified_layer ${pref}_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc 300.0 800.0 ohc_2d_avg_300-800m_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - fi - ;; - - 'vert_Tsections') - case $typeoutput in - 'MMO' ) pref='grid_T' ;; - 'diags') pref='t3d' ;; - esac - for coord in 0 45 -45 -30 180 80 - do - if [[ $coord == '0' ]] || [[ $coord == '45' ]] || [[ $coord == '-45' ]] ; then - [[ ` echo $coord | cut -b 1 ` == '-' ]] && direction=S || direction=N - z_m=Z - else - [[ ` echo $coord | cut -b 1 ` == '-' ]] && direction=W || direction=E - z_m=M - fi - coord=`echo $coord | sed -e s/-//g` - [ ! -f temp_${coord}${direction}_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc ] && cutsection ${pref}_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc votemper $z_m $coord temp_${coord}${direction}_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - done - - ;; - - 'vert_Ssections') - if [[ $typeoutput == 'MMO' ]] ; then - pref='grid_T' - for coord in 0 45 -45 -30 180 80 - do - if [[ $coord == '0' ]] || [[ $coord == '45' ]] || [[ $coord == '-45' ]] ; then - [[ ` echo $coord | cut -b 1 ` == '-' ]] && direction=S || direction=N - z_m=Z - else - [[ ` echo $coord | cut -b 1 ` == '-' ]] && direction=W || direction=E - z_m=M - fi - coord=`echo $coord | sed -e s/-//g` - [ ! -f sal_${coord}${direction}_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc ] && cutsection ${pref}_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc vosaline $z_m $coord sal_${coord}${direction}_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - done - fi - ;; - '3dtemp') - case $typeoutput in - 'MMO' ) pref='grid_T' ;; - 'diags') pref='t3d' ;; - esac - if [ ! -f regular3dT_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc ]; then - echo " Warning: you are about to perform a 3d interpolation " - [ $warning_T ] && echo "(because you asked for cross sections calculations)" - echo "this might take time to complete (~days), be sure you really need/want to do this..." - interp3d ${pref}_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc votemper regular3dT_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - fi - ;; - '3dsal') - if [[ $typeoutput == 'MMO' ]] ; then - pref='grid_T' - if [ ! 
-f regular3dS_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc ]; then - echo " Warning: you are about to perform a 3d interpolation " - [ $warning_S ] && echo "(because you asked for cross sections calculations)" - echo "this might take time to complete (~days), be sure you really need/want to do this..." - interp3d ${pref}_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc vosaline regular3dS_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - fi - fi - ;; - - 'TSec_ave190-220E') - [ ! -f TSec_ave190-220E_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc ] && cdo zonmean -sellonlatbox,190,220,-90,90 regular3dT_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc TSec_ave190-220E_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - ;; - 'SSec_ave190-220E') - if [[ $typeoutput == 'MMO' ]] ; then - [ ! -f SSec_ave190-220E_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc ] && cdo zonmean -sellonlatbox,190,220,-90,90 regular3dS_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc SSec_ave190-220E_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - fi - ;; - - 'moc') - if [[ $typeoutput == 'MMO' ]] ; then - if [ ! -f moc_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc ] ; then - moc grid_V_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc moc_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - fi - fi - ;; - - 'max_moc') - max_moc moc_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc 38 50 500 2000 max_moc_38N50N_500m-2km_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - max_moc moc_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc 40 40 0 10000 max_moc_40N_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - ;; - - 'stc') - area_moc moc_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc 0.0 25.0 NPac_stc_0N25N_0-200m_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc 0.0 200.0 zomsfpac - area_moc moc_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc -25.0 0.0 SPac_stc_25S0S_0-200m_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc 0.0 200.0 zomsfpac - area_moc moc_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc 0.0 25.0 NAtl_stc_0N25N_0-200m_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc 0.0 200.0 - area_moc moc_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc -25.0 0.0 
SAtl_stc_25S0S_0-200m_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc 0.0 200.0 - ;; - - 'area_moc') - if [ ! -f moc_40N55N_1-2km_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc ];then - area_moc moc_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc 40.0 55.0 moc_40N55N_1-2km_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - fi - if [ ! -f moc_30N40N_1-2km_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc ];then - area_moc moc_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc 30.0 40.0 moc_30N40N_1-2km_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - fi - ;; - - 'convection') - convection oce_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc $NEMOVERSION convection_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - ;; - - 'siasiesiv' ) - - if [ ! -f siasiesiv_N_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc ]||[ ! -f siasiesiv_S_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc ];then #check if ? instead of N or S works - case $typeoutput in - 'MMO' ) pref='icemod' ;; - 'diags') pref='ice' ;; - esac - siasiesiv ${pref}_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc tmp.nc - mv ice_N_tmp.nc siasiesiv_N_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - mv ice_S_tmp.nc siasiesiv_S_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - fi - ;; - - 'siasiesivsit_Arcticreg1') - - case $typeoutput in - 'MMO' ) pref='icemod' ;; - 'diags') pref='ice' ;; - esac - - if [[ $raw_regions_ice == '' ]] ; then - lstseas=$( cdo showvar mask_regions.nc ) - else - if [[ $raw_regions_ice == 'default' ]] ; then - lstseas="Baffin_Bay Baltic_Sea Barents_Sea Beaufort_Sea Bering CanArch Chukchi_Sea CntArctic CntArcticRing1 CntArcticRing2 CntArcticRing3 CntArcticRing4 CntArcticRing5 CntArcticRing6 CntArcticRing7_Lincoln_Sea CntArcticRing8 CntArcticPrf1 CntArcticPrf2r CntArcticPrf3 CntArcticPrf4 East_Siberian_Sea1 Greenland_Sea Hudson Icelandic_Sea Irminger Japan1 Kara_Sea Laptev_Sea Labrador_Sea1 Norwegian_Sea Okhotsk StLawr" - else - lstseas=$( echo ${raw_regions_ice[@]} ) - fi - fi - - for sea in $lstseas ; do - - siasiesiv ${pref}_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc tmp.nc $sea - - ncks -O -v sia ice_N_tmp.nc sia_N_tmp.nc - ncks -O -v sie ice_N_tmp.nc sie_N_tmp.nc - ncks -O -v siv ice_N_tmp.nc siv_N_tmp.nc - ncks -O -v sit ice_N_tmp.nc sit_N_tmp.nc - - ncrename -h -v sia,sia_$sea sia_N_tmp.nc - ncrename -h -v sie,sie_$sea sie_N_tmp.nc - ncrename -h -v siv,siv_$sea siv_N_tmp.nc - ncrename -h -v sit,sit_$sea sit_N_tmp.nc - - if [ -e sia_Arcticreg1_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc ] ; then - ncks -A sia_N_tmp.nc sia_Arcticreg1_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" 
${mon0})_${yearf}$(printf "%02d" ${monf}).nc - ncks -A sie_N_tmp.nc sie_Arcticreg1_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - ncks -A siv_N_tmp.nc siv_Arcticreg1_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - ncks -A sit_N_tmp.nc sit_Arcticreg1_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - rm -f sia_N_tmp.nc sie_N_tmp.nc siv_N_tmp.nc sit_N_tmp.nc ice_N_tmp.nc ice_S_tmp.nc - else - mv sia_N_tmp.nc sia_Arcticreg1_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - mv sie_N_tmp.nc sie_Arcticreg1_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - mv siv_N_tmp.nc siv_Arcticreg1_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - mv sit_N_tmp.nc sit_Arcticreg1_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - rm -f ice_N_tmp.nc ice_S_tmp.nc - fi - - done - ;; - - esac - - case `echo $post|cut -c$((${#post}-2))-${#post}` in - 'ohc') - case `echo $post | cut -c1` in - 'x') kmin=0 ; kmax=0 ; start=2 ; mxl=1 ;; - 'l') start=2 ; mxl=0 - case $NEMOVERSION in - 'Ec2.3_O1L42'|'N3.2_O1L42'|'nemovar_O1L42') kmin=25 ; kmax=42 ;; - 'Ec3.0_O1L46'|'Ec3.0_O25L46'|'N3.3_O1L46') kmin=23 ; kmax=46 ;; - 'Ec3.0_O1L75'|'Ec3.0_O25L75'|'glorys2v1_O25L75') kmin=45; kmax=75;; - esac - ;; - 'm') start=2 ; mxl=0 - case $NEMOVERSION in - 'Ec2.3_O1L42'|'N3.2_O1L42'|'nemovar_O1L42') kmin=21 ; kmax=24 ;; - 'Ec3.0_O1L46'|'Ec3.0_O25L46'|'N3.3_O1L46') kmin=18 ; kmax=22 ;; - 'Ec3.0_O1L75'|'Ec3.0_O25L75'|'glorys2v1_O25L75') kmin=35; kmax=44;; - esac - ;; - 'u') kmin=1 ; start=2 ; mxl=0 - case $NEMOVERSION in - 'Ec2.3_O1L42'|'N3.2_O1L42'|'nemovar_O1L42') kmax=20 ;; - 'Ec3.0_O1L46'|'Ec3.0_O25L46'|'N3.3_O1L46') kmax=17 ;; - 'Ec3.0_O1L75'|'Ec3.0_O25L75'|'glorys2v1_O25L75') kmax=34;; - esac - ;; - *) kmin="" ; kmax="" ; start=1 ; mxl=0 ;; - esac - case `echo $post | cut -c${start}-$((start+3))` in - 'ohc') basin='Glob' ;; - *) basin=`echo $post | cut -c${start}-$((start+3))` - esac - case $typeoutput in - 'MMO' ) pref='grid_T' ;; - 'diags') - pref='t3d' - ncks -A -v somxl010,somixhgt oce_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc t3d_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - ;; - esac - ohc ${pref}_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc heatc_${expid}_${yeari}${moni}01_fc${memb}_${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc $basin $mxl $kmin $kmax - ;; - esac - - done - - # Removing the raw output from this start dates and this member - # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - clean_diagsMMO ${yeari}${moni}01 ${expid} ${memb} $ltime0 $ltimef $typeoutput "${list_files}" - done - - # Prepare storage : choose output directory and file name - # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - for post in ${listpost[@]} ; do - case $post in -# @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ -# You have created a function ? 
Enter the output directory and the prefix
-# of your output file(s) under the flag chosen
-# Any doubt ---> vguemas@ic3.cat
-# @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
-      'area_moc') dirout='moc'; files=('moc_40N55N_1-2km' 'moc_30N40N_1-2km') ;;
-      'stc') dirout='moc' ; files=( 'NPac_stc_0N25N_0-200m' 'SPac_stc_25S0S_0-200m' 'NAtl_stc_0N25N_0-200m' 'SAtl_stc_25S0S_0-200m' ) ;;
-      'max_moc') dirout='moc' ; files=('max_moc_38N50N_500m-2km' 'max_moc_40N' ) ;;
-      'siasiesiv' ) dirout='ice' ; files=('siasiesiv_N' 'siasiesiv_S') ;;
-      'siasiesivsit_Arcticreg1' ) dirout='ice' ; files=('sia_Arcticreg1' 'sie_Arcticreg1' 'siv_Arcticreg1' 'sit_Arcticreg1') ;;
-      'moc') dirout='moc' ; files=('moc') ;;
-      'convection') dirout='moc' ; files=('convection') ;;
-#      'ext_raw_ice') dirout='ice' ; files=('ice_raw') ;;
-#      'ext_raw_oce') dirout='oce_raw' ; files=('oce_raw') ;;
-      'ext_raw_ice') dirout='ice' ; files=('ice') ;;
-      'ext_raw_oce') dirout='oce' ; files=('oce') ;;
-      'heat_sal_mxl') dirout='heatc' ; files=('heat_sal_mxl') ;;
-      'psi') dirout='psi' ; files=('psi') ;;
-      'gyres') dirout='psi' ; files=('gyres') ;;
-      'usalc') dirout='saltc' ; files=('sal_0-300m') ;;
-      'temp_lev') dirout='temp_lev'${level1}-${level2} ; files=('temp_lev'${level1}-${level2}) ;;
-      'sal_lev') dirout='sal_lev'${level1}-${level2} ; files=('sal_lev'${level1}-${level2}) ;;
-      'lmsalc') dirout='saltc' ; files=('sal_300-5400m') ;;
-      'ohc_specified_layer') dirout='heatc' ; files=('ohc_2d_avg_0-300m' 'ohc_2d_avg_300-800m') ;;
-      'vert_Tsections') dirout='sections' ; files=('temp_0N' 'temp_45N' 'temp_45S' 'temp_30W' 'temp_80E' 'temp_180E') ;;
-      'vert_Ssections') dirout='sections' ; files=('sal_0N' 'sal_45N' 'sal_45S' 'sal_30W' 'sal_80E' 'sal_180E') ;;
-      '3dtemp') dirout='InterpT' ; files=('regular3dT') ;;
-      '3dsal') dirout='InterpS' ; files=('regular3dS') ;;
-      'TSec_ave190-220E') dirout='sections' ; files=('TSec_ave190-220E') ;;
-      'SSec_ave190-220E') dirout='sections' ; files=('SSec_ave190-220E') ;;
-    esac
-    case `echo $post|cut -c$((${#post}-2))-${#post}` in
-      'ohc')
-        dirout='heatc'
-        file='heatc'
-        case `echo $post | cut -c1` in
-          'x') mxl=1 ; start=2 ;;
-          'l') start=2 ; mxl=0
-            case $NEMOVERSION in
-              'Ec2.3_O1L42'|'N3.2_O1L42'|'nemovar_O1L42') file='800-5350_'${file} ;;
-              'Ec3.0_O1L46'|'Ec3.0_O25L46'|'N3.3_O1L46') file='855-5875_'${file} ;;
-              'Ec3.0_O1L75'|'Ec3.0_O25L75'|'glorys2v1_O25L75') file='857-5902_'${file};;
-            esac
-            ;;
-          'm') start=2 ; mxl=0
-            case $NEMOVERSION in
-              'Ec2.3_O1L42'|'N3.2_O1L42'|'nemovar_O1L42') file='373-657_'${file} ;;
-              'Ec3.0_O1L46'|'Ec3.0_O25L46'|'N3.3_O1L46') file='382-735_'${file} ;;
-              'Ec3.0_O1L75'|'Ec3.0_O25L75'|'glorys2v1_O25L75') file='301-773_'${file};;
-            esac
-            ;;
-          'u') start=2 ; mxl=0
-            case $NEMOVERSION in
-              'Ec2.3_O1L42'|'N3.2_O1L42'|'nemovar_O1L42') file='0-315_'${file} ;;
-              'Ec3.0_O1L46'|'Ec3.0_O25L46'|'N3.3_O1L46') file='0-322_'${file} ;;
-              'Ec3.0_O1L75'|'Ec3.0_O25L75'|'glorys2v1_O25L75') file='0-271_'${file};;
-            esac
-            ;;
-          *) mxl=0 ; start=1 ;;
-        esac
-
-        case `echo $post | cut -c${start}-$((start+3))` in
-          'NAtl') file='NAtl_10N65N_'${file} ;;
-          'TAtl') file='TAtl_30S30N_'${file};;
-          'NPac') file='NPac_10N70N_'${file} ;;
-          'TPac') file='TPac_30S30N_'${file} ;;
-          'Arct') file='Arc_65N90N_'${file} ;;
-          'Anta') file='Ant_90S60S_'${file} ;;
-          'TInd') file='TInd_30S30N_'${file} ;;
-        esac
-        if [[ $mxl == 1 ]] ; then
-          file='mxl_'$file
-        fi
-        files=( $file )
-    esac
-    pathout=${rootout}/${dirout}
-    mkdir -m ug+w,o-w -p $pathout
-    for file in ${files[@]} ; do
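-      # Each iteration merges the members post-processed in this run with any
-      # previously stored members, concatenates the result with any previously
-      # stored years, and moves the final file to ${pathout}. As an illustration
-      # (assuming expid i00k, start date 19601101, members 0 to 4 and a last
-      # leadtime falling in October 1961), the 'moc' diagnostic would be stored as:
-      #   moc_i00k_19601101_fc0-4_196011_196110.nc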
prefix=${file}_${expid}_${yeari}${moni}01_fc - lsmbso=0-${listmemb[${#listmemb[@]}-1]} - # - # Merging the post-processed members together and with the previous members if existing - # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - lsyrsh=${year0}$(printf "%02d" ${mon0})_${yearf}$(printf "%02d" ${monf}).nc - lsmbsh=${listmemb[0]}-${listmemb[${#listmemb[@]}-1]} - lsmbsb=0-$((${listmemb[0]}-1)) - if [ -e ${pathout}/${prefix}${lsmbsb}_${lsyrsh} ] ; then - cp ${pathout}/${prefix}${lsmbsb}_${lsyrsh} . - lsmbsh=0-${listmemb[${#listmemb[@]}-1]} - fi - gather_memb ${prefix} _${lsyrsh} ${prefix}${lsmbsh}_${lsyrsh} - for jmemb in ${listmemb[@]} ; do - rm -f ${prefix}${jmemb}_${lsyrsh} - done - # - # Concatenating the result with the previous years if existing - # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ -# You have created a function ? If your diagnostic provides yearly output -# you need to use the concat option rather than the ncrcat one below. -# Any doubt ---> vguemas@ic3.cat -# @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ - lsyrsb=${yeari}${moni}_$((year0-(1-(10#$mon0+10)/12)))$(printf "%02d" $(((mon0-13)%12+12)) ).nc - lsyrso=${yeari}${moni}_${yearf}$(printf "%02d" ${monf}).nc - if [ -e ${pathout}/${prefix}${lsmbsh}_${lsyrsb} ] ; then - case $post in - 'max_moc' ) concat ${pathout}/${prefix}${lsmbsh}_${lsyrsb} ${prefix}${lsmbsh}_${lsyrsh} $(printf "%02d" ${monf}) ${prefix}${lsmbsh}_${lsyrso} ;; - *) ncrcat -O ${pathout}/${prefix}${lsmbsh}_${lsyrsb} ${prefix}${lsmbsh}_${lsyrsh} ${prefix}${lsmbsh}_${lsyrso} ;; - esac - else - lsyrso=$lsyrsh - fi - # - # Merging the result with the previous members if existing - # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - if [[ $lsyrsh != $lsyrso ]] && [[ -e ${pathout}/${prefix}${lsmbsb}_${lsyrso} ]] ; then - cp ${pathout}/${prefix}${lsmbsb}_${lsyrso} . - gather_memb ${prefix} _${lsyrso} ${prefix}${lsmbso}_${lsyrso} - else - lsmbso=$lsmbsh - fi - # - # Storing and cleaning - # ~~~~~~~~~~~~~~~~~~~~~ - cp ${prefix}${lsmbso}_${lsyrso} ${pathout}/. 
|| { if [ -e ${pathout}/${prefix}${lsmbso}_${lsyrso} ];
-      then
-        echo "${prefix}${lsmbso}_${lsyrso} already exists in ${pathout}"
-        sleep 5
-      else
-        echo " problem writing file in ${pathout} directory"
-        exit
-      fi
-    }
-    rm -f ${pathout}/${prefix}${lsmbsh}_${lsyrsb} ${prefix}${lsmbsh}_${lsyrso} ${prefix}${lsmbsb}_${lsyrso} ${pathout}/${prefix}${lsmbsb}_${lsyrso} ${prefix}${lsmbso}_${lsyrso} ${pathout}/${prefix}${lsmbsb}_${lsyrsh} ${prefix}${lsmbsb}_${lsyrsh}
-  done
-  done
-done
-
-trap - EXIT
-rm -rf $WORKDIR
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000000000000000000000000000000000000..17f4b07a9ae63ec51cd0ca6709eaac3e080e2598
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+# coding=utf-8
+"""
+Installation script for EarthDiagnostics package
+"""
+
+from os import path
+from setuptools import setup
+from setuptools import find_packages
+
+here = path.abspath(path.dirname(__file__))
+
+# Get the version number from the relevant file
+with open(path.join(here, 'VERSION')) as f:
+    version = f.read().strip()
+
+setup(
+    name='earthdiagnostics',
+    license='GNU GPL v3',
+    platforms=['GNU/Linux Debian'],
+    version=version,
+    description='EarthDiagnostics',
+    author='BSC-CNS Earth Sciences Department',
+    author_email='javier.vegas@bsc.es',
+    url='http://www.bsc.es/projects/earthscience/autosubmit/',
+    keywords=['climate', 'weather', 'diagnostic'],
+    install_requires=['numpy', 'netCDF4', 'autosubmit', 'cdo', 'pygrib', 'nco', 'cfunits>=1.1.4', 'coverage', 'pyproj'],
+    packages=find_packages(),
+    include_package_data=True,
+    scripts=['bin/earthdiags']
+)
diff --git a/setup_development.bash b/setup_development.bash
deleted file mode 100755
index 9591a894ed1b6aed6278021684a07ed2bc4fcfac..0000000000000000000000000000000000000000
--- a/setup_development.bash
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/bin/bash
-#
-# This script intends to support the development of new functions in
-# common_ocean_post.txt or of new options to existing functions. It allows
-# these developments to be tested quickly
-#
-# History : Virginie Guemas - Initial version - 2012
-# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-set -evx
-# This does not need to be changed :
-CON_FILES='/esnas/autosubmit/con_files'
-# This option is compatible with the testing on i00k but if you want to test
-# your developments on another experiment you would need to change this :
-NEMOVERSION=Ec2.3_O1L42
-# This line needs to be replaced by the path where you stored your modified
-# version of common_ocean_post.txt :
-source /home/Earth/rcruzgar/ocean_diagnostics/common_ocean_post.txt
-# Here we only fetch one random chunk of outputs from i00k :
-cp /esnas/exp/ecearth/i00k/19601101/fc0/outputs/MMO_i00k_19601101_fc0_19601101-19610228.tar .
-# Here we untar and gunzip these files :
-tar -xvf MMO_i00k_19601101_fc0_19601101-19610228.tar
-gunzip *.gz
-# The lines below might need to be changed depending on which function you need
-# to test. In the case below, the grid_T files are needed because they contain
-# the 3d temperature = input to the ohc function. If you test siasiesiv for example,
-# you would need to replace grid_T by icemod.
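-# For instance (an illustrative sketch only; the exact arguments expected by
-# each function may differ), testing siasiesiv would mean concatenating the
-# icemod files instead:
-#   filein=`ls *icemod*`
-#   cdo cat *icemod* tmp.nc
-#   siasiesiv tmp.nc tmpout.nc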
-filein=`ls *grid_T*`
-cdo cat *grid_T* tmp.nc
-# The lines below are essential because all the functions from
-# common_ocean_post.txt expect the time dimension to be named time (this is
-# handled in ocean_pp.bash and in the templates)
-timevar=`ncdump -h tmp.nc | grep UNLIMITED | awk '{print $1}'`
-if [[ $timevar == 'time_counter' ]] ; then ncrename -v time_counter,time -d time_counter,time tmp.nc ; fi
-# Some cleaning
-rm -f *grid* *icemod*
-# This is the final testing line. You need to replace this line with the
-# function you want to test (here it is ohc) followed by all its arguments
-# (here the input file tmp.nc, the output file tmpout.nc and the basin 'Glob')
-ohc tmp.nc tmpout.nc 'Glob'
diff --git a/test.py b/test.py
new file mode 100644
index 0000000000000000000000000000000000000000..a6785ecb07d4ac8b805008f56b6e62194a127096
--- /dev/null
+++ b/test.py
@@ -0,0 +1,30 @@
+# coding=utf-8
+"""
+Script to run the tests for EarthDiagnostics and generate the code coverage report
+"""
+import coverage
+import unittest
+import os
+cov = coverage.Coverage()
+cov.set_option("run:branch", True)
+cov.start()
+
+# noinspection PyPep8
+import test.unit
+
+suite = unittest.TestLoader().loadTestsFromModule(test.unit)
+unittest.TextTestRunner(verbosity=2).run(suite)
+cov.stop()
+cov.save()
+
+source_files = list()
+for path, dirs, files in os.walk('earthdiagnostics'):
+    for filename in files:
+        if filename.endswith('.py'):
+            source_files.append(os.path.join(path, filename))
+
+cov.report(source_files)
+cov.html_report(source_files)
+
+
+
diff --git a/test/__init__.py b/test/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..9bad5790a5799b96f2e164d825c0b1f8ec0c2dfb
--- /dev/null
+++ b/test/__init__.py
@@ -0,0 +1 @@
+# coding=utf-8
diff --git a/test/unit/__init__.py b/test/unit/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..7a15d906c9002de2c6185d56ed22c90d2d2af217
--- /dev/null
+++ b/test/unit/__init__.py
@@ -0,0 +1,26 @@
+# coding=utf-8
+from test_data_manager import TestConversion
+from test_variable import TestVariable
+from test_constants import TestBasin, TestBasins
+from test_box import TestBox
+from test_diagnostic import TestDiagnostic
+from test_cdftools import TestCDFTools
+from test_utils import TestTempFile, TestUtils
+from test_psi import TestPsi
+from test_areamoc import TestAreaMoc
+from test_averagesection import TestAverageSection
+from test_cutsection import TestCutSection
+from test_convectionsites import TestConvectionSites
+from test_gyres import TestGyres
+from test_heatcontent import TestHeatContent
+from test_heatcontentlayer import TestHeatContentLayer
+from test_interpolate import TestInterpolate
+from test_maxmoc import TestMaxMoc
+from test_mixedlayerheatcontent import TestMixedLayerHeatContent
+from test_mixedlayersaltcontent import TestMixedLayerSaltContent
+from test_moc import TestMoc
+from test_siasiesiv import TestSiasiesiv
+from test_verticalmean import TestVerticalMean
+from test_verticalmeanmeters import TestVerticalMeanMeters
+from test_monthlymean import TestMonthlyMean
+from test_rewrite import TestRewrite
diff --git a/test/unit/test_areamoc.py b/test/unit/test_areamoc.py
new file mode 100644
index 0000000000000000000000000000000000000000..f4ddf699dc690767ed161511060f67c535ec0aa3
--- /dev/null
+++ b/test/unit/test_areamoc.py
@@ -0,0 +1,42 @@
+# coding=utf-8
+from unittest import TestCase
+
+from earthdiagnostics.box import Box
+from earthdiagnostics.constants import Basins
+from
earthdiagnostics.ocean.areamoc import AreaMoc +from mock import Mock + + +class TestAreaMoc(TestCase): + + def setUp(self): + self.data_manager = Mock() + self.diags = Mock() + + self.box = Box() + self.box.min_lat = 0 + self.box.max_lat = 0 + self.box.min_depth = 0 + self.box.max_depth = 0 + + self.diags.config.experiment.get_chunk_list.return_value = (('20010101', 0, 0), ('20010101', 0, 1)) + self.psi = AreaMoc(self.data_manager, '20000101', 1, 1, Basins.Antarctic, self.box) + + def test_generate_jobs(self): + jobs = AreaMoc.generate_jobs(self.diags, ['psi', '0', '0', '0', '0']) + self.assertEqual(len(jobs), 2) + self.assertEqual(jobs[0], AreaMoc(self.data_manager, '20010101', 0, 0, Basins.Global, self.box)) + self.assertEqual(jobs[1], AreaMoc(self.data_manager, '20010101', 0, 1, Basins.Global, self.box)) + + jobs = AreaMoc.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', 'atl']) + self.assertEqual(len(jobs), 2) + self.assertEqual(jobs[0], AreaMoc(self.data_manager, '20010101', 0, 0, Basins.Atlantic, self.box)) + self.assertEqual(jobs[1], AreaMoc(self.data_manager, '20010101', 0, 1, Basins.Atlantic, self.box)) + + with self.assertRaises(Exception): + AreaMoc.generate_jobs(self.diags, ['psi']) + with self.assertRaises(Exception): + AreaMoc.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', '0', '0']) + + def test_str(self): + self.assertEquals(str(self.psi), 'Area MOC Startdate: 20000101 Member: 1 Chunk: 1 Box: 0N0') diff --git a/test/unit/test_averagesection.py b/test/unit/test_averagesection.py new file mode 100644 index 0000000000000000000000000000000000000000..e9b4816cf976b6d9f8e2659ba4e87c5904033e9f --- /dev/null +++ b/test/unit/test_averagesection.py @@ -0,0 +1,42 @@ +# coding=utf-8 +from unittest import TestCase + +from earthdiagnostics.box import Box +from earthdiagnostics.ocean.averagesection import AverageSection +from mock import Mock + + +class TestAverageSection(TestCase): + + def setUp(self): + self.data_manager = Mock() + self.diags = Mock() + + self.box = Box() + self.box.min_lat = 0 + self.box.max_lat = 0 + self.box.min_lon = 0 + self.box.max_lon = 0 + + self.diags.config.experiment.get_chunk_list.return_value = (('20010101', 0, 0), ('20010101', 0, 1)) + self.psi = AverageSection(self.data_manager, '20000101', 1, 1, 'domain', 'var', self.box) + + def test_generate_jobs(self): + jobs = AverageSection.generate_jobs(self.diags, ['psi', 'var', '0', '0', '0', '0']) + self.assertEqual(len(jobs), 2) + self.assertEqual(jobs[0], AverageSection(self.data_manager, '20010101', 0, 0, 'ocean', 'var', self.box)) + self.assertEqual(jobs[1], AverageSection(self.data_manager, '20010101', 0, 1, 'ocean', 'var', self.box)) + + jobs = AverageSection.generate_jobs(self.diags, ['psi', 'var', '0', '0', '0', '0', 'domain']) + self.assertEqual(len(jobs), 2) + self.assertEqual(jobs[0], AverageSection(self.data_manager, '20010101', 0, 0, 'domain', 'var', self.box)) + self.assertEqual(jobs[1], AverageSection(self.data_manager, '20010101', 0, 1, 'domain', 'var', self.box)) + + with self.assertRaises(Exception): + AverageSection.generate_jobs(self.diags, ['psi']) + with self.assertRaises(Exception): + AverageSection.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', '0', '0', '0']) + + def test_str(self): + self.assertEquals(str(self.psi), 'Average section Startdate: 20000101 Member: 1 Chunk: 1 Box: 0N0E ' + 'Variable: domain:var') diff --git a/test/unit/test_box.py b/test/unit/test_box.py new file mode 100644 index 
0000000000000000000000000000000000000000..37b5ce4911333419281366b9d850e219c8aacb8d --- /dev/null +++ b/test/unit/test_box.py @@ -0,0 +1,93 @@ +# coding=utf-8 +from unittest import TestCase +from earthdiagnostics.box import Box + + +class TestBox(TestCase): + + def setUp(self): + self.box1 = Box() + self.box1.max_lat = 0 + self.box1.min_lat = -20 + self.box1.max_lon = 0 + self.box1.min_lon = -20 + self.box1.min_depth = 0 + self.box1.max_depth = 20 + + self.box2 = Box(True) + self.box2.max_lat = 20 + self.box2.min_lat = 20 + self.box2.max_lon = 20 + self.box2.min_lon = 20 + self.box2.min_depth = 20 + self.box2.max_depth = 20 + + self.box3 = Box() + + self.box4 = Box() + self.box4.max_lat = -10 + self.box4.min_lat = -20 + self.box4.max_lon = -10 + self.box4.min_lon = -20 + self.box4.min_depth = 0 + self.box4.max_depth = 20 + + def test_max_lat(self): + with self.assertRaises(ValueError): + Box().max_lat = 100 + with self.assertRaises(ValueError): + Box().max_lat = -100 + Box().max_lat = 0 + Box().max_lat = -20 + Box().max_lat = 20 + + def test_min_lat(self): + with self.assertRaises(ValueError): + Box().min_lat = 100 + with self.assertRaises(ValueError): + Box().min_lat = -100 + Box().min_lat = 0 + Box().min_lat = -90 + Box().min_lat = 90 + + def test_max_lon(self): + with self.assertRaises(ValueError): + Box().max_lon = 360 + with self.assertRaises(ValueError): + Box().max_lon = -360 + Box().max_lon = 0 + Box().max_lon = -20 + Box().max_lon = 20 + + def test_min_lon(self): + with self.assertRaises(ValueError): + Box().min_lon = 360 + with self.assertRaises(ValueError): + Box().min_lon = -360 + Box().min_lon = 0 + Box().min_lon = -80 + Box().min_lon = 80 + + def test_get_lat_str(self): + self.assertEquals('20S0N', self.box1.get_lat_str()) + self.assertEquals('20N', self.box2.get_lat_str()) + self.assertEquals('', self.box3.get_lat_str()) + self.assertEquals('20S10S', self.box4.get_lat_str()) + + def test_get_lon_str(self): + self.assertEquals('20W0E', self.box1.get_lon_str()) + self.assertEquals('20E', self.box2.get_lon_str()) + self.assertEquals('', self.box3.get_lon_str()) + self.assertEquals('20W10W', self.box4.get_lon_str()) + + def test_get_depth_str(self): + self.assertEquals('0-20', self.box1.get_depth_str()) + self.assertEquals('20m', self.box2.get_depth_str()) + self.assertEquals('', self.box3.get_depth_str()) + self.assertEquals('0-20', self.box4.get_depth_str()) + + def test__str__(self): + self.assertEquals('20S0N20W0E0-20', str(self.box1)) + self.assertEquals('20N20E20m', str(self.box2)) + self.assertEquals('', str(self.box3)) + self.assertEquals('20S10S20W10W0-20', str(self.box4)) diff --git a/test/unit/test_cdftools.py b/test/unit/test_cdftools.py new file mode 100644 index 0000000000000000000000000000000000000000..9fd8bb387dcca55354d1661510eded81ccd41b2f --- /dev/null +++ b/test/unit/test_cdftools.py @@ -0,0 +1,43 @@ +# coding=utf-8 +from unittest import TestCase +from earthdiagnostics.cdftools import CDFTools +import mock + + +class TestCDFTools(TestCase): + def setUp(self): + self.cdftools = CDFTools('') + mock.patch('os.path.join') + + # noinspection PyTypeChecker + def test_run(self): + with mock.patch('os.path.exists') as exists_mock: + def mock_exists(path): + """ + Function for os.path.exists mock + :param path: path to check + :type path: str + :return: true if path does not start with 'bad' + :rtype: bool + """ + return not path.startswith('bad') + exists_mock.side_effect = mock_exists + with mock.patch('earthdiagnostics.utils.Utils.execute_shell_command') as 
execute_mock: + execute_mock.return_value = ['Command output'] + with self.assertRaises(ValueError): + self.cdftools.run('badcommand', input='input_file', output='output_file') + with self.assertRaises(ValueError): + self.cdftools.run('command', input='badinput_file', output='output_file') + with self.assertRaises(ValueError): + self.cdftools.run('command', input=['input_file', 'badinput_file'], output='output_file') + with self.assertRaises(ValueError): + self.cdftools.run('command', input='input_file', output='input_file') + with self.assertRaises(Exception): + self.cdftools.run('command', input='input_file', output='badoutput_file') + + self.cdftools.run('command', input='input_file', output='output_file') + self.cdftools.run('command', input='input_file') + self.cdftools.run('command', input=None) + self.cdftools.run('command', input=['input_file', 'input_file2']) + self.cdftools.run('command', input='input_file', options='-o -p') + self.cdftools.run('command', input='input_file', options=('-o', '-p')) diff --git a/test/unit/test_constants.py b/test/unit/test_constants.py new file mode 100644 index 0000000000000000000000000000000000000000..f8010d53a4dbbe42a506dde27c501ed5f0e076f2 --- /dev/null +++ b/test/unit/test_constants.py @@ -0,0 +1,31 @@ +# coding=utf-8 +from unittest import TestCase +from earthdiagnostics.constants import Basins, Basin +from earthdiagnostics.box import Box + + +class TestBasins(TestCase): + + def test_parse(self): + self.assertEquals(Basins.Arctic, Basins.parse(Basins.Arctic)) + self.assertEquals(Basins.Arctic, Basins.parse('Arct')) + self.assertEquals(Basins.Arctic, Basins.parse('Arctic_Ocean')) + self.assertIsNone(Basins.parse('Basin not found')) + + +class TestBasin(TestCase): + + def setUp(self): + self.basin = Basin('bas', 'Basin', Box()) + + def test_shortname(self): + self.assertEquals('bas', self.basin.shortname) + + def test_fullname(self): + self.assertEquals('Basin', self.basin.fullname) + + def test__eq__(self): + self.assertTrue(Basin('bas', 'Basin') == self.basin) + self.assertFalse(Basin('bas', 'OtherBasin') == self.basin) + self.assertFalse(Basin('otbas', 'Basin') == self.basin) + self.assertFalse(Basin('otbas', 'OtherBasin') == self.basin) diff --git a/test/unit/test_convectionsites.py b/test/unit/test_convectionsites.py new file mode 100644 index 0000000000000000000000000000000000000000..b6a7e543ba60da2d741734a6d1eeaeff95e1bad6 --- /dev/null +++ b/test/unit/test_convectionsites.py @@ -0,0 +1,28 @@ +# coding=utf-8 +from unittest import TestCase +from earthdiagnostics.ocean.convectionsites import ConvectionSites +from mock import Mock + + +class TestConvectionSites(TestCase): + + def setUp(self): + self.data_manager = Mock() + + self.diags = Mock() + self.diags.model_version = 'model_version' + self.diags.config.experiment.get_chunk_list.return_value = (('20010101', 0, 0), ('20010101', 0, 1)) + + self.psi = ConvectionSites(self.data_manager, '20000101', 1, 1, 'model_version') + + def test_generate_jobs(self): + jobs = ConvectionSites.generate_jobs(self.diags, ['psi']) + self.assertEqual(len(jobs), 2) + self.assertEqual(jobs[0], ConvectionSites(self.data_manager, '20010101', 0, 0, 'model_version')) + self.assertEqual(jobs[1], ConvectionSites(self.data_manager, '20010101', 0, 1, 'model_version')) + + with self.assertRaises(Exception): + ConvectionSites.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', '0', '0', '0']) + + def test_str(self): + self.assertEquals(str(self.psi), 'Convection sites Startdate: 20000101 Member: 1 Chunk: 1') diff --git 
a/test/unit/test_cutsection.py b/test/unit/test_cutsection.py new file mode 100644 index 0000000000000000000000000000000000000000..060c4a4adab76a7c3ad7ca58329f42dc1fba0fac --- /dev/null +++ b/test/unit/test_cutsection.py @@ -0,0 +1,42 @@ +# coding=utf-8 +from unittest import TestCase + +from earthdiagnostics.box import Box +from earthdiagnostics.ocean.cutsection import CutSection +from mock import Mock + + +class TestCutSection(TestCase): + + def setUp(self): + self.data_manager = Mock() + self.diags = Mock() + + self.box = Box() + self.box.min_lat = 0 + self.box.max_lat = 0 + self.box.min_lon = 0 + self.box.max_lon = 0 + + self.diags.config.experiment.get_chunk_list.return_value = (('20010101', 0, 0), ('20010101', 0, 1)) + self.psi = CutSection(self.data_manager, '20000101', 1, 1, 'domain', 'var', True, 0) + + def test_generate_jobs(self): + jobs = CutSection.generate_jobs(self.diags, ['psi', 'var', 'true', '10']) + self.assertEqual(len(jobs), 2) + self.assertEqual(jobs[0], CutSection(self.data_manager, '20010101', 0, 0, 'ocean', 'var', True, 10)) + self.assertEqual(jobs[1], CutSection(self.data_manager, '20010101', 0, 1, 'ocean', 'var', True, 10)) + + jobs = CutSection.generate_jobs(self.diags, ['psi', 'var', 'false', '0', 'domain']) + self.assertEqual(len(jobs), 2) + self.assertEqual(jobs[0], CutSection(self.data_manager, '20010101', 0, 0, 'domain', 'var', False, 0)) + self.assertEqual(jobs[1], CutSection(self.data_manager, '20010101', 0, 1, 'domain', 'var', False, 0)) + + with self.assertRaises(Exception): + CutSection.generate_jobs(self.diags, ['psi']) + with self.assertRaises(Exception): + CutSection.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', '0', '0', '0']) + + def test_str(self): + self.assertEquals(str(self.psi), 'Cut section Startdate: 20000101 Member: 1 Chunk: 1 Variable: domain:var ' + 'Zonal: True Value: 0') diff --git a/test/unit/test_data_manager.py b/test/unit/test_data_manager.py new file mode 100644 index 0000000000000000000000000000000000000000..6bdc84b39140e42b8613a147af1acee8e886297a --- /dev/null +++ b/test/unit/test_data_manager.py @@ -0,0 +1,38 @@ +# coding=utf-8 + + +from unittest import TestCase + +from earthdiagnostics.datamanager import UnitConversion + + +class TestConversion(TestCase): + + def test__init(self): + conversion = UnitConversion('km', 'm', 1000, 0) + self.assertEqual(conversion.source, 'km') + self.assertEqual(conversion.destiny, 'm') + self.assertEqual(conversion.factor, 1000) + self.assertEqual(conversion.offset, 0) + + def test_add_conversion(self): + UnitConversion._dict_conversions = dict() + conversion = UnitConversion('km', 'm', 1000, 0) + UnitConversion.add_conversion(conversion) + self.assertIs(UnitConversion._dict_conversions[('km', 'm')], conversion) + UnitConversion._dict_conversions = dict() + + def test_get_factor_offset(self): + UnitConversion._dict_conversions = dict() + conversion = UnitConversion('km', 'm', 1000, 0) + UnitConversion.add_conversion(conversion) + self.assertEqual(UnitConversion.get_conversion_factor_offset('km', 'm'), (1000, 0)) + self.assertEqual(UnitConversion.get_conversion_factor_offset('m', 'km'), (1 / 1000.0, 0)) + self.assertEqual(UnitConversion.get_conversion_factor_offset('1e3 m', 'km'), (1, 0)) + self.assertEqual(UnitConversion.get_conversion_factor_offset('10^3 m', 'km'), (1, 0)) + self.assertEqual(UnitConversion.get_conversion_factor_offset('km', '1e3 m'), (1, 0)) + self.assertEqual(UnitConversion.get_conversion_factor_offset('km', '10^3 m'), (1, 0)) + 
self.assertEqual(UnitConversion.get_conversion_factor_offset('m', 'm'), (1, 0)) + self.assertEqual(UnitConversion.get_conversion_factor_offset('m²', 'km'), (None, None)) + + UnitConversion._dict_conversions = dict() diff --git a/test/unit/test_diagnostic.py b/test/unit/test_diagnostic.py new file mode 100644 index 0000000000000000000000000000000000000000..117e8e22d77e32fb7ffd2bd37321db2b538362ba --- /dev/null +++ b/test/unit/test_diagnostic.py @@ -0,0 +1,48 @@ +# coding=utf-8 +from earthdiagnostics.diagnostic import Diagnostic +from unittest import TestCase + + +class TestDiagnostic(TestCase): + + # noinspection PyMissingOrEmptyDocstring + class MockDiag(Diagnostic): + def compute(self): + pass + + @classmethod + def generate_jobs(cls, diags, options): + pass + + alias = 'mockdiag' + + def setUp(self): + self.diagnostic = Diagnostic(None) + Diagnostic.register(TestDiagnostic.MockDiag) + + def test_register(self): + with self.assertRaises(ValueError): + # noinspection PyTypeChecker + Diagnostic.register(str) + with self.assertRaises(ValueError): + Diagnostic.register(Diagnostic) + Diagnostic.register(TestDiagnostic.MockDiag) + + def test_get_diagnostic(self): + self.assertIsNone(Diagnostic.get_diagnostic('none')) + self.assertIs(TestDiagnostic.MockDiag, Diagnostic.get_diagnostic('mockdiag')) + + def test_generate_jobs(self): + with self.assertRaises(NotImplementedError): + Diagnostic.generate_jobs(None, ['']) + + def test_compute(self): + with self.assertRaises(NotImplementedError): + self.diagnostic.compute() + + def test_str(self): + self.assertEquals('Developer must override base class __str__ method', str(self.diagnostic)) + + def test_repr(self): + self.assertEquals(self.diagnostic.__repr__(), str(self.diagnostic)) + diff --git a/test/unit/test_gyres.py b/test/unit/test_gyres.py new file mode 100644 index 0000000000000000000000000000000000000000..77f3987258eca4cee0a494197090db777a2d7061 --- /dev/null +++ b/test/unit/test_gyres.py @@ -0,0 +1,29 @@ +# coding=utf-8 +from unittest import TestCase + +from earthdiagnostics.ocean.gyres import Gyres +from mock import Mock + + +class TestGyres(TestCase): + + def setUp(self): + self.data_manager = Mock() + + self.diags = Mock() + self.diags.model_version = 'model_version' + self.diags.config.experiment.get_chunk_list.return_value = (('20010101', 0, 0), ('20010101', 0, 1)) + + self.gyres = Gyres(self.data_manager, '20000101', 1, 1, 'model_version') + + def test_generate_jobs(self): + jobs = Gyres.generate_jobs(self.diags, ['psi']) + self.assertEqual(len(jobs), 2) + self.assertEqual(jobs[0], Gyres(self.data_manager, '20010101', 0, 0, 'model_version')) + self.assertEqual(jobs[1], Gyres(self.data_manager, '20010101', 0, 1, 'model_version')) + + with self.assertRaises(Exception): + Gyres.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', '0', '0', '0']) + + def test_str(self): + self.assertEquals(str(self.gyres), 'Gyres Startdate: 20000101 Member: 1 Chunk: 1') diff --git a/test/unit/test_heatcontent.py b/test/unit/test_heatcontent.py new file mode 100644 index 0000000000000000000000000000000000000000..32c6900afd86ced0e63d464bb0b0fab4f0ef2f4e --- /dev/null +++ b/test/unit/test_heatcontent.py @@ -0,0 +1,39 @@ +# coding=utf-8 +from unittest import TestCase + +from earthdiagnostics.box import Box +from earthdiagnostics.constants import Basins +from earthdiagnostics.ocean.heatcontent import HeatContent +from mock import Mock + + +class TestHeatContent(TestCase): + + def setUp(self): + self.data_manager = Mock() + + self.diags = Mock() + 
self.diags.model_version = 'model_version' + self.diags.config.experiment.get_chunk_list.return_value = (('20010101', 0, 0), ('20010101', 0, 1)) + + self.box = Box(True) + self.box.min_depth = 0 + self.box.max_depth = 100 + + self.heat_content = HeatContent(self.data_manager, '20000101', 1, 1, Basins.Global, 1, self.box) + + def test_generate_jobs(self): + jobs = HeatContent.generate_jobs(self.diags, ['psi', 'atl', '-1', '0', '100']) + self.assertEqual(len(jobs), 2) + self.assertEqual(jobs[0], HeatContent(self.data_manager, '20010101', 0, 0, Basins.Atlantic, -1, self.box)) + self.assertEqual(jobs[1], HeatContent(self.data_manager, '20010101', 0, 1, Basins.Atlantic, -1, self.box)) + + with self.assertRaises(Exception): + HeatContent.generate_jobs(self.diags, ['psi']) + + with self.assertRaises(Exception): + HeatContent.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', '0', '0', '0']) + + def test_str(self): + self.assertEquals(str(self.heat_content), 'Heat content Startdate: 20000101 Member: 1 Chunk: 1 Mixed layer: 1 ' + 'Box: 0m-100m Basin: Global_Ocean') diff --git a/test/unit/test_heatcontentlayer.py b/test/unit/test_heatcontentlayer.py new file mode 100644 index 0000000000000000000000000000000000000000..60b6dd8bd46abf050ee4b83c5ae996b2da94196a --- /dev/null +++ b/test/unit/test_heatcontentlayer.py @@ -0,0 +1,26 @@ +# coding=utf-8 +from unittest import TestCase +from earthdiagnostics.box import Box +from earthdiagnostics.ocean.heatcontentlayer import HeatContentLayer +from mock import Mock + + +class TestHeatContentLayer(TestCase): + + def setUp(self): + self.data_manager = Mock() + + self.diags = Mock() + self.diags.model_version = 'model_version' + self.diags.config.experiment.get_chunk_list.return_value = (('20010101', 0, 0), ('20010101', 0, 1)) + + self.weight = Mock() + + self.box = Box(True) + self.box.min_depth = 0 + self.box.max_depth = 100 + + self.psi = HeatContentLayer(self.data_manager, '20000101', 1, 1, self.box, self.weight, 0, 10) + + def test_str(self): + self.assertEquals(str(self.psi), 'Heat content layer Startdate: 20000101 Member: 1 Chunk: 1 Box: 0m-100m') diff --git a/test/unit/test_interpolate.py b/test/unit/test_interpolate.py new file mode 100644 index 0000000000000000000000000000000000000000..9a092cfdfca8254081920ad56a6dba1e28ec8328 --- /dev/null +++ b/test/unit/test_interpolate.py @@ -0,0 +1,52 @@ +# coding=utf-8 +from unittest import TestCase + +from earthdiagnostics.ocean.interpolate import Interpolate +from mock import Mock + + +class TestInterpolate(TestCase): + + def setUp(self): + self.data_manager = Mock() + + self.diags = Mock() + self.diags.model_version = 'model_version' + self.diags.config.experiment.get_chunk_list.return_value = (('20010101', 0, 0), ('20010101', 0, 1)) + self.diags.config.experiment.model_version = 'model_version' + + self.interpolate = Interpolate(self.data_manager, '20000101', 1, 1, 'domain', 'var', 'grid', 'model_version', + False) + + def test_generate_jobs(self): + jobs = Interpolate.generate_jobs(self.diags, ['interp', 'grid', 'var']) + self.assertEqual(len(jobs), 2) + self.assertEqual(jobs[0], Interpolate(self.data_manager, '20010101', 0, 0, 'ocean', 'var', 'grid', + 'model_version', False)) + self.assertEqual(jobs[1], Interpolate(self.data_manager, '20010101', 0, 1, 'ocean', 'var', 'grid', + 'model_version', False)) + + jobs = Interpolate.generate_jobs(self.diags, ['interp', 'grid', 'var', 'domain']) + self.assertEqual(len(jobs), 2) + self.assertEqual(jobs[0], Interpolate(self.data_manager, '20010101', 0, 0, 'domain', 
'var', 'grid', + 'model_version', False)) + self.assertEqual(jobs[1], Interpolate(self.data_manager, '20010101', 0, 1, 'domain', 'var', 'grid', + 'model_version', False)) + + jobs = Interpolate.generate_jobs(self.diags, ['interp', 'grid', 'var', 'domain', 'true']) + self.assertEqual(len(jobs), 2) + self.assertEqual(jobs[0], Interpolate(self.data_manager, '20010101', 0, 0, 'domain', 'var', 'grid', + 'model_version', True)) + self.assertEqual(jobs[1], Interpolate(self.data_manager, '20010101', 0, 1, 'domain', 'var', 'grid', + 'model_version', True)) + + with self.assertRaises(Exception): + Interpolate.generate_jobs(self.diags, ['interp']) + + with self.assertRaises(Exception): + Interpolate.generate_jobs(self.diags, ['interp', '0', '0', '0', '0', '0', '0', '0']) + + def test_str(self): + self.assertEquals(str(self.interpolate), 'Interpolate Startdate: 20000101 Member: 1 Chunk: 1 ' + 'Variable: domain:var Target grid: grid Invert lat: False ' + 'Model: model_version') diff --git a/test/unit/test_maxmoc.py b/test/unit/test_maxmoc.py new file mode 100644 index 0000000000000000000000000000000000000000..f31c58222b182c49d5e7635e0c0d708eb48be17e --- /dev/null +++ b/test/unit/test_maxmoc.py @@ -0,0 +1,52 @@ +# coding=utf-8 +from unittest import TestCase + +from earthdiagnostics.box import Box +from earthdiagnostics.constants import Basins +from earthdiagnostics.ocean.maxmoc import MaxMoc +from mock import Mock + + +class TestMaxMoc(TestCase): + + def setUp(self): + self.data_manager = Mock() + + self.box = Box() + self.box.min_lat = 0 + self.box.max_lat = 0 + self.box.min_depth = 0 + self.box.max_depth = 0 + + self.maxmoc = MaxMoc(self.data_manager, '20000101', 1, 2000, Basins.Global, self.box) + + def test_generate_jobs(self): + self.diags = Mock() + self.diags.model_version = 'model_version' + self.diags.startdates = ('20010101',) + self.diags.members = (0,) + self.diags.config.experiment.get_full_years.return_value = (2000, 2001) + + jobs = MaxMoc.generate_jobs(self.diags, ['psi', '0', '0', '0', '0']) + self.assertEqual(len(jobs), 2) + self.assertEqual(jobs[0], MaxMoc(self.data_manager, '20010101', 0, 2000, Basins.Global, self.box)) + self.assertEqual(jobs[1], MaxMoc(self.data_manager, '20010101', 0, 2001, Basins.Global, self.box)) + + jobs = MaxMoc.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', 'atl']) + self.assertEqual(len(jobs), 2) + self.assertEqual(jobs[0], MaxMoc(self.data_manager, '20010101', 0, 2000, Basins.Atlantic, self.box)) + self.assertEqual(jobs[1], MaxMoc(self.data_manager, '20010101', 0, 2001, Basins.Atlantic, self.box)) + + self.diags.config.experiment.get_full_years.return_value = list() + jobs = MaxMoc.generate_jobs(self.diags, ['psi', '0', '0', '0', '0']) + self.assertEqual(len(jobs), 0) + + with self.assertRaises(Exception): + MaxMoc.generate_jobs(self.diags, ['psi']) + + with self.assertRaises(Exception): + MaxMoc.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', '0', '0', '0']) + + def test_str(self): + self.assertEquals(str(self.maxmoc), 'Max moc Startdate: 20000101 Member: 1 Year: 2000 ' + 'Box: 0N0 Basin: Global_Ocean') diff --git a/test/unit/test_mixedlayerheatcontent.py b/test/unit/test_mixedlayerheatcontent.py new file mode 100644 index 0000000000000000000000000000000000000000..bf7bff95ff38b46ee998d78d4ce005217a9085d9 --- /dev/null +++ b/test/unit/test_mixedlayerheatcontent.py @@ -0,0 +1,29 @@ +# coding=utf-8 +from unittest import TestCase + +from earthdiagnostics.ocean.mixedlayerheatcontent import MixedLayerHeatContent +from mock import Mock + + +class 
TestMixedLayerHeatContent(TestCase): + + def setUp(self): + self.data_manager = Mock() + + self.diags = Mock() + self.diags.model_version = 'model_version' + self.diags.config.experiment.get_chunk_list.return_value = (('20010101', 0, 0), ('20010101', 0, 1)) + + self.mixed = MixedLayerHeatContent(self.data_manager, '20000101', 1, 1) + + def test_generate_jobs(self): + jobs = MixedLayerHeatContent.generate_jobs(self.diags, ['psi']) + self.assertEqual(len(jobs), 2) + self.assertEqual(jobs[0], MixedLayerHeatContent(self.data_manager, '20010101', 0, 0)) + self.assertEqual(jobs[1], MixedLayerHeatContent(self.data_manager, '20010101', 0, 1)) + + with self.assertRaises(Exception): + MixedLayerHeatContent.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', '0', '0', '0']) + + def test_str(self): + self.assertEquals(str(self.mixed), 'Mixed layer heat content Startdate: 20000101 Member: 1 Chunk: 1') diff --git a/test/unit/test_mixedlayersaltcontent.py b/test/unit/test_mixedlayersaltcontent.py new file mode 100644 index 0000000000000000000000000000000000000000..38074f0f481675b968dd331f8e5b65a08a4c5794 --- /dev/null +++ b/test/unit/test_mixedlayersaltcontent.py @@ -0,0 +1,29 @@ +# coding=utf-8 +from unittest import TestCase + +from earthdiagnostics.ocean.mixedlayersaltcontent import MixedLayerSaltContent +from mock import Mock + + +class TestMixedLayerSaltContent(TestCase): + + def setUp(self): + self.data_manager = Mock() + + self.diags = Mock() + self.diags.model_version = 'model_version' + self.diags.config.experiment.get_chunk_list.return_value = (('20010101', 0, 0), ('20010101', 0, 1)) + + self.mixed = MixedLayerSaltContent(self.data_manager, '20000101', 1, 1) + + def test_generate_jobs(self): + jobs = MixedLayerSaltContent.generate_jobs(self.diags, ['psi']) + self.assertEqual(len(jobs), 2) + self.assertEqual(jobs[0], MixedLayerSaltContent(self.data_manager, '20010101', 0, 0)) + self.assertEqual(jobs[1], MixedLayerSaltContent(self.data_manager, '20010101', 0, 1)) + + with self.assertRaises(Exception): + MixedLayerSaltContent.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', '0', '0', '0']) + + def test_str(self): + self.assertEquals(str(self.mixed), 'Mixed layer salt content Startdate: 20000101 Member: 1 Chunk: 1') diff --git a/test/unit/test_moc.py b/test/unit/test_moc.py new file mode 100644 index 0000000000000000000000000000000000000000..1a143035edfc3e841d3b9907bfa21ddd2214bf90 --- /dev/null +++ b/test/unit/test_moc.py @@ -0,0 +1,29 @@ +# coding=utf-8 +from unittest import TestCase + +from earthdiagnostics.ocean.moc import Moc +from mock import Mock + + +class TestMoc(TestCase): + + def setUp(self): + self.data_manager = Mock() + + self.diags = Mock() + self.diags.model_version = 'model_version' + self.diags.config.experiment.get_chunk_list.return_value = (('20010101', 0, 0), ('20010101', 0, 1)) + + self.mixed = Moc(self.data_manager, '20000101', 1, 1) + + def test_generate_jobs(self): + jobs = Moc.generate_jobs(self.diags, ['psi']) + self.assertEqual(len(jobs), 2) + self.assertEqual(jobs[0], Moc(self.data_manager, '20010101', 0, 0)) + self.assertEqual(jobs[1], Moc(self.data_manager, '20010101', 0, 1)) + + with self.assertRaises(Exception): + Moc.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', '0', '0', '0']) + + def test_str(self): + self.assertEquals(str(self.mixed), 'MOC Startdate: 20000101 Member: 1 Chunk: 1') diff --git a/test/unit/test_monthlymean.py b/test/unit/test_monthlymean.py new file mode 100644 index 
diff --git a/test/unit/test_monthlymean.py b/test/unit/test_monthlymean.py
new file mode 100644
index 0000000000000000000000000000000000000000..669490898301aff7e1c6c43ca77cfedae3255ce3
--- /dev/null
+++ b/test/unit/test_monthlymean.py
@@ -0,0 +1,49 @@
+# coding=utf-8
+from unittest import TestCase
+
+from earthdiagnostics.box import Box
+from earthdiagnostics.general.monthlymean import MonthlyMean
+from mock import Mock
+
+
+class TestMonthlyMean(TestCase):
+
+    def setUp(self):
+        self.data_manager = Mock()
+
+        self.diags = Mock()
+        self.diags.model_version = 'model_version'
+        self.diags.config.experiment.get_chunk_list.return_value = (('20010101', 0, 0), ('20010101', 0, 1))
+
+        self.box = Box()
+        self.box.min_depth = 0
+        self.box.max_depth = 100
+
+        self.mixed = MonthlyMean(self.data_manager, '20000101', 1, 1, 'domain', 'var', 'freq', '')
+
+    def test_generate_jobs(self):
+
+        jobs = MonthlyMean.generate_jobs(self.diags, ['psi', 'var', 'domain'])
+        self.assertEqual(len(jobs), 2)
+        self.assertEqual(jobs[0], MonthlyMean(self.data_manager, '20010101', 0, 0, 'domain', 'var', 'day', ''))
+        self.assertEqual(jobs[1], MonthlyMean(self.data_manager, '20010101', 0, 1, 'domain', 'var', 'day', ''))
+
+        jobs = MonthlyMean.generate_jobs(self.diags, ['psi', 'var', 'domain', 'freq'])
+        self.assertEqual(len(jobs), 2)
+        self.assertEqual(jobs[0], MonthlyMean(self.data_manager, '20010101', 0, 0, 'domain', 'var', 'freq', ''))
+        self.assertEqual(jobs[1], MonthlyMean(self.data_manager, '20010101', 0, 1, 'domain', 'var', 'freq', ''))
+
+        jobs = MonthlyMean.generate_jobs(self.diags, ['psi', 'var', 'domain', 'freq', 'grid'])
+        self.assertEqual(len(jobs), 2)
+        self.assertEqual(jobs[0], MonthlyMean(self.data_manager, '20010101', 0, 0, 'domain', 'var', 'freq', 'grid'))
+        self.assertEqual(jobs[1], MonthlyMean(self.data_manager, '20010101', 0, 1, 'domain', 'var', 'freq', 'grid'))
+
+        with self.assertRaises(Exception):
+            MonthlyMean.generate_jobs(self.diags, ['psi'])
+
+        with self.assertRaises(Exception):
+            MonthlyMean.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', '0', '0', '0'])
+
+    def test_str(self):
+        self.assertEqual(str(self.mixed), 'Calculate monthly mean Startdate: 20000101 Member: 1 Chunk: 1 '
+                                          'Variable: domain:var')
diff --git a/test/unit/test_psi.py b/test/unit/test_psi.py
new file mode 100644
index 0000000000000000000000000000000000000000..3099fa82bd3178f7dec2327623e59ec06e7b326e
--- /dev/null
+++ b/test/unit/test_psi.py
@@ -0,0 +1,25 @@
+# coding=utf-8
+from unittest import TestCase
+from earthdiagnostics.ocean.psi import Psi
+from mock import Mock
+
+
+class TestPsi(TestCase):
+
+    def setUp(self):
+        self.data_manager = Mock()
+        self.diags = Mock()
+        self.diags.config.experiment.get_chunk_list.return_value = (('20010101', 0, 0), ('20010101', 0, 1))
+        self.psi = Psi(self.data_manager, '20000101', 1, 1)
+
+    def test_generate_jobs(self):
+        jobs = Psi.generate_jobs(self.diags, ['psi'])
+        self.assertEqual(len(jobs), 2)
+        self.assertEqual(jobs[0], Psi(self.data_manager, '20010101', 0, 0))
+        self.assertEqual(jobs[1], Psi(self.data_manager, '20010101', 0, 1))
+
+        with self.assertRaises(Exception):
+            Psi.generate_jobs(self.diags, ['psi', 'badoption'])
+
+    def test_str(self):
+        self.assertEqual(str(self.psi), 'PSI Startdate: 20000101 Member: 1 Chunk: 1')
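MonthlyMean is the first diagnostic in this set with optional trailing options: when omitted, the frequency falls back to 'day' and the grid to the empty string. A sketch of how such trailing defaults can be parsed, with hypothetical names (the tests only constrain the observed defaults, not the actual parser):

# Hypothetical sketch of trailing-default option parsing, as implied by the
# MonthlyMean tests: [alias, variable, domain[, frequency[, grid]]].
def parse_monthly_mean_options(options):
    if not 3 <= len(options) <= 5:
        raise Exception('MonthlyMean expects: variable, domain[, frequency[, grid]]')
    variable = options[1]
    domain = options[2]
    frequency = options[3] if len(options) > 3 else 'day'   # default source frequency
    grid = options[4] if len(options) > 4 else ''           # '' means the native grid
    return domain, variable, frequency, grid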
diff --git a/test/unit/test_rewrite.py b/test/unit/test_rewrite.py
new file mode 100644
index 0000000000000000000000000000000000000000..3cd0a7441cd99de6d08a979ae137111fac4a5389
--- /dev/null
+++ b/test/unit/test_rewrite.py
@@ -0,0 +1,39 @@
+# coding=utf-8
+from unittest import TestCase
+
+from earthdiagnostics.box import Box
+from earthdiagnostics.general.rewrite import Rewrite
+from mock import Mock
+
+
+class TestRewrite(TestCase):
+
+    def setUp(self):
+        self.data_manager = Mock()
+
+        self.diags = Mock()
+        self.diags.model_version = 'model_version'
+        self.diags.config.experiment.get_chunk_list.return_value = (('20010101', 0, 0), ('20010101', 0, 1))
+
+        self.box = Box()
+        self.box.min_depth = 0
+        self.box.max_depth = 100
+
+        self.mixed = Rewrite(self.data_manager, '20000101', 1, 1, 'domain', 'var')
+
+    def test_generate_jobs(self):
+
+        jobs = Rewrite.generate_jobs(self.diags, ['psi', 'var', 'domain'])
+        self.assertEqual(len(jobs), 2)
+        self.assertEqual(jobs[0], Rewrite(self.data_manager, '20010101', 0, 0, 'domain', 'var'))
+        self.assertEqual(jobs[1], Rewrite(self.data_manager, '20010101', 0, 1, 'domain', 'var'))
+
+        with self.assertRaises(Exception):
+            Rewrite.generate_jobs(self.diags, ['psi'])
+
+        with self.assertRaises(Exception):
+            Rewrite.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', '0', '0', '0'])
+
+    def test_str(self):
+        self.assertEqual(str(self.mixed), 'Rewrite output Startdate: 20000101 Member: 1 Chunk: 1 '
+                                          'Variable: domain:var')
diff --git a/test/unit/test_siasiesiv.py b/test/unit/test_siasiesiv.py
new file mode 100644
index 0000000000000000000000000000000000000000..2ed284261464ec4341067576c9e2a89019d2fd49
--- /dev/null
+++ b/test/unit/test_siasiesiv.py
@@ -0,0 +1,21 @@
+# coding=utf-8
+from unittest import TestCase
+
+from earthdiagnostics.constants import Basins
+from earthdiagnostics.ocean.siasiesiv import Siasiesiv
+from mock import Mock
+
+
+class TestSiasiesiv(TestCase):
+    def setUp(self):
+        self.data_manager = Mock()
+
+        self.diags = Mock()
+        self.diags.model_version = 'model_version'
+        self.diags.config.experiment.get_chunk_list.return_value = (('20010101', 0, 0), ('20010101', 0, 1))
+
+        self.mask = Mock()
+        self.psi = Siasiesiv(self.data_manager, '20000101', 1, 1, Basins.Global, self.mask)
+
+    def test_str(self):
+        self.assertEqual(str(self.psi), 'Siasiesiv Startdate: 20000101 Member: 1 Chunk: 1 Basin: Global_Ocean')
diff --git a/test/unit/test_utils.py b/test/unit/test_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..603001129c7350292b575767075ff3fe10d16e39
--- /dev/null
+++ b/test/unit/test_utils.py
@@ -0,0 +1,110 @@
+# coding=utf-8
+from unittest import TestCase
+import mock
+
+from earthdiagnostics.utils import TempFile, Utils
+
+
+class TestTempFile(TestCase):
+    def setUp(self):
+        TempFile.scratch_folder = '/tmp'
+        TempFile.prefix = 'prefix'
+
+    def test_get(self):
+        self.assertEqual(TempFile.get('tempfile', clean=False), '/tmp/tempfile')
+        self.assertEqual(TempFile.get('tempfile2', clean=True), '/tmp/tempfile2')
+        self.assertNotIn('/tmp/tempfile', TempFile.files)
+        self.assertIn('/tmp/tempfile2', TempFile.files)
+
+        TempFile.autoclean = True
+        self.assertEqual(TempFile.get('tempfile3'), '/tmp/tempfile3')
+        self.assertIn('/tmp/tempfile3', TempFile.files)
+
+        TempFile.autoclean = False
+        self.assertEqual(TempFile.get('tempfile4'), '/tmp/tempfile4')
+        self.assertNotIn('/tmp/tempfile4', TempFile.files)
+
+        with mock.patch('tempfile.mkstemp') as mkstemp_mock:
+            with mock.patch('os.close') as close_mock:
+                mkstemp_mock.return_value = (34, 'path_to_tempfile')
+                TempFile.get()
+                TempFile.get(suffix='suffix')
+
+                mkstemp_mock.assert_has_calls((mock.call(dir='/tmp', prefix='prefix', suffix='.nc'),
+                                               mock.call(dir='/tmp', prefix='prefix', suffix='suffix')))
+                close_mock.assert_has_calls((mock.call(34), mock.call(34)))
+
+    def test_clean(self):
+        with mock.patch('os.path.exists') as exists_mock:
+            with mock.patch('tempfile.mkstemp'):
+                with mock.patch('os.close'):
+                    with mock.patch('os.remove'):
+                        TempFile.clean()
+                        TempFile.clean()
+                        exists_mock.side_effect = [True, False]
+                        TempFile.autoclean = True
+                        TempFile.get('tempfile')
+                        TempFile.get('tempfile2')
+                        TempFile.clean()
+                        self.assertEqual(len(TempFile.files), 0)
+
+
+class TestUtils(TestCase):
+
+    def test_rename_variable(self):
+        with mock.patch('earthdiagnostics.utils.Utils.rename_variables') as rename_mock:
+            Utils.rename_variable('file', 'old', 'new')
+            Utils.rename_variable('file', 'old', 'new', False, True)
+            rename_mock.assert_has_calls((mock.call('file', {'old': 'new'}, True, False),
+                                          mock.call('file', {'old': 'new'}, False, True)))
+
+    def test_rename_variables(self):
+        mock_handler = mock.Mock()
+        mock_handler.variables = dict()
+        mock_handler.dimensions = dict()
+        mock_handler.variables['old'] = mock.Mock()
+        mock_handler.variables['old_var'] = mock.Mock()
+        mock_handler.dimensions['old'] = mock.Mock()
+        mock_handler.ncattrs.return_value = 'attribute'
+        mock_handler.attribute = 'value'
+
+        with mock.patch('earthdiagnostics.utils.Utils.openCdf') as opencdf_mock:
+            with mock.patch('shutil.copyfile'):
+                with mock.patch('earthdiagnostics.utils.Utils.move_file'):
+                    opencdf_mock.return_value = mock_handler
+                    Utils.rename_variables('file', {'old': 'old_var'})
+                    Utils.rename_variables('file', {'old2': 'new'}, False)
+                    Utils.rename_variables('file', {'old2': 'new', 'old': 'new'}, False)
+                    Utils.rename_variables('file', {'old': 'new'}, False, True)
+                    Utils.rename_variables('file', {'old_var': 'new'}, False, True)
+
+                    with self.assertRaises(ValueError):
+                        Utils.rename_variables('file', {'new': 'new'})
+                    with self.assertRaises(Exception):
+                        Utils.rename_variables('file', {'old2': 'new'})
+                    with self.assertRaises(Exception):
+                        Utils.rename_variables('file', {'old2': 'new', 'old': 'new'})
+                    with self.assertRaises(Exception):
+                        Utils.rename_variables('file', {'old_var': 'new'}, rename_dimension=True)
+
+    def test_convert2netcdf4(self):
+        mock_handler = mock.Mock()
+
+        with mock.patch('earthdiagnostics.utils.Utils.openCdf') as opencdf_mock:
+            with mock.patch('earthdiagnostics.utils.Utils.execute_shell_command') as execute_mock:
+                with mock.patch('earthdiagnostics.utils.TempFile.get') as tempfile_mock:
+                    with mock.patch('shutil.move'):
+                        tempfile_mock.return_value = 'tempfile'
+                        opencdf_mock.return_value = mock_handler
+                        mock_handler.file_format = 'NETCDF4'
+                        Utils.convert2netcdf4('file')
+
+                        mock_handler.file_format = 'OTHER'
+                        Utils.convert2netcdf4('file2')
+                        execute_mock.assert_called_with(['nccopy', '-4', '-d4', '-s', 'file2', 'tempfile'])
+
+                        mock_handler.file_format = 'NETCDF4'
+                        Utils.convert2netcdf4('file3')
+                        execute_mock.assert_called_with(['nccopy', '-4', '-d4', '-s', 'file3', 'tempfile'])
+
+        self.assertEqual(execute_mock.call_count, 2)
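test_utils.py leans entirely on mock.patch context managers so that no real files are created, closed or moved. A standalone illustration of the same stubbing pattern follows; the helper function is hypothetical, but its mkstemp/close choreography mirrors what the tests expect from TempFile.get:

# Standalone, hypothetical illustration of the stubbing pattern used in
# test_utils.py: patch the OS-level calls, drive the code under test, then
# assert on how it used them.
import os
import tempfile

import mock


def make_scratch_file(folder, prefix):
    # Mirrors the behaviour the tests pin down for TempFile.get(): create a
    # temporary NetCDF file, close the descriptor, return the path.
    fd, path = tempfile.mkstemp(dir=folder, prefix=prefix, suffix='.nc')
    os.close(fd)
    return path


with mock.patch('tempfile.mkstemp') as mkstemp_mock:
    with mock.patch('os.close') as close_mock:
        mkstemp_mock.return_value = (34, '/tmp/prefix_abc.nc')
        assert make_scratch_file('/tmp', 'prefix') == '/tmp/prefix_abc.nc'
        mkstemp_mock.assert_called_once_with(dir='/tmp', prefix='prefix', suffix='.nc')
        close_mock.assert_called_once_with(34)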
diff --git a/test/unit/test_variable.py b/test/unit/test_variable.py
new file mode 100644
index 0000000000000000000000000000000000000000..67d68dc6f38f78ee29cfb3b0e4a101d0cc6cf5c5
--- /dev/null
+++ b/test/unit/test_variable.py
@@ -0,0 +1,27 @@
+# coding=utf-8
+from unittest import TestCase
+
+from earthdiagnostics.variable import Variable
+
+
+class TestVariable(TestCase):
+
+    def test__init__(self):
+        variable = Variable('alias:alias2,name,standard_name,long_name,domain,basin,units,'
+                            'valid_min,valid_max'.split(','))
+        self.assertEqual(variable.short_name, 'name')
+        self.assertEqual(variable.standard_name, 'standard_name')
+        self.assertEqual(variable.long_name, 'long_name')
+        self.assertEqual(variable.domain, 'domain')
+        self.assertEqual(variable.basin, None)
+        self.assertEqual(variable.units, 'units')
+        self.assertEqual(variable.valid_min, 'valid_min')
+        self.assertEqual(variable.valid_max, 'valid_max')
+
+    def test_get_variable(self):
+        Variable._dict_variables = dict()
+        variable = Variable('alias:alias2,name,standard_name,long_name,domain,basin,units,valid_min,'
+                            'valid_max'.split(','))
+        Variable._dict_variables['var'] = variable
+        self.assertIs(Variable.get_variable('var'), variable)
+        self.assertIsNone(Variable.get_variable('novar'))
diff --git a/test/unit/test_verticalmean.py b/test/unit/test_verticalmean.py
new file mode 100644
index 0000000000000000000000000000000000000000..dc2d32ae8d45f5160f453acdf7263412c53eda90
--- /dev/null
+++ b/test/unit/test_verticalmean.py
@@ -0,0 +1,50 @@
+# coding=utf-8
+from unittest import TestCase
+
+from earthdiagnostics.box import Box
+from earthdiagnostics.ocean.verticalmean import VerticalMean
+from mock import Mock
+
+
+class TestVerticalMean(TestCase):
+
+    def setUp(self):
+        self.data_manager = Mock()
+
+        self.diags = Mock()
+        self.diags.model_version = 'model_version'
+        self.diags.config.experiment.get_chunk_list.return_value = (('20010101', 0, 0), ('20010101', 0, 1))
+
+        self.box = Box()
+        self.box.min_depth = 0
+        self.box.max_depth = 100
+
+        self.mixed = VerticalMean(self.data_manager, '20000101', 1, 1, 'var', self.box)
+
+    def test_generate_jobs(self):
+        jobs = VerticalMean.generate_jobs(self.diags, ['psi', 'var', '0', '100'])
+        self.assertEqual(len(jobs), 2)
+        self.assertEqual(jobs[0], VerticalMean(self.data_manager, '20010101', 0, 0, 'var', self.box))
+        self.assertEqual(jobs[1], VerticalMean(self.data_manager, '20010101', 0, 1, 'var', self.box))
+
+        jobs = VerticalMean.generate_jobs(self.diags, ['psi', 'var', '0'])
+        box = Box()
+        box.min_depth = 0
+        self.assertEqual(len(jobs), 2)
+        self.assertEqual(jobs[0], VerticalMean(self.data_manager, '20010101', 0, 0, 'var', box))
+        self.assertEqual(jobs[1], VerticalMean(self.data_manager, '20010101', 0, 1, 'var', box))
+
+        jobs = VerticalMean.generate_jobs(self.diags, ['psi', 'var'])
+        self.assertEqual(len(jobs), 2)
+        self.assertEqual(jobs[0], VerticalMean(self.data_manager, '20010101', 0, 0, 'var', Box()))
+        self.assertEqual(jobs[1], VerticalMean(self.data_manager, '20010101', 0, 1, 'var', Box()))
+
+        with self.assertRaises(Exception):
+            VerticalMean.generate_jobs(self.diags, ['psi'])
+
+        with self.assertRaises(Exception):
+            VerticalMean.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', '0', '0', '0'])
+
+    def test_str(self):
+        self.assertEqual(str(self.mixed), 'Vertical mean Startdate: 20000101 Member: 1 Chunk: 1 Variable: var '
+                                          'Box: 0-100')
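These assertions compare jobs built by generate_jobs against expectations constructed on the spot, which only works if the diagnostics and their Box members implement value equality rather than identity. A minimal sketch of that requirement, under the assumption that the constructor flag controls the meters rendering seen in the test_str outputs ('0-100' versus '0m-100m'); the real Box also carries latitude bounds and its own string representation:

# Hypothetical sketch: value equality for a Box-like object, so that a job
# holding Box() compares equal to an equivalent job built inside
# generate_jobs. Not the project's actual implementation.
class Box(object):
    def __init__(self, depth_in_meters=False):
        self.depth_in_meters = depth_in_meters  # assumed to pick '0m-100m' over '0-100'
        self.min_depth = None
        self.max_depth = None

    def __eq__(self, other):
        return (self.depth_in_meters == other.depth_in_meters and
                self.min_depth == other.min_depth and
                self.max_depth == other.max_depth)

    def __ne__(self, other):
        # Python 2 needs the inverse operator spelled out explicitly.
        return not self == other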
diff --git a/test/unit/test_verticalmeanmeters.py b/test/unit/test_verticalmeanmeters.py
new file mode 100644
index 0000000000000000000000000000000000000000..20599cd56fe64c3f4e817a60c396cf167164fc59
--- /dev/null
+++ b/test/unit/test_verticalmeanmeters.py
@@ -0,0 +1,50 @@
+# coding=utf-8
+from unittest import TestCase
+
+from earthdiagnostics.box import Box
+from earthdiagnostics.ocean.verticalmeanmeters import VerticalMeanMeters
+from mock import Mock
+
+
+class TestVerticalMeanMeters(TestCase):
+
+    def setUp(self):
+        self.data_manager = Mock()
+
+        self.diags = Mock()
+        self.diags.model_version = 'model_version'
+        self.diags.config.experiment.get_chunk_list.return_value = (('20010101', 0, 0), ('20010101', 0, 1))
+
+        self.box = Box(True)
+        self.box.min_depth = 0
+        self.box.max_depth = 100
+
+        self.mixed = VerticalMeanMeters(self.data_manager, '20000101', 1, 1, 'var', self.box)
+
+    def test_generate_jobs(self):
+        jobs = VerticalMeanMeters.generate_jobs(self.diags, ['psi', 'var', '0', '100'])
+        self.assertEqual(len(jobs), 2)
+        self.assertEqual(jobs[0], VerticalMeanMeters(self.data_manager, '20010101', 0, 0, 'var', self.box))
+        self.assertEqual(jobs[1], VerticalMeanMeters(self.data_manager, '20010101', 0, 1, 'var', self.box))
+
+        jobs = VerticalMeanMeters.generate_jobs(self.diags, ['psi', 'var', '0'])
+        box = Box(True)
+        box.min_depth = 0
+        self.assertEqual(len(jobs), 2)
+        self.assertEqual(jobs[0], VerticalMeanMeters(self.data_manager, '20010101', 0, 0, 'var', box))
+        self.assertEqual(jobs[1], VerticalMeanMeters(self.data_manager, '20010101', 0, 1, 'var', box))
+
+        jobs = VerticalMeanMeters.generate_jobs(self.diags, ['psi', 'var'])
+        self.assertEqual(len(jobs), 2)
+        self.assertEqual(jobs[0], VerticalMeanMeters(self.data_manager, '20010101', 0, 0, 'var', Box(True)))
+        self.assertEqual(jobs[1], VerticalMeanMeters(self.data_manager, '20010101', 0, 1, 'var', Box(True)))
+
+        with self.assertRaises(Exception):
+            VerticalMeanMeters.generate_jobs(self.diags, ['psi'])
+
+        with self.assertRaises(Exception):
+            VerticalMeanMeters.generate_jobs(self.diags, ['psi', '0', '0', '0', '0', '0', '0', '0'])
+
+    def test_str(self):
+        self.assertEqual(str(self.mixed), 'Vertical mean meters Startdate: 20000101 Member: 1 Chunk: 1 Variable: var '
+                                          'Box: 0m-100m')
diff --git a/testing_ocean_pp_moore.job b/testing_ocean_pp_moore.job
deleted file mode 100755
index 9e2cab3eb0fbcc58d192e0e6b61056aa2d7c79bb..0000000000000000000000000000000000000000
--- a/testing_ocean_pp_moore.job
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/bash
-#$ -l h_vmem=4G
-#$ -l s_rt=24:00:00
-#$ -l h_rt=24:00:00
-
-set -evx
-oceanpp_repository=/home/Earth/pbretonn/es_git/ocean_diagnostics
-workdir=/scratch/tmp/post_ocean/$$
-
-mkdir -p $workdir
-cd $workdir
-if [ -e ${oceanpp_repository}/ocean_pp.bash ] ; then
-  cp ${oceanpp_repository}/ocean_pp.bash .
-else
-  echo "Please fill up the location of your ocean_pp repository"
-  exit 1
-fi
-
-#lstexp=('b02s_bis')
-#lstexp=('b02s_ter' 'l00w_bis') #orig
-lstexp=('b02s' 'i00k' 'i01t' 'l00v' 'l00w' 'glorys' 'nemovar' 'b02s_bis' 'b02s_ter' 'l00w_bis') #orig
-for exp in ${lstexp[@]} ; do
-  tmp=${oceanpp_repository//\//\\\/}
-  sed -e "s/PATHCOMMONOCEANDIAG=.*/PATHCOMMONOCEANDIAG=${tmp}/g" /shared/earth/software/scripts/testing_ocean_pp/config_file-ocean_pp_${exp}.bash &> config_file-ocean_pp_${exp}.bash
-  ./ocean_pp.bash config_file-ocean_pp_${exp}.bash
-done