import netCDF4 as nc
import numpy as np
# Define dimensions
y_size_1 = 88838
y_size_2 = 105704
y_size_3 = 131072
x_size = 1
# Create the NetCDF restart files
rstas_file = nc.Dataset('rstas.nc', 'w', format='NETCDF4')
rstos_file = nc.Dataset('rstos.nc', 'w', format='NETCDF4')
# Define dimensions in each NetCDF file
rstas_file.createDimension('y', y_size_1)
rstas_file.createDimension('x', x_size)
rstos_file.createDimension('y', y_size_2)
rstos_file.createDimension('x', x_size)
# Create the coupling-field variables in the restart files
f1 = rstas_file.createVariable('A_f1_oce', 'f4', ('y', 'x'))
f2 = rstas_file.createVariable('A_f2_rnf', 'f4', ('y', 'x'))
f3 = rstas_file.createVariable('R_f3_oce', 'f4', ('y', 'x'))
f4 = rstos_file.createVariable('O_f4_atm', 'f4', ('y', 'x'))
# Generate dummy data for the variables
data_1 = np.random.rand(y_size_1, x_size)
data_2 = np.random.rand(y_size_2, x_size)
# Assign data to the variables
f1[:, :] = data_1
f2[:, :] = data_1
f3[:, :] = data_1
f4[:, :] = data_2
# Close the NetCDF files
rstas_file.close()
rstos_file.close()
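For a quick sanity check of the restart files written above, a minimal sketch that re-opens them read-only and summarizes each coupling field; it only reuses the file and variable names created by the script, no additional names are assumed:

import netCDF4 as nc
import numpy as np

# Re-open each restart file read-only and print a short summary per coupling field
for fname in ('rstas.nc', 'rstos.nc'):
    with nc.Dataset(fname, 'r') as ds:
        for name, var in ds.variables.items():
            data = np.asarray(var[:])
            print('{} {}: shape={}, min={:.3e}, mean={:.3e}, max={:.3e}'.format(
                fname, name, data.shape, data.min(), data.mean(), data.max()))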
#!/bin/bash
# SLURM-specific options for MareNostrum4
#SBATCH --qos=interactive
#SBATCH -p interactive
#SBATCH -N 1
#SBATCH -n 3
#SBATCH -t 1:00:00
#SBATCH -t 0:10:00
##SBATCH --exclusive
# Load the Python and netCDF environment modules
@@ -25,7 +25,7 @@ export MPIRUN4PY="mpirun"
# prepare rundir
rundir=run/
srcdir=`pwd`
datadir=/gpfs/scratch/bsc32/bsc32442/python-amip-reader/tests/data/ece3-toy-model
datadir=/gpfs/scratch/bsc32/bsc32442/python-amip-reader/data/ece3-toy-model
rm -rf $rundir
mkdir -p $rundir
@@ -38,12 +38,17 @@ cp -f $srcdir/*.py $rundir
cp -f $srcdir/namcouple $rundir
cp -f $srcdir/*.yaml $rundir
# Copy the restart files
cp -f $srcdir/rstas.nc $rundir
cp -f $srcdir/rstos.nc $rundir
cd $rundir
# run IFS+NEMO+runoff toy model
ifs_nproc=1
nemo_nproc=1
ifs_nproc=3
nemo_nproc=2
rnf_neproc=1
mpirun -np $ifs_nproc python3 $srcdir/toy_model.py --yaml_file_path ifs_toy_model.yaml : -np $nemo_nproc python3 $srcdir/toy_model.py --yaml_file_path nemo_toy_model.yaml : -np $rnf_neproc python3 $srcdir/toy_model.py --yaml_file_path runoff_toy_model.yaml
toy_model_script=$srcdir/toy_model.py
mpirun -np $ifs_nproc python3 $toy_model_script --yaml_file_path ifs_toy_model.yaml : -np $nemo_nproc python3 $toy_model_script --yaml_file_path nemo_toy_model.yaml : -np $rnf_neproc python3 $toy_model_script --yaml_file_path runoff_toy_model.yaml
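# Note: with ifs_nproc=3, nemo_nproc=2 and rnf_neproc=1, the MPMD mpirun line above starts
# 3 + 2 + 1 = 6 MPI tasks in total, so the SLURM allocation has to provide at least 6 tasks.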
@@ -19,7 +19,7 @@
# Fields sent from Atmosphere to Ocean
# =================================================================================================
A_f1_oce O_f1 1 2700 2 rstas.nc EXPOUT
A128 O1u0 LAG=0
A128 O1u0 LAG=2700
P 0 P 2
LOCTRANS SCRIPR
AVERAGE
@@ -29,7 +29,7 @@
# Fields sent from Atmosphere to Runoff mapper
# =================================================================================================
A_f2_rnf R_f2 1 2700 3 rstas.nc EXPOUT
R128 RnfA LAG=0
R128 RnfA LAG=2700
P 0 P 0
LOCTRANS SCRIPR CONSERV
AVERAGE
@@ -52,7 +52,7 @@
# Fields sent from Ocean to Atmosphere
# =================================================================================================
O_f4_atm A_f4 1 2700 2 rstos.nc EXPOUT
O1t0 L128 LAG=0
O1t0 L128 LAG=2700
P 2 P 0
LOCTRANS SCRIPR
AVERAGE
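# Note on LAG: with a 2700 s coupling period, LAG=2700 shifts each put by one coupling period,
# so the first get of a run is expected to be served from the restart files (rstas.nc / rstos.nc)
# rather than from a matching put; this is the usual OASIS3-MCT lag mechanism, assuming each
# component puts its fields at the end of the timestep.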
@@ -15,6 +15,41 @@ from pyoasis import OASIS # pylint: disable=no-name-in-module
import numpy as np
import time
# ++++++++++++++++
# OASIS PUT
# ++++++++++++++++
def run_oasis_put():
logging.debug('ITAP_SEC: {} sending fields'.format(itap_sec))
logging.debug(str(out_coupling_fields.keys()))
for id, field in out_coupling_fields.items():
sigma=1
mu=1
# send dummy data (100 for runoff, random for other variables)
#TODO send dummy data read from oasis restart file
if id == "A_Runoff":
field_data[:]=1.4658e-06
field_data[:]=100
else:
field_data[:]=sigma * np.random.randn(local_size) + mu
field.put(itap_sec, field_data)
logging.debug('{} put -> {:.3e} {:.3e} {:.3e}'.format( id, np.min(field_data), np.mean(field_data), np.max(field_data)))
logging.debug('--------------------------------------\n')
# ++++++++++++++++
# OASIS GET
# ++++++++++++++++
def run_oasis_get():
logging.debug('ITAP_SEC: {} receiving fields'.format(itap_sec))
logging.debug(str(in_coupling_fields.keys()))
for id, field in in_coupling_fields.items():
field.get(itap_sec, field_data)
logging.debug('{} -> {:.3e} {:.3e} {:.3e}'.format( id, np.min(field_data), np.mean(field_data), np.max(field_data)))
logging.debug('--------------------------------------\n')
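# Note: the two helpers above take no arguments on purpose; they rely on module-level names
# (itap_sec, out_coupling_fields, in_coupling_fields, field_data, local_size) that are set up
# in the __main__ block below before the coupling loop calls them.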
if __name__ == '__main__':
### Read in YAML file
@@ -98,7 +133,7 @@ if __name__ == '__main__':
for v in coupling_out_vars:
out_coupling_fields[v] = pyoasis.Var(v, partition, OASIS.OUT)
logging.warning('var_id {}, added as OASIS_OUT var'.format(v))
logging.info('var_id {}, added as OASIS_OUT var'.format(v))
in_coupling_fields={}
@@ -108,19 +143,29 @@
logging.info('var_id {}, added as OASIS_IN var'.format(v))
# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# DEFINE THE SPEED OF THE COMPONENT
# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# If the number of processes is not defined in the YAML configuration (or is 0), use the communicator size
if model_nproc is None or model_nproc == 0:
model_nproc = npes
if mype == 0:
logging.info('WARNING: Model {} nproc was not defined in the YAML file. Using the communicator size instead ({} processes)'.format(model_name, model_nproc))
# Read the scalability properties of the component
arr = np.genfromtxt(model_scalability, delimiter=",", skip_header=1)
# TODO: Use npes or model_nproc?
idx_closest_npes = np.argmin(abs((arr[:,0] - model_nproc)))
sypd = arr[idx_closest_npes, 1]
max_sypd = max(arr[:,1])
n_ts_1SY = 365*24*3600/2700
sec_to_run_1year = 24*3600/arr[:,1]
sec_to_run_1ts = sec_to_run_1year / n_ts_1SY
sleep_time = sec_to_run_1ts[idx_closest_npes]
print("SYPD: ", sypd)
print("sleep_time", sleep_time)
num_ts_in_one_sy = 365*24*3600/2700
num_ts_executed = sypd * num_ts_in_one_sy
runtime = 24*3600 # 1 day of execution
timestep_length = runtime / num_ts_executed
if mype == 0:
print('Model {} is using {} processes and runs at {} SYPD. The timestep_length is {:.4f}.'.format(model_name, model_nproc, sypd, timestep_length))
logging.info('SYPD: {}'.format(sypd))
logging.info('timestep_length: {}'.format(timestep_length))
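# Worked example (illustrative numbers only): with a 2700 s coupling step, one simulated year
# has 365*24*3600 / 2700 = 11680 timesteps; a component measured at sypd = 10 would execute
# 10 * 11680 = 116800 timesteps during runtime = 86400 s of wallclock, so
# timestep_length = 86400 / 116800 ≈ 0.74 s of sleep per timestep.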
# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
@@ -138,48 +183,17 @@ if __name__ == '__main__':
field_data = pyoasis.asarray(np.full(local_size, -1.0))
for itap_sec in range(0, simulation_run_length_sec, simulation_coupling_interval):
# Call OASIS_GET
run_oasis_get()
# Simulate component computation (sleep)
time.sleep(timestep_length)
# Call OASIS_PUT
run_oasis_put()
# ++++++++++++++++
# OASIS PUT
# ++++++++++++++++
logging.debug('ITAP_SEC: {} sending fields'.format(itap_sec))
logging.debug(str(out_coupling_fields.keys()))
for id, field in out_coupling_fields.items():
sigma=1
mu=1
# send dummy data (100 for runoff, random for other variables)
#TODO send dummy data read from oasis restart file
if id == "A_Runoff":
field_data[:]=1.4658e-06
field_data[:]=100
else:
field_data[:]=sigma * np.random.randn(local_size) + mu
field.put(itap_sec, field_data)
logging.debug('{} put -> {:.3e} {:.3e} {:.3e}'.format( id, np.min(field_data), np.mean(field_data), np.max(field_data)))
logging.debug('--------------------------------------\n')
# ++++++++++++++++
# Simulate component timestep
# ++++++++++++++++
#sleep_time = max_sypd/sypd
time.sleep(sleep_time)
# ++++++++++++++++
# OASIS GET
# ++++++++++++++++
logging.debug('ITAP_SEC: {} receiving fields'.format(itap_sec))
logging.debug(str(in_coupling_fields.keys()))
for id, field in in_coupling_fields.items():
field.get(itap_sec, field_data)
logging.debug('{} -> {:.3e} {:.3e} {:.3e}'.format( id, np.min(field_data), np.mean(field_data), np.max(field_data)))
logging.debug('--------------------------------------\n')
# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# TERMINATION
# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
logging.debug('End of the program')
print('Model {} is finished successfully.'.format(model_name))
if mype == 0:
print('Model {} is finished successfully.'.format(model_name))
del comp