Due to a shift in policy, from 0900 GMT on Wednesday 14th July 2021, we will be disabling ssh access to the server for external users. External users who wish to continue to access code repositories on the server will need to switch to using https. This can be accomplished in the following way: 1) On the repo on GitLab, use the clone dialogue and select ‘Clone with HTTPS’ to get the address of the repo; 2) From within the checkout of your repo run: $ git remote set-url origin HTTPS_ADDRESS. Here, replace HTTPS_ADDRESS with the address you have just copied from GitLab. Pulls and pushes will now require you to enter a username and password rather than using an SSH key. If you would prefer not to enter a password each time, you might consider caching your login credentials.

Commit 8496bff0 authored by Modellers Operational's avatar Modellers Operational

Merge branch 'master' of gitlab.ecosystem-modelling.pml.ac.uk:pml-modelling/rose_fvcom_setup

parents 651b9788 e1da5554
......@@ -74,7 +74,7 @@ for this_fvcom, this_var in fvcom_cmems_names.items():
if len(cmems_file_list) > 1:
for this_file in cmems_file_list[1:]:
this_data_reader += reg_reader(this_file, [this_var[1]])
this_data_reader = reg_reader(this_file, [this_var[1]]) >> this_data_reader
aqua_prep.add_nests_regular(this_fvcom, this_data_reader, this_var[1], constrain_coordinates=True)
......
import multiprocessing
import numpy as np
import datetime as dt
import glob as gb
import sys
from pathlib import Path
import PyFVCOM as pf
from PyFVCOM.utilities.time import date_range
cmems_data_dir = sys.argv[1]
start_date = dt.datetime.strptime(sys.argv[2], '%Y-%m-%d')
grid = sys.argv[3]
donor_filepath = sys.argv[4]
"""
cmems_data_dir = '/data/sthenno1/scratch/modop/Data/CMEMS'
start_date = dt.datetime(2019,1,15)
grid = 'tamar_v2'
"""
cmems_time_res = 'hi'
fvcom_cmems_names = {'salinity':['SAL', 'vosaline'], 'temp':['TEM', 'votemper'],
'v':['CUR', 'vomecrty'], 'u':['CUR', 'vozocrtx'],
'zeta':['SSH', 'sossheig']}
# Modify a donor restart file.
restart = pf.preproc.Restart(donor_filepath,
variables=['siglay', 'siglev'])
# and alter the time variable
restart.time.datetime = np.asarray([start_date])
ref_date = dt.datetime(1858,11,17,0,0,0)
restart.time.time = np.asarray([(start_date - ref_date).days])
restart.time.Itime = np.asarray([(start_date - ref_date).days])
restart.time.Times = np.asarray(['{}T00:00:00.000000'.format(start_date.strftime('%Y-%m-%d'))])
restart.replaced.append('time')
restart.replaced.append('Itime')
restart.replaced.append('Times')
# We need to bracket the restart data in time with CMEMS data to ensure it interpolates properly.
for this_fvcom, this_var in fvcom_cmems_names.items():
cmems_file_list = []
offset = dt.timedelta(days=1)
for this_date_dt in date_range(start_date - offset, start_date + offset):
this_date = this_date_dt.strftime('%Y%m%d')
if this_var[0] == 'SSH':
poss_files = gb.glob('{}/*{}*{}*/*{}.nc'.format(cmems_data_dir, 'hi', this_var[0], this_date))
else:
poss_files = gb.glob('{}/*{}*{}*/*{}.nc'.format(cmems_data_dir, cmems_time_res, this_var[0], this_date))
# Handle that sometimes theres multiple files for one day from different forecast runs
if len(poss_files) > 1:
chosen_file = poss_files[0]
for this_file in poss_files[1:]:
if this_file > chosen_file:
chosen_file = this_file
cmems_file_list.append(chosen_file)
elif len(poss_files) == 1:
cmems_file_list.append(poss_files[0])
if this_var[0] =='SSH':
reg_reader = pf.preproc.Regular2DReader
else:
reg_reader = pf.preproc.RegularReader
this_data_reader = reg_reader(cmems_file_list[0], [this_var[1]])
if len(cmems_file_list) > 1:
for this_file in cmems_file_list[1:]:
this_data_reader = reg_reader(this_file, [this_var[1]]) >> this_data_reader
# Interpolate onto the FVCOM grid.
if this_fvcom in ['u', 'v']:
this_mode = 'elements'
elif this_fvcom == 'zeta':
this_mode = 'surface'
else:
this_mode = 'nodes'
restart.replace_variable_with_regular(this_fvcom, this_var[1], this_data_reader, constrain_coordinates=True, mode=this_mode)
# replace Times as need to be a 26 character array
restart.time.Times = np.asarray(list(restart.time.Times[0]))[np.newaxis,:]
restart.write_restart('{}_restart_0001.nc'.format(grid))
[command]
default = python3 make_restart.py ${CMEMS_DATA_DIR} ${START_DAY} ${GRID_NAME} ${DONOR_RESTART_FILE_PATH}; cp ${GRID_NAME}_restart_0001.nc /${REMOTE_TRANSFER_DIR}/
[command]
default = cp /pml${REMOTE_TRANSFER_DIR}/${GRID_NAME}_restart_0001.nc ${ROSE_DATAC};
[command]
default = log_file=${ROSE_TASK_LOG_ROOT}; log_dir=${log_file::${#log_dir}-4}
cd ${log_dir}; cd ../../run_fvcom/01/;
nanlines=$(grep -w "NaN" job.out| wc -l");
if [ ${nanlines} -gt 0 ]; then exit 1 fi
import matplotlib as mpl
mpl.use('Agg')
import sys
import multiprocessing
import numpy as np
from cmocean import cm
import PyFVCOM as pf
import pmltools as pt
labels = {'q2': 'Turbulent kinetic energy $(m^{2}s^{-2})$',
'l': 'Turbulent macroscale $(m^{3}s^{-2})$',
'q2l': 'Turbulent kinetic\nenergy x turblent\nmacroscale ($cm^{3}s^{-2}$)',
'tke': 'Turbulent kinetic energy $(m^{2}s^{-2})$',
'viscofh': 'Horizontal Turbulent Eddy Viscosity $(m^{2}s^{-1})$',
'teps': 'Turbulent kinetic\nenergy x turblent\nmacroscale ($cm^{3}s^{-2}$)',
'tauc': 'Bed shear stress $(m^{2}s^{-2})$',
'temp': 'Temperature ($\degree C$)',
'salinity': 'Salinity (PSU)',
'zeta': 'Surface elevation (m)',
'uv': 'Speed $(ms^{-1})$',
'uava': 'Depth averaged speed $(ms^{-1})$',
'uvanomaly': 'Speed anomaly $(ms^{-1})$',
'direction': 'Direction $(\degree)$',
'O3_c': 'Carbonate total dissolved\ninorganic carbon $(mmol C/m^3)$',
'O3_pH': 'Carbonate pH',
'O3_TA': 'Total alkalinity $(umol/kg)$',
'O3_fair': 'Carbonate air-sea flux of $CO_{2} (mmol C/m^{2}/d)$',
'volume': 'Node-based control water column volume $(m^{3})$'}
def plot_var(idx):
plot = pf.plot.Plotter(fvcom, figsize=(23, 18), cmap=cmap, cb_label=label, extend=extension, res=None)
plot.plot_field(np.squeeze(getattr(fvcom.data, var))[idx, level, :])
plot.tripcolor_plot.set_clim(clim[0], clim[1])
plot.axes.set_title(fvcom.time.Times[idx][:-7].replace('T', ' '))
suffix = ''
plot.figure.savefig('{}_{:04d}.png'.format(var, idx + 1),
bbox_inches='tight',
pad_inches=0.2,
dpi=120)
plot.close()
fname = sys.argv[1]
var = sys.argv[2]
clim = [float(sys.argv[3]), float(sys.argv[4])]
cmap = pt.plotting.pmlcmaps(var)
pool_size = 4
fvcom = pf.read.FileReader(fname, [var])
label = labels[var]
extension = pt.plotting.colourbar_extension(*clim, getattr(fvcom.data, var).min(), getattr(fvcom.data, var).max())
level = 0
time_indices = range(fvcom.dims.time)
# Launch the parallel plotting and then close the pool ready for the
# next variable.
pool = multiprocessing.Pool(pool_size)
pool.map(plot_var, time_indices)
pool.close()
[command]
default = today_output=/${ARCHIVE_DIR}/${START_DAY}/${GRID}_0001.nc;
python3 plot_var.py ${today_output} temp 8 18; python3 plot_var.py ${today_output} salinity 31 36;
mkdir /${PLOT_DIR}/${START_DAY}/; mv *.png /${PLOT_DIR}/${START_DAY}/
[command]
default = dst=/pml${ARCHIVE_DIR}today;
default = src=${ROSE_DATAC}/output/${GRID_NAME}_0001.nc;
dst=/pml${ARCHIVE_DIR}today;
rm /pml${ARCHIVE_DIR}today/*;
ssh ceto6 -t "rsync -aph --no-o --no-g $src $dst";
......@@ -8,7 +8,7 @@ MAX_NODES=10
FORECAST=True
REMOTE_USER='modop'
INITIAL_START_DATE='2018-11-28T00:00:00Z'
INITIAL_START_DATE='2019-01-31T00:00:00Z'
FINAL_CYCLE_POINT='NONE'
RUNDAYS=1
MAIL_TO='mbe@pml.ac.uk'
......@@ -17,6 +17,7 @@ MAIL_TO='mbe@pml.ac.uk'
GRID_NAME='tamar_v2'
COMMON_FILES_PATH='/users/modellers/modop/Models/FVCOM_tamar_common/'
ARCHIVE_DIR='data/sthenno1/scratch/modop/Model/FVCOM_tamar/output'
PLOT_DIR='data/sthenno1/scratch/modop/Model/FVCOM_tamar/plots'
TEMP_ACTIVE='T'
SALT_ACTIVE='T'
......@@ -29,7 +30,7 @@ AIR_PRESSURE_ON='T'
## WRF suite settings if any of the above are on
WRF_RUN_SUITE='wrf'
WRF_FORECAST_FILE_DIR='/gpfs1/users/modellers/modop/Models/WRF_transfer_dir/'
WRF_ARCHIVE_DIR='data/sthenno1/backup/pica/models/WRF/wrf-preprocess-gfs/output'
WRF_ARCHIVE_DIR='/data/sthenno1/scratch/modop/Model/WRF/output'
WRF_ARCHIVE_DATEFMT='%Y%m%d'
......@@ -48,6 +49,8 @@ BOUNDARY_FORCING='CMEMS'
HARMONICS_FILE_PATH='/users/modellers/modop/Models/FVCOM_tamar_harmonics/tamar_2006_harmonics.nc'
## Required for CMEMS
CMEMS_DATA_DIR='/data/sthenno1/scratch/modop/Data/CMEMS'
## Required for CMEMS warm start
DONOR_RESTART_FILE_PATH='/users/modellers/modop/Models/FVCOM_tamar_restart/tamar_v2_donor_restart.nc'
## Required for NEST_FILE
NEST_RUN_SUITE='fvcom-rosa'
# The time resolution of the nest output, as divisions of 1 day (i.e. 24 would be hourly, 48 every half hour)
......@@ -70,6 +73,6 @@ VELOCITY_OUT='T'
SALT_TEMP_OUT='T'
TURBULENCE_OUT='T'
WIND_OUT='F'
SURF_HEAT_RAIN_OUT='F'
HEAT_RAIN_OUT='F'
......@@ -14,7 +14,11 @@
[[[R1]]]
graph = """
copy_common_to_remote => softlink_forcing_remote
write_run_namelist => hot_cold_start => run_fvcom
{% if COLD_START and BOUNDARY_FORCING == 'CMEMS' %}
write_run_namelist => adjust_namelist & generate_CMEMS_start => mv_start_file => run_fvcom
{% elif COLD_START and BOUNDARY_FORCING == 'NEST_FILE' %}
write_run_namelist => adjust_namelist & generate_nest_start => mv_start_file => run_fvcom
{% endif %}
"""
[[[P1D]]]
graph = """
......@@ -54,15 +58,16 @@
start_cycle => get_river_files => write_run_namelist => write_river_number => run_fvcom
{% endif %}
{%- if SEDIMENT %}
{% if SEDIMENT %}
write_cstms_namelist => run_fvcom
{%- endif %}
{% endif %}
{% if FORECAST %}
write_run_namelist => run_fvcom => transfer_data_today => transfer_data
{% else %}
write_run_namelist => run_fvcom => transfer_data
{% endif %}
run_fvcom => nan_check & plot_sst
"""
......@@ -93,8 +98,10 @@
RIVER_MODEL_PATH={{RIVER_MODEL_PATH}}
REMOTE_TRANSFER_DIR={{REMOTE_TRANSFER_DIR}}
ARCHIVE_DIR={{ARCHIVE_DIR}}
PLOT_DIR={{PLOT_DIR}}
HARMONICS_FILE_PATH={{HARMONICS_FILE_PATH}}
DONOR_RESTART_FILE_PATH={{DONOR_RESTART_FILE_PATH}}
CMEMS_DATA_DIR={{CMEMS_DATA_DIR}}
NEST_RUN_SUITE={{NEST_RUN_SUITE}}
NEST_INTERVAL={{NEST_INTERVAL}}
......@@ -135,20 +142,20 @@
{% endif %}
{%- if BOUNDARY_FORCING == 'TIDAL' %}
{% if BOUNDARY_FORCING == 'TIDAL' %}
ELEV_FORCE='T'
{%- else %}
{% else %}
ELEV_FORCE='F'
{%- endif %}
{% endif %}
{%- if BOUNDARY_FORCING in ['CMEMS', 'NEST_FILE'] %}
{% if BOUNDARY_FORCING in ['CMEMS', 'NEST_FILE'] %}
NEST_ON='T'
NEST_TYPE='3'
{%- else %}
{% else %}
NEST_ON='T'
NEST_TYPE='3'
{%- endif %}
{% endif %}
[[slurm_job]]
......@@ -161,11 +168,11 @@
--ntasks-per-node=20
--threads-per-core=1
--time=24:00:00
{%- if USE_CETO %}
{% if USE_CETO %}
[[[remote]]]
host = login.ceto.npm.ac.uk
owner = {{REMOTE_USER}}
{%- endif %}
{% endif %}
[[slurm_job_1]]
inherit = slurm_job
[[[directives]]]
......@@ -174,23 +181,28 @@
--threads-per-core=1
--time=24:00:00
[[remote_job]]
{%- if USE_CETO %}
{% if USE_CETO %}
[[[remote]]]
host = login.ceto.npm.ac.uk
owner = {{REMOTE_USER}}
{%- endif %}
{% endif %}
[[write_run_namelist]]
inherit = slurm_job_1
{%- if SEDIMENT %}
{% if SEDIMENT %}
[[write_run_cstms]]
{%- endif %}
{% endif %}
[[check_output_dir_len]]
script = """
echo "Need to check suite name doesn't take the fvcom output dir over the max length"
"""
[[copy_common_to_remote]]
inherit = remote_job
[[softlink_forcing]]
[[softlink_forcing_remote]]
[[softlink_forcing]]
[[softlink_forcing_remote]]
inherit = slurm_job_1
[[wrf_suite_trigger]]
......@@ -263,20 +275,19 @@
[[run_fvcom]]
inherit = slurm_job
[[hot_cold_start]]
[[adjust_namelist]]
inherit = remote_job
{%- if COLD_START %}
script = """
sed -i "s|.*STARTUP_TYPE.*|STARTUP_TYPE = 'coldstart',|" ${ROSE_DATAC}/${GRID_NAME}_run.nml
sed -i "s|.*STARTUP_FILE.*|STARTUP_FILE = 'none',|" ${ROSE_DATAC}/${GRID_NAME}_run.nml
sed -i "s|.*STARTUP_UV_TYPE.*|STARTUP_UV_TYPE = 'default',|" ${ROSE_DATAC}/${GRID_NAME}_run.nml
sed -i "s|.*STARTUP_TURB_TYPE.*|STARTUP_TURB_TYPE = 'default',|" ${ROSE_DATAC}/${GRID_NAME}_run.nml
sed -i "s|.*STARTUP_TS_TYPE.*|STARTUP_TS_TYPE = 'constant',|" ${ROSE_DATAC}/${GRID_NAME}_run.nml
{%- if SEDIMENT %}
sed -i "s|.*STARTUP_TS_TYPE.*|STARTUP_TS_TYPE = 'set values',|" ${ROSE_DATAC}/${GRID_NAME}_run.nml
{% if SEDIMENT %}
sed -i "s|.*SED_HOT_START.*|SED_HOT_START = F|" ${ROSE_DATAC}/cstms_sediment.inp
{%- endif %}
{% endif %}
"""
{%- endif %}
[[generate_CMEMS_start]]
[[mv_start_file]]
inherit = remote_job
[[transfer_data]]
inherit = slurm_job_1
[[transfer_data_today]]
......@@ -290,3 +301,6 @@
script = """
ln -s ${ROSE_DATACP1D}/output/${GRID_NAME}_restart_0001.nc ${ROSE_DATAC}/${GRID_NAME}_restart_0001.nc
"""
[[nan_check]]
inherit = remote_job
[[plot_sst]]
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment