Commit 20b16551 authored by Modellers Operational

Initial commit of regridding suite

from mpi4py import MPI
import numpy as np
import sys
import PyFVCOM as pf
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()
fvcom_file = sys.argv[1]
lower_left_ll = np.asarray(sys.argv[2].split(','), dtype=float)
upper_right_ll = np.asarray(sys.argv[3].split(','), dtype=float)
grid_res = float(sys.argv[4])  # horizontal resolution in degrees
depth_layers = np.asarray(sys.argv[5].split(','), dtype=float)
var_list_raw = sys.argv[6].split(',')
varmatch = {'temp':'nodes', 'salinity':'nodes', 'u':'elements', 'v':'elements', 'zeta':'surface'}
varlist = {}
for this_var in var_list_raw:
    varlist[this_var] = varmatch[this_var]
# get the fvcom points within the extended grid and land mask the grid mesh
if rank == 0:
    # Load data, get time indices.
    fvcom = pf.read.FileReader(fvcom_file)
    nt = fvcom.dims.time
    start, stop = 0, nt
    global_time_indices = np.array_split(np.arange(start, stop), size)
else:
    global_time_indices = None
time_indices = comm.scatter(global_time_indices, root=0)
interped_data = {}
worker = pf.interpolate.MPIRegularInterpolateWorker(fvcom_file, time_indices, verbose=True)
worker.InitialiseGrid(lower_left_ll, upper_right_ll, grid_res, depth_layers, time_varying_depth=True)
for var, var_mode in varlist.items():
    interped_data[var] = worker.InterpolateRegular(var, mode=var_mode)
all_interped_data = comm.gather(interped_data, root=0)
# Resort the gathered data by timestep and write it out from the root process.
if rank == 0:
    np.save('test_dict.npy', all_interped_data)
    # Each rank returns a dict of arrays covering its own slice of the time axis, so
    # concatenate the per-rank results (gathered in rank order) along the time axis
    # and mask any invalid (land/out-of-domain) points.
    collected_interp_data = {}
    for var in varlist:
        collected_interp_data[var] = np.ma.masked_invalid(
                np.concatenate([this_rank[var] for this_rank in all_interped_data], axis=0))

    # Write to CMEMS format.
    output_file = 'output.nc'
    fvcom = pf.read.FileReader(fvcom_file)
    dims = {'time': len(fvcom.time.datetime), 'depth': len(worker.regular_grid.dep_lays),
            'lon': len(worker.regular_grid.lons), 'lat': len(worker.regular_grid.lats)}
    all_attributes = {
            'lon': {'standard_name': 'longitude', 'units': 'degrees_east'},
            'lat': {'standard_name': 'latitude', 'units': 'degrees_north'},
            'depth': {'standard_name': 'depth', 'units': 'm', 'positive': 'down'},
            'temp': {'standard_name': 'sea_water_potential_temperature', 'units': 'C',
                     '_FillValue': '-32768s', 'missing_value': '-32768s'},
            'salinity': {'standard_name': 'sea_water_salinity', 'units': 'psu',
                         '_FillValue': '-32768s', 'missing_value': '-32768s'},
            'u': {'standard_name': 'eastward_sea_water_velocity', 'units': 'm s-1',
                  '_FillValue': '-32768s', 'missing_value': '-32768s'},
            'v': {'standard_name': 'northward_sea_water_velocity', 'units': 'm s-1',
                  '_FillValue': '-32768s', 'missing_value': '-32768s'},
            'zeta': {'standard_name': 'sea_surface_height_above_geoid', 'units': 'm',
                     '_FillValue': '-32768s', 'missing_value': '-32768s'}}
    global_attributes = {'type': 'OPERATIONAL MODEL REGULAR GRID OUTPUT',
                         'title': 'Regularly gridded data interpolated from FVCOM output',
                         'history': 'File created using PyFVCOM',
                         'filename': str(output_file),
                         'Conventions': 'CF-1.0'}
    with pf.preproc.WriteForcing(output_file, dims, global_attributes=global_attributes,
                                 clobber=True, format='NETCDF4') as outfile:
        # Add the coordinate variables.
        outfile.add_variable('lon', worker.regular_grid.lons, ['lon'], attributes=all_attributes['lon'])
        outfile.add_variable('lat', worker.regular_grid.lats, ['lat'], attributes=all_attributes['lat'])
        outfile.add_variable('depth', worker.regular_grid.dep_lays, ['depth'], attributes=all_attributes['depth'])
        # Add the interpolated variables; surface fields have no depth dimension.
        for this_var, this_mode in varlist.items():
            if this_mode == 'surface':
                outfile.add_variable(this_var, collected_interp_data[this_var], ['time', 'lon', 'lat'],
                                     attributes=all_attributes[this_var])
            else:
                outfile.add_variable(this_var, collected_interp_data[this_var], ['time', 'lon', 'lat', 'depth'],
                                     attributes=all_attributes[this_var])
        outfile.write_fvcom_time(fvcom.time.datetime)
else:
    print('Rank {} process finished'.format(rank))
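
As a side note, here is a minimal sketch (not part of the script) of how the test_dict.npy debug dump written by rank 0 could be inspected offline, assuming the structure produced by comm.gather above: a list with one {variable: array} dict per rank.

import numpy as np

# allow_pickle is needed because the dump holds a list of dicts, not a plain array.
all_interped_data = np.load('test_dict.npy', allow_pickle=True)
for rank_number, rank_data in enumerate(all_interped_data):
    for var, values in rank_data.items():
        print(rank_number, var, values.shape)
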
[command]
default = module load mpi/openmpi-x86_64;
          mpiexec -n ${NPROCS} python3 grid_interp.py "${FVCOM_FILE}" "${GRID_LOWER_LEFT}" "${GRID_UPPER_RIGHT}" "${HORIZ_RES}" "${DEPTH_LAYERS}" "${VARS}";
          mkdir -p ${OUTPUT_DIR}/${TODAY_DATESTR};
          mv output.nc ${OUTPUT_DIR}/${TODAY_DATESTR}/${OUTPUT_FILENAME}
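
For illustration, a minimal sketch (not part of the suite) of how the six positional arguments passed by the command above are parsed at the top of grid_interp.py, using the values from the [jinja2:suite.rc] section below; the FVCOM file path is hypothetical and only shows the pattern built by the suite's FVCOM_FILE variable for a 2019-02-28 cycle.

import numpy as np

# Hypothetical argv as the [command] above would build it.
argv = ['grid_interp.py',
        '/data/sthenno1/scratch/modop/Model/FVCOM_tamar/output/2019-02-28/tamar_v0_0001.nc',
        '-4.3, 50.24',             # GRID_LOWER_LEFT
        '-4.05, 50.55',            # GRID_UPPER_RIGHT
        '0.001',                   # HORIZ_RES
        '0,2,5,10,15,25,40,60',    # DEPTH_LAYERS
        'temp,salinity,u,v,zeta']  # VARS

lower_left_ll = np.asarray(argv[2].split(','), dtype=float)   # array([-4.3, 50.24])
upper_right_ll = np.asarray(argv[3].split(','), dtype=float)  # array([-4.05, 50.55])
grid_res = float(argv[4])                                     # 0.001 degrees
depth_layers = np.asarray(argv[5].split(','), dtype=float)    # eight depth levels in metres
var_list_raw = argv[6].split(',')                             # ['temp', 'salinity', 'u', 'v', 'zeta']
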
[jinja2:suite.rc]
## Run properties
INITIAL_START_DATE='2019-02-28T00:00:00Z'
FINAL_CYCLE_POINT='NONE'
MAIL_TO='mbe@pml.ac.uk'
NPROCS=12
TRIGGER_SUITE='fvcom_tamar'
TRIGGER_TASK='transfer_data'
# MODEL SETUP
FVCOM_GRID_NAME='tamar_v0'
FVCOM_OUTPUT_DIR='/data/sthenno1/scratch/modop/Model/FVCOM_tamar/output/'
GRID_LOWER_LEFT='-4.3, 50.24'
GRID_UPPER_RIGHT='-4.05, 50.55'
HORIZ_RES=0.001
DEPTH_LAYERS='0,2,5,10,15,25,40,60'
VARS='temp,salinity,u,v,zeta'
OUTPUT_DIR='/data/sthenno1/scratch/modop/Model/FVCOM_tamar/estuary_output/'
OUTPUT_FILENAME='test_out.nc'
access-list=mbe
owner=mbe
project=test_suite
sub-project=A
title=hmm_title
#!jinja2
[cylc]
    UTC mode = True # Ignore DST
    abort if any task fails = False
[scheduling]
    initial cycle point = {{INITIAL_START_DATE}}
{%- if FINAL_CYCLE_POINT not in ['NONE','None'] %}
    final cycle point = {{FINAL_CYCLE_POINT}}
{%- endif %}
    [[special tasks]]
        clock-trigger = start_cycle(PT0M)
    [[dependencies]]
        [[[P1D]]]
            graph = """
                regrid_domain[-P1D]:finish => start_cycle => fvcom_suite_trigger <{{TRIGGER_SUITE}}::{{TRIGGER_TASK}}> => regrid_domain
            """
[runtime]
    [[root]]
        env-script = eval $(rose task-env --cycle-offset=P1D)
        script = rose task-run --verbose
        [[[job]]]
            execution time limit = PT3H
        [[[events]]]
            mail events = submission timeout, execution timeout, failed
            mail to = {{MAIL_TO}}
            submission timeout = P1D
        [[[environment]]]
            NPROCS={{NPROCS}}
            FVCOM_GRID_NAME={{FVCOM_GRID_NAME}}
            GRID_LOWER_LEFT={{GRID_LOWER_LEFT}}
            GRID_UPPER_RIGHT={{GRID_UPPER_RIGHT}}
            HORIZ_RES={{HORIZ_RES}}
            DEPTH_LAYERS={{DEPTH_LAYERS}}
            VARS={{VARS}}
            FORECAST_DAY=$(rose date --print-format='%Y-%m-%d' $CYLC_TASK_CYCLE_POINT)
            FVCOM_FILE={{FVCOM_OUTPUT_DIR}}/${FORECAST_DAY}/${FVCOM_GRID_NAME}_0001.nc
    [[start_cycle]]
        script = """
        """
    [[fvcom_suite_trigger]]
        script = ""
        [[[suite state polling]]]
            interval = PT10M
            max-polls = 1440
        [[[job]]]
            execution retry delays = 3*PT15M
    [[regrid_domain]]