...
 
......@@ -994,7 +994,7 @@ class Domain(object):
return isintriangle(tri_x, tri_y, x, y)
def in_domain(self, x, y, z=None, z_meth='nearest_neighbour', cartesian=False):
def in_domain(self, x, y, z=None, z_meth='barycentric', cartesian=False, zeta_timestep=None):
"""
Identify if point or array of points (x,y) is within the domain
......@@ -1028,7 +1028,12 @@ class Domain(object):
finder = tri.get_trifinder()
in_domain_xy = finder(x,y) != -1
if z is not None:
if z is not None:
if zeta_timestep is None:
grid_h = self.grid.h
else:
grid_h = self.grid.h + self.data.zeta[zeta_timestep, :]
if z_meth == 'nearest_neighbour':
node_dist = self.closest_node(np.asarray([x,y]), cartesian=cartesian, return_dists=True)
......@@ -1039,6 +1044,12 @@ class Domain(object):
in_domain_xy[node_check] = z[node_check] <= self.grid.h[node_dist[0][node_check]]
in_domain_xy[ele_check] = z[ele_check] <= self.grid.h_center[ele_dist[0][ele_check]]
elif z_meth == 'barycentric':
xy_red = np.asarray([x,y]).T[in_domain_xy]
interped_h = interpolate_node_barycentric(xy_red, grid_h, grid_x, grid_y, self.grid.triangles)
in_depth = z[in_domain_xy] <= interped_h
in_domain_xy[in_domain_xy] = in_depth
else:
print('Other interpolation methods not implemented yet')
return None
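# Illustrative sketch of the extended in_domain call above. `domain` stands for an already-loaded
# PyFVCOM object (e.g. a FileReader) with grid data and, if zeta_timestep is used, a zeta time
# series in .data; the coordinates and depths below are made up for illustration.
#
# test_x = np.array([-4.5, -4.2])
# test_y = np.array([50.1, 50.3])
# test_z = np.array([5.0, 250.0])  # depths below the surface, positive down
# inside = domain.in_domain(test_x, test_y, z=test_z, z_meth='barycentric', zeta_timestep=0)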
......@@ -3539,6 +3550,14 @@ def write_fvcom_mesh(triangles, nodes, x, y, z, mesh, extra_depth=None):
for node in zip(x, y, z):
f.write('{:.6f} {:.6f} {:.6f}\n'.format(*node))
def write_obc_file(obc_nodes, obc_types, obc_file):
number_of_nodes = len(obc_nodes)
with open(str(obc_file), 'w') as f:
f.write('OBC Node Number = {:d}\n'.format(number_of_nodes))
for count, node, obc_type in zip(np.arange(number_of_nodes) + 1, obc_nodes, obc_types):
f.write('{} {:d} {:d}\n'.format(count, int(node) + 1, int(obc_type)))
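# Illustrative write_obc_file call: three zero-based open boundary node indices, all written with
# open boundary type 1; the output file name is arbitrary.
example_obc_nodes = [10, 11, 12]
example_obc_types = [1, 1, 1]
write_obc_file(example_obc_nodes, example_obc_types, 'example_casename_obc.dat')
# The file then contains (node indices converted to FVCOM's one-based numbering):
# OBC Node Number = 3
# 1 11 1
# 2 12 1
# 3 13 1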
def write_sms_cst(obc, file, sort=False):
"""
......@@ -6190,3 +6209,133 @@ class GraphFVCOMdepth(Graph):
return np.asarray(self.node_index[np.asarray(red_node_list)])
def get_barycentric_coords(x, y, x_tri, y_tri):
""" Get barycentric coordinates.
Compute and return barycentric coordinates for the point (x,y) within the
triangle defined by x/y coordinates stored in the arrays x_tri and y_tri.
Code by James Clark from PyLAG
Parameters:
-----------
x : float
x-position.
y : float
y-position.
x_tri : C array, float
Triangle x coordinates.
y_tri : C array, float
Triangle y coordinates.
Returns:
--------
phi : C array, float
Barycentric coordinates.
"""
a1 = x_tri[1] - x_tri[0]
a2 = y_tri[2] - y_tri[0]
a3 = y_tri[1] - y_tri[0]
a4 = x_tri[2] - x_tri[0]
# Determinant
det = a1 * a2 - a3 * a4
phi = [0,0,0]
# Transformation to barycentric coordinates
phi[2] = (a1*(y - y_tri[0]) - a3*(x - x_tri[0]))/det
phi[1] = (a2*(x - x_tri[2]) - a4*(y - y_tri[2]))/det
phi[0] = 1.0 - phi[1] - phi[2]
return phi
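# Quick self-contained check of get_barycentric_coords: for a point inside the triangle the three
# coordinates sum to one and reconstruct the point as a weighted sum of the vertices (the triangle
# and point below are made up for illustration).
example_tri_x = np.array([0.0, 2.0, 0.0])
example_tri_y = np.array([0.0, 0.0, 2.0])
example_phi = get_barycentric_coords(0.5, 0.5, example_tri_x, example_tri_y)
assert np.isclose(np.sum(example_phi), 1.0)
assert np.isclose(np.dot(example_phi, example_tri_x), 0.5)
assert np.isclose(np.dot(example_phi, example_tri_y), 0.5)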
def _get_ele_nodes(positions, x, y, triangles):
tri = Triangulation(x, y, triangles)
finder = tri.get_trifinder()
eles = finder(positions[:,0], positions[:,1])
return triangles[eles,:]
def _interpolate_within_element(var, phi):
return var[0] * phi[0] + var[1] * phi[1] + var[2] * phi[2]
def interpolate_node_barycentric(positions, data, x, y, triangles):
"""
Interpolate linearly from node values to positions using barycentric coordinates.
This is to mimic the functions in PyLAG
"""
position_ele_nodes = _get_ele_nodes(positions, x, y, triangles)
interped_data = []
for this_pos, this_nodes in zip(positions, position_ele_nodes):
this_phi = get_barycentric_coords(this_pos[0], this_pos[1], x[this_nodes], y[this_nodes])
interped_data.append(_interpolate_within_element(data[this_nodes], this_phi))
return np.asarray(interped_data)
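# Self-contained sketch of interpolate_node_barycentric on a two-triangle unit square. The node
# values follow f = x + y, so the linear (barycentric) interpolation returns exactly x + y at each
# requested position (all values are made up for illustration).
square_x = np.array([0.0, 1.0, 1.0, 0.0])
square_y = np.array([0.0, 0.0, 1.0, 1.0])
square_vals = square_x + square_y
square_tris = np.array([[0, 1, 2], [0, 2, 3]])
query_points = np.array([[0.6, 0.2], [0.3, 0.7]])
interped_example = interpolate_node_barycentric(query_points, square_vals, square_x, square_y, square_tris)
assert np.allclose(interped_example, query_points.sum(axis=1))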
def generalised_barycentric(x, y, x_poly, y_poly, epsilon=0.001):
""" Get generalised barycentric coordinates.
Compute and return generalised barycentric weights for the point (x,y) within the
polygon defined by x/y coordinates stored in the arrays x_poly and y_poly.
From Generalized Barycentric Coordinates on Irregular Polygons, Meyer et al., Journal of Graphics Tools, 2002
Parameters:
-----------
x : float
x-position.
y : float
y-position.
x_poly : array, float
Polygon x coordinates.
y_poly : array, float
Polygon y coordinates.
epsilon : optional, float
Epsilon for determining if the point lies on a polygon edge.
Returns:
--------
wj : array, float
Normalised generalised barycentric weights, one per polygon vertex.
"""
p = np.asarray([x, y])
# check_on_line is currently a placeholder (see below); points sitting on a polygon edge are not
# handled by the cotangent formula.
on_line = check_on_line(x, y, x_poly, y_poly, epsilon=epsilon)
if not on_line:
weightSum = 0
wj = []
for ind_qj, qj in enumerate(zip(x_poly, y_poly)):
prev_ind = np.mod(ind_qj - 1, len(x_poly))
next_ind = np.mod(ind_qj + 1, len(x_poly))
qprev = np.asarray([x_poly[prev_ind], y_poly[prev_ind]])
qnext = np.asarray([x_poly[next_ind], y_poly[next_ind]])
# Meyer et al. weight: (cot(gamma_j) + cot(delta_j)) / |p - q_j|^2
wj.append((_cotangent(p, qj, qprev) + _cotangent(p, qj, qnext)) / ((p[0] - qj[0])**2 + (p[1] - qj[1])**2))
weightSum += wj[-1]
# Normalise the weights so they sum to one
wj = np.asarray(wj) / weightSum
return wj
def _cotangent(p, q1, q2):
# Cotangent of the angle at q1 between the vectors q1->p and q1->q2 (dot over unsigned cross).
vec_1 = np.asarray([q1[0] - p[0], q1[1] - p[1]])
vec_2 = np.asarray([q1[0] - q2[0], q1[1] - q2[1]])
return np.dot(vec_1, vec_2) / np.abs(np.cross(vec_1, vec_2))
def check_on_line(x, y, x_line, y_line, epsilon=0.001):
# Placeholder: a proper point-on-edge test (within epsilon) is not implemented yet, so assume the
# point does not lie on a polygon edge.
on_line = False
return on_line
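# Self-contained sketch of generalised_barycentric on a unit square: by symmetry the four weights at
# the centre are all 0.25, and after normalisation they always sum to one (the polygon and point are
# made up for illustration).
unit_square_x = np.array([0.0, 1.0, 1.0, 0.0])
unit_square_y = np.array([0.0, 0.0, 1.0, 1.0])
centre_weights = generalised_barycentric(0.5, 0.5, unit_square_x, unit_square_y)
assert np.allclose(centre_weights, 0.25)
assert np.isclose(centre_weights.sum(), 1.0)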
import numpy as np
import scipy.interpolate as si
import shapely.geometry as sg
def read_smesh_polygons(boundary_file='boundary_poly.txt', islands_file='island_polys.txt'):
boundary_polygon = []
......
......@@ -1310,6 +1310,8 @@ def cfl(fvcom, timestep, depth_averaged=False, verbose=False, **kwargs):
u = getattr(fvcom.data, uname)
v = getattr(fvcom.data, vname)
z = getattr(fvcom.data, 'zeta')
spd = np.sqrt(u**2 + v**2)
element_sizes = element_side_lengths(fvcom.grid.triangles, fvcom.grid.x, fvcom.grid.y)
minimum_element_size = np.min(element_sizes, axis=1)
......@@ -1323,8 +1325,9 @@ def cfl(fvcom, timestep, depth_averaged=False, verbose=False, **kwargs):
# This is based on equation 6.1 on pg 33 of the MIKE hydrodynamic module manual (modified for using a single
# characteristic length rather than deltaX/deltaY)
cfl = (2 * np.sqrt(g * element_water_depth) + u + v) * (timestep / minimum_element_size)
cfl = (2 * np.sqrt(g * element_water_depth) + np.abs(u) + np.abs(v)) * (timestep / minimum_element_size)
cfl_2 = (2 * np.sqrt(g * element_water_depth) + spd) * (timestep / minimum_element_size)
cfl_3 = spd * (timestep / minimum_element_size)
if verbose:
val = np.nanmax(cfl)
ind = np.unravel_index(np.nanargmax(cfl), cfl.shape)
......@@ -1344,7 +1347,7 @@ def cfl(fvcom, timestep, depth_averaged=False, verbose=False, **kwargs):
fvcom.grid.lonc[element_ind], fvcom.grid.latc[element_ind],
layer_ind, fvcom.time.datetime[time_ind].strftime('%Y-%m-%d %H:%M:%S')))
return cfl
return cfl, cfl_2, cfl_3
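# Worked single-element sketch of the CFL expressions above (made-up numbers): 20 m of water, a
# 100 m shortest element side, a 0.5 m/s current and a 60 s external timestep.
example_g = 9.81
example_depth = 20.0        # total water depth (m)
example_length = 100.0      # shortest element side (m)
example_u, example_v = 0.4, 0.3
example_dt = 60.0
example_cfl = (2 * np.sqrt(example_g * example_depth) + np.abs(example_u) + np.abs(example_v)) * (example_dt / example_length)
# example_cfl is roughly 17, i.e. well above 1, so this timestep would be far too long for this element.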
def turbulent_kinetic_energy(u, v, w, debug=False):
......
......@@ -25,9 +25,10 @@ from PyFVCOM.coordinate import utm_from_lonlat, lonlat_from_utm
from PyFVCOM.grid import Domain, grid_metrics, read_fvcom_obc, nodes2elems
from PyFVCOM.grid import find_connected_elements, mp_interp_func
from PyFVCOM.grid import find_bad_node, element_side_lengths, reduce_triangulation
from PyFVCOM.grid import write_fvcom_mesh, connectivity, haversine_distance, subset_domain
from PyFVCOM.grid import write_fvcom_mesh, write_obc_file, connectivity, haversine_distance, subset_domain
from PyFVCOM.grid import expand_connected_nodes
from PyFVCOM.grid import OpenBoundary, Nest
from PyFVCOM.grid import interpolate_node_barycentric
from PyFVCOM.read import FileReader, _TimeReader, control_volumes
from PyFVCOM.utilities.general import flatten_list, PassiveStore, warn
from PyFVCOM.utilities.time import date_range
......@@ -3367,13 +3368,7 @@ class Model(Domain):
ids += boundary.nodes
types += [boundary.type] * len(boundary.nodes)
# I feel like this should be in self.dims.
number_of_nodes = len(ids)
with open(str(obc_file), 'w') as f:
f.write('OBC Node Number = {:d}\n'.format(number_of_nodes))
for count, node, obc_type in zip(np.arange(number_of_nodes) + 1, ids, types):
f.write('{} {:d} {:d}\n'.format(count, node + 1, obc_type))
write_obc_file(ids, types, obc_file)
def add_groundwater(self, locations, flux, temperature=15, salinity=35):
"""
......@@ -6428,6 +6423,124 @@ class Restart(FileReader):
self.replace_variable(variable, interpolated_coarse_data)
def replace_variable_with_fvcom(self, var, coarse_fvcom, constrain_coordinates=False):
"""
Replace a variable in the restart file with the same variable interpolated from another (coarser) FVCOM run.
Parameters
----------
var : str
Name of the variable to replace.
coarse_fvcom : PyFVCOM.read.FileReader
The FVCOM model output to interpolate onto the restart grid. This must include the grid information
(lon/lat or lonc/latc, h, siglay and triangles in coarse_fvcom.grid) as well as the field to
interpolate in coarse_fvcom.data.
constrain_coordinates : bool, optional
Set to True to constrain the restart grid coordinates (lon, lat, depth) to the supplied coarse data.
This essentially squashes the grid to fit inside the coarse data and is, therefore, a bit of a
fudge! Defaults to False.
"""
# This is more or less a copy-paste of PyFVCOM.grid.add_nested_forcing except we use the full grid
# coordinates instead of those on the open boundary only. Feels like unnecessary duplication of code.
# We need the vertical grid data for the interpolation, so load it now.
coarse_data = np.squeeze(getattr(coarse_fvcom.data, var))
var_shape = coarse_data.shape
if var_shape[-1] == len(coarse_fvcom.grid.lon):
mode='node'
coarse_x = coarse_fvcom.grid.lon
coarse_y = coarse_fvcom.grid.lat
coarse_z = coarse_fvcom.grid.h * -coarse_fvcom.grid.siglay
coarse_tri = coarse_fvcom.grid.triangles
else:
mode='elements'
coarse_x = coarse_fvcom.grid.lonc
coarse_y = coarse_fvcom.grid.latc
coarse_z = coarse_fvcom.grid.h_center * -coarse_fvcom.grid.siglay_center
# Element-centred data carry no connectivity of their own, so build a Delaunay triangulation of the
# element centres for the barycentric interpolation below (coarse_tri would otherwise be undefined
# on this branch).
from matplotlib.tri import Triangulation
coarse_tri = Triangulation(coarse_x, coarse_y).triangles
if len(var_shape) == 1:
mode+='_surface'
self.load_data(['siglay'])
self.data.siglay_center = nodes2elems(self.data.siglay, self.grid.triangles)
if 'elements' in mode:
x = copy.deepcopy(self.grid.lonc)
y = copy.deepcopy(self.grid.latc)
# Keep depths positive down.
z = self.grid.h_center * -self.data.siglay_center
else:
x = copy.deepcopy(self.grid.lon[:])
y = copy.deepcopy(self.grid.lat[:])
# Keep depths positive down.
z = self.grid.h * -self.data.siglay
if constrain_coordinates:
x[x < coarse_x.min()] = coarse_x.min()
x[x > coarse_x.max()] = coarse_x.max()
y[y < coarse_y.min()] = coarse_y.min()
y[y > coarse_y.max()] = coarse_y.max()
# Internal landmasses also need to be dealt with, so test if a point lies within the mask of the grid and
# move it to the nearest in grid point if so.
# use .in_domain
is_in = coarse_fvcom.in_domain(x,y)
if np.sum(is_in) < len(is_in):
if 'elements' in mode:
close_ind = coarse_fvcom.closest_element([x[~is_in], y[~is_in]])
else:
close_ind = coarse_fvcom.closest_node([x[~is_in], y[~is_in]])
x[~is_in] = coarse_x[close_ind]
y[~is_in] = coarse_y[close_ind]
# The depth data work differently as we need to squeeze each FVCOM water column into the available coarse
# data. The only way to do this is to adjust each FVCOM water column in turn by comparing with the
# closest coarse depth.
if 'surface' not in mode:
# Go through each grid position and, if its depth is deeper than the closest coarse data,
# squash that water column into the coarse water column.
node_tris = coarse_fvcom.grid.triangles[coarse_fvcom.closest_element([x,y]),:]
for idx, node in enumerate(zip(x, y, z.T)):
grid_depth = np.min(coarse_fvcom.grid.h[node_tris[idx,:]])
if grid_depth < node[2].max():
# Squash the FVCOM water column into the coarse water column.
z[:, idx] = (node[2] / node[2].max()) * grid_depth
# Fix all depths which are shallower than the shallowest coarse depth. This is more straightforward as
# it's a single minimum across all the grid positions.
z[z < coarse_z.min()] = coarse_z.min()
# Now do the interpolation. We interpolate across each horizontal layer then linearly on depth.
if 'surface' in mode:
interp_data = _interp_to_fvcom_layer(coarse_x, coarse_y, coarse_tri, coarse_data, x, y)
else:
first_interp_coarse_data = []
interped_coarse_z =[]
for this_layer in np.arange(0,len(coarse_data)):
print('Interp layer {} var'.format(this_layer))
first_interp_coarse_data.append(_interp_to_fvcom_layer(coarse_x, coarse_y, coarse_tri, coarse_data[this_layer,:], x, y))
print('Interp layer {} z'.format(this_layer))
interped_coarse_z.append(_interp_to_fvcom_layer(coarse_x, coarse_y, coarse_tri, coarse_z[this_layer,:], x, y))
print('Interp layer depth')
interpolated_coarse_data = _interp_to_fvcom_depth(np.asarray(interped_coarse_z), np.asarray(first_interp_coarse_data), z)
self.replace_variable(var, interpolated_coarse_data[np.newaxis,:,:])
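# Illustrative (commented) use of replace_variable_with_fvcom: `restart` would be a Restart instance
# for the run being restarted and `coarse_run` a FileReader for the donor FVCOM output; the object
# names and file paths here are assumptions for the sketch.
#
# restart = Restart('casename_restart_0001.nc', variables=['temp', 'salinity'])
# coarse_run = FileReader('donor_run_0001.nc', variables=['temp'])
# restart.replace_variable_with_fvcom('temp', coarse_run, constrain_coordinates=True)
# restart.write_restart('casename_restart_regridded.nc')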
def write_restart(self, restart_file, global_atts=None, **ncopts):
"""
Write out an FVCOM-formatted netCDF file based on the data stored in this object.
......@@ -6502,3 +6615,14 @@ class Restart(FileReader):
self.regular = read_regular(*args, noisy=self._noisy, **kwargs)
def _interp_to_fvcom_layer(in_x, in_y, in_triangles, in_data, out_x, out_y):
interped_data = interpolate_node_barycentric(np.asarray([out_x, out_y]).T, in_data, in_x, in_y, in_triangles)
return interped_data
def _interp_to_fvcom_depth(in_z, in_data, out_z):
interped_data = []
for i, this_data in enumerate(in_data.T):
interped_data.append(np.interp(out_z[:,i], in_z[:,i], this_data))
return np.asarray(interped_data).T
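# Self-contained sketch of _interp_to_fvcom_depth: two water columns with three donor layers each, a
# donor field equal to depth, and target depths which fall between the donor layers, so the linear
# interpolation returns the target depths themselves (all values are made up for illustration).
donor_z = np.array([[1.0, 2.0], [5.0, 6.0], [10.0, 12.0]])  # (donor layers, columns), positive down
donor_field = donor_z.copy()                                # field value equals depth
target_z = np.array([[3.0, 4.0], [7.5, 9.0]])               # (target layers, columns)
interped_field = _interp_to_fvcom_depth(donor_z, donor_field, target_z)
assert np.allclose(interped_field, target_z)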
......@@ -445,17 +445,23 @@ class ValidationComparison():
self.time_match = time_match
self.ignore_deep = ignore_deep
self._match_mod_obs()
def find_matching_obs(self):
obs_in_mod_xy = np.asarray([self.fvcom_data.in_domain(this_lon, this_lat) for this_lon, this_lat in zip(self.obs_data.grid.lon, self.obs_data.grid.lat)])
print('Finding observations within model time/space')
obs_in_mod_t = np.logical_and(self.obs_data.time.datetime <= np.max(self.fvcom_data.time.datetime),
self.obs_data.time.datetime >= np.min(self.fvcom_data.time.datetime))
self.chosen_obs = np.logical_and(obs_in_mod_xy, obs_in_mod_t)
obs_in_mod_xy = self.fvcom_data.in_domain(self.obs_data.grid.lon[obs_in_mod_t], self.obs_data.grid.lat[obs_in_mod_t])
obs_in_mod_t[obs_in_mod_t] = obs_in_mod_xy
self.chosen_obs = obs_in_mod_t
self.chosen_obs_ll = np.asarray([self.obs_data.grid.lon[self.chosen_obs], self.obs_data.grid.lat[self.chosen_obs]]).T
self.chosen_obs_dep = self.obs_data.grid.depth[self.chosen_obs]
def find_model_horizontal(self):
print('Matching model horizontal')
if self.mode == 'nodes':
if self.horizontal_match == 'nearest':
self.chosen_mod_nodes = np.squeeze(np.asarray([self.fvcom_data.closest_node(this_ll) for this_ll in self.chosen_obs_ll]))[:, np.newaxis]
......@@ -489,6 +495,7 @@ class ValidationComparison():
# fill in using grid.nbe, trickier cos of boundary elements (-1s)
def find_model_time(self):
print('Matching model time')
if self.time_match == 'nearest':
self.chosen_mod_times = np.asarray([self.fvcom_data.closest_time(this_t) for this_t in self.obs_data.time.datetime[self.chosen_obs]])[:, np.newaxis]
self.chosen_mod_times_weights = np.ones(len(self.chosen_mod_times))[:, np.newaxis]
......@@ -515,6 +522,7 @@ class ValidationComparison():
self.chosen_mod_times_weights = np.asarray(chosen_mod_times_weights)
def find_model_vertical(self):
print('Finding model vertical match')
if not hasattr(self.fvcom_data.grid, 'depth'):
self.fvcom_data._get_cv_volumes()
......@@ -559,10 +567,21 @@ class ValidationComparison():
self.chosen_mod_times = self.chosen_mod_times[adjust_chosen,:]
self.chosen_mod_times_weights = self.chosen_mod_times_weights[adjust_chosen,:]
def _match_mod_obs(self):
self.find_matching_obs()
self.find_model_horizontal()
self.find_model_time()
self.find_model_vertical()
def get_matching_mod(self, varlist, return_time_ll_depth=False):
match_dict = {}
for this_var in varlist:
self.fvcom_data.load_data([this_var])
if not hasattr(self.fvcom_data.data, this_var):
self.fvcom_data.load_data([this_var])
delete_var = True
else:
delete_var = False
raw_data = getattr(self.fvcom_data.data, this_var)
# Do horizontal weighting first as largest dimension
......@@ -577,7 +596,8 @@ class ValidationComparison():
chosen = chosen_depth.diagonal().diagonal()
obs_data = getattr(self.obs_data.data, this_var)[self.chosen_obs]
delattr(self.fvcom_data.data, this_var)
if delete_var:
delattr(self.fvcom_data.data, this_var)
match_dict[this_var] = [chosen, obs_data]
if return_time_ll_depth:
......