"""
Tools to prepare data for an FVCOM run.

A very gradual port of the most used functions from the MATLAB toolbox:
    https://github.com/pwcazenave/fvcom-toolbox/tree/master/fvcom_prepro/

Author(s):

Mike Bedington (Plymouth Marine Laboratory)
Pierre Cazenave (Plymouth Marine Laboratory)

"""

import copy
import inspect
import multiprocessing
from datetime import datetime
from functools import partial
from pathlib import Path

import numpy as np
import scipy.optimize
from dateutil.relativedelta import relativedelta
from netCDF4 import Dataset, date2num, num2date, stringtochar
from scipy.interpolate import RegularGridInterpolator
from scipy.spatial import Delaunay
from shapely.geometry import Polygon

from PyFVCOM.coordinate import utm_from_lonlat, lonlat_from_utm
from PyFVCOM.grid import Domain, grid_metrics, read_fvcom_obc, nodes2elems
from PyFVCOM.grid import OpenBoundary, find_connected_elements, mp_interp_func
from PyFVCOM.grid import find_bad_node, element_side_lengths, reduce_triangulation
from PyFVCOM.grid import write_fvcom_mesh, connectivity, haversine_distance, subset_domain
from PyFVCOM.read import FileReader, _TimeReader, control_volumes
from PyFVCOM.utilities.general import flatten_list, PassiveStore, warn
from PyFVCOM.utilities.time import date_range


class Model(Domain):
    """
    Everything related to making a new model run.

    Methods are, in loosely logical order:

    Create inputs:
        estimate_time_step
        add_open_boundaries
        add_obc_types
        add_grid_metrics
        add_bed_roughness
        add_sigma_coordinates
        sigma_generalized
        sigma_geometric
        sigma_tanh
        hybrid_sigma_coordinate
        load_elevtide
        add_rivers
        read_nemo_rivers
        read_ea_river_temperature_climatology
        check_rivers
        add_groundwater
        add_probes
        add_stations
        add_nests
        add_nests_harmonics
        add_nests_regular
        avg_nest_force_vel
        load_nested_forcing
        subset_existing_nest
        read_regular
        interp_sst_assimilation
        interp_ady

    Write to file:
        write_grid
        write_obc
        write_coriolis
        write_bed_roughness
        write_sigma
        write_sponge
        write_tides
        write_tsobc
        write_river_forcing
        write_river_namelist
        write_groundwater
        write_probes
        write_stations
        write_nested_forcing
        write_sstgrd
        write_adygrd

    A brief example of how to use this is in the examples directory (pyfvcom_preprocessing_example.py or
    pyfvcom_preprocessing_example.ipynb).
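
    A minimal sketch of the intended workflow (file names and dates here are illustrative only):

    >>> from datetime import datetime
    >>> model = Model(datetime(2018, 1, 1), datetime(2018, 2, 1), 'casename_grd.dat',
    >>>     native_coordinates='spherical')
    >>> model.add_open_boundaries('casename_obc.dat')
    >>> model.write_grid('casename_grd_out.dat', depth_file='casename_dep.dat')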

    """

    # There should be more use of objects here. For example, each open boundary should be a Boundary object which has
    # methods for interpolating data onto it (tides, temperature, salinity, ERSEM variables etc.). The coastline
    # could be an object which has methods related to rivers and checking depths. Likewise, the model grid object
    # could contain methods for interpolating SST, creating restart files etc.

    # TODO:
    #  - Open boundaries end up held in Model.open_boundaries and Model.grid.open_boundaries which seems wrong.
    #  - Make a method to create a subdomain input file for namelist outputs over different spatial domains
    #  (NC{,AV}_SUBDOMAIN_FILES in the NML_NETCDF{,_AV} namelist section).
    def __init__(self, start, end, *args, **kwargs):
        """
        Initialise an FVCOM model configuration object with a given start and end date.

        Parameters
        ----------
        start : datetime.datetime
            The start of the model run.
        end : datetime.datetime
            The end of the model run (inclusive).
        grid : str, pathlib.Path
            The model grid to read.
        native_coordinates : str
            Defines the coordinate system used in the grid ('spherical' or 'cartesian'). Defaults to `spherical'.
        zone : str, optional
            If `native_coordinates' is 'cartesian', give the UTM zone as a string, formatted as, for example,
            '30N'. Ignored if `native_coordinates' is 'spherical'.
        sampling : float, optional
            The sampling interval (in days) for the time series data generated for this model run. If omitted, defaults to daily.
        noisy : bool, optional
            Set to True to enable verbose output. Defaults to False.
        debug : bool, optional
            Set to True to enable debugging output. Defaults to False.

        Most data are stored in objects within this object:

        self.time : time related data (e.g. Modified Julian Days).
        self.sigma : vertical grid discretisation.
        self.sst : sea surface temperature data assimilation data.
        self.nest : data pertaining to the nested forcing.
        self.stations : information on any defined stations.
        self.probes : information on any defined probes.
        self.ady : information on the light absorption for use in ERSEM.
        self.regular : regularly gridded model information used for interpolation to the boundaries.
        self.groundwater : configuration information for the groundwater module in FVCOM.

        """
        sampling = kwargs.pop('sampling', 1)

        # Inherit everything from PyFVCOM.grid.Domain, but extend it for our purposes. This doesn't work with Python 2.
        super().__init__(*args, **kwargs)

        self.noisy = False
        self._debug = False
        if 'noisy' in kwargs:
            self.noisy = kwargs['noisy']

        # Useful to have a central place for this.
        self._mjd_origin = 'days since 1858-11-17 00:00:00'

        # Initialise things so we can add attributes to them later.
        self.time = PassiveStore()
        self.sigma = PassiveStore()
        self.sst = PassiveStore()
        self.nest = PassiveStore()
        self.stations = PassiveStore()
        self.probes = PassiveStore()
        self.ady = PassiveStore()
        self.regular = None
        self.groundwater = PassiveStore()

        # Make some potentially useful time representations.
        self.start = start
        self.end = end
        self.sampling = sampling
        self._add_time()

        # Initialise the open boundary objects from the nodes we've read in from the grid (if any).
        self._initialise_open_boundaries_on_nodes()

        # Initialise the river structure.
        self._prep_rivers()

        # Add the coastline to the grid object for use later on.
        *_, bnd = connectivity(np.array((self.grid.lon, self.grid.lat)).T, self.grid.triangles)
        self.grid.coastline = np.argwhere(bnd)
        # Remove the open boundaries, if we have them.
        if self.grid.open_boundary_nodes:
            land_only = np.isin(np.squeeze(np.argwhere(bnd)), flatten_list(self.grid.open_boundary_nodes), invert=True)
            self.grid.coastline = np.squeeze(self.grid.coastline[land_only])

    def _prep_rivers(self):
        """ Create a few objects and attributes which are useful for the river data. """
        self.river = PassiveStore()
        self.dims.river = 0  # assume no rivers.

        self.river.history = ''
        self.river.info = ''
        self.river.source = ''

    def _add_time(self):
        """
        Add time variables we might need for the various bits of processing.

        """

        self.time.datetime = date_range(self.start, self.end, inc=self.sampling)
        self.time.time = date2num(self.time.datetime, units=self._mjd_origin)
        self.time.Itime = np.floor(self.time.time)  # integer Modified Julian Days
        self.time.Itime2 = (self.time.time - self.time.Itime) * 24 * 60 * 60 * 1000  # milliseconds since midnight
        self.time.Times = [t.strftime('%Y-%m-%dT%H:%M:%S.%f') for t in self.time.datetime]

    def _initialise_open_boundaries_on_nodes(self):
        """ Add the relevant node-based grid information for any open boundaries we've got. """

        self.open_boundaries = []
        self.dims.open_boundary_nodes = 0  # assume no open boundary nodes
        if self.grid.open_boundary_nodes:
            for nodes in self.grid.open_boundary_nodes:
                self.open_boundaries.append(OpenBoundary(nodes))
                # Update the dimensions.
                self.dims.open_boundary_nodes += len(nodes)
                # Add the positions of the relevant bits of information.
                for attribute in ('lon', 'lat', 'x', 'y', 'h'):
                    try:
                        setattr(self.open_boundaries[-1].grid, attribute, getattr(self.grid, attribute)[nodes, ...])
                    except AttributeError:
                        pass
                # Add all the time data.
                self.open_boundaries[-1].time.start = self.start
                self.open_boundaries[-1].time.end = self.end

    def _update_open_boundaries(self):
        """
        Call this when we've done something which affects the open boundary objects and we need to update their
        properties.

        For example, this updates sigma information if we've added the sigma distribution to the Model object.

        """

        # Add the sigma data to any open boundaries we've got loaded.
        for boundary in self.open_boundaries:
            for attribute in self.sigma:
                try:
                    # Ignore element-based data for now.
                    if 'center' not in attribute:
                        setattr(boundary.sigma, attribute, getattr(self.sigma, attribute)[boundary.nodes, ...])
                except (IndexError, TypeError):
                    setattr(boundary.sigma, attribute, getattr(self.sigma, attribute))
                except AttributeError:
                    pass

    def estimate_time_step(self, maximum_speed, maximum_elevation):
        """
        Estimate the time step for the current grid based on the given anticipated maximum speed and surface elevation.

        Parameters
        ----------
        maximum_speed : float
            The anticipated maximum speed.
        maximum_elevation : float
            The anticipated maximum surface elevation.
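
        Notes
        -----
        The estimate is the time a shallow water gravity wave (plus the given advective speed) takes to cross
        each element side: dt = L / (sqrt(g * H) + u) for side length L and elementwise maximum depth H
        (offset by `maximum_elevation'); the minimum over the grid is stored in self.time_step. A sketch with
        illustrative values:

        >>> model.estimate_time_step(maximum_speed=3.0, maximum_elevation=2.0)
        >>> model.time_step  # seconds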

        """

        gravity = 9.81

        # Calculate the length of each side in the elements in the grid and the propagation of a gravity wave across
        # those distances to figure out what the time step should be.

        # *lick index finger and stick in the air now*
        lengths = element_side_lengths(self.grid.triangles, self.grid.x, self.grid.y)
        depths = np.max(self.grid.h[self.grid.triangles] + maximum_elevation, axis=1)
        timesteps = (lengths.T / (np.sqrt(gravity * depths) + maximum_speed)).T

        self.time_step = np.nanmin(timesteps)

        if self.noisy:
            print(f'Estimated time step {self.time_step} seconds.')

    def write_grid(self, grid_file, depth_file=None):
        """
        Write out the unstructured grid data to file.

        Parameters
        ----------
        grid_file : str, pathlib.Path
            Name of the file to which to write the grid.
        depth_file : str, pathlib.Path, optional
            If given, also write out the bathymetry file.

        """
        grid_file = str(grid_file)
        if depth_file is not None:
            depth_file = str(depth_file)

        nodes = np.arange(self.dims.node) + 1
        if self.grid.native_coordinates.lower() == 'spherical':
            x, y = self.grid.lon, self.grid.lat
        else:
            x, y = self.grid.x, self.grid.y

        # Check for the distribution of depths. Since FVCOM is positive down and some grids are specified as negative
        # down, do a sanity check here. If we've got more negatives than positive depths, then flip the sign (and
        # warn we're doing that), otherwise, go as is.
        negative_total = sum(self.grid.h < 0)
        positive_total = sum(self.grid.h > 0)
        depth = self.grid.h
        if negative_total > positive_total:
            depth = -depth
            warn('Flipping depths to be positive down since we have been supplied with mostly negative depths.')

        write_fvcom_mesh(self.grid.triangles, nodes, x, y, depth, grid_file, extra_depth=depth_file)
    def write_coriolis(self, coriolis_file):
        """
        Write an FVCOM-formatted Coriolis file.

        Parameters
        ----------
        coriolis_file : str, pathlib.Path
            Name of the file to which to write the Coriolis data.
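
        Notes
        -----
        The file written is plain ASCII: a header line (`Node Number = n') followed by one `x y latitude' line
        per node; the trailing latitude column is what FVCOM uses to compute the Coriolis parameter.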

        """

        if isinstance(coriolis_file, str):
            coriolis_file = Path(coriolis_file)

        with coriolis_file.open('w') as f:
            if self.grid.native_coordinates.lower() == 'spherical':
                x, y = self.grid.lon, self.grid.lat
            else:
                x, y = self.grid.x, self.grid.y

            f.write('Node Number = {:d}\n'.format(self.dims.node))
            for line in zip(x, y, self.grid.lat):
                f.write('{:.6f} {:.6f} {:.6f}\n'.format(*line))

    def add_bed_roughness(self, roughness):
        """
        Add a uniform or spatially varying bed roughness to the model.

        Parameters
        ----------
        roughness : float, np.ndarray
            The bed roughness (in metres).
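
        Example
        -------
        A sketch with illustrative values (np is numpy): pass either a single value for a spatially uniform
        roughness or an array with one value per element.

        >>> model.add_bed_roughness(0.015)
        >>> model.add_bed_roughness(np.full(model.dims.nele, 0.015))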

        """

        self.grid.roughness = roughness

    def write_bed_roughness(self, roughness_file, ncopts={'zlib': True, 'complevel': 7}, **kwargs):
        """
        Write the bed roughness to netCDF.

        Parameters
        ----------
        roughness_file : str, pathlib.Path
            File to which to write bed roughness data.
        ncopts : dict, optional
            Dictionary of options to use when creating the netCDF variables. Defaults to compression on.

        Remaining arguments are passed to WriteForcing.

        """
        globals = {'title': 'bottom roughness',
                   'history': 'File created using {} from PyFVCOM'.format(inspect.stack()[0][3])}
        dims = {'nele': self.dims.nele}

        with WriteForcing(str(roughness_file), dims, global_attributes=globals, clobber=True, format='NETCDF4', **kwargs) as z0:
            # Add the variables.
            atts = {'long_name': 'bottom roughness', 'units': 'm', 'type': 'data'}
            z0.add_variable('z0b', self.grid.roughness, ['nele'], attributes=atts, ncopts=ncopts)
            # Pretty sure this variable isn't necessary for an ordinary physics run. At least, we've never written it
            #  to file to date.
            atts = {'long_name': 'bottom roughness minimum', 'units': 'None', 'type': 'data'}
            z0.add_variable('cbcmin', None, ['nele'], attributes=atts, ncopts=ncopts)
    def interp_sst_assimilation(self, sst_dir, offset=0, serial=False, pool_size=None, noisy=False):
        """
        Interpolate SST data from remote sensing data onto the supplied model
        grid.

        Parameters
        ----------
        sst_dir : str, pathlib.Path
            Path to directory containing the SST data. Assumes there are directories per year within this directory.
        offset : int, optional
            Number of days by which to offset the time period in the time series. Defaults to zero.
        serial : bool, optional
            Run in serial rather than parallel. Defaults to parallel.
        pool_size : int, optional
            Specify number of processes for parallel run. By default it uses all available.
        noisy : bool, optional
            Set to True to enable some sort of progress output. Defaults to False.

        Returns
        -------
        Adds a new `sst' object with:
        sst : np.ndarray
            Interpolated SST time series for the supplied domain.
        time : np.ndarray
            List of python datetimes for the corresponding SST data.

        Example
        -------
        >>> from datetime import datetime
        >>> from PyFVCOM.preproc import Model
        >>> sst_dir = '/home/mbe/Data/SST_data/2006/'
        >>> model = Model(datetime(2006, 1, 1), datetime(2006, 12, 31),
        >>>     '/home/mbe/Models/FVCOM/tamar/tamar_v2_grd.dat',
        >>>     native_coordinates='cartesian', zone='30N')
        >>> model.interp_sst_assimilation(sst_dir, pool_size=20)
        >>> # Save to netCDF
        >>> model.write_sstgrd('casename_sstgrd.nc')

        Notes
        -----
        - Based on https://github.com/pwcazenave/fvcom-toolbox/tree/master/fvcom_prepro/interp_sst_assimilation.m.

        """

        if isinstance(sst_dir, str):
            sst_dir = Path(sst_dir)

        # Make daily data.
        dates = date_range(self.start - relativedelta(days=offset), self.end + relativedelta(days=offset))

        sst_files = []
        for date in dates:
            sst_base = sst_dir / Path(str(date.year))
            sst_files += list(sst_base.glob('*{}*.nc'.format(date.strftime('%Y%m%d'))))

        if noisy:
            print('To do:\n{}'.format('|' * len(sst_files)), flush=True)

        # Read SST data files and interpolate each to the FVCOM mesh
        lonlat = np.array((self.grid.lon, self.grid.lat))

        if serial:
            results = []
            for sst_file in sst_files:
                results.append(self._inter_sst_worker(lonlat, sst_file, noisy))
        else:
            if not pool_size:
                pool = multiprocessing.Pool()
            else:
                pool = multiprocessing.Pool(pool_size)
            part_func = partial(self._inter_sst_worker, lonlat, noisy=noisy)
            results = pool.map(part_func, sst_files)
            pool.close()

        # Sort data and prepare date lists
        dates = np.empty(len(results)).astype(datetime)
        sst = np.empty((len(results), self.dims.node))
        for i, result in enumerate(results):
            # Force the data to be at midday instead of whatever's in the input netCDFs. This is because FVCOM seems
            # to want times at midday.
            dates[i] = datetime(*[getattr(result[0][0], attr) for attr in ('year', 'month', 'day')], 12)
            sst[i, :] = result[1]

        # Sort by time.
        idx = np.argsort(dates)
        dates = dates[idx]
        sst = sst[idx, :]

        # Store everything in an object.
        self.sst.sst = sst
        self.sst.time = dates

    @staticmethod
    def _inter_sst_worker(fvcom_lonlat, sst_file, noisy=False, var_name='analysed_sst', var_offset=-273.15):
        """ Multiprocessing worker function for the SST interpolation. """
        if noisy:
            print('.', end='', flush=True)

        with Dataset(sst_file, 'r') as sst_file_nc:
            sst_eo = np.squeeze(sst_file_nc.variables[var_name][:]) + var_offset  # Kelvin to Celsius
            mask = sst_file_nc.variables['mask']
            if len(sst_eo.shape) == 3 and len(mask.shape) == 2:
                sst_eo[np.tile(mask[:][np.newaxis, :], (sst_eo.shape[0], 1, 1)) == 1] = np.nan
            else:
                sst_eo[mask[:] == 1] = np.nan
            sst_lon = sst_file_nc.variables['lon'][:]
            sst_lat = sst_file_nc.variables['lat'][:]
            time_out_dt = num2date(sst_file_nc.variables['time'][:], units=sst_file_nc.variables['time'].units)

        ft = RegularGridInterpolator((sst_lon, sst_lat), sst_eo.T, method='linear', fill_value=None)
        interp_sst = ft(fvcom_lonlat.T)

        return time_out_dt, interp_sst

    def write_sstgrd(self, output_file, ncopts={'zlib': True, 'complevel': 7}, **kwargs):
        """
        Generate a sea surface temperature data assimilation file for the given FVCOM domain from the self.sst data.
        Parameters
        ----------
        output_file : str, pathlib.Path
            File to which to write SST data.
        ncopts : dict
            Dictionary of options to use when creating the netCDF variables. Defaults to compression on.
        Remaining arguments are passed to WriteForcing.
        """
        globals = {'year': str(np.argmax(np.bincount([i.year for i in self.sst.time]))),  # gets the most common year value
                   'title': 'FVCOM SST 1km merged product File',
                   'institution': 'Plymouth Marine Laboratory',
                   'source': 'FVCOM grid (unstructured) surface forcing',
                   'history': 'File created using {} from PyFVCOM'.format(inspect.stack()[0][3]),
                   'references': 'http://fvcom.smast.umassd.edu, http://codfish.smast.umassd.edu, http://pml.ac.uk/modelling',
                   'Conventions': 'CF-1.0',
                   'CoordinateProjection': 'init=WGS84'}
        dims = {'nele': self.dims.nele, 'node': self.dims.node, 'time': 0, 'DateStrLen': 26, 'three': 3}

        with WriteForcing(str(output_file), dims, global_attributes=globals, clobber=True, format='NETCDF4', **kwargs) as sstgrd:
            # Add the variables.
            atts = {'long_name': 'nodel longitude', 'units': 'degrees_east'}
            sstgrd.add_variable('lon', self.grid.lon, ['node'], attributes=atts, ncopts=ncopts)
            atts = {'long_name': 'nodel latitude', 'units': 'degrees_north'}
            sstgrd.add_variable('lat', self.grid.lat, ['node'], attributes=atts, ncopts=ncopts)
            sstgrd.write_fvcom_time(self.sst.time)
            atts = {'long_name': 'sea surface Temperature',
                    'units': 'Celsius Degree',
                    'grid': 'fvcom_grid',
                    'type': 'data'}
            sstgrd.add_variable('sst', self.sst.sst, ['time', 'node'], attributes=atts, ncopts=ncopts)

    def interp_ady(self, ady_dir, serial=False, pool_size=None, noisy=False):

        """
        Interpolate Gelbstoff absorption from a regular grid to an FVCOM grid.

        Parameters
        ----------
        ady_dir : str, pathlib.Path
            Path to directory containing the absorption data. We will find any files ending in '.nc' and use them
            to load the `gelbstoff_absorption_satellite' variable.
        serial : bool, optional
            Run in serial rather than parallel. Defaults to parallel.
        pool_size : int, optional
            Specify number of processes for parallel run. By default it uses all available.
        noisy : bool, optional
            Set to True to enable some sort of progress output. Defaults to False.

        Returns
        -------
        Adds a new `ady' object with:
        ady : np.ndarray
            Interpolated absorption time series for the supplied domain.
        time : np.ndarray
            List of python datetimes for the corresponding absorption data.

        Example
        -------
        >>> from datetime import datetime
        >>> from PyFVCOM.preproc import Model
        >>> ady_dir = '/home/mbe/Code/fvcom-projects/locate/python/ady_preproc/Data/yr_data/'
        >>> model = Model(datetime(2006, 1, 1), datetime(2006, 12, 31),
        >>>     '/home/mbe/Models/FVCOM/tamar/tamar_v2_grd.dat',
        >>>     native_coordinates='cartesian', zone='30N')
        >>> model.interp_ady(ady_dir, pool_size=20)
        >>> # Save to netCDF
        >>> model.write_adygrd('casename_adygrd.nc')

        Notes
        -----
        TODO: Combine interpolation routines (sst, ady, etc) to make more efficient

        """

        if isinstance(ady_dir, str):
            ady_dir = Path(ady_dir)

        ady_files = list(ady_dir.glob('*.nc'))

        if noisy:
            print('To do:\n{}'.format('|' * len(ady_files)), flush=True)

        # Read ADY data files and interpolate each to the FVCOM mesh
        lonlat = np.array((self.grid.lon, self.grid.lat))

        if serial:
            results = []
            for ady_file in ady_files:
                results.append(self._inter_sst_worker(lonlat, ady_file, noisy,
                                                      var_name='gelbstoff_absorption_satellite', var_offset=0))
        else:
            if not pool_size:
                pool = multiprocessing.Pool()
            else:
                pool = multiprocessing.Pool(pool_size)
            part_func = partial(self._inter_sst_worker, lonlat, noisy=noisy,
                                var_name='gelbstoff_absorption_satellite', var_offset=0)
            results = pool.map(part_func, ady_files)
            pool.close()

        # Sort data and prepare date lists
        dates = []
        ady = []

        for this_result in results:
            dates.append(this_result[0])
            ady.append(this_result[1])

        ady = np.vstack(ady).T
        # FVCOM wants times at midday whilst the data are at midnight, so flatten the per-file times and offset
        # them by 12 hours.
        dates = np.asarray([this_date + relativedelta(hours=12) for sublist in dates for this_date in sublist])

        # Sort by time.
        idx = np.argsort(dates)
        dates = dates[idx]
        ady = ady[idx, :]

        # Store everything in an object.
        self.ady.ady = ady
        self.ady.time = dates

    def interp_ady_climatology(self, ady_file, tmask, serial=False, pool_size=None, noisy=False):

        """
        Interpolate Geblstoff absorption climatology from a regular grid to an FVCOM grid.

        Parameters
        ----------
        ady_file : str, pathlib.Path
            Path to the NEMO-ERSEM Gelbstoff absorption climatology file from which to load the
            `gelbstoff_absorption_satellite' variable.
        tmask : str, pathlib.Path
            Path to the NEMO tmask file with the grid information in it.
        serial : bool, optional
            Run in serial rather than parallel. Defaults to parallel.
        pool_size : int, optional
            Specify number of processes for parallel run. By default it uses all available.
        noisy : bool, optional
            Set to True to enable some sort of progress output. Defaults to False.

        Returns
        -------
        Adds a new `ady' object with:
        ady : np.ndarray
            Interpolated absorption time series for the supplied domain.
        time : np.ndarray
            List of python datetimes for the corresponding absorption data.

        Example
        -------
        >>> from datetime import datetime
        >>> from PyFVCOM.preproc import Model
        >>> ady_file = '/home/mbe/Code/fvcom-projects/locate/python/ady_preproc/Data/AMM7-ADY-broadband.nc'
        >>> tmask = '/data/euryale4/to_archive/momm-AMM7-INPUTS/GRID/mesh_mask.nc'
        >>> model = Model(datetime(2006, 1, 1), datetime(2006, 12, 31),
        >>>               '/home/mbe/Models/FVCOM/tamar/tamar_v2_grd.dat',
        >>>               native_coordinates='cartesian', zone='30N')
        >>> model.interp_ady_climatology(ady_file, tmask, pool_size=20)
        >>> # Save to netCDF
        >>> model.write_adygrd('casename_adygrd.nc')

        Notes
        -----
        TODO: Combine interpolation routines (sst, ady, ady_climatology, etc.) to make more efficient

        """

        # This is a reimplementation of the MATLAB script interp_ady_assimilation.m from PML's projects.

        if isinstance(ady_file, str):
            ady_file = Path(ady_file)
        if isinstance(tmask, str):
            tmask = Path(tmask)

        # Read ADY data files and interpolate each to the FVCOM mesh
        with Dataset(tmask) as ds:
            land_mask = ds.variables['tmask'][:][0, 0].astype(bool)  # grab the surface only
            # Keep the coordinates of the unmasked (sea) points only.
            lon = ds.variables['nav_lon'][:][land_mask]
            lat = ds.variables['nav_lat'][:][land_mask]

        with Dataset(ady_file) as ds:
            # The NEMO-ERSEM ADY Gelbstoff absorption climatology file stores time as hours since
            # 1900-01-01 00:00:00. Make the times relative to the start of our model's year instead.
            model_start = datetime(self.start.year, 1, 1)
            dates = [model_start + relativedelta(hours=t) for t in ds.variables['t'][:]]
            # Make the land mask match the time dimension.
            land_mask = np.tile(land_mask, (len(dates), 1, 1))

            regular_ady = ds.variables['gelbstoff_absorption_satellite'][:]
            # Flatten the space dimensions.
            regular_ady = regular_ady[land_mask].reshape(-1, land_mask[0].sum())

        # If the start or end of the model are outside the data, wrap the end of the climatology appropriately. Set
        # the time accordingly.
        original_dates = copy.copy(dates)
        original_ady = regular_ady.copy()
        if self.start < dates[0]:
            interval = dates[-1] - dates[-2]
            dates.insert(0, dates[0] - interval)
            regular_ady = np.concatenate((original_ady[-1][np.newaxis], regular_ady), axis=0)
        if self.end > dates[-1]:
            relative_intervals = [i - original_dates[0] for i in original_dates]
            new_dates = [original_dates[-1] + i for i in relative_intervals]
            dates.append(new_dates[1])
            regular_ady = np.concatenate((regular_ady, original_ady[0][np.newaxis]), axis=0)
        del original_ady, original_dates

        # Drop data outside the current model period. Offset by one either way so we can cover the current model
        # period.
        start_index = np.argwhere(np.asarray(dates) >= self.start).ravel()[0]
        end_index = np.argwhere(np.asarray(dates) <= self.end).ravel()[-1]
        if start_index != 0:
            start_index -= 1
        if end_index != len(dates):
            # Add two here: one for indexing and one for covering the period of interest, making sure we aren't too big.
            end_index = min(end_index + 2, len(dates))

        dates = dates[start_index:end_index]
        regular_ady = regular_ady[start_index:end_index]
        if noisy:
            print(f'Climatology period: {dates[0]} to {dates[-1]}; model period: {self.start} to {self.end}.')

        if noisy:
            print(f'Interpolating ADY data for {len(dates)} times.', flush=True)

        # Now for each time in the ADY data, interpolate it to the model grid.
        if serial:
            ady = []
            for data in regular_ady:
                ady.append(mp_interp_func((lon.ravel(), lat.ravel(), data.ravel(), self.grid.lon, self.grid.lat)))
        else:
            if pool_size is None:
                pool = multiprocessing.Pool()
            else:
                pool = multiprocessing.Pool(pool_size)
            args = [(lon.ravel(), lat.ravel(), data.ravel(), self.grid.lon, self.grid.lat) for data in regular_ady]
            ady = pool.map(mp_interp_func, args)
            pool.close()
        ady = np.asarray(ady)

        # Make sure we enclose our current model run by adding a point at the start and end too if necessary.
        if dates[0] > self.start:
            dates.insert(0, self.start)
            ady = np.concatenate((ady[0][np.newaxis], ady), axis=0)
        if dates[-1] < self.end:
            dates.append(self.end)
            ady = np.concatenate((ady, ady[-1][np.newaxis]), axis=0)

        # Store everything in an object.
        self.ady.ady = ady
        self.ady.time = dates

    def write_adygrd(self, output_file, ncopts={'zlib': True, 'complevel': 7}, **kwargs):
        """
        Generate a Gelbstoff absorption file for the given FVCOM domain from the self.ady data.

        Parameters
        ----------
        output_file : str, pathlib.Path
            File to which to write the absorption data.
        ncopts : dict
            Dictionary of options to use when creating the netCDF variables. Defaults to compression on.

        Remaining arguments are passed to WriteForcing.
        """
        globals = {'year': str(np.argmax(np.bincount([i.year for i in self.ady.time]))),  # gets the most common year value
                   'title': 'FVCOM Satellite derived Gelbstoff climatology product File',
                   'institution': 'Plymouth Marine Laboratory',
                   'source': 'FVCOM grid (unstructured) surface forcing',
                   'history': 'File created using {} from PyFVCOM'.format(inspect.stack()[0][3]),
                   'references': 'http://fvcom.smast.umassd.edu, http://codfish.smast.umassd.edu, http://pml.ac.uk/modelling',
                   'Conventions': 'CF-1.0',
                   'CoordinateProjection': 'init=WGS84'}
        dims = {'nele': self.dims.nele, 'node': self.dims.node, 'time': 0, 'DateStrLen': 26, 'three': 3}

        with WriteForcing(str(output_file), dims, global_attributes=globals, clobber=True, format='NETCDF4', **kwargs) as adygrd:
            # Add the variables.
            atts = {'long_name': 'nodel longitude', 'units': 'degrees_east'}
            adygrd.add_variable('lon', self.grid.lon, ['node'], attributes=atts, ncopts=ncopts)
            atts = {'long_name': 'nodel latitude', 'units': 'degrees_north'}
            adygrd.add_variable('lat', self.grid.lat, ['node'], attributes=atts, ncopts=ncopts)
            adygrd.write_fvcom_time(self.ady.time)
            atts = {'long_name': 'gelbstoff_absorption_satellite',
                    'units': '1/m',
                    'grid': 'fvcom_grid',
                    'type': 'data'}
            adygrd.add_variable('Kd_ady', self.ady.ady, ['time', 'node'], attributes=atts, ncopts=ncopts)

    def add_sigma_coordinates(self, sigma_file, noisy=False):
        """
        Read in a sigma coordinates file and apply to the grid object.

        Parameters
        ----------
        sigma_file : str, pathlib.Path
            FVCOM sigma coordinates .dat file.
        noisy : bool, optional
            Set to True to enable verbose output. Defaults to False.

        Notes
        -----
        This is more or less a direct python translation of the original MATLAB fvcom-toolbox function read_sigma.m
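
        The parser expects `OPTION = VALUE' lines, one option per line. A minimal uniform file (values
        illustrative) would contain:

            NUMBER OF SIGMA LEVELS = 11
            SIGMA COORDINATE TYPE = UNIFORM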

        """

        sigma_file = str(sigma_file)

        # Make an object to store the sigma data.
        self.sigma = PassiveStore()

        with open(sigma_file, 'r') as f:
            lines = f.readlines()
            for line in lines:
                line = line.strip()
                option, value = line.split('=')
                option = option.strip().lower()
                value = value.strip()

                # Grab the various bits we need.
                if option == 'number of sigma levels':
                    nlev = int(value)
                elif option == 'sigma coordinate type':
                    sigtype = value
                elif option == 'sigma power':
                    sigpow = float(value)
                elif option == 'du':
                    du = float(value)
                elif option == 'dl':
                    dl = float(value)
                elif option == 'min constant depth':
                    min_constant_depth = float(value)
                elif option == 'ku':
                    ku = int(value)
                elif option == 'kl':
                    kl = int(value)
                elif option == 'zku':
                    s = [float(i) for i in value.split()]
                    zku = np.zeros(ku)
                    for i in range(ku):
                        zku[i] = s[i]
                elif option == 'zkl':
                    s = [float(i) for i in value.split()]
                    zkl = np.zeros(kl)
                    for i in range(kl):
                        zkl[i] = s[i]

        # Calculate the sigma level distributions at each grid node.
        if sigtype.lower() == 'generalized':
            # Do some checks if we've got uniform or generalised coordinates to make sure the input is correct.
            if len(zku) != ku:
                raise ValueError('Number of zku values does not match the number specified in ku')
            if len(zkl) != kl:
                raise ValueError('Number of zkl values does not match the number specified in kl')
            sigma_levels = np.empty((self.dims.node, nlev)) * np.nan
            for i in range(self.dims.node):
                sigma_levels[i, :] = self.sigma_generalized(nlev, dl, du, self.grid.h[i], min_constant_depth)
        elif sigtype.lower() == 'uniform':
            sigma_levels = np.repeat(self.sigma_geometric(nlev, 1), self.dims.node).reshape(self.dims.node, -1)
        elif sigtype.lower() == 'geometric':
            sigma_levels = np.repeat(self.sigma_geometric(nlev, sigpow), self.dims.node).reshape(self.dims.node, -1)
        elif sigtype.lower() == 'tanh':
            sigma_levels = np.repeat(self.sigma_tanh(nlev, dl, du), self.dims.node).reshape(self.dims.node, -1)
        else:
            raise ValueError('Unrecognised sigtype {} (is it supported?)'.format(sigtype))

        # Create a sigma layer variable (i.e. midpoint in the sigma levels).
        sigma_layers = sigma_levels[:, 0:-1] + (np.diff(sigma_levels, axis=1) / 2)

        self.sigma.type = sigtype
        self.sigma.layers = sigma_layers
        self.sigma.levels = sigma_levels
        self.sigma.layers_center = nodes2elems(self.sigma.layers.T, self.grid.triangles).T
        self.sigma.levels_center = nodes2elems(self.sigma.levels.T, self.grid.triangles).T
        if sigtype.lower() == 'geometric':
            self.sigma.power = sigpow

        if sigtype.lower() == 'generalized':
            self.sigma.upper_layer_depth = du
            self.sigma.lower_layer_depth = dl
            # Has to be indexable as we assume transition_depth is in Model.write_sigma. We do so because if we're
            # generating the transition depth, it'll be an array and we only want the value of the array rather than
            # its entirety as a string.
            self.sigma.transition_depth = [min_constant_depth]
            self.sigma.total_upper_layers = ku
            self.sigma.total_lower_layers = kl
            self.sigma.upper_layer_thickness = zku
            self.sigma.lower_layer_thickness = zkl

        # Make some depth-resolved sigma distributions.
        self.sigma.layers_z = self.grid.h[:, np.newaxis] * self.sigma.layers
        self.sigma.layers_center_z = self.grid.h_center[:, np.newaxis] * self.sigma.layers_center
        self.sigma.levels_z = self.grid.h[:, np.newaxis] * self.sigma.levels
        self.sigma.levels_center_z = self.grid.h_center[:, np.newaxis] * self.sigma.levels_center

        # Make some dimensions
        self.dims.levels = nlev
        self.dims.layers = self.dims.levels - 1

        # Print the sigma file configuration we've parsed.
        if noisy:
            # Should be present in all sigma files.
            print('nlev\t{:d}\n'.format(nlev))
            print('sigtype\t{}\n'.format(sigtype))

            # Only present in geometric sigma files.
            if sigtype.lower() == 'geometric':
                print('sigpow\t{:f}\n'.format(sigpow))

            # Only in the generalised sigma files.
            if sigtype.lower() == 'generalized':
                print('du\t{:f}\n'.format(du))
                print('dl\t{:f}\n'.format(dl))
                print('min_constant_depth\t{:f}\n'.format(min_constant_depth))
                print('ku\t{:d}\n'.format(ku))
                print('kl\t{:d}\n'.format(kl))
                print('zku\t{}\n'.format(zku))
                print('zkl\t{}\n'.format(zkl))

        # Update the open boundaries.
        self._update_open_boundaries()

    def sigma_generalized(self, levels, dl, du, h, hmin):
        """
        Generate a generalised sigma coordinate distribution.

        Parameters
        ----------
        levels : int
            Number of sigma levels.
        dl : float
            The lower depth boundary from the bottom, down to which the layers are uniform thickness.
        du : float
            The upper depth boundary from the surface, up to which the layers are uniform thickness.
        h : float
            Water depth (positive down).
        hmin : float
            Minimum water depth (positive down).

        Returns
        -------
        dist : np.ndarray
            Generalised vertical sigma coordinate distribution.

        """

        # Make sure we have positive down depths by nuking negatives.
        h = np.abs(h)
        hmin = np.abs(hmin)
        if h > hmin:
            # Hyperbolic tangent for deep areas
            dist = self.sigma_tanh(levels, dl, du)
        else:
            # Uniform for shallow areas
            dist = self.sigma_geometric(levels, 1)

        return dist

    @staticmethod
    def sigma_geometric(levels, p_sigma):
        """
        Generate a geometric sigma coordinate distribution.

        Parameters
        ----------
        levels : int
            Number of sigma levels.
        p_sigma : float
            Power value. 1 for uniform sigma layers, 2 for parabolic function. See page 308-309 in the FVCOM manual
            for examples.

        Returns
        -------
        dist : np.ndarray
            Geometric vertical sigma coordinate distribution.
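
        Notes
        -----
        For example, `sigma_geometric(5, 1)' gives the uniform distribution [0, -0.25, -0.5, -0.75, -1].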

        """

        dist = np.empty(levels) * np.nan

        if p_sigma == 1:
            for k in range(1, levels + 1):
                dist[k - 1] = -((k - 1) / (levels - 1))**p_sigma
        else:
            split = int(np.floor((levels + 1) / 2))
            for k in range(split):
                dist[k] = -(k / ((levels + 1) / 2 - 1))**p_sigma / 2
            # Mirror the first half to make the second half of the parabola. We need to offset by one if we've got an
            # odd number of levels.
            if levels % 2 == 0:
                dist[split:] = -(1 - -dist[:split])[::-1]
            else:
                dist[split:] = -(1 - -dist[:split - 1])[::-1]

        return dist
    @staticmethod
    def sigma_tanh(levels, dl, du):
        """
        Generate a hyperbolic tangent vertical sigma coordinate distribution.

        Parameters
        ----------
        levels : int
            Number of sigma levels (layers + 1)
        dl : float
            The lower depth boundary from the bottom down to which the coordinates are parallel with uniform thickness.
        du : float
            The upper depth boundary from the surface up to which the coordinates are parallel with uniform thickness.

        Returns
        -------
        dist : np.ndarray
            Hyperbolic tangent vertical sigma coordinate distribution.
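
        Notes
        -----
        With kbm1 = levels - 1, each level k is computed as

            dist[k] = (tanh((dl + du) * (kbm1 - k) / kbm1 - dl) + tanh(dl)) / (tanh(dl) + tanh(du)) - 1

        which runs from 0 at the surface (k = 0) to -1 at the bed (k = kbm1), concentrating resolution within
        `du' of the surface and `dl' of the bed.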

        """

        kbm1 = levels - 1

        dist = np.zeros(levels)

        # Loop has to go to kbm1 + 1 (or levels) since python ranges stop before the end point.
        for k in range(1, levels):
            x1 = dl + du
            x1 = x1 * (kbm1 - k) / (kbm1)
            x1 = x1 - dl
            x1 = np.tanh(x1)
            x2 = np.tanh(dl)
            x3 = x2 + np.tanh(du)
            # k'th position starts from 1 which is right because we want the initial value to be zero for sigma levels.
            dist[k] = (x1 + x2) / x3 - 1

        return dist

    def hybrid_sigma_coordinate(self, levels, transition_depth, upper_layer_depth, lower_layer_depth,
                                total_upper_layers, total_lower_layers, noisy=False):
        """
        Create a hybrid vertical coordinate system.

        Parameters
        ----------
        levels : int
            Number of vertical levels.
        transition_depth : float
            Transition depth of the hybrid coordinates
        upper_layer_depth : float
            Upper water boundary thickness (metres)
        lower_layer_depth : float
            Lower water boundary thickness (metres)
        total_upper_layers : int
            Number of layers in the DU water column
        total_lower_layers : int
            Number of layers in the DL water column

        Populates
        ---------
        self.dims.layers : int
            Number of sigma layers.
        self.dims.levels : int
            Number of sigma levels.
        self.sigma.levels : np.ndarray
            Sigma levels at the nodes
        self.sigma.layers : np.ndarray
            Sigma layers at the nodes
        self.sigma.levels_z : np.ndarray
            Water depth levels at the nodes
        self.sigma.layers_z : np.ndarray
            Water depth layers at the nodes
        self.sigma.levels_center : np.ndarray
            Sigma levels at the elements
        self.sigma.layers_center : np.ndarray
            Sigma layers at the elements
        self.sigma.levels_center_z : np.ndarray
            Water depth levels at the elements
        self.sigma.layers_center_z : np.ndarray
            Water depth layers at the elements
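
        Example
        -------
        A sketch with illustrative values (21 levels, upper and lower boundary regions of five layers each
        totalling 5 metres, and a first guess of 100 metres for the transition depth):

        >>> model.hybrid_sigma_coordinate(levels=21, transition_depth=100, upper_layer_depth=5,
        >>>                               lower_layer_depth=5, total_upper_layers=5, total_lower_layers=5)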

        """

        # Make an object to store the sigma data.
        self.sigma = PassiveStore()

        self.dims.levels = levels
        self.dims.layers = self.dims.levels - 1

        # Optimise the transition depth to minimise the error between the uniform region and the hybrid region.
        if noisy:
            print('Optimising the hybrid coordinates... ')
        upper_layer_thickness = np.repeat(upper_layer_depth / total_upper_layers, total_upper_layers)
        lower_layer_thickness = np.repeat(lower_layer_depth / total_lower_layers, total_lower_layers)
        optimisation_settings = {'maxfun': 5000, 'maxiter': 5000, 'ftol': 10e-5, 'xtol': 1e-7}
        fparams = lambda depth_guess: self.__hybrid_coordinate_hmin(depth_guess, self.dims.levels,
                                                                    upper_layer_depth, lower_layer_depth,
                                                                    total_upper_layers, total_lower_layers,
                                                                    upper_layer_thickness, lower_layer_thickness)
        optimised_depth = scipy.optimize.fmin(func=fparams, x0=transition_depth, disp=False, **optimisation_settings)
        min_error = transition_depth - optimised_depth  # this isn't right
        self.sigma.transition_depth = optimised_depth

        if noisy:
            print('Hmin found {} with a maximum error in vertical distribution of {} metres\n'.format(optimised_depth,
                                                                                                      min_error))

        # Calculate the sigma level distributions at each grid node.
        sigma_levels = np.empty((self.dims.node, self.dims.levels)) * np.nan
        for i in range(self.dims.node):
            sigma_levels[i, :] = self.sigma_generalized(levels, lower_layer_depth, upper_layer_depth,
                                                        self.grid.h[i], optimised_depth)

        # Create a sigma layer variable (i.e. midpoint in the sigma levels).
        sigma_layers = sigma_levels[:, 0:-1] + (np.diff(sigma_levels, axis=1) / 2)

        # Add to the grid object.
        self.sigma.type = 'GENERALIZED'  # hybrid is a special case of generalised vertical coordinates
        self.sigma.upper_layer_depth = upper_layer_depth
        self.sigma.lower_layer_depth = lower_layer_depth
        self.sigma.total_upper_layers = total_upper_layers
        self.sigma.total_lower_layers = total_lower_layers
        self.sigma.upper_layer_thickness = upper_layer_thickness
        self.sigma.lower_layer_thickness = lower_layer_thickness
        self.sigma.layers = sigma_layers
        self.sigma.levels = sigma_levels
        # Transpose on the way in and out so the slicing within nodes2elems works properly.
        self.sigma.layers_center = nodes2elems(self.sigma.layers.T, self.grid.triangles).T
        self.sigma.levels_center = nodes2elems(self.sigma.levels.T, self.grid.triangles).T

        # Make some depth-resolved sigma distributions.
        self.sigma.layers_z = self.grid.h[:, np.newaxis] * self.sigma.layers
        self.sigma.layers_center_z = self.grid.h_center[:, np.newaxis] * self.sigma.layers_center
        self.sigma.levels_z = self.grid.h[:, np.newaxis] * self.sigma.levels
        self.sigma.levels_center_z = self.grid.h_center[:, np.newaxis] * self.sigma.levels_center

        # Update the open boundaries.
        self._update_open_boundaries()

    def __hybrid_coordinate_hmin(self, h, levels, du, dl, ku, kl, zku, zkl):
        """
        Helper function to find the relevant minimum depth.

        Parameters
        ----------
        h : float
            Transition depth of the hybrid coordinates?
        levels : int
            Number of vertical levels (layers + 1)
        du : float
            Upper water boundary thickness (metres)
        dl : float
            Lower water boundary thickness (metres)
        ku : int
            Layer number in the water column of DU
        kl : int
            Layer number in the water column of DL
        zku : np.ndarray
            Thickness of each layer in the upper water column (metres)
        zkl : np.ndarray
            Thickness of each layer in the lower water column (metres)

        Returns
        -------
        zz : float
            Minimum water depth.

        """
        # This is essentially identical to self.sigma_tanh, so we should probably just use that instead.
        z0 = self.sigma_tanh(levels, du, dl)
        z2 = np.zeros(levels)

        # s-coordinates
        x1 = (h - du - dl)
        x2 = x1 / h
        dr = x2 / (levels - ku - kl - 1)

        for k in range(1, ku + 1):
            z2[k] = z2[k - 1] - (zku[k - 1] / h)

        for k in range(ku + 2, levels - kl):
            z2[k] = z2[k - 1] - dr

        kk = 0
        for k in range(levels - kl + 1, levels):
            kk += 1
            z2[k] = z2[k - 1] - (zkl[kk] / h)

        zz = np.max(h * z0 - h * z2)

        return zz

    def write_sigma(self, sigma_file):
        """
        Write the sigma distribution to file.

        Parameters
        ----------
        sigma_file : str, pathlib.Path
            Path to which to save sigma data.
        Notes
        -----
        TODO: Add support for writing all the sigma file formats.
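
        For generalised coordinates the output (values here illustrative) looks like:

            NUMBER OF SIGMA LEVELS = 21
            SIGMA COORDINATE TYPE = GENERALIZED
            DU = 25.0
            DL = 25.0
            MIN CONSTANT DEPTH =      100.0
            KU = 5
            KL = 5
            ZKU =  5.0 5.0 5.0 5.0 5.0
            ZKL =  5.0 5.0 5.0 5.0 5.0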
        """

        if isinstance(sigma_file, str):
            sigma_file = Path(sigma_file)

        with sigma_file.open('w') as f:
            # All types of sigma distribution have the two following lines.
            f.write('NUMBER OF SIGMA LEVELS = {:d}\n'.format(self.dims.levels))
            f.write('SIGMA COORDINATE TYPE = {}\n'.format(self.sigma.type))
            if self.sigma.type.lower() == 'generalized':
                f.write('DU = {:4.1f}\n'.format(self.sigma.upper_layer_depth))
                f.write('DL = {:4.1f}\n'.format(self.sigma.lower_layer_depth))
                # Why do we go to all the trouble of finding the transition depth only to round it anyway?
                f.write('MIN CONSTANT DEPTH = {:10.1f}\n'.format(np.round(self.sigma.transition_depth[0])))  # don't like the [0]
                f.write('KU = {:d}\n'.format(self.sigma.total_upper_layers))
                f.write('KL = {:d}\n'.format(self.sigma.total_lower_layers))
                # Add the thicknesses with a loop.
                f.write('ZKU = ')
                for ii in self.sigma.upper_layer_thickness:
                    f.write('{:4.1f}'.format(ii))
                f.write('\n')
                f.write('ZKL = ')
                for ii in self.sigma.lower_layer_thickness:
                    f.write('{:4.1f}'.format(ii))
                f.write('\n')
            elif self.sigma.type.lower() == 'geometric':
                f.write('SIGMA POWER = {:.1f}\n'.format(self.sigma.power))

    def add_open_boundaries(self, obc_file, reload=False):
        """
        Add open boundaries from a given FVCOM-formatted open boundary file.

        Parameters
        ----------
        obc_file : str, pathlib.Path
            FVCOM open boundary specification file.
        reload : bool
            Set to True to overwrite any automatically or already loaded open boundary nodes. Defaults to False.

        """

        try:
            open_bds = np.asarray([np.any(this_bd) for this_bd in self.grid.open_boundary_nodes])
        except TypeError:
            open_bds = self.grid.open_boundary_nodes
        if np.any(open_bds) and np.any(self.grid.types) and not reload:
            # We've already got some, so warn and return.
            warn('Open boundary nodes already loaded and reload set to False.')
            return
        else:
            self.grid.open_boundary_nodes, self.grid.types, _ = read_fvcom_obc(str(obc_file))

    def write_sponge(self, sponge_file):
        """
        Write out the sponge data to an FVCOM-formatted ASCII file.

        Parameters
        ----------
        sponge_file : str, pathlib.Path
            Path to the file to create.
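
        Notes
        -----
        The output is plain ASCII: a header line (`Sponge Node Number = n') followed by one line per sponge
        node giving the node number (1-based), the sponge radius and the sponge damping coefficient.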

        """

        if isinstance(sponge_file, str):
            sponge_file = Path(sponge_file)

        # Work through all the open boundary objects collecting all the information we need and then dump that to file.
        radius = []
        coefficient = []
        nodes = []
        for boundary in self.open_boundaries:
            radius += boundary.sponge_radius.tolist()
            coefficient += boundary.sponge_coefficient.tolist()
            nodes += boundary.nodes

        # I feel like this should be in self.dims.
        number_of_nodes = len(radius)

        with sponge_file.open('w') as f:
            f.write('Sponge Node Number = {:d}\n'.format(number_of_nodes))
            for node in zip([i + 1 for i in nodes], radius, coefficient):
                f.write('{} {:.6f} {:.6f}\n'.format(*node))