% Create the heat flux data for the FVCOM Estuary example.
%
% This requires:
%   - my fork of the fvcom-toolbox
%       > https://gitlab.em.pml.ac.uk/pica/fvcom-toolbox
%
% Author(s):
%   Pierre Cazenave (Plymouth Marine Laboratory)
%
% Revision history:
%
%   2015-01-12 Create new version based on the Irish Sea model
%   make_model_input.m script.

% Reset the environment so the script always starts from a clean state.
matlabrc
close all
clc

% The fvcom-toolbox functions use this global flag to control verbosity.
global ftbverbose
ftbverbose = 1; % be noisy

% Paths to the fvcom-toolbox and air-sea toolbox dependencies.
addpath('/users/modellers/pica/Code/fvcom-toolbox/utilities')
addpath('/users/modellers/pica/Code/fvcom-toolbox/fvcom_prepro/')
addpath('/users/modellers/rito/matlab/air-sea')

%%%------------------------------------------------------------------------
%%%                          INPUT CONFIGURATION
%%%
%%% Define the model input parameters here e.g. number of tidal components,
%%% type of boundary forcing, estimated current velocity, sponge layer
%%% coefficient and radius, forcing source etc.
%%%
%%%------------------------------------------------------------------------

% Root directory of the FVCOM Estuary example model.
inputConf.base = '/data/medusa/pica/models/FVCOM/fvcom-examples/Estuary/';

% Which version of FVCOM are we using (for the forcing file formats)?
inputConf.FVCOM_version = '3.1.6';

%%%------------------------------------------------------------------------
%%%                             Time stuff
%%%------------------------------------------------------------------------

% Model time ([Y, M, D, h, m, s])
inputConf.modelYear = 2013;
inputConf.startDate = [inputConf.modelYear, 04, 01, 00, 00, 00];
inputConf.endDate = [inputConf.modelYear, 12, 01, 00, 00, 00];

% Surface sampling interval. NOTE(review): this value is divided by 24
% when building the interpolation time step (DTinterp, "in days"), so the
% units here are hours (1 = hourly samples), not days as previously
% documented - confirm before changing it.
inputConf.sampling.surface = 1;

%%%------------------------------------------------------------------------
%%%                           Spatial stuff
%%%------------------------------------------------------------------------

% Case name for the model inputs and outputs
inputConf.casename = 'tst';

inputConf.coordType = 'cartesian'; % 'cartesian' or 'spherical'
% Proj conversion struct. Opening a throwaway figure with a transverse
% Mercator map axes (axesm) is a trick to get a template Mapping Toolbox
% projection struct from gcm, which is then customised below.
fig = figure;
inputConf.proj = gcm(axesm('tranmerc'));
close(fig); clear fig
inputConf.proj.falsenorthing = 0;
inputConf.proj.falseeasting = 900000;
% Ellipsoid as [semi-major axis (m), eccentricity]; these values match
% WGS84 - TODO confirm that is the intended datum for this grid.
inputConf.proj.geoid = [6378137 0.081819191042815];
% inputConf.proj.mapparallels = 40.50; % what should this be for the test case?
% inputConf.proj.nparallels = 1; % what should this be for the test case?
% Projection origin as [latitude, longitude, orientation] in degrees.
inputConf.proj.origin = [42+(50/60) -70+(10/60) 0];
inputConf.proj.scalefactor = 0.9999666666666667;

% Sigma layer definition file.
inputConf.sigma_file = fullfile(inputConf.base, 'tstinp', 'sigma.dat');

% Give some names to the boundaries. This must match the number of node
% strings defined in SMS. Ideally, the order of the names should match the
% order in which the boundaries were made in SMS.
inputConf.boundaryNames = {'West'}; % Number is (relatively) important!

%%%------------------------------------------------------------------------
%%%                     Model constants and stuff
%%%------------------------------------------------------------------------

% Type of open boundary treatment (see Table 6.1 (?) in the manual).
% 1 - Active (ASL): sea level is specified at the open boundary.
% 2 - Clamped: zeta = 0 at the boundary (Beardsley and Haidvogel, 1981).
% 3 - Implicit Gravity Wave Radiation.
% 4 - Partial Clamped Gravity Wave Radiation (Blumberg and Kantha, 1985).
% 5 - Explicit Orlanski Radiation (Orlanski, 1976; Chapman, 1985)
inputConf.obc_type = 1;

%%%------------------------------------------------------------------------
%%%                          Forcing and stuff
%%%------------------------------------------------------------------------

% Surface heat fluxes. Currently only 'NCEP' heat fluxes work with
% HEATING_CALCULATED.
inputConf.surface_heat = 'NCEP';

%% Process the grid files

% Read the input mesh and bathymetry. Also creates the data necessary for
% the Coriolis correction in FVCOM.
Mobj = read_fvcom_mesh(fullfile(...
    inputConf.base, 'tstinp/', [inputConf.casename, '_grd.dat']));

% Water depths at the grid nodes.
Mobj.h = read_fvcom_bath(fullfile(...
    inputConf.base, 'tstinp/', [inputConf.casename, '_dep.dat']));

% Open boundary node strings.
Mobj = read_fvcom_obc(Mobj, fullfile(...
    inputConf.base, 'tstinp/', [inputConf.casename, '_obc.dat']));

% Add grid metrics.
Mobj = setup_metrics(Mobj);

% Parse the open boundary nodes and add accordingly. Each node string is
% registered with the name and boundary type configured above.
if Mobj.have_strings
    for i = 1:size(Mobj.read_obc_nodes, 2)
        nodeList = double(cell2mat(Mobj.read_obc_nodes(i)));
        Mobj = add_obc_nodes_list(Mobj, nodeList, inputConf.boundaryNames{i}, inputConf.obc_type);
    end
    clear nodeList
end

% Get the sigma depths in order to interpolate from the POLCOMS depths
Mobj = read_sigma(Mobj, inputConf.sigma_file);

% Convert the grid from projected (cartesian) to spherical coordinates
% using the Mapping Toolbox inverse projection.
Mobj.have_lonlat = true;
[Mobj.lat, Mobj.lon] = projinv(inputConf.proj, Mobj.x, Mobj.y);

%% Prepare the heat flux forcing data

% Convert the Gregorian start and end dates ([Y, M, D, h, m, s]) to
% Modified Julian Day, expanding each vector into the six separate
% arguments greg2mjulian expects.
dateArgs = num2cell(inputConf.startDate);
inputConf.startDateMJD = greg2mjulian(dateArgs{:});
dateArgs = num2cell(inputConf.endDate);
inputConf.endDateMJD = greg2mjulian(dateArgs{:});
clear dateArgs

% Output directory will contain some duplicate files (e.g. model grid
% etc.) but this makes things easier to manage. One subdirectory per
% month of the year in question.
inputConf.outbase = fullfile(inputConf.base, 'tstinp');

% Get the surface heating data.
if strcmpi(inputConf.surface_heat, 'NCEP')
    % Use the OPeNDAP NCEP script (get_NCEP_forcing.m) to get the
    % following parameters:
    %     - Downward longwave radiation surface (dlwrf) [W/m^2]
    %     - Downward shortwave radiation surface (dswrf) [W/m^2]
    %     - Air temperature (air) [celsius]
    %     - Relative humidity (rhum) [%]
    %     - Sea level pressure (pres) [Pa]
    %     - Wind (u and v) (uwnd, vwnd) [ms^{-1}]
    %
    % The script converts the NCEP data from the OPeNDAP server from
    % longitudes in the 0 to 360 range to the -180 to 180 range. It
    % also subsets for the right region (defined by Mobj.lon and
    % Mobj.lat).
    heating = get_NCEP_forcing(Mobj, ...
        [inputConf.startDateMJD, inputConf.endDateMJD], ...
        'varlist', {'dlwrf', 'dswrf', 'air', 'rhum', 'pres', 'uwnd', 'vwnd'});

    % Dimensions of the coarse NCEP subdomain grid. Some variables (e.g.
    % relative humidity) come on a different ("alternative") grid whose
    % dimensions are tracked separately.
    heating.domain_cols = length(heating.lon);
    heating.domain_rows = length(heating.lat);
    if isfield(heating, 'rhum')
        heating.domain_cols_alt = length(heating.rhum.lon);
        heating.domain_rows_alt = length(heating.rhum.lat);
    end

    % Convert the small subdomain into cartesian coordinates. We need
    % to do this twice because some of the NCEP data are on different
    % grids (e.g. sea level pressure, relative humidity etc.).
    [tmpLon, tmpLat] = meshgrid(heating.lon, heating.lat);
    [heating.x, heating.y] = projfwd(inputConf.proj, tmpLat, tmpLon);
    if isfield(heating, 'rhum')
        [tmpLon2, tmpLat2] = meshgrid(heating.rhum.lon, heating.rhum.lat);
        [heating.xalt, heating.yalt] = projfwd(inputConf.proj, tmpLat2, tmpLon2);
    end
    clear tmpLon tmpLat tmpLon2 tmpLat2 tmpZone
    % Reshape the projected positions into (rows, cols) 2D arrays.
    heating.x = reshape(heating.x, heating.domain_rows, heating.domain_cols);
    heating.y = reshape(heating.y, heating.domain_rows, heating.domain_cols);
    if isfield(heating, 'rhum')
        heating.xalt = reshape(heating.xalt, heating.domain_rows_alt, heating.domain_cols_alt);
        heating.yalt = reshape(heating.yalt, heating.domain_rows_alt, heating.domain_cols_alt);
    end

    % Replace the lon/lat vectors with 2D position arrays.
    [heating.lon, heating.lat] = meshgrid(heating.lon, heating.lat);

    heating = rmfield(heating, {'domain_rows', 'domain_cols'});
    if isfield(heating, 'rhum')
        heating = rmfield(heating, {'domain_rows_alt', 'domain_cols_alt'});
    end


    % Scale data appropriately in time (i.e. increase time sampling)
    % while maintaining the spatial structure. Only correct the heating
    % part: calculate theoretical instantaneous irradiance as
    % calculated in ERSEM. The other outputs are interpolated to the
    % same time sampling so that the grid can still be interpolated as
    % before with grid2fvcom.

    % Loop through each day.
    MJD = inputConf.startDateMJD:inputConf.endDateMJD;
    loopN = 0;
    DTinterp = inputConf.sampling.surface / 24; % in days

    % Preallocate the new hourly irradiance array and its time vector.
    % Only (length(MJD) - 1) days of samples are allocated: the final
    % loop iteration extends beyond the downloaded time series and is
    % skipped by the length(dayidx) > 1 check below.
    dswrf = nan(size(heating.dswrf.data, 1), size(heating.dswrf.data, 2), (length(MJD) - 1) * 24);
    swtimes = nan((length(MJD) - 1) * 24, 1);

    % Turn off the verbose output for these loops.
    oldftbverbose = ftbverbose;
    ftbverbose = 0; %#ok<NASGU>

    for dd = datenum(inputConf.startDate):datenum(inputConf.endDate)
        loopN = loopN + 1;

        % Find the NCEP time indices which cover this day (dd).
        dayidx = find(heating.time >= MJD(loopN) & heating.time <= MJD(loopN) + 1);

        if length(dayidx) > 1

            % Time steps through this day at DTinterp intervals, in both
            % datenum and Modified Julian Day form.
            Time_forcing_interp = dd:DTinterp:dd + 1;
            Time_forcing_interpMJD = MJD(loopN):DTinterp:MJD(loopN) + 1;
            % Drop the last time step so we have 24 samples (hourly).
            Time_forcing_interp = Time_forcing_interp(1:end - 1);
            Time_forcing_interpMJD = Time_forcing_interpMJD(1:end - 1);

            nt = length(Time_forcing_interp);

            % Preallocate the instantaneous irradiance series for the nt
            % (hourly) samples in this day.
            instant_rad = nan(nt, 1);

            % We need to work on rows of latitude since the irradiance
            % only varies with latitude (not longitude) and time. Since
            % we need to calculate the irradiance at each location
            % within the grid anyway (each position has a different
            % time series) we still need to loop through the
            % longitudes, but we can do the irradiance calculation only
            % on the latitude loops.

            % Loop through each position and find this day's maximum
            % and scale it to the norm_rad shape.

            for i = 1:size(heating.lon, 1) % latitudes
                for tt = 1:length(Time_forcing_interp)
                    % Calculation done without any clouds.
                    [instant_rad(tt), ~] = calc_rad(...
                        0, heating.lat(i, 1), ...
                        floor(Time_forcing_interp(tt)), ...
                        (Time_forcing_interp(tt) - ...
                        floor(Time_forcing_interp(tt))) * 24 * 60 * 60);
                end

                % Build a normalised day curve for this day and this
                % latitude.
                % NOTE(review): this expression simplifies to
                % (max - instant_rad) / (max - min), i.e. 1 at the day's
                % MINIMUM irradiance and 0 at its maximum. If calc_rad
                % returns irradiance peaking at noon this inverts the
                % diurnal cycle; (instant_rad - min(instant_rad)) /
                % (max(instant_rad) - min(instant_rad)) may be what was
                % intended - verify against calc_rad's sign convention.
                norm_data = abs((instant_rad - max(instant_rad)) / ...
                    (max(instant_rad) - min(instant_rad)));

                % Now we have a normalised curve for this latitude, we
                % can scale each position in this row by that shape.
                for j = 1:size(heating.lon, 2) % longitudes
                    % Find the maximum of this day's indices (dayidx).
                    % NOTE(review): dswrf.data appears to be indexed
                    % (longitude, latitude, time) here - confirm against
                    % get_NCEP_forcing's output ordering.
                    imax = max(heating.dswrf.data(j, i, dayidx(1):dayidx(end)));
                    dswrf(j, i, 1 + ((loopN - 1) * nt):loopN * nt) = norm_data * imax;
                end
            end
            % Save the times
            swtimes(1 + ((loopN - 1) * nt):loopN * nt) = Time_forcing_interpMJD;

        end
    end

    % Interpolate the remaining surface variables (relative humidity,
    % pressure, longwave radiation, air temperature and the two wind
    % components) onto the same hourly time series as the shortwave data.
    todo = {'rhum', 'pres', 'dlwrf', 'air', 'uwnd', 'vwnd'};
    for ff = 1:length(todo)
        % Force the data onto the 'global' position arrays. This fixes
        % problems with grid2fvcom and the relative humidity grid in
        % particular.
        [X, Y, T] = meshgrid(heating.(todo{ff}).lon, heating.(todo{ff}).lat, heating.(todo{ff}).time);
        [XI, YI, TI] = meshgrid(unique(heating.lon), unique(heating.lat), swtimes);
        % Interpolate, extrapolating out-of-range points with the spatial
        % mean of the first time step (interp3's scalar extrapolation
        % value).
        heating.(todo{ff}).data = interp3(X, Y, T, ...
            permute(heating.(todo{ff}).data, [2, 1, 3]), XI, YI, TI, ...
            'linear', mean(mean(squeeze(heating.(todo{ff}).data(:, :, 1)))));
        heating.(todo{ff}).data = permute(heating.(todo{ff}).data, [2, 1, 3]);

        % Update the time data and positions.
        heating.(todo{ff}).time = swtimes;
        heating.(todo{ff}).lon = unique(heating.lon);
        heating.(todo{ff}).lat = unique(heating.lat);
    end

    % Get rid of the alternative arrays as we don't need those anymore.
    if isfield(heating, 'rhum')
        heating = rmfield(heating, {'xalt', 'yalt'});
    end

    % Replace the coarse shortwave data and the main time record with the
    % hourly versions built above.
    heating.time = swtimes;

    heating.dswrf.time = swtimes;
    heating.dswrf.data = dswrf;

    % Put the ftbverbose flag back to what it was.
    ftbverbose = oldftbverbose;

    % Tidy up a bit.
    clear swtimes dswrf % data arrays
    clear norm_data i j imax Time_forcing_interp todo ...
        Time_forcing_interpMJD dayidx dd instant_rad loopN xi yi tt ...
        MJD DTinterp X Y T XI YI TI nt oldftbverbose ff % junk

    % Interpolate the data onto the FVCOM unstructured grid. Start the
    % timer so we can report the elapsed interpolation time afterwards.
    tic

    % If we've got a land mask, apply it to the heat flux data.
    %heating.nshf.data(repmat(heating.land_mask, ...
    %    [1, 1, size(heating.nshf.data, 3)])) = nan;

    % Fields to interpolate onto the FVCOM unstructured grid with
    % grid2fvcom (data variables plus the positions and times needed for
    % the interpolation).
    interpfields = {'dswrf', 'dlwrf', 'pres', 'air', 'rhum', ...
        'uwnd', 'vwnd', ...
        'time', 'lon', 'lat', 'x', 'y'};
    heating_interp = grid2fvcom(Mobj, interpfields, heating);
    if ftbverbose
        fprintf('Elapsed interpolation time: %.2f minutes\n', toc / 60)
    end

    forcing = heating_interp;

    % Write out the surface forcing data to netCDF.
    % NOTE(review): the netCDF title mentions MetUM but only NCEP data
    % are used here - confirm whether the title should be updated.
    if exist('forcing', 'var')
        forcingBase = fullfile(inputConf.outbase, inputConf.casename);
        write_FVCOM_forcing(Mobj, forcingBase, forcing, ...
            'Combined NCEP and MetUM atmospheric forcing data', ...
            inputConf.FVCOM_version);
    end
end