Commit a4d12cc4 authored by Pierre Cazenave

Add script to read the model input and create heatflux forcing for the test domain.

parent b6cab62b
% Create the heat flux data for the FVCOM Estuary example.
%
% This requires:
% - my fork of the fvcom-toolbox
% > https://gitlab.em.pml.ac.uk/pica/fvcom-toolbox
%
% Author(s):
% Pierre Cazenave (Plymouth Marine Laboratory)
%
% Revision history:
%
% 2015-01-12 Create new version based on the Irish Sea model
% make_model_input.m script.
matlabrc
close all
clc
global ftbverbose
ftbverbose = 1; % be noisy
addpath('/users/modellers/pica/Code/fvcom-toolbox/utilities')
addpath('/users/modellers/pica/Code/fvcom-toolbox/fvcom_prepro/')
addpath('/users/modellers/rito/matlab/air-sea')
%%%------------------------------------------------------------------------
%%% INPUT CONFIGURATION
%%%
%%% Define the model input parameters here e.g. number of tidal components,
%%% type of boundary forcing, estimated current velocity, sponge layer
%%% coefficient and radius, forcing source etc.
%%%
%%%------------------------------------------------------------------------
inputConf.base = '/data/medusa/pica/models/FVCOM/fvcom-examples/Estuary/';
% Which version of FVCOM are we using (for the forcing file formats)?
inputConf.FVCOM_version = '3.1.6';
%%%------------------------------------------------------------------------
%%% Time stuff
%%%------------------------------------------------------------------------
% Model time ([Y, M, D, h, m, s])
inputConf.modelYear = 2013;
inputConf.startDate = [inputConf.modelYear, 04, 01, 00, 00, 00];
inputConf.endDate = [inputConf.modelYear, 05, 01, 00, 00, 00];
% Surface sampling frequency (hours).
inputConf.sampling.surface = 1;
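% With a value of 1 the interpolated surface forcing generated below is
% hourly (the value is divided by 24 further down to give a time step in
% days).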
%%%------------------------------------------------------------------------
%%% Spatial stuff
%%%------------------------------------------------------------------------
% Case name for the model inputs and outputs
inputConf.casename = 'tst';
inputConf.coordType = 'cartesian'; % 'cartesian' or 'spherical'
% Proj conversion struct.
fig = figure;
inputConf.proj = gcm(axesm('tranmerc'));
close(fig); clear fig
inputConf.proj.falsenorthing = 0;
inputConf.proj.falseeasting = 900000;
inputConf.proj.geoid = [6378137 0.081819191042815];
% inputConf.proj.mapparallels = 40.50; % what should this be for the test case?
% inputConf.proj.nparallels = 1; % what should this be for the test case?
inputConf.proj.origin = [42+(50/60) -(70+(10/60)) 0]; % 42°50'N, 70°10'W
inputConf.proj.scalefactor = 0.9999666666666667;
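% Optional sanity check of the projection defined above (a minimal
% sketch, not part of the original workflow): project the grid origin
% forward and invert it again; the round trip should recover the same
% latitude and longitude to within rounding error.
% [chkx, chky] = projfwd(inputConf.proj, inputConf.proj.origin(1), inputConf.proj.origin(2));
% [chklat, chklon] = projinv(inputConf.proj, chkx, chky);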
% Sigma layer definition file.
inputConf.sigma_file = fullfile(inputConf.base, 'tstinp', 'sigma.dat');
% Give some names to the boundaries. This must match the number of node
% strings defined in SMS. Ideally, the order of the names should match the
% order in which the boundaries were made in SMS.
inputConf.boundaryNames = {'West'}; % Number is (relatively) important!
%%%------------------------------------------------------------------------
%%% Model constants and stuff
%%%------------------------------------------------------------------------
% Type of open boundary treatment (see Table 6.1 (?) in the manual).
% 1 - Active (ASL): sea level is specified at the open boundary.
% 2 - Clamped: zeta = 0 at the boundary (Beardsley and Haidvogel, 1981).
% 3 - Implicit Gravity Wave Radiation.
% 4 - Partial Clamped Gravity Wave Radiation (Blumberg and Kantha, 1985).
% 5 - Explicit Orlanski Radiation (Orlanski, 1976; Chapman, 1985)
inputConf.obc_type = 1;
%%%------------------------------------------------------------------------
%%% Forcing and stuff
%%%------------------------------------------------------------------------
% Surface heat fluxes. Currently only 'NCEP' heat fluxes work with
% HEATING_CALCULATED.
inputConf.surface_heat = 'NCEP';
%%%------------------------------------------------------------------------
%%% END OF INPUT CONFIGURATION
%%%------------------------------------------------------------------------
%% Process the grid files
% Read the input mesh and bathymetry. Also creates the data necessary for
% the Coriolis correction in FVCOM.
Mobj = read_fvcom_mesh(fullfile(...
inputConf.base, 'tstinp/', [inputConf.casename, '_grd.dat']));
Mobj.h = read_fvcom_bath(fullfile(...
inputConf.base, 'tstinp/', [inputConf.casename, '_dep.dat']));
Mobj = read_fvcom_obc(Mobj, fullfile(...
inputConf.base, 'tstinp/', [inputConf.casename, '_obc.dat']));
% Add grid metrics.
Mobj = setup_metrics(Mobj);
% Parse the open boundary nodes and add accordingly.
if Mobj.have_strings
for i = 1:size(Mobj.read_obc_nodes, 2)
nodeList = double(cell2mat(Mobj.read_obc_nodes(i)));
Mobj = add_obc_nodes_list(Mobj, nodeList, inputConf.boundaryNames{i}, inputConf.obc_type);
end
clear nodeList
end
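% Optional check (not in the original script): the number of node strings
% read from the _obc.dat file should match the number of boundary names
% given in the configuration above.
% assert(numel(Mobj.read_obc_nodes) == numel(inputConf.boundaryNames))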
% Read the sigma layer distribution (the vertical grid) from the sigma definition file.
Mobj = read_sigma(Mobj, inputConf.sigma_file);
% Convert the grid from projected (cartesian) coordinates to spherical (lat/lon).
Mobj.have_lonlat = true;
[Mobj.lat, Mobj.lon] = projinv(inputConf.proj, Mobj.x, Mobj.y);
%% Prepare the heat flux forcing data
inputConf.startDateMJD = greg2mjulian(inputConf.startDate(1), ...
inputConf.startDate(2), ...
inputConf.startDate(3), ...
inputConf.startDate(4), ...
inputConf.startDate(5), ...
inputConf.startDate(6));
inputConf.endDateMJD = greg2mjulian(inputConf.endDate(1), ...
inputConf.endDate(2), ...
inputConf.endDate(3), ...
inputConf.endDate(4), ...
inputConf.endDate(5), ...
inputConf.endDate(6));
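% As a rough check (assuming the standard MJD epoch of 1858-11-17),
% 2013-04-01 00:00 UTC corresponds to MJD 56383 and 2013-05-01 00:00 UTC
% to MJD 56413, so the two values above should differ by 30 days.
% assert(inputConf.endDateMJD - inputConf.startDateMJD == 30)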
% The output directory will contain some duplicate files (e.g. the model
% grid) but this makes things easier to manage.
inputConf.outbase = fullfile(inputConf.base, 'tstinp');
% Get the surface heating data.
if strcmpi(inputConf.surface_heat, 'NCEP')
% Use the OPeNDAP NCEP script (get_NCEP_forcing.m) to get the
% following parameters:
% - Downward longwave radiation at the surface (dlwrf) [W/m^2]
% - Downward shortwave radiation at the surface (dswrf) [W/m^2]
% - Air temperature (air) [celsius]
% - Relative humidity (rhum) [%]
% - Sea level pressure (pres) [Pa]
%
% The script converts the NCEP data from the OPeNDAP server from
% longitudes in the 0 to 360 range to the -180 to 180 range. It
% also subsets for the right region (defined by Mobj.lon and
% Mobj.lat).
heating = get_NCEP_forcing(Mobj, ...
[inputConf.startDateMJD, inputConf.endDateMJD], ...
'varlist', {'dlwrf', 'dswrf', 'air', 'rhum', 'pres'});
heating.domain_cols = length(heating.lon);
heating.domain_rows = length(heating.lat);
if isfield(heating, 'rhum')
heating.domain_cols_alt = length(heating.rhum.lon);
heating.domain_rows_alt = length(heating.rhum.lat);
end
% Convert the small subdomain into cartesian coordinates. We need
% to do this twice because some of the NCEP data are on different
% grids (e.g. sea level pressure, relative humidity etc.).
[tmpLon, tmpLat] = meshgrid(heating.lon, heating.lat);
[heating.x, heating.y] = projfwd(inputConf.proj, tmpLat, tmpLon);
if isfield(heating, 'rhum')
[tmpLon2, tmpLat2] = meshgrid(heating.rhum.lon, heating.rhum.lat);
[heating.xalt, heating.yalt] = projfwd(inputConf.proj, tmpLat2, tmpLon2);
end
clear tmpLon tmpLat tmpLon2 tmpLat2 tmpZone
% Create arrays of the x and y positions.
heating.x = reshape(heating.x, heating.domain_rows, heating.domain_cols);
heating.y = reshape(heating.y, heating.domain_rows, heating.domain_cols);
if isfield(heating, 'rhum')
heating.xalt = reshape(heating.xalt, heating.domain_rows_alt, heating.domain_cols_alt);
heating.yalt = reshape(heating.yalt, heating.domain_rows_alt, heating.domain_cols_alt);
end
[heating.lon, heating.lat] = meshgrid(heating.lon, heating.lat);
heating = rmfield(heating, {'domain_rows', 'domain_cols'});
if isfield(heating, 'rhum')
heating = rmfield(heating, {'domain_rows_alt', 'domain_cols_alt'});
end
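% At this point heating.lon, heating.lat, heating.x and heating.y should
% all be 2D arrays of the same (latitude by longitude) size covering the
% NCEP subdomain, e.g. (a hypothetical check, not in the original):
% assert(isequal(size(heating.x), size(heating.lon), size(heating.lat)))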
% Increase the time sampling of the data (to hourly) whilst maintaining
% the spatial structure. Only the shortwave heating is treated specially:
% calculate the theoretical instantaneous irradiance (as calculated in
% ERSEM) and use its diurnal shape to spread each day's NCEP maximum over
% the day. The other variables are interpolated to the same time sampling
% so that the grid can still be interpolated as before with grid2fvcom.
% Loop through each day.
MJD = inputConf.startDateMJD:inputConf.endDateMJD;
loopN = 0;
DTinterp = inputConf.sampling.surface / 24; % in days
% Preallocate the new irradiance array for the number of days
% we have.
dswrf = nan(size(heating.dswrf.data, 1), size(heating.dswrf.data, 2), (length(MJD) - 1) * 24);
swtimes = nan((length(MJD) - 1) * 24, 1);
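% dswrf is assumed here to follow the same (longitude by latitude by
% time) ordering as heating.dswrf.data from get_NCEP_forcing, with one
% value per hour over the full run.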
% Turn off the verbose output for these loops.
oldftbverbose = ftbverbose;
ftbverbose = 0; %#ok<NASGU>
for dd = datenum(inputConf.startDate):datenum(inputConf.endDate)
loopN = loopN + 1;
% Find the NCEP time indices which cover this day (dd).
dayidx = find(heating.time >= MJD(loopN) & heating.time <= MJD(loopN) + 1);
if length(dayidx) > 1
Time_forcing_interp = dd:DTinterp:dd + 1;
Time_forcing_interpMJD = MJD(loopN):DTinterp:MJD(loopN) + 1;
% Drop the last time step so we have 24 samples (hourly).
Time_forcing_interp = Time_forcing_interp(1:end - 1);
Time_forcing_interpMJD = Time_forcing_interpMJD(1:end - 1);
nt = length(Time_forcing_interp);
% Preallocate the instantaneous irradiance array for this day's
% interpolated (hourly, based on the DTinterp interval) time steps.
instant_rad = nan(nt, 1);
% The theoretical irradiance varies with latitude and time only (not
% longitude), so calculate the diurnal curve once per row of latitude
% and then loop through the longitudes in that row, scaling the
% normalised curve by each position's daily maximum from the NCEP
% data.
for i = 1:size(heating.lon, 1) % latitudes
for tt = 1:length(Time_forcing_interp)
% Calculation done without any clouds.
[instant_rad(tt), ~] = calc_rad(...
0, heating.lat(i, 1), ...
floor(Time_forcing_interp(tt)), ...
(Time_forcing_interp(tt) - ...
floor(Time_forcing_interp(tt))) * 24 * 60 * 60);
end
% Build a normalised day curve (0 at night, 1 at the daily peak) for
% this day and this latitude.
norm_data = (instant_rad - min(instant_rad)) / ...
(max(instant_rad) - min(instant_rad));
% Now that we have a normalised curve for this latitude, scale each
% position in this row by that shape.
for j = 1:size(heating.lon, 2) % longitudes
% Find this day's maximum shortwave value (over the dayidx time indices).
imax = max(heating.dswrf.data(j, i, dayidx(1):dayidx(end)));
dswrf(j, i, 1 + ((loopN - 1) * nt):loopN * nt) = norm_data * imax;
end
end
% Save the times
swtimes(1 + ((loopN - 1) * nt):loopN * nt) = Time_forcing_interpMJD;
end
end
% Interpolate rhum, pres, dlwrf and air to hourly values.
todo = {'rhum', 'pres', 'dlwrf', 'air'};
for ff = 1:length(todo)
% Force the data onto the 'global' position arrays. This fixes
% problems with grid2fvcom and the relative humidity grid in
% particular.
[X, Y, T] = meshgrid(heating.(todo{ff}).lon, heating.(todo{ff}).lat, heating.(todo{ff}).time);
[XI, YI, TI] = meshgrid(unique(heating.lon), unique(heating.lat), swtimes);
% Interpolate and extrapolate with the mean value of the first
% time.
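% The data arrays from get_NCEP_forcing appear to be ordered (longitude
% by latitude by time) whereas meshgrid produces (latitude by longitude
% by time) arrays, hence the permute before the interpolation and the
% permute back afterwards.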
heating.(todo{ff}).data = interp3(X, Y, T, ...
permute(heating.(todo{ff}).data, [2, 1, 3]), XI, YI, TI, ...
'linear', mean(mean(squeeze(heating.(todo{ff}).data(:, :, 1)))));
heating.(todo{ff}).data = permute(heating.(todo{ff}).data, [2, 1, 3]);
% Update the time data and positions.
heating.(todo{ff}).time = swtimes;
heating.(todo{ff}).lon = unique(heating.lon);
heating.(todo{ff}).lat = unique(heating.lat);
end
% Get rid of the alternative arrays as we don't need those anymore.
if isfield(heating, 'rhum')
heating = rmfield(heating, {'xalt', 'yalt'});
end
% Do the shortwave arrays and the main time array.
heating.time = swtimes;
heating.dswrf.time = swtimes;
heating.dswrf.data = dswrf;
% Put the ftbverbose flag back to what it was.
ftbverbose = oldftbverbose;
% Tidy up a bit.
clear swtimes dswrf % data arrays
clear norm_data i j imax Time_forcing_interp todo ...
Time_forcing_interpMJD dayidx dd instant_rad loopN xi yi tt ...
MJD DTinterp X Y T XI YI TI nt oldftbverbose ff % junk
% Interpolate the data onto the FVCOM unstructured grid.
tic
% If we've got a land mask, apply it to the heat flux data.
%heating.nshf.data(repmat(heating.land_mask, ...
% [1, 1, size(heating.nshf.data, 3)])) = nan;
interpfields = {'dswrf', 'dlwrf', 'pres', 'air', 'rhum', ...
'time', 'lon', 'lat', 'x', 'y'};
heating_interp = grid2fvcom(Mobj, interpfields, heating);
if ftbverbose
fprintf('Elapsed interpolation time: %.2f minutes\n', toc / 60)
end
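% heating_interp should now hold the forcing variables interpolated onto
% the FVCOM grid (typically with values at both the nodes and the element
% centres) along with the corresponding time vector, ready to be written
% out below.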
forcing = heating_interp;
% Write out the surface forcing data to netCDF.
if exist('forcing', 'var')
forcingBase = fullfile(inputConf.outbase, inputConf.casename);
write_FVCOM_forcing(Mobj, forcingBase, forcing, ...
'NCEP atmospheric heat flux forcing data', ...
inputConf.FVCOM_version);
end
end