function data = get_CFS_forcing(Mobj, modelTime, varargin)
% Grab surface forcing data from the NCDC NOMADS THREDDS server for the
% Climate Forecast System Reanalysis (CFSv2/CFSR), subset to the period
% in modelTime and the extent of the supplied unstructured grid.
%
% data = get_CFS_forcing(Mobj, modelTime)
% data = get_CFS_forcing(Mobj, modelTime, 'varlist', {'uwnd', 'vwnd'})
%
% INPUT:
%   Mobj      - MATLAB mesh object. Required fields:
%                 have_lonlat - true when spherical coordinates exist
%                 lon, lat    - node positions (spherical)
%   modelTime - Modified Julian Day start and end times [start, end].
%   Optional keyword-value pairs:
%     'varlist' - cell array of short variable names to download (e.g.
%                 {'tmp2m', 'uwnd'}); any field not listed is skipped.
%                 Default: download everything.
%
% OUTPUT:
%   data - struct with one sub-struct per downloaded variable, each
%          holding 'data', 'time', 'lat' and 'lon', plus top-level
%          'time', 'lon' and 'lat' arrays (populated further down the
%          function).

subname = 'get_CFS_forcing';

global ftbverbose;
if ftbverbose
    fprintf('\nbegin : %s\n', subname)
end

% Parse the optional keyword-value arguments.
varlist = [];
if nargin > 2
    for a = 1:2:nargin - 2
        switch varargin{a}
            case 'varlist'
                varlist = varargin{a + 1};
        end
    end
end
if ftbverbose
    fprintf('Extracting CFSv2 data.\n')
end

% Work out the geographical extent of the model domain, padded so
% interpolation onto the model grid never happens right at the edge of
% the downloaded data.
if ~Mobj.have_lonlat
    error('Need spherical coordinates to extract the forcing data')
else
    % Pad by two cells in longitude and one in latitude; 0.5 degrees is
    % the approximate CFSv2 horizontal resolution.
    [dx, dy] = deal(0.5, 0.5);
    extents = [min(Mobj.lon(:)) - (2 * dx), ...
        max(Mobj.lon(:)) + (2 * dx), ...
        min(Mobj.lat(:)) - dy, ...
        max(Mobj.lat(:)) + dy];
end

% Convert the requested period to a list of unique year/month pairs so
% the data can be fetched one monthly file at a time.
[yyyy, mm, dd, HH, MM, SS] = mjulian2greg(modelTime);
assert(min(yyyy) >= 1979, 'CFSv2 data not available prior to 1979')
assert(max(yyyy) <= 2009, 'CFSv2 data not available after 2009')
% Concatenate column-wise so this works whether modelTime (and hence the
% outputs of mjulian2greg) is a row or a column vector. The previous
% form ([yyyy; mm; ...]') produced a malformed 12x1 array for column
% input and datenum then misinterpreted it.
dates = datenum([yyyy(:), mm(:), dd(:), HH(:), MM(:), SS(:)]);
serial = dates(1):dates(2);
[years, months, ~, ~, ~, ~] = datevec(serial);
[months, idx] = unique(months, 'stable');
years = years(idx);
nt = length(months);
0109
% Download each variable for each month in turn, appending along the
% time dimension as we go.
for t = 1:nt
    month = months(t);
    year = years(t);
    if ftbverbose
        fprintf('Downloading for %04d/%02d\n', year, month)
    end

    % Build the remote OPeNDAP URLs for this month. Each row maps our
    % short field name to the remote file prefix; note both wind
    % components live in the same wnd10m file but under different
    % netCDF variable names (see `names` below).
    url = 'http://nomads.ncdc.noaa.gov/thredds/dodsC/cfsr1hr/';
    field_to_file = { ...
        'dlwsfc', 'dlwsfc'; ...
        'dswsfc', 'dswsfc'; ...
        'lhtfl', 'lhtfl'; ...
        'prate', 'prate'; ...
        'pressfc', 'pressfc'; ...
        'q2m', 'q2m'; ...
        'tmp2m', 'tmp2m'; ...
        'uswsfc', 'uswsfc'; ...
        'uwnd', 'wnd10m'; ...
        'vwnd', 'wnd10m'};
    for f = 1:size(field_to_file, 1)
        ncep.(field_to_file{f, 1}) = [url, ...
            sprintf('%04d%02d/%s.gdas.%04d%02d.grb2', year, month, ...
            field_to_file{f, 2}, year, month)];
    end

    % The netCDF variable name inside each remote file.
    names.dlwsfc = 'Downward_Long-Wave_Rad_Flux';
    names.dswsfc = 'Downward_Short-Wave_Rad_Flux';
    names.lhtfl = 'Latent_heat_net_flux';
    names.prate = 'Precipitation_rate';
    names.pressfc = 'Pressure';
    names.q2m = 'Specific_humidity';
    names.tmp2m = 'Temperature';
    names.uswsfc = 'Upward_Short-Wave_Rad_Flux';
    names.uwnd = 'U-component_of_wind';
    names.vwnd = 'V-component_of_wind';

    fields = fieldnames(ncep);

    for aa = 1:length(fields)

        % Skip variables not requested via 'varlist'.
        if ~isempty(varlist) && ~any(strcmp(fields{aa}, varlist))
            continue
        end

        if ftbverbose
            fprintf('getting ''%s'' data... ', fields{aa})
        end

        % Initialise the cumulative output struct on the first month,
        % and a fresh per-month scratch struct every time.
        % NOTE(review): data.time is only populated below when aa == 1;
        % if 'varlist' excludes the first field, data.time stays empty.
        if t == 1
            data.(fields{aa}).data = [];
            data.(fields{aa}).time = [];
            data.(fields{aa}).lat = [];
            data.(fields{aa}).lon = [];
            data.time = [];
        end
        scratch.(fields{aa}).data = [];
        scratch.(fields{aa}).time = [];
        scratch.(fields{aa}).lat = [];
        scratch.(fields{aa}).lon = [];

        % Open the remote dataset over OPeNDAP.
        ncid = netcdf.open(ncep.(fields{aa}));

        % Time is hours since the start of the month. Some files
        % instead report the forecast hour within each six-hour
        % analysis cycle (values repeating up to 6); rebuild a
        % continuous hourly axis in that case.
        varid = netcdf.inqVarID(ncid, 'time');
        data_time = netcdf.getVar(ncid, varid, 'double');
        if max(data_time) == 6
            data_time = 0:length(data_time) - 1;
        end

        varid = netcdf.inqVarID(ncid, 'lon');
        data_lon.lon = netcdf.getVar(ncid, varid, 'double');
        varid = netcdf.inqVarID(ncid, 'lat');
        data_lat.lat = netcdf.getVar(ncid, varid, 'double');

        % Some variables carry a vertical dimension; if so, pick the
        % level nearest the surface (nominally 1013 hPa).
        data_level_idx = [];
        try
            varid = netcdf.inqVarID(ncid, 'level');
            data_level.level = netcdf.getVar(ncid, varid, 'double');
            if length(data_level.level) > 1
                [~, data_level_idx] = min(abs(data_level.level - 1013));
            end
        catch
            true; % no 'level' variable: surface-only file
        end
        if isempty(data_level_idx)
            data_level_idx = 1;
        end

        % Convert hours-from-month-start to Modified Julian Day.
        timevec = datevec((data_time / 24) + datenum(year, month, 1, 0, 0, 0));
        scratch.time = greg2mjulian(...
            timevec(:, 1), ...
            timevec(:, 2), ...
            timevec(:, 3), ...
            timevec(:, 4), ...
            timevec(:, 5), ...
            timevec(:, 6));

        % Clip this month's records to the requested model period.
        startTime = max([modelTime(1), ...
            greg2mjulian(year, month, 1, 0, 0, 0)]);
        endTime = min([modelTime(end), ...
            greg2mjulian(year, month, eomday(year, month), 0, 0, 0) + 1]);
        data_time_mask = scratch.time >= startTime & scratch.time < endTime;
        data_time_idx = 1:size(scratch.time, 1);
        data_time_idx = data_time_idx(data_time_mask);
        if ~isempty(data_time_idx)
            scratch.time = scratch.time(data_time_mask);
        else
            % A single record can fall exactly on the window boundary;
            % keep it rather than returning nothing at all.
            if length(scratch.time) == 1
                data_time_idx = 1:size(scratch.time, 1);
            end
        end

        % Work out the longitude indices to extract. The remote data
        % use 0-360 longitudes, so a model domain which straddles
        % Greenwich must be fetched as two chunks (west and east of
        % the meridian) and stitched back together.
        clear index_lon index_lat
        if extents(1) < 0 && extents(2) < 0
            % Entirely west of Greenwich: shift the extents to 0-360.
            extents(1) = extents(1) + 360;
            extents(2) = extents(2) + 360;
            index_lon = find(data_lon.lon > extents(1) & ...
                data_lon.lon < extents(2));
        elseif extents(1) < 0 && extents(2) > 0
            % Straddles Greenwich: two separate index ranges.
            index_lon{1} = find(data_lon.lon >= extents(1) + 360);
            index_lon{2} = find(data_lon.lon <= extents(2));
        else
            % Entirely east of Greenwich.
            index_lon = find(data_lon.lon > extents(1) & ...
                data_lon.lon < extents(2));
        end

        index_lat = find(data_lat.lat > extents(3) & data_lat.lat < extents(4));
        scratch.(fields{aa}).lat = data_lat.lat(index_lat);

        if iscell(index_lon)
            % Dateline/Greenwich-straddling case: fetch the two
            % longitude chunks separately and concatenate.
            scratch.(fields{aa}).lon = data_lon.lon(cat(1, index_lon{:}));

            varid = netcdf.inqVarID(ncid, names.(fields{aa}));

            [~, ~, dimids, ~] = netcdf.inqVar(ncid, varid);
            if length(dimids) == 4
                start = [...
                    min(index_lon{1}), ...
                    min(index_lat), ...
                    data_level_idx, ...
                    min(data_time_idx)] - 1;
                count = [...
                    length(index_lon{1}), ...
                    length(index_lat), ...
                    length(data_level_idx), ...
                    length(data_time_idx)];
            elseif length(dimids) == 3
                start = [...
                    min(index_lon{1}), ...
                    min(index_lat), ...
                    min(data_time_idx)] - 1;
                count = [...
                    length(index_lon{1}), ...
                    length(index_lat), ...
                    length(data_time_idx)];
            end

            data_west.(fields{aa}).(fields{aa}) = ...
                netcdf.getVar(ncid, varid, start, count, 'double');

            if length(dimids) == 4
                start = [...
                    min(index_lon{2}), ...
                    min(index_lat), ...
                    data_level_idx, ...
                    min(data_time_idx)] - 1;
                count = [...
                    length(index_lon{2}), ...
                    length(index_lat), ...
                    length(data_level_idx), ...
                    length(data_time_idx)];
            elseif length(dimids) == 3
                start = [...
                    min(index_lon{2}), ...
                    min(index_lat), ...
                    min(data_time_idx)] - 1;
                count = [...
                    length(index_lon{2}), ...
                    length(index_lat), ...
                    length(data_time_idx)];
            end
            data_east.(fields{aa}).(fields{aa}) = ...
                netcdf.getVar(ncid, varid, start, count, 'double');

            % Stitch the two halves together along the longitude
            % dimension. Store the array directly under
            % scratch.(field).(field) (the original transiently stored
            % a triply-nested struct and then re-did this same
            % concatenation in a redundant loop over the single field
            % of data_west; both removed).
            scratch.(fields{aa}).(fields{aa}) = cat(1, ...
                data_west.(fields{aa}).(fields{aa}), ...
                data_east.(fields{aa}).(fields{aa}));

            clearvars data_west data_east
        else
            % Contiguous longitude range: a single hyperslab read.
            scratch.(fields{aa}).lon = data_lon.lon(index_lon);

            % Look the variable up by its proper netCDF name (e.g.
            % 'Temperature'). The original passed the short field name
            % (e.g. 'tmp2m'), which does not exist in the remote file
            % and made this branch fail unconditionally.
            varid = netcdf.inqVarID(ncid, names.(fields{aa}));

            % Try the 3D (lon, lat, time) read first; fall back to the
            % 4D (lon, lat, level, time) form when the variable has a
            % vertical dimension.
            start = [...
                min(index_lon), ...
                min(index_lat), ...
                min(data_time_idx)] - 1;
            count = [...
                length(index_lon), ...
                length(index_lat), ...
                length(data_time_idx)];

            try
                % Double-nested store (scratch.(field).(field)) to
                % match the squeeze() below; the original's
                % triple-nested assignment here left a struct where an
                % array was expected.
                scratch.(fields{aa}).(fields{aa}) = ...
                    netcdf.getVar(ncid, varid, start, count, 'double');
            catch
                start = [...
                    min(index_lon), ...
                    min(index_lat), ...
                    data_level_idx, ...
                    min(data_time_idx)] - 1;
                count = [...
                    length(index_lon), ...
                    length(index_lat), ...
                    1, ...
                    length(data_time_idx)];
                scratch.(fields{aa}).(fields{aa}) = ...
                    netcdf.getVar(ncid, varid, start, count, 'double');
            end

        end

        % Close the remote dataset; the original leaked this handle
        % for every variable of every month.
        netcdf.close(ncid)

        clearvars data_time* data_level_idx

        % Convert 0-360 longitudes back to -180 to 180.
        scratch.(fields{aa}).lon(scratch.(fields{aa}).lon > 180) = ...
            scratch.(fields{aa}).lon(scratch.(fields{aa}).lon > 180) - 360;

        datatmp = squeeze(scratch.(fields{aa}).(fields{aa}));

        % Append this month's slab and times to the running output.
        data.(fields{aa}).data = cat(3, data.(fields{aa}).data, datatmp);
        data.(fields{aa}).time = cat(1, data.(fields{aa}).time, scratch.time);
        data.(fields{aa}).lat = scratch.(fields{aa}).lat;
        data.(fields{aa}).lon = scratch.(fields{aa}).lon;

        % Use the first field's accumulated times as the master time
        % series; otherwise drop the scratch space.
        if aa == 1
            data.time = data.(fields{aa}).time;
        else
            clearvars scratch
        end

        if ftbverbose
            if isfield(data, fields{aa})
                fprintf('done.\n')
            else
                fprintf('error!\n')
            end
        end
    end
end
0488
0489
0490
0491
0492
% The CFSR flux fields (radiation, precipitation, latent heat) are
% stored as averages accumulated within each six-hour analysis cycle.
% Recover approximately-hourly values from the running averages; the
% instantaneous-style fields in the skip list below are left untouched.
% Note: both `nt` and `t` shadow the month-loop variables above, which
% is safe because that loop has already finished.
fields = fieldnames(data);
for f = 1:length(fields)
    if isfield(data.(fields{f}), 'data')
        % These fields are not time-averaged, so skip them.
        if any(strcmpi(fields{f}, {'pressfc', 'tmp2m', 'uwnd', 'vwnd'}))
            continue
        end
        [~, ~, nt] = size(data.(fields{f}).data);
        fixed = data.(fields{f}).data;
        if ftbverbose
            fprintf('De-averaging the n-hourly %s data to hourly... ', ...
                fields{f})
        end

        % Step through the record in six-hour cycles. Within each
        % cycle, record t is taken as-is and records t+1..t+5 are
        % de-averaged via: value(t+n) = n*avg(t+n) - (n-1)*avg(t+n-1).
        % NOTE(review): these coefficients assume record t+n holds the
        % n-hour running average (i.e. record t is already a one-hour
        % value). If record t+n were instead the (n+1)-hour average,
        % the coefficients would need to be (n+1) and n — confirm
        % against the CFSR time-averaging convention.
        for t = 1:6:nt
            for n = 1:5
                if t + n <= nt
                    fixed(:, :, t + n) = ...
                        (n * data.(fields{f}).data(:, :, t + n)) - ...
                        ((n - 1) * data.(fields{f}).data(:, :, t + n - 1));
                end
            end
        end
        data.(fields{f}).data = fixed;
        clearvars fixed
        if ftbverbose; fprintf('done.\n'); end
    end
end
0529
0530
% Derive the net shortwave flux from its downward and upward
% components, when both were downloaded. Coordinates and times are
% copied from the upward component.
if all(isfield(data, {'uswsfc', 'dswsfc'}))
    data.nswsfc.data = data.dswsfc.data - data.uswsfc.data;
    data.nswsfc.time = data.uswsfc.time;
    data.nswsfc.lon = data.uswsfc.lon;
    data.nswsfc.lat = data.uswsfc.lat;
end

% Scale the precipitation rate from kg m-2 s-1 to m s-1 — TODO confirm
% the downstream interpolation expects these units.
if isfield(data, 'prate')
    data.prate.data = data.prate.data / 1000;
end

% Estimate evaporation from the latent heat net flux and combine it
% with the (already scaled) precipitation into a P-E budget.
if all(isfield(data, {'prate', 'lhtfl'}))
    Llv = 2.5 * 10^6; % latent heat of vaporisation (J kg-1)
    rho = 1025;       % nominal seawater density (kg m-3)
    Et = data.lhtfl.data / Llv / rho;
    data.P_E.data = data.prate.data - Et;
    % Store evaporation with the opposite sign convention.
    data.Et.data = -Et;
end
0564
0565
0566
% Choose a field from which to lift the top-level coordinate arrays:
% the first downloaded variable that is not one of the auxiliary /
% derived names below. (If every candidate is auxiliary, the last one
% examined wins, matching the original fall-through.)
for vv = 1:length(fields)
    if ~isempty(varlist) && ~any(strcmp(fields{vv}, varlist))
        continue
    end
    ii = vv;
    if ~any(strcmp(fields{vv}, {'topo', 'rhum', 'pres', 'press'}))
        break
    end
end
data.lon = data.(fields{ii}).lon;
data.lon(data.lon > 180) = data.lon(data.lon > 180) - 360;
data.lat = data.(fields{ii}).lat;

% Air temperature: Kelvin to Celsius.
if isfield(data, 'tmp2m')
    data.tmp2m.data = data.tmp2m.data - 273.15;
end

% Relative humidity (%) from specific humidity, air temperature (now
% in Celsius) and surface pressure (converted Pa to mb for qair2rh —
% TODO confirm qair2rh's expected units).
if all(isfield(data, {'q2m', 'tmp2m', 'pressfc'}))
    data.rhum.data = 100 * qair2rh(data.q2m.data, data.tmp2m.data, data.pressfc.data / 100);
end
% Scale the specific humidity by 100 (after rhum has been computed
% from the unscaled values).
if isfield(data, 'q2m')
    data.q2m.data = data.q2m.data * 100;
end
0603
0604
0605
% Make sure every downloaded array is oriented with longitude as the
% first and latitude as the second dimension, transposing the
% horizontal dimensions when the sizes do not line up with the
% coordinate vectors.
for n = 1:length(fields)
    fname = fields{n};
    % The master time array has no lon/lat; skip it, along with
    % anything excluded by 'varlist'.
    if (~isempty(varlist) && ~any(strcmp(fname, varlist))) || strcmpi(fname, 'time')
        continue
    end
    if ~isfield(data, fname)
        warning('Variable %s requested but not downloaded.', fname)
        continue
    end
    px = length(data.(fname).lon);
    py = length(data.(fname).lat);
    [ncx, ncy, ~] = size(data.(fname).data);
    if ncx ~= px || ncy ~= py
        % Swap the two horizontal dimensions and re-check.
        data.(fname).data = permute(data.(fname).data, [2, 1, 3]);
        [ncx, ncy, ~] = size(data.(fname).data);
        if ncx ~= px || ncy ~= py
            error(['Unable to resize data arrays to match ', ...
                'position data orientation. Are these on a ', ...
                'different horizontal grid?'])
        end
    end
end

if ftbverbose
    fprintf('end : %s\n', subname)
end