Skip to content

Commit

Permalink
replaced deprecated MATLAB functions "nanmean", "nanstd", and "nansum…
Browse files Browse the repository at this point in the history
…" with "mean", "std", and "sum"; avoids dependency on Stats toolbox (various *.m)
  • Loading branch information
gmao-rreichle committed Dec 19, 2023
1 parent 319905a commit 3ffee32
Show file tree
Hide file tree
Showing 6 changed files with 48 additions and 49 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -241,7 +241,7 @@

header = [y1 m1 d1 0 0 0 y2 m2 d2 0 0 0 tc.N_tile 1];

tile_data = nanmean(L2_tau_tile([nidx_pre nidx nidx_nxt],:),1);
tile_data = mean(L2_tau_tile([nidx_pre nidx nidx_nxt],:),1,"omitnan");
tile_data(isnan(tile_data)) = 1.e15;

fwrite( ifp, 14*4, int_precision ); % fortran_tag
Expand All @@ -260,7 +260,7 @@
end

% ========================
% The final vegopacity.bin file contains data averaged (nanmean) of Asc
% The final vegopacity.bin file contains data averaged (mean) of Asc
% and Des tau climatology. VOD is climatology, the other 2 parameters are
% time constant with maximum spatial coverage
data_clim_tile = NaN + ones(48, tc.N_tile,2);
Expand Down Expand Up @@ -295,7 +295,7 @@
data_clim_tile(data_clim_tile < 0.) = 0.; % set small negative values to 0

% averaging A, D values
tile_data = nanmean(data_clim_tile,3);
tile_data = mean(data_clim_tile,3,"omitnan");
fname_out = strrep(fname, L2_Ascdes,'_AD_');
if fill_small_gaps

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
L2_version,start_time, end_time)

% function to compute the 2 RTM constant variables: Albedo and Roughness(H) based on the
% preprocessed data. Constants are taken as the long term temporal nanmean
% preprocessed data. Constants are taken as the long term temporal mean
% with maximum coverage across Asc/Desc passes.

% Q. Liu 18 Jul 2022
Expand Down Expand Up @@ -212,7 +212,7 @@
data_clim_tile(data_clim_tile < 0.) = 0.; % set small negative values to 0

% averaging A, D values
tile_data = nanmean(data_clim_tile,2);
tile_data = mean(data_clim_tile,2,"omitnan");

eval(['out_mwRTM.',out_Para{iPara},'=tile_data;'])
end
Original file line number Diff line number Diff line change
Expand Up @@ -500,17 +500,17 @@
% and across years!
% some obs can be found at multiple hours within a day
% e.g. at the poles.
% **nansum of NaN's** result in zero, this need to be
% **sum(...,"omitnan") of NaNs** results in zero; this needs to be
% taken care of
o_data( pol(1),obs_i,angle_i,count) = nansum([o_data( pol(1),obs_i,angle_i,count); obs_obs_i' ]);
m_data( pol(1),obs_i,angle_i,count) = nansum([m_data( pol(1),obs_i,angle_i,count); obs_fcst_i']);
o_data( pol(1),obs_i,angle_i,count) = sum([o_data( pol(1),obs_i,angle_i,count); obs_obs_i' ], "omitnan");
m_data( pol(1),obs_i,angle_i,count) = sum([m_data( pol(1),obs_i,angle_i,count); obs_fcst_i' ], "omitnan");

% X^2
o_data2(pol(1),obs_i,angle_i,count) = nansum([o_data2(pol(1),obs_i,angle_i,count); obs_obs_i'.^2 ]);
m_data2(pol(1),obs_i,angle_i,count) = nansum([m_data2(pol(1),obs_i,angle_i,count); obs_fcst_i'.^2]);
o_data2(pol(1),obs_i,angle_i,count) = sum([o_data2(pol(1),obs_i,angle_i,count); obs_obs_i'.^2 ], "omitnan");
m_data2(pol(1),obs_i,angle_i,count) = sum([m_data2(pol(1),obs_i,angle_i,count); obs_fcst_i'.^2 ], "omitnan");

% Sum of obs or model elements at each location
N_data(pol(1),obs_i,angle_i,count) = nansum( [N_data( pol(1),obs_i,angle_i,count); ~isnan([obs_obs_i])']);
N_data(pol(1), obs_i,angle_i,count) = sum([N_data( pol(1),obs_i,angle_i,count); ~isnan([obs_obs_i])'], "omitnan");

else

Expand All @@ -523,19 +523,19 @@

% Sum of X
o_data(pol(1),s_eff,angle_i,count) = ...
nansum([o_data( pol(1),s_eff,angle_i,count); repmat(obs_obs_i( i_ind), 1,length(s_eff))]);
sum([o_data( pol(1),s_eff,angle_i,count); repmat( obs_obs_i( i_ind), 1,length(s_eff))], "omitnan");
m_data(pol(1),s_eff,angle_i,count) = ...
nansum([m_data( pol(1),s_eff,angle_i,count); repmat(obs_fcst_i(i_ind), 1,length(s_eff))]);
sum([m_data( pol(1),s_eff,angle_i,count); repmat( obs_fcst_i(i_ind), 1,length(s_eff))], "omitnan");

% Sum of X^2
o_data2(pol(1),s_eff,angle_i,count) = ...
nansum([o_data2(pol(1),s_eff,angle_i,count); repmat(obs_obs_i( i_ind).^2,1,length(s_eff))]);
sum([o_data2(pol(1),s_eff,angle_i,count); repmat( obs_obs_i( i_ind).^2,1,length(s_eff))], "omitnan");
m_data2(pol(1),s_eff,angle_i,count) = ...
nansum([m_data2(pol(1),s_eff,angle_i,count); repmat(obs_fcst_i(i_ind).^2,1,length(s_eff))]);
sum([m_data2(pol(1),s_eff,angle_i,count); repmat( obs_fcst_i(i_ind).^2,1,length(s_eff))], "omitnan");

% Sum of obs or model elements at each location
N_data(pol(1),s_eff,angle_i,count) = ...
nansum([N_data( pol(1),s_eff,angle_i,count); repmat(~isnan([obs_obs_i(i_ind)]),1,length(s_eff)) ]);
sum([N_data( pol(1),s_eff,angle_i,count); repmat(~isnan([obs_obs_i(i_ind)]), 1,length(s_eff))], "omitnan");

end

Expand Down Expand Up @@ -578,41 +578,41 @@

pp = pol*Nf;

N_hscale_window = nansum(N_data(1+pol,:,:,1:w_days),4);
N_hscale_window = sum(N_data(1+pol,:,:,1:w_days), 4,"omitnan");

if w_days == 95
N_hscale_inner_window = nansum(N_data(1+pol,:,:,((w_days+1)/2-15):((w_days+1)/2+15)),4);
N_hscale_inner_window = sum(N_data(1+pol,:,:,((w_days+1)/2-15):((w_days+1)/2+15)),4,"omitnan");
end

% OBSERVATIONS
%----------------
% o_data is a sum over neighbouring obs above;
% here then take a sum over the time steps in the window
data_out(1+pp,:,:) = nansum(o_data(1+pol,:,:,1:w_days),4);
data_out(1+pp,:,:) = sum( o_data( 1+pol,:,:,1:w_days),4,"omitnan");

% then make the average, by dividing over the sum of the number of
% timesteps and influencing obs at each location
data_out(1+pp,:,:) = data_out(1+pp,:,:)./N_hscale_window;
data_out(1+pp,:,:) = data_out( 1+pp, :,:)./N_hscale_window;

%stdv_H = sqrt(E[X^2] - E[X]^2)
data_out(2+pp,:,:) = nansum(o_data2(1+pol,:,:,1:w_days),4);
data_out(2+pp,:,:) = data_out(2+pp,:,:)./N_hscale_window;
data_out(2+pp,:,:) = sqrt( data_out(2+pp,:,:) - data_out(1+pp,:,:).^2);
data_out(2+pp,:,:) = sum( o_data2( 1+pol,:,:,1:w_days),4,"omitnan");
data_out(2+pp,:,:) = data_out( 2+pp, :,:)./N_hscale_window;
data_out(2+pp,:,:) = sqrt( data_out( 2+pp, :,:) - data_out(1+pp,:,:).^2);

% MODEL
%----------------
data_out(3+pp,:,:) = nansum(m_data(1+pol,:,:,1:w_days),4);
data_out(3+pp,:,:) = data_out(3+pp,:,:)./N_hscale_window;
data_out(3+pp,:,:) = sum( m_data( 1+pol,:,:,1:w_days),4,"omitnan");
data_out(3+pp,:,:) = data_out( 3+pp, :,:)./N_hscale_window;

data_out(4+pp,:,:) = nansum(m_data2(1+pol,:,:,1:w_days),4);
data_out(4+pp,:,:) = data_out(4+pp,:,:)./N_hscale_window;
data_out(4+pp,:,:) = sqrt( data_out(4+pp,:,:) - data_out(3+pp,:,:).^2);
data_out(4+pp,:,:) = sum( m_data2( 1+pol,:,:,1:w_days),4,"omitnan");
data_out(4+pp,:,:) = data_out( 4+pp, :,:)./N_hscale_window;
data_out(4+pp,:,:) = sqrt( data_out( 4+pp, :,:) - data_out(3+pp,:,:).^2);

data_out(5+pp,:,:) = N_hscale_window;

% Toss out stats that are based on too little data

data_out([1:5]+pp,N_hscale_window<Ndata_min) = NaN;
data_out( [1:5]+pp,N_hscale_window < Ndata_min ) = NaN;

if w_days == 95
data_out([1:5]+pp,N_hscale_inner_window < (Ndata_min/2.5)) = NaN;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -203,13 +203,13 @@
[~, obs_idx] = ismember([i_idx, j_idx], [i_out, j_out], 'rows');
obs_i = obsnum(obs_idx);

o_data_sum( scnt, obs_i, count) = nansum([o_data_sum( scnt, obs_i, count); obs_obs_i']);
m_data_sum( scnt, obs_i, count) = nansum([m_data_sum( scnt, obs_i, count); obs_fcst_i']);
o_data_sum2(scnt, obs_i, count) = nansum([o_data_sum2(scnt, obs_i, count); obs_obs_i'.^2]);
m_data_sum2(scnt, obs_i, count) = nansum([m_data_sum2(scnt, obs_i, count); obs_fcst_i'.^2]);
m_data_min( scnt, obs_i, count) = min( [m_data_min( scnt, obs_i, count); obs_fcst_i']);
m_data_max( scnt, obs_i, count) = max( [m_data_max( scnt, obs_i, count); obs_fcst_i']);
N_data( scnt, obs_i, count) = nansum([N_data( scnt, obs_i, count); ~isnan(obs_obs_i)']);
o_data_sum( scnt, obs_i, count) = sum([o_data_sum( scnt, obs_i, count); obs_obs_i' ], "omitnan");
m_data_sum( scnt, obs_i, count) = sum([m_data_sum( scnt, obs_i, count); obs_fcst_i' ], "omitnan");
o_data_sum2(scnt, obs_i, count) = sum([o_data_sum2(scnt, obs_i, count); obs_obs_i'.^2 ], "omitnan");
m_data_sum2(scnt, obs_i, count) = sum([m_data_sum2(scnt, obs_i, count); obs_fcst_i'.^2 ], "omitnan");
m_data_min( scnt, obs_i, count) = min([m_data_min( scnt, obs_i, count); obs_fcst_i' ] );
m_data_max( scnt, obs_i, count) = max([m_data_max( scnt, obs_i, count); obs_fcst_i' ] );
N_data( scnt, obs_i, count) = sum([N_data( scnt, obs_i, count); ~isnan(obs_obs_i)' ], "omitnan");
end
end
end
Expand All @@ -233,15 +233,15 @@

for i = 1:N_species

N_window = nansum(N_data(i,:,1:w_days),3);
N_window = sum(N_data( i,:,1:w_days), 3,"omitnan");

data2D(1,:) = nansum(o_data_sum( i,:,1:w_days),3)./N_window;
data2D(2,:) = sqrt(nansum(o_data_sum2(i,:,1:w_days),3)./N_window - data2D(1,:).^2);
data2D(3,:) = nansum(m_data_sum( i,:,1:w_days),3)./N_window;
data2D(4,:) = sqrt(nansum(m_data_sum2(i,:,1:w_days),3)./N_window - data2D(3,:).^2);
data2D(1,:) = sum(o_data_sum( i,:,1:w_days), 3,"omitnan")./N_window;
data2D(2,:) = sqrt(sum(o_data_sum2(i,:,1:w_days), 3,"omitnan")./N_window - data2D(1,:).^2);
data2D(3,:) = sum(m_data_sum( i,:,1:w_days), 3,"omitnan")./N_window;
data2D(4,:) = sqrt(sum(m_data_sum2(i,:,1:w_days), 3,"omitnan")./N_window - data2D(3,:).^2);
data2D(5,:) = N_window;
data2D(6,:) = min(m_data_min(i,:,1:w_days),[],3); % Want to use minimum mean daily value
data2D(7,:) = max(m_data_max(i,:,1:w_days),[],3); % Want to use maximum mean daily value
data2D(6,:) = min(m_data_min( i,:,1:w_days),[],3); % Want to use minimum mean daily value
data2D(7,:) = max(m_data_max( i,:,1:w_days),[],3); % Want to use maximum mean daily value

% Set NaNs where there is not enough data
data2D([1:Nf],N_window<Ndata_min) = NaN;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -358,11 +358,11 @@

tmp = reshape(squeeze(hist_data(tile, s, :)),1,[]);

data_out(s,tile,1) = nanmean(tmp); % mean
data_out(s,tile,2) = nanstd(tmp); % stdv
data_out(s,tile,3) = min(tmp); % min
data_out(s,tile,4) = max(tmp); % max
data_out(s,tile,5) = sum(~isnan(tmp)); % N_data
data_out(s,tile,1) = mean( tmp,"omitnan"); % mean
data_out(s,tile,2) = std( tmp,"omitnan"); % stdv
data_out(s,tile,3) = min( tmp ); % min
data_out(s,tile,4) = max( tmp ); % max
data_out(s,tile,5) = sum(~isnan(tmp) ); % N_data

% determine the CDF-parameters, or the edges for each
% percentile
Expand Down
1 change: 0 additions & 1 deletion src/Applications/LDAS_App/util/postproc/write_smapL4SMqa.m
Original file line number Diff line number Diff line change
Expand Up @@ -1258,7 +1258,6 @@
%
% GDL, 16 Jan 2014
% GDL, 30 Jan 2014: remove NaN prior to the calculation of stats
% and avoid using nanmean, nanstdv, etc..
% GDL, 1 Feb 2014: - weighted statistics for mean and stdv
% - add 6th statistic (frac):
% the actual used fraction of N_data (based on weights)
Expand Down

0 comments on commit 3ffee32

Please sign in to comment.