Commit

Merge pull request #148 from NREL/gb/bc
Gb/bc
grantbuster committed Mar 7, 2023
2 parents e0a0bc7 + a2a4830 commit 348abdd
Showing 3 changed files with 160 additions and 1 deletion.
81 changes: 81 additions & 0 deletions rex/sam_resource.py
@@ -701,6 +701,87 @@ def append_var_list(self, var):

self.var_list.append(var)

def bias_correct(self, bc_df):
"""Bias correct wind or irradiance data using a table of linear
        correction factors per resource gid.

        Parameters
----------
bc_df : pd.DataFrame
DataFrame with wind or solar resource bias correction table. This
has columns: gid, adder, scalar. If either adder or scalar is not
present, scalar defaults to 1 and adder to 0. Only windspeed or
GHI+DNI are corrected depending on the technology. GHI and DNI are
corrected with the same correction factors.
"""

if 'adder' not in bc_df:
logger.info('Bias correction table provided, but "adder" not '
'found, defaulting to 0.')
bc_df['adder'] = 0

if 'scalar' not in bc_df:
logger.info('Bias correction table provided, but "scalar" not '
'found, defaulting to 1.')
bc_df['scalar'] = 1

if bc_df.index.name != 'gid':
msg = ('Bias correction table must have "gid" column but only '
'found: {}'.format(list(bc_df.columns)))
assert 'gid' in bc_df, msg
bc_df = bc_df.set_index('gid')

site_arr = np.array(self.sites)
isin = np.isin(self.sites, bc_df.index.values)
if not isin.all():
            missing = site_arr[~isin]
msg = ('{} sites were missing from the bias correction table, '
'not bias correcting: {}'.format(len(missing), missing))
logger.warning(msg)
warn(msg)

scalar = np.expand_dims(bc_df.loc[site_arr[isin], 'scalar'].values, 0)
adder = np.expand_dims(bc_df.loc[site_arr[isin], 'adder'].values, 0)

if 'ghi' in self._res_arrays and 'dni' in self._res_arrays:
ghi = self._res_arrays['ghi']
dni = self._res_arrays['dni']
dhi = self._res_arrays['dhi']
ghi_zeros = ghi == 0
dni_zeros = dni == 0
dhi_zeros = dhi == 0
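            # recover the pre-correction solar zenith cosine from the closure
            # relation GHI = DHI + DNI * cos(SZA); it is reused below so the
            # recomputed DHI stays consistent with the corrected GHI and DNI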
with np.errstate(divide='ignore', invalid='ignore'):
cos_sza = (ghi - dhi) / dni

ghi[:, isin] = ghi[:, isin] * scalar + adder
dni[:, isin] = dni[:, isin] * scalar + adder
ghi = np.maximum(0, ghi)
dni = np.maximum(0, dni)
ghi[ghi_zeros] = 0
dni[dni_zeros] = 0
with np.errstate(divide='ignore', invalid='ignore'):
dhi[:, isin] = ghi[:, isin] - (dni[:, isin] * cos_sza[:, isin])

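            # the division above is undefined where DNI was zero, which leaves
            # NaNs in the recomputed DHI; those timesteps are all-diffuse, so
            # DHI is simply set to the corrected GHI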
dhi[dni_zeros] = ghi[dni_zeros]
dhi = np.maximum(0, dhi)
dhi[dhi_zeros] = 0
assert not np.isnan(dhi).any()
self._res_arrays['ghi'] = ghi
self._res_arrays['dni'] = dni
self._res_arrays['dhi'] = dhi

elif 'windspeed' in self._res_arrays:
logger.debug('Bias correcting windspeeds.')
arr = self._res_arrays['windspeed']
arr[:, isin] = arr[:, isin] * scalar + adder
arr = np.maximum(arr, 0)
self._res_arrays['windspeed'] = arr

if self._mean_arrays is not None:
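            # cached means are stale after correction; recompute them from the
            # bias-corrected resource arrays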
# pylint: disable=consider-iterating-dictionary
for var in self._mean_arrays.keys():
self._mean_arrays[var] = self._res_arrays[var].mean(axis=0)

def _check_physical_ranges(self, var, arr, var_slice):
"""Check physical range of array and enforce usable SAM data.
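For context, a minimal usage sketch of the new bias_correct method (not part of this diff): the file path below is hypothetical, the preload_SAM call mirrors the tests added in this commit, and the table applies a unit scalar with a +0.5 m/s adder to gids 0-19.

import numpy as np
import pandas as pd

from rex import WindResource

# build a linear correction table: one row per resource gid with a unit
# scalar and a +0.5 m/s adder (values here are illustrative only)
bc_df = pd.DataFrame({'gid': np.arange(20),
                      'scalar': 1.0,
                      'adder': 0.5})

# hypothetical WTK file path; preload_SAM mirrors the call in the tests below
sam_res = WindResource.preload_SAM('./ri_100_wtk_2012.h5', slice(0, 20), 80)
sam_res.bias_correct(bc_df)  # windspeed arrays are corrected in place

The correction is applied in place to the preloaded resource arrays, and the cached mean values are refreshed from the corrected data.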
2 changes: 1 addition & 1 deletion rex/version.py
@@ -1,3 +1,3 @@
"""rex Version number"""

__version__ = "0.2.78"
__version__ = "0.2.79"
78 changes: 78 additions & 0 deletions tests/test_sam_resource.py
@@ -410,5 +410,83 @@ def execute_pytest(capture='all', flags='-rapP'):
pytest.main(['-q', '--show-capture={}'.format(capture), fname, flags])


def test_bias_correct_wind():
"""Test linear bias correction functionality on windspeed"""
h5 = os.path.join(TESTDATADIR, 'wtk/ri_100_wtk_2012.h5')
sites = slice(0, 20)
hub_heights = 80
base_res = WindResource.preload_SAM(h5, sites, hub_heights)

n = 10
bc = pd.DataFrame({'gid': np.arange(n),
'adder': np.random.uniform(-1, 1, n),
'scalar': np.random.uniform(0.9, 1.1, n)})
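    # the table only covers gids 0-9 of the 20 preloaded sites, so a
    # "missing from the bias correction table" warning is expected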

with pytest.warns() as record:
res = WindResource.preload_SAM(h5, sites, hub_heights)
res.bias_correct(bc)

assert len(record) == 1
assert 'missing from the bias correction' in str(record[0].message)
assert np.allclose(res._res_arrays['windspeed'][:, 10:],
base_res._res_arrays['windspeed'][:, 10:])
assert not (res._res_arrays['windspeed'][:, :10] ==
base_res._res_arrays['windspeed'][:, :10]).any()
assert (res._res_arrays['windspeed'] >= 0).all()

n = 200
bc = pd.DataFrame({'gid': np.arange(n),
'adder': np.random.uniform(-1, 1, n),
'scalar': np.random.uniform(0.9, 1.1, n)})

with pytest.warns(None) as record:
res = WindResource.preload_SAM(h5, sites, hub_heights)
res.bias_correct(bc)

assert not any(record)
assert not (res._res_arrays['windspeed'] ==
base_res._res_arrays['windspeed']).any()
assert (res._res_arrays['windspeed'] >= 0).all()


def test_bias_correct_solar():
"""Test adder bias correction functionality on irradiance"""
h5 = os.path.join(TESTDATADIR, 'nsrdb/ri_100_nsrdb_2012.h5')
sites = slice(0, 10)
base_res = NSRDB.preload_SAM(h5, sites)

n = 10
bc = pd.DataFrame({'gid': np.arange(n),
'adder': np.random.uniform(-100, 100, n),
'scalar': np.random.uniform(1, 1, n)})

res = NSRDB.preload_SAM(h5, sites)
res.bias_correct(bc)

for gid in res.sites:
adder = bc.at[gid, 'adder']
base_ghi = base_res._res_arrays['ghi'][:, gid]
base_dni = base_res._res_arrays['dni'][:, gid]
base_dhi = base_res._res_arrays['dhi'][:, gid]
ghi = res._res_arrays['ghi'][:, gid]
dni = res._res_arrays['dni'][:, gid]
dhi = res._res_arrays['dhi'][:, gid]
assert (ghi >= 0).all()
assert (dni >= 0).all()
assert (dhi >= 0).all()
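        # the zero floor (np.maximum) can clip values where base + adder < 0,
        # so the exact adder check only uses values comfortably above |adder|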
ghi_mask = (ghi > np.abs(adder)) & (base_ghi > np.abs(adder))
dni_mask = (dni > np.abs(adder)) & (base_dni > np.abs(adder))
assert np.allclose(ghi[ghi_mask], base_ghi[ghi_mask] + adder)
assert np.allclose(dni[dni_mask], base_dni[dni_mask] + adder)

ghi_mask = (ghi > np.abs(adder)) & (base_ghi > np.abs(adder))
dni_mask = (dni > np.abs(adder)) & (base_dni > np.abs(adder))
dhi_mask = (dhi > np.abs(adder)) & (base_dhi > np.abs(adder))
mask = ghi_mask & dni_mask & dhi_mask
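        # a pure adder shifts GHI and DNI equally and DHI is rebuilt from the
        # original zenith cosine, so the implied cos(SZA) should be unchanged
        # wherever no component was clipped at the zero floor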
cos_sza = (ghi[mask] - dhi[mask]) / (dni[mask])
base_cos_sza = (base_ghi[mask] - base_dhi[mask]) / (base_dni[mask])
assert np.allclose(cos_sza, base_cos_sza, atol=0.005)


if __name__ == '__main__':
execute_pytest()
