From d848a9f9fba94a29e2a60d9849eb0a1a82956172 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Thu, 15 Sep 2022 16:34:11 -0600 Subject: [PATCH 01/42] Added geotiff output option --- reVX/config/turbine_flicker.py | 5 + reVX/turbine_flicker/turbine_flicker.py | 100 +++++++++++--------- reVX/turbine_flicker/turbine_flicker_cli.py | 11 ++- tests/test_turbine_flicker.py | 52 +++++++++- 4 files changed, 120 insertions(+), 48 deletions(-) diff --git a/reVX/config/turbine_flicker.py b/reVX/config/turbine_flicker.py index 9086150d7..58d588b19 100644 --- a/reVX/config/turbine_flicker.py +++ b/reVX/config/turbine_flicker.py @@ -88,3 +88,8 @@ def out_layer(self): be saved """ return self.get('out_layer', None) + + @property + def out_tiff(self): + """str: Path to output tiff file where exclusions should be saved. """ + return self.get('out_tiff', None) diff --git a/reVX/turbine_flicker/turbine_flicker.py b/reVX/turbine_flicker/turbine_flicker.py index f8c63271b..f10421e4b 100644 --- a/reVX/turbine_flicker/turbine_flicker.py +++ b/reVX/turbine_flicker/turbine_flicker.py @@ -422,14 +422,16 @@ def _get_sc_points(self, tm_dset='techmap_wtk'): def compute_exclusions(self, hub_height, rotor_diameter, building_threshold=0, flicker_threshold=30, - max_workers=None, out_layer=None): - """ + max_workers=None, out_layer=None, out_tiff=None): + """Compute turbine flicker exclusions. + Exclude all pixels that will cause flicker exceeding the - "flicker_threshold" on any building in "building_layer". Buildings - are defined as pixels with >= the "building_threshold value in - "building_layer". Shadow flicker is computed at the supply curve point - resolution based on a turbine with "hub_height" (m) and applied to all - buildings within that supply curve point sub-array. + "flicker_threshold" on any building in "building_layer". + Buildings are defined as pixels with >= the "building_threshold + value in "building_layer". 
Shadow flicker is computed at the + supply curve point resolution based on a turbine with + "hub_height" (m) and applied to all buildings within that supply + curve point sub-array. Parameters ---------- @@ -439,25 +441,25 @@ def compute_exclusions(self, hub_height, rotor_diameter, Rotor diameter in meters to compute turbine shadow flicker. building_threshold : float, optional Threshold for exclusion layer values to identify pixels with - buildings, values are % of pixel containing a building, - by default 0 + buildings, values are % of pixel containing a building. By + default, `0`. flicker_threshold : int, optional - Maximum number of allowable flicker hours, by default 30 - resolution : int, optional - SC resolution, must be input in combination with gid, - by default 640 - max_workers : None | int, optional - Number of workers to use, if 1 run in serial, if None use all - available cores, by default None + Maximum number of allowable flicker hours. By default, `30`. + max_workers : int, optional + Number of workers to use. If 1 run, in serial. If `None`, + use all available cores. By default, `None`. out_layer : str, optional Layer to save exclusions under. Layer will be saved in - "excl_fpath", by default None + `excl_fpath`. By default, `None`. + out_tiff : str, optional + Path to output tiff file where exclusions should be saved. + By default, `None`. Returns ------- flicker_arr : ndarray 2D inclusion array. 
Pixels to exclude (0) to prevent shadow - flicker on buildings in "building_layer" + flicker on buildings in "building_layer """ with ExclusionLayers(self._excl_h5) as f: exclusion_shape = f.shape @@ -525,6 +527,10 @@ def compute_exclusions(self, hub_height, rotor_diameter, ExclusionsConverter._write_layer(self._excl_h5, out_layer, profile, flicker_arr, description=description) + if out_tiff: + logger.info('Saving flicker inclusion layer to {}' + .format(out_tiff)) + ExclusionsConverter._write_geotiff(out_tiff, profile, flicker_arr) return flicker_arr @@ -532,48 +538,53 @@ def compute_exclusions(self, hub_height, rotor_diameter, def run(cls, excl_fpath, res_fpath, building_layer, hub_height, rotor_diameter, tm_dset='techmap_wtk', building_threshold=0, flicker_threshold=30, resolution=640, max_workers=None, - out_layer=None): - """ + out_layer=None, out_tiff=None): + """Run flicker exclusion layer generation. + Exclude all pixels that will cause flicker exceeding the - "flicker_threshold" on any building in "building_layer". Buildings - are defined as pixels with >= the "building_threshold value in - "building_layer". Shadow flicker is computed at the supply curve point - resolution based on a turbine with "hub_height" (m) and applied to all - buildings within that supply curve point sub-array. + "flicker_threshold" on any building in "building_layer". + Buildings are defined as pixels with >= the "building_threshold + value in "building_layer". Shadow flicker is computed at the + supply curve point resolution based on a turbine with + "hub_height" (m) and applied to all buildings within that supply + curve point sub-array. Parameters ---------- excl_fpath : str - Filepath to exclusions h5 file. File must contain "building_layer" - and "tm_dset". + Filepath to exclusions h5 file. File must contain + `building_layer` and `tm_dset`. res_fpath : str Filepath to wind resource .h5 file containing hourly wind - direction data + direction data. 
building_layer : str - Exclusion layer containing buildings from which turbine flicker - exclusions will be computed. + Exclusion layer containing buildings from which turbine + flicker exclusions will be computed. hub_height : int - Hub-height in meters to compute turbine shadow flicker. + Hub-height (m) used to compute turbine shadow flicker. rotor_diameter : int - Rotor diameter in meters to compute turbine shadow flicker. + Rotor diameter (m) used to compute turbine shadow flicker. tm_dset : str, optional - Dataset / layer name for wind toolkit techmap, - by default 'techmap_wtk' + Dataset / layer name for wind toolkit techmap. By default, + `'techmap_wtk'`. building_threshold : float, optional Threshold for exclusion layer values to identify pixels with - buildings, values are % of pixel containing a building, - by default 0 + buildings, values are % of pixel containing a building. By + default, `0`. flicker_threshold : int, optional - Maximum number of allowable flicker hours, by default 30 + Maximum number of allowable flicker hours. By default, `30`. resolution : int, optional - SC resolution, must be input in combination with gid, - by default 640 - max_workers : None | int, optional - Number of workers to use, if 1 run in serial, if None use all - available cores, by default None + SC resolution, must be input in combination with gid. + By default, `640`. + max_workers : int, optional + Number of workers to use. If 1 run, in serial. If `None`, + use all available cores. By default, `None`. out_layer : str, optional Layer to save exclusions under. Layer will be saved in - "excl_fpath", by default None + `excl_fpath`. By default, `None`. + out_tiff : str, optional + Path to output tiff file where exclusions should be saved. + By default, `None`. 
Returns ------- @@ -589,7 +600,8 @@ def run(cls, excl_fpath, res_fpath, building_layer, hub_height, building_threshold=building_threshold, flicker_threshold=flicker_threshold, max_workers=max_workers, - out_layer=out_layer + out_layer=out_layer, + out_tiff=out_tiff ) return out_excl diff --git a/reVX/turbine_flicker/turbine_flicker_cli.py b/reVX/turbine_flicker/turbine_flicker_cli.py index 0b3003cc5..b46f73712 100644 --- a/reVX/turbine_flicker/turbine_flicker_cli.py +++ b/reVX/turbine_flicker/turbine_flicker_cli.py @@ -63,6 +63,7 @@ def run_local(ctx, config): hub_height=config.hub_height, rotor_diameter=config.rotor_diameter, out_layer=config.out_layer, + out_tiff=config.out_tiff, tm_dset=config.tm_dset, building_threshold=config.building_threshold, flicker_threshold=config.flicker_threshold, @@ -119,6 +120,11 @@ def from_config(ctx, config, verbose): help=("Layer to save exclusions under. Layer will be saved in " "excl_fpath, if not provided will be generated from the " "building_layer name and hub-height")) +@click.option('--out_tiff', '-ot', default=None, type=STR, + show_default=True, + help=("Path to output tiff file where exclusions should be " + "saved, if not provided, data will not be written to " + "a file")) @click.option('--tm_dset', '-td', default='techmap_wtk', type=STR, show_default=True, help=("Dataset name in the techmap file containing the " @@ -147,7 +153,7 @@ def from_config(ctx, config, verbose): help='Flag to turn on debug logging. 
Default is not verbose.') @click.pass_context def local(ctx, excl_fpath, res_fpath, building_layer, hub_height, - rotor_diameter, out_layer, tm_dset, building_threshold, + rotor_diameter, out_layer, out_tiff, tm_dset, building_threshold, flicker_threshold, resolution, max_workers, log_dir, verbose): """ Compute turbine flicker on local hardware @@ -171,7 +177,7 @@ def local(ctx, excl_fpath, res_fpath, building_layer, hub_height, building_threshold=building_threshold, flicker_threshold=flicker_threshold, resolution=resolution, max_workers=max_workers, - out_layer=out_layer) + out_layer=out_layer, out_tiff=out_tiff) def get_node_cmd(config): @@ -196,6 +202,7 @@ def get_node_cmd(config): '-h {}'.format(SLURM.s(config.hub_height)), '-rd {}'.format(SLURM.s(config.rotor_diameter)), '-o {}'.format(SLURM.s(config.out_layer)), + '-ot {}'.format(SLURM.s(config.out_tiff)), '-td {}'.format(SLURM.s(config.tm_dset)), '-bldt {}'.format(SLURM.s(config.building_threshold)), '-ft {}'.format(SLURM.s(config.flicker_threshold)), diff --git a/tests/test_turbine_flicker.py b/tests/test_turbine_flicker.py index e764a8b22..794927dec 100644 --- a/tests/test_turbine_flicker.py +++ b/tests/test_turbine_flicker.py @@ -17,6 +17,7 @@ from reVX import TESTDATADIR from reVX.turbine_flicker.turbine_flicker import TurbineFlicker from reVX.turbine_flicker.turbine_flicker_cli import main +from reVX.handlers.geotiff import Geotiff pytest.importorskip('hybrid.flicker') @@ -154,8 +155,8 @@ def test_cli(runner): result = runner.invoke(main, ['from-config', '-c', config_path]) msg = 'Failed with error {}'.format( - traceback.print_exception(*result.exc_info) - ) + traceback.print_exception(*result.exc_info) + ) assert result.exit_code == 0, msg with ExclusionLayers(EXCL_H5) as f: @@ -169,6 +170,53 @@ def test_cli(runner): LOGGERS.clear() +def test_cli_tiff(runner): + """Test Turbine Flicker CLI for saving to tiff. 
""" + + with tempfile.TemporaryDirectory() as td: + excl_h5 = os.path.join(td, os.path.basename(EXCL_H5)) + shutil.copy(EXCL_H5, excl_h5) + out_tiff = f"{BLD_LAYER}_{HUB_HEIGHT}hh_{ROTOR_DIAMETER}rd.tiff" + config = { + "log_directory": td, + "excl_fpath": excl_h5, + "execution_control": { + "option": "local", + }, + "building_layer": BLD_LAYER, + "hub_height": HUB_HEIGHT, + "out_tiff": os.path.join(td, out_tiff), + "rotor_diameter": ROTOR_DIAMETER, + "log_level": "INFO", + "res_fpath": RES_H5, + "resolution": 64, + "tm_dset": "techmap_wind" + } + config_path = os.path.join(td, 'config.json') + with open(config_path, 'w') as f: + json.dump(config, f) + + result = runner.invoke(main, ['from-config', '-c', config_path]) + msg = 'Failed with error {}'.format( + traceback.print_exception(*result.exc_info) + ) + assert result.exit_code == 0, msg + + with ExclusionLayers(EXCL_H5) as f: + baseline = f[BASELINE] + + with ExclusionLayers(excl_h5) as f: + assert out_tiff not in f.layers + assert out_tiff.split('.') not in f.layers + + with Geotiff(os.path.join(td, out_tiff)) as f: + test = f.values[0] + + assert np.allclose(baseline, test) + + LOGGERS.clear() + + def execute_pytest(capture='all', flags='-rapP'): """Execute module as pytest with detailed summary report. 
From a679fffc06f93c332c2df3f08d1c32a348143fc9 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Thu, 15 Sep 2022 17:31:02 -0600 Subject: [PATCH 02/42] Refactor class methods and parallel execution --- reVX/turbine_flicker/turbine_flicker.py | 443 ++++++++++++------------ tests/test_turbine_flicker.py | 53 ++- 2 files changed, 263 insertions(+), 233 deletions(-) diff --git a/reVX/turbine_flicker/turbine_flicker.py b/reVX/turbine_flicker/turbine_flicker.py index f10421e4b..d94239ab9 100644 --- a/reVX/turbine_flicker/turbine_flicker.py +++ b/reVX/turbine_flicker/turbine_flicker.py @@ -109,189 +109,8 @@ def _compute_shadow_flicker(cls, lat, lon, blade_length, wind_dir): return shadow_flicker - @staticmethod - def _invert_shadow_flicker_arr(shadow_flicker): - """ - Check to ensure the shadow_flicker array is odd in shape, i.e. both - dimensions are odd allowing for a central pixel for the turbine to - sit on. Flip both axes to mimic the turbine sitting on each building. - All flicker pixels will now indicate locations where a turbine would - need to be to cause flicker on said building - - Parameters - ---------- - shadow_flicker : ndarray - 2D array centered on the turbine with the number of flicker hours - per "exclusion" pixel - - Returns - ------- - shadow_flicker : ndarray - Inverted 2D shadow flicker array with odd dimensions if needed. - """ - reduce_slice = () - reduce_arr = False - for s in shadow_flicker.shape: - if s % 2: - reduce_slice += (slice(None), ) - else: - reduce_slice += (slice(0, -1), ) - reduce_arr = True - - if reduce_arr: - shape_in = shadow_flicker.shape - shadow_flicker = shadow_flicker[reduce_slice] - msg = ('Shadow flicker array with shape {} does not have a ' - 'central pixel! Shade has been reduced to {}!' 
- .format(shape_in, shadow_flicker.shape)) - logger.warning(msg) - warn(msg) - - return shadow_flicker[::-1, ::-1] - - @classmethod - def _get_flicker_excl_shifts(cls, shadow_flicker, flicker_threshold=30): - """ - Determine locations of shadow flicker that exceed the given threshold, - convert to row and column shifts. These are the locations turbines - would need to in relation to building to cause flicker exceeding the - threshold value. - - Parameters - ---------- - shadow_flicker : ndarray - 2D array centered on the turbine with the number of flicker hours - per "exclusion" pixel - flicker_threshold : int, optional - Maximum number of allowable flicker hours, by default 30 - - Returns - ------- - row_shifts : ndarray - Shifts along axis 0 from building location to pixels to be excluded - col_shifts : ndarray - Shifts along axis 1 from building location to pixels to be excluded - """ - # ensure shadow_flicker array is regularly shaped and invert for - # mapping to buildings - shadow_flicker = cls._invert_shadow_flicker_arr(shadow_flicker) - - # normalize by number of time-steps to match shadow flicker results - flicker_threshold /= 8760 - shape = shadow_flicker.shape - row_shifts, col_shifts = np.where(shadow_flicker > flicker_threshold) - check = (np.any(np.isin(row_shifts, [0, shape[0] - 1])) - or np.any(np.isin(col_shifts, [0, shape[1] - 1]))) - if check: - msg = ("Turbine flicker appears to extend beyond the " - "FlickerModel domain! Please increase the " - "FLICKER_ARRAY_LEN and try again!") - logger.error(msg) - raise RuntimeError(msg) - - row_shifts -= shape[0] // 2 - col_shifts -= shape[1] // 2 - - return row_shifts, col_shifts - - @staticmethod - def _get_building_indices(excl_fpath, building_layer, gid, - resolution=640, building_threshold=0): - """ - Find buildings in sc point sub-array and convert indices to full - exclusion indices - - Parameters - ---------- - excl_fpath : str - Filepath to exclusions h5 file. 
File must contain "building_layer" - and "tm_dset". - building_layer : str - Exclusion layer containing buildings from which turbine flicker - exclusions will be computed. - gid : int - sc point gid to extract buildings for - resolution : int, optional - SC resolution, must be input in combination with gid, - by default 640 - building_threshold : float, optional - Threshold for exclusion layer values to identify pixels with - buildings, values are % of pixel containing a building, - by default 0 - - Returns - ------- - row_idx : ndarray - Axis 0 indices of building in sc point sub-array in full exclusion - array - col_idx : ndarray - Axis 1 indices of building in sc point sub-array in full exclusion - array - shape : tuple - Full exclusion array shape - """ - with ExclusionLayers(excl_fpath) as f: - shape = f.shape - row_slice, col_slice = MeanWindDirectionsPoint.get_agg_slices( - gid, shape, resolution - ) - - sc_blds = f[building_layer, row_slice, col_slice] - - row_idx = np.array(range(*row_slice.indices(row_slice.stop))) - col_idx = np.array(range(*col_slice.indices(col_slice.stop))) - bld_row_idx, bld_col_idx = np.where(sc_blds > building_threshold) - - return row_idx[bld_row_idx], col_idx[bld_col_idx], shape - - @staticmethod - def _create_excl_indices(bld_idx, flicker_shifts, shape): - """ - Create 2D (row, col) indices of pixels to be excluded based on - building indices and shadow flicker shifts. - - Parameters - ---------- - bld_idx : tuple - (row, col) indices of building onto which shadow flicker exclusions - are to be mapped. - flicker_shifts : tuple - Index shifts (row, col) from building locations to exclude based - on shadow flicker results. Shifts are based on shadow flicker - threshold. 
Shadow flicker array is inverted to represent mapping of - shadow onto buildings - shape : tuple - Full exclusion array shape - - Returns - ------- - excl_row_idx : ndarray - Row (axis 0) indices of pixels to be excluded because they will - cause excessive shadow flicker on building in supply curve point - gid subset - excl_col_idx : ndarray - Column (axis 1) indices of pixels to be excluded because they will - cause excessive shadow flicker on building in supply curve point - gid subset - """ - row_idx, col_idx = bld_idx - row_shifts, col_shifts = flicker_shifts - - excl_row_idx = (row_idx + row_shifts[:, None]).ravel() - excl_row_idx[excl_row_idx < 0] = 0 - excl_row_idx[excl_row_idx >= shape[0]] = shape[0] - 1 - - excl_col_idx = (col_idx + col_shifts[:, None]).ravel() - excl_col_idx[excl_col_idx < 0] = 0 - excl_col_idx[excl_col_idx >= shape[1]] = shape[1] - 1 - - return excl_row_idx.astype(np.uint32), excl_col_idx.astype(np.uint32) - - @classmethod - def _exclude_turbine_flicker(cls, point, excl_fpath, res_fpath, - building_layer, hub_height, rotor_diameter, - building_threshold=0, flicker_threshold=30, - resolution=640): + def _exclude_turbine_flicker(self, point, res_fpath, hub_height, + rotor_diameter, flicker_threshold=30): """ Exclude all pixels that will cause flicker exceeding the "flicker_threshold" on buildings that exist within @@ -306,38 +125,22 @@ def _exclude_turbine_flicker(cls, point, excl_fpath, res_fpath, ---------- gid : int Supply curve point gid to aggregate wind directions for - excl_fpath : str - Filepath to exclusions h5 file. File must contain "tm_dset". res_fpath : str Filepath to wind resource .h5 file containing hourly wind direction data - building_layer : str - Exclusion layer containing buildings from which turbine flicker - exclusions will be computed. hub_height : int Hub-height in meters to compute turbine shadow flicker. rotor_diameter : int Rotor diamter in meters to compute shadow flicker. 
- building_threshold : float, optional - Threshold for exclusion layer values to identify pixels with - buildings, values are % of pixel containing a building, - by default 0 flicker_threshold : int, optional Maximum number of allowable flicker hours, by default 30 - resolution : int, optional - SC resolution, must be input in combination with gid, - by default 640 Returns ------- excl_idx : tuple - (row, col) indices of pixels to be excluded because they will cause - excessive shadow flicker on building in supply curve point gid - subset + (row, col) shifts of pixels to be excluded because they + will cause excessive shadow flicker from building location """ - row_idx, col_idx, shape = cls._get_building_indices( - excl_fpath, building_layer, point.name, - resolution=resolution, building_threshold=building_threshold) with WindX(res_fpath, log_vers=False) as f: dset = 'winddirection_{}m'.format(hub_height) @@ -347,18 +150,15 @@ def _exclude_turbine_flicker(cls, point, excl_fpath, res_fpath, wind_dir = wind_dir[:-24] blade_length = rotor_diameter / 2 - shadow_flicker = cls._compute_shadow_flicker(point['latitude'], - point['longitude'], - blade_length, - wind_dir) + shadow_flicker = self._compute_shadow_flicker(point['latitude'], + point['longitude'], + blade_length, + wind_dir) - flicker_shifts = cls._get_flicker_excl_shifts( + flicker_shifts = _get_flicker_excl_shifts( shadow_flicker, flicker_threshold=flicker_threshold) - excl_idx = cls._create_excl_indices((row_idx, col_idx), - flicker_shifts, shape) - - return excl_idx + return flicker_shifts def _preflight_check(self, tm_dset='techmap_wtk'): """ @@ -468,9 +268,6 @@ def compute_exclusions(self, hub_height, rotor_diameter, if max_workers is None: max_workers = os.cpu_count() - etf_kwargs = {"building_threshold": building_threshold, - "flicker_threshold": flicker_threshold, - "resolution": self._res} flicker_arr = np.ones(exclusion_shape, dtype=np.uint8) if max_workers > 1: msg = ('Computing exclusions from {} 
based on {}m hub height ' @@ -482,17 +279,33 @@ def compute_exclusions(self, hub_height, rotor_diameter, loggers = [__name__, 'reVX', 'rex'] with SpawnProcessPool(max_workers=max_workers, loggers=loggers) as exe: - futures = [] + futures = {} for _, point in self._sc_points.iterrows(): + + row_idx, col_idx, shape = _get_building_indices( + self._excl_h5, self._bld_layer, point.name, + resolution=self._res, + building_threshold=building_threshold) + if row_idx.size == 0: + continue + future = exe.submit(self._exclude_turbine_flicker, - point, self._excl_h5, self._res_h5, - self._bld_layer, hub_height, + point, self._res_h5, hub_height, rotor_diameter, - **etf_kwargs) - futures.append(future) + flicker_threshold=flicker_threshold) + futures[future] = point for i, future in enumerate(as_completed(futures)): - row_idx, col_idx = future.result() + flicker_shifts = future.result() + point = futures[future] + + row_idx, col_idx, shape = _get_building_indices( + self._excl_h5, self._bld_layer, point.name, + resolution=self._res, + building_threshold=building_threshold) + row_idx, col_idx = _create_excl_indices( + (row_idx, col_idx), flicker_shifts, shape) + flicker_arr[row_idx, col_idx] = 0 logger.info('Completed {} out of {} gids' .format((i + 1), len(futures))) @@ -505,9 +318,18 @@ def compute_exclusions(self, hub_height, rotor_diameter, ) logger.info(msg) for i, (_, point) in enumerate(self._sc_points.iterrows()): - row_idx, col_idx = self._exclude_turbine_flicker( - point, self._excl_h5, self._res_h5, self._bld_layer, - hub_height, rotor_diameter, **etf_kwargs) + row_idx, col_idx, shape = _get_building_indices( + self._excl_h5, self._bld_layer, point.name, + resolution=self._res, building_threshold=building_threshold) + if row_idx.size == 0: + continue + + flicker_shifts = self._exclude_turbine_flicker( + point, self._res_h5, hub_height, rotor_diameter, + flicker_threshold=flicker_threshold) + row_idx, col_idx = _create_excl_indices( + (row_idx, col_idx), 
flicker_shifts, shape) + flicker_arr[row_idx, col_idx] = 0 logger.debug('Completed {} out of {} gids' .format((i + 1), len(self._sc_points))) @@ -603,5 +425,180 @@ def run(cls, excl_fpath, res_fpath, building_layer, hub_height, out_layer=out_layer, out_tiff=out_tiff ) - return out_excl + + +def _get_building_indices(excl_fpath, building_layer, gid, + resolution=640, building_threshold=0): + """Find buildings exclusion indices + + Parameters + ---------- + excl_fpath : str + Filepath to exclusions h5 file. File must contain + `building_layer` and `tm_dset`. + building_layer : str + Exclusion layer containing buildings from which turbine flicker + exclusions will be computed. + gid : int + SC point gid to extract buildings for. + resolution : int, optional + SC resolution, must be input in combination with gid. + By default, `640`. + building_threshold : float, optional + Threshold for exclusion layer values to identify pixels with + buildings, values are % of pixel containing a building. + By default, `0`. + + Returns + ------- + row_idx : ndarray + Axis 0 indices of building in sc point sub-array in full + exclusion array. + col_idx : ndarray + Axis 1 indices of building in sc point sub-array in full + exclusion array. + shape : tuple + Full exclusion array shape. + """ + with ExclusionLayers(excl_fpath) as f: + shape = f.shape + row_slice, col_slice = MeanWindDirectionsPoint.get_agg_slices( + gid, shape, resolution + ) + + sc_blds = f[building_layer, row_slice, col_slice] + + row_idx = np.array(range(*row_slice.indices(row_slice.stop))) + col_idx = np.array(range(*col_slice.indices(col_slice.stop))) + bld_row_idx, bld_col_idx = np.where(sc_blds > building_threshold) + + return row_idx[bld_row_idx], col_idx[bld_col_idx], shape + + +def _create_excl_indices(bld_idx, flicker_shifts, shape): + """ + Create 2D (row, col) indices of pixels to be excluded based on + building indices and shadow flicker shifts. 
+ + Parameters + ---------- + bld_idx : tuple + (row, col) indices of building onto which shadow flicker + exclusions are to be mapped. + flicker_shifts : tuple + Index shifts (row, col) from building locations to exclude based + on shadow flicker results. Shifts are based on shadow flicker + threshold. Shadow flicker array is inverted to represent mapping + of shadow onto buildings + shape : tuple + Full exclusion array shape + + Returns + ------- + excl_row_idx : ndarray + Row (axis 0) indices of pixels to be excluded because they will + cause excessive shadow flicker on building in supply curve point + gid subset + excl_col_idx : ndarray + Column (axis 1) indices of pixels to be excluded because they + will cause excessive shadow flicker on building in supply curve + point gid subset + """ + row_idx, col_idx = bld_idx + row_shifts, col_shifts = flicker_shifts + + excl_row_idx = (row_idx + row_shifts[:, None]).ravel() + excl_row_idx[excl_row_idx < 0] = 0 + excl_row_idx[excl_row_idx >= shape[0]] = shape[0] - 1 + + excl_col_idx = (col_idx + col_shifts[:, None]).ravel() + excl_col_idx[excl_col_idx < 0] = 0 + excl_col_idx[excl_col_idx >= shape[1]] = shape[1] - 1 + + return excl_row_idx.astype(np.int32), excl_col_idx.astype(np.int32) + + +def _invert_shadow_flicker_arr(shadow_flicker): + """ + Check to ensure the shadow_flicker array is odd in shape, i.e. both + dimensions are odd allowing for a central pixel for the turbine to + sit on. Flip both axes to mimic the turbine sitting on each + building. All flicker pixels will now indicate locations where a + turbine would need to be to cause flicker on said building + + Parameters + ---------- + shadow_flicker : ndarray + 2D array centered on the turbine with the number of flicker + hours per "exclusion" pixel + + Returns + ------- + shadow_flicker : ndarray + Inverted 2D shadow flicker array with odd dimensions if needed. 
+ """ + reduce_slice = () + reduce_arr = False + for s in shadow_flicker.shape: + if s % 2: + reduce_slice += (slice(None), ) + else: + reduce_slice += (slice(0, -1), ) + reduce_arr = True + + if reduce_arr: + shape_in = shadow_flicker.shape + shadow_flicker = shadow_flicker[reduce_slice] + msg = ('Shadow flicker array with shape {} does not have a ' + 'central pixel! Shade has been reduced to {}!' + .format(shape_in, shadow_flicker.shape)) + logger.warning(msg) + warn(msg) + + return shadow_flicker[::-1, ::-1] + + +def _get_flicker_excl_shifts(shadow_flicker, flicker_threshold=30): + """ + Determine locations of shadow flicker that exceed the given threshold, + convert to row and column shifts. These are the locations turbines + would need to in relation to building to cause flicker exceeding the + threshold value. + + Parameters + ---------- + shadow_flicker : ndarray + 2D array centered on the turbine with the number of flicker hours + per "exclusion" pixel + flicker_threshold : int, optional + Maximum number of allowable flicker hours, by default 30 + + Returns + ------- + row_shifts : ndarray + Shifts along axis 0 from building location to pixels to be excluded + col_shifts : ndarray + Shifts along axis 1 from building location to pixels to be excluded + """ + # ensure shadow_flicker array is regularly shaped and invert for + # mapping to buildings + shadow_flicker = _invert_shadow_flicker_arr(shadow_flicker) + + # normalize by number of time-steps to match shadow flicker results + flicker_threshold /= 8760 + shape = shadow_flicker.shape + row_shifts, col_shifts = np.where(shadow_flicker > flicker_threshold) + check = (np.any(np.isin(row_shifts, [0, shape[0] - 1])) + or np.any(np.isin(col_shifts, [0, shape[1] - 1]))) + if check: + msg = ("Turbine flicker appears to extend beyond the " + "FlickerModel domain! 
Please increase the " + "FLICKER_ARRAY_LEN and try again!") + logger.warning(msg) + warn(msg) + + row_shifts -= shape[0] // 2 + col_shifts -= shape[1] // 2 + + return row_shifts, col_shifts diff --git a/tests/test_turbine_flicker.py b/tests/test_turbine_flicker.py index 794927dec..e59dea427 100644 --- a/tests/test_turbine_flicker.py +++ b/tests/test_turbine_flicker.py @@ -15,7 +15,13 @@ from reV.handlers.exclusions import ExclusionLayers from reVX import TESTDATADIR -from reVX.turbine_flicker.turbine_flicker import TurbineFlicker +from reVX.turbine_flicker.turbine_flicker import ( + TurbineFlicker, + _create_excl_indices, + _get_building_indices, + _get_flicker_excl_shifts, + _invert_shadow_flicker_arr +) from reVX.turbine_flicker.turbine_flicker_cli import main from reVX.handlers.geotiff import Geotiff @@ -53,9 +59,10 @@ def test_shadow_mapping(shadow_loc): shadow_arr = np.zeros(shape, dtype=np.int8) shadow_arr[bld_idx[0] + shadow_loc[0], bld_idx[1] + shadow_loc[1]] = 1 - flicker_shifts = TurbineFlicker._get_flicker_excl_shifts(shadow_arr) - test_row_idx, test_col_idx = TurbineFlicker._create_excl_indices( - bld_idx, flicker_shifts, shape) + flicker_shifts = _get_flicker_excl_shifts(shadow_arr) + test_row_idx, test_col_idx = _create_excl_indices(bld_idx, + flicker_shifts, + shape) assert np.allclose(baseline_row_idx, test_row_idx) assert np.allclose(baseline_col_idx, test_col_idx) @@ -76,7 +83,7 @@ def test_shadow_flicker(flicker_threshold): baseline = (shadow_flicker[::-1, ::-1].copy() <= (flicker_threshold / 8760)).astype(np.int8) - row_shifts, col_shifts = TurbineFlicker._get_flicker_excl_shifts( + row_shifts, col_shifts = _get_flicker_excl_shifts( shadow_flicker, flicker_threshold=flicker_threshold) L = TurbineFlicker.FLICKER_ARRAY_LEN @@ -99,18 +106,44 @@ def test_excl_indices_mapping(): baseline = (arr <= 0.8).astype(np.int8) bld_idx = (np.array([64]), np.array([64])) - flicker_shifts = TurbineFlicker._get_flicker_excl_shifts( - arr[::-1, ::-1], 
flicker_threshold=(0.8 * 8760)) + flicker_shifts = _get_flicker_excl_shifts(arr[::-1, ::-1], + flicker_threshold=(0.8 * 8760)) - row_idx, col_idx = TurbineFlicker._create_excl_indices(bld_idx, - flicker_shifts, - shape) + row_idx, col_idx = _create_excl_indices(bld_idx, flicker_shifts, shape) test = np.ones(shape, dtype=np.int8) test[row_idx, col_idx] = 0 assert np.allclose(baseline, test) +def test_get_building_indices(): + """Test retrieving building indices. """ + row_idx, col_idx, __ = _get_building_indices(EXCL_H5, BLD_LAYER, 0, + resolution=64, + building_threshold=0) + with ExclusionLayers(EXCL_H5) as f: + buildings = f[BLD_LAYER, 0:64, 0:64] + + assert (buildings[row_idx, col_idx] > 0).all() + + +def test_invert_shadow_flicker_arr(): + """Test inverting the shadow flicker array. """ + + arr = np.array([[ 0, 1, 2, 3], + [ 4, 5, 6, 7], + [ 8, 9, 10, 11], + [12, 13, 14, 15]]) + + expected = np.array([[10, 9, 8], + [ 6, 5, 4], + [ 2, 1, 0]]) + + with pytest.warns(Warning): + out = _invert_shadow_flicker_arr(arr) + assert np.allclose(out, expected) + + @pytest.mark.parametrize('max_workers', [None, 1]) def test_turbine_flicker(max_workers): """ From 5095dfb1a1a9b80400f88e084c3d262ce3626d76 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Thu, 15 Sep 2022 17:34:26 -0600 Subject: [PATCH 03/42] Minor typo fixes and such --- reVX/turbine_flicker/turbine_flicker.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/reVX/turbine_flicker/turbine_flicker.py b/reVX/turbine_flicker/turbine_flicker.py index d94239ab9..49303404a 100644 --- a/reVX/turbine_flicker/turbine_flicker.py +++ b/reVX/turbine_flicker/turbine_flicker.py @@ -58,9 +58,7 @@ def __init__(self, excl_fpath, res_fpath, building_layer, self._sc_points = self._get_sc_points(tm_dset=tm_dset) def __repr__(self): - msg = ("{} from {}" - .format(self.__class__.__name__, self._bld_layer)) - + msg = "{} from {}".format(self.__class__.__name__, self._bld_layer) return msg @classmethod @@ 
-131,7 +129,7 @@ def _exclude_turbine_flicker(self, point, res_fpath, hub_height, hub_height : int Hub-height in meters to compute turbine shadow flicker. rotor_diameter : int - Rotor diamter in meters to compute shadow flicker. + Rotor diameter in meters to compute shadow flicker. flicker_threshold : int, optional Maximum number of allowable flicker hours, by default 30 @@ -146,6 +144,7 @@ def _exclude_turbine_flicker(self, point, res_fpath, hub_height, dset = 'winddirection_{}m'.format(hub_height) wind_dir = f[dset, :, int(point['res_gid'])] + # pylint: disable=unsubscriptable-object if len(wind_dir) == 8784: wind_dir = wind_dir[:-24] @@ -551,8 +550,8 @@ def _invert_shadow_flicker_arr(shadow_flicker): shape_in = shadow_flicker.shape shadow_flicker = shadow_flicker[reduce_slice] msg = ('Shadow flicker array with shape {} does not have a ' - 'central pixel! Shade has been reduced to {}!' - .format(shape_in, shadow_flicker.shape)) + 'central pixel! Shade has been reduced to {}!' + .format(shape_in, shadow_flicker.shape)) logger.warning(msg) warn(msg) From f15c1e72864b9b30e742e8318055be1e822eb545 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Thu, 15 Sep 2022 18:42:38 -0600 Subject: [PATCH 04/42] Grid cell size and max flicker exclusion range can now be set in init - still have to add to config --- reVX/turbine_flicker/turbine_flicker.py | 67 +++++++++++++++---------- tests/test_turbine_flicker.py | 17 +++---- 2 files changed, 48 insertions(+), 36 deletions(-) diff --git a/reVX/turbine_flicker/turbine_flicker.py b/reVX/turbine_flicker/turbine_flicker.py index 49303404a..65107246c 100644 --- a/reVX/turbine_flicker/turbine_flicker.py +++ b/reVX/turbine_flicker/turbine_flicker.py @@ -26,11 +26,11 @@ class TurbineFlicker: cause excessive flicker on building """ STEPS_PER_HOUR = 1 - GRIDCELL_SIZE = 90 - FLICKER_ARRAY_LEN = 101 def __init__(self, excl_fpath, res_fpath, building_layer, - resolution=640, tm_dset='techmap_wtk'): + resolution=640, grid_cell_size=90, + 
max_flicker_exclusion_range=10_000, + tm_dset='techmap_wtk'): """ Parameters ---------- @@ -43,17 +43,27 @@ def __init__(self, excl_fpath, res_fpath, building_layer, building_layer : str Exclusion layer containing buildings from which turbine flicker exclusions will be computed. - tm_dset : str, optional - Dataset / layer name for wind toolkit techmap, - by default 'techmap_wtk' resolution : int, optional SC resolution, must be input in combination with gid, by default 640 + grid_cell_size : float, optional + Length (m) of a side of each grid cell in `excl_fpath`. + max_flicker_exclusion_range : float, optional + Max distance (m) that flicker exclusions will extend in + any of the cardinal directions. Note that increasing this + value can lead to drastically instead memory requirements. + This value may be increased slightly in order to yield + odd exclusion array shapes. + tm_dset : str, optional + Dataset / layer name for wind toolkit techmap, + by default 'techmap_wtk' """ self._excl_h5 = excl_fpath self._res_h5 = res_fpath self._bld_layer = building_layer self._res = resolution + self._grid_cell_size = grid_cell_size + self._max_flicker_exclusion_range = max_flicker_exclusion_range self._preflight_check(tm_dset=tm_dset) self._sc_points = self._get_sc_points(tm_dset=tm_dset) @@ -61,46 +71,44 @@ def __repr__(self): msg = "{} from {}".format(self.__class__.__name__, self._bld_layer) return msg - @classmethod - def _compute_shadow_flicker(cls, lat, lon, blade_length, wind_dir): + def _compute_shadow_flicker(self, lat, lon, rotor_diameter, wind_dir): """ Compute shadow flicker for given location Parameters ---------- lat : float - Latitude coordinate of turbine + Latitude coordinate of turbine. lon : float - Longitude coordinate of turbine - blade_length : float - Turbine blade length. + Longitude coordinate of turbine. + rotor_diameter : float + Turbine rotor diameter (m). 
wind_dir : ndarray - Time-series of wind direction for turbine + Time-series of wind direction for turbine. Returns ------- shadow_flicker : ndarray - 2D array centered on the turbine with the number of flicker hours - per "exclusion" pixel + 2D array centered on the turbine with the number of flicker + hours per "exclusion" pixel """ # Import HOPP dynamically so its not a requirement from hybrid.flicker.flicker_mismatch_grid import FlickerMismatch - mult = (cls.FLICKER_ARRAY_LEN * cls.GRIDCELL_SIZE) / 2 - mult = mult / (blade_length * 2) + mult = self._max_flicker_exclusion_range / rotor_diameter FlickerMismatch.diam_mult_nwe = mult FlickerMismatch.diam_mult_s = mult - FlickerMismatch.steps_per_hour = cls.STEPS_PER_HOUR + FlickerMismatch.steps_per_hour = self.STEPS_PER_HOUR FlickerMismatch.turbine_tower_shadow = False assert len(wind_dir) == 8760 shadow_flicker = FlickerMismatch(lat, lon, - blade_length=blade_length, + blade_length=rotor_diameter / 2, angles_per_step=None, wind_dir=wind_dir, - gridcell_height=cls.GRIDCELL_SIZE, - gridcell_width=cls.GRIDCELL_SIZE, + gridcell_height=self._grid_cell_size, + gridcell_width=self._grid_cell_size, gridcells_per_string=1) shadow_flicker = shadow_flicker.create_heat_maps(range(0, 8760), ("time", ))[0] @@ -148,10 +156,9 @@ def _exclude_turbine_flicker(self, point, res_fpath, hub_height, if len(wind_dir) == 8784: wind_dir = wind_dir[:-24] - blade_length = rotor_diameter / 2 shadow_flicker = self._compute_shadow_flicker(point['latitude'], point['longitude'], - blade_length, + rotor_diameter, wind_dir) flicker_shifts = _get_flicker_excl_shifts( @@ -190,6 +197,14 @@ def _preflight_check(self, tm_dset='techmap_wtk'): 'following error:\n{}'.format(e)) raise e + self._set_max_grid_size_for_odd_shaped_arr() + + def _set_max_grid_size_for_odd_shaped_arr(self): + """Set the max_flicker_exclusion_range to multiple of 0.5 grids """ + mult = np.round(self._max_flicker_exclusion_range + / self._grid_cell_size ) + 0.5 + 
self._max_flicker_exclusion_range = mult * self._grid_cell_size + def _get_sc_points(self, tm_dset='techmap_wtk'): """ Get the valid sc points to run turbine flicker for @@ -591,9 +606,9 @@ def _get_flicker_excl_shifts(shadow_flicker, flicker_threshold=30): check = (np.any(np.isin(row_shifts, [0, shape[0] - 1])) or np.any(np.isin(col_shifts, [0, shape[1] - 1]))) if check: - msg = ("Turbine flicker appears to extend beyond the " - "FlickerModel domain! Please increase the " - "FLICKER_ARRAY_LEN and try again!") + msg = ("Turbine flicker appears to extend beyond the FlickerModel " + "domain! Consider increasing the maximum flicker exclusion " + "range.") logger.warning(msg) warn(msg) diff --git a/tests/test_turbine_flicker.py b/tests/test_turbine_flicker.py index e59dea427..4a76ceb6e 100644 --- a/tests/test_turbine_flicker.py +++ b/tests/test_turbine_flicker.py @@ -73,24 +73,21 @@ def test_shadow_flicker(flicker_threshold): """ Test shadow_flicker """ - blade_length = ROTOR_DIAMETER / 2 lat, lon = 39.913373, -105.220105 wind_dir = np.zeros(8760) - shadow_flicker = TurbineFlicker._compute_shadow_flicker(lat, - lon, - blade_length, - wind_dir) + tf = TurbineFlicker(EXCL_H5, RES_H5, BLD_LAYER, grid_cell_size=90, + max_flicker_exclusion_range=4_510) + shadow_flicker = tf._compute_shadow_flicker(lat, lon, ROTOR_DIAMETER, + wind_dir) baseline = (shadow_flicker[::-1, ::-1].copy() <= (flicker_threshold / 8760)).astype(np.int8) row_shifts, col_shifts = _get_flicker_excl_shifts( shadow_flicker, flicker_threshold=flicker_threshold) - L = TurbineFlicker.FLICKER_ARRAY_LEN - L2 = int((L - 1) / 2) - test = np.ones((L, L), dtype=np.int8) - test[L2, L2] = 0 - test[row_shifts + L2, col_shifts + L2] = 0 + test = np.ones_like(baseline) + test[50, 50] = 0 + test[row_shifts + 50, col_shifts + 50] = 0 assert np.allclose(baseline, test) From 29eb61d8d2c61838d292627ee69156b6f2ce00a8 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Fri, 16 Sep 2022 11:19:31 -0600 Subject: [PATCH 05/42] Added two 
new options to config --- reVX/config/turbine_flicker.py | 16 +++++ reVX/turbine_flicker/turbine_flicker.py | 15 ++++- reVX/turbine_flicker/turbine_flicker_cli.py | 21 ++++++- tests/test_turbine_flicker.py | 67 +++++++++++++++++++++ 4 files changed, 115 insertions(+), 4 deletions(-) diff --git a/reVX/config/turbine_flicker.py b/reVX/config/turbine_flicker.py index 58d588b19..d425cd4c0 100644 --- a/reVX/config/turbine_flicker.py +++ b/reVX/config/turbine_flicker.py @@ -23,6 +23,8 @@ def __init__(self, config): super().__init__(config) self._default_tm_dset = 'techmap_wtk' self._default_resolution = 128 + self._default_grid_cell_size = 90 + self._default_max_flicker_exclusion_range = 10_000 self._default_building_threshold = 0 self._default_flicker_threshold = 30 @@ -65,6 +67,20 @@ def resolution(self): """Get the supply curve resolution.""" return self.get('resolution', self._default_resolution) + @property + def grid_cell_size(self): + """Get the length (m) of a side of each grid cell in `excl_fpath`.""" + return self.get('grid_cell_size', self._default_grid_cell_size) + + @property + def max_flicker_exclusion_range(self): + """ + Get the max distance (m) that flicker exclusions will extend in + any of the cardinal directions. 
+ """ + return self.get('max_flicker_exclusion_range', + self._default_max_flicker_exclusion_range) + @property def building_threshold(self): """ diff --git a/reVX/turbine_flicker/turbine_flicker.py b/reVX/turbine_flicker/turbine_flicker.py index 65107246c..133f11a00 100644 --- a/reVX/turbine_flicker/turbine_flicker.py +++ b/reVX/turbine_flicker/turbine_flicker.py @@ -373,7 +373,8 @@ def compute_exclusions(self, hub_height, rotor_diameter, @classmethod def run(cls, excl_fpath, res_fpath, building_layer, hub_height, rotor_diameter, tm_dset='techmap_wtk', building_threshold=0, - flicker_threshold=30, resolution=640, max_workers=None, + flicker_threshold=30, resolution=640, grid_cell_size=90, + max_flicker_exclusion_range=10_000, max_workers=None, out_layer=None, out_tiff=None): """Run flicker exclusion layer generation. @@ -412,6 +413,14 @@ def run(cls, excl_fpath, res_fpath, building_layer, hub_height, resolution : int, optional SC resolution, must be input in combination with gid. By default, `640`. + grid_cell_size : float, optional + Length (m) of a side of each grid cell in `excl_fpath`. + max_flicker_exclusion_range : float, optional + Max distance (m) that flicker exclusions will extend in + any of the cardinal directions. Note that increasing this + value can lead to drastically instead memory requirements. + This value may be increased slightly in order to yield + odd exclusion array shapes. max_workers : int, optional Number of workers to use. If 1 run, in serial. If `None`, use all available cores. By default, `None`. 
@@ -429,7 +438,9 @@ def run(cls, excl_fpath, res_fpath, building_layer, hub_height, flicker on buildings in "building_layer" """ flicker = cls(excl_fpath, res_fpath, building_layer, - resolution=resolution, tm_dset=tm_dset) + resolution=resolution, grid_cell_size=grid_cell_size, + max_flicker_exclusion_range=max_flicker_exclusion_range, + tm_dset=tm_dset) out_excl = flicker.compute_exclusions( hub_height, rotor_diameter, diff --git a/reVX/turbine_flicker/turbine_flicker_cli.py b/reVX/turbine_flicker/turbine_flicker_cli.py index b46f73712..e53103673 100644 --- a/reVX/turbine_flicker/turbine_flicker_cli.py +++ b/reVX/turbine_flicker/turbine_flicker_cli.py @@ -68,6 +68,8 @@ def run_local(ctx, config): building_threshold=config.building_threshold, flicker_threshold=config.flicker_threshold, resolution=config.resolution, + grid_cell_size=config.grid_cell_size, + max_flicker_exclusion_range=config.max_flicker_exclusion_range, max_workers=config.execution_control.max_workers, log_dir=config.log_directory, verbose=config.log_level) @@ -142,6 +144,16 @@ def from_config(ctx, config, verbose): help=("SC resolution, must be input in combination with gid. " "Prefered option is to use the row / col slices to define " "the SC point instead")) +@click.option('--grid_cell_size', '-gcs', default=90, type=INT, + show_default=True, + help=("Length (m) of a side of each grid cell in `excl_fpath`.")) +@click.option('--max_flicker_exclusion_range', '-mfer', default=10_000, + type=INT, show_default=True, + help=("Max distance (m) that flicker exclusions will extend in " + "any of the cardinal directions. Note that increasing " + "this value can lead to drastically instead memory " + "requirements. This value may be increased slightly in " + "order to yield odd exclusion array shapes.")) @click.option('--max_workers', '-mw', default=None, type=INT, show_default=True, help=("Number of cores to run summary on. 
None is all " @@ -154,7 +166,8 @@ def from_config(ctx, config, verbose): @click.pass_context def local(ctx, excl_fpath, res_fpath, building_layer, hub_height, rotor_diameter, out_layer, out_tiff, tm_dset, building_threshold, - flicker_threshold, resolution, max_workers, log_dir, verbose): + flicker_threshold, resolution, grid_cell_size, + max_flicker_exclusion_range, max_workers, log_dir, verbose): """ Compute turbine flicker on local hardware """ @@ -177,7 +190,9 @@ def local(ctx, excl_fpath, res_fpath, building_layer, hub_height, building_threshold=building_threshold, flicker_threshold=flicker_threshold, resolution=resolution, max_workers=max_workers, - out_layer=out_layer, out_tiff=out_tiff) + out_layer=out_layer, out_tiff=out_tiff, + grid_cell_size=grid_cell_size, + max_flicker_exclusion_range=max_flicker_exclusion_range) def get_node_cmd(config): @@ -207,6 +222,8 @@ def get_node_cmd(config): '-bldt {}'.format(SLURM.s(config.building_threshold)), '-ft {}'.format(SLURM.s(config.flicker_threshold)), '-res {}'.format(SLURM.s(config.resolution)), + '-gcs {}'.format(SLURM.s(config.grid_cell_size)), + '-mfer {}'.format(SLURM.s(config.max_flicker_exclusion_range)), '-mw {}'.format(SLURM.s(config.execution_control.max_workers)), '-log {}'.format(SLURM.s(config.log_directory)), ] diff --git a/tests/test_turbine_flicker.py b/tests/test_turbine_flicker.py index 4a76ceb6e..27786d8d4 100644 --- a/tests/test_turbine_flicker.py +++ b/tests/test_turbine_flicker.py @@ -247,6 +247,73 @@ def test_cli_tiff(runner): LOGGERS.clear() +def test_cli_max_flicker_exclusion_range(runner): + """Test Turbine Flicker CLI with max_flicker_exclusion_range value. 
""" + + with tempfile.TemporaryDirectory() as td: + excl_h5 = os.path.join(td, os.path.basename(EXCL_H5)) + shutil.copy(EXCL_H5, excl_h5) + out_tiff_def = f"{BLD_LAYER}_{HUB_HEIGHT}hh_{ROTOR_DIAMETER}rd.tiff" + config = { + "log_directory": td, + "excl_fpath": excl_h5, + "execution_control": { + "option": "local", + }, + "building_layer": BLD_LAYER, + "hub_height": HUB_HEIGHT, + "out_tiff": os.path.join(td, out_tiff_def), + "rotor_diameter": ROTOR_DIAMETER, + "log_level": "INFO", + "res_fpath": RES_H5, + "resolution": 64, + "tm_dset": "techmap_wind" + } + config_path = os.path.join(td, 'config.json') + with open(config_path, 'w') as f: + json.dump(config, f) + + result = runner.invoke(main, ['from-config', '-c', config_path]) + msg = 'Failed with error {}'.format( + traceback.print_exception(*result.exc_info) + ) + assert result.exit_code == 0, msg + + out_tiff = f"{BLD_LAYER}_{HUB_HEIGHT}hh_{ROTOR_DIAMETER}rd_5k.tiff" + config["out_tiff"] = os.path.join(td, out_tiff) + config["max_flicker_exclusion_range"] = 5_000 + config_path = os.path.join(td, 'config.json') + with open(config_path, 'w') as f: + json.dump(config, f) + + result = runner.invoke(main, ['from-config', '-c', config_path]) + msg = 'Failed with error {}'.format( + traceback.print_exception(*result.exc_info) + ) + assert result.exit_code == 0, msg + + with ExclusionLayers(EXCL_H5) as f: + baseline = f[BASELINE] + + with ExclusionLayers(excl_h5) as f: + assert out_tiff_def not in f.layers + assert out_tiff_def.split('.') not in f.layers + assert out_tiff not in f.layers + assert out_tiff.split('.') not in f.layers + + with Geotiff(os.path.join(td, out_tiff_def)) as f: + test = f.values[0] + + with Geotiff(os.path.join(td, out_tiff)) as f: + test2 = f.values[0] + + assert np.allclose(baseline, test) + assert np.allclose(baseline, test2) + assert np.allclose(test, test2) + + LOGGERS.clear() + + def execute_pytest(capture='all', flags='-rapP'): """Execute module as pytest with detailed summary report. 
From 204c4e09cf18961d7275852a8eeac78983145de1 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Fri, 16 Sep 2022 11:20:43 -0600 Subject: [PATCH 06/42] Minor formatting --- reVX/turbine_flicker/turbine_flicker.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/reVX/turbine_flicker/turbine_flicker.py b/reVX/turbine_flicker/turbine_flicker.py index 133f11a00..b85f656aa 100644 --- a/reVX/turbine_flicker/turbine_flicker.py +++ b/reVX/turbine_flicker/turbine_flicker.py @@ -334,7 +334,8 @@ def compute_exclusions(self, hub_height, rotor_diameter, for i, (_, point) in enumerate(self._sc_points.iterrows()): row_idx, col_idx, shape = _get_building_indices( self._excl_h5, self._bld_layer, point.name, - resolution=self._res, building_threshold=building_threshold) + resolution=self._res, + building_threshold=building_threshold) if row_idx.size == 0: continue From 42605ebf2779fc57a54d306b83e551e82810b31b Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Fri, 16 Sep 2022 11:26:35 -0600 Subject: [PATCH 07/42] Removed extra spaces --- reVX/turbine_flicker/turbine_flicker.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/reVX/turbine_flicker/turbine_flicker.py b/reVX/turbine_flicker/turbine_flicker.py index b85f656aa..410463a20 100644 --- a/reVX/turbine_flicker/turbine_flicker.py +++ b/reVX/turbine_flicker/turbine_flicker.py @@ -334,7 +334,7 @@ def compute_exclusions(self, hub_height, rotor_diameter, for i, (_, point) in enumerate(self._sc_points.iterrows()): row_idx, col_idx, shape = _get_building_indices( self._excl_h5, self._bld_layer, point.name, - resolution=self._res, + resolution=self._res, building_threshold=building_threshold) if row_idx.size == 0: continue From 8769c85edc48fa35270320bb57f63e261c4cb44a Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Fri, 16 Sep 2022 11:29:22 -0600 Subject: [PATCH 08/42] Linter fixes --- reVX/turbine_flicker/turbine_flicker.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/reVX/turbine_flicker/turbine_flicker.py b/reVX/turbine_flicker/turbine_flicker.py index 410463a20..fd6184f58 100644 --- a/reVX/turbine_flicker/turbine_flicker.py +++ b/reVX/turbine_flicker/turbine_flicker.py @@ -202,7 +202,7 @@ def _preflight_check(self, tm_dset='techmap_wtk'): def _set_max_grid_size_for_odd_shaped_arr(self): """Set the max_flicker_exclusion_range to multiple of 0.5 grids """ mult = np.round(self._max_flicker_exclusion_range - / self._grid_cell_size ) + 0.5 + / self._grid_cell_size) + 0.5 self._max_flicker_exclusion_range = mult * self._grid_cell_size def _get_sc_points(self, tm_dset='techmap_wtk'): @@ -616,7 +616,7 @@ def _get_flicker_excl_shifts(shadow_flicker, flicker_threshold=30): shape = shadow_flicker.shape row_shifts, col_shifts = np.where(shadow_flicker > flicker_threshold) check = (np.any(np.isin(row_shifts, [0, shape[0] - 1])) - or np.any(np.isin(col_shifts, [0, shape[1] - 1]))) + or np.any(np.isin(col_shifts, [0, shape[1] - 1]))) if check: msg = ("Turbine flicker appears to extend beyond the FlickerModel " "domain! 
Consider increasing the maximum flicker exclusion " From 4cd2084c611e9959bbbd36a952906b4e8feae2e4 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Fri, 16 Sep 2022 12:34:12 -0600 Subject: [PATCH 09/42] Refactored to add new obj attributes --- reVX/turbine_flicker/turbine_flicker.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/reVX/turbine_flicker/turbine_flicker.py b/reVX/turbine_flicker/turbine_flicker.py index fd6184f58..21838dd88 100644 --- a/reVX/turbine_flicker/turbine_flicker.py +++ b/reVX/turbine_flicker/turbine_flicker.py @@ -66,6 +66,9 @@ def __init__(self, excl_fpath, res_fpath, building_layer, self._max_flicker_exclusion_range = max_flicker_exclusion_range self._preflight_check(tm_dset=tm_dset) self._sc_points = self._get_sc_points(tm_dset=tm_dset) + with ExclusionLayers(excl_fpath) as f: + self.profile = f.profile + self._exclusion_shape = f.shape def __repr__(self): msg = "{} from {}".format(self.__class__.__name__, self._bld_layer) @@ -275,14 +278,11 @@ def compute_exclusions(self, hub_height, rotor_diameter, 2D inclusion array. 
Pixels to exclude (0) to prevent shadow flicker on buildings in "building_layer """ - with ExclusionLayers(self._excl_h5) as f: - exclusion_shape = f.shape - profile = f.profile if max_workers is None: max_workers = os.cpu_count() - flicker_arr = np.ones(exclusion_shape, dtype=np.uint8) + flicker_arr = np.ones(self._exclusion_shape, dtype=np.uint8) if max_workers > 1: msg = ('Computing exclusions from {} based on {}m hub height ' 'turbines with {}m rotor diameters in parallel using {} ' @@ -362,12 +362,13 @@ def compute_exclusions(self, hub_height, rotor_diameter, rotor_diameter) ) ExclusionsConverter._write_layer(self._excl_h5, out_layer, - profile, flicker_arr, + self.profile, flicker_arr, description=description) if out_tiff: logger.info('Saving flicker inclusion layer to {}' .format(out_tiff)) - ExclusionsConverter._write_geotiff(out_tiff, profile, flicker_arr) + ExclusionsConverter._write_geotiff(out_tiff, self.profile, + flicker_arr) return flicker_arr From f1bc36f0aee4061e3c5b77ee7647ea8ca202ef70 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Fri, 16 Sep 2022 14:19:24 -0600 Subject: [PATCH 10/42] Updated property names --- reVX/setbacks/parcel_setbacks.py | 4 ++-- reVX/setbacks/regulations.py | 12 ++++++------ tests/test_setbacks.py | 22 +++++++++++----------- 3 files changed, 19 insertions(+), 19 deletions(-) diff --git a/reVX/setbacks/parcel_setbacks.py b/reVX/setbacks/parcel_setbacks.py index c4d4ca654..e04f57fad 100644 --- a/reVX/setbacks/parcel_setbacks.py +++ b/reVX/setbacks/parcel_setbacks.py @@ -35,12 +35,12 @@ def _compute_generic_setbacks(self, features_fpath): Raster array of setbacks """ logger.info("Computing generic setbacks") - if np.isclose(self._regulations.generic_setback, 0): + if np.isclose(self._regulations.generic, 0): return self._rasterizer.rasterize(shapes=None) features = self._parse_features(features_fpath) setbacks = features.buffer(0).difference( - features.buffer(-1 * self._regulations.generic_setback)) + features.buffer(-1 * 
self._regulations.generic)) return self._rasterizer.rasterize(list(setbacks)) def _compute_local_setbacks(self, features, cnty, setback): diff --git a/reVX/setbacks/regulations.py b/reVX/setbacks/regulations.py index a1209e1c2..ec897e7f2 100644 --- a/reVX/setbacks/regulations.py +++ b/reVX/setbacks/regulations.py @@ -156,10 +156,10 @@ def base_setback_dist(self): return self._base_setback_dist @property - def generic_setback(self): - """Default setback of base setback distance * multiplier. + def generic(self): + """Default regulation value. - This value is used for global setbacks. + This value is used for global regulations. Returns ------- @@ -183,7 +183,7 @@ def multiplier(self): return self._multi @property - def local_exist(self): + def locals_exist(self): """Flag indicating wether local regulations exist. Returns @@ -193,14 +193,14 @@ def local_exist(self): return (self.regulations is not None and not self.regulations.empty) @property - def generic_exist(self): + def generic_exists(self): """Flag indicating wether generic regulations exist. Returns ------- bool """ - return self.generic_setback is not None + return self.generic is not None def __iter__(self): if self._regulations is None: diff --git a/tests/test_setbacks.py b/tests/test_setbacks.py index 83dc3ab31..6779c8bc3 100644 --- a/tests/test_setbacks.py +++ b/tests/test_setbacks.py @@ -86,7 +86,7 @@ def test_regulations_init(): """Test initializing a normal regulations file. 
""" regs = Regulations(10, regulations_fpath=REGS_FPATH, multiplier=1.1) assert regs.base_setback_dist == 10 - assert np.isclose(regs.generic_setback, 10 * 1.1) + assert np.isclose(regs.generic, 10 * 1.1) assert np.isclose(regs.multiplier, 1.1) for col in Regulations.REQUIRED_COLUMNS: @@ -96,7 +96,7 @@ def test_regulations_init(): assert regs.regulations['Feature Type'].str.islower().all() regs = Regulations(10, regulations_fpath=REGS_FPATH, multiplier=None) - assert regs.generic_setback is None + assert regs.generic is None def test_regulations_missing_init(): @@ -173,11 +173,11 @@ def test_regulations_set_to_none(): def test_regulations_locals_exist(): """Test locals_exist property. """ regs = Regulations(10, regulations_fpath=REGS_FPATH, multiplier=1.1) - assert regs.local_exist + assert regs.locals_exist regs = Regulations(10, regulations_fpath=REGS_FPATH, multiplier=None) - assert regs.local_exist + assert regs.locals_exist regs = Regulations(10, regulations_fpath=None, multiplier=1.1) - assert not regs.local_exist + assert not regs.locals_exist with tempfile.TemporaryDirectory() as td: regs = pd.read_csv(REGS_FPATH).iloc[0:0] @@ -185,19 +185,19 @@ def test_regulations_locals_exist(): regs_fpath = os.path.join(td, regs_fpath) regs.to_csv(regs_fpath, index=False) regs = Regulations(10, regulations_fpath=regs_fpath, multiplier=1.1) - assert not regs.local_exist + assert not regs.locals_exist regs = Regulations(10, regulations_fpath=regs_fpath, multiplier=None) - assert not regs.local_exist + assert not regs.locals_exist -def test_regulations_generic_exist(): +def test_regulations_generic_exists(): """Test locals_exist property. 
""" regs = Regulations(10, regulations_fpath=REGS_FPATH, multiplier=1.1) - assert regs.generic_exist + assert regs.generic_exists regs = Regulations(10, regulations_fpath=None, multiplier=1.1) - assert regs.generic_exist + assert regs.generic_exists regs = Regulations(10, regulations_fpath=REGS_FPATH, multiplier=None) - assert not regs.generic_exist + assert not regs.generic_exists def test_regulations_wind(): From b4bd3cd255c13628ca960fef63c8ba4a87b4eaff Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Fri, 16 Sep 2022 15:13:51 -0600 Subject: [PATCH 11/42] reduced import path for upcoming refactor --- reVX/cli.py | 2 +- reVX/least_cost_xmission/cost_creator.py | 2 +- reVX/least_cost_xmission/least_cost_paths.py | 2 +- reVX/offshore/dist_to_ports.py | 2 +- reVX/offshore/dist_to_ports_converter.py | 2 +- tests/test_exclusions_converter.py | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/reVX/cli.py b/reVX/cli.py index 47520e057..89e1ac4d2 100644 --- a/reVX/cli.py +++ b/reVX/cli.py @@ -11,7 +11,7 @@ from rex.utilities.utilities import safe_json_load from reVX.offshore.dist_to_ports_converter import DistToPortsConverter -from reVX.utilities.exclusions_converter import ExclusionsConverter +from reVX.utilities import ExclusionsConverter from reVX.utilities.forecasts import FcstUtils from reVX.utilities.output_extractor import output_extractor from reVX.utilities.region_classifier import RegionClassifier diff --git a/reVX/least_cost_xmission/cost_creator.py b/reVX/least_cost_xmission/cost_creator.py index 332633743..974973314 100644 --- a/reVX/least_cost_xmission/cost_creator.py +++ b/reVX/least_cost_xmission/cost_creator.py @@ -9,7 +9,7 @@ from reV.handlers.exclusions import ExclusionLayers from reVX.handlers.geotiff import Geotiff -from reVX.utilities.exclusions_converter import ExclusionsConverter +from reVX.utilities import ExclusionsConverter from reVX.least_cost_xmission.config import XmissionConfig logger = logging.getLogger(__name__) diff --git 
a/reVX/least_cost_xmission/least_cost_paths.py b/reVX/least_cost_xmission/least_cost_paths.py index 183257e44..2eb4886e5 100644 --- a/reVX/least_cost_xmission/least_cost_paths.py +++ b/reVX/least_cost_xmission/least_cost_paths.py @@ -19,7 +19,7 @@ from rex.utilities.loggers import log_mem from reVX.least_cost_xmission.trans_cap_costs import TieLineCosts -from reVX.utilities.exclusions_converter import ExclusionsConverter +from reVX.utilities import ExclusionsConverter logger = logging.getLogger(__name__) diff --git a/reVX/offshore/dist_to_ports.py b/reVX/offshore/dist_to_ports.py index e386daf17..ba60fc16d 100644 --- a/reVX/offshore/dist_to_ports.py +++ b/reVX/offshore/dist_to_ports.py @@ -14,7 +14,7 @@ from warnings import warn from reV.handlers.exclusions import ExclusionLayers -from reVX.utilities.exclusions_converter import ExclusionsConverter +from reVX.utilities import ExclusionsConverter from reVX.utilities.utilities import log_versions, coordinate_distance from rex.utilities.execution import SpawnProcessPool from rex.utilities.loggers import log_mem diff --git a/reVX/offshore/dist_to_ports_converter.py b/reVX/offshore/dist_to_ports_converter.py index 081c2e68a..a530a5c5f 100644 --- a/reVX/offshore/dist_to_ports_converter.py +++ b/reVX/offshore/dist_to_ports_converter.py @@ -11,7 +11,7 @@ from reVX.handlers.geotiff import Geotiff from reVX.handlers.outputs import Outputs -from reVX.utilities.exclusions_converter import ExclusionsConverter +from reVX.utilities import ExclusionsConverter logger = logging.getLogger(__name__) diff --git a/tests/test_exclusions_converter.py b/tests/test_exclusions_converter.py index b0687e951..961b4d8d4 100644 --- a/tests/test_exclusions_converter.py +++ b/tests/test_exclusions_converter.py @@ -17,7 +17,7 @@ from reVX.cli import main from reVX.handlers.geotiff import Geotiff -from reVX.utilities.exclusions_converter import ExclusionsConverter +from reVX.utilities import ExclusionsConverter from reVX import TESTDATADIR DIR = 
os.path.join(TESTDATADIR, 'ri_exclusions') From 2f43839e386b7a2afcf8238bda04c0563dbe33f7 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Fri, 16 Sep 2022 15:14:20 -0600 Subject: [PATCH 12/42] Reduced another import path --- reVX/setbacks/setbacks_converter.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/reVX/setbacks/setbacks_converter.py b/reVX/setbacks/setbacks_converter.py index bb8e96d6e..381418559 100644 --- a/reVX/setbacks/setbacks_converter.py +++ b/reVX/setbacks/setbacks_converter.py @@ -9,7 +9,7 @@ from reVX.handlers.geotiff import Geotiff from reVX.handlers.outputs import Outputs -from reVX.utilities.exclusions_converter import ExclusionsConverter +from reVX.utilities import ExclusionsConverter logger = logging.getLogger(__name__) From 3bdc03ecf081cd10440c425bd8a9204eee724b54 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Mon, 19 Sep 2022 11:33:36 -0600 Subject: [PATCH 13/42] First major refactor, WIP --- reVX/setbacks/base.py | 395 +----- reVX/setbacks/parcel_setbacks.py | 13 +- reVX/setbacks/road_setbacks.py | 4 +- reVX/setbacks/setbacks_cli.py | 2 +- reVX/setbacks/structure_setbacks.py | 2 +- reVX/turbine_flicker/turbine_flicker.py | 2 +- reVX/utilities/__init__.py | 2 +- reVX/utilities/exclusions.py | 1457 +++++++++++++++++++++++ reVX/utilities/exclusions_converter.py | 651 ---------- tests/test_setbacks.py | 86 +- 10 files changed, 1552 insertions(+), 1062 deletions(-) create mode 100644 reVX/utilities/exclusions.py delete mode 100644 reVX/utilities/exclusions_converter.py diff --git a/reVX/setbacks/base.py b/reVX/setbacks/base.py index d9893046e..5b78ab07f 100644 --- a/reVX/setbacks/base.py +++ b/reVX/setbacks/base.py @@ -16,7 +16,7 @@ from rex.utilities import SpawnProcessPool, log_mem from reV.handlers.exclusions import ExclusionLayers -from reVX.utilities.exclusions_converter import ExclusionsConverter +from reVX.utilities.exclusions import AbstractBaseExclusionsMerger from reVX.utilities.utilities import log_versions logger = 
logging.getLogger(__name__) @@ -256,7 +256,7 @@ def _aggregate_high_res(self, hr_arr): return arr -class AbstractBaseSetbacks(ABC): +class AbstractBaseSetbacks(AbstractBaseExclusionsMerger): """ Create exclusions layers for setbacks """ @@ -306,92 +306,20 @@ def __init__(self, excl_fpath, regulations, hsds=False, this process is skipped and the output is a boolean exclusion mask. By default `None`. """ - log_versions(logger) - self._excl_fpath = excl_fpath - self._regulations = regulations self._rasterizer = Rasterizer(excl_fpath, weights_calculation_upscale_factor, hsds) - - self._preflight_check() + super().__init__(excl_fpath, regulations) def __repr__(self): msg = "{} for {}".format(self.__class__.__name__, self._excl_fpath) return msg - def _preflight_check(self): - """Parse the county regulations. - - Parse regulations, combine with county geometries from - exclusions .h5 file. The county geometries are intersected with - features to compute county specific setbacks. - - Parameters - ---------- - regulations : pandas.DataFrame - Regulations table - - Returns - ------- - regulations: `geopandas.GeoDataFrame` - GeoDataFrame with county level setback regulations merged - with county geometries, use for intersecting with setback - features. - """ - if self.regulations_table is None: - return - - regulations_df = self.regulations_table - if 'FIPS' not in regulations_df: - msg = ('Regulations does not have county FIPS! 
Please add a ' - '"FIPS" columns with the unique county FIPS values.') - logger.error(msg) - raise RuntimeError(msg) - - if 'geometry' not in regulations_df: - regulations_df['geometry'] = None - - regulations_df = regulations_df[~regulations_df['FIPS'].isna()] - regulations_df = regulations_df.set_index('FIPS') - - logger.info('Merging county geometries w/ local regulations') - with ExclusionLayers(self._excl_fpath) as exc: - fips = exc['cnty_fips'] - profile = exc.get_layer_profile('cnty_fips') - - s = features.shapes( - fips.astype(np.int32), - transform=profile['transform'] - ) - for p, v in s: - v = int(v) - if v in regulations_df.index: - regulations_df.at[v, 'geometry'] = shape(p) - - regulations_df = gpd.GeoDataFrame( - regulations_df, - crs=self._rasterizer.profile['crs'], - geometry='geometry' - ) - regulations_df = regulations_df.reset_index() - regulations_df = regulations_df.to_crs( - crs=self._rasterizer.profile['crs']) - self.regulations_table = regulations_df - @property - def regulations_table(self): - """Regulations table. - - Returns - ------- - geopandas.GeoDataFrame | None - """ - return self._regulations.regulations - - @regulations_table.setter - def regulations_table(self, regulations_table): - self._regulations.regulations = regulations_table + def profile(self): + """dict: Geotiff profile. """ + return self._rasterizer.profile - def _parse_features(self, features_fpath): + def parse_features(self, features_fpath): """Method to parse features. Parameters @@ -405,10 +333,9 @@ def _parse_features(self, features_fpath): Geometries of features to setback from in exclusion coordinate system. """ - return gpd.read_file(features_fpath).to_crs( - crs=self._rasterizer.profile['crs']) + return gpd.read_file(features_fpath).to_crs(crs=self.profile['crs']) - def _pre_process_regulations(self, features_fpath): + def pre_process_regulations(self, features_fpath): """Reduce regulations to state corresponding to features_fpath. 
Parameters @@ -427,13 +354,14 @@ def _pre_process_regulations(self, features_fpath): logger.debug('Computing setbacks for regulations in {} counties' .format(len(self.regulations_table))) - # pylint: disable=unused-argument - @abstractmethod - def _regulation_table_mask(self, features_fpath): - """Return the regulation table mask for setback feature. """ - raise NotImplementedError + # def _setback_computation(self, setback_features): + # """Get function and args for setbacks computation. """ + # for setback, cnty in self._regulations: + # idx = setback_features.sindex.intersection(cnty.total_bounds) + # cnty_feats = setback_features.iloc[list(idx)].copy() + # yield self._compute_local_setbacks, cnty_feats, cnty, setback - def _compute_local_setbacks(self, features, cnty, setback): + def compute_local_exclusions(self, features, cnty, regulation_value): """Compute local features setbacks. This method will compute the setbacks using a county-specific @@ -447,7 +375,7 @@ def _compute_local_setbacks(self, features, cnty, setback): Features to setback from. cnty : geopandas.GeoDataFrame Regulations for a single county. - setback : int + regulation_value : int Setback distance in meters. 
Returns @@ -459,98 +387,10 @@ def _compute_local_setbacks(self, features, cnty, setback): .format(cnty.iloc[0]['FIPS'])) log_mem(logger) features = self._feature_filter(features, cnty) - return list(features.buffer(setback)) - - @staticmethod - def _feature_filter(features, cnty): - """Filter the features given a county.""" - return features_with_centroid_in_county(features, cnty) - - def _write_setbacks(self, geotiff, setbacks, replace=False): - """ - Write setbacks to geotiff, replace if requested + setback = regulation_value + return self._rasterizer.rasterize(list(features.buffer(setback))) - Parameters - ---------- - geotiff : str - Path to geotiff file to save setbacks too - setbacks : ndarray - Rasterized array of setbacks - replace : bool, optional - Flag to replace local layer data with arr if layer already - exists in the exclusion .h5 file. By default `False`. - """ - if os.path.exists(geotiff): - if not replace: - msg = ('{} already exists. To replace it set "replace=True"' - .format(geotiff)) - logger.error(msg) - raise IOError(msg) - else: - msg = ('{} already exists and will be replaced!' - .format(geotiff)) - logger.warning(msg) - warn(msg) - - ExclusionsConverter._write_geotiff(geotiff, self._rasterizer.profile, - setbacks) - - def _compute_all_local_setbacks(self, features_fpath, max_workers=None): - """Compute local setbacks for all counties either. - - Parameters - ---------- - features_fpath : str - Path to shape file with features to compute setbacks from - max_workers : int, optional - Number of workers to use for setback computation, if 1 run - in serial, if > 1 run in parallel with that many workers, - if `None` run in parallel on all available cores. - By default `None`. - - Returns - ------- - setbacks : ndarray - Raster array of setbacks. 
- """ - setbacks = [] - setback_features = self._parse_features(features_fpath) - max_workers = max_workers or os.cpu_count() - - log_mem(logger) - if max_workers > 1: - logger.info('Computing local setbacks in parallel using {} ' - 'workers'.format(max_workers)) - loggers = [__name__, 'reVX'] - with SpawnProcessPool(max_workers=max_workers, - loggers=loggers) as exe: - futures = [] - for func, *args in self._setback_computation(setback_features): - future = exe.submit(func, *args) - futures.append(future) - - for i, future in enumerate(as_completed(futures)): - setbacks.extend(future.result()) - logger.debug('Computed setbacks for {} of {} counties' - .format((i + 1), len(self.regulations_table))) - else: - logger.info('Computing local setbacks in serial') - computation = self._setback_computation(setback_features) - for i, (func, *args) in enumerate(computation): - setbacks.extend(func(*args)) - logger.debug('Computed setbacks for {} of {} counties' - .format((i + 1), len(self.regulations_table))) - - return self._rasterizer.rasterize(setbacks) - - def _setback_computation(self, setback_features): - """Get function and args for setbacks computation. """ - for setback, cnty in self._regulations: - idx = setback_features.sindex.intersection(cnty.total_bounds) - cnty_feats = setback_features.iloc[list(idx)].copy() - yield self._compute_local_setbacks, cnty_feats, cnty, setback - - def _compute_generic_setbacks(self, features_fpath): + def compute_generic_exclusions(self, features_fpath): """Compute generic setbacks. 
This method will compute the setbacks using a generic setback @@ -567,101 +407,16 @@ def _compute_generic_setbacks(self, features_fpath): Raster array of setbacks """ logger.info('Computing generic setbacks') - if np.isclose(self._regulations.generic_setback, 0): + if np.isclose(self._regulations.generic, 0): return self._rasterizer.rasterize(shapes=None) - setback_features = self._parse_features(features_fpath) - setbacks = list(setback_features.buffer( - self._regulations.generic_setback - )) + setback_features = self.parse_features(features_fpath) + setbacks = list(setback_features.buffer(self._regulations.generic)) return self._rasterizer.rasterize(setbacks) - def compute_setbacks(self, features_fpath, max_workers=None, - geotiff=None, replace=False): - """ - Compute setbacks for all states either in serial or parallel. - Existing setbacks are computed if a regulations file was - supplied during class initialization, otherwise generic setbacks - are computed. - - Parameters - ---------- - features_fpath : str - Path to shape file with features to compute setbacks from - max_workers : int, optional - Number of workers to use for setback computation, if 1 run - in serial, if > 1 run in parallel with that many workers, - if `None`, run in parallel on all available cores. - By default `None`. - geotiff : str, optional - Path to save geotiff containing rasterized setbacks. - By default `None`. - replace : bool, optional - Flag to replace geotiff if it already exists. - By default `False`. - - Returns - ------- - setbacks : ndarray - Raster array of setbacks - """ - setbacks = self._compute_merged_setbacks(features_fpath, - max_workers=max_workers) - - if geotiff is not None: - logger.debug('Writing setbacks to {}'.format(geotiff)) - self._write_setbacks(geotiff, setbacks, replace=replace) - - return setbacks - - def _compute_merged_setbacks(self, features_fpath, max_workers=None): - """Compute and merge local and generic setbacks, if necessary. 
""" - mw = max_workers - - if self._regulations.local_exist: - self._pre_process_regulations(features_fpath) - - generic_setbacks_exist = self._regulations.generic_exist - local_setbacks_exist = self._regulations.local_exist - - if not generic_setbacks_exist and not local_setbacks_exist: - msg = ("Found no setbacks to compute: No regulations detected, " - "and generic multiplier not set.") - logger.error(msg) - raise ValueError(msg) - - if generic_setbacks_exist and not local_setbacks_exist: - return self._compute_generic_setbacks(features_fpath) - - if local_setbacks_exist and not generic_setbacks_exist: - return self._compute_all_local_setbacks(features_fpath, - max_workers=mw) - - generic_setbacks = self._compute_generic_setbacks(features_fpath) - local_setbacks = self._compute_all_local_setbacks(features_fpath, - max_workers=mw) - return self._merge_setbacks(generic_setbacks, local_setbacks, - features_fpath) - - def _merge_setbacks(self, generic_setbacks, local_setbacks, - features_fpath): - """Merge local setbacks onto the generic setbacks.""" - logger.info('Merging local setbacks onto the generic setbacks') - - self._pre_process_regulations(features_fpath) - with ExclusionLayers(self._excl_fpath) as exc: - fips = exc['cnty_fips'] - - local_setbacks_mask = np.isin(fips, - self.regulations_table["FIPS"].unique()) - - generic_setbacks[local_setbacks_mask] = ( - local_setbacks[local_setbacks_mask]) - return generic_setbacks - @staticmethod - def _get_feature_paths(features_fpath): + def get_feature_paths(features_fpath): """Ensure features path exists and return as list. Parameters @@ -701,96 +456,14 @@ def _get_feature_paths(features_fpath): return paths - @classmethod - def run(cls, excl_fpath, features_path, out_dir, regulations, - weights_calculation_upscale_factor=None, max_workers=None, - replace=False, hsds=False): - """ - Compute setbacks and write them to a geotiff. 
If a regulations - file is given, compute local setbacks, otherwise compute generic - setbacks using the given multiplier and the base setback - distance. If both are provided, generic and local setbacks are - merged such that the local setbacks override the generic ones. - - Parameters - ---------- - excl_fpath : str - Path to .h5 file containing exclusion layers, will also be - the location of any new setback layers. - features_path : str - Path to file or directory feature shape files. - This path can contain any pattern that can be used in the - glob function. For example, `/path/to/features/[A]*` would - match with all the features in the directory - `/path/to/features/` that start with "A". This input - can also be a directory, but that directory must ONLY - contain feature files. If your feature files are mixed - with other files or directories, use something like - `/path/to/features/*.geojson`. - out_dir : str - Directory to save setbacks geotiff(s) into - regulations : `~reVX.setbacks.regulations.Regulations` - A `Regulations` object used to extract setback distances. - weights_calculation_upscale_factor : int, optional - If this value is an int > 1, the output will be a layer with - **inclusion** weight values instead of exclusion booleans. - For example, a cell that was previously excluded with a - a boolean mask (value of 1) may instead be converted to an - inclusion weight value of 0.75, meaning that 75% of the area - corresponding to that point should be included (i.e. the - exclusion feature only intersected a small portion - 25% - - of the cell). This percentage inclusion value is calculated - by upscaling the output array using this input value, - rasterizing the exclusion features onto it, and counting the - number of resulting sub-cells excluded by the feature. For - example, setting the value to `3` would split each output - cell into nine sub-cells - 3 divisions in each dimension. 
- After the feature is rasterized on this high-resolution - sub-grid, the area of the non-excluded sub-cells is totaled - and divided by the area of the original cell to obtain the - final inclusion percentage. Therefore, a larger upscale - factor results in more accurate percentage values. However, - this process is memory intensive and scales quadratically - with the upscale factor. A good way to estimate your minimum - memory requirement is to use the following formula: - - .. math:: memory (GB) = s_0 * s_1 * ((sf^2) * 2 + 4) / 1073741824, + @staticmethod + def _feature_filter(features, cnty): + """Filter the features given a county.""" + return features_with_centroid_in_county(features, cnty) - where :math:`s_0` and :math:`s_1` are the dimensions (shape) - of your exclusion layer and :math:`sf` is the scale factor - (be sure to add several GB for any other overhead required - by the rest of the process). If `None` (or a value <= 1), - this process is skipped and the output is a boolean - exclusion mask. By default `None`. - max_workers : int, optional - Number of workers to use for setback computation, if 1 run - in serial, if > 1 run in parallel with that many workers, - if `None`, run in parallel on all available cores. - By default `None`. - replace : bool, optional - Flag to replace geotiff if it already exists. - By default `False`. - hsds : bool, optional - Boolean flag to use h5pyd to handle .h5 'files' hosted on - AWS behind HSDS. By default `False`. 
- """ - scale_factor = weights_calculation_upscale_factor - setbacks = cls(excl_fpath, regulations=regulations, hsds=hsds, - weights_calculation_upscale_factor=scale_factor) - - features_path = setbacks._get_feature_paths(features_path) - for fpath in features_path: - geotiff = os.path.basename(fpath) - geotiff = ".".join(geotiff.split('.')[:-1] + ['tif']) - geotiff = os.path.join(out_dir, geotiff) - - if os.path.exists(geotiff) and not replace: - msg = ('{} already exists, setbacks will not be re-computed ' - 'unless replace=True'.format(geotiff)) - logger.error(msg) - else: - logger.info("Computing setbacks from {} and saving " - "to {}".format(fpath, geotiff)) - setbacks.compute_setbacks(fpath, geotiff=geotiff, - max_workers=max_workers, - replace=replace) + # pylint: disable=unused-argument + @staticmethod + @abstractmethod + def _regulation_table_mask(features_fpath): + """Return the regulation table mask for setback feature. """ + raise NotImplementedError diff --git a/reVX/setbacks/parcel_setbacks.py b/reVX/setbacks/parcel_setbacks.py index e04f57fad..3740c7777 100644 --- a/reVX/setbacks/parcel_setbacks.py +++ b/reVX/setbacks/parcel_setbacks.py @@ -18,7 +18,7 @@ class ParcelSetbacks(AbstractBaseSetbacks): """Parcel setbacks - facilitates the use of negative buffers. """ - def _compute_generic_setbacks(self, features_fpath): + def compute_generic_exclusions(self, features_fpath): """Compute generic setbacks. 
This method will compute the setbacks using a generic setback @@ -38,12 +38,12 @@ def _compute_generic_setbacks(self, features_fpath): if np.isclose(self._regulations.generic, 0): return self._rasterizer.rasterize(shapes=None) - features = self._parse_features(features_fpath) + features = self.parse_features(features_fpath) setbacks = features.buffer(0).difference( features.buffer(-1 * self._regulations.generic)) return self._rasterizer.rasterize(list(setbacks)) - def _compute_local_setbacks(self, features, cnty, setback): + def compute_local_exclusions(self, features, cnty, regulation_value): """Compute local features setbacks. This method will compute the setbacks using a county-specific @@ -57,7 +57,7 @@ def _compute_local_setbacks(self, features, cnty, setback): Features to setback from. cnty : geopandas.GeoDataFrame Regulations for a single county. - setback : int + regulation_value : int Setback distance in meters. Returns @@ -69,8 +69,9 @@ def _compute_local_setbacks(self, features, cnty, setback): .format(cnty.iloc[0]['FIPS'])) log_mem(logger) features = self._feature_filter(features, cnty) + setback = regulation_value setbacks = features.buffer(0).difference(features.buffer(-1 * setback)) - return list(setbacks) + return self._rasterizer.rasterize(list(setbacks)) def _regulation_table_mask(self, features_fpath): """Return the regulation table mask for setback feature. @@ -89,7 +90,7 @@ def _regulation_table_mask(self, features_fpath): == 'property line') return states & property_line - def _parse_features(self, features_fpath): + def parse_features(self, features_fpath): """Method to parse features. 
Parameters diff --git a/reVX/setbacks/road_setbacks.py b/reVX/setbacks/road_setbacks.py index 01417c0b5..091598a9c 100644 --- a/reVX/setbacks/road_setbacks.py +++ b/reVX/setbacks/road_setbacks.py @@ -18,7 +18,7 @@ class RoadSetbacks(AbstractBaseSetbacks): Road setbacks """ - def _parse_features(self, features_fpath): + def parse_features(self, features_fpath): """ Load roads from gdb file, convert to exclusions coordinate system. @@ -40,7 +40,7 @@ def _parse_features(self, features_fpath): return roads.to_crs(crs=self._rasterizer.profile["crs"]) @staticmethod - def _get_feature_paths(features_fpath): + def get_feature_paths(features_fpath): """ Find all roads gdb files in roads_dir diff --git a/reVX/setbacks/setbacks_cli.py b/reVX/setbacks/setbacks_cli.py index fc154712b..79c69c5b1 100644 --- a/reVX/setbacks/setbacks_cli.py +++ b/reVX/setbacks/setbacks_cli.py @@ -219,7 +219,7 @@ def eagle(config): """ features_path = config.features_path cls = SETBACKS[config.feature_type] - features = cls._get_feature_paths(features_path) + features = cls.get_feature_paths(features_path) if not features: msg = ('No valid feature files were found at {}!' 
.format(features_path)) diff --git a/reVX/setbacks/structure_setbacks.py b/reVX/setbacks/structure_setbacks.py index 6a5d7e60e..72222e86d 100644 --- a/reVX/setbacks/structure_setbacks.py +++ b/reVX/setbacks/structure_setbacks.py @@ -40,7 +40,7 @@ def _split_state_name(state_name): return state_name @staticmethod - def _get_feature_paths(features_fpath): + def get_feature_paths(features_fpath): """ Find all structures .geojson files in structures dir diff --git a/reVX/turbine_flicker/turbine_flicker.py b/reVX/turbine_flicker/turbine_flicker.py index 21838dd88..25a42a565 100644 --- a/reVX/turbine_flicker/turbine_flicker.py +++ b/reVX/turbine_flicker/turbine_flicker.py @@ -12,7 +12,7 @@ from reV.supply_curve.extent import SupplyCurveExtent from reV.supply_curve.tech_mapping import TechMapping from reVX.wind_dirs.mean_wind_dirs_point import MeanWindDirectionsPoint -from reVX.utilities.exclusions_converter import ExclusionsConverter +from reVX.utilities import ExclusionsConverter from rex.resource_extraction.resource_extraction import WindX from rex.utilities.execution import SpawnProcessPool from rex.utilities.loggers import log_mem diff --git a/reVX/utilities/__init__.py b/reVX/utilities/__init__.py index 65b825de2..e6b3cf52c 100644 --- a/reVX/utilities/__init__.py +++ b/reVX/utilities/__init__.py @@ -2,7 +2,7 @@ """ reVX utilities. 
""" -from .exclusions_converter import ExclusionsConverter +from .exclusions import ExclusionsConverter from .forecasts import FcstUtils from .output_extractor import output_extractor from .region_classifier import RegionClassifier diff --git a/reVX/utilities/exclusions.py b/reVX/utilities/exclusions.py new file mode 100644 index 000000000..0ad58ef0e --- /dev/null +++ b/reVX/utilities/exclusions.py @@ -0,0 +1,1457 @@ +# -*- coding: utf-8 -*- +""" +Driver class to compute exclusions +""" +import h5py +import json +import os +import logging +from abc import ABC, abstractmethod +from concurrent.futures import as_completed +from warnings import warn + +import numpy as np +import geopandas as gpd +from pyproj.crs import CRS +import rasterio +from rasterio import features +from shapely.geometry import shape + +from rex import Outputs +from rex.utilities import SpawnProcessPool, log_mem, parse_table +from reV.handlers.exclusions import ExclusionLayers +from reVX.handlers.geotiff import Geotiff +from reVX.utilities.utilities import log_versions +from reVX.utilities.exceptions import ExclusionsCheckError + +logger = logging.getLogger(__name__) + + +class AbstractExclusionCalculatorInterface(ABC): + """Abstract Exclusion Calculator Interface. """ + + @property + @abstractmethod + def profile(self): + """dict: Geotiff profile. """ + raise NotImplementedError + + @abstractmethod + def parse_features(self, features_fpath): + """Parse features the feature file. + + Parameters + ---------- + features_fpath : str + Path to file containing features to compute exclusions from. + + Returns + ------- + `geopandas.GeoDataFrame` + Geometries of features to compute exclusions from in + exclusion coordinate system. + """ + raise NotImplementedError + + @abstractmethod + def pre_process_regulations(self, features_fpath): + """Reduce regulations to correct state and features. + + When implementing this method, make sure to update + `self.regulations_table`. 
+ + Parameters + ---------- + features_fpath : str + Path to shape file with features to compute exclusions from. + """ + raise NotImplementedError + + @abstractmethod + def compute_local_exclusions(self, features, cnty, regulation_value): + """Compute local feature exclusions. + + This method should compute the exclusions using the information + about the input county. + + Parameters + ---------- + features : geopandas.GeoDataFrame + Features used to calculate exclusions from. + cnty : geopandas.GeoDataFrame + Regulations for a single county. + regulation_value : int + Regulation value for county. + + Returns + ------- + exclusions : list + List of exclusion geometries. + """ + raise NotImplementedError + + @abstractmethod + def compute_generic_exclusions(self, features_fpath): + """Compute generic exclusions. + + This method should compute the exclusions using a generic + regulation value (`self._regulations.generic`). + + Parameters + ---------- + features_fpath : str + Path to shape file with features to compute exclusions from. + + Returns + ------- + exclusions : ndarray + Raster array of exclusions + """ + raise NotImplementedError + + @staticmethod + @abstractmethod + def get_feature_paths(features_fpath): + """Ensure features path exists and return as list. + + Parameters + ---------- + features_fpath : str + Path to features file. This path can contain + any pattern that can be used in the glob function. + For example, `/path/to/features/[A]*` would match + with all the features in the directory + `/path/to/features/` that start with "A". This input + can also be a directory, but that directory must ONLY + contain feature files. If your feature files are mixed + with other files or directories, use something like + `/path/to/features/*.geojson`. + + Returns + ------- + features_fpath : list + Features path as a list of strings. 
+ + Notes + ----- + This method is required for `run` classmethods for exclusion + features that are spread out over multiple files. + """ + raise NotImplementedError + + +class AbstractBaseExclusionsMerger(AbstractExclusionCalculatorInterface): + """ + Create exclusions layers for exclusions + """ + + def __init__(self, excl_fpath, regulations): + """ + Parameters + ---------- + excl_fpath : str + Path to .h5 file containing exclusion layers, will also be + the location of any new exclusion layers + regulations : `Regulations` + A `Regulations` object used to extract exclusion regulation + values. + """ + log_versions(logger) + self._excl_fpath = excl_fpath + self._regulations = regulations + with ExclusionLayers(self._excl_fpath) as exc: + self._fips = exc['cnty_fips'] + self._cnty_fips_profile = exc.get_layer_profile('cnty_fips') + self._preflight_check() + + def __repr__(self): + msg = "{} for {}".format(self.__class__.__name__, self._excl_fpath) + return msg + + def _preflight_check(self): + """Parse the county regulations. + + Parse regulations, combine with county geometries from + exclusions .h5 file. The county geometries are intersected with + features to compute county specific exclusions. + + Parameters + ---------- + regulations : pandas.DataFrame + Regulations table + + Returns + ------- + regulations: `geopandas.GeoDataFrame` + GeoDataFrame with county level exclusion regulations merged + with county geometries, use for intersecting with exclusion + features. + """ + if self.regulations_table is None: + return + + regulations_df = self.regulations_table + if 'FIPS' not in regulations_df: + msg = ('Regulations does not have county FIPS! 
Please add a ' + '"FIPS" columns with the unique county FIPS values.') + logger.error(msg) + raise RuntimeError(msg) + + if 'geometry' not in regulations_df: + regulations_df['geometry'] = None + + regulations_df = regulations_df[~regulations_df['FIPS'].isna()] + regulations_df = regulations_df.set_index('FIPS') + + logger.info('Merging county geometries w/ local regulations') + s = features.shapes( + self._fips.astype(np.int32), + transform=self._cnty_fips_profile['transform'] + ) + for p, v in s: + v = int(v) + if v in regulations_df.index: + regulations_df.at[v, 'geometry'] = shape(p) + + regulations_df = gpd.GeoDataFrame( + regulations_df, + crs=self.profile['crs'], + geometry='geometry' + ) + regulations_df = regulations_df.reset_index() + regulations_df = regulations_df.to_crs(crs=self.profile['crs']) + self.regulations_table = regulations_df + + @property + def regulations_table(self): + """Regulations table. + + Returns + ------- + geopandas.GeoDataFrame | None + """ + return self._regulations.regulations + + @regulations_table.setter + def regulations_table(self, regulations_table): + self._regulations.regulations = regulations_table + + # def _parse_features(self, features_fpath): + # """Method to parse features. + + # Parameters + # ---------- + # features_fpath : str + # Path to file containing features to setback from. + + # Returns + # ------- + # `geopandas.GeoDataFrame` + # Geometries of features to setback from in exclusion + # coordinate system. + # """ + # return gpd.read_file(features_fpath).to_crs( + # crs=self._rasterizer.profile['crs']) + + # def _pre_process_regulations(self, features_fpath): + # """Reduce regulations to state corresponding to features_fpath. + + # Parameters + # ---------- + # features_fpath : str + # Path to shape file with features to compute setbacks from. + # """ + # mask = self._regulation_table_mask(features_fpath) + # if not mask.any(): + # msg = "Found no local regulations!" 
+ # logger.warning(msg) + # warn(msg) + + # self.regulations_table = (self.regulations_table[mask] + # .reset_index(drop=True)) + # logger.debug('Computing setbacks for regulations in {} counties' + # .format(len(self.regulations_table))) + + # pylint: disable=unused-argument + # @abstractmethod + # def _regulation_table_mask(self, features_fpath): + # """Return the regulation table mask for setback feature. """ + # raise NotImplementedError + + # def _compute_local_setbacks(self, features, cnty, setback): + # """Compute local features setbacks. + + # This method will compute the setbacks using a county-specific + # regulations file that specifies either a static setback or a + # multiplier value that will be used along with the base setback + # distance to compute the setback. + + # Parameters + # ---------- + # features : geopandas.GeoDataFrame + # Features to setback from. + # cnty : geopandas.GeoDataFrame + # Regulations for a single county. + # setback : int + # Setback distance in meters. + + # Returns + # ------- + # setbacks : list + # List of setback geometries. + # """ + # logger.debug('- Computing setbacks for county FIPS {}' + # .format(cnty.iloc[0]['FIPS'])) + # log_mem(logger) + # features = self._feature_filter(features, cnty) + # return list(features.buffer(setback)) + # TODO: Delegate this to class + + # @staticmethod + # def _feature_filter(features, cnty): + # """Filter the features given a county.""" + # return features_with_centroid_in_county(features, cnty) + + def _write_exclusions(self, geotiff, exclusions, replace=False): + """ + Write exclusions to geotiff, replace if requested + + Parameters + ---------- + geotiff : str + Path to geotiff file to save exclusions too + exclusions : ndarray + Rasterized array of exclusions. + replace : bool, optional + Flag to replace local layer data with arr if layer already + exists in the exclusion .h5 file. By default `False`. 
+ """ + if os.path.exists(geotiff): + if not replace: + msg = ('{} already exists. To replace it set "replace=True"' + .format(geotiff)) + logger.error(msg) + raise IOError(msg) + else: + msg = ('{} already exists and will be replaced!' + .format(geotiff)) + logger.warning(msg) + warn(msg) + + ExclusionsConverter._write_geotiff(geotiff, self.profile, exclusions) + + def compute_all_local_exclusions(self, features_fpath, max_workers=None): + """Compute local exclusions for all counties either. + + Parameters + ---------- + features_fpath : str + Path to shape file with features to compute exclusions from + max_workers : int, optional + Number of workers to use for exclusions computation, if 1 + run in serial, if > 1 run in parallel with that many + workers, if `None` run in parallel on all available cores. + By default `None`. + + Returns + ------- + exclusions : ndarray + Raster array of exclusions. + """ + exclusions = None + features = self.parse_features(features_fpath) + max_workers = max_workers or os.cpu_count() + + log_mem(logger) + if max_workers > 1: + logger.info('Computing local exclusions in parallel using {} ' + 'workers'.format(max_workers)) + loggers = [__name__, 'reVX'] + with SpawnProcessPool(max_workers=max_workers, + loggers=loggers) as exe: + futures = {} + for func, *args in self._exclusions_computation(features): + cnty_feats, cnty, exclusion = args + future = exe.submit(func, cnty_feats, cnty, exclusion) + futures[future] = cnty['FIPS'].unique() + + for i, future in enumerate(as_completed(futures)): + exclusions = self._combine_exclusions(exclusions, + future.result(), + futures[future]) + logger.debug('Computed exclusions for {} of {} counties' + .format((i + 1), len(self.regulations_table))) + else: + logger.info('Computing local exclusions in serial') + computation = self._exclusions_computation(features) + for i, (func, *args) in enumerate(computation): + cnty_feats, cnty, exclusion = args + exclusions = 
self._combine_exclusions(exclusions, func(*args), + cnty['FIPS'].unique()) + logger.debug('Computed exclusions for {} of {} counties' + .format((i + 1), len(self.regulations_table))) + + return exclusions + + def _exclusions_computation(self, features): + """Get function and args for exclusions computation. """ + for exclusion, cnty in self._regulations: + idx = features.sindex.intersection(cnty.total_bounds) + cnty_feats = features.iloc[list(idx)].copy() + yield self.compute_local_exclusions, cnty_feats, cnty, exclusion + + # def _compute_generic_setbacks(self, features_fpath): + # """Compute generic setbacks. + + # This method will compute the setbacks using a generic setback + # of `base_setback_dist * multiplier`. + + # Parameters + # ---------- + # features_fpath : str + # Path to shape file with features to compute setbacks from. + + # Returns + # ------- + # setbacks : ndarray + # Raster array of setbacks + # """ + # logger.info('Computing generic setbacks') + # if np.isclose(self._regulations.generic_setback, 0): + # return self._rasterizer.rasterize(shapes=None) + + # setback_features = self._parse_features(features_fpath) + # setbacks = list(setback_features.buffer( + # self._regulations.generic_setback + # )) + + # return self._rasterizer.rasterize(setbacks) + # TODO: Delegate this to class + + def compute_exclusions(self, features_fpath, max_workers=None, + geotiff=None, replace=False): + """ + Compute exclusions for all states either in serial or parallel. + Existing exclusions are computed if a regulations file was + supplied during class initialization, otherwise generic exclusions + are computed. + + Parameters + ---------- + features_fpath : str + Path to shape file with features to compute exclusions from + max_workers : int, optional + Number of workers to use for exclusion computation, if 1 run + in serial, if > 1 run in parallel with that many workers, + if `None`, run in parallel on all available cores. + By default `None`. 
+ geotiff : str, optional + Path to save geotiff containing rasterized exclusions. + By default `None`. + replace : bool, optional + Flag to replace geotiff if it already exists. + By default `False`. + + Returns + ------- + exclusions : ndarray + Raster array of exclusions + """ + exclusions = self._compute_merged_exclusions(features_fpath, + max_workers=max_workers) + + if geotiff is not None: + logger.debug('Writing exclusions to {}'.format(geotiff)) + self._write_exclusions(geotiff, exclusions, replace=replace) + + return exclusions + + def _compute_merged_exclusions(self, features_fpath, max_workers=None): + """Compute and merge local and generic exclusions, if necessary. """ + mw = max_workers + + if self._regulations.locals_exist: + self.pre_process_regulations(features_fpath) + + generic_exclusions_exist = self._regulations.generic_exists + local_exclusions_exist = self._regulations.locals_exist + + if not generic_exclusions_exist and not local_exclusions_exist: + msg = ("Found no exclusions to compute: No regulations detected, " + "and generic multiplier not set.") + logger.error(msg) + raise ValueError(msg) + + if generic_exclusions_exist and not local_exclusions_exist: + return self.compute_generic_exclusions(features_fpath) + + if local_exclusions_exist and not generic_exclusions_exist: + return self.compute_all_local_exclusions(features_fpath, + max_workers=mw) + + generic_exclusions = self.compute_generic_exclusions(features_fpath) + local_exclusions = self.compute_all_local_exclusions(features_fpath, + max_workers=mw) + return self._merge_exclusions(generic_exclusions, local_exclusions, + features_fpath) + + def _merge_exclusions(self, generic_exclusions, local_exclusions, + features_fpath): + """Merge local exclusions onto the generic exclusions.""" + logger.info('Merging local exclusions onto the generic exclusions') + + self.pre_process_regulations(features_fpath) + local_fips = self.regulations_table["FIPS"].unique() + return 
self._combine_exclusions(generic_exclusions, local_exclusions, + local_fips) + + def _combine_exclusions(self, existing, additional, cnty_fips): + """Combine local exclusions using FIPS code""" + if existing is None: + return additional + + local_exclusions_mask = np.isin(self._fips, cnty_fips) + existing[local_exclusions_mask] = additional[local_exclusions_mask] + return existing + + @classmethod + def run(cls, excl_fpath, features_path, out_dir, regulations, + weights_calculation_upscale_factor=None, max_workers=None, + replace=False, hsds=False): + """ + Compute exclusions and write them to a geotiff. If a regulations + file is given, compute local exclusions, otherwise compute + generic exclusions. If both are provided, generic and local + exclusions are merged such that the local exclusions override + the generic ones. + + Parameters + ---------- + excl_fpath : str + Path to .h5 file containing exclusion layers, will also be + the location of any new exclusion layers. + features_path : str + Path to file or directory feature shape files. + This path can contain any pattern that can be used in the + glob function. For example, `/path/to/features/[A]*` would + match with all the features in the directory + `/path/to/features/` that start with "A". This input + can also be a directory, but that directory must ONLY + contain feature files. If your feature files are mixed + with other files or directories, use something like + `/path/to/features/*.geojson`. + out_dir : str + Directory to save exclusion geotiff(s) into + regulations : `Regulations` + A `Regulations` object used to extract exclusion regulation + distances. + weights_calculation_upscale_factor : int, optional + If this value is an int > 1, the output will be a layer with + **inclusion** weight values instead of exclusion booleans. 
+ For example, a cell that was previously excluded with a + a boolean mask (value of 1) may instead be converted to an + inclusion weight value of 0.75, meaning that 75% of the area + corresponding to that point should be included (i.e. the + exclusion feature only intersected a small portion - 25% - + of the cell). This percentage inclusion value is calculated + by upscaling the output array using this input value, + rasterizing the exclusion features onto it, and counting the + number of resulting sub-cells excluded by the feature. For + example, setting the value to `3` would split each output + cell into nine sub-cells - 3 divisions in each dimension. + After the feature is rasterized on this high-resolution + sub-grid, the area of the non-excluded sub-cells is totaled + and divided by the area of the original cell to obtain the + final inclusion percentage. Therefore, a larger upscale + factor results in more accurate percentage values. However, + this process is memory intensive and scales quadratically + with the upscale factor. A good way to estimate your minimum + memory requirement is to use the following formula: + + .. math:: memory (GB) = s_0 * s_1 * ((sf^2) * 2 + 4) / 1073741824, + + where :math:`s_0` and :math:`s_1` are the dimensions (shape) + of your exclusion layer and :math:`sf` is the scale factor + (be sure to add several GB for any other overhead required + by the rest of the process). If `None` (or a value <= 1), + this process is skipped and the output is a boolean + exclusion mask. By default `None`. + max_workers : int, optional + Number of workers to use for exclusion computation, if 1 run + in serial, if > 1 run in parallel with that many workers, + if `None`, run in parallel on all available cores. + By default `None`. + replace : bool, optional + Flag to replace geotiff if it already exists. + By default `False`. + hsds : bool, optional + Boolean flag to use h5pyd to handle .h5 'files' hosted on + AWS behind HSDS. By default `False`. 
+ """ + scale_factor = weights_calculation_upscale_factor + exclusions = cls(excl_fpath, regulations=regulations, hsds=hsds, + weights_calculation_upscale_factor=scale_factor) + + features_path = exclusions.get_feature_paths(features_path) + for fpath in features_path: + geotiff = os.path.basename(fpath) + geotiff = ".".join(geotiff.split('.')[:-1] + ['tif']) + geotiff = os.path.join(out_dir, geotiff) + + if os.path.exists(geotiff) and not replace: + msg = ('{} already exists, exclusions will not be re-computed ' + 'unless replace=True'.format(geotiff)) + logger.error(msg) + else: + logger.info("Computing exclusions from {} and saving " + "to {}".format(fpath, geotiff)) + exclusions.compute_exclusions(fpath, geotiff=geotiff, + max_workers=max_workers, + replace=replace) + + +class ExclusionsConverter: + """ + Convert exclusion layers between .h5 and .tif (geotiff) + """ + def __init__(self, excl_h5, hsds=False, chunks=(128, 128), replace=True): + """ + Parameters + ---------- + excl_h5 : str + Path to .h5 file containing or to contain exclusion layers + hsds : bool, optional + Boolean flag to use h5pyd to handle .h5 'files' hosted on AWS + behind HSDS, by default False + chunks : tuple, optional + Chunk size of exclusions in .h5 and Geotiffs, by default (128, 128) + replace : bool, optional + Flag to replace existing layers if needed, by default True + """ + log_versions(logger) + self._excl_h5 = excl_h5 + self._hsds = hsds + self._chunks = chunks + self._replace = replace + + def __repr__(self): + msg = "{} for {}".format(self.__class__.__name__, self._excl_h5) + return msg + + def __getitem__(self, layer): + """ + Parameters + ---------- + layer : str + Layer to extract data for + + Returns + ------- + profile : dict + Geotiff profile (attributes) + values : ndarray + Geotiff data + """ + + if layer not in self.layers: + msg = "{} is not present in {}".format(layer, self._excl_h5) + logger.error(msg) + raise KeyError(msg) + + profile, values = 
self._extract_layer(self._excl_h5, layer, + hsds=self._hsds) + return profile, values + + def __setitem__(self, layer, geotiff): + """ + Parameters + ---------- + layer : str + Layer to set + geotiff : str + Path to GeoTiff to load data from + """ + self.geotiff_to_layer(layer, geotiff) + + @property + def layers(self): + """ + Available exclusion layers in .h5 file + + Returns + ------- + layers : list + Available layers in .h5 file + """ + with ExclusionLayers(self._excl_h5, hsds=self._hsds) as exc: + layers = exc.layers + + return layers + + @staticmethod + def _init_h5(excl_h5, geotiff, chunks=(128, 128)): + """ + Initialize exclusions .h5 file from geotiff: + - Transfer profile, shape, and meta + + Parameters + ---------- + excl_h5 : str + Path to .h5 file containing exclusion layers + geotiff : str + Path to geotiff file + chunks : tuple + Chunk size of exclusions in Geotiff + """ + logger.debug('\t- Initializing {} from {}' + .format(excl_h5, geotiff)) + with Geotiff(geotiff, chunks=chunks) as src: + profile = src.profile + shape = src.shape + lat, lon = src.lat_lon + logger.debug('\t- "profile", "meta", and "shape" extracted from {}' + .format(geotiff)) + + try: + with h5py.File(excl_h5, mode='w') as dst: + dst.attrs['profile'] = json.dumps(profile) + logger.debug('\t- Default profile:\n{}'.format(profile)) + dst.attrs['shape'] = shape + logger.debug('\t- Default shape:\n{}'.format(shape)) + dst.attrs['chunks'] = chunks + logger.debug('\t- Default chunks:\n{}'.format(chunks)) + + dst.create_dataset('latitude', shape=lat.shape, + dtype=np.float32, data=lat, + chunks=chunks) + logger.debug('\t- latitude coordiantes created') + + dst.create_dataset('longitude', shape=lon.shape, + dtype=np.float32, data=lon, + chunks=chunks) + logger.debug('\t- longitude coordiantes created') + except Exception: + logger.exception("Error initilizing {}".format(excl_h5)) + if os.path.exists(excl_h5): + os.remove(excl_h5) + + @staticmethod + def _check_crs(baseline_crs, test_crs, 
ignore_keys=('no_defs',)): + """ + Compare baseline and test crs values + + Parameters + ---------- + baseline_crs : dict + Baseline CRS to use a truth, must be a dict + test_crs : dict + Test CRS to compare with baseline, must be a dictionary + ignore_keys : tuple + Keys to not check + + Returns + ------- + bad_crs : bool + Flag if crs' do not match + """ + bad_crs = False + for k, true_v in baseline_crs.items(): + if k not in ignore_keys: + test_v = test_crs.get(k, true_v) + if true_v != test_v: + bad_crs = True + + return bad_crs + + @classmethod + def _check_geotiff(cls, excl_h5, geotiff, chunks=(128, 128), + transform_atol=0.01, coord_atol=0.001): + """ + Compare geotiff with exclusion layer, raise any errors + + Parameters + ---------- + excl_h5 : str + Path to .h5 file containing exclusion layers + geotiff : str + Path to geotiff file + chunks : tuple + Chunk size of exclusions in Geotiff + transform_atol : float + Absolute tolerance parameter when comparing geotiff transform data. + coord_atol : float + Absolute tolerance parameter when comparing new un-projected + geotiff coordinates against previous coordinates. + """ + with Geotiff(geotiff, chunks=chunks) as tif: + with ExclusionLayers(excl_h5) as h5: + if tif.bands > 1: + error = ('{} contains more than one band!' + .format(geotiff)) + logger.error(error) + raise ExclusionsCheckError(error) + + if not np.array_equal(h5.shape, tif.shape): + error = ('Shape of exclusion data in {} and {} do not ' + 'match!'.format(geotiff, excl_h5)) + logger.error(error) + raise ExclusionsCheckError(error) + + profile = h5.profile + h5_crs = CRS.from_string(profile['crs']).to_dict() + tif_crs = CRS.from_string(tif.profile['crs']).to_dict() + bad_crs = cls._check_crs(h5_crs, tif_crs) + if bad_crs: + error = ('Geospatial "crs" in {} and {} do not match!' 
+ '\n {} !=\n {}' + .format(geotiff, excl_h5, tif_crs, h5_crs)) + logger.error(error) + raise ExclusionsCheckError(error) + + if not np.allclose(profile['transform'], + tif.profile['transform'], + atol=transform_atol): + error = ('Geospatial "transform" in {} and {} do not ' + 'match!\n {} !=\n {}' + .format(geotiff, excl_h5, profile['transform'], + tif.profile['transform'])) + logger.error(error) + raise ExclusionsCheckError(error) + + lat, lon = tif.lat_lon + if not np.allclose(h5.latitude, lat, atol=coord_atol): + error = ('Latitude coordinates {} and {} do not match to ' + 'within {} degrees!' + .format(geotiff, excl_h5, coord_atol)) + logger.error(error) + raise ExclusionsCheckError(error) + + if not np.allclose(h5.longitude, lon, atol=coord_atol): + error = ('Longitude coordinates {} and {} do not match to ' + 'within {} degrees!' + .format(geotiff, excl_h5, coord_atol)) + logger.error(error) + raise ExclusionsCheckError(error) + + @classmethod + def _parse_tiff(cls, geotiff, excl_h5=None, chunks=(128, 128), + check_tiff=True, transform_atol=0.01, coord_atol=0.001): + """ + Extract exclusion layer from given geotiff, compare with excl_h5 + if provided + + Parameters + ---------- + geotiff : str + Path to geotiff file + excl_h5 : str, optional + Path to .h5 file containing exclusion layers, by default None + chunks : tuple, optional + Chunk size of exclusions in Geotiff, by default (128, 128) + check_tiff : bool, optional + Flag to check tiff profile and coordinates against exclusion .h5 + profile and coordinates, by default True + transform_atol : float, optional + Absolute tolerance parameter when comparing geotiff transform data, + by default 0.01 + coord_atol : float, optional + Absolute tolerance parameter when comparing new un-projected + geotiff coordinates against previous coordinates, by default 0.001 + + Returns + ------- + profile : dict + Geotiff profile (attributes) + values : ndarray + Geotiff data + """ + if excl_h5 is not None and check_tiff: + 
cls._check_geotiff(excl_h5, geotiff, chunks=chunks, + transform_atol=transform_atol, + coord_atol=coord_atol) + + with Geotiff(geotiff, chunks=chunks) as tif: + profile, values = tif.profile, tif.values + + return profile, values + + @staticmethod + def _write_layer(excl_h5, layer, profile, values, chunks=(128, 128), + description=None, scale_factor=None): + """ + Write exclusion layer to .h5 file + + Parameters + ---------- + excl_h5 : str + Path to .h5 file containing exclusion layers + layer : str + Dataset name in .h5 file + profile : dict + Geotiff profile (attributes) + values : ndarray + Geotiff data + chunks : tuple + Chunk size of dataset in .h5 file + description : str + Description of exclusion layer + scale_factor : int | float, optional + Scale factor to use to scale geotiff data when added to the .h5 + file, by default None + """ + if len(chunks) < 3: + chunks = (1, ) + chunks + + if values.ndim < 3: + values = np.expand_dims(values, 0) + + with h5py.File(excl_h5, mode='a') as f: + if layer in f: + ds = f[layer] + ds[...] 
= values + logger.debug('\t- {} values replaced'.format(layer)) + else: + ds = f.create_dataset(layer, shape=values.shape, + dtype=values.dtype, chunks=chunks, + data=values) + logger.debug('\t- {} created and loaded'.format(layer)) + + ds.attrs['profile'] = json.dumps(profile) + logger.debug('\t- Unique profile for {} added:\n{}' + .format(layer, profile)) + if description is not None: + ds.attrs['description'] = description + logger.debug('\t- Description for {} added:\n{}' + .format(layer, description)) + + if scale_factor is not None: + ds.attrs['scale_factor'] = scale_factor + logger.debug('\t- scale_factor for {} added:\n{}' + .format(layer, scale_factor)) + + @classmethod + def _geotiff_to_h5(cls, excl_h5, layer, geotiff, chunks=(128, 128), + check_tiff=True, transform_atol=0.01, coord_atol=0.001, + description=None, scale_factor=None, dtype='int16'): + """ + Transfer geotiff exclusions to h5 confirming they match existing layers + + Parameters + ---------- + excl_h5 : str + Path to .h5 file containing exclusion layers + layer : str + Layer to extract + geotiff : str + Path to geotiff file + chunks : tuple, optional + Chunk size of exclusions in Geotiff, by default (128, 128) + check_tiff : bool, optional + Flag to check tiff profile and coordinates against exclusion .h5 + profile and coordinates, by default True + transform_atol : float, optional + Absolute tolerance parameter when comparing geotiff transform data, + by default 0.01 + coord_atol : float, optional + Absolute tolerance parameter when comparing new un-projected + geotiff coordinates against previous coordinates, by default 0.001 + description : str, optional + Description of exclusion layer, by default None + scale_factor : int | float, optional + Scale factor to use to scale geotiff data when added to the .h5 + file, by default None + dtype : str, optional + Dtype to save geotiff data as in the .h5 file. 
Only used when + 'scale_factor' is not None, by default 'int16' + """ + logger.debug('\t- {} being extracted from {} and added to {}' + .format(layer, geotiff, os.path.basename(excl_h5))) + + profile, values = cls._parse_tiff( + geotiff, excl_h5=excl_h5, chunks=chunks, check_tiff=check_tiff, + transform_atol=transform_atol, coord_atol=coord_atol) + + if scale_factor is not None: + attrs = {'scale_factor': scale_factor} + values = Outputs._check_data_dtype(layer, values, dtype, + attrs=attrs) + + cls._write_layer(excl_h5, layer, profile, values, + chunks=chunks, description=description, + scale_factor=scale_factor) + + @staticmethod + def _write_geotiff(geotiff, profile, values): + """ + Write values to geotiff with given profile + + Parameters + ---------- + geotiff : str + Path to geotiff file to save data to + profile : dict + Geotiff profile (attributes) + values : ndarray + Geotiff data + """ + out_dir = os.path.dirname(geotiff) + if not os.path.exists(out_dir): + logger.debug("Creating {}".format(out_dir)) + os.makedirs(out_dir) + + if values.shape[0] != 1: + values = np.expand_dims(values, 0) + + dtype = values.dtype.name + profile['dtype'] = dtype + if np.issubdtype(dtype, np.integer): + dtype_max = np.iinfo(dtype).max + else: + dtype_max = np.finfo(dtype).max + + profile['nodata'] = dtype_max + + with rasterio.open(geotiff, 'w', **profile) as f: + f.write(values) + logger.debug('\t- {} created'.format(geotiff)) + + @classmethod + def _extract_layer(cls, excl_h5, layer, geotiff=None, hsds=False): + """ + Extract given layer from exclusions .h5 file and write to geotiff .tif + + Parameters + ---------- + excl_h5 : str + Path to .h5 file containing exclusion layers + layer : str + Layer to extract + geotiff : str + Path to geotiff file + hsds : bool + Boolean flag to use h5pyd to handle .h5 'files' hosted on AWS + behind HSDS + + Returns + ------- + profile : dict + Geotiff profile (attributes) + values : ndarray + Geotiff data + """ + logger.debug('\t - 
Extracting {} from {}' + .format(layer, os.path.basename(excl_h5))) + with ExclusionLayers(excl_h5, hsds=hsds) as f: + profile = f.get_layer_profile(layer) + values = f.get_layer_values(layer) + + if geotiff is not None: + logger.debug('\t- Writing {} to {}'.format(layer, geotiff)) + cls._write_geotiff(geotiff, profile, values) + + return profile, values + + def geotiff_to_layer(self, layer, geotiff, check_tiff=True, + transform_atol=0.01, coord_atol=0.001, + description=None, scale_factor=None, dtype='int16'): + """ + Transfer geotiff exclusions to h5 confirming they match existing layers + + Parameters + ---------- + layer : str + Layer to extract + geotiff : str + Path to geotiff file + check_tiff : bool, optional + Flag to check tiff profile and coordinates against exclusion .h5 + profile and coordinates, by default True + transform_atol : float, optional + Absolute tolerance parameter when comparing geotiff transform data, + by default 0.01 + coord_atol : float, optional + Absolute tolerance parameter when comparing new un-projected + geotiff coordinates against previous coordinates, by default 0.001 + description : str, optional + Description of exclusion layer, by default None + scale_factor : int | float, optional + Scale factor to use to scale geotiff data when added to the .h5 + file, by default None + dtype : str, optional + Dtype to save geotiff data as in the .h5 file. 
Only used when + 'scale_factor' is not None, by default 'int16' + """ + if not os.path.exists(self._excl_h5): + self._init_h5(self._excl_h5, geotiff, chunks=self._chunks) + + if layer in self.layers: + msg = ("{} is already present in {}" + .format(layer, self._excl_h5)) + if self._replace: + msg += " and will be replaced" + logger.warning(msg) + warn(msg) + else: + msg += ", to 'replace' set to True" + logger.error(msg) + raise KeyError(msg) + + self._geotiff_to_h5(self._excl_h5, layer, geotiff, + chunks=self._chunks, + check_tiff=check_tiff, + transform_atol=transform_atol, + coord_atol=coord_atol, + description=description, + scale_factor=scale_factor, + dtype=dtype) + + def layer_to_geotiff(self, layer, geotiff): + """ + Extract desired layer from .h5 file and write to geotiff .tif + + Parameters + ---------- + layer : str + Layer to extract + geotiff : str + Path to geotiff file + """ + self._extract_layer(self._excl_h5, layer, geotiff=geotiff, + hsds=self._hsds) + + @classmethod + def layers_to_h5(cls, excl_h5, layers, chunks=(128, 128), + replace=True, check_tiff=True, + transform_atol=0.01, coord_atol=0.001, + descriptions=None, scale_factors=None): + """ + Create exclusions .h5 file, or load layers into existing exclusion .h5 + file from provided geotiffs + + Parameters + ---------- + excl_h5 : str + Path to .h5 file containing or to contain exclusion layers + layers : list | dict + List of geotiffs to load + or dictionary mapping goetiffs to the layers to load + chunks : tuple, optional + Chunk size of exclusions in Geotiff, by default (128, 128) + replace : bool, optional + Flag to replace existing layers if needed, by default True + check_tiff : bool, optional + Flag to check tiff profile and coordinates against exclusion .h5 + profile and coordinates, by default True + transform_atol : float, optional + Absolute tolerance parameter when comparing geotiff transform data, + by default 0.01 + coord_atol : float, optional + Absolute tolerance parameter 
when comparing new un-projected
+            geotiff coordinates against previous coordinates, by default 0.001
+        descriptions : dict, optional
+            Description of exclusion layers, by default None
+        scale_factors : dict, optional
+            Scale factors and dtypes to use when scaling given layers,
+            by default None
+        """
+        if isinstance(layers, list):
+            layers = {os.path.basename(lyr).split('.')[0]: lyr
+                      for lyr in layers}
+
+        if descriptions is None:
+            descriptions = {}
+
+        if scale_factors is None:
+            scale_factors = {}
+
+        excls = cls(excl_h5, chunks=chunks, replace=replace)
+        logger.info('Creating {}'.format(excl_h5))
+        for layer, geotiff in layers.items():
+            logger.info('- Transfering {}'.format(layer))
+            description = descriptions.get(layer, None)
+            scale = scale_factors.get(layer, None)
+            if scale is not None:
+                scale_factor = scale['scale_factor']
+                dtype = scale['dtype']
+            else:
+                scale_factor = None
+                dtype = None
+
+            excls.geotiff_to_layer(layer, geotiff, check_tiff=check_tiff,
+                                   transform_atol=transform_atol,
+                                   coord_atol=coord_atol,
+                                   description=description,
+                                   scale_factor=scale_factor,
+                                   dtype=dtype)
+
+    @classmethod
+    def extract_layers(cls, excl_h5, layers, chunks=(128, 128),
+                       hsds=False):
+        """
+        Extract given layers from exclusions .h5 file and save to disk
+        as GeoTiffs
+
+        Parameters
+        ----------
+        excl_h5 : str
+            Path to .h5 file containing or to contain exclusion layers
+        layers : dict
+            Dictionary mapping layers to geotiffs to create
+        chunks : tuple
+            Chunk size of exclusions in .h5 and Geotiffs
+        hsds : bool
+            Boolean flag to use h5pyd to handle .h5 'files' hosted on AWS
+            behind HSDS
+        """
+        excls = cls(excl_h5, chunks=chunks, hsds=hsds)
+        logger.info('Extracting layers from {}'.format(excl_h5))
+        for layer, geotiff in layers.items():
+            logger.info('- Extracting {}'.format(geotiff))
+            excls.layer_to_geotiff(layer, geotiff)
+
+    @classmethod
+    def extract_all_layers(cls, excl_h5, out_dir, chunks=(128, 128),
+                           hsds=False):
+        """
+        Extract all layers from 
exclusions .h5 file and save to disk + as GeoTiffs + + Parameters + ---------- + excl_h5 : str + Path to .h5 file containing or to contain exclusion layers + out_dir : str + Path to output directory into which layers should be saved as + GeoTiffs + chunks : tuple + Chunk size of exclusions in .h5 and Geotiffs + hsds : bool + Boolean flag to use h5pyd to handle .h5 'files' hosted on AWS + behind HSDS + """ + if not os.path.exists(out_dir): + os.makedirs(out_dir) + + excls = cls(excl_h5, chunks=chunks, hsds=hsds) + logger.info('Extracting layers from {}'.format(excl_h5)) + for layer in excls.layers: + geotiff = os.path.join(out_dir, "{}.tif".format(layer)) + logger.info('- Extracting {}'.format(geotiff)) + excls.layer_to_geotiff(layer, geotiff) + + +class Regulations: + """Exclusion Regulations. """ + + REQUIRED_COLUMNS = ["Feature Type", "Value Type", "Value", "FIPS"] + + def __init__(self, base_regulation_value, regulations_fpath=None, + multiplier=None): + """ + Parameters + ---------- + base_regulation_value : float | int + Base regulation value. This value will be used to calculate + the exclusion regulation value (e.g. setback distance, + flicker hours, etc.) if a multiplier is provided either via + the `regulations_fpath`csv or the `multiplier` input. In + these cases, the exclusion regulation value will be + set to `base_regulation_value * multiplier`. + regulations_fpath : str | None, optional + Path to regulations .csv or .gpkg file. At a minimum, this + file must contain the following columns: `Feature Type` + which labels the type of exclusion that each row represents, + `Value Type`, which specifies wether the value is a + multiplier or static height, `Value`, which specifies the + numeric value of the exclusion or multiplier, and `FIPS`, + which specifies a unique 5-digit code for each county (this + can be an integer - no leading zeros required). 
Valid + options for the `Value Type` are: + - "Structure Height Multiplier" + - "Meters" + If this input is `None`, a generic regulation value of + `base_regulation_value * multiplier` is used. By default + `None`. + multiplier : int | float | str | None, optional + A regulation value multiplier to use if regulations are not + supplied. This multiplier will be applied to the + ``base_regulation_value`` to calculate the exclusion + regulation value. If supplied along with + ``regulations_fpath``, this input will be used to calculate + exclusions for all counties not listed in the regulations + file. By default `None`. + """ + self._base_regulation_value = base_regulation_value + self._regulations = None + self._multi = multiplier + self._preflight_check(regulations_fpath) + + def _preflight_check(self, regulations_fpath): + """Apply preflight checks to the regulations path and multiplier. + + Run preflight checks on exclusion inputs: + 1) Ensure either a regulations .csv or + an exclusion value multiplier (or both) is provided + 2) Ensure regulations has county FIPS, map regulations to county + geometries from exclusions .h5 file + + Parameters + ---------- + regulations_fpath : str | None + Path to regulations .csv file, if `None`, create global + exclusions. + """ + if regulations_fpath: + try: + self.regulations = parse_table(regulations_fpath) + except ValueError: + self.regulations = gpd.read_file(regulations_fpath) + logger.debug('Computing exclusions using regulations provided ' + 'in: {}'.format(regulations_fpath)) + + if self._multi: + logger.debug('Computing exclusions using base regulation value ' + 'multiplier of {}'.format(self._multi)) + + if not regulations_fpath and not self._multi: + msg = ('Computing exclusions requires a regulations ' + '.csv file and/or a generic multiplier!') + logger.error(msg) + raise RuntimeError(msg) + + @property + def regulations(self): + """Regulations table. 
+ + Returns + ------- + geopandas.GeoDataFrame | None + """ + return self._regulations + + @regulations.setter + def regulations(self, regulations): + if regulations is None: + msg = "Cannot set regulations to `None`" + logger.error(msg) + raise ValueError(msg) + self._regulations = regulations + self._validate_regulations() + + def _validate_regulations(self): + """Perform several validations on regulations""" + + self._convert_cols_to_title() + self._check_for_req_missing_cols() + self._remove_nans_from_req_cols() + self._casefold_feature_types() + + def _convert_cols_to_title(self): + """Convert column names in regulations DataFrame to str.title(). """ + new_col_names = {col: col.lower().title() + for col in self._regulations.columns + if col.lower() not in {"geometry", "fips"}} + self._regulations = self._regulations.rename(new_col_names, axis=1) + + def _check_for_req_missing_cols(self): + """Check for missing (required) columns in regulations DataFrame. """ + missing = [col for col in self.REQUIRED_COLUMNS + if col not in self._regulations] + if any(missing): + msg = ('Regulations are missing the following required columns: {}' + .format(missing)) + logger.error(msg) + raise RuntimeError(msg) + + def _remove_nans_from_req_cols(self): + """Remove rows with NaN values from required columns. """ + for col in self.REQUIRED_COLUMNS: + na_rows = self._regulations[col].isna() + self._regulations = self._regulations[~na_rows] + + def _casefold_feature_types(self): + """Casefold "Feature Type" values. """ + feature_types = self._regulations['Feature Type'].str.strip() + feature_types = feature_types.str.casefold() + self._regulations['Feature Type'] = feature_types + + @property + def base_regulation_value(self): + """The base regulation value. + + Returns + ------- + int | float + """ + return self._base_regulation_value + + @property + def generic(self): + """Default regulation value. + + This value is used for global regulations. 
+
+        Returns
+        -------
+        float | None
+        """
+        if self.multiplier is None:
+            regulation_value = None
+        else:
+            regulation_value = self.base_regulation_value * self.multiplier
+
+        return regulation_value
+
+    @property
+    def multiplier(self):
+        """Generic exclusion value multiplier.
+
+        Returns
+        -------
+        int | float
+        """
+        return self._multi
+
+    @property
+    def locals_exist(self):
+        """Flag indicating whether local regulations exist.
+
+        Returns
+        -------
+        bool
+        """
+        return (self.regulations is not None and not self.regulations.empty)
+
+    @property
+    def generic_exists(self):
+        """Flag indicating whether generic regulations exist.
+
+        Returns
+        -------
+        bool
+        """
+        return self.generic is not None
+
+    def __iter__(self):
+        if self._regulations is None:
+            return
+        for ind, county_regulations in self.regulations.iterrows():
+            reg = self._county_regulation_value(county_regulations)
+            if reg is None:
+                continue
+            yield reg, self.regulations.iloc[[ind]].copy()
+
+    def _county_regulation_value(self, county_regulations):
+        """Retrieve county exclusion regulation. 
""" + exclusion_type = county_regulations["Value Type"].strip() + reg = float(county_regulations["Value"]) + if exclusion_type.lower() == "structure height multiplier": + reg *= self.base_regulation_value + elif exclusion_type.lower() != "meters": + msg = ("Cannot create exclusions for {}, expecting " + '"Meters", but got {!r}' + .format(county_regulations["County"], exclusion_type)) + logger.warning(msg) + warn(msg) + return + return reg \ No newline at end of file diff --git a/reVX/utilities/exclusions_converter.py b/reVX/utilities/exclusions_converter.py deleted file mode 100644 index d17dbf194..000000000 --- a/reVX/utilities/exclusions_converter.py +++ /dev/null @@ -1,651 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Handler to convert exclusion to/from .h5 and .geotiff -""" -import h5py -import json -import logging -import numpy as np -import os -from pyproj.crs import CRS -import rasterio -from warnings import warn - -from reV.handlers.exclusions import ExclusionLayers -from reV.handlers.outputs import Outputs - -from reVX.handlers.geotiff import Geotiff -from reVX.utilities.exceptions import ExclusionsCheckError -from reVX.utilities.utilities import log_versions - -logger = logging.getLogger(__name__) - - -class ExclusionsConverter: - """ - Convert exclusion layers between .h5 and .tif (geotiff) - """ - def __init__(self, excl_h5, hsds=False, chunks=(128, 128), replace=True): - """ - Parameters - ---------- - excl_h5 : str - Path to .h5 file containing or to contain exclusion layers - hsds : bool, optional - Boolean flag to use h5pyd to handle .h5 'files' hosted on AWS - behind HSDS, by default False - chunks : tuple, optional - Chunk size of exclusions in .h5 and Geotiffs, by default (128, 128) - replace : bool, optional - Flag to replace existing layers if needed, by default True - """ - log_versions(logger) - self._excl_h5 = excl_h5 - self._hsds = hsds - self._chunks = chunks - self._replace = replace - - def __repr__(self): - msg = "{} for 
{}".format(self.__class__.__name__, self._excl_h5) - return msg - - def __getitem__(self, layer): - """ - Parameters - ---------- - layer : str - Layer to extract data for - - Returns - ------- - profile : dict - Geotiff profile (attributes) - values : ndarray - Geotiff data - """ - - if layer not in self.layers: - msg = "{} is not present in {}".format(layer, self._excl_h5) - logger.error(msg) - raise KeyError(msg) - - profile, values = self._extract_layer(self._excl_h5, layer, - hsds=self._hsds) - return profile, values - - def __setitem__(self, layer, geotiff): - """ - Parameters - ---------- - layer : str - Layer to set - geotiff : str - Path to GeoTiff to load data from - """ - self.geotiff_to_layer(layer, geotiff) - - @property - def layers(self): - """ - Available exclusion layers in .h5 file - - Returns - ------- - layers : list - Available layers in .h5 file - """ - with ExclusionLayers(self._excl_h5, hsds=self._hsds) as exc: - layers = exc.layers - - return layers - - @staticmethod - def _init_h5(excl_h5, geotiff, chunks=(128, 128)): - """ - Initialize exclusions .h5 file from geotiff: - - Transfer profile, shape, and meta - - Parameters - ---------- - excl_h5 : str - Path to .h5 file containing exclusion layers - geotiff : str - Path to geotiff file - chunks : tuple - Chunk size of exclusions in Geotiff - """ - logger.debug('\t- Initializing {} from {}' - .format(excl_h5, geotiff)) - with Geotiff(geotiff, chunks=chunks) as src: - profile = src.profile - shape = src.shape - lat, lon = src.lat_lon - logger.debug('\t- "profile", "meta", and "shape" extracted from {}' - .format(geotiff)) - - try: - with h5py.File(excl_h5, mode='w') as dst: - dst.attrs['profile'] = json.dumps(profile) - logger.debug('\t- Default profile:\n{}'.format(profile)) - dst.attrs['shape'] = shape - logger.debug('\t- Default shape:\n{}'.format(shape)) - dst.attrs['chunks'] = chunks - logger.debug('\t- Default chunks:\n{}'.format(chunks)) - - dst.create_dataset('latitude', 
shape=lat.shape, - dtype=np.float32, data=lat, - chunks=chunks) - logger.debug('\t- latitude coordiantes created') - - dst.create_dataset('longitude', shape=lon.shape, - dtype=np.float32, data=lon, - chunks=chunks) - logger.debug('\t- longitude coordiantes created') - except Exception: - logger.exception("Error initilizing {}".format(excl_h5)) - if os.path.exists(excl_h5): - os.remove(excl_h5) - - @staticmethod - def _check_crs(baseline_crs, test_crs, ignore_keys=('no_defs',)): - """ - Compare baseline and test crs values - - Parameters - ---------- - baseline_crs : dict - Baseline CRS to use a truth, must be a dict - test_crs : dict - Test CRS to compare with baseline, must be a dictionary - ignore_keys : tuple - Keys to not check - - Returns - ------- - bad_crs : bool - Flag if crs' do not match - """ - bad_crs = False - for k, true_v in baseline_crs.items(): - if k not in ignore_keys: - test_v = test_crs.get(k, true_v) - if true_v != test_v: - bad_crs = True - - return bad_crs - - @classmethod - def _check_geotiff(cls, excl_h5, geotiff, chunks=(128, 128), - transform_atol=0.01, coord_atol=0.001): - """ - Compare geotiff with exclusion layer, raise any errors - - Parameters - ---------- - excl_h5 : str - Path to .h5 file containing exclusion layers - geotiff : str - Path to geotiff file - chunks : tuple - Chunk size of exclusions in Geotiff - transform_atol : float - Absolute tolerance parameter when comparing geotiff transform data. - coord_atol : float - Absolute tolerance parameter when comparing new un-projected - geotiff coordinates against previous coordinates. - """ - with Geotiff(geotiff, chunks=chunks) as tif: - with ExclusionLayers(excl_h5) as h5: - if tif.bands > 1: - error = ('{} contains more than one band!' 
- .format(geotiff)) - logger.error(error) - raise ExclusionsCheckError(error) - - if not np.array_equal(h5.shape, tif.shape): - error = ('Shape of exclusion data in {} and {} do not ' - 'match!'.format(geotiff, excl_h5)) - logger.error(error) - raise ExclusionsCheckError(error) - - profile = h5.profile - h5_crs = CRS.from_string(profile['crs']).to_dict() - tif_crs = CRS.from_string(tif.profile['crs']).to_dict() - bad_crs = cls._check_crs(h5_crs, tif_crs) - if bad_crs: - error = ('Geospatial "crs" in {} and {} do not match!' - '\n {} !=\n {}' - .format(geotiff, excl_h5, tif_crs, h5_crs)) - logger.error(error) - raise ExclusionsCheckError(error) - - if not np.allclose(profile['transform'], - tif.profile['transform'], - atol=transform_atol): - error = ('Geospatial "transform" in {} and {} do not ' - 'match!\n {} !=\n {}' - .format(geotiff, excl_h5, profile['transform'], - tif.profile['transform'])) - logger.error(error) - raise ExclusionsCheckError(error) - - lat, lon = tif.lat_lon - if not np.allclose(h5.latitude, lat, atol=coord_atol): - error = ('Latitude coordinates {} and {} do not match to ' - 'within {} degrees!' - .format(geotiff, excl_h5, coord_atol)) - logger.error(error) - raise ExclusionsCheckError(error) - - if not np.allclose(h5.longitude, lon, atol=coord_atol): - error = ('Longitude coordinates {} and {} do not match to ' - 'within {} degrees!' 
- .format(geotiff, excl_h5, coord_atol)) - logger.error(error) - raise ExclusionsCheckError(error) - - @classmethod - def _parse_tiff(cls, geotiff, excl_h5=None, chunks=(128, 128), - check_tiff=True, transform_atol=0.01, coord_atol=0.001): - """ - Extract exclusion layer from given geotiff, compare with excl_h5 - if provided - - Parameters - ---------- - geotiff : str - Path to geotiff file - excl_h5 : str, optional - Path to .h5 file containing exclusion layers, by default None - chunks : tuple, optional - Chunk size of exclusions in Geotiff, by default (128, 128) - check_tiff : bool, optional - Flag to check tiff profile and coordinates against exclusion .h5 - profile and coordinates, by default True - transform_atol : float, optional - Absolute tolerance parameter when comparing geotiff transform data, - by default 0.01 - coord_atol : float, optional - Absolute tolerance parameter when comparing new un-projected - geotiff coordinates against previous coordinates, by default 0.001 - - Returns - ------- - profile : dict - Geotiff profile (attributes) - values : ndarray - Geotiff data - """ - if excl_h5 is not None and check_tiff: - cls._check_geotiff(excl_h5, geotiff, chunks=chunks, - transform_atol=transform_atol, - coord_atol=coord_atol) - - with Geotiff(geotiff, chunks=chunks) as tif: - profile, values = tif.profile, tif.values - - return profile, values - - @staticmethod - def _write_layer(excl_h5, layer, profile, values, chunks=(128, 128), - description=None, scale_factor=None): - """ - Write exclusion layer to .h5 file - - Parameters - ---------- - excl_h5 : str - Path to .h5 file containing exclusion layers - layer : str - Dataset name in .h5 file - profile : dict - Geotiff profile (attributes) - values : ndarray - Geotiff data - chunks : tuple - Chunk size of dataset in .h5 file - description : str - Description of exclusion layer - scale_factor : int | float, optional - Scale factor to use to scale geotiff data when added to the .h5 - file, by default 
None - """ - if len(chunks) < 3: - chunks = (1, ) + chunks - - if values.ndim < 3: - values = np.expand_dims(values, 0) - - with h5py.File(excl_h5, mode='a') as f: - if layer in f: - ds = f[layer] - ds[...] = values - logger.debug('\t- {} values replaced'.format(layer)) - else: - ds = f.create_dataset(layer, shape=values.shape, - dtype=values.dtype, chunks=chunks, - data=values) - logger.debug('\t- {} created and loaded'.format(layer)) - - ds.attrs['profile'] = json.dumps(profile) - logger.debug('\t- Unique profile for {} added:\n{}' - .format(layer, profile)) - if description is not None: - ds.attrs['description'] = description - logger.debug('\t- Description for {} added:\n{}' - .format(layer, description)) - - if scale_factor is not None: - ds.attrs['scale_factor'] = scale_factor - logger.debug('\t- scale_factor for {} added:\n{}' - .format(layer, scale_factor)) - - @classmethod - def _geotiff_to_h5(cls, excl_h5, layer, geotiff, chunks=(128, 128), - check_tiff=True, transform_atol=0.01, coord_atol=0.001, - description=None, scale_factor=None, dtype='int16'): - """ - Transfer geotiff exclusions to h5 confirming they match existing layers - - Parameters - ---------- - excl_h5 : str - Path to .h5 file containing exclusion layers - layer : str - Layer to extract - geotiff : str - Path to geotiff file - chunks : tuple, optional - Chunk size of exclusions in Geotiff, by default (128, 128) - check_tiff : bool, optional - Flag to check tiff profile and coordinates against exclusion .h5 - profile and coordinates, by default True - transform_atol : float, optional - Absolute tolerance parameter when comparing geotiff transform data, - by default 0.01 - coord_atol : float, optional - Absolute tolerance parameter when comparing new un-projected - geotiff coordinates against previous coordinates, by default 0.001 - description : str, optional - Description of exclusion layer, by default None - scale_factor : int | float, optional - Scale factor to use to scale geotiff data 
when added to the .h5 - file, by default None - dtype : str, optional - Dtype to save geotiff data as in the .h5 file. Only used when - 'scale_factor' is not None, by default 'int16' - """ - logger.debug('\t- {} being extracted from {} and added to {}' - .format(layer, geotiff, os.path.basename(excl_h5))) - - profile, values = cls._parse_tiff( - geotiff, excl_h5=excl_h5, chunks=chunks, check_tiff=check_tiff, - transform_atol=transform_atol, coord_atol=coord_atol) - - if scale_factor is not None: - attrs = {'scale_factor': scale_factor} - values = Outputs._check_data_dtype(layer, values, dtype, - attrs=attrs) - - cls._write_layer(excl_h5, layer, profile, values, - chunks=chunks, description=description, - scale_factor=scale_factor) - - @staticmethod - def _write_geotiff(geotiff, profile, values): - """ - Write values to geotiff with given profile - - Parameters - ---------- - geotiff : str - Path to geotiff file to save data to - profile : dict - Geotiff profile (attributes) - values : ndarray - Geotiff data - """ - out_dir = os.path.dirname(geotiff) - if not os.path.exists(out_dir): - logger.debug("Creating {}".format(out_dir)) - os.makedirs(out_dir) - - if values.shape[0] != 1: - values = np.expand_dims(values, 0) - - dtype = values.dtype.name - profile['dtype'] = dtype - if np.issubdtype(dtype, np.integer): - dtype_max = np.iinfo(dtype).max - else: - dtype_max = np.finfo(dtype).max - - profile['nodata'] = dtype_max - - with rasterio.open(geotiff, 'w', **profile) as f: - f.write(values) - logger.debug('\t- {} created'.format(geotiff)) - - @classmethod - def _extract_layer(cls, excl_h5, layer, geotiff=None, hsds=False): - """ - Extract given layer from exclusions .h5 file and write to geotiff .tif - - Parameters - ---------- - excl_h5 : str - Path to .h5 file containing exclusion layers - layer : str - Layer to extract - geotiff : str - Path to geotiff file - hsds : bool - Boolean flag to use h5pyd to handle .h5 'files' hosted on AWS - behind HSDS - - Returns - 
------- - profile : dict - Geotiff profile (attributes) - values : ndarray - Geotiff data - """ - logger.debug('\t - Extracting {} from {}' - .format(layer, os.path.basename(excl_h5))) - with ExclusionLayers(excl_h5, hsds=hsds) as f: - profile = f.get_layer_profile(layer) - values = f.get_layer_values(layer) - - if geotiff is not None: - logger.debug('\t- Writing {} to {}'.format(layer, geotiff)) - cls._write_geotiff(geotiff, profile, values) - - return profile, values - - def geotiff_to_layer(self, layer, geotiff, check_tiff=True, - transform_atol=0.01, coord_atol=0.001, - description=None, scale_factor=None, dtype='int16'): - """ - Transfer geotiff exclusions to h5 confirming they match existing layers - - Parameters - ---------- - layer : str - Layer to extract - geotiff : str - Path to geotiff file - check_tiff : bool, optional - Flag to check tiff profile and coordinates against exclusion .h5 - profile and coordinates, by default True - transform_atol : float, optional - Absolute tolerance parameter when comparing geotiff transform data, - by default 0.01 - coord_atol : float, optional - Absolute tolerance parameter when comparing new un-projected - geotiff coordinates against previous coordinates, by default 0.001 - description : str, optional - Description of exclusion layer, by default None - scale_factor : int | float, optional - Scale factor to use to scale geotiff data when added to the .h5 - file, by default None - dtype : str, optional - Dtype to save geotiff data as in the .h5 file. 
Only used when - 'scale_factor' is not None, by default 'int16' - """ - if not os.path.exists(self._excl_h5): - self._init_h5(self._excl_h5, geotiff, chunks=self._chunks) - - if layer in self.layers: - msg = ("{} is already present in {}" - .format(layer, self._excl_h5)) - if self._replace: - msg += " and will be replaced" - logger.warning(msg) - warn(msg) - else: - msg += ", to 'replace' set to True" - logger.error(msg) - raise KeyError(msg) - - self._geotiff_to_h5(self._excl_h5, layer, geotiff, - chunks=self._chunks, - check_tiff=check_tiff, - transform_atol=transform_atol, - coord_atol=coord_atol, - description=description, - scale_factor=scale_factor, - dtype=dtype) - - def layer_to_geotiff(self, layer, geotiff): - """ - Extract desired layer from .h5 file and write to geotiff .tif - - Parameters - ---------- - layer : str - Layer to extract - geotiff : str - Path to geotiff file - """ - self._extract_layer(self._excl_h5, layer, geotiff=geotiff, - hsds=self._hsds) - - @classmethod - def layers_to_h5(cls, excl_h5, layers, chunks=(128, 128), - replace=True, check_tiff=True, - transform_atol=0.01, coord_atol=0.001, - descriptions=None, scale_factors=None): - """ - Create exclusions .h5 file, or load layers into existing exclusion .h5 - file from provided geotiffs - - Parameters - ---------- - excl_h5 : str - Path to .h5 file containing or to contain exclusion layers - layers : list | dict - List of geotiffs to load - or dictionary mapping goetiffs to the layers to load - chunks : tuple, optional - Chunk size of exclusions in Geotiff, by default (128, 128) - replace : bool, optional - Flag to replace existing layers if needed, by default True - check_tiff : bool, optional - Flag to check tiff profile and coordinates against exclusion .h5 - profile and coordinates, by default True - transform_atol : float, optional - Absolute tolerance parameter when comparing geotiff transform data, - by default 0.01 - coord_atol : float, optional - Absolute tolerance parameter 
when comparing new un-projected - geotiff coordinates against previous coordinates, by default 0.001 - description : dict, optional - Description of exclusion layers, by default None - scale_factor : dict, optional - Scale factors and dtypes to use when scaling given layers, - by default None - """ - if isinstance(layers, list): - layers = {os.path.basename(lyr).split('.')[0]: lyr - for lyr in layers} - - if descriptions is None: - descriptions = {} - - if scale_factors is None: - scale_factors = {} - - excls = cls(excl_h5, chunks=chunks, replace=replace) - logger.info('Creating {}'.format(excl_h5)) - for layer, geotiff in layers.items(): - logger.info('- Transfering {}'.format(layer)) - description = descriptions.get(layer, None) - scale = scale_factors.get(layer, None) - if scale is not None: - scale_factor = scale['scale_factor'] - dtype = scale['dtype'] - else: - scale_factor = None - dtype = None - - excls.geotiff_to_layer(layer, geotiff, check_tiff=check_tiff, - transform_atol=transform_atol, - coord_atol=coord_atol, - description=description, - scale_factor=scale_factor, - dtype=dtype) - - @classmethod - def extract_layers(cls, excl_h5, layers, chunks=(128, 128), - hsds=False): - """ - Extract given layers from exclusions .h5 file and save to disk - as GeoTiffs - - Parameters - ---------- - excl_h5 : str - Path to .h5 file containing or to contain exclusion layers - layers : dict - Dictionary mapping layers to geotiffs to create - chunks : tuple - Chunk size of exclusions in .h5 and Geotiffs - hsds : bool - Boolean flag to use h5pyd to handle .h5 'files' hosted on AWS - behind HSDS - """ - excls = cls(excl_h5, chunks=chunks, hsds=hsds) - logger.info('Extracting layers from {}'.format(excl_h5)) - for layer, geotiff in layers.items(): - logger.info('- Extracting {}'.format(geotiff)) - excls.layer_to_geotiff(layer, geotiff) - - @classmethod - def extract_all_layers(cls, excl_h5, out_dir, chunks=(128, 128), - hsds=False): - """ - Extract all layers from 
exclusions .h5 file and save to disk - as GeoTiffs - - Parameters - ---------- - excl_h5 : str - Path to .h5 file containing or to contain exclusion layers - out_dir : str - Path to output directory into which layers should be saved as - GeoTiffs - chunks : tuple - Chunk size of exclusions in .h5 and Geotiffs - hsds : bool - Boolean flag to use h5pyd to handle .h5 'files' hosted on AWS - behind HSDS - """ - if not os.path.exists(out_dir): - os.makedirs(out_dir) - - excls = cls(excl_h5, chunks=chunks, hsds=hsds) - logger.info('Extracting layers from {}'.format(excl_h5)) - for layer in excls.layers: - geotiff = os.path.join(out_dir, "{}.tif".format(layer)) - logger.info('- Extracting {}'.format(geotiff)) - excls.layer_to_geotiff(layer, geotiff) diff --git a/tests/test_setbacks.py b/tests/test_setbacks.py index 6779c8bc3..5ba170432 100644 --- a/tests/test_setbacks.py +++ b/tests/test_setbacks.py @@ -252,7 +252,7 @@ def test_setbacks_no_computation(setbacks_class): regs = Regulations(10, regulations_fpath=regs_fpath) setbacks = setbacks_class(EXCL_H5, regs) with pytest.raises(ValueError): - setbacks.compute_setbacks("RhodeIsland.file") + setbacks.compute_exclusions("RhodeIsland.file") @pytest.mark.parametrize( @@ -266,7 +266,7 @@ def test_setbacks_no_generic_value(setbacks_class, feature_file): """Test setbacks computation for invalid input. 
""" regs = Regulations(0, regulations_fpath=None, multiplier=1) setbacks = setbacks_class(EXCL_H5, regs) - out = setbacks.compute_setbacks(feature_file) + out = setbacks.compute_exclusions(feature_file) assert np.isclose(out, 0).all() @@ -282,7 +282,7 @@ def test_generic_structure(generic_wind_regulations): setbacks = StructureSetbacks(EXCL_H5, generic_wind_regulations) structure_path = os.path.join(TESTDATADIR, 'setbacks', 'RhodeIsland.geojson') - test = setbacks.compute_setbacks(structure_path) + test = setbacks.compute_exclusions(structure_path) assert np.allclose(baseline, test) @@ -293,7 +293,7 @@ def test_generic_structure_gpkg(generic_wind_regulations): """ setbacks = StructureSetbacks(EXCL_H5, generic_wind_regulations) structure_path = os.path.join(TESTDATADIR, 'setbacks', 'RhodeIsland.gpkg') - test = setbacks.compute_setbacks(structure_path) + test = setbacks.compute_exclusions(structure_path) assert test.sum() == 6830 @@ -306,14 +306,18 @@ def test_local_structures(max_workers, county_wind_regulations_gpkg): baseline = os.path.join(TESTDATADIR, 'setbacks', 'existing_structures.tif') with Geotiff(baseline) as tif: - baseline = tif.values + baseline = tif.values[0] setbacks = StructureSetbacks(EXCL_H5, county_wind_regulations_gpkg) structure_path = os.path.join(TESTDATADIR, 'setbacks', 'RhodeIsland.geojson') - test = setbacks.compute_setbacks(structure_path, max_workers=max_workers) + test = setbacks.compute_exclusions(structure_path, max_workers=max_workers) - assert np.allclose(baseline, test) + # baseline was generated when code did not clip to county bounds, + # so test should be a subset of baseline + assert baseline.sum() > test.sum() + assert (baseline[test > 0] == 1).all() + assert (test[baseline == 0] == 0).all() @pytest.mark.parametrize('rail_path', @@ -330,7 +334,7 @@ def test_generic_railroads(rail_path, generic_wind_regulations): baseline = tif.values setbacks = RailSetbacks(EXCL_H5, generic_wind_regulations) - test = 
setbacks.compute_setbacks(rail_path) + test = setbacks.compute_exclusions(rail_path) assert np.allclose(baseline, test) @@ -342,14 +346,18 @@ def test_local_railroads(max_workers, county_wind_regulations_gpkg): """ baseline = os.path.join(TESTDATADIR, 'setbacks', 'existing_rails.tif') with Geotiff(baseline) as tif: - baseline = tif.values + baseline = tif.values[0] setbacks = RailSetbacks(EXCL_H5, county_wind_regulations_gpkg) rail_path = os.path.join(TESTDATADIR, 'setbacks', 'RI_Railroads', 'RI_Railroads.shp') - test = setbacks.compute_setbacks(rail_path, max_workers=max_workers) + test = setbacks.compute_exclusions(rail_path, max_workers=max_workers) - assert np.allclose(baseline, test) + # baseline was generated when code did not clip to county bounds, + # so test should be a subset of baseline + assert baseline.sum() > test.sum() + assert (baseline[test > 0] == 1).all() + assert (test[baseline == 0] == 0).all() def test_generic_parcels(): @@ -359,11 +367,11 @@ def test_generic_parcels(): 'Rhode_Island.gpkg') regulations_x1 = Regulations(BASE_SETBACK_DIST, multiplier=1) setbacks_x1 = ParcelSetbacks(EXCL_H5, regulations_x1) - test_x1 = setbacks_x1.compute_setbacks(parcel_path) + test_x1 = setbacks_x1.compute_exclusions(parcel_path) regulations_x100 = Regulations(BASE_SETBACK_DIST, multiplier=100) setbacks_x100 = ParcelSetbacks(EXCL_H5, regulations_x100) - test_x100 = setbacks_x100.compute_setbacks(parcel_path) + test_x100 = setbacks_x100.compute_exclusions(parcel_path) # when the setbacks are so large that they span the entire parcels, # a total of 438 regions should be excluded for this particular @@ -386,11 +394,11 @@ def test_generic_parcels_with_invalid_shape_input(): setbacks = ParcelSetbacks(EXCL_H5, regulations) # Ensure data we are using contains invalid shapes - parcels = setbacks._parse_features(parcel_path) + parcels = setbacks.parse_features(parcel_path) assert not parcels.geometry.is_valid.any() # This code would throw an error if invalid shape not 
handled properly - test = setbacks.compute_setbacks(parcel_path) + test = setbacks.compute_exclusions(parcel_path) # add a test for expected output assert not test.any() @@ -418,7 +426,8 @@ def test_local_parcels_solar(max_workers, regulations_fpath): parcel_path = os.path.join(TESTDATADIR, 'setbacks', 'RI_Parcels', 'Rhode_Island.gpkg') - test = setbacks.compute_setbacks(parcel_path, max_workers=max_workers) + test = setbacks.compute_exclusions(parcel_path, + max_workers=max_workers) assert test.sum() == 3 @@ -462,7 +471,8 @@ def test_local_parcels_wind(max_workers, regulations_fpath): parcel_path = os.path.join(TESTDATADIR, 'setbacks', 'RI_Parcels', 'Rhode_Island.gpkg') - test = setbacks.compute_setbacks(parcel_path, max_workers=max_workers) + test = setbacks.compute_exclusions(parcel_path, + max_workers=max_workers) assert test.sum() == 3 @@ -494,11 +504,11 @@ def test_generic_water_setbacks(water_path): regulations_x1 = Regulations(BASE_SETBACK_DIST, multiplier=1) setbacks_x1 = WaterSetbacks(EXCL_H5, regulations_x1) - test_x1 = setbacks_x1.compute_setbacks(water_path) + test_x1 = setbacks_x1.compute_exclusions(water_path) regulations_x100 = Regulations(BASE_SETBACK_DIST, multiplier=100) setbacks_x100 = WaterSetbacks(EXCL_H5, regulations_x100) - test_x100 = setbacks_x100.compute_setbacks(water_path) + test_x100 = setbacks_x100.compute_exclusions(water_path) # A total of 88,994 regions should be excluded for this particular # Rhode Island subset @@ -531,7 +541,7 @@ def test_local_water_solar(max_workers, regulations_fpath): water_path = os.path.join(TESTDATADIR, 'setbacks', 'RI_Water', 'Rhode_Island.shp') - test = setbacks.compute_setbacks(water_path, max_workers=max_workers) + test = setbacks.compute_exclusions(water_path, max_workers=max_workers) assert test.sum() == 83 @@ -571,7 +581,7 @@ def test_local_water_wind(max_workers, regulations_fpath): water_path = os.path.join(TESTDATADIR, 'setbacks', 'RI_Water', 'Rhode_Island.shp') - test = 
setbacks.compute_setbacks(water_path, max_workers=max_workers) + test = setbacks.compute_exclusions(water_path, max_workers=max_workers) assert test.sum() == 83 @@ -650,8 +660,8 @@ def test_partial_exclusions(): setbacks_hr = ParcelSetbacks(EXCL_H5, regulations, weights_calculation_upscale_factor=mult) - exclusion_mask = setbacks.compute_setbacks(parcel_path) - inclusion_weights = setbacks_hr.compute_setbacks(parcel_path) + exclusion_mask = setbacks.compute_exclusions(parcel_path) + inclusion_weights = setbacks_hr.compute_exclusions(parcel_path) assert exclusion_mask.shape == inclusion_weights.shape assert (inclusion_weights < 1).any() @@ -673,8 +683,8 @@ def test_partial_exclusions_upscale_factor_less_than_1(mult): setbacks_hr = ParcelSetbacks(EXCL_H5, regulations, weights_calculation_upscale_factor=mult) - exclusion_mask = setbacks.compute_setbacks(parcel_path) - inclusion_weights = setbacks_hr.compute_setbacks(parcel_path) + exclusion_mask = setbacks.compute_exclusions(parcel_path) + inclusion_weights = setbacks_hr.compute_exclusions(parcel_path) assert np.isclose(exclusion_mask, inclusion_weights).all() @@ -687,7 +697,7 @@ def test_partial_exclusions_upscale_factor_less_than_1(mult): REGS_GPKG, 332_887, 142, [HUB_HEIGHT, ROTOR_DIAMETER]), (RailSetbacks, WindRegulations, os.path.join(TESTDATADIR, 'setbacks', 'Rhode_Island_Railroads.gpkg'), - REGS_GPKG, 754_082, 9_402, [HUB_HEIGHT, ROTOR_DIAMETER]), + REGS_GPKG, 754_082, 9_276, [HUB_HEIGHT, ROTOR_DIAMETER]), (ParcelSetbacks, WindRegulations, os.path.join(TESTDATADIR, 'setbacks', 'RI_Parcels', 'Rhode_Island.gpkg'), PARCEL_REGS_FPATH_VALUE, 474, 3, [HUB_HEIGHT, ROTOR_DIAMETER]), @@ -699,7 +709,7 @@ def test_partial_exclusions_upscale_factor_less_than_1(mult): REGS_FPATH, 260_963, 112, [BASE_SETBACK_DIST + 199]), (RailSetbacks, Regulations, os.path.join(TESTDATADIR, 'setbacks', 'Rhode_Island_Railroads.gpkg'), - REGS_FPATH, 5_355, 194, [BASE_SETBACK_DIST]), + REGS_FPATH, 5_355, 163, [BASE_SETBACK_DIST]), 
(ParcelSetbacks, Regulations, os.path.join(TESTDATADIR, 'setbacks', 'RI_Parcels', 'Rhode_Island.gpkg'), PARCEL_REGS_FPATH_VALUE, 438, 3, [BASE_SETBACK_DIST]), @@ -716,7 +726,7 @@ def test_merged_setbacks(setbacks_class, regulations_class, features_path, multiplier=100) generic_setbacks = setbacks_class(EXCL_H5, regulations, weights_calculation_upscale_factor=sf) - generic_layer = generic_setbacks.compute_setbacks(features_path, + generic_layer = generic_setbacks.compute_exclusions(features_path, max_workers=1) with tempfile.TemporaryDirectory() as td: @@ -730,18 +740,18 @@ def test_merged_setbacks(setbacks_class, regulations_class, features_path, local_setbacks = setbacks_class(EXCL_H5, regulations, weights_calculation_upscale_factor=sf) - local_layer = local_setbacks.compute_setbacks(features_path, - max_workers=1) + local_layer = local_setbacks.compute_exclusions(features_path, + max_workers=1) regulations = regulations_class(*setback_distance, regulations_fpath=regs_fpath, multiplier=100) merged_setbacks = setbacks_class(EXCL_H5, regulations, weights_calculation_upscale_factor=sf) - merged_layer = merged_setbacks.compute_setbacks(features_path, - max_workers=1) + merged_layer = merged_setbacks.compute_exclusions(features_path, + max_workers=1) - local_setbacks._pre_process_regulations(features_path) + local_setbacks.pre_process_regulations(features_path) feats = local_setbacks.regulations_table # make sure the comparison layers match what we expect @@ -811,8 +821,8 @@ def test_merged_setbacks_missing_local(setbacks_class, regulations_class, regulations = regulations_class(*setback_distance, regulations_fpath=None, multiplier=100) generic_setbacks = setbacks_class(EXCL_H5, regulations) - generic_layer = generic_setbacks.compute_setbacks(features_path, - max_workers=1) + generic_layer = generic_setbacks.compute_exclusions(features_path, + max_workers=1) with tempfile.TemporaryDirectory() as td: regs = pd.read_csv(regulations_fpath).iloc[0:0] @@ -825,14 +835,14 @@ 
def test_merged_setbacks_missing_local(setbacks_class, regulations_class, multiplier=None) local_setbacks = setbacks_class(EXCL_H5, regulations) with pytest.raises(ValueError): - local_setbacks.compute_setbacks(features_path, max_workers=1) + local_setbacks.compute_exclusions(features_path, max_workers=1) regulations = regulations_class(*setback_distance, regulations_fpath=regs_fpath, multiplier=100) merged_setbacks = setbacks_class(EXCL_H5, regulations) - merged_layer = merged_setbacks.compute_setbacks(features_path, - max_workers=1) + merged_layer = merged_setbacks.compute_exclusions(features_path, + max_workers=1) # make sure the comparison layers match what we expect assert generic_layer.sum() == generic_sum From 85c7952f7d627ade9d1f290307d8198180382992 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Mon, 19 Sep 2022 12:35:24 -0600 Subject: [PATCH 14/42] max_flicker_exclusion_range can now be str --- reVX/turbine_flicker/turbine_flicker.py | 38 +++++++++++++++++---- reVX/turbine_flicker/turbine_flicker_cli.py | 10 +++--- tests/test_turbine_flicker.py | 30 ++++++++++++++++ 3 files changed, 68 insertions(+), 10 deletions(-) diff --git a/reVX/turbine_flicker/turbine_flicker.py b/reVX/turbine_flicker/turbine_flicker.py index 25a42a565..90a2bba9f 100644 --- a/reVX/turbine_flicker/turbine_flicker.py +++ b/reVX/turbine_flicker/turbine_flicker.py @@ -29,7 +29,7 @@ class TurbineFlicker: def __init__(self, excl_fpath, res_fpath, building_layer, resolution=640, grid_cell_size=90, - max_flicker_exclusion_range=10_000, + max_flicker_exclusion_range="10x", tm_dset='techmap_wtk'): """ Parameters @@ -48,12 +48,14 @@ def __init__(self, excl_fpath, res_fpath, building_layer, by default 640 grid_cell_size : float, optional Length (m) of a side of each grid cell in `excl_fpath`. - max_flicker_exclusion_range : float, optional + max_flicker_exclusion_range : float | int | str, optional Max distance (m) that flicker exclusions will extend in - any of the cardinal directions. 
Note that increasing this - value can lead to drastically instead memory requirements. - This value may be increased slightly in order to yield - odd exclusion array shapes. + any of the cardinal directions. Can also be a string like + ``"10x"`` (default), which is interpreted as 10 times the + turbine rotor diameter. Note that increasing this value can + lead to drastically increased memory requirements. This value + may be increased slightly (no more than the size of one grid + cell) in order to yield odd exclusion array shapes. tm_dset : str, optional Dataset / layer name for wind toolkit techmap, by default 'techmap_wtk' @@ -98,6 +100,7 @@ def _compute_shadow_flicker(self, lat, lon, rotor_diameter, wind_dir): # Import HOPP dynamically so its not a requirement from hybrid.flicker.flicker_mismatch_grid import FlickerMismatch + self._set_max_grid_size_for_odd_shaped_arr(rotor_diameter) mult = self._max_flicker_exclusion_range / rotor_diameter FlickerMismatch.diam_mult_nwe = mult FlickerMismatch.diam_mult_s = mult @@ -118,6 +121,29 @@ def _compute_shadow_flicker(self, lat, lon, rotor_diameter, wind_dir): return shadow_flicker + def _set_max_grid_size_for_odd_shaped_arr(self, rotor_diameter): + """Set the max_flicker_exclusion_range to multiple of 0.5 grids """ + excl_range = self._parse_max_flicker_exclusion_rang(rotor_diameter) + mult = np.round(excl_range / self._grid_cell_size) + 0.5 + self._max_flicker_exclusion_range = mult * self._grid_cell_size + + def _parse_max_flicker_exclusion_rang(self, rotor_diameter): + """Convert max_flicker_exclusion_range to float if necessary. 
""" + excl_range = self._max_flicker_exclusion_range + if isinstance(excl_range, str) and excl_range.endswith('x'): + return float(excl_range.strip('x')) * rotor_diameter + + if not isinstance(excl_range, (int, float)): + try: + excl_range = float(excl_range) + except Exception as e: + msg = ('max_flicker_exclusion_range must be numeric but ' + 'received: {}, {}'.format(excl_range, type(excl_range))) + logger.error(msg) + raise TypeError(msg) from e + + return excl_range + def _exclude_turbine_flicker(self, point, res_fpath, hub_height, rotor_diameter, flicker_threshold=30): """ diff --git a/reVX/turbine_flicker/turbine_flicker_cli.py b/reVX/turbine_flicker/turbine_flicker_cli.py index e53103673..e306d4c1a 100644 --- a/reVX/turbine_flicker/turbine_flicker_cli.py +++ b/reVX/turbine_flicker/turbine_flicker_cli.py @@ -8,7 +8,7 @@ import os from rex.utilities.loggers import init_mult -from rex.utilities.cli_dtypes import STR, INT +from rex.utilities.cli_dtypes import STR, INT, STRFLOAT from rex.utilities.hpc import SLURM from rex.utilities.utilities import get_class_properties @@ -147,10 +147,12 @@ def from_config(ctx, config, verbose): @click.option('--grid_cell_size', '-gcs', default=90, type=INT, show_default=True, help=("Length (m) of a side of each grid cell in `excl_fpath`.")) -@click.option('--max_flicker_exclusion_range', '-mfer', default=10_000, - type=INT, show_default=True, +@click.option('--max_flicker_exclusion_range', '-mfer', default="10x", + type=STRFLOAT, show_default=True, help=("Max distance (m) that flicker exclusions will extend in " - "any of the cardinal directions. Note that increasing " + "any of the cardinal directions. Can also be a string " + "like ``'10x'`` (default), which is interpreted as 10 " + "times the turbine rotor diameter. Note that increasing " "this value can lead to drastically instead memory " "requirements. 
This value may be increased slightly in " "order to yield odd exclusion array shapes.")) diff --git a/tests/test_turbine_flicker.py b/tests/test_turbine_flicker.py index 27786d8d4..c78a21e0a 100644 --- a/tests/test_turbine_flicker.py +++ b/tests/test_turbine_flicker.py @@ -155,6 +155,17 @@ def test_turbine_flicker(max_workers): assert np.allclose(baseline, test) +def test_turbine_flicker_bad_max_flicker_exclusion_range_input(): + """ + Test Turbine Flicker with bad input for max_flicker_exclusion_range + """ + with pytest.raises(TypeError) as excinfo: + TurbineFlicker.run(EXCL_H5, RES_H5, BLD_LAYER, HUB_HEIGHT, + ROTOR_DIAMETER, max_flicker_exclusion_range='abc') + + assert "max_flicker_exclusion_range must be numeric" in str(excinfo.value) + + def test_cli(runner): """ Test MeanWindDirections CLI @@ -292,6 +303,19 @@ def test_cli_max_flicker_exclusion_range(runner): ) assert result.exit_code == 0, msg + out_tiff_20d = f"{BLD_LAYER}_{HUB_HEIGHT}hh_{ROTOR_DIAMETER}rd_5d.tiff" + config["out_tiff"] = os.path.join(td, out_tiff_20d) + config["max_flicker_exclusion_range"] = "20x" + config_path = os.path.join(td, 'config.json') + with open(config_path, 'w') as f: + json.dump(config, f) + + result = runner.invoke(main, ['from-config', '-c', config_path]) + msg = 'Failed with error {}'.format( + traceback.print_exception(*result.exc_info) + ) + assert result.exit_code == 0, msg + with ExclusionLayers(EXCL_H5) as f: baseline = f[BASELINE] @@ -307,9 +331,15 @@ def test_cli_max_flicker_exclusion_range(runner): with Geotiff(os.path.join(td, out_tiff)) as f: test2 = f.values[0] + with Geotiff(os.path.join(td, out_tiff_20d)) as f: + test3 = f.values[0] + assert np.allclose(baseline, test) assert np.allclose(baseline, test2) + assert np.allclose(baseline, test3) assert np.allclose(test, test2) + assert np.allclose(test, test3) + assert np.allclose(test2, test3) LOGGERS.clear() From 212ad11f88fb58b3b1dbb42cf17ea17f7798ff70 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Mon, 19 Sep 
2022 12:41:25 -0600 Subject: [PATCH 15/42] Linter fixes --- tests/test_setbacks.py | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/tests/test_setbacks.py b/tests/test_setbacks.py index 5ba170432..ab4b82678 100644 --- a/tests/test_setbacks.py +++ b/tests/test_setbacks.py @@ -727,7 +727,7 @@ def test_merged_setbacks(setbacks_class, regulations_class, features_path, generic_setbacks = setbacks_class(EXCL_H5, regulations, weights_calculation_upscale_factor=sf) generic_layer = generic_setbacks.compute_exclusions(features_path, - max_workers=1) + max_workers=1) with tempfile.TemporaryDirectory() as td: regs_fpath = os.path.basename(regulations_fpath) @@ -1302,11 +1302,4 @@ def execute_pytest(capture='all', flags='-rapP'): if __name__ == '__main__': - # execute_pytest() - runner = CliRunner() - runner.invoke( - main, - ['from-config', - '-c', "/shared-projects/rev/projects/seto/fy22/data/paper_exclusions/pv/setbacks/pv_parcel_50/pv_parcel_50_debug.json", - ] - ) \ No newline at end of file + execute_pytest() From 07ef0d5cd7fca84a158fd3ea790f63070bdc8290 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Mon, 19 Sep 2022 13:28:19 -0600 Subject: [PATCH 16/42] Added `out_layers` option to setbacks in anticipation of the flicker refactor --- reVX/config/setbacks.py | 5 ++ reVX/setbacks/setbacks_cli.py | 24 +++++++-- reVX/utilities/exclusions.py | 94 ++++++++++++++++++++++++++--------- tests/test_setbacks.py | 84 +++++++++++++++++++++++++++++++ 4 files changed, 179 insertions(+), 28 deletions(-) diff --git a/reVX/config/setbacks.py b/reVX/config/setbacks.py index 3b1c62905..6da0271b7 100644 --- a/reVX/config/setbacks.py +++ b/reVX/config/setbacks.py @@ -88,3 +88,8 @@ def hsds(self): def weights_calculation_upscale_factor(self): """Get upscale factor for weights calculation. """ return self.get("weights_calculation_upscale_factor", None) + + @property + def out_layers(self): + """Get out_layers dictionary. 
""" + return self.get("out_layers", None) diff --git a/reVX/setbacks/setbacks_cli.py b/reVX/setbacks/setbacks_cli.py index 79c69c5b1..90a997d06 100644 --- a/reVX/setbacks/setbacks_cli.py +++ b/reVX/setbacks/setbacks_cli.py @@ -11,7 +11,7 @@ from rex.utilities.loggers import init_mult from rex.utilities.cli_dtypes import STR, FLOAT, INT from rex.utilities.hpc import SLURM -from rex.utilities.utilities import get_class_properties +from rex.utilities.utilities import get_class_properties, dict_str_load from reVX.config.setbacks import SetbacksConfig from reVX.setbacks import SETBACKS @@ -108,6 +108,13 @@ def valid_config_keys(): @click.option('--hsds', '-hsds', is_flag=True, help=('Flag to use h5pyd to handle .h5 domain hosted on AWS ' 'behind HSDS')) +@click.option('--out_layers', '-ol', type=STR, default=None, + show_default=True, + help=('String representation of a dictionary mapping feature ' + 'file names (with extension) to names of layers under ' + 'which exclusions should be saved in the "excl_fpath" ' + '.h5 file. If "None" or empty dictionary, no layers are ' + 'saved to the h5 file.')) @click.option('--log_dir', '-log', default=None, type=STR, show_default=True, help='Directory to dump log files. 
Default is out_dir.') @@ -117,7 +124,7 @@ def valid_config_keys(): def local(ctx, excl_fpath, feature_type, features_path, out_dir, hub_height, rotor_diameter, base_setback_dist, regs_fpath, multiplier, weights_calculation_upscale_factor, max_workers, replace, hsds, - log_dir, verbose): + out_layers, log_dir, verbose): """ Compute Setbacks locally """ @@ -147,18 +154,23 @@ def local(ctx, excl_fpath, feature_type, features_path, out_dir, hub_height, '- using max_workers = {}\n' '- replace layer if needed = {}\n' '- weights calculation upscale factor = {}\n' + '- out_layers = {}\n' .format(base_setback_dist, hub_height, rotor_diameter, regs_fpath, multiplier, max_workers, replace, - weights_calculation_upscale_factor)) + weights_calculation_upscale_factor, out_layers)) regulations = select_regulations(base_setback_dist, hub_height, rotor_diameter, regs_fpath, multiplier) setbacks_class = SETBACKS[feature_type] wcuf = weights_calculation_upscale_factor + if isinstance(out_layers, str): + out_layers = dict_str_load(out_layers) + setbacks_class.run(excl_fpath, features_path, out_dir, regulations, weights_calculation_upscale_factor=wcuf, - max_workers=max_workers, replace=replace, hsds=hsds) + max_workers=max_workers, replace=replace, hsds=hsds, + out_layers=out_layers) logger.info('Setbacks computed and written to {}'.format(out_dir)) @@ -205,7 +217,8 @@ def run_local(ctx, config): multiplier=config.multiplier, weights_calculation_upscale_factor=wcuf, max_workers=config.execution_control.max_workers, - replace=config.replace) + replace=config.replace, + out_layers=config.out_layers) def eagle(config): @@ -295,6 +308,7 @@ def get_node_cmd(name, config): '-regs {}'.format(SLURM.s(config.regs_fpath)), '-mult {}'.format(SLURM.s(config.multiplier)), '-wcuf {}'.format(SLURM.s(wcuf)), + '-ol {}'.format(SLURM.s(config.out_layers)), '-mw {}'.format(SLURM.s(config.execution_control.max_workers)), '-log {}'.format(SLURM.s(config.log_directory)), ] diff --git 
a/reVX/utilities/exclusions.py b/reVX/utilities/exclusions.py index 0ad58ef0e..b27f7ad62 100644 --- a/reVX/utilities/exclusions.py +++ b/reVX/utilities/exclusions.py @@ -323,23 +323,42 @@ def _write_exclusions(self, geotiff, exclusions, replace=False): exclusions : ndarray Rasterized array of exclusions. replace : bool, optional - Flag to replace local layer data with arr if layer already - exists in the exclusion .h5 file. By default `False`. + Flag to replace local layer data with arr if file already + exists on disk. By default `False`. """ if os.path.exists(geotiff): - if not replace: - msg = ('{} already exists. To replace it set "replace=True"' - .format(geotiff)) - logger.error(msg) - raise IOError(msg) - else: - msg = ('{} already exists and will be replaced!' - .format(geotiff)) - logger.warning(msg) - warn(msg) + _error_or_warn(geotiff, replace) ExclusionsConverter._write_geotiff(geotiff, self.profile, exclusions) + def _write_layer(self, out_layer, exclusions, replace=False): + """Write exclusions to H5, replace if requested + + Parameters + ---------- + out_layer : str + Name of new exclusion layer to add to h5. + exclusions : ndarray + Rasterized array of exclusions. + replace : bool, optional + Flag to replace local layer data with arr if layer already + exists in the exclusion .h5 file. By default `False`. + """ + with ExclusionLayers(self._excl_fpath) as exc: + layers = exc.layers + + if out_layer in layers: + _error_or_warn(out_layer, replace) + + try: + description = self.description + except AttributeError: + description = None + + ExclusionsConverter._write_layer(self._excl_fpath, out_layer, + self.profile, exclusions, + description=description) + def compute_all_local_exclusions(self, features_fpath, max_workers=None): """Compute local exclusions for all counties either. 
@@ -429,7 +448,7 @@ def _exclusions_computation(self, features): # TODO: Delegate this to class def compute_exclusions(self, features_fpath, max_workers=None, - geotiff=None, replace=False): + out_layer=None, out_tiff=None, replace=False): """ Compute exclusions for all states either in serial or parallel. Existing exclusions are computed if a regulations file was @@ -445,8 +464,13 @@ def compute_exclusions(self, features_fpath, max_workers=None, in serial, if > 1 run in parallel with that many workers, if `None`, run in parallel on all available cores. By default `None`. - geotiff : str, optional + out_layer : str, optional + Name to save rasterized exclusions under in .h5 file. + If `None`, exclusions will not be written to the .h5 file. + By default `None`. + out_tiff : str, optional Path to save geotiff containing rasterized exclusions. + If `None`, exclusions will not be written to a geotiff file. By default `None`. replace : bool, optional Flag to replace geotiff if it already exists. 
@@ -460,9 +484,14 @@ def compute_exclusions(self, features_fpath, max_workers=None, exclusions = self._compute_merged_exclusions(features_fpath, max_workers=max_workers) - if geotiff is not None: - logger.debug('Writing exclusions to {}'.format(geotiff)) - self._write_exclusions(geotiff, exclusions, replace=replace) + if out_layer is not None: + logger.info('Saving exclusion layer to {} as {}' + .format(self._excl_fpath, out_layer)) + self._write_layer(out_layer, exclusions, replace=replace) + + if out_tiff is not None: + logger.debug('Writing exclusions to {}'.format(out_tiff)) + self._write_exclusions(out_tiff, exclusions, replace=replace) return exclusions @@ -517,7 +546,7 @@ def _combine_exclusions(self, existing, additional, cnty_fips): @classmethod def run(cls, excl_fpath, features_path, out_dir, regulations, weights_calculation_upscale_factor=None, max_workers=None, - replace=False, hsds=False): + replace=False, hsds=False, out_layers=None): """ Compute exclusions and write them to a geotiff. If a regulations file is given, compute local exclusions, otherwise compute @@ -587,17 +616,22 @@ def run(cls, excl_fpath, features_path, out_dir, regulations, hsds : bool, optional Boolean flag to use h5pyd to handle .h5 'files' hosted on AWS behind HSDS. By default `False`. + out_layers : dict, optional + Dictionary mapping feature file names (with extension) to + names of layers under which exclusions should be saved in + the `excl_fpath` .h5 file. If `None` or empty dictionary, + no layers are saved to the h5 file. By default `None`. 
""" scale_factor = weights_calculation_upscale_factor exclusions = cls(excl_fpath, regulations=regulations, hsds=hsds, weights_calculation_upscale_factor=scale_factor) features_path = exclusions.get_feature_paths(features_path) + out_layers = out_layers or {} for fpath in features_path: - geotiff = os.path.basename(fpath) - geotiff = ".".join(geotiff.split('.')[:-1] + ['tif']) + fn = os.path.basename(fpath) + geotiff = ".".join(fn.split('.')[:-1] + ['tif']) geotiff = os.path.join(out_dir, geotiff) - if os.path.exists(geotiff) and not replace: msg = ('{} already exists, exclusions will not be re-computed ' 'unless replace=True'.format(geotiff)) @@ -605,7 +639,8 @@ def run(cls, excl_fpath, features_path, out_dir, regulations, else: logger.info("Computing exclusions from {} and saving " "to {}".format(fpath, geotiff)) - exclusions.compute_exclusions(fpath, geotiff=geotiff, + exclusions.compute_exclusions(fpath, out_tiff=geotiff, + out_layer=out_layers.get(fn), max_workers=max_workers, replace=replace) @@ -1454,4 +1489,17 @@ def _county_regulation_value(self, county_regulations): logger.warning(msg) warn(msg) return - return reg \ No newline at end of file + return reg + +def _error_or_warn(name, replace): + """If replace, throw warning, otherwise throw error. """ + if not replace: + msg = ('{} already exists. To replace it set "replace=True"' + .format(name)) + logger.error(msg) + raise IOError(msg) + else: + msg = ('{} already exists and will be replaced!' + .format(name)) + logger.warning(msg) + warn(msg) diff --git a/tests/test_setbacks.py b/tests/test_setbacks.py index ab4b82678..f67ef0442 100644 --- a/tests/test_setbacks.py +++ b/tests/test_setbacks.py @@ -270,6 +270,33 @@ def test_setbacks_no_generic_value(setbacks_class, feature_file): assert np.isclose(out, 0).all() +def test_setbacks_saving_tiff_h5(): + """Test setbacks saves to tiff and h5. 
""" + feature_file = os.path.join(TESTDATADIR, 'setbacks', 'RI_Parcels', + 'Rhode_Island.gpkg') + regs = Regulations(0, regulations_fpath=None, multiplier=1) + with tempfile.TemporaryDirectory() as td: + assert not os.path.exists(os.path.join(td, "Rhode_Island.tif")) + + excl_fpath = os.path.basename(EXCL_H5) + excl_fpath = os.path.join(td, excl_fpath) + shutil.copy(EXCL_H5, excl_fpath) + with ExclusionLayers(excl_fpath) as exc: + assert "ri_parcel_setbacks" not in exc.layers + + ParcelSetbacks.run(excl_fpath, feature_file, td, regs, + out_layers={'Rhode_Island.gpkg': + "ri_parcel_setbacks"}) + + assert os.path.exists(os.path.join(td, "Rhode_Island.tif")) + with Geotiff(os.path.join(td, "Rhode_Island.tif")) as tif: + assert np.isclose(tif.values, 0).all() + + with ExclusionLayers(excl_fpath) as exc: + assert "ri_parcel_setbacks" in exc.layers + assert np.isclose(exc["ri_parcel_setbacks"], 0).all() + + def test_generic_structure(generic_wind_regulations): """ Test generic structures setbacks @@ -1285,6 +1312,63 @@ def test_cli_invalid_inputs(runner): LOGGERS.clear() +def test_cli_saving(runner): + """ + Test CLI saving files. 
+ """ + parcel_path = os.path.join(TESTDATADIR, 'setbacks', 'RI_Parcels', + 'Rhode_Island.gpkg') + with tempfile.TemporaryDirectory() as td: + test_fp = os.path.join(td, 'Rhode_Island.tif') + assert not os.path.exists(test_fp) + + regs_fpath = os.path.basename(PARCEL_REGS_FPATH_VALUE) + regs_fpath = os.path.join(td, regs_fpath) + shutil.copy(PARCEL_REGS_FPATH_VALUE, regs_fpath) + + excl_fpath = os.path.basename(EXCL_H5) + excl_fpath = os.path.join(td, excl_fpath) + shutil.copy(EXCL_H5, excl_fpath) + with ExclusionLayers(excl_fpath) as exc: + assert "ri_parcel_setbacks" not in exc.layers + + config = { + "log_directory": td, + "execution_control": { + "option": "local" + }, + "excl_fpath": excl_fpath, + "feature_type": "parcel", + "features_path": parcel_path, + "log_level": "INFO", + "regs_fpath": regs_fpath, + "replace": True, + "base_setback_dist": BASE_SETBACK_DIST, + "out_layers": { + "Rhode_Island.gpkg": "ri_parcel_setbacks" + } + } + config_path = os.path.join(td, 'config.json') + with open(config_path, 'w') as f: + json.dump(config, f) + + result = runner.invoke(main, ['from-config', + '-c', config_path]) + msg = ('Failed with error {}' + .format(traceback.print_exception(*result.exc_info))) + assert result.exit_code == 0, msg + + assert os.path.exists(test_fp) + with Geotiff(test_fp) as tif: + assert tif.values.sum() == 3 + + with ExclusionLayers(excl_fpath) as exc: + assert "ri_parcel_setbacks" in exc.layers + assert exc["ri_parcel_setbacks"].sum() == 3 + + LOGGERS.clear() + + def execute_pytest(capture='all', flags='-rapP'): """Execute module as pytest with detailed summary report. 
From 93d24b814782749b11b26f12ac15bfa962fc7c94 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Mon, 19 Sep 2022 13:30:47 -0600 Subject: [PATCH 17/42] Flake8 fixes --- tests/test_turbine_flicker.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_turbine_flicker.py b/tests/test_turbine_flicker.py index c78a21e0a..c7371ef91 100644 --- a/tests/test_turbine_flicker.py +++ b/tests/test_turbine_flicker.py @@ -123,7 +123,7 @@ def test_get_building_indices(): assert (buildings[row_idx, col_idx] > 0).all() - +# noqa: E201,E241 def test_invert_shadow_flicker_arr(): """Test inverting the shadow flicker array. """ From fa4b630698e80584a10508e56384665a44092109 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Mon, 19 Sep 2022 13:39:48 -0600 Subject: [PATCH 18/42] Delete irrelevant method and call --- reVX/turbine_flicker/turbine_flicker.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/reVX/turbine_flicker/turbine_flicker.py b/reVX/turbine_flicker/turbine_flicker.py index 90a2bba9f..36b294d6f 100644 --- a/reVX/turbine_flicker/turbine_flicker.py +++ b/reVX/turbine_flicker/turbine_flicker.py @@ -226,14 +226,6 @@ def _preflight_check(self, tm_dset='techmap_wtk'): 'following error:\n{}'.format(e)) raise e - self._set_max_grid_size_for_odd_shaped_arr() - - def _set_max_grid_size_for_odd_shaped_arr(self): - """Set the max_flicker_exclusion_range to multiple of 0.5 grids """ - mult = np.round(self._max_flicker_exclusion_range - / self._grid_cell_size) + 0.5 - self._max_flicker_exclusion_range = mult * self._grid_cell_size - def _get_sc_points(self, tm_dset='techmap_wtk'): """ Get the valid sc points to run turbine flicker for From 99328252352226e2a5a95bb7f11af634a995b77d Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Mon, 19 Sep 2022 14:39:38 -0600 Subject: [PATCH 19/42] Updated order of args in function --- reVX/setbacks/base.py | 10 +++++----- reVX/setbacks/parcel_setbacks.py | 10 +++++----- reVX/utilities/exclusions.py | 16 ++++++++-------- 3 files 
changed, 18 insertions(+), 18 deletions(-) diff --git a/reVX/setbacks/base.py b/reVX/setbacks/base.py index 5b78ab07f..610c03665 100644 --- a/reVX/setbacks/base.py +++ b/reVX/setbacks/base.py @@ -361,7 +361,7 @@ def pre_process_regulations(self, features_fpath): # cnty_feats = setback_features.iloc[list(idx)].copy() # yield self._compute_local_setbacks, cnty_feats, cnty, setback - def compute_local_exclusions(self, features, cnty, regulation_value): + def compute_local_exclusions(self, regulation_value, cnty, features): """Compute local features setbacks. This method will compute the setbacks using a county-specific @@ -371,12 +371,12 @@ def compute_local_exclusions(self, features, cnty, regulation_value): Parameters ---------- - features : geopandas.GeoDataFrame - Features to setback from. + regulation_value : float | int + Setback distance in meters. cnty : geopandas.GeoDataFrame Regulations for a single county. - regulation_value : int - Setback distance in meters. + features : geopandas.GeoDataFrame + Features to setback from. Returns ------- diff --git a/reVX/setbacks/parcel_setbacks.py b/reVX/setbacks/parcel_setbacks.py index 3740c7777..f843ca57a 100644 --- a/reVX/setbacks/parcel_setbacks.py +++ b/reVX/setbacks/parcel_setbacks.py @@ -43,7 +43,7 @@ def compute_generic_exclusions(self, features_fpath): features.buffer(-1 * self._regulations.generic)) return self._rasterizer.rasterize(list(setbacks)) - def compute_local_exclusions(self, features, cnty, regulation_value): + def compute_local_exclusions(self, regulation_value, cnty, features): """Compute local features setbacks. This method will compute the setbacks using a county-specific @@ -53,12 +53,12 @@ def compute_local_exclusions(self, features, cnty, regulation_value): Parameters ---------- - features : geopandas.GeoDataFrame - Features to setback from. + regulation_value : float | int + Setback distance in meters. cnty : geopandas.GeoDataFrame Regulations for a single county. 
- regulation_value : int - Setback distance in meters. + features : geopandas.GeoDataFrame + Features to setback from. Returns ------- diff --git a/reVX/utilities/exclusions.py b/reVX/utilities/exclusions.py index b27f7ad62..e05f50d6e 100644 --- a/reVX/utilities/exclusions.py +++ b/reVX/utilities/exclusions.py @@ -68,7 +68,7 @@ def pre_process_regulations(self, features_fpath): raise NotImplementedError @abstractmethod - def compute_local_exclusions(self, features, cnty, regulation_value): + def compute_local_exclusions(self, regulation_value, cnty, features): """Compute local feature exclusions. This method should compute the exclusions using the information @@ -76,12 +76,12 @@ def compute_local_exclusions(self, features, cnty, regulation_value): Parameters ---------- - features : geopandas.GeoDataFrame - Features used to calculate exclusions from. + regulation_value : float | int + Regulation value for county. cnty : geopandas.GeoDataFrame Regulations for a single county. - regulation_value : int - Regulation value for county. + features : geopandas.GeoDataFrame + Features used to calculate exclusions from. 
Returns ------- @@ -390,7 +390,7 @@ def compute_all_local_exclusions(self, features_fpath, max_workers=None): loggers=loggers) as exe: futures = {} for func, *args in self._exclusions_computation(features): - cnty_feats, cnty, exclusion = args + exclusion, cnty, cnty_feats = args future = exe.submit(func, cnty_feats, cnty, exclusion) futures[future] = cnty['FIPS'].unique() @@ -404,7 +404,7 @@ def compute_all_local_exclusions(self, features_fpath, max_workers=None): logger.info('Computing local exclusions in serial') computation = self._exclusions_computation(features) for i, (func, *args) in enumerate(computation): - cnty_feats, cnty, exclusion = args + exclusion, cnty, cnty_feats = args exclusions = self._combine_exclusions(exclusions, func(*args), cnty['FIPS'].unique()) logger.debug('Computed exclusions for {} of {} counties' @@ -417,7 +417,7 @@ def _exclusions_computation(self, features): for exclusion, cnty in self._regulations: idx = features.sindex.intersection(cnty.total_bounds) cnty_feats = features.iloc[list(idx)].copy() - yield self.compute_local_exclusions, cnty_feats, cnty, exclusion + yield self.compute_local_exclusions, exclusion, cnty, cnty_feats # def _compute_generic_setbacks(self, features_fpath): # """Compute generic setbacks. From aaf0c413bcd9a3a9e1abea948c7804a9b7c501b6 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Mon, 19 Sep 2022 14:45:33 -0600 Subject: [PATCH 20/42] Minor docstring update --- reVX/setbacks/base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/reVX/setbacks/base.py b/reVX/setbacks/base.py index 610c03665..a21e9727c 100644 --- a/reVX/setbacks/base.py +++ b/reVX/setbacks/base.py @@ -380,8 +380,8 @@ def compute_local_exclusions(self, regulation_value, cnty, features): Returns ------- - setbacks : list - List of setback geometries. 
+ setbacks : ndarray + Raster array of setbacks """ logger.debug('- Computing setbacks for county FIPS {}' .format(cnty.iloc[0]['FIPS'])) From 4ae7bd9b7c2b5cfc9c77bfb2a54ddcb5532d23c9 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Mon, 19 Sep 2022 15:39:31 -0600 Subject: [PATCH 21/42] Parse features no longer part of interface --- reVX/setbacks/base.py | 28 ++---- reVX/setbacks/parcel_setbacks.py | 9 +- reVX/utilities/exclusions.py | 151 +++---------------------------- 3 files changed, 28 insertions(+), 160 deletions(-) diff --git a/reVX/setbacks/base.py b/reVX/setbacks/base.py index a21e9727c..5e36bb0f2 100644 --- a/reVX/setbacks/base.py +++ b/reVX/setbacks/base.py @@ -2,22 +2,18 @@ """ Compute setbacks exclusions """ -from abc import ABC, abstractmethod -from concurrent.futures import as_completed +from abc import abstractmethod from warnings import warn from itertools import product -import os import logging import pathlib import numpy as np import geopandas as gpd from rasterio import features -from shapely.geometry import shape -from rex.utilities import SpawnProcessPool, log_mem +from rex.utilities import log_mem from reV.handlers.exclusions import ExclusionLayers from reVX.utilities.exclusions import AbstractBaseExclusionsMerger -from reVX.utilities.utilities import log_versions logger = logging.getLogger(__name__) @@ -354,14 +350,7 @@ def pre_process_regulations(self, features_fpath): logger.debug('Computing setbacks for regulations in {} counties' .format(len(self.regulations_table))) - # def _setback_computation(self, setback_features): - # """Get function and args for setbacks computation. 
""" - # for setback, cnty in self._regulations: - # idx = setback_features.sindex.intersection(cnty.total_bounds) - # cnty_feats = setback_features.iloc[list(idx)].copy() - # yield self._compute_local_setbacks, cnty_feats, cnty, setback - - def compute_local_exclusions(self, regulation_value, cnty, features): + def compute_local_exclusions(self, regulation_value, cnty, features_fpath): """Compute local features setbacks. This method will compute the setbacks using a county-specific @@ -375,8 +364,8 @@ def compute_local_exclusions(self, regulation_value, cnty, features): Setback distance in meters. cnty : geopandas.GeoDataFrame Regulations for a single county. - features : geopandas.GeoDataFrame - Features to setback from. + features_fpath : str + Path to shape file with features to compute exclusions from Returns ------- @@ -385,10 +374,13 @@ def compute_local_exclusions(self, regulation_value, cnty, features): """ logger.debug('- Computing setbacks for county FIPS {}' .format(cnty.iloc[0]['FIPS'])) + features = self.parse_features(features_fpath) + idx = features.sindex.intersection(cnty.total_bounds) + features = features.iloc[list(idx)].copy() log_mem(logger) features = self._feature_filter(features, cnty) - setback = regulation_value - return self._rasterizer.rasterize(list(features.buffer(setback))) + features = list(features.buffer(regulation_value)) + return self._rasterizer.rasterize(features) def compute_generic_exclusions(self, features_fpath): """Compute generic setbacks. 
diff --git a/reVX/setbacks/parcel_setbacks.py b/reVX/setbacks/parcel_setbacks.py index f843ca57a..ab5d589ad 100644 --- a/reVX/setbacks/parcel_setbacks.py +++ b/reVX/setbacks/parcel_setbacks.py @@ -43,7 +43,7 @@ def compute_generic_exclusions(self, features_fpath): features.buffer(-1 * self._regulations.generic)) return self._rasterizer.rasterize(list(setbacks)) - def compute_local_exclusions(self, regulation_value, cnty, features): + def compute_local_exclusions(self, regulation_value, cnty, features_fpath): """Compute local features setbacks. This method will compute the setbacks using a county-specific @@ -57,8 +57,8 @@ def compute_local_exclusions(self, regulation_value, cnty, features): Setback distance in meters. cnty : geopandas.GeoDataFrame Regulations for a single county. - features : geopandas.GeoDataFrame - Features to setback from. + features_fpath : str + Path to shape file with features to compute exclusions from Returns ------- @@ -67,6 +67,9 @@ def compute_local_exclusions(self, regulation_value, cnty, features): """ logger.debug('- Computing setbacks for county FIPS {}' .format(cnty.iloc[0]['FIPS'])) + features = self.parse_features(features_fpath) + idx = features.sindex.intersection(cnty.total_bounds) + features = features.iloc[list(idx)].copy() log_mem(logger) features = self._feature_filter(features, cnty) setback = regulation_value diff --git a/reVX/utilities/exclusions.py b/reVX/utilities/exclusions.py index e05f50d6e..e9bcc9140 100644 --- a/reVX/utilities/exclusions.py +++ b/reVX/utilities/exclusions.py @@ -36,23 +36,6 @@ def profile(self): """dict: Geotiff profile. """ raise NotImplementedError - @abstractmethod - def parse_features(self, features_fpath): - """Parse features the feature file. - - Parameters - ---------- - features_fpath : str - Path to file containing features to compute exclusions from. - - Returns - ------- - `geopandas.GeoDataFrame` - Geometries of features to compute exclusions from in - exclusion coordinate system. 
- """ - raise NotImplementedError - @abstractmethod def pre_process_regulations(self, features_fpath): """Reduce regulations to correct state and features. @@ -68,7 +51,7 @@ def pre_process_regulations(self, features_fpath): raise NotImplementedError @abstractmethod - def compute_local_exclusions(self, regulation_value, cnty, features): + def compute_local_exclusions(self, regulation_value, cnty, features_fpath): """Compute local feature exclusions. This method should compute the exclusions using the information @@ -80,8 +63,9 @@ def compute_local_exclusions(self, regulation_value, cnty, features): Regulation value for county. cnty : geopandas.GeoDataFrame Regulations for a single county. - features : geopandas.GeoDataFrame - Features used to calculate exclusions from. + features_fpath : str + Path to shape file with features to compute exclusions from + Returns ------- @@ -236,82 +220,6 @@ def regulations_table(self): def regulations_table(self, regulations_table): self._regulations.regulations = regulations_table - # def _parse_features(self, features_fpath): - # """Method to parse features. - - # Parameters - # ---------- - # features_fpath : str - # Path to file containing features to setback from. - - # Returns - # ------- - # `geopandas.GeoDataFrame` - # Geometries of features to setback from in exclusion - # coordinate system. - # """ - # return gpd.read_file(features_fpath).to_crs( - # crs=self._rasterizer.profile['crs']) - - # def _pre_process_regulations(self, features_fpath): - # """Reduce regulations to state corresponding to features_fpath. - - # Parameters - # ---------- - # features_fpath : str - # Path to shape file with features to compute setbacks from. - # """ - # mask = self._regulation_table_mask(features_fpath) - # if not mask.any(): - # msg = "Found no local regulations!" 
- # logger.warning(msg) - # warn(msg) - - # self.regulations_table = (self.regulations_table[mask] - # .reset_index(drop=True)) - # logger.debug('Computing setbacks for regulations in {} counties' - # .format(len(self.regulations_table))) - - # pylint: disable=unused-argument - # @abstractmethod - # def _regulation_table_mask(self, features_fpath): - # """Return the regulation table mask for setback feature. """ - # raise NotImplementedError - - # def _compute_local_setbacks(self, features, cnty, setback): - # """Compute local features setbacks. - - # This method will compute the setbacks using a county-specific - # regulations file that specifies either a static setback or a - # multiplier value that will be used along with the base setback - # distance to compute the setback. - - # Parameters - # ---------- - # features : geopandas.GeoDataFrame - # Features to setback from. - # cnty : geopandas.GeoDataFrame - # Regulations for a single county. - # setback : int - # Setback distance in meters. - - # Returns - # ------- - # setbacks : list - # List of setback geometries. - # """ - # logger.debug('- Computing setbacks for county FIPS {}' - # .format(cnty.iloc[0]['FIPS'])) - # log_mem(logger) - # features = self._feature_filter(features, cnty) - # return list(features.buffer(setback)) - # TODO: Delegate this to class - - # @staticmethod - # def _feature_filter(features, cnty): - # """Filter the features given a county.""" - # return features_with_centroid_in_county(features, cnty) - def _write_exclusions(self, geotiff, exclusions, replace=False): """ Write exclusions to geotiff, replace if requested @@ -378,7 +286,6 @@ def compute_all_local_exclusions(self, features_fpath, max_workers=None): Raster array of exclusions. 
""" exclusions = None - features = self.parse_features(features_fpath) max_workers = max_workers or os.cpu_count() log_mem(logger) @@ -389,9 +296,9 @@ def compute_all_local_exclusions(self, features_fpath, max_workers=None): with SpawnProcessPool(max_workers=max_workers, loggers=loggers) as exe: futures = {} - for func, *args in self._exclusions_computation(features): - exclusion, cnty, cnty_feats = args - future = exe.submit(func, cnty_feats, cnty, exclusion) + for exclusion, cnty in self._regulations: + future = exe.submit(self.compute_local_exclusions, + exclusion, cnty, features_fpath) futures[future] = cnty['FIPS'].unique() for i, future in enumerate(as_completed(futures)): @@ -402,51 +309,17 @@ def compute_all_local_exclusions(self, features_fpath, max_workers=None): .format((i + 1), len(self.regulations_table))) else: logger.info('Computing local exclusions in serial') - computation = self._exclusions_computation(features) - for i, (func, *args) in enumerate(computation): - exclusion, cnty, cnty_feats = args - exclusions = self._combine_exclusions(exclusions, func(*args), + for i, (exclusion, cnty) in enumerate(self._regulations): + local_exclusions = self.compute_local_exclusions( + exclusion, cnty, features_fpath) + exclusions = self._combine_exclusions(exclusions, + local_exclusions, cnty['FIPS'].unique()) logger.debug('Computed exclusions for {} of {} counties' .format((i + 1), len(self.regulations_table))) return exclusions - def _exclusions_computation(self, features): - """Get function and args for exclusions computation. """ - for exclusion, cnty in self._regulations: - idx = features.sindex.intersection(cnty.total_bounds) - cnty_feats = features.iloc[list(idx)].copy() - yield self.compute_local_exclusions, exclusion, cnty, cnty_feats - - # def _compute_generic_setbacks(self, features_fpath): - # """Compute generic setbacks. - - # This method will compute the setbacks using a generic setback - # of `base_setback_dist * multiplier`. 
- - # Parameters - # ---------- - # features_fpath : str - # Path to shape file with features to compute setbacks from. - - # Returns - # ------- - # setbacks : ndarray - # Raster array of setbacks - # """ - # logger.info('Computing generic setbacks') - # if np.isclose(self._regulations.generic_setback, 0): - # return self._rasterizer.rasterize(shapes=None) - - # setback_features = self._parse_features(features_fpath) - # setbacks = list(setback_features.buffer( - # self._regulations.generic_setback - # )) - - # return self._rasterizer.rasterize(setbacks) - # TODO: Delegate this to class - def compute_exclusions(self, features_fpath, max_workers=None, out_layer=None, out_tiff=None, replace=False): """ From 1701dc3baeb8ed88ed56587630f805b6bc826595 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Mon, 19 Sep 2022 17:06:48 -0600 Subject: [PATCH 22/42] Add hsds arg --- reVX/setbacks/base.py | 2 +- reVX/utilities/exclusions.py | 10 +++++++--- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/reVX/setbacks/base.py b/reVX/setbacks/base.py index 5e36bb0f2..8f00dbd2d 100644 --- a/reVX/setbacks/base.py +++ b/reVX/setbacks/base.py @@ -304,7 +304,7 @@ def __init__(self, excl_fpath, regulations, hsds=False, """ self._rasterizer = Rasterizer(excl_fpath, weights_calculation_upscale_factor, hsds) - super().__init__(excl_fpath, regulations) + super().__init__(excl_fpath, regulations, hsds) def __repr__(self): msg = "{} for {}".format(self.__class__.__name__, self._excl_fpath) diff --git a/reVX/utilities/exclusions.py b/reVX/utilities/exclusions.py index e9bcc9140..f20eb5da2 100644 --- a/reVX/utilities/exclusions.py +++ b/reVX/utilities/exclusions.py @@ -129,7 +129,7 @@ class AbstractBaseExclusionsMerger(AbstractExclusionCalculatorInterface): Create exclusions layers for exclusions """ - def __init__(self, excl_fpath, regulations): + def __init__(self, excl_fpath, regulations, hsds=False): """ Parameters ---------- @@ -139,11 +139,15 @@ def __init__(self, excl_fpath, 
regulations): regulations : `Regulations` A `Regulations` object used to extract exclusion regulation values. + hsds : bool, optional + Boolean flag to use h5pyd to handle .h5 'files' hosted on + AWS behind HSDS. By default `False`. """ log_versions(logger) self._excl_fpath = excl_fpath self._regulations = regulations - with ExclusionLayers(self._excl_fpath) as exc: + self._hsds = hsds + with ExclusionLayers(self._excl_fpath, hsds=hsds) as exc: self._fips = exc['cnty_fips'] self._cnty_fips_profile = exc.get_layer_profile('cnty_fips') self._preflight_check() @@ -252,7 +256,7 @@ def _write_layer(self, out_layer, exclusions, replace=False): Flag to replace local layer data with arr if layer already exists in the exclusion .h5 file. By default `False`. """ - with ExclusionLayers(self._excl_fpath) as exc: + with ExclusionLayers(self._excl_fpath, hsds=self._hsds) as exc: layers = exc.layers if out_layer in layers: From e780ad9666b48591e9afcf02c1085d9358234518 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Mon, 19 Sep 2022 18:36:41 -0600 Subject: [PATCH 23/42] Use kwargs to initiate class --- reVX/utilities/exclusions.py | 51 +++++++----------------------------- 1 file changed, 10 insertions(+), 41 deletions(-) diff --git a/reVX/utilities/exclusions.py b/reVX/utilities/exclusions.py index f20eb5da2..d3082c9f9 100644 --- a/reVX/utilities/exclusions.py +++ b/reVX/utilities/exclusions.py @@ -422,8 +422,8 @@ def _combine_exclusions(self, existing, additional, cnty_fips): @classmethod def run(cls, excl_fpath, features_path, out_dir, regulations, - weights_calculation_upscale_factor=None, max_workers=None, - replace=False, hsds=False, out_layers=None): + max_workers=None, replace=False, out_layers=None, hsds=False, + **kwargs): """ Compute exclusions and write them to a geotiff. 
If a regulations file is given, compute local exclusions, otherwise compute @@ -451,37 +451,6 @@ def run(cls, excl_fpath, features_path, out_dir, regulations, regulations : `Regulations` A `Regulations` object used to extract exclusion regulation distances. - weights_calculation_upscale_factor : int, optional - If this value is an int > 1, the output will be a layer with - **inclusion** weight values instead of exclusion booleans. - For example, a cell that was previously excluded with a - a boolean mask (value of 1) may instead be converted to an - inclusion weight value of 0.75, meaning that 75% of the area - corresponding to that point should be included (i.e. the - exclusion feature only intersected a small portion - 25% - - of the cell). This percentage inclusion value is calculated - by upscaling the output array using this input value, - rasterizing the exclusion features onto it, and counting the - number of resulting sub-cells excluded by the feature. For - example, setting the value to `3` would split each output - cell into nine sub-cells - 3 divisions in each dimension. - After the feature is rasterized on this high-resolution - sub-grid, the area of the non-excluded sub-cells is totaled - and divided by the area of the original cell to obtain the - final inclusion percentage. Therefore, a larger upscale - factor results in more accurate percentage values. However, - this process is memory intensive and scales quadratically - with the upscale factor. A good way to estimate your minimum - memory requirement is to use the following formula: - - .. math:: memory (GB) = s_0 * s_1 * ((sf^2) * 2 + 4) / 1073741824, - - where :math:`s_0` and :math:`s_1` are the dimensions (shape) - of your exclusion layer and :math:`sf` is the scale factor - (be sure to add several GB for any other overhead required - by the rest of the process). If `None` (or a value <= 1), - this process is skipped and the output is a boolean - exclusion mask. By default `None`. 
max_workers : int, optional Number of workers to use for exclusion computation, if 1 run in serial, if > 1 run in parallel with that many workers, @@ -490,22 +459,22 @@ def run(cls, excl_fpath, features_path, out_dir, regulations, replace : bool, optional Flag to replace geotiff if it already exists. By default `False`. - hsds : bool, optional - Boolean flag to use h5pyd to handle .h5 'files' hosted on - AWS behind HSDS. By default `False`. out_layers : dict, optional Dictionary mapping feature file names (with extension) to names of layers under which exclusions should be saved in the `excl_fpath` .h5 file. If `None` or empty dictionary, no layers are saved to the h5 file. By default `None`. + hsds : bool, optional + Boolean flag to use h5pyd to handle .h5 'files' hosted on + AWS behind HSDS. By default `False`. + **kwargs + Keyword args to exclusions calculator class. """ - scale_factor = weights_calculation_upscale_factor - exclusions = cls(excl_fpath, regulations=regulations, hsds=hsds, - weights_calculation_upscale_factor=scale_factor) + exclusions = cls(excl_fpath=excl_fpath, regulations=regulations, + hsds=hsds, **kwargs) - features_path = exclusions.get_feature_paths(features_path) out_layers = out_layers or {} - for fpath in features_path: + for fpath in exclusions.get_feature_paths(features_path): fn = os.path.basename(fpath) geotiff = ".".join(fn.split('.')[:-1] + ['tif']) geotiff = os.path.join(out_dir, geotiff) From 000057f143decb2285534f6681c2b3de0b48ce00 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Tue, 20 Sep 2022 09:54:25 -0600 Subject: [PATCH 24/42] Linter updates --- reVX/utilities/exclusions.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/reVX/utilities/exclusions.py b/reVX/utilities/exclusions.py index d3082c9f9..f29a14ac1 100644 --- a/reVX/utilities/exclusions.py +++ b/reVX/utilities/exclusions.py @@ -1337,15 +1337,16 @@ def _county_regulation_value(self, county_regulations): return return reg + def 
_error_or_warn(name, replace): """If replace, throw warning, otherwise throw error. """ if not replace: msg = ('{} already exists. To replace it set "replace=True"' - .format(name)) + .format(name)) logger.error(msg) raise IOError(msg) else: msg = ('{} already exists and will be replaced!' - .format(name)) + .format(name)) logger.warning(msg) warn(msg) From 45f2bb29bcac5b44947d092cd0def84edfa977e3 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Tue, 20 Sep 2022 11:04:29 -0600 Subject: [PATCH 25/42] fips no longer required for generic run --- reVX/setbacks/base.py | 4 ++-- reVX/utilities/exclusions.py | 23 ++++++++++++----------- 2 files changed, 14 insertions(+), 13 deletions(-) diff --git a/reVX/setbacks/base.py b/reVX/setbacks/base.py index 8f00dbd2d..d422fc13b 100644 --- a/reVX/setbacks/base.py +++ b/reVX/setbacks/base.py @@ -345,8 +345,8 @@ def pre_process_regulations(self, features_fpath): logger.warning(msg) warn(msg) - self.regulations_table = (self.regulations_table[mask] - .reset_index(drop=True)) + self._regulations.regulations = (self.regulations_table[mask] + .reset_index(drop=True)) logger.debug('Computing setbacks for regulations in {} counties' .format(len(self.regulations_table))) diff --git a/reVX/utilities/exclusions.py b/reVX/utilities/exclusions.py index f29a14ac1..31ca27f4a 100644 --- a/reVX/utilities/exclusions.py +++ b/reVX/utilities/exclusions.py @@ -41,7 +41,7 @@ def pre_process_regulations(self, features_fpath): """Reduce regulations to correct state and features. When implementing this method, make sure to update - `self.regulations_table`. + `self._regulations.regulations`. 
Parameters ---------- @@ -147,16 +147,14 @@ def __init__(self, excl_fpath, regulations, hsds=False): self._excl_fpath = excl_fpath self._regulations = regulations self._hsds = hsds - with ExclusionLayers(self._excl_fpath, hsds=hsds) as exc: - self._fips = exc['cnty_fips'] - self._cnty_fips_profile = exc.get_layer_profile('cnty_fips') - self._preflight_check() + self._fips = None + self._process_regulations(regulations.regulations) def __repr__(self): msg = "{} for {}".format(self.__class__.__name__, self._excl_fpath) return msg - def _preflight_check(self): + def _process_regulations(self, regulations_df): """Parse the county regulations. Parse regulations, combine with county geometries from @@ -175,10 +173,13 @@ def _preflight_check(self): with county geometries, use for intersecting with exclusion features. """ - if self.regulations_table is None: + if regulations_df is None: return - regulations_df = self.regulations_table + with ExclusionLayers(self._excl_fpath, hsds=self._hsds) as exc: + self._fips = exc['cnty_fips'] + cnty_fips_profile = exc.get_layer_profile('cnty_fips') + if 'FIPS' not in regulations_df: msg = ('Regulations does not have county FIPS! 
Please add a ' '"FIPS" columns with the unique county FIPS values.') @@ -194,7 +195,7 @@ def _preflight_check(self): logger.info('Merging county geometries w/ local regulations') s = features.shapes( self._fips.astype(np.int32), - transform=self._cnty_fips_profile['transform'] + transform=cnty_fips_profile['transform'] ) for p, v in s: v = int(v) @@ -208,7 +209,7 @@ def _preflight_check(self): ) regulations_df = regulations_df.reset_index() regulations_df = regulations_df.to_crs(crs=self.profile['crs']) - self.regulations_table = regulations_df + self._regulations.regulations = regulations_df @property def regulations_table(self): @@ -222,7 +223,7 @@ def regulations_table(self): @regulations_table.setter def regulations_table(self, regulations_table): - self._regulations.regulations = regulations_table + self._process_regulations(regulations_table) def _write_exclusions(self, geotiff, exclusions, replace=False): """ From de3e2959bdbf5b3fc824828e72bbfafde2cb3d3c Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Tue, 20 Sep 2022 14:58:53 -0600 Subject: [PATCH 26/42] (WIP) All tests passing (WIP) --- reVX/config/turbine_flicker.py | 25 +- reVX/turbine_flicker/turbine_flicker.py | 664 +++++++++++++------- reVX/turbine_flicker/turbine_flicker_cli.py | 98 ++- tests/test_turbine_flicker.py | 58 +- 4 files changed, 553 insertions(+), 292 deletions(-) diff --git a/reVX/config/turbine_flicker.py b/reVX/config/turbine_flicker.py index d425cd4c0..6b3afce25 100644 --- a/reVX/config/turbine_flicker.py +++ b/reVX/config/turbine_flicker.py @@ -24,9 +24,10 @@ def __init__(self, config): self._default_tm_dset = 'techmap_wtk' self._default_resolution = 128 self._default_grid_cell_size = 90 - self._default_max_flicker_exclusion_range = 10_000 + self._default_max_flicker_exclusion_range = "10x" self._default_building_threshold = 0 self._default_flicker_threshold = 30 + self._default_hsds_flag = False @property def excl_fpath(self): @@ -38,6 +39,11 @@ def res_fpath(self): """Get the 
resource .h5 file path (required).""" return self['res_fpath'] + @property + def regs_fpath(self): + """Get regulations .csv path""" + return self.get('regs_fpath', None) + @property def building_layer(self): """Get the building layer name.""" @@ -105,7 +111,18 @@ def out_layer(self): """ return self.get('out_layer', None) + # @property + # def out_tiff(self): + # """str: Path to output tiff file where exclusions should be saved. """ + # out_tiff = self.get('out_tiff', None) + # return self.get('out_tiff', None) + + @property + def replace(self): + """Get replace flag""" + return self.get('replace', False) + @property - def out_tiff(self): - """str: Path to output tiff file where exclusions should be saved. """ - return self.get('out_tiff', None) + def hsds(self): + """Get hsds flag""" + return self.get('hsds', self._default_hsds_flag) diff --git a/reVX/turbine_flicker/turbine_flicker.py b/reVX/turbine_flicker/turbine_flicker.py index 36b294d6f..cca2ef4b6 100644 --- a/reVX/turbine_flicker/turbine_flicker.py +++ b/reVX/turbine_flicker/turbine_flicker.py @@ -11,8 +11,10 @@ from reV.handlers.exclusions import ExclusionLayers from reV.supply_curve.extent import SupplyCurveExtent from reV.supply_curve.tech_mapping import TechMapping +from reVX.setbacks.regulations import WindRegulations from reVX.wind_dirs.mean_wind_dirs_point import MeanWindDirectionsPoint -from reVX.utilities import ExclusionsConverter +from reVX.utilities.exclusions import (ExclusionsConverter, + AbstractBaseExclusionsMerger) from rex.resource_extraction.resource_extraction import WindX from rex.utilities.execution import SpawnProcessPool from rex.utilities.loggers import log_mem @@ -20,29 +22,95 @@ logger = logging.getLogger(__name__) -class TurbineFlicker: +class FlickerRegulations(WindRegulations): + """Shadow flicker regulation values. 
""" + + def __init__(self, hub_height, rotor_diameter, flicker_threshold=30, + regulations_fpath=None): + """ + Parameters + ---------- + hub_height : float | int + Turbine hub height (m). + rotor_diameter : float | int + Turbine rotor diameter (m). + flicker_threshold : float | int, optional + Maximum number of allowable flicker hours per year. + By default, `30`. + regulations_fpath : str, optional + Path to regulations .csv or .gpkg file. At a minimum, this + file must contain the following columns: `Feature Type` + which labels the type of setback that each row represents, + `Value Type`, which specifies wether the value is a + multiplier or static height, `Value`, which specifies the + numeric value of the setback or multiplier, and `FIPS`, + which specifies a unique 5-digit code for each county (this + can be an integer - no leading zeros required). Valid + options for the `Value Type` are: + - "Hrs/Year" + If this input is `None`, a generic setback of + `max_tip_height * multiplier` is used. By default `None`. + """ + super().__init__(hub_height=hub_height, rotor_diameter=rotor_diameter, + regulations_fpath=regulations_fpath, + multiplier=1) + self._base_setback_dist = flicker_threshold + + @property + def flicker_threshold(self): + """ + Maximum number of allowable flicker hours per year. + + Returns + ------- + float + """ + return self._base_setback_dist + + def _county_regulation_setback(self, county_regulations): + """Retrieve county regulation setback. 
""" + setback_type = county_regulations["Value Type"].strip() + setback = float(county_regulations["Value"]) + if setback_type.lower() != "hrs/year": + msg = ('Cannot create setback for {}, expecting ' + '"Hrs/Year", but got {!r}' + .format(county_regulations["County"], setback_type)) + logger.warning(msg) + warn(msg) + return + return setback + + +class TurbineFlicker(AbstractBaseExclusionsMerger): """ Class to compute turbine shadow flicker and exclude sites that will cause excessive flicker on building """ STEPS_PER_HOUR = 1 - def __init__(self, excl_fpath, res_fpath, building_layer, - resolution=640, grid_cell_size=90, - max_flicker_exclusion_range="10x", - tm_dset='techmap_wtk'): + def __init__(self, excl_fpath, res_fpath, building_layer, regulations, + building_threshold=0, resolution=640, grid_cell_size=90, + max_flicker_exclusion_range="10x", tm_dset='techmap_wtk', + hsds=False): """ Parameters ---------- excl_fpath : str - Filepath to exclusions h5 file. File must contain "building_layer" - and "tm_dset". + Filepath to exclusions h5 file. File must contain + "building_layer" and "tm_dset". res_fpath : str Filepath to wind resource .h5 file containing hourly wind direction data building_layer : str - Exclusion layer containing buildings from which turbine flicker - exclusions will be computed. + Exclusion layer containing buildings from which turbine + flicker exclusions will be computed. + regulations : `FlickerRegulations` + A `FlickerRegulations` object used to shadow flicker + regulation values. + building_threshold : float, optional + Threshold for exclusion layer values to identify pixels with + buildings, values are % of pixel containing a building. By + default, `0`. 
resolution : int, optional SC resolution, must be input in combination with gid, by default 640 @@ -59,24 +127,110 @@ def __init__(self, excl_fpath, res_fpath, building_layer, tm_dset : str, optional Dataset / layer name for wind toolkit techmap, by default 'techmap_wtk' + hsds : bool, optional + Boolean flag to use h5pyd to handle .h5 'files' hosted on + AWS behind HSDS. By default `False`. """ - self._excl_h5 = excl_fpath + super().__init__(excl_fpath, regulations, hsds) self._res_h5 = res_fpath self._bld_layer = building_layer self._res = resolution + self._building_threshold = building_threshold self._grid_cell_size = grid_cell_size - self._max_flicker_exclusion_range = max_flicker_exclusion_range - self._preflight_check(tm_dset=tm_dset) + self._max_flicker_exclusion_range = ( + self._parse_max_flicker_exclusion_range( + max_flicker_exclusion_range)) + self._flicker_preflight_check(tm_dset=tm_dset) self._sc_points = self._get_sc_points(tm_dset=tm_dset) - with ExclusionLayers(excl_fpath) as f: - self.profile = f.profile + self._fips_to_gid = {} + with ExclusionLayers(excl_fpath, hsds=hsds) as f: + self._profile = f.profile self._exclusion_shape = f.shape def __repr__(self): msg = "{} from {}".format(self.__class__.__name__, self._bld_layer) return msg - def _compute_shadow_flicker(self, lat, lon, rotor_diameter, wind_dir): + def _parse_max_flicker_exclusion_range(self, excl_range): + """Convert max_flicker_exclusion_range to float if necessary. 
""" + if isinstance(excl_range, str) and excl_range.endswith('x'): + rd = self._regulations.rotor_diameter + return float(excl_range.strip('x')) * rd + + if not isinstance(excl_range, (int, float)): + try: + excl_range = float(excl_range) + except Exception as e: + msg = ('max_flicker_exclusion_range must be numeric but ' + 'received: {}, {}'.format(excl_range, type(excl_range))) + logger.error(msg) + raise TypeError(msg) from e + + return excl_range + + def _flicker_preflight_check(self, tm_dset='techmap_wtk'): + """ + Check to ensure building_layer and tm_dset are in exclusion .h5 file + + Parameters + ---------- + tm_dset : str, optional + Dataset / layer name for wind toolkit techmap, + by default 'techmap_wtk' + """ + with ExclusionLayers(self._excl_fpath, hsds=self._hsds) as f: + layers = f.layers + + # TODO: Make this more flexible to accept tiff + if self._bld_layer not in layers: + msg = ("{} is not available in {}" + .format(self._bld_layer, self._excl_fpath)) + logger.error(msg) + raise RuntimeError(msg) + + if tm_dset not in layers: + logger.warning('Could not find techmap "{t}" in {e}. ' + 'Creating {t} using reV TechMapping' + .format(t=tm_dset, e=self._excl_fpath)) + try: + TechMapping.run(self._excl_fpath, self._res_h5, + dset=tm_dset) + except Exception as e: + logger.exception('TechMapping process failed. 
Received the ' + 'following error:\n{}'.format(e)) + raise e + + def _get_sc_points(self, tm_dset='techmap_wtk'): + """ + Get the valid sc points to run turbine flicker for + + Parameters + ---------- + tm_dset : str, optional + Dataset / layer name for wind toolkit techmap, by default + 'techmap_wtk' + + Returns + ------- + points : pandas.DataFrame + DataFrame of valid sc point gids with their latitude and longitude + coordinates and nearest resource gid + """ + with SupplyCurveExtent(self._excl_fpath, resolution=self._res) as sc: + points = sc.points + points['latitude'] = sc.latitude + points['longitude'] = sc.longitude + gids = sc.valid_sc_points(tm_dset) + points = points.loc[gids] + + with WindX(self._res_h5) as f: + res_gids = f.lat_lon_gid(points[['latitude', 'longitude']].values, + check_lat_lon=False) + points['res_gid'] = res_gids + + return points + + def _compute_shadow_flicker(self, lat, lon, wind_dir): """ Compute shadow flicker for given location @@ -86,8 +240,6 @@ def _compute_shadow_flicker(self, lat, lon, rotor_diameter, wind_dir): Latitude coordinate of turbine. lon : float Longitude coordinate of turbine. - rotor_diameter : float - Turbine rotor diameter (m). wind_dir : ndarray Time-series of wind direction for turbine. 
@@ -100,8 +252,9 @@ def _compute_shadow_flicker(self, lat, lon, rotor_diameter, wind_dir): # Import HOPP dynamically so its not a requirement from hybrid.flicker.flicker_mismatch_grid import FlickerMismatch - self._set_max_grid_size_for_odd_shaped_arr(rotor_diameter) - mult = self._max_flicker_exclusion_range / rotor_diameter + rd = self._regulations.rotor_diameter + self._set_max_grid_size_for_odd_shaped_arr() + mult = self._max_flicker_exclusion_range / rd FlickerMismatch.diam_mult_nwe = mult FlickerMismatch.diam_mult_s = mult FlickerMismatch.steps_per_hour = self.STEPS_PER_HOUR @@ -110,7 +263,7 @@ def _compute_shadow_flicker(self, lat, lon, rotor_diameter, wind_dir): assert len(wind_dir) == 8760 shadow_flicker = FlickerMismatch(lat, lon, - blade_length=rotor_diameter / 2, + blade_length=rd / 2, angles_per_step=None, wind_dir=wind_dir, gridcell_height=self._grid_cell_size, @@ -121,31 +274,13 @@ def _compute_shadow_flicker(self, lat, lon, rotor_diameter, wind_dir): return shadow_flicker - def _set_max_grid_size_for_odd_shaped_arr(self, rotor_diameter): + def _set_max_grid_size_for_odd_shaped_arr(self): """Set the max_flicker_exclusion_range to multiple of 0.5 grids """ - excl_range = self._parse_max_flicker_exclusion_rang(rotor_diameter) - mult = np.round(excl_range / self._grid_cell_size) + 0.5 + mult = np.round(self._max_flicker_exclusion_range + / self._grid_cell_size) + 0.5 self._max_flicker_exclusion_range = mult * self._grid_cell_size - def _parse_max_flicker_exclusion_rang(self, rotor_diameter): - """Convert max_flicker_exclusion_range to float if necessary. 
""" - excl_range = self._max_flicker_exclusion_range - if isinstance(excl_range, str) and excl_range.endswith('x'): - return float(excl_range.strip('x')) * rotor_diameter - - if not isinstance(excl_range, (int, float)): - try: - excl_range = float(excl_range) - except Exception as e: - msg = ('max_flicker_exclusion_range must be numeric but ' - 'received: {}, {}'.format(excl_range, type(excl_range))) - logger.error(msg) - raise TypeError(msg) from e - - return excl_range - - def _exclude_turbine_flicker(self, point, res_fpath, hub_height, - rotor_diameter, flicker_threshold=30): + def _exclude_turbine_flicker(self, point, res_fpath, flicker_threshold): """ Exclude all pixels that will cause flicker exceeding the "flicker_threshold" on buildings that exist within @@ -163,10 +298,6 @@ def _exclude_turbine_flicker(self, point, res_fpath, hub_height, res_fpath : str Filepath to wind resource .h5 file containing hourly wind direction data - hub_height : int - Hub-height in meters to compute turbine shadow flicker. - rotor_diameter : int - Rotor diameter in meters to compute shadow flicker. 
flicker_threshold : int, optional Maximum number of allowable flicker hours, by default 30 @@ -178,7 +309,7 @@ def _exclude_turbine_flicker(self, point, res_fpath, hub_height, """ with WindX(res_fpath, log_vers=False) as f: - dset = 'winddirection_{}m'.format(hub_height) + dset = 'winddirection_{}m'.format(self._regulations.hub_height) wind_dir = f[dset, :, int(point['res_gid'])] # pylint: disable=unsubscriptable-object @@ -187,77 +318,16 @@ def _exclude_turbine_flicker(self, point, res_fpath, hub_height, shadow_flicker = self._compute_shadow_flicker(point['latitude'], point['longitude'], - rotor_diameter, wind_dir) flicker_shifts = _get_flicker_excl_shifts( - shadow_flicker, flicker_threshold=flicker_threshold) + shadow_flicker,flicker_threshold=flicker_threshold) return flicker_shifts - def _preflight_check(self, tm_dset='techmap_wtk'): - """ - Check to ensure building_layer and tm_dset are in exclusion .h5 file - - Parameters - ---------- - tm_dset : str, optional - Dataset / layer name for wind toolkit techmap, - by default 'techmap_wtk' - """ - with ExclusionLayers(self._excl_h5) as f: - layers = f.layers - - if self._bld_layer not in layers: - msg = ("{} is not available in {}" - .format(self._bld_layer, self._excl_h5)) - logger.error(msg) - raise RuntimeError(msg) - - if tm_dset not in layers: - logger.warning('Could not find techmap "{t}" in {e}. ' - 'Creating {t} using reV TechMapping' - .format(t=tm_dset, e=self._excl_h5)) - try: - TechMapping.run(self._excl_h5, self._res_h5, - dset=tm_dset) - except Exception as e: - logger.exception('TechMapping process failed. 
Received the ' - 'following error:\n{}'.format(e)) - raise e - - def _get_sc_points(self, tm_dset='techmap_wtk'): - """ - Get the valid sc points to run turbine flicker for - - Parameters - ---------- - tm_dset : str, optional - [description], by default 'techmap_wtk' - - Returns - ------- - points : pandas.DataFrame - DataFrame of valid sc point gids with their latitude and longitude - coordinates and nearest resource gid - """ - with SupplyCurveExtent(self._excl_h5, resolution=self._res) as sc: - points = sc.points - points['latitude'] = sc.latitude - points['longitude'] = sc.longitude - gids = sc.valid_sc_points(tm_dset) - points = points.loc[gids] - - with WindX(self._res_h5) as f: - res_gids = f.lat_lon_gid(points[['latitude', 'longitude']].values, - check_lat_lon=False) - points['res_gid'] = res_gids - - return points - - def compute_exclusions(self, hub_height, rotor_diameter, - building_threshold=0, flicker_threshold=30, - max_workers=None, out_layer=None, out_tiff=None): + def compute_flicker_exclusions(self, flicker_threshold=30, fips=None, + max_workers=None, out_layer=None, + out_tiff=None): """Compute turbine flicker exclusions. Exclude all pixels that will cause flicker exceeding the @@ -270,16 +340,11 @@ def compute_exclusions(self, hub_height, rotor_diameter, Parameters ---------- - hub_height : int - Hub-height in meters to compute turbine shadow flicker. - rotor_diameter : int - Rotor diameter in meters to compute turbine shadow flicker. - building_threshold : float, optional - Threshold for exclusion layer values to identify pixels with - buildings, values are % of pixel containing a building. By - default, `0`. flicker_threshold : int, optional Maximum number of allowable flicker hours. By default, `30`. + fips : int, optional + If not `None`, only building indices within counties with + the given FIPS code will be returned. By default, `None`. max_workers : int, optional Number of workers to use. If 1 run, in serial. 
If `None`, use all available cores. By default, `None`. @@ -300,11 +365,17 @@ def compute_exclusions(self, hub_height, rotor_diameter, if max_workers is None: max_workers = os.cpu_count() + if fips is None: + gids = self._sc_points.index + else: + gids = self._fips_to_gid.get(fips, []) + flicker_arr = np.ones(self._exclusion_shape, dtype=np.uint8) if max_workers > 1: msg = ('Computing exclusions from {} based on {}m hub height ' 'turbines with {}m rotor diameters in parallel using {} ' - 'workers'.format(self, hub_height, rotor_diameter, + 'workers'.format(self, self._regulations.hub_height, + self._regulations.rotor_diameter, max_workers)) logger.info(msg) @@ -312,19 +383,18 @@ def compute_exclusions(self, hub_height, rotor_diameter, with SpawnProcessPool(max_workers=max_workers, loggers=loggers) as exe: futures = {} - for _, point in self._sc_points.iterrows(): + for gid in gids: + point = self._sc_points.iloc[gid] - row_idx, col_idx, shape = _get_building_indices( - self._excl_h5, self._bld_layer, point.name, - resolution=self._res, - building_threshold=building_threshold) - if row_idx.size == 0: - continue + # row_idx, col_idx, shape = _get_building_indices( + # self._excl_fpath, self._bld_layer, point.name, + # resolution=self._res, fips=fips, + # building_threshold=self._building_threshold) + # if row_idx.size == 0: + # continue future = exe.submit(self._exclude_turbine_flicker, - point, self._res_h5, hub_height, - rotor_diameter, - flicker_threshold=flicker_threshold) + point, self._res_h5, flicker_threshold) futures[future] = point for i, future in enumerate(as_completed(futures)): @@ -332,9 +402,9 @@ def compute_exclusions(self, hub_height, rotor_diameter, point = futures[future] row_idx, col_idx, shape = _get_building_indices( - self._excl_h5, self._bld_layer, point.name, - resolution=self._res, - building_threshold=building_threshold) + self._excl_fpath, self._bld_layer, point.name, + resolution=self._res, # fips=fips, + 
building_threshold=self._building_threshold) row_idx, col_idx = _create_excl_indices( (row_idx, col_idx), flicker_shifts, shape) @@ -346,135 +416,232 @@ def compute_exclusions(self, hub_height, rotor_diameter, msg = ( 'Computing exclusions from {} based on {}m hub height, {}m ' 'rotor diameter turbines in serial.' - .format(self, hub_height, rotor_diameter) + .format(self, self._regulations.hub_height, + self._regulations.rotor_diameter) ) logger.info(msg) - for i, (_, point) in enumerate(self._sc_points.iterrows()): - row_idx, col_idx, shape = _get_building_indices( - self._excl_h5, self._bld_layer, point.name, - resolution=self._res, - building_threshold=building_threshold) - if row_idx.size == 0: - continue + for i, gid in enumerate(gids): + point = self._sc_points.iloc[gid] + # for i, (_, point) in enumerate(self._sc_points.iterrows()): + # row_idx, col_idx, shape = _get_building_indices( + # self._excl_fpath, self._bld_layer, point.name, + # resolution=self._res, fips=fips, + # building_threshold=self._building_threshold) + # if row_idx.size == 0: + # continue flicker_shifts = self._exclude_turbine_flicker( - point, self._res_h5, hub_height, rotor_diameter, - flicker_threshold=flicker_threshold) - row_idx, col_idx = _create_excl_indices( - (row_idx, col_idx), flicker_shifts, shape) + point, self._res_h5, flicker_threshold) + + row_idx, col_idx, shape = _get_building_indices( + self._excl_fpath, self._bld_layer, point.name, + resolution=self._res, # fips=fips, + building_threshold=self._building_threshold) + row_idx, col_idx = _create_excl_indices((row_idx, col_idx), + flicker_shifts, shape) flicker_arr[row_idx, col_idx] = 0 logger.debug('Completed {} out of {} gids' .format((i + 1), len(self._sc_points))) log_mem(logger) - if out_layer: - logger.info('Saving flicker inclusion layer to {} as {}' - .format(self._excl_h5, out_layer)) - description = ( - 'Pixels with value 0 are excluded as they will cause greater ' - 'than {} hours of flicker on buildings in {}. 
Shadow flicker ' - 'is computed using a {}m hub height, {}m rotor diameter ' - 'turbine.' - .format(flicker_threshold, self._bld_layer, hub_height, - rotor_diameter) - ) - ExclusionsConverter._write_layer(self._excl_h5, out_layer, - self.profile, flicker_arr, - description=description) - if out_tiff: - logger.info('Saving flicker inclusion layer to {}' - .format(out_tiff)) - ExclusionsConverter._write_geotiff(out_tiff, self.profile, - flicker_arr) + # if out_layer: + # logger.info('Saving flicker inclusion layer to {} as {}' + # .format(self._excl_fpath, out_layer)) + # description = ( + # 'Pixels with value 0 are excluded as they will cause greater ' + # 'than {} hours of flicker on buildings in {}. Shadow flicker ' + # 'is computed using a {}m hub height, {}m rotor diameter ' + # 'turbine.' + # .format(flicker_threshold, self._bld_layer, + # self._regulations.hub_height, + # self._regulations.rotor_diameter) + # ) + # ExclusionsConverter._write_layer(self._excl_fpath, out_layer, + # self.profile, flicker_arr, + # description=description) + # if out_tiff: + # logger.info('Saving flicker inclusion layer to {}' + # .format(out_tiff)) + # ExclusionsConverter._write_geotiff(out_tiff, self.profile, + # flicker_arr) return flicker_arr - @classmethod - def run(cls, excl_fpath, res_fpath, building_layer, hub_height, - rotor_diameter, tm_dset='techmap_wtk', building_threshold=0, - flicker_threshold=30, resolution=640, grid_cell_size=90, - max_flicker_exclusion_range=10_000, max_workers=None, - out_layer=None, out_tiff=None): - """Run flicker exclusion layer generation. + @property + def profile(self): + """dict: Geotiff profile. """ + return self._profile - Exclude all pixels that will cause flicker exceeding the - "flicker_threshold" on any building in "building_layer". - Buildings are defined as pixels with >= the "building_threshold - value in "building_layer". 
Shadow flicker is computed at the - supply curve point resolution based on a turbine with - "hub_height" (m) and applied to all buildings within that supply - curve point sub-array. + def pre_process_regulations(self, features_fpath): + """Reduce regulations to correct state and features. Parameters ---------- - excl_fpath : str - Filepath to exclusions h5 file. File must contain - `building_layer` and `tm_dset`. - res_fpath : str - Filepath to wind resource .h5 file containing hourly wind - direction data. - building_layer : str - Exclusion layer containing buildings from which turbine - flicker exclusions will be computed. - hub_height : int - Hub-height (m) used to compute turbine shadow flicker. - rotor_diameter : int - Rotor diameter (m) used to compute turbine shadow flicker. - tm_dset : str, optional - Dataset / layer name for wind toolkit techmap. By default, - `'techmap_wtk'`. - building_threshold : float, optional - Threshold for exclusion layer values to identify pixels with - buildings, values are % of pixel containing a building. By - default, `0`. - flicker_threshold : int, optional - Maximum number of allowable flicker hours. By default, `30`. - resolution : int, optional - SC resolution, must be input in combination with gid. - By default, `640`. - grid_cell_size : float, optional - Length (m) of a side of each grid cell in `excl_fpath`. - max_flicker_exclusion_range : float, optional - Max distance (m) that flicker exclusions will extend in - any of the cardinal directions. Note that increasing this - value can lead to drastically instead memory requirements. - This value may be increased slightly in order to yield - odd exclusion array shapes. - max_workers : int, optional - Number of workers to use. If 1 run, in serial. If `None`, - use all available cores. By default, `None`. - out_layer : str, optional - Layer to save exclusions under. Layer will be saved in - `excl_fpath`. By default, `None`. 
- out_tiff : str, optional - Path to output tiff file where exclusions should be saved. - By default, `None`. + features_fpath : str + Path to shape file with features to compute exclusions from. + """ + + self._fips_to_gid = {} + reg_fips = self._regulations.FIPS.unique() + with SupplyCurveExtent(self._excl_fpath, resolution=self._res) as sc: + for gid in self._sc_points.index: + for fips in np.unique(sc.get_excl_points('cnty_fips', gid)): + if fips in reg_fips: + self._fips_to_gid.setdefault(fips, []).append(gid) + + # TODO: Turn this into a warning + assert len(self._fips_to_gid) == len(reg_fips), "Some FIPS not found" + + def compute_local_exclusions(self, regulation_value, cnty, features_fpath): + """Compute local flicker exclusions. + + This method computes a flicker exclusion layer using the + information about the input county. + + Parameters + ---------- + regulation_value : float | int + Maximum number of allowable flicker hours in county. + cnty : geopandas.GeoDataFrame + Regulations for a single county. Returns ------- - flicker_arr : ndarray - 2D inclusion array. Pixels to exclude (0) to prevent shadow - flicker on buildings in "building_layer" + flicker : ndarray + Raster array of flicker exclusions """ - flicker = cls(excl_fpath, res_fpath, building_layer, - resolution=resolution, grid_cell_size=grid_cell_size, - max_flicker_exclusion_range=max_flicker_exclusion_range, - tm_dset=tm_dset) - out_excl = flicker.compute_exclusions( - hub_height, - rotor_diameter, - building_threshold=building_threshold, - flicker_threshold=flicker_threshold, - max_workers=max_workers, - out_layer=out_layer, - out_tiff=out_tiff - ) - return out_excl + cnty_fips = cnty.iloc[0]['FIPS'] + logger.debug('- Computing flicker for county FIPS {}' + .format(cnty_fips)) + return self.compute_flicker_exclusions( + flicker_threshold=regulation_value, fips=cnty_fips, max_workers=1) + + def compute_generic_exclusions(self, features_fpath): + """Compute generic flicker exclusions. 
+ + This method will compute a generic flicker exclusion layer. + + Returns + ------- + flicker : ndarray + Raster array of flicker exclusions + """ + logger.info('Computing generic flicker exclusions...') + return self.compute_flicker_exclusions(flicker_threshold=30, + fips=None, max_workers=None) + + @staticmethod + def get_feature_paths(features_fpath): + """Ensure features path exists and return as list. + + Parameters + ---------- + features_fpath : str + Path to features file. This path can contain + any pattern that can be used in the glob function. + For example, `/path/to/features/[A]*` would match + with all the features in the directory + `/path/to/features/` that start with "A". This input + can also be a directory, but that directory must ONLY + contain feature files. If your feature files are mixed + with other files or directories, use something like + `/path/to/features/*.geojson`. + + Returns + ------- + features_fpath : list + Features path as a list of strings. + + Notes + ----- + This method is required for `run` classmethods for exclusion + features that are spread out over multiple files. + """ + return ['flicker.tiff'] + + # @classmethod + # def run(cls, excl_fpath, res_fpath, building_layer, tm_dset='techmap_wtk', + # building_threshold=0, flicker_threshold=30, resolution=640, + # grid_cell_size=90, max_flicker_exclusion_range=10_000, + # max_workers=None, out_layer=None, out_tiff=None): + # """Run flicker exclusion layer generation. + + # Exclude all pixels that will cause flicker exceeding the + # "flicker_threshold" on any building in "building_layer". + # Buildings are defined as pixels with >= the "building_threshold + # value in "building_layer". Shadow flicker is computed at the + # supply curve point resolution based on a turbine with + # "hub_height" (m) and applied to all buildings within that supply + # curve point sub-array. + + # Parameters + # ---------- + # excl_fpath : str + # Filepath to exclusions h5 file. 
File must contain + # `building_layer` and `tm_dset`. + # res_fpath : str + # Filepath to wind resource .h5 file containing hourly wind + # direction data. + # building_layer : str + # Exclusion layer containing buildings from which turbine + # flicker exclusions will be computed. + # regulations : `FlickerRegulations` + # A `FlickerRegulations` object used to shadow flicker + # regulation values. + # tm_dset : str, optional + # Dataset / layer name for wind toolkit techmap. By default, + # `'techmap_wtk'`. + # building_threshold : float, optional + # Threshold for exclusion layer values to identify pixels with + # buildings, values are % of pixel containing a building. By + # default, `0`. + # flicker_threshold : int, optional + # Maximum number of allowable flicker hours. By default, `30`. + # resolution : int, optional + # SC resolution, must be input in combination with gid. + # By default, `640`. + # grid_cell_size : float, optional + # Length (m) of a side of each grid cell in `excl_fpath`. + # max_flicker_exclusion_range : float, optional + # Max distance (m) that flicker exclusions will extend in + # any of the cardinal directions. Note that increasing this + # value can lead to drastically instead memory requirements. + # This value may be increased slightly in order to yield + # odd exclusion array shapes. + # max_workers : int, optional + # Number of workers to use. If 1 run, in serial. If `None`, + # use all available cores. By default, `None`. + # out_layer : str, optional + # Layer to save exclusions under. Layer will be saved in + # `excl_fpath`. By default, `None`. + # out_tiff : str, optional + # Path to output tiff file where exclusions should be saved. + # By default, `None`. + + # Returns + # ------- + # flicker_arr : ndarray + # 2D inclusion array. 
Pixels to exclude (0) to prevent shadow + # flicker on buildings in "building_layer" + # """ + # flicker = cls(excl_fpath, res_fpath, building_layer, + # resolution=resolution, grid_cell_size=grid_cell_size, + # max_flicker_exclusion_range=max_flicker_exclusion_range, + # tm_dset=tm_dset) + # out_excl = flicker.compute_exclusions( + # building_threshold=building_threshold, + # flicker_threshold=flicker_threshold, + # max_workers=max_workers, + # out_layer=out_layer, + # out_tiff=out_tiff + # ) + # return out_excl def _get_building_indices(excl_fpath, building_layer, gid, - resolution=640, building_threshold=0): + resolution=640, building_threshold=0, + fips=None, fips_layer="cnty_fips", hsds=False): """Find buildings exclusion indices Parameters @@ -494,6 +661,14 @@ def _get_building_indices(excl_fpath, building_layer, gid, Threshold for exclusion layer values to identify pixels with buildings, values are % of pixel containing a building. By default, `0`. + fips : int, optional + If not `None`, only building indices within counties with the + given FIPS code will be returned. By default, `None`. + fips_layer : str, optional + Name of fips layer in `excl_fpath`. By default, `"cnty_fips"`. + hsds : bool, optional + Boolean flag to use h5pyd to handle .h5 'files' hosted on + AWS behind HSDS. By default `False`. Returns ------- @@ -506,17 +681,22 @@ def _get_building_indices(excl_fpath, building_layer, gid, shape : tuple Full exclusion array shape. 
""" - with ExclusionLayers(excl_fpath) as f: + with ExclusionLayers(excl_fpath, hsds=hsds) as f: shape = f.shape row_slice, col_slice = MeanWindDirectionsPoint.get_agg_slices( gid, shape, resolution ) sc_blds = f[building_layer, row_slice, col_slice] + # fips_vals = f[fips_layer, row_slice, col_slice] row_idx = np.array(range(*row_slice.indices(row_slice.stop))) col_idx = np.array(range(*col_slice.indices(col_slice.stop))) - bld_row_idx, bld_col_idx = np.where(sc_blds > building_threshold) + + bld_row_idx, bld_col_idx = np.where((sc_blds > building_threshold) + # & (fips_vals == fips + # if fips is not None else True) + ) return row_idx[bld_row_idx], col_idx[bld_col_idx], shape diff --git a/reVX/turbine_flicker/turbine_flicker_cli.py b/reVX/turbine_flicker/turbine_flicker_cli.py index e306d4c1a..03178d1d4 100644 --- a/reVX/turbine_flicker/turbine_flicker_cli.py +++ b/reVX/turbine_flicker/turbine_flicker_cli.py @@ -13,7 +13,8 @@ from rex.utilities.utilities import get_class_properties from reVX.config.turbine_flicker import TurbineFlickerConfig -from reVX.turbine_flicker.turbine_flicker import TurbineFlicker +from reVX.turbine_flicker.turbine_flicker import (FlickerRegulations, + TurbineFlicker) from reVX import __version__ logger = logging.getLogger(__name__) @@ -63,14 +64,17 @@ def run_local(ctx, config): hub_height=config.hub_height, rotor_diameter=config.rotor_diameter, out_layer=config.out_layer, - out_tiff=config.out_tiff, + out_dir=config.dirout, tm_dset=config.tm_dset, building_threshold=config.building_threshold, flicker_threshold=config.flicker_threshold, resolution=config.resolution, grid_cell_size=config.grid_cell_size, max_flicker_exclusion_range=config.max_flicker_exclusion_range, + regs_fpath=config.regs_fpath, max_workers=config.execution_control.max_workers, + replace=config.replace, + hsds=config.hsds, log_dir=config.log_directory, verbose=config.log_level) @@ -117,16 +121,13 @@ def from_config(ctx, config, verbose): 
@click.option('--rotor_diameter', '-rd', required=True, type=int, help=('Rotor diameter in meters to compute turbine shadow ' 'flicker.')) -@click.option('--out_layer', '-o', default=None, type=STR, +@click.option('--out_layer', '-ol', default=None, type=STR, show_default=True, help=("Layer to save exclusions under. Layer will be saved in " "excl_fpath, if not provided will be generated from the " "building_layer name and hub-height")) -@click.option('--out_tiff', '-ot', default=None, type=STR, - show_default=True, - help=("Path to output tiff file where exclusions should be " - "saved, if not provided, data will not be written to " - "a file")) +@click.option('--out_dir', '-o', required=True, type=STR, + help=('Directory to save setbacks geotiff(s) into')) @click.option('--tm_dset', '-td', default='techmap_wtk', type=STR, show_default=True, help=("Dataset name in the techmap file containing the " @@ -156,10 +157,21 @@ def from_config(ctx, config, verbose): "this value can lead to drastically instead memory " "requirements. This value may be increased slightly in " "order to yield odd exclusion array shapes.")) +@click.option('--regs_fpath', '-regs', default=None, type=STR, + show_default=True, + help=('Path to regulations .csv file, if None create ' + 'generic setbacks using max - tip height * "multiplier", ' + 'by default None')) @click.option('--max_workers', '-mw', default=None, type=INT, show_default=True, help=("Number of cores to run summary on. None is all " "available cpus")) +@click.option('--replace', '-r', is_flag=True, + help=('Flag to replace local layer data with arr if layer ' + 'already exists in the exclusion .h5 file')) +@click.option('--hsds', '-hsds', is_flag=True, + help=('Flag to use h5pyd to handle .h5 domain hosted on AWS ' + 'behind HSDS')) @click.option('--log_dir', '-log', default=None, type=STR, show_default=True, help='Directory to dump log files. 
Default is out_dir.') @@ -167,14 +179,20 @@ def from_config(ctx, config, verbose): help='Flag to turn on debug logging. Default is not verbose.') @click.pass_context def local(ctx, excl_fpath, res_fpath, building_layer, hub_height, - rotor_diameter, out_layer, out_tiff, tm_dset, building_threshold, + rotor_diameter, out_layer, out_dir, tm_dset, building_threshold, flicker_threshold, resolution, grid_cell_size, - max_flicker_exclusion_range, max_workers, log_dir, verbose): + max_flicker_exclusion_range, regs_fpath, max_workers, replace, hsds, + log_dir, verbose): """ Compute turbine flicker on local hardware """ - if out_layer is None: - out_layer = "{}-{}m".format(building_layer, hub_height) + # if out_layer is None: + # out_layer = "{}-{}m".format(building_layer, hub_height) + + if out_layer is not None: + out_layers = {"flicker.tiff": out_layer} + else: + out_layers = {} name = ctx.obj['NAME'] if 'VERBOSE' in ctx.obj: @@ -183,18 +201,43 @@ def local(ctx, excl_fpath, res_fpath, building_layer, hub_height, log_modules = [__name__, 'reVX', 'reV', 'rex'] init_mult(name, log_dir, modules=log_modules, verbose=verbose) - logger.info('Computing Turbine Flicker Exclusions, ' - 'outputs to be save as a a new exclusions layer : {}' - .format(out_layer)) - - TurbineFlicker.run(excl_fpath, res_fpath, building_layer, hub_height, - rotor_diameter, tm_dset=tm_dset, + # logger.info('Computing Turbine Flicker Exclusions, ' + # 'outputs to be save as a a new exclusions layer : {}' + # .format(out_layer)) + + logger.info('Computing Turbine Flicker Exclusions from structures in {}' + .format(building_layer)) + logger.debug('Flicker to be computed with:\n' + '- hub_height = {}\n' + '- rotor_diameter = {}\n' + '- tm_dset = {}\n' + '- building_threshold = {}\n' + '- flicker_threshold = {}\n' + '- resolution = {}\n' + '- grid_cell_size = {}\n' + '- max_flicker_exclusion_range = {}\n' + '- regs_fpath = {}\n' + '- using max_workers = {}\n' + '- replace layer if needed = {}\n' + '- 
out_layer = {}\n' + .format(hub_height, rotor_diameter, tm_dset, + building_threshold, flicker_threshold, resolution, + grid_cell_size, max_flicker_exclusion_range, + regs_fpath, max_workers, replace, out_layer)) + + regulations = FlickerRegulations(hub_height, rotor_diameter, + flicker_threshold, regs_fpath) + + TurbineFlicker.run(excl_fpath, building_layer, out_dir, + res_fpath=res_fpath, + building_layer=building_layer, + regulations=regulations, building_threshold=building_threshold, - flicker_threshold=flicker_threshold, - resolution=resolution, max_workers=max_workers, - out_layer=out_layer, out_tiff=out_tiff, + resolution=resolution, grid_cell_size=grid_cell_size, - max_flicker_exclusion_range=max_flicker_exclusion_range) + max_flicker_exclusion_range=max_flicker_exclusion_range, + tm_dset=tm_dset, max_workers=max_workers, + replace=replace, hsds=hsds, out_layers=out_layers) def get_node_cmd(config): @@ -218,8 +261,8 @@ def get_node_cmd(config): '-bldl {}'.format(SLURM.s(config.building_layer)), '-h {}'.format(SLURM.s(config.hub_height)), '-rd {}'.format(SLURM.s(config.rotor_diameter)), - '-o {}'.format(SLURM.s(config.out_layer)), - '-ot {}'.format(SLURM.s(config.out_tiff)), + '-ol {}'.format(SLURM.s(config.out_layer)), + '-o {}'.format(SLURM.s(config.dirout)), '-td {}'.format(SLURM.s(config.tm_dset)), '-bldt {}'.format(SLURM.s(config.building_threshold)), '-ft {}'.format(SLURM.s(config.flicker_threshold)), @@ -227,9 +270,16 @@ def get_node_cmd(config): '-gcs {}'.format(SLURM.s(config.grid_cell_size)), '-mfer {}'.format(SLURM.s(config.max_flicker_exclusion_range)), '-mw {}'.format(SLURM.s(config.execution_control.max_workers)), + '-regs {}'.format(SLURM.s(config.regs_fpath)), '-log {}'.format(SLURM.s(config.log_directory)), ] + if config.replace: + args.append('-r') + + if config.hsds: + args.append('-hsds') + if config.log_level == logging.DEBUG: args.append('-v') diff --git a/tests/test_turbine_flicker.py b/tests/test_turbine_flicker.py index 
c7371ef91..4f2345a74 100644 --- a/tests/test_turbine_flicker.py +++ b/tests/test_turbine_flicker.py @@ -16,6 +16,7 @@ from reV.handlers.exclusions import ExclusionLayers from reVX import TESTDATADIR from reVX.turbine_flicker.turbine_flicker import ( + FlickerRegulations, TurbineFlicker, _create_excl_indices, _get_building_indices, @@ -75,10 +76,11 @@ def test_shadow_flicker(flicker_threshold): """ lat, lon = 39.913373, -105.220105 wind_dir = np.zeros(8760) - tf = TurbineFlicker(EXCL_H5, RES_H5, BLD_LAYER, grid_cell_size=90, - max_flicker_exclusion_range=4_510) - shadow_flicker = tf._compute_shadow_flicker(lat, lon, ROTOR_DIAMETER, - wind_dir) + regulations = FlickerRegulations(HUB_HEIGHT, ROTOR_DIAMETER, + flicker_threshold=flicker_threshold) + tf = TurbineFlicker(EXCL_H5, RES_H5, BLD_LAYER, regulations, + grid_cell_size=90, max_flicker_exclusion_range=4_510) + shadow_flicker = tf._compute_shadow_flicker(lat, lon, wind_dir) baseline = (shadow_flicker[::-1, ::-1].copy() <= (flicker_threshold / 8760)).astype(np.int8) @@ -149,9 +151,11 @@ def test_turbine_flicker(max_workers): with ExclusionLayers(EXCL_H5) as f: baseline = f[BASELINE] - test = TurbineFlicker.run(EXCL_H5, RES_H5, BLD_LAYER, HUB_HEIGHT, - ROTOR_DIAMETER, tm_dset='techmap_wind', - resolution=64, max_workers=max_workers) + regulations = FlickerRegulations(HUB_HEIGHT, ROTOR_DIAMETER) + tf = TurbineFlicker(EXCL_H5, RES_H5, BLD_LAYER, regulations, + resolution=64, tm_dset='techmap_wind', + max_flicker_exclusion_range=4540) + test = tf.compute_flicker_exclusions(max_workers=max_workers) assert np.allclose(baseline, test) @@ -159,16 +163,17 @@ def test_turbine_flicker_bad_max_flicker_exclusion_range_input(): """ Test Turbine Flicker with bad input for max_flicker_exclusion_range """ + regulations = FlickerRegulations(HUB_HEIGHT, ROTOR_DIAMETER) with pytest.raises(TypeError) as excinfo: - TurbineFlicker.run(EXCL_H5, RES_H5, BLD_LAYER, HUB_HEIGHT, - ROTOR_DIAMETER, max_flicker_exclusion_range='abc') + 
TurbineFlicker(EXCL_H5, RES_H5, BLD_LAYER, regulations, + max_flicker_exclusion_range='abc') assert "max_flicker_exclusion_range must be numeric" in str(excinfo.value) def test_cli(runner): """ - Test MeanWindDirections CLI + Test Flicker CLI """ with tempfile.TemporaryDirectory() as td: @@ -188,7 +193,8 @@ def test_cli(runner): "log_level": "INFO", "res_fpath": RES_H5, "resolution": 64, - "tm_dset": "techmap_wind" + "tm_dset": "techmap_wind", + "max_flicker_exclusion_range": 4540 } config_path = os.path.join(td, 'config.json') with open(config_path, 'w') as f: @@ -217,7 +223,8 @@ def test_cli_tiff(runner): with tempfile.TemporaryDirectory() as td: excl_h5 = os.path.join(td, os.path.basename(EXCL_H5)) shutil.copy(EXCL_H5, excl_h5) - out_tiff = f"{BLD_LAYER}_{HUB_HEIGHT}hh_{ROTOR_DIAMETER}rd.tiff" + # out_tiff = f"{BLD_LAYER}_{HUB_HEIGHT}hh_{ROTOR_DIAMETER}rd.tiff" + out_tiff = "flicker.tif" config = { "log_directory": td, "excl_fpath": excl_h5, @@ -226,12 +233,12 @@ def test_cli_tiff(runner): }, "building_layer": BLD_LAYER, "hub_height": HUB_HEIGHT, - "out_tiff": os.path.join(td, out_tiff), "rotor_diameter": ROTOR_DIAMETER, "log_level": "INFO", "res_fpath": RES_H5, "resolution": 64, - "tm_dset": "techmap_wind" + "tm_dset": "techmap_wind", + "max_flicker_exclusion_range": 4540 } config_path = os.path.join(td, 'config.json') with open(config_path, 'w') as f: @@ -273,12 +280,12 @@ def test_cli_max_flicker_exclusion_range(runner): }, "building_layer": BLD_LAYER, "hub_height": HUB_HEIGHT, - "out_tiff": os.path.join(td, out_tiff_def), "rotor_diameter": ROTOR_DIAMETER, "log_level": "INFO", "res_fpath": RES_H5, "resolution": 64, - "tm_dset": "techmap_wind" + "tm_dset": "techmap_wind", + "max_flicker_exclusion_range": 4_540 } config_path = os.path.join(td, 'config.json') with open(config_path, 'w') as f: @@ -289,9 +296,11 @@ def test_cli_max_flicker_exclusion_range(runner): traceback.print_exception(*result.exc_info) ) assert result.exit_code == 0, msg + 
shutil.move(os.path.join(td, "flicker.tif"), + os.path.join(td, out_tiff_def)) - out_tiff = f"{BLD_LAYER}_{HUB_HEIGHT}hh_{ROTOR_DIAMETER}rd_5k.tiff" - config["out_tiff"] = os.path.join(td, out_tiff) + out_tiff_5k = f"{BLD_LAYER}_{HUB_HEIGHT}hh_{ROTOR_DIAMETER}rd_5k.tiff" + # config["out_tiff"] = os.path.join(td, out_tiff) config["max_flicker_exclusion_range"] = 5_000 config_path = os.path.join(td, 'config.json') with open(config_path, 'w') as f: @@ -302,9 +311,11 @@ def test_cli_max_flicker_exclusion_range(runner): traceback.print_exception(*result.exc_info) ) assert result.exit_code == 0, msg + shutil.move(os.path.join(td, "flicker.tif"), + os.path.join(td, out_tiff_5k)) out_tiff_20d = f"{BLD_LAYER}_{HUB_HEIGHT}hh_{ROTOR_DIAMETER}rd_5d.tiff" - config["out_tiff"] = os.path.join(td, out_tiff_20d) + # config["out_tiff"] = os.path.join(td, out_tiff_20d) config["max_flicker_exclusion_range"] = "20x" config_path = os.path.join(td, 'config.json') with open(config_path, 'w') as f: @@ -315,6 +326,9 @@ def test_cli_max_flicker_exclusion_range(runner): traceback.print_exception(*result.exc_info) ) assert result.exit_code == 0, msg + shutil.move(os.path.join(td, "flicker.tif"), + os.path.join(td, out_tiff_20d)) + with ExclusionLayers(EXCL_H5) as f: baseline = f[BASELINE] @@ -322,13 +336,13 @@ def test_cli_max_flicker_exclusion_range(runner): with ExclusionLayers(excl_h5) as f: assert out_tiff_def not in f.layers assert out_tiff_def.split('.') not in f.layers - assert out_tiff not in f.layers - assert out_tiff.split('.') not in f.layers + assert out_tiff_5k not in f.layers + assert out_tiff_5k.split('.') not in f.layers with Geotiff(os.path.join(td, out_tiff_def)) as f: test = f.values[0] - with Geotiff(os.path.join(td, out_tiff)) as f: + with Geotiff(os.path.join(td, out_tiff_5k)) as f: test2 = f.values[0] with Geotiff(os.path.join(td, out_tiff_20d)) as f: From 1f388639a5d5754f4e25c9be6334ef1d20b595d0 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Tue, 20 Sep 2022 16:19:39 
-0600 Subject: [PATCH 27/42] (WIP) Can now input tiff file (WIP) --- reVX/config/turbine_flicker.py | 45 +++++++- reVX/turbine_flicker/turbine_flicker.py | 120 ++++++++++++++------ reVX/turbine_flicker/turbine_flicker_cli.py | 31 +++-- tests/test_turbine_flicker.py | 28 ++++- 4 files changed, 169 insertions(+), 55 deletions(-) diff --git a/reVX/config/turbine_flicker.py b/reVX/config/turbine_flicker.py index 6b3afce25..93ab2e40b 100644 --- a/reVX/config/turbine_flicker.py +++ b/reVX/config/turbine_flicker.py @@ -10,8 +10,7 @@ class TurbineFlickerConfig(AnalysisConfig): """Config framework for turbine flicker calculation""" NAME = 'TurbineFlicker' - REQUIREMENTS = ('excl_fpath', 'res_fpath', 'building_layer', 'hub_height', - 'rotor_diameter') + REQUIREMENTS = ('excl_fpath', 'res_fpath', 'hub_height', 'rotor_diameter') def __init__(self, config): """ @@ -28,6 +27,8 @@ def __init__(self, config): self._default_building_threshold = 0 self._default_flicker_threshold = 30 self._default_hsds_flag = False + validate_feature_input(features_path=self.features_path, + building_layer=self.building_layer) @property def excl_fpath(self): @@ -44,10 +45,15 @@ def regs_fpath(self): """Get regulations .csv path""" return self.get('regs_fpath', None) + @property + def features_path(self): + """Get path to features file or directory (required).""" + return self.get('features_path', None) + @property def building_layer(self): """Get the building layer name.""" - return self['building_layer'] + return self.get('building_layer', None) @property def hub_height(self): @@ -126,3 +132,36 @@ def replace(self): def hsds(self): """Get hsds flag""" return self.get('hsds', self._default_hsds_flag) + + +def validate_feature_input(features_path=None, building_layer=None): + """Validate the feature input. 
+ + Specifically, this function raises an error unless exactly one of + the following inputs are provided: + - features_path + - building_layer + + Parameters + ---------- + features_path : str | int + Path to input tif containing buildings from which turbine + flicker exclusions will be computed. By default, `None`. + building_layer : float | int + Exclusion layer containing buildings from which turbine + flicker exclusions will be computed. By default, `None`. + + Raises + ------ + RuntimeError + If not enough info is provided (all inputs are `None`), or too + much info is given (all inputs are not `None`). + """ + no_features_path = features_path is None + no_building_layer = building_layer is None + + not_enough_info = no_features_path and no_building_layer + too_much_info = not no_features_path and not no_building_layer + if not_enough_info or too_much_info: + raise RuntimeError("Must provide either `features_path` or " + "`building_layer` (but not both).") diff --git a/reVX/turbine_flicker/turbine_flicker.py b/reVX/turbine_flicker/turbine_flicker.py index cca2ef4b6..fe7dc296c 100644 --- a/reVX/turbine_flicker/turbine_flicker.py +++ b/reVX/turbine_flicker/turbine_flicker.py @@ -11,6 +11,7 @@ from reV.handlers.exclusions import ExclusionLayers from reV.supply_curve.extent import SupplyCurveExtent from reV.supply_curve.tech_mapping import TechMapping +from reVX.handlers.geotiff import Geotiff from reVX.setbacks.regulations import WindRegulations from reVX.wind_dirs.mean_wind_dirs_point import MeanWindDirectionsPoint from reVX.utilities.exclusions import (ExclusionsConverter, @@ -87,6 +88,7 @@ class TurbineFlicker(AbstractBaseExclusionsMerger): cause excessive flicker on building """ STEPS_PER_HOUR = 1 + DEFAULT_FEATURE_OUTFILE = 'flicker.tif' def __init__(self, excl_fpath, res_fpath, building_layer, regulations, building_threshold=0, resolution=640, grid_cell_size=90, @@ -101,7 +103,7 @@ def __init__(self, excl_fpath, res_fpath, building_layer, regulations, 
res_fpath : str Filepath to wind resource .h5 file containing hourly wind direction data - building_layer : str + building_layer : np.ndarray Exclusion layer containing buildings from which turbine flicker exclusions will be computed. regulations : `FlickerRegulations` @@ -140,16 +142,10 @@ def __init__(self, excl_fpath, res_fpath, building_layer, regulations, self._max_flicker_exclusion_range = ( self._parse_max_flicker_exclusion_range( max_flicker_exclusion_range)) + self._profile = None self._flicker_preflight_check(tm_dset=tm_dset) self._sc_points = self._get_sc_points(tm_dset=tm_dset) self._fips_to_gid = {} - with ExclusionLayers(excl_fpath, hsds=hsds) as f: - self._profile = f.profile - self._exclusion_shape = f.shape - - def __repr__(self): - msg = "{} from {}".format(self.__class__.__name__, self._bld_layer) - return msg def _parse_max_flicker_exclusion_range(self, excl_range): """Convert max_flicker_exclusion_range to float if necessary. """ @@ -180,11 +176,13 @@ def _flicker_preflight_check(self, tm_dset='techmap_wtk'): """ with ExclusionLayers(self._excl_fpath, hsds=self._hsds) as f: layers = f.layers + exclusion_shape = f.shape + self._profile = f.profile - # TODO: Make this more flexible to accept tiff - if self._bld_layer not in layers: - msg = ("{} is not available in {}" - .format(self._bld_layer, self._excl_fpath)) + if self._bld_layer.shape != exclusion_shape: + msg = ("Shape of building layer {} does not match shape of " + "ExclusionLayers {}" + .format(self._bld_layer.shape, exclusion_shape)) logger.error(msg) raise RuntimeError(msg) @@ -370,7 +368,7 @@ def compute_flicker_exclusions(self, flicker_threshold=30, fips=None, else: gids = self._fips_to_gid.get(fips, []) - flicker_arr = np.ones(self._exclusion_shape, dtype=np.uint8) + flicker_arr = np.ones(self._bld_layer.shape, dtype=np.uint8) if max_workers > 1: msg = ('Computing exclusions from {} based on {}m hub height ' 'turbines with {}m rotor diameters in parallel using {} ' @@ -401,12 
+399,13 @@ def compute_flicker_exclusions(self, flicker_threshold=30, fips=None, flicker_shifts = future.result() point = futures[future] - row_idx, col_idx, shape = _get_building_indices( - self._excl_fpath, self._bld_layer, point.name, + row_idx, col_idx = _get_building_indices( + self._bld_layer, point.name, resolution=self._res, # fips=fips, building_threshold=self._building_threshold) row_idx, col_idx = _create_excl_indices( - (row_idx, col_idx), flicker_shifts, shape) + (row_idx, col_idx), flicker_shifts, + self._bld_layer.shape) flicker_arr[row_idx, col_idx] = 0 logger.info('Completed {} out of {} gids' @@ -433,12 +432,13 @@ def compute_flicker_exclusions(self, flicker_threshold=30, fips=None, flicker_shifts = self._exclude_turbine_flicker( point, self._res_h5, flicker_threshold) - row_idx, col_idx, shape = _get_building_indices( - self._excl_fpath, self._bld_layer, point.name, + row_idx, col_idx = _get_building_indices( + self._bld_layer, point.name, resolution=self._res, # fips=fips, building_threshold=self._building_threshold) row_idx, col_idx = _create_excl_indices((row_idx, col_idx), - flicker_shifts, shape) + flicker_shifts, + self._bld_layer.shape) flicker_arr[row_idx, col_idx] = 0 logger.debug('Completed {} out of {} gids' @@ -558,7 +558,7 @@ def get_feature_paths(features_fpath): This method is required for `run` classmethods for exclusion features that are spread out over multiple files. 
""" - return ['flicker.tiff'] + return [TurbineFlicker.DEFAULT_FEATURE_OUTFILE] # @classmethod # def run(cls, excl_fpath, res_fpath, building_layer, tm_dset='techmap_wtk', @@ -639,17 +639,14 @@ def get_feature_paths(features_fpath): # return out_excl -def _get_building_indices(excl_fpath, building_layer, gid, - resolution=640, building_threshold=0, +def _get_building_indices(building_layer, gid, resolution=640, + building_threshold=0, fips=None, fips_layer="cnty_fips", hsds=False): """Find buildings exclusion indices Parameters ---------- - excl_fpath : str - Filepath to exclusions h5 file. File must contain - `building_layer` and `tm_dset`. - building_layer : str + building_layer : np.ndarray Exclusion layer containing buildings from which turbine flicker exclusions will be computed. gid : int @@ -681,24 +678,19 @@ def _get_building_indices(excl_fpath, building_layer, gid, shape : tuple Full exclusion array shape. """ - with ExclusionLayers(excl_fpath, hsds=hsds) as f: - shape = f.shape - row_slice, col_slice = MeanWindDirectionsPoint.get_agg_slices( - gid, shape, resolution - ) - - sc_blds = f[building_layer, row_slice, col_slice] - # fips_vals = f[fips_layer, row_slice, col_slice] - + row_slice, col_slice = MeanWindDirectionsPoint.get_agg_slices( + gid, building_layer.shape, resolution + ) + sc_blds = building_layer[row_slice, col_slice] row_idx = np.array(range(*row_slice.indices(row_slice.stop))) col_idx = np.array(range(*col_slice.indices(col_slice.stop))) bld_row_idx, bld_col_idx = np.where((sc_blds > building_threshold) # & (fips_vals == fips # if fips is not None else True) - ) + ) - return row_idx[bld_row_idx], col_idx[bld_col_idx], shape + return row_idx[bld_row_idx], col_idx[bld_col_idx] def _create_excl_indices(bld_idx, flicker_shifts, shape): @@ -827,3 +819,57 @@ def _get_flicker_excl_shifts(shadow_flicker, flicker_threshold=30): col_shifts -= shape[1] // 2 return row_shifts, col_shifts + + +def load_building_layer(excl_fpath, building_layer=None, 
features_path=None, + hsds=False): + """Load building data from excl .h5 file or geotiff. + + Parameters + ---------- + excl_fpath : str + Filepath to exclusions h5 file. If `building_layer` is not + `None`, this file must contain `building_layer`. + building_layer : float | int + Exclusion layer containing buildings from which turbine + flicker exclusions will be computed. By default, `None`. + features_path : str | int + Path to input tif containing buildings from which turbine + flicker exclusions will be computed. By default, `None`. + hsds : bool, optional + Boolean flag to use h5pyd to handle .h5 'files' hosted on AWS + behind HSDS. By default `False`. + + Returns + ------- + np.ndarray + Array containing building data. + + Raises + ------ + RuntimeError + If not enough info is provided (all inputs are `None`), or too + much info is given (all inputs are not `None`). + RuntimeError + If `building_layer` is not None but also does not exist in + `excl_fpath` .h5 file. + """ + + if building_layer is not None: + with ExclusionLayers(excl_fpath, hsds=hsds) as f: + if building_layer not in f.layers: + msg = ("{} is not available in {}" + .format(building_layer, excl_fpath)) + logger.error(msg) + raise RuntimeError(msg) + logger.debug("Loading building data from {}, layer {}" + .format(excl_fpath, building_layer)) + return f[building_layer] + + if features_path is not None: + logger.debug("Loading building data from {}".format(features_path)) + with Geotiff(features_path) as f: + return f.values[0] + + raise RuntimeError("Must provide either `features_path` or " + "`building_layer` (but not both).") diff --git a/reVX/turbine_flicker/turbine_flicker_cli.py b/reVX/turbine_flicker/turbine_flicker_cli.py index 03178d1d4..4dd647090 100644 --- a/reVX/turbine_flicker/turbine_flicker_cli.py +++ b/reVX/turbine_flicker/turbine_flicker_cli.py @@ -14,7 +14,8 @@ from reVX.config.turbine_flicker import TurbineFlickerConfig from reVX.turbine_flicker.turbine_flicker import 
(FlickerRegulations, - TurbineFlicker) + TurbineFlicker, + load_building_layer) from reVX import __version__ logger = logging.getLogger(__name__) @@ -60,6 +61,7 @@ def run_local(ctx, config): ctx.invoke(local, excl_fpath=config.excl_fpath, res_fpath=config.res_fpath, + features_path=config.features_path, building_layer=config.building_layer, hub_height=config.hub_height, rotor_diameter=config.rotor_diameter, @@ -113,9 +115,16 @@ def from_config(ctx, config, verbose): @click.option('--res_fpath', '-ref', required=True, type=click.Path(exists=True), help="Filepath to .h5 file containing wind direction data") -@click.option('--building_layer', '-bldl', required=True, type=str, +@click.option('--features_path', '-feats', + type=click.Path(exists=True), + help=("Filepath to geotiff file containing buildings from " + "which turbine 'flicker exclusions will be computed. " + "If this input is provided, `building_layer` should " + "NOT be set.")) +@click.option('--building_layer', '-bldl', type=str, help=('Exclusion layer containing buildings from which turbine ' - 'flicker exclusions will be computed.')) + 'flicker exclusions will be computed. If this input is ' + 'provided, `features_path` should NOT be set.')) @click.option('--hub_height', '-h', required=True, type=int, help=('Hub-height in meters to compute turbine shadow flicker.')) @click.option('--rotor_diameter', '-rd', required=True, type=int, @@ -178,9 +187,9 @@ def from_config(ctx, config, verbose): @click.option('--verbose', '-v', is_flag=True, help='Flag to turn on debug logging. 
Default is not verbose.') @click.pass_context -def local(ctx, excl_fpath, res_fpath, building_layer, hub_height, - rotor_diameter, out_layer, out_dir, tm_dset, building_threshold, - flicker_threshold, resolution, grid_cell_size, +def local(ctx, excl_fpath, res_fpath, features_path, building_layer, + hub_height, rotor_diameter, out_layer, out_dir, tm_dset, + building_threshold, flicker_threshold, resolution, grid_cell_size, max_flicker_exclusion_range, regs_fpath, max_workers, replace, hsds, log_dir, verbose): """ @@ -190,7 +199,7 @@ def local(ctx, excl_fpath, res_fpath, building_layer, hub_height, # out_layer = "{}-{}m".format(building_layer, hub_height) if out_layer is not None: - out_layers = {"flicker.tiff": out_layer} + out_layers = {TurbineFlicker.DEFAULT_FEATURE_OUTFILE: out_layer} else: out_layers = {} @@ -227,8 +236,11 @@ def local(ctx, excl_fpath, res_fpath, building_layer, hub_height, regulations = FlickerRegulations(hub_height, rotor_diameter, flicker_threshold, regs_fpath) - - TurbineFlicker.run(excl_fpath, building_layer, out_dir, + building_layer = load_building_layer(excl_fpath=excl_fpath, + building_layer=building_layer, + features_path=features_path, + hsds=hsds) + TurbineFlicker.run(excl_fpath, features_path, out_dir, res_fpath=res_fpath, building_layer=building_layer, regulations=regulations, @@ -258,6 +270,7 @@ def get_node_cmd(config): 'local', '-excl {}'.format(SLURM.s(config.excl_fpath)), '-ref {}'.format(SLURM.s(config.res_fpath)), + '-feats {}'.format(SLURM.s(config.features_path)), '-bldl {}'.format(SLURM.s(config.building_layer)), '-h {}'.format(SLURM.s(config.hub_height)), '-rd {}'.format(SLURM.s(config.rotor_diameter)), diff --git a/tests/test_turbine_flicker.py b/tests/test_turbine_flicker.py index 4f2345a74..198380cfa 100644 --- a/tests/test_turbine_flicker.py +++ b/tests/test_turbine_flicker.py @@ -18,6 +18,7 @@ from reVX.turbine_flicker.turbine_flicker import ( FlickerRegulations, TurbineFlicker, + load_building_layer, 
_create_excl_indices, _get_building_indices, _get_flicker_excl_shifts, @@ -78,7 +79,8 @@ def test_shadow_flicker(flicker_threshold): wind_dir = np.zeros(8760) regulations = FlickerRegulations(HUB_HEIGHT, ROTOR_DIAMETER, flicker_threshold=flicker_threshold) - tf = TurbineFlicker(EXCL_H5, RES_H5, BLD_LAYER, regulations, + building_layer = load_building_layer(EXCL_H5, BLD_LAYER) + tf = TurbineFlicker(EXCL_H5, RES_H5, building_layer, regulations, grid_cell_size=90, max_flicker_exclusion_range=4_510) shadow_flicker = tf._compute_shadow_flicker(lat, lon, wind_dir) @@ -117,9 +119,9 @@ def test_excl_indices_mapping(): def test_get_building_indices(): """Test retrieving building indices. """ - row_idx, col_idx, __ = _get_building_indices(EXCL_H5, BLD_LAYER, 0, - resolution=64, - building_threshold=0) + building_layer = load_building_layer(EXCL_H5, BLD_LAYER) + row_idx, col_idx = _get_building_indices(building_layer, 0, resolution=64, + building_threshold=0) with ExclusionLayers(EXCL_H5) as f: buildings = f[BLD_LAYER, 0:64, 0:64] @@ -152,20 +154,34 @@ def test_turbine_flicker(max_workers): baseline = f[BASELINE] regulations = FlickerRegulations(HUB_HEIGHT, ROTOR_DIAMETER) - tf = TurbineFlicker(EXCL_H5, RES_H5, BLD_LAYER, regulations, + building_layer = load_building_layer(EXCL_H5, BLD_LAYER) + tf = TurbineFlicker(EXCL_H5, RES_H5, building_layer, regulations, resolution=64, tm_dset='techmap_wind', max_flicker_exclusion_range=4540) test = tf.compute_flicker_exclusions(max_workers=max_workers) assert np.allclose(baseline, test) +def test_turbine_flicker_bad_building_layer_input(): + """ + Test Turbine Flicker with bad input for max_flicker_exclusion_range + """ + regulations = FlickerRegulations(HUB_HEIGHT, ROTOR_DIAMETER) + with pytest.raises(RuntimeError) as excinfo: + TurbineFlicker(EXCL_H5, RES_H5, np.zeros((10, 10)), regulations) + + assert "Shape of building layer" in str(excinfo.value) + assert "does not match shape of ExclusionLayers" in str(excinfo.value) + + def 
test_turbine_flicker_bad_max_flicker_exclusion_range_input(): """ Test Turbine Flicker with bad input for max_flicker_exclusion_range """ regulations = FlickerRegulations(HUB_HEIGHT, ROTOR_DIAMETER) + building_layer = load_building_layer(EXCL_H5, BLD_LAYER) with pytest.raises(TypeError) as excinfo: - TurbineFlicker(EXCL_H5, RES_H5, BLD_LAYER, regulations, + TurbineFlicker(EXCL_H5, RES_H5, building_layer, regulations, max_flicker_exclusion_range='abc') assert "max_flicker_exclusion_range must be numeric" in str(excinfo.value) From fb8ee637524cb78c91c21ffe094489e9bd33f05d Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Tue, 20 Sep 2022 18:06:47 -0600 Subject: [PATCH 28/42] (WIP) Added `input_output_filenames` (WIP) --- reVX/setbacks/base.py | 21 ++++++++ reVX/turbine_flicker/turbine_flicker.py | 56 +++++++++++++-------- reVX/turbine_flicker/turbine_flicker_cli.py | 11 ++-- reVX/utilities/exclusions.py | 48 +++++++----------- tests/test_turbine_flicker.py | 12 ++--- 5 files changed, 86 insertions(+), 62 deletions(-) diff --git a/reVX/setbacks/base.py b/reVX/setbacks/base.py index d422fc13b..effc65c2b 100644 --- a/reVX/setbacks/base.py +++ b/reVX/setbacks/base.py @@ -2,6 +2,7 @@ """ Compute setbacks exclusions """ +import os from abc import abstractmethod from warnings import warn from itertools import product @@ -407,6 +408,26 @@ def compute_generic_exclusions(self, features_fpath): return self._rasterizer.rasterize(setbacks) + def input_output_filenames(self, out_dir, features_fpath): + """Generate pairs of input/output file names. + + Parameters + ---------- + out_dir : str + Path to output file directory. + features_fpath : str + Path to shape file with features to compute exclusions from. + + Yields + ------ + tuple + An input-output filename pair. 
+ """ + for fpath in self.get_feature_paths(features_fpath): + fn = os.path.basename(fpath) + geotiff = ".".join(fn.split('.')[:-1] + ['tif']) + yield fpath, os.path.join(out_dir, geotiff) + @staticmethod def get_feature_paths(features_fpath): """Ensure features path exists and return as list. diff --git a/reVX/turbine_flicker/turbine_flicker.py b/reVX/turbine_flicker/turbine_flicker.py index fe7dc296c..b7fcecace 100644 --- a/reVX/turbine_flicker/turbine_flicker.py +++ b/reVX/turbine_flicker/turbine_flicker.py @@ -531,34 +531,28 @@ def compute_generic_exclusions(self, features_fpath): return self.compute_flicker_exclusions(flicker_threshold=30, fips=None, max_workers=None) - @staticmethod - def get_feature_paths(features_fpath): - """Ensure features path exists and return as list. + def input_output_filenames(self, out_dir, features_fpath): + """Generate pairs of input/output file names. Parameters ---------- + out_dir : str + Path to output file directory. features_fpath : str - Path to features file. This path can contain - any pattern that can be used in the glob function. - For example, `/path/to/features/[A]*` would match - with all the features in the directory - `/path/to/features/` that start with "A". This input - can also be a directory, but that directory must ONLY - contain feature files. If your feature files are mixed - with other files or directories, use something like - `/path/to/features/*.geojson`. - - Returns - ------- - features_fpath : list - Features path as a list of strings. + Path to shape file with features to compute exclusions from. - Notes - ----- - This method is required for `run` classmethods for exclusion - features that are spread out over multiple files. + Yields + ------ + tuple + An input-output filename pair. 
""" - return [TurbineFlicker.DEFAULT_FEATURE_OUTFILE] + for fpath in [self.DEFAULT_FEATURE_OUTFILE]: + fn = flicker_fn_out(self._regulations.hub_height, + self._regulations.rotor_diameter) + geotiff = ".".join(fn.split('.')[:-1] + ['tif']) + yield fpath, os.path.join(out_dir, geotiff) + + # @classmethod # def run(cls, excl_fpath, res_fpath, building_layer, tm_dset='techmap_wtk', @@ -873,3 +867,21 @@ def load_building_layer(excl_fpath, building_layer=None, features_path=None, raise RuntimeError("Must provide either `features_path` or " "`building_layer` (but not both).") + + +def flicker_fn_out(hub_height, rotor_diameter): + """Generate flicker tiff outfile name. + + Parameters + ---------- + hub_height : int + Turbine hub-height (m). + rotor_diameter : int + Turbine rotor diameter (m). + + Returns + ------- + str + Name of flicker outfile. + """ + return "flicker_{}hh_{}rd.tif".format(hub_height, rotor_diameter) diff --git a/reVX/turbine_flicker/turbine_flicker_cli.py b/reVX/turbine_flicker/turbine_flicker_cli.py index 4dd647090..4cf525728 100644 --- a/reVX/turbine_flicker/turbine_flicker_cli.py +++ b/reVX/turbine_flicker/turbine_flicker_cli.py @@ -217,6 +217,8 @@ def local(ctx, excl_fpath, res_fpath, features_path, building_layer, logger.info('Computing Turbine Flicker Exclusions from structures in {}' .format(building_layer)) logger.debug('Flicker to be computed with:\n' + '- features_path = {}\n' + '- building_layer = {}\n' '- hub_height = {}\n' '- rotor_diameter = {}\n' '- tm_dset = {}\n' @@ -229,10 +231,11 @@ def local(ctx, excl_fpath, res_fpath, features_path, building_layer, '- using max_workers = {}\n' '- replace layer if needed = {}\n' '- out_layer = {}\n' - .format(hub_height, rotor_diameter, tm_dset, - building_threshold, flicker_threshold, resolution, - grid_cell_size, max_flicker_exclusion_range, - regs_fpath, max_workers, replace, out_layer)) + .format(features_path, building_layer, hub_height, + rotor_diameter, tm_dset, building_threshold, + 
flicker_threshold, resolution, grid_cell_size, + max_flicker_exclusion_range, regs_fpath, max_workers, + replace, out_layer)) regulations = FlickerRegulations(hub_height, rotor_diameter, flicker_threshold, regs_fpath) diff --git a/reVX/utilities/exclusions.py b/reVX/utilities/exclusions.py index 31ca27f4a..d1ad94259 100644 --- a/reVX/utilities/exclusions.py +++ b/reVX/utilities/exclusions.py @@ -93,33 +93,22 @@ def compute_generic_exclusions(self, features_fpath): """ raise NotImplementedError - @staticmethod + @abstractmethod - def get_feature_paths(features_fpath): - """Ensure features path exists and return as list. + def input_output_filenames(self, out_dir, features_fpath): + """Generate pairs of input/output file names. Parameters ---------- + out_dir : str + Path to output file directory. features_fpath : str - Path to features file. This path can contain - any pattern that can be used in the glob function. - For example, `/path/to/features/[A]*` would match - with all the features in the directory - `/path/to/features/` that start with "A". This input - can also be a directory, but that directory must ONLY - contain feature files. If your feature files are mixed - with other files or directories, use something like - `/path/to/features/*.geojson`. - - Returns - ------- - features_fpath : list - Features path as a list of strings. + Path to shape file with features to compute exclusions from. - Notes - ----- - This method is required for `run` classmethods for exclusion - features that are spread out over multiple files. + Yields + ------ + tuple + An input-output filename pair. 
""" raise NotImplementedError @@ -475,19 +464,18 @@ def run(cls, excl_fpath, features_path, out_dir, regulations, hsds=hsds, **kwargs) out_layers = out_layers or {} - for fpath in exclusions.get_feature_paths(features_path): - fn = os.path.basename(fpath) - geotiff = ".".join(fn.split('.')[:-1] + ['tif']) - geotiff = os.path.join(out_dir, geotiff) - if os.path.exists(geotiff) and not replace: + files = exclusions.input_output_filenames(out_dir, features_path) + for f_in, f_out in files: + if os.path.exists(f_out) and not replace: msg = ('{} already exists, exclusions will not be re-computed ' - 'unless replace=True'.format(geotiff)) + 'unless replace=True'.format(f_out)) logger.error(msg) else: logger.info("Computing exclusions from {} and saving " - "to {}".format(fpath, geotiff)) - exclusions.compute_exclusions(fpath, out_tiff=geotiff, - out_layer=out_layers.get(fn), + "to {}".format(f_in, f_out)) + out_layer = out_layers.get(os.path.basename(f_in)) + exclusions.compute_exclusions(f_in, out_tiff=f_out, + out_layer=out_layer, max_workers=max_workers, replace=replace) diff --git a/tests/test_turbine_flicker.py b/tests/test_turbine_flicker.py index 198380cfa..95a9bb77b 100644 --- a/tests/test_turbine_flicker.py +++ b/tests/test_turbine_flicker.py @@ -19,6 +19,7 @@ FlickerRegulations, TurbineFlicker, load_building_layer, + flicker_fn_out, _create_excl_indices, _get_building_indices, _get_flicker_excl_shifts, @@ -240,7 +241,7 @@ def test_cli_tiff(runner): excl_h5 = os.path.join(td, os.path.basename(EXCL_H5)) shutil.copy(EXCL_H5, excl_h5) # out_tiff = f"{BLD_LAYER}_{HUB_HEIGHT}hh_{ROTOR_DIAMETER}rd.tiff" - out_tiff = "flicker.tif" + out_tiff = flicker_fn_out(HUB_HEIGHT, ROTOR_DIAMETER) config = { "log_directory": td, "excl_fpath": excl_h5, @@ -283,7 +284,7 @@ def test_cli_tiff(runner): def test_cli_max_flicker_exclusion_range(runner): """Test Turbine Flicker CLI with max_flicker_exclusion_range value. 
""" - + def_tiff_name = flicker_fn_out(HUB_HEIGHT, ROTOR_DIAMETER) with tempfile.TemporaryDirectory() as td: excl_h5 = os.path.join(td, os.path.basename(EXCL_H5)) shutil.copy(EXCL_H5, excl_h5) @@ -312,7 +313,7 @@ def test_cli_max_flicker_exclusion_range(runner): traceback.print_exception(*result.exc_info) ) assert result.exit_code == 0, msg - shutil.move(os.path.join(td, "flicker.tif"), + shutil.move(os.path.join(td, def_tiff_name), os.path.join(td, out_tiff_def)) out_tiff_5k = f"{BLD_LAYER}_{HUB_HEIGHT}hh_{ROTOR_DIAMETER}rd_5k.tiff" @@ -327,7 +328,7 @@ def test_cli_max_flicker_exclusion_range(runner): traceback.print_exception(*result.exc_info) ) assert result.exit_code == 0, msg - shutil.move(os.path.join(td, "flicker.tif"), + shutil.move(os.path.join(td, def_tiff_name), os.path.join(td, out_tiff_5k)) out_tiff_20d = f"{BLD_LAYER}_{HUB_HEIGHT}hh_{ROTOR_DIAMETER}rd_5d.tiff" @@ -342,10 +343,9 @@ def test_cli_max_flicker_exclusion_range(runner): traceback.print_exception(*result.exc_info) ) assert result.exit_code == 0, msg - shutil.move(os.path.join(td, "flicker.tif"), + shutil.move(os.path.join(td, def_tiff_name), os.path.join(td, out_tiff_20d)) - with ExclusionLayers(EXCL_H5) as f: baseline = f[BASELINE] From 59bc95ec3fd7413db007a780f2f58e5ee5625ff2 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Thu, 22 Sep 2022 13:02:42 -0600 Subject: [PATCH 29/42] (WIP) `feature_fpath` now an instance attr (WIP) --- reVX/setbacks/base.py | 47 +++++++----------- reVX/setbacks/parcel_setbacks.py | 37 ++++---------- reVX/setbacks/rail_setbacks.py | 11 +---- reVX/setbacks/road_setbacks.py | 24 +++------ reVX/setbacks/structure_setbacks.py | 12 ++--- reVX/setbacks/transmission_setbacks.py | 11 +---- reVX/setbacks/water_setbacks.py | 11 +---- reVX/turbine_flicker/turbine_flicker.py | 26 +++++----- reVX/utilities/exclusions.py | 66 +++++++++++-------------- tests/test_setbacks.py | 5 +- 10 files changed, 89 insertions(+), 161 deletions(-) diff --git a/reVX/setbacks/base.py 
b/reVX/setbacks/base.py index effc65c2b..77a1c6024 100644 --- a/reVX/setbacks/base.py +++ b/reVX/setbacks/base.py @@ -316,31 +316,23 @@ def profile(self): """dict: Geotiff profile. """ return self._rasterizer.profile - def parse_features(self, features_fpath): + def parse_features(self): """Method to parse features. - Parameters - ---------- - features_fpath : str - Path to file containing features to setback from. - Returns ------- `geopandas.GeoDataFrame` Geometries of features to setback from in exclusion coordinate system. """ - return gpd.read_file(features_fpath).to_crs(crs=self.profile['crs']) + return (gpd.read_file(self._features_fpath) + .to_crs(crs=self.profile['crs'])) - def pre_process_regulations(self, features_fpath): + def pre_process_regulations(self): """Reduce regulations to state corresponding to features_fpath. - Parameters - ---------- - features_fpath : str - Path to shape file with features to compute setbacks from. """ - mask = self._regulation_table_mask(features_fpath) + mask = self._regulation_table_mask() if not mask.any(): msg = "Found no local regulations!" logger.warning(msg) @@ -351,7 +343,7 @@ def pre_process_regulations(self, features_fpath): logger.debug('Computing setbacks for regulations in {} counties' .format(len(self.regulations_table))) - def compute_local_exclusions(self, regulation_value, cnty, features_fpath): + def compute_local_exclusions(self, regulation_value, cnty): """Compute local features setbacks. This method will compute the setbacks using a county-specific @@ -365,8 +357,6 @@ def compute_local_exclusions(self, regulation_value, cnty, features_fpath): Setback distance in meters. cnty : geopandas.GeoDataFrame Regulations for a single county. 
- features_fpath : str - Path to shape file with features to compute exclusions from Returns ------- @@ -375,7 +365,7 @@ def compute_local_exclusions(self, regulation_value, cnty, features_fpath): """ logger.debug('- Computing setbacks for county FIPS {}' .format(cnty.iloc[0]['FIPS'])) - features = self.parse_features(features_fpath) + features = self.parse_features() idx = features.sindex.intersection(cnty.total_bounds) features = features.iloc[list(idx)].copy() log_mem(logger) @@ -383,17 +373,12 @@ def compute_local_exclusions(self, regulation_value, cnty, features_fpath): features = list(features.buffer(regulation_value)) return self._rasterizer.rasterize(features) - def compute_generic_exclusions(self, features_fpath): + def compute_generic_exclusions(self): """Compute generic setbacks. This method will compute the setbacks using a generic setback of `base_setback_dist * multiplier`. - Parameters - ---------- - features_fpath : str - Path to shape file with features to compute setbacks from. - Returns ------- setbacks : ndarray @@ -403,7 +388,7 @@ def compute_generic_exclusions(self, features_fpath): if np.isclose(self._regulations.generic, 0): return self._rasterizer.rasterize(shapes=None) - setback_features = self.parse_features(features_fpath) + setback_features = self.parse_features() setbacks = list(setback_features.buffer(self._regulations.generic)) return self._rasterizer.rasterize(setbacks) @@ -416,7 +401,15 @@ def input_output_filenames(self, out_dir, features_fpath): out_dir : str Path to output file directory. features_fpath : str - Path to shape file with features to compute exclusions from. + Path to features file. This path can contain + any pattern that can be used in the glob function. + For example, `/path/to/features/[A]*` would match + with all the features in the directory + `/path/to/features/` that start with "A". This input + can also be a directory, but that directory must ONLY + contain feature files. 
If your feature files are mixed + with other files or directories, use something like + `/path/to/features/*.geojson`. Yields ------ @@ -474,9 +467,7 @@ def _feature_filter(features, cnty): """Filter the features given a county.""" return features_with_centroid_in_county(features, cnty) - # pylint: disable=unused-argument - @staticmethod @abstractmethod - def _regulation_table_mask(features_fpath): + def _regulation_table_mask(self): """Return the regulation table mask for setback feature. """ raise NotImplementedError diff --git a/reVX/setbacks/parcel_setbacks.py b/reVX/setbacks/parcel_setbacks.py index ab5d589ad..775aa3027 100644 --- a/reVX/setbacks/parcel_setbacks.py +++ b/reVX/setbacks/parcel_setbacks.py @@ -18,17 +18,12 @@ class ParcelSetbacks(AbstractBaseSetbacks): """Parcel setbacks - facilitates the use of negative buffers. """ - def compute_generic_exclusions(self, features_fpath): + def compute_generic_exclusions(self): """Compute generic setbacks. This method will compute the setbacks using a generic setback of `base_setback_dist * multiplier`. - Parameters - ---------- - features_fpath : str - Path to shape file with features to compute setbacks from. - Returns ------- setbacks : ndarray @@ -38,12 +33,12 @@ def compute_generic_exclusions(self, features_fpath): if np.isclose(self._regulations.generic, 0): return self._rasterizer.rasterize(shapes=None) - features = self.parse_features(features_fpath) + features = self.parse_features() setbacks = features.buffer(0).difference( features.buffer(-1 * self._regulations.generic)) return self._rasterizer.rasterize(list(setbacks)) - def compute_local_exclusions(self, regulation_value, cnty, features_fpath): + def compute_local_exclusions(self, regulation_value, cnty): """Compute local features setbacks. This method will compute the setbacks using a county-specific @@ -57,8 +52,6 @@ def compute_local_exclusions(self, regulation_value, cnty, features_fpath): Setback distance in meters. 
cnty : geopandas.GeoDataFrame Regulations for a single county. - features_fpath : str - Path to shape file with features to compute exclusions from Returns ------- @@ -67,7 +60,7 @@ def compute_local_exclusions(self, regulation_value, cnty, features_fpath): """ logger.debug('- Computing setbacks for county FIPS {}' .format(cnty.iloc[0]['FIPS'])) - features = self.parse_features(features_fpath) + features = self.parse_features() idx = features.sindex.intersection(cnty.total_bounds) features = features.iloc[list(idx)].copy() log_mem(logger) @@ -76,16 +69,9 @@ def compute_local_exclusions(self, regulation_value, cnty, features_fpath): setbacks = features.buffer(0).difference(features.buffer(-1 * setback)) return self._rasterizer.rasterize(list(setbacks)) - def _regulation_table_mask(self, features_fpath): - """Return the regulation table mask for setback feature. - - Parameters - ---------- - features_fpath : str - Path to shape file with features to compute setbacks from. - This file needs to have the state in the filename. - """ - state = os.path.basename(features_fpath).split('.')[0] + def _regulation_table_mask(self): + """Return the regulation table mask for setback feature. """ + state = os.path.basename(self._features_fpath).split('.')[0] state = _get_state_name(state) states = self.regulations_table.State.apply(_get_state_name) states = states == state @@ -93,21 +79,16 @@ def _regulation_table_mask(self, features_fpath): == 'property line') return states & property_line - def parse_features(self, features_fpath): + def parse_features(self): """Method to parse features. - Parameters - ---------- - features_fpath : str - Path to file containing features to setback from. - Returns ------- `geopandas.GeoDataFrame` Geometries of features to setback from in exclusion coordinate system. 
""" - features = gpd.read_file(features_fpath) + features = gpd.read_file(self._features_fpath) if features.crs is None: features = features.set_crs("EPSG:4326") return features.to_crs(crs=self._rasterizer.profile["crs"]) diff --git a/reVX/setbacks/rail_setbacks.py b/reVX/setbacks/rail_setbacks.py index fdef58e1c..30bb1ecac 100644 --- a/reVX/setbacks/rail_setbacks.py +++ b/reVX/setbacks/rail_setbacks.py @@ -21,13 +21,6 @@ def _feature_filter(features, cnty): """Filter the features given a county.""" return features_clipped_to_county(features, cnty) - # pylint: disable=unused-argument - def _regulation_table_mask(self, features_fpath): - """Return the regulation table mask for setback feature. - - Parameters - ---------- - features_fpath : str - Path to shape file with features to compute setbacks from - """ + def _regulation_table_mask(self): + """Return the regulation table mask for setback feature. """ return self.regulations_table['Feature Type'] == 'railroads' diff --git a/reVX/setbacks/road_setbacks.py b/reVX/setbacks/road_setbacks.py index 091598a9c..ce306f959 100644 --- a/reVX/setbacks/road_setbacks.py +++ b/reVX/setbacks/road_setbacks.py @@ -18,24 +18,20 @@ class RoadSetbacks(AbstractBaseSetbacks): Road setbacks """ - def parse_features(self, features_fpath): + def parse_features(self): """ Load roads from gdb file, convert to exclusions coordinate system. - Parameters - ---------- - features_fpath : str - Path to here streets gdb file for given state. 
- Returns ------- roads : `geopandas.GeoDataFrame.sindex` Geometries for roads in gdb file, in exclusion coordinate system """ - lyr = fiona.listlayers(features_fpath)[0] - roads = gpd.read_file(features_fpath, driver='FileGDB', layer=lyr) + lyr = fiona.listlayers(self._features_fpath)[0] + roads = gpd.read_file(self._features_fpath, + driver='FileGDB', layer=lyr) return roads.to_crs(crs=self._rasterizer.profile["crs"]) @@ -69,15 +65,9 @@ def get_feature_paths(features_fpath): return file_paths - def _regulation_table_mask(self, features_fpath): - """Return the regulation table mask for setback feature. - - Parameters - ---------- - features_fpath : str - Path to shape file with features to compute setbacks from - """ - state = features_fpath.split('.')[0].split('_')[-1] + def _regulation_table_mask(self): + """Return the regulation table mask for setback feature. """ + state = self._features_fpath.split('.')[0].split('_')[-1] if 'Abbr' not in self.regulations_table: states = self.regulations_table['State'].str.title() self.regulations_table['Abbr'] = states.map(STATES_ABBR_MAP) diff --git a/reVX/setbacks/structure_setbacks.py b/reVX/setbacks/structure_setbacks.py index 72222e86d..4b5ee579e 100644 --- a/reVX/setbacks/structure_setbacks.py +++ b/reVX/setbacks/structure_setbacks.py @@ -70,15 +70,9 @@ def get_feature_paths(features_fpath): return file_paths - def _regulation_table_mask(self, features_fpath): - """Return the regulation table mask for setback feature. - - Parameters - ---------- - features_fpath : str - Path to shape file with features to compute setbacks from - """ - state_name = os.path.basename(features_fpath).split('.')[0] + def _regulation_table_mask(self): + """Return the regulation table mask for setback feature. 
""" + state_name = os.path.basename(self._features_fpath).split('.')[0] state = self._split_state_name(state_name) states = self.regulations_table["State"] == state structures = self.regulations_table['Feature Type'] == 'structures' diff --git a/reVX/setbacks/transmission_setbacks.py b/reVX/setbacks/transmission_setbacks.py index 86ec18bc9..c11d375bf 100644 --- a/reVX/setbacks/transmission_setbacks.py +++ b/reVX/setbacks/transmission_setbacks.py @@ -21,13 +21,6 @@ def _feature_filter(features, cnty): """Filter the features given a county.""" return features_clipped_to_county(features, cnty) - # pylint: disable=unused-argument - def _regulation_table_mask(self, features_fpath): - """Return the regulation table mask for setback feature. - - Parameters - ---------- - features_fpath : str - Path to shape file with features to compute setbacks from - """ + def _regulation_table_mask(self): + """Return the regulation table mask for setback feature. """ return self.regulations_table['Feature Type'] == 'transmission' diff --git a/reVX/setbacks/water_setbacks.py b/reVX/setbacks/water_setbacks.py index a54c9647e..1bbf9ba8d 100644 --- a/reVX/setbacks/water_setbacks.py +++ b/reVX/setbacks/water_setbacks.py @@ -18,13 +18,6 @@ def _feature_filter(features, cnty): """Filter the features given a county.""" return features_clipped_to_county(features, cnty) - # pylint: disable=unused-argument - def _regulation_table_mask(self, features_fpath): - """Return the regulation table mask for setback feature. - - Parameters - ---------- - features_fpath : str - Path to shape file with features to compute setbacks from - """ + def _regulation_table_mask(self): + """Return the regulation table mask for setback feature. 
""" return self.regulations_table['Feature Type'] == 'water' diff --git a/reVX/turbine_flicker/turbine_flicker.py b/reVX/turbine_flicker/turbine_flicker.py index b7fcecace..63a5b22cc 100644 --- a/reVX/turbine_flicker/turbine_flicker.py +++ b/reVX/turbine_flicker/turbine_flicker.py @@ -473,14 +473,8 @@ def profile(self): """dict: Geotiff profile. """ return self._profile - def pre_process_regulations(self, features_fpath): - """Reduce regulations to correct state and features. - - Parameters - ---------- - features_fpath : str - Path to shape file with features to compute exclusions from. - """ + def pre_process_regulations(self): + """Reduce regulations to correct state and features. """ self._fips_to_gid = {} reg_fips = self._regulations.FIPS.unique() @@ -493,7 +487,7 @@ def pre_process_regulations(self, features_fpath): # TODO: Turn this into a warning assert len(self._fips_to_gid) == len(reg_fips), "Some FIPS not found" - def compute_local_exclusions(self, regulation_value, cnty, features_fpath): + def compute_local_exclusions(self, regulation_value, cnty): """Compute local flicker exclusions. This method computes a flicker exclusion layer using the @@ -517,7 +511,7 @@ def compute_local_exclusions(self, regulation_value, cnty, features_fpath): return self.compute_flicker_exclusions( flicker_threshold=regulation_value, fips=cnty_fips, max_workers=1) - def compute_generic_exclusions(self, features_fpath): + def compute_generic_exclusions(self): """Compute generic flicker exclusions. This method will compute a generic flicker exclusion layer. @@ -538,8 +532,16 @@ def input_output_filenames(self, out_dir, features_fpath): ---------- out_dir : str Path to output file directory. - features_fpath : str - Path to shape file with features to compute exclusions from. + features_fpath : : str + Path to features file. This path can contain + any pattern that can be used in the glob function. 
+ For example, `/path/to/features/[A]*` would match + with all the features in the directory + `/path/to/features/` that start with "A". This input + can also be a directory, but that directory must ONLY + contain feature files. If your feature files are mixed + with other files or directories, use something like + `/path/to/features/*.geojson`. Yields ------ diff --git a/reVX/utilities/exclusions.py b/reVX/utilities/exclusions.py index d1ad94259..f5011f852 100644 --- a/reVX/utilities/exclusions.py +++ b/reVX/utilities/exclusions.py @@ -37,21 +37,16 @@ def profile(self): raise NotImplementedError @abstractmethod - def pre_process_regulations(self, features_fpath): + def pre_process_regulations(self): """Reduce regulations to correct state and features. When implementing this method, make sure to update `self._regulations.regulations`. - - Parameters - ---------- - features_fpath : str - Path to shape file with features to compute exclusions from. """ raise NotImplementedError @abstractmethod - def compute_local_exclusions(self, regulation_value, cnty, features_fpath): + def compute_local_exclusions(self, regulation_value, cnty): """Compute local feature exclusions. This method should compute the exclusions using the information @@ -63,8 +58,6 @@ def compute_local_exclusions(self, regulation_value, cnty, features_fpath): Regulation value for county. cnty : geopandas.GeoDataFrame Regulations for a single county. - features_fpath : str - Path to shape file with features to compute exclusions from Returns @@ -75,17 +68,12 @@ def compute_local_exclusions(self, regulation_value, cnty, features_fpath): raise NotImplementedError @abstractmethod - def compute_generic_exclusions(self, features_fpath): + def compute_generic_exclusions(self): """Compute generic exclusions. This method should compute the exclusions using a generic regulation value (`self._regulations.generic`). 
- Parameters - ---------- - features_fpath : str - Path to shape file with features to compute exclusions from. - Returns ------- exclusions : ndarray @@ -103,7 +91,15 @@ def input_output_filenames(self, out_dir, features_fpath): out_dir : str Path to output file directory. features_fpath : str - Path to shape file with features to compute exclusions from. + Path to features file. This path can contain + any pattern that can be used in the glob function. + For example, `/path/to/features/[A]*` would match + with all the features in the directory + `/path/to/features/` that start with "A". This input + can also be a directory, but that directory must ONLY + contain feature files. If your feature files are mixed + with other files or directories, use something like + `/path/to/features/*.geojson`. Yields ------ @@ -136,7 +132,7 @@ def __init__(self, excl_fpath, regulations, hsds=False): self._excl_fpath = excl_fpath self._regulations = regulations self._hsds = hsds - self._fips = None + self._fips = self._features_fpath = None self._process_regulations(regulations.regulations) def __repr__(self): @@ -261,13 +257,11 @@ def _write_layer(self, out_layer, exclusions, replace=False): self.profile, exclusions, description=description) - def compute_all_local_exclusions(self, features_fpath, max_workers=None): + def compute_all_local_exclusions(self, max_workers=None): """Compute local exclusions for all counties either. 
Parameters ---------- - features_fpath : str - Path to shape file with features to compute exclusions from max_workers : int, optional Number of workers to use for exclusions computation, if 1 run in serial, if > 1 run in parallel with that many @@ -292,7 +286,7 @@ def compute_all_local_exclusions(self, features_fpath, max_workers=None): futures = {} for exclusion, cnty in self._regulations: future = exe.submit(self.compute_local_exclusions, - exclusion, cnty, features_fpath) + exclusion, cnty) futures[future] = cnty['FIPS'].unique() for i, future in enumerate(as_completed(futures)): @@ -304,8 +298,8 @@ def compute_all_local_exclusions(self, features_fpath, max_workers=None): else: logger.info('Computing local exclusions in serial') for i, (exclusion, cnty) in enumerate(self._regulations): - local_exclusions = self.compute_local_exclusions( - exclusion, cnty, features_fpath) + local_exclusions = self.compute_local_exclusions(exclusion, + cnty) exclusions = self._combine_exclusions(exclusions, local_exclusions, cnty['FIPS'].unique()) @@ -348,8 +342,8 @@ def compute_exclusions(self, features_fpath, max_workers=None, exclusions : ndarray Raster array of exclusions """ - exclusions = self._compute_merged_exclusions(features_fpath, - max_workers=max_workers) + self._features_fpath = features_fpath + exclusions = self._compute_merged_exclusions(max_workers=max_workers) if out_layer is not None: logger.info('Saving exclusion layer to {} as {}' @@ -362,12 +356,12 @@ def compute_exclusions(self, features_fpath, max_workers=None, return exclusions - def _compute_merged_exclusions(self, features_fpath, max_workers=None): + def _compute_merged_exclusions(self, max_workers=None): """Compute and merge local and generic exclusions, if necessary. 
""" mw = max_workers if self._regulations.locals_exist: - self.pre_process_regulations(features_fpath) + self.pre_process_regulations() generic_exclusions_exist = self._regulations.generic_exists local_exclusions_exist = self._regulations.locals_exist @@ -379,24 +373,20 @@ def _compute_merged_exclusions(self, features_fpath, max_workers=None): raise ValueError(msg) if generic_exclusions_exist and not local_exclusions_exist: - return self.compute_generic_exclusions(features_fpath) + return self.compute_generic_exclusions() if local_exclusions_exist and not generic_exclusions_exist: - return self.compute_all_local_exclusions(features_fpath, - max_workers=mw) + return self.compute_all_local_exclusions(max_workers=mw) - generic_exclusions = self.compute_generic_exclusions(features_fpath) - local_exclusions = self.compute_all_local_exclusions(features_fpath, - max_workers=mw) - return self._merge_exclusions(generic_exclusions, local_exclusions, - features_fpath) + generic_exclusions = self.compute_generic_exclusions() + local_exclusions = self.compute_all_local_exclusions(max_workers=mw) + return self._merge_exclusions(generic_exclusions, local_exclusions,) - def _merge_exclusions(self, generic_exclusions, local_exclusions, - features_fpath): + def _merge_exclusions(self, generic_exclusions, local_exclusions): """Merge local exclusions onto the generic exclusions.""" logger.info('Merging local exclusions onto the generic exclusions') - self.pre_process_regulations(features_fpath) + self.pre_process_regulations() local_fips = self.regulations_table["FIPS"].unique() return self._combine_exclusions(generic_exclusions, local_exclusions, local_fips) diff --git a/tests/test_setbacks.py b/tests/test_setbacks.py index f67ef0442..220c38141 100644 --- a/tests/test_setbacks.py +++ b/tests/test_setbacks.py @@ -421,7 +421,8 @@ def test_generic_parcels_with_invalid_shape_input(): setbacks = ParcelSetbacks(EXCL_H5, regulations) # Ensure data we are using contains invalid shapes - 
parcels = setbacks.parse_features(parcel_path) + setbacks._features_fpath = parcel_path + parcels = setbacks.parse_features() assert not parcels.geometry.is_valid.any() # This code would throw an error if invalid shape not handled properly @@ -778,7 +779,7 @@ def test_merged_setbacks(setbacks_class, regulations_class, features_path, merged_layer = merged_setbacks.compute_exclusions(features_path, max_workers=1) - local_setbacks.pre_process_regulations(features_path) + local_setbacks.pre_process_regulations() feats = local_setbacks.regulations_table # make sure the comparison layers match what we expect From 7551c083dc5244050dd6f71317da780730278390 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Thu, 22 Sep 2022 13:17:17 -0600 Subject: [PATCH 30/42] Linter fixes --- reVX/utilities/exclusions.py | 1 - tests/test_setbacks.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/reVX/utilities/exclusions.py b/reVX/utilities/exclusions.py index f5011f852..f3c2d55b3 100644 --- a/reVX/utilities/exclusions.py +++ b/reVX/utilities/exclusions.py @@ -81,7 +81,6 @@ def compute_generic_exclusions(self): """ raise NotImplementedError - @abstractmethod def input_output_filenames(self, out_dir, features_fpath): """Generate pairs of input/output file names. 
diff --git a/tests/test_setbacks.py b/tests/test_setbacks.py index 220c38141..cfa9caf2d 100644 --- a/tests/test_setbacks.py +++ b/tests/test_setbacks.py @@ -286,7 +286,7 @@ def test_setbacks_saving_tiff_h5(): ParcelSetbacks.run(excl_fpath, feature_file, td, regs, out_layers={'Rhode_Island.gpkg': - "ri_parcel_setbacks"}) + "ri_parcel_setbacks"}) assert os.path.exists(os.path.join(td, "Rhode_Island.tif")) with Geotiff(os.path.join(td, "Rhode_Island.tif")) as tif: From 0780b26553cd3b45538d2b831aca468426a3f37a Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Thu, 22 Sep 2022 13:26:07 -0600 Subject: [PATCH 31/42] (WIP) Added `max_workers` to `compute_generic_exclusions` (WIP) --- reVX/setbacks/base.py | 2 +- reVX/setbacks/parcel_setbacks.py | 2 +- reVX/turbine_flicker/turbine_flicker.py | 13 +++++++++++-- reVX/utilities/exclusions.py | 14 +++++++++++--- 4 files changed, 24 insertions(+), 7 deletions(-) diff --git a/reVX/setbacks/base.py b/reVX/setbacks/base.py index 77a1c6024..9c3d980ee 100644 --- a/reVX/setbacks/base.py +++ b/reVX/setbacks/base.py @@ -373,7 +373,7 @@ def compute_local_exclusions(self, regulation_value, cnty): features = list(features.buffer(regulation_value)) return self._rasterizer.rasterize(features) - def compute_generic_exclusions(self): + def compute_generic_exclusions(self, **__): """Compute generic setbacks. This method will compute the setbacks using a generic setback diff --git a/reVX/setbacks/parcel_setbacks.py b/reVX/setbacks/parcel_setbacks.py index 775aa3027..9ad632cb3 100644 --- a/reVX/setbacks/parcel_setbacks.py +++ b/reVX/setbacks/parcel_setbacks.py @@ -18,7 +18,7 @@ class ParcelSetbacks(AbstractBaseSetbacks): """Parcel setbacks - facilitates the use of negative buffers. """ - def compute_generic_exclusions(self): + def compute_generic_exclusions(self, **__): """Compute generic setbacks. 
This method will compute the setbacks using a generic setback diff --git a/reVX/turbine_flicker/turbine_flicker.py b/reVX/turbine_flicker/turbine_flicker.py index 63a5b22cc..acd521e07 100644 --- a/reVX/turbine_flicker/turbine_flicker.py +++ b/reVX/turbine_flicker/turbine_flicker.py @@ -511,11 +511,19 @@ def compute_local_exclusions(self, regulation_value, cnty): return self.compute_flicker_exclusions( flicker_threshold=regulation_value, fips=cnty_fips, max_workers=1) - def compute_generic_exclusions(self): + def compute_generic_exclusions(self, max_workers=None): """Compute generic flicker exclusions. This method will compute a generic flicker exclusion layer. + Parameters + ---------- + max_workers : int, optional + Number of workers to use for exclusions computation, if 1 + run in serial, if > 1 run in parallel with that many + workers, if `None` run in parallel on all available cores. + By default `None`. + Returns ------- flicker : ndarray @@ -523,7 +531,8 @@ def compute_generic_exclusions(self): """ logger.info('Computing generic flicker exclusions...') return self.compute_flicker_exclusions(flicker_threshold=30, - fips=None, max_workers=None) + fips=None, + max_workers=max_workers) def input_output_filenames(self, out_dir, features_fpath): """Generate pairs of input/output file names. diff --git a/reVX/utilities/exclusions.py b/reVX/utilities/exclusions.py index f3c2d55b3..4504ba1ca 100644 --- a/reVX/utilities/exclusions.py +++ b/reVX/utilities/exclusions.py @@ -68,12 +68,20 @@ def compute_local_exclusions(self, regulation_value, cnty): raise NotImplementedError @abstractmethod - def compute_generic_exclusions(self): + def compute_generic_exclusions(self, max_workers=None): """Compute generic exclusions. This method should compute the exclusions using a generic regulation value (`self._regulations.generic`). 
+ Parameters + ---------- + max_workers : int, optional + Number of workers to use for exclusions computation, if 1 + run in serial, if > 1 run in parallel with that many + workers, if `None` run in parallel on all available cores. + By default `None`. + Returns ------- exclusions : ndarray @@ -372,12 +380,12 @@ def _compute_merged_exclusions(self, max_workers=None): raise ValueError(msg) if generic_exclusions_exist and not local_exclusions_exist: - return self.compute_generic_exclusions() + return self.compute_generic_exclusions(max_workers=mw) if local_exclusions_exist and not generic_exclusions_exist: return self.compute_all_local_exclusions(max_workers=mw) - generic_exclusions = self.compute_generic_exclusions() + generic_exclusions = self.compute_generic_exclusions(max_workers=mw) local_exclusions = self.compute_all_local_exclusions(max_workers=mw) return self._merge_exclusions(generic_exclusions, local_exclusions,) From cfaf64e0b91d68c2bb45c0472a67ebadb18e1931 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Fri, 30 Sep 2022 09:56:20 -0600 Subject: [PATCH 32/42] (WIP) Profile now a property of exclusions (WIP) --- reVX/setbacks/base.py | 5 ----- reVX/turbine_flicker/turbine_flicker.py | 7 ------- reVX/utilities/exclusions.py | 19 ++++++++++++------- 3 files changed, 12 insertions(+), 19 deletions(-) diff --git a/reVX/setbacks/base.py b/reVX/setbacks/base.py index 9c3d980ee..8f5c2e0b0 100644 --- a/reVX/setbacks/base.py +++ b/reVX/setbacks/base.py @@ -311,11 +311,6 @@ def __repr__(self): msg = "{} for {}".format(self.__class__.__name__, self._excl_fpath) return msg - @property - def profile(self): - """dict: Geotiff profile. """ - return self._rasterizer.profile - def parse_features(self): """Method to parse features. 
diff --git a/reVX/turbine_flicker/turbine_flicker.py b/reVX/turbine_flicker/turbine_flicker.py index acd521e07..6ec86f722 100644 --- a/reVX/turbine_flicker/turbine_flicker.py +++ b/reVX/turbine_flicker/turbine_flicker.py @@ -142,7 +142,6 @@ def __init__(self, excl_fpath, res_fpath, building_layer, regulations, self._max_flicker_exclusion_range = ( self._parse_max_flicker_exclusion_range( max_flicker_exclusion_range)) - self._profile = None self._flicker_preflight_check(tm_dset=tm_dset) self._sc_points = self._get_sc_points(tm_dset=tm_dset) self._fips_to_gid = {} @@ -177,7 +176,6 @@ def _flicker_preflight_check(self, tm_dset='techmap_wtk'): with ExclusionLayers(self._excl_fpath, hsds=self._hsds) as f: layers = f.layers exclusion_shape = f.shape - self._profile = f.profile if self._bld_layer.shape != exclusion_shape: msg = ("Shape of building layer {} does not match shape of " @@ -468,11 +466,6 @@ def compute_flicker_exclusions(self, flicker_threshold=30, fips=None, return flicker_arr - @property - def profile(self): - """dict: Geotiff profile. """ - return self._profile - def pre_process_regulations(self): """Reduce regulations to correct state and features. """ diff --git a/reVX/utilities/exclusions.py b/reVX/utilities/exclusions.py index 4504ba1ca..625d248ad 100644 --- a/reVX/utilities/exclusions.py +++ b/reVX/utilities/exclusions.py @@ -30,12 +30,6 @@ class AbstractExclusionCalculatorInterface(ABC): """Abstract Exclusion Calculator Interface. """ - @property - @abstractmethod - def profile(self): - """dict: Geotiff profile. """ - raise NotImplementedError - @abstractmethod def pre_process_regulations(self): """Reduce regulations to correct state and features. 
@@ -139,13 +133,19 @@ def __init__(self, excl_fpath, regulations, hsds=False): self._excl_fpath = excl_fpath self._regulations = regulations self._hsds = hsds - self._fips = self._features_fpath = None + self._fips = self._features_fpath = self._profile = None + self._set_profile() self._process_regulations(regulations.regulations) def __repr__(self): msg = "{} for {}".format(self.__class__.__name__, self._excl_fpath) return msg + def _set_profile(self): + """Extract profile from excl h5.""" + with ExclusionLayers(self._excl_fpath, hsds=self._hsds) as f: + self._profile = f.profile + def _process_regulations(self, regulations_df): """Parse the county regulations. @@ -203,6 +203,11 @@ def _process_regulations(self, regulations_df): regulations_df = regulations_df.to_crs(crs=self.profile['crs']) self._regulations.regulations = regulations_df + @property + def profile(self): + """dict: Geotiff profile. """ + return self._profile + @property def regulations_table(self): """Regulations table. From 30d88314921bc5d255f5a3d0b770fe475c7f5bee Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Fri, 30 Sep 2022 10:24:33 -0600 Subject: [PATCH 33/42] (WIP) Added `no_exclusions_array` to AEC interface (WIP) --- reVX/setbacks/base.py | 11 +++++++++-- reVX/turbine_flicker/turbine_flicker.py | 2 +- reVX/utilities/exclusions.py | 12 +++++++++--- 3 files changed, 19 insertions(+), 6 deletions(-) diff --git a/reVX/setbacks/base.py b/reVX/setbacks/base.py index 8f5c2e0b0..17e7b02d4 100644 --- a/reVX/setbacks/base.py +++ b/reVX/setbacks/base.py @@ -323,6 +323,11 @@ def parse_features(self): return (gpd.read_file(self._features_fpath) .to_crs(crs=self.profile['crs'])) + @property + def no_exclusions_array(self): + """np.array: Array representing no exclusions. """ + return self._rasterizer.rasterize(shapes=None) + def pre_process_regulations(self): """Reduce regulations to state corresponding to features_fpath. 
@@ -380,8 +385,10 @@ def compute_generic_exclusions(self, **__): Raster array of setbacks """ logger.info('Computing generic setbacks') - if np.isclose(self._regulations.generic, 0): - return self._rasterizer.rasterize(shapes=None) + generic_regs_dne = (self._regulations.generic is None + or np.isclose(self._regulations.generic, 0)) + if generic_regs_dne: + return self.no_exclusions_array setback_features = self.parse_features() setbacks = list(setback_features.buffer(self._regulations.generic)) diff --git a/reVX/turbine_flicker/turbine_flicker.py b/reVX/turbine_flicker/turbine_flicker.py index 6ec86f722..20bfdd1dd 100644 --- a/reVX/turbine_flicker/turbine_flicker.py +++ b/reVX/turbine_flicker/turbine_flicker.py @@ -366,7 +366,7 @@ def compute_flicker_exclusions(self, flicker_threshold=30, fips=None, else: gids = self._fips_to_gid.get(fips, []) - flicker_arr = np.ones(self._bld_layer.shape, dtype=np.uint8) + flicker_arr = self.no_exclusions_array if max_workers > 1: msg = ('Computing exclusions from {} based on {}m hub height ' 'turbines with {}m rotor diameters in parallel using {} ' diff --git a/reVX/utilities/exclusions.py b/reVX/utilities/exclusions.py index 625d248ad..88fe076bd 100644 --- a/reVX/utilities/exclusions.py +++ b/reVX/utilities/exclusions.py @@ -30,6 +30,12 @@ class AbstractExclusionCalculatorInterface(ABC): """Abstract Exclusion Calculator Interface. """ + @property + @abstractmethod + def no_exclusions_array(self): + """np.array: Array representing no exclusions. """ + raise NotImplementedError + @abstractmethod def pre_process_regulations(self): """Reduce regulations to correct state and features. @@ -53,7 +59,6 @@ def compute_local_exclusions(self, regulation_value, cnty): cnty : geopandas.GeoDataFrame Regulations for a single county. 
- Returns ------- exclusions : list @@ -388,11 +393,12 @@ def _compute_merged_exclusions(self, max_workers=None): return self.compute_generic_exclusions(max_workers=mw) if local_exclusions_exist and not generic_exclusions_exist: - return self.compute_all_local_exclusions(max_workers=mw) + local_excl = self.compute_all_local_exclusions(max_workers=mw) + return self._merge_exclusions(self.no_exclusions_array, local_excl) generic_exclusions = self.compute_generic_exclusions(max_workers=mw) local_exclusions = self.compute_all_local_exclusions(max_workers=mw) - return self._merge_exclusions(generic_exclusions, local_exclusions,) + return self._merge_exclusions(generic_exclusions, local_exclusions) def _merge_exclusions(self, generic_exclusions, local_exclusions): """Merge local exclusions onto the generic exclusions.""" From 38e02ab29c524e438eed69963741edc94a6e7ea2 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Fri, 30 Sep 2022 13:24:12 -0600 Subject: [PATCH 34/42] (WIP) Major regulations file refactor (WIP) --- reVX/config/setbacks.py | 9 +- reVX/setbacks/base.py | 9 +- reVX/setbacks/regulations.py | 197 ++++---------- reVX/setbacks/setbacks_cli.py | 12 +- reVX/turbine_flicker/regulations.py | 71 +++++ reVX/turbine_flicker/turbine_flicker.py | 127 ++++----- reVX/turbine_flicker/turbine_flicker_cli.py | 4 +- reVX/utilities/exclusions.py | 236 +---------------- reVX/utilities/regulations.py | 155 +++++++++++ .../turbine_flicker/blue_creek_regs_value.csv | 8 + tests/test_regulations.py | 166 ++++++++++++ tests/test_setbacks.py | 242 +++++++----------- tests/test_turbine_flicker.py | 101 +++++++- 13 files changed, 718 insertions(+), 619 deletions(-) create mode 100644 reVX/turbine_flicker/regulations.py create mode 100644 reVX/utilities/regulations.py create mode 100644 tests/data/turbine_flicker/blue_creek_regs_value.csv create mode 100644 tests/test_regulations.py diff --git a/reVX/config/setbacks.py b/reVX/config/setbacks.py index 6da0271b7..57bb36a2e 100644 --- 
a/reVX/config/setbacks.py +++ b/reVX/config/setbacks.py @@ -5,7 +5,7 @@ import logging from reV.config.base_analysis_config import AnalysisConfig -from reVX.setbacks.regulations import validate_regulations_input +from reVX.setbacks.regulations import validate_setback_regulations_input from reVX.setbacks import SETBACKS logger = logging.getLogger(__name__) @@ -22,9 +22,10 @@ def _preflight(self): Run a preflight check for extra requirements based on feature type. """ super()._preflight() - validate_regulations_input(base_setback_dist=self.base_setback_dist, - hub_height=self.hub_height, - rotor_diameter=self.rotor_diameter) + validate_setback_regulations_input( + base_setback_dist=self.base_setback_dist, + hub_height=self.hub_height, + rotor_diameter=self.rotor_diameter) @property def feature_type(self): diff --git a/reVX/setbacks/base.py b/reVX/setbacks/base.py index 17e7b02d4..75eed1f23 100644 --- a/reVX/setbacks/base.py +++ b/reVX/setbacks/base.py @@ -266,8 +266,9 @@ def __init__(self, excl_fpath, regulations, hsds=False, excl_fpath : str Path to .h5 file containing exclusion layers, will also be the location of any new setback layers - regulations : `~reVX.setbacks.regulations.Regulations` - A `Regulations` object used to extract setback distances. + regulations : `~reVX.setbacks.regulations.SetbackRegulations` + A `SetbackRegulations` object used to extract setback + distances. hsds : bool, optional Boolean flag to use h5pyd to handle .h5 'files' hosted on AWS behind HSDS. By default `False`. 
@@ -338,8 +339,8 @@ def pre_process_regulations(self): logger.warning(msg) warn(msg) - self._regulations.regulations = (self.regulations_table[mask] - .reset_index(drop=True)) + self._regulations.df = (self.regulations_table[mask] + .reset_index(drop=True)) logger.debug('Computing setbacks for regulations in {} counties' .format(len(self.regulations_table))) diff --git a/reVX/setbacks/regulations.py b/reVX/setbacks/regulations.py index ec897e7f2..a0b6a9c23 100644 --- a/reVX/setbacks/regulations.py +++ b/reVX/setbacks/regulations.py @@ -7,15 +7,14 @@ import geopandas as gpd from rex.utilities import parse_table +from reVX.utilities.regulations import AbstractBaseRegulations logger = logging.getLogger(__name__) -class Regulations: - """Regulation setback values. """ - - REQUIRED_COLUMNS = ["Feature Type", "Value Type", "Value", "FIPS"] +class SetbackRegulations(AbstractBaseRegulations): + """Setback regulation values. """ def __init__(self, base_setback_dist, regulations_fpath=None, multiplier=None): @@ -50,11 +49,9 @@ def __init__(self, base_setback_dist, regulations_fpath=None, setback to all counties not listed in the regulations file. By default `None`. """ - - self._base_setback_dist = base_setback_dist - self._regulations = None self._multi = multiplier - self._preflight_check(regulations_fpath) + super().__init__(generic_regulation_value=base_setback_dist, + regulations_fpath=regulations_fpath) def _preflight_check(self, regulations_fpath): """Apply preflight checks to the regulations path and multiplier. @@ -71,13 +68,7 @@ def _preflight_check(self, regulations_fpath): Path to regulations .csv file, if `None`, create global setbacks. 
""" - if regulations_fpath: - try: - self.regulations = parse_table(regulations_fpath) - except ValueError: - self.regulations = gpd.read_file(regulations_fpath) - logger.debug('Computing setbacks using regulations provided in: {}' - .format(regulations_fpath)) + super()._preflight_check(regulations_fpath) if self._multi: logger.debug('Computing setbacks using base setback distance ' @@ -89,82 +80,19 @@ def _preflight_check(self, regulations_fpath): logger.error(msg) raise RuntimeError(msg) - @property - def regulations(self): - """Regulations table. - - Returns - ------- - geopandas.GeoDataFrame | None - """ - return self._regulations - - @regulations.setter - def regulations(self, regulations): - if regulations is None: - msg = "Cannot set regulations to `None`" - logger.error(msg) - raise ValueError(msg) - self._regulations = regulations - self._validate_regulations() - - def _validate_regulations(self): - """Perform several validations on regulations""" - - self._convert_cols_to_title() - self._check_for_req_missing_cols() - self._remove_nans_from_req_cols() - self._casefold_feature_types() - - def _convert_cols_to_title(self): - """Convert column names in regulations DataFrame to str.title(). """ - new_col_names = {col: col.lower().title() - for col in self._regulations.columns - if col.lower() not in {"geometry", "fips"}} - self._regulations = self._regulations.rename(new_col_names, axis=1) - - def _check_for_req_missing_cols(self): - """Check for missing (required) columns in regulations DataFrame. """ - missing = [col for col in self.REQUIRED_COLUMNS - if col not in self._regulations] - if any(missing): - msg = ('Regulations are missing the following required columns: {}' - .format(missing)) - logger.error(msg) - raise RuntimeError(msg) - - def _remove_nans_from_req_cols(self): - """Remove rows with NaN values from required columns. 
""" - for col in self.REQUIRED_COLUMNS: - na_rows = self._regulations[col].isna() - self._regulations = self._regulations[~na_rows] - - def _casefold_feature_types(self): - """Casefold "Feature Type" values. """ - feature_types = self._regulations['Feature Type'].str.strip() - feature_types = feature_types.str.casefold() - self._regulations['Feature Type'] = feature_types - @property def base_setback_dist(self): - """The base setback distance, in meters. + """float: The base setback distance, in meters. """ + return self._generic_regulation_value - Returns - ------- - float - """ - return self._base_setback_dist + @property + def multiplier(self): + """int | float: Generic setback multiplier. """ + return self._multi @property def generic(self): - """Default regulation value. - - This value is used for global regulations. - - Returns - ------- - float | None - """ + """float | None: Regulation value used for global regulations. """ if self.multiplier is None: setback = None else: @@ -172,52 +100,13 @@ def generic(self): return setback - @property - def multiplier(self): - """Generic setback multiplier. - - Returns - ------- - int | float - """ - return self._multi - - @property - def locals_exist(self): - """Flag indicating wether local regulations exist. - - Returns - ------- - bool - """ - return (self.regulations is not None and not self.regulations.empty) - - @property - def generic_exists(self): - """Flag indicating wether generic regulations exist. - - Returns - ------- - bool - """ - return self.generic is not None - - def __iter__(self): - if self._regulations is None: - return - for ind, county_regulations in self.regulations.iterrows(): - setback = self._county_regulation_setback(county_regulations) - if setback is None: - continue - yield setback, self.regulations.iloc[[ind]].copy() - - def _county_regulation_setback(self, county_regulations): + def _county_regulation_value(self, county_regulations): """Retrieve county regulation setback. 
""" - setback_type = county_regulations["Value Type"].strip() + setback_type = county_regulations["Value Type"] setback = float(county_regulations["Value"]) - if setback_type.lower() == "structure height multiplier": + if setback_type == "structure height multiplier": setback *= self.base_setback_dist - elif setback_type.lower() != "meters": + elif setback_type != "meters": msg = ("Cannot create setback for {}, expecting " '"Structure Height Multiplier", or ' '"Meters", but got {!r}' @@ -228,8 +117,8 @@ def _county_regulation_setback(self, county_regulations): return setback -class WindRegulations(Regulations): - """Wind regulation setback values. """ +class WindSetbackRegulations(SetbackRegulations): + """Wind setback regulation setback values. """ MULTIPLIERS = {'high': 3, 'moderate': 1.1} @@ -278,7 +167,7 @@ def __init__(self, hub_height, rotor_diameter, regulations_fpath=None, multiplier=multiplier) def _preflight_check(self, regulations_fpath): - """ Run preflight checks on WindRegulations inputs. + """ Run preflight checks on WindSetbackRegulations inputs. In addition to the checks performed in `Regulations`, the `multiplier` is converted to a float values if a string is @@ -318,17 +207,17 @@ def rotor_diameter(self): """ return self._rotor_diameter - def _county_regulation_setback(self, county_regulations): + def _county_regulation_value(self, county_regulations): """Retrieve county regulation setback. 
""" - setback_type = county_regulations["Value Type"].strip() + setback_type = county_regulations["Value Type"] setback = float(county_regulations["Value"]) - if setback_type.lower() == "max-tip height multiplier": + if setback_type == "max-tip height multiplier": setback *= self.base_setback_dist - elif setback_type.lower() == "rotor-diameter multiplier": + elif setback_type == "rotor-diameter multiplier": setback *= self.rotor_diameter - elif setback_type.lower() == "hub-height multiplier": + elif setback_type == "hub-height multiplier": setback *= self.hub_height - elif setback_type.lower() != "meters": + elif setback_type != "meters": msg = ('Cannot create setback for {}, expecting ' '"Max-tip Height Multiplier", ' '"Rotor-Diameter Multiplier", ' @@ -341,9 +230,9 @@ def _county_regulation_setback(self, county_regulations): return setback -def validate_regulations_input(base_setback_dist=None, hub_height=None, - rotor_diameter=None): - """Validate the regulations initialization input. +def validate_setback_regulations_input(base_setback_dist=None, hub_height=None, + rotor_diameter=None): + """Validate the setback regulations initialization input. Specifically, this function raises an error unless exactly one of the following combinations of inputs are provided: @@ -384,10 +273,10 @@ def validate_regulations_input(base_setback_dist=None, hub_height=None, "three).") -def select_regulations(base_setback_dist=None, hub_height=None, - rotor_diameter=None, regulations_fpath=None, - multiplier=None): - """Select appropriate regulations based on input. +def select_setback_regulations(base_setback_dist=None, hub_height=None, + rotor_diameter=None, regulations_fpath=None, + multiplier=None): + """Select appropriate setback regulations based on input. Parameters ---------- @@ -430,16 +319,16 @@ def select_regulations(base_setback_dist=None, hub_height=None, setback distance. 
""" - validate_regulations_input(base_setback_dist=base_setback_dist, - hub_height=hub_height, - rotor_diameter=rotor_diameter) + validate_setback_regulations_input(base_setback_dist=base_setback_dist, + hub_height=hub_height, + rotor_diameter=rotor_diameter) if base_setback_dist is None: - return WindRegulations(hub_height=hub_height, - rotor_diameter=rotor_diameter, - regulations_fpath=regulations_fpath, - multiplier=multiplier) + return WindSetbackRegulations(hub_height=hub_height, + rotor_diameter=rotor_diameter, + regulations_fpath=regulations_fpath, + multiplier=multiplier) else: - return Regulations(base_setback_dist=base_setback_dist, - regulations_fpath=regulations_fpath, - multiplier=multiplier) + return SetbackRegulations(base_setback_dist=base_setback_dist, + regulations_fpath=regulations_fpath, + multiplier=multiplier) diff --git a/reVX/setbacks/setbacks_cli.py b/reVX/setbacks/setbacks_cli.py index 90a997d06..28d19f073 100644 --- a/reVX/setbacks/setbacks_cli.py +++ b/reVX/setbacks/setbacks_cli.py @@ -15,8 +15,8 @@ from reVX.config.setbacks import SetbacksConfig from reVX.setbacks import SETBACKS -from reVX.setbacks.regulations import (validate_regulations_input, - select_regulations) +from reVX.setbacks.regulations import (validate_setback_regulations_input, + select_setback_regulations) from reVX import __version__ logger = logging.getLogger(__name__) @@ -141,7 +141,8 @@ def local(ctx, excl_fpath, feature_type, features_path, out_dir, hub_height, # same check as the config in case someone invokes this from the # direct command line instead of a config file for some bizarre reason - validate_regulations_input(base_setback_dist, hub_height, rotor_diameter) + validate_setback_regulations_input(base_setback_dist, hub_height, + rotor_diameter) logger.info('Computing setbacks from structures in {}' .format(features_path)) @@ -159,8 +160,9 @@ def local(ctx, excl_fpath, feature_type, features_path, out_dir, hub_height, regs_fpath, multiplier, max_workers, 
replace, weights_calculation_upscale_factor, out_layers)) - regulations = select_regulations(base_setback_dist, hub_height, - rotor_diameter, regs_fpath, multiplier) + regulations = select_setback_regulations(base_setback_dist, hub_height, + rotor_diameter, regs_fpath, + multiplier) setbacks_class = SETBACKS[feature_type] wcuf = weights_calculation_upscale_factor diff --git a/reVX/turbine_flicker/regulations.py b/reVX/turbine_flicker/regulations.py new file mode 100644 index 000000000..395667785 --- /dev/null +++ b/reVX/turbine_flicker/regulations.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +""" +Compute setbacks exclusions +""" +from warnings import warn +import logging + +from reVX.utilities.regulations import AbstractBaseRegulations + + +logger = logging.getLogger(__name__) + + +class FlickerRegulations(AbstractBaseRegulations): + """Shadow flicker regulation values. """ + + def __init__(self, hub_height, rotor_diameter, flicker_threshold=None, + regulations_fpath=None): + """ + Parameters + ---------- + hub_height : float | int + Turbine hub height (m). + rotor_diameter : float | int + Turbine rotor diameter (m). + flicker_threshold : float | int, optional + Maximum number of allowable flicker hours per year to use + for generic flicker regulations. If `None`, then only local + (county) flicker regulations are applied. + By default, `None`. + regulations_fpath : str, optional + Path to regulations .csv or .gpkg file. At a minimum, this + file must contain the following columns: `Feature Type` + which labels the type of regulation that each row + represents (flicker regulations must be called "Shadow + Flicker"), `Value Type`, which specifies the type of the + value (flicker value types must be "Hrs/Year"), `Value`, + which specifies the numeric value of the flicker threshold + (in hours), and `FIPS`, which specifies a unique 5-digit + code for each county (this can be an integer - no leading + zeros required). 
If this input is `None`, generic flicker + regulations defined by `flicker_threshold` are applied. + By default `None`. + """ + self._hub_height = hub_height + self._rotor_diameter = rotor_diameter + super().__init__(generic_regulation_value=flicker_threshold, + regulations_fpath=regulations_fpath) + + @property + def hub_height(self): + """float | int: Turbine hub-height in meters. """ + return self._hub_height + + @property + def rotor_diameter(self): + """float | int: Turbine rotor diameter in meters. """ + return self._rotor_diameter + + def _county_regulation_value(self, county_regulations): + """Retrieve county regulation value. """ + regulation_type = county_regulations["Value Type"] + regulation = float(county_regulations["Value"]) + if regulation_type != "hrs/year": + msg = ('Cannot create flicker regulations for {}, expecting ' + '"Hrs/Year", but got {!r}' + .format(county_regulations["County"], regulation_type)) + logger.warning(msg) + warn(msg) + return + return regulation diff --git a/reVX/turbine_flicker/turbine_flicker.py b/reVX/turbine_flicker/turbine_flicker.py index 20bfdd1dd..8623db067 100644 --- a/reVX/turbine_flicker/turbine_flicker.py +++ b/reVX/turbine_flicker/turbine_flicker.py @@ -12,7 +12,7 @@ from reV.supply_curve.extent import SupplyCurveExtent from reV.supply_curve.tech_mapping import TechMapping from reVX.handlers.geotiff import Geotiff -from reVX.setbacks.regulations import WindRegulations +from reVX.utilities.regulations import AbstractBaseRegulations from reVX.wind_dirs.mean_wind_dirs_point import MeanWindDirectionsPoint from reVX.utilities.exclusions import (ExclusionsConverter, AbstractBaseExclusionsMerger) @@ -23,65 +23,6 @@ logger = logging.getLogger(__name__) -class FlickerRegulations(WindRegulations): - """Shadow flicker regulation values. """ - - def __init__(self, hub_height, rotor_diameter, flicker_threshold=30, - regulations_fpath=None): - """ - Parameters - ---------- - hub_height : float | int - Turbine hub height (m). 
- rotor_diameter : float | int - Turbine rotor diameter (m). - flicker_threshold : float | int, optional - Maximum number of allowable flicker hours per year. - By default, `30`. - regulations_fpath : str, optional - Path to regulations .csv or .gpkg file. At a minimum, this - file must contain the following columns: `Feature Type` - which labels the type of setback that each row represents, - `Value Type`, which specifies wether the value is a - multiplier or static height, `Value`, which specifies the - numeric value of the setback or multiplier, and `FIPS`, - which specifies a unique 5-digit code for each county (this - can be an integer - no leading zeros required). Valid - options for the `Value Type` are: - - "Hrs/Year" - If this input is `None`, a generic setback of - `max_tip_height * multiplier` is used. By default `None`. - """ - super().__init__(hub_height=hub_height, rotor_diameter=rotor_diameter, - regulations_fpath=regulations_fpath, - multiplier=1) - self._base_setback_dist = flicker_threshold - - @property - def flicker_threshold(self): - """ - Maximum number of allowable flicker hours per year. - - Returns - ------- - float - """ - return self._base_setback_dist - - def _county_regulation_setback(self, county_regulations): - """Retrieve county regulation setback. 
""" - setback_type = county_regulations["Value Type"].strip() - setback = float(county_regulations["Value"]) - if setback_type.lower() != "hrs/year": - msg = ('Cannot create setback for {}, expecting ' - '"Hrs/Year", but got {!r}' - .format(county_regulations["County"], setback_type)) - logger.warning(msg) - warn(msg) - return - return setback - - class TurbineFlicker(AbstractBaseExclusionsMerger): """ Class to compute turbine shadow flicker and exclude sites that will @@ -368,11 +309,12 @@ def compute_flicker_exclusions(self, flicker_threshold=30, fips=None, flicker_arr = self.no_exclusions_array if max_workers > 1: - msg = ('Computing exclusions from {} based on {}m hub height ' - 'turbines with {}m rotor diameters in parallel using {} ' - 'workers'.format(self, self._regulations.hub_height, - self._regulations.rotor_diameter, - max_workers)) + msg = ('Computing local flicker exclusions based on {}m hub ' + 'height turbines with {}m rotor diameters in parallel ' + 'using {} workers' + .format(self._regulations.hub_height, + self._regulations.rotor_diameter, + max_workers)) logger.info(msg) loggers = [__name__, 'reVX', 'rex'] @@ -411,9 +353,9 @@ def compute_flicker_exclusions(self, flicker_threshold=30, fips=None, log_mem(logger) else: msg = ( - 'Computing exclusions from {} based on {}m hub height, {}m ' - 'rotor diameter turbines in serial.' - .format(self, self._regulations.hub_height, + 'Computing local flicker exclusions based on {}m hub height, ' + '{}m rotor diameter turbines in serial.' + .format(self._regulations.hub_height, self._regulations.rotor_diameter) ) logger.info(msg) @@ -466,19 +408,48 @@ def compute_flicker_exclusions(self, flicker_threshold=30, fips=None, return flicker_arr - def pre_process_regulations(self): - """Reduce regulations to correct state and features. """ + def _apply_regulations_mask(self): + """Mask regulations to only shadow flicker. 
""" + flicker = self._regulations.df['Feature Type'] == 'shadow flicker' + + if not flicker.any(): + msg = "Found no local flicker regulations!" + logger.warning(msg) + warn(msg) + + self._regulations.df = (self._regulations.df[flicker] + .reset_index(drop=True)) + logger.debug('Computing flicker for regulations in {} counties' + .format(len(self._regulations.df))) + + def _map_fips_to_gid(self): + """Map county FIPS values to corresponding SC gids. """ self._fips_to_gid = {} - reg_fips = self._regulations.FIPS.unique() + reg_fips = self._regulations.df.FIPS.unique() with SupplyCurveExtent(self._excl_fpath, resolution=self._res) as sc: for gid in self._sc_points.index: for fips in np.unique(sc.get_excl_points('cnty_fips', gid)): if fips in reg_fips: self._fips_to_gid.setdefault(fips, []).append(gid) - # TODO: Turn this into a warning - assert len(self._fips_to_gid) == len(reg_fips), "Some FIPS not found" + missing_fips = set(reg_fips) - set(self._fips_to_gid) + if missing_fips: + msg = ("{} counties with flicker regulations were not found on " + "the supply curve grid ({}): {}" + .format(len(missing_fips), self._excl_fpath, missing_fips)) + logger.warning(msg) + warn(msg) + + @property + def no_exclusions_array(self): + """np.array: Array representing no exclusions. """ + return np.ones(self._bld_layer.shape, dtype=np.uint8) + + def pre_process_regulations(self): + """Reduce regulations to correct state and features. """ + self._apply_regulations_mask() + self._map_fips_to_gid() def compute_local_exclusions(self, regulation_value, cnty): """Compute local flicker exclusions. 
@@ -522,9 +493,13 @@ def compute_generic_exclusions(self, max_workers=None):
         flicker : ndarray
             Raster array of flicker exclusions
         """
-        logger.info('Computing generic flicker exclusions...')
-        return self.compute_flicker_exclusions(flicker_threshold=30,
-                                               fips=None,
+        ft = self._regulations.generic
+        logger.info('Computing generic flicker exclusions using a threshold '
+                    'of {} hrs/year based on {}m hub height, {}m '
+                    'rotor diameter turbines'
+                    .format(ft, self._regulations.hub_height,
+                            self._regulations.rotor_diameter))
+        return self.compute_flicker_exclusions(flicker_threshold=ft, fips=None,
                                                max_workers=max_workers)
 
     def input_output_filenames(self, out_dir, features_fpath):
diff --git a/reVX/turbine_flicker/turbine_flicker_cli.py b/reVX/turbine_flicker/turbine_flicker_cli.py
index 4cf525728..e54549941 100644
--- a/reVX/turbine_flicker/turbine_flicker_cli.py
+++ b/reVX/turbine_flicker/turbine_flicker_cli.py
@@ -13,9 +13,9 @@
 from rex.utilities.utilities import get_class_properties
 
 from reVX.config.turbine_flicker import TurbineFlickerConfig
-from reVX.turbine_flicker.turbine_flicker import (FlickerRegulations,
-                                                  TurbineFlicker,
+from reVX.turbine_flicker.turbine_flicker import (TurbineFlicker,
                                                   load_building_layer)
+from reVX.turbine_flicker.regulations import FlickerRegulations
 from reVX import __version__
 
 logger = logging.getLogger(__name__)
diff --git a/reVX/utilities/exclusions.py b/reVX/utilities/exclusions.py
index 88fe076bd..365f4cbf3 100644
--- a/reVX/utilities/exclusions.py
+++ b/reVX/utilities/exclusions.py
@@ -41,7 +41,7 @@ def pre_process_regulations(self):
         """Reduce regulations to correct state and features.
 
         When implementing this method, make sure to update
-        `self._regulations.regulations`.
+        `self._regulations.df`.
""" raise NotImplementedError @@ -127,8 +127,8 @@ def __init__(self, excl_fpath, regulations, hsds=False): excl_fpath : str Path to .h5 file containing exclusion layers, will also be the location of any new exclusion layers - regulations : `Regulations` - A `Regulations` object used to extract exclusion regulation + regulations : `~reVX.utilities.AbstractBaseRegulations` subclass + A regulations object used to extract exclusion regulation values. hsds : bool, optional Boolean flag to use h5pyd to handle .h5 'files' hosted on @@ -140,7 +140,7 @@ def __init__(self, excl_fpath, regulations, hsds=False): self._hsds = hsds self._fips = self._features_fpath = self._profile = None self._set_profile() - self._process_regulations(regulations.regulations) + self._process_regulations(regulations.df) def __repr__(self): msg = "{} for {}".format(self.__class__.__name__, self._excl_fpath) @@ -206,7 +206,7 @@ def _process_regulations(self, regulations_df): ) regulations_df = regulations_df.reset_index() regulations_df = regulations_df.to_crs(crs=self.profile['crs']) - self._regulations.regulations = regulations_df + self._regulations.df = regulations_df @property def profile(self): @@ -221,7 +221,7 @@ def regulations_table(self): ------- geopandas.GeoDataFrame | None """ - return self._regulations.regulations + return self._regulations.df @regulations_table.setter def regulations_table(self, regulations_table): @@ -394,7 +394,8 @@ def _compute_merged_exclusions(self, max_workers=None): if local_exclusions_exist and not generic_exclusions_exist: local_excl = self.compute_all_local_exclusions(max_workers=mw) - return self._merge_exclusions(self.no_exclusions_array, local_excl) + nea = self.no_exclusions_array.astype(local_excl.dtype) + return self._merge_exclusions(nea, local_excl) generic_exclusions = self.compute_generic_exclusions(max_workers=mw) local_exclusions = self.compute_all_local_exclusions(max_workers=mw) @@ -446,8 +447,8 @@ def run(cls, excl_fpath, features_path, 
out_dir, regulations, `/path/to/features/*.geojson`. out_dir : str Directory to save exclusion geotiff(s) into - regulations : `Regulations` - A `Regulations` object used to extract exclusion regulation + regulations : `~reVX.utilities.AbstractBaseRegulations` subclass + A regulations object used to extract exclusion regulation distances. max_workers : int, optional Number of workers to use for exclusion computation, if 1 run @@ -1118,223 +1119,6 @@ def extract_all_layers(cls, excl_h5, out_dir, chunks=(128, 128), excls.layer_to_geotiff(layer, geotiff) -class Regulations: - """Exclusion Regulations. """ - - REQUIRED_COLUMNS = ["Feature Type", "Value Type", "Value", "FIPS"] - - def __init__(self, base_regulation_value, regulations_fpath=None, - multiplier=None): - """ - Parameters - ---------- - base_regulation_value : float | int - Base regulation value. This value will be used to calculate - the exclusion regulation value (e.g. setback distance, - flicker hours, etc.) if a multiplier is provided either via - the `regulations_fpath`csv or the `multiplier` input. In - these cases, the exclusion regulation value will be - set to `base_regulation_value * multiplier`. - regulations_fpath : str | None, optional - Path to regulations .csv or .gpkg file. At a minimum, this - file must contain the following columns: `Feature Type` - which labels the type of exclusion that each row represents, - `Value Type`, which specifies wether the value is a - multiplier or static height, `Value`, which specifies the - numeric value of the exclusion or multiplier, and `FIPS`, - which specifies a unique 5-digit code for each county (this - can be an integer - no leading zeros required). Valid - options for the `Value Type` are: - - "Structure Height Multiplier" - - "Meters" - If this input is `None`, a generic regulation value of - `base_regulation_value * multiplier` is used. By default - `None`. 
- multiplier : int | float | str | None, optional - A regulation value multiplier to use if regulations are not - supplied. This multiplier will be applied to the - ``base_regulation_value`` to calculate the exclusion - regulation value. If supplied along with - ``regulations_fpath``, this input will be used to calculate - exclusions for all counties not listed in the regulations - file. By default `None`. - """ - self._base_regulation_value = base_regulation_value - self._regulations = None - self._multi = multiplier - self._preflight_check(regulations_fpath) - - def _preflight_check(self, regulations_fpath): - """Apply preflight checks to the regulations path and multiplier. - - Run preflight checks on exclusion inputs: - 1) Ensure either a regulations .csv or - an exclusion value multiplier (or both) is provided - 2) Ensure regulations has county FIPS, map regulations to county - geometries from exclusions .h5 file - - Parameters - ---------- - regulations_fpath : str | None - Path to regulations .csv file, if `None`, create global - exclusions. - """ - if regulations_fpath: - try: - self.regulations = parse_table(regulations_fpath) - except ValueError: - self.regulations = gpd.read_file(regulations_fpath) - logger.debug('Computing exclusions using regulations provided ' - 'in: {}'.format(regulations_fpath)) - - if self._multi: - logger.debug('Computing exclusions using base regulation value ' - 'multiplier of {}'.format(self._multi)) - - if not regulations_fpath and not self._multi: - msg = ('Computing exclusions requires a regulations ' - '.csv file and/or a generic multiplier!') - logger.error(msg) - raise RuntimeError(msg) - - @property - def regulations(self): - """Regulations table. 
- - Returns - ------- - geopandas.GeoDataFrame | None - """ - return self._regulations - - @regulations.setter - def regulations(self, regulations): - if regulations is None: - msg = "Cannot set regulations to `None`" - logger.error(msg) - raise ValueError(msg) - self._regulations = regulations - self._validate_regulations() - - def _validate_regulations(self): - """Perform several validations on regulations""" - - self._convert_cols_to_title() - self._check_for_req_missing_cols() - self._remove_nans_from_req_cols() - self._casefold_feature_types() - - def _convert_cols_to_title(self): - """Convert column names in regulations DataFrame to str.title(). """ - new_col_names = {col: col.lower().title() - for col in self._regulations.columns - if col.lower() not in {"geometry", "fips"}} - self._regulations = self._regulations.rename(new_col_names, axis=1) - - def _check_for_req_missing_cols(self): - """Check for missing (required) columns in regulations DataFrame. """ - missing = [col for col in self.REQUIRED_COLUMNS - if col not in self._regulations] - if any(missing): - msg = ('Regulations are missing the following required columns: {}' - .format(missing)) - logger.error(msg) - raise RuntimeError(msg) - - def _remove_nans_from_req_cols(self): - """Remove rows with NaN values from required columns. """ - for col in self.REQUIRED_COLUMNS: - na_rows = self._regulations[col].isna() - self._regulations = self._regulations[~na_rows] - - def _casefold_feature_types(self): - """Casefold "Feature Type" values. """ - feature_types = self._regulations['Feature Type'].str.strip() - feature_types = feature_types.str.casefold() - self._regulations['Feature Type'] = feature_types - - @property - def base_regulation_value(self): - """The base regulation value. - - Returns - ------- - int | float - """ - return self._base_regulation_value - - @property - def generic(self): - """Default regulation value. - - This value is used for global regulations. 
- - Returns - ------- - float | None - """ - if self.multiplier is None: - regulation_value = None - else: - regulation_value = self.base_regulation_value * self.multiplier - - return regulation_value - - @property - def multiplier(self): - """Generic exclusion value multiplier. - - Returns - ------- - int | float - """ - return self._multi - - @property - def locals_exist(self): - """Flag indicating wether local regulations exist. - - Returns - ------- - bool - """ - return (self.regulations is not None and not self.regulations.empty) - - @property - def generic_exists(self): - """Flag indicating wether generic regulations exist. - - Returns - ------- - bool - """ - return self.generic is not None - - def __iter__(self): - if self._regulations is None: - return - for ind, county_regulations in self.regulations.iterrows(): - reg = self._county_regulation_value(county_regulations) - if reg is None: - continue - yield reg, self.regulations.iloc[[ind]].copy() - - def _county_regulation_value(self, county_regulations): - """Retrieve county exclusion regulation. """ - exclusion_type = county_regulations["Value Type"].strip() - reg = float(county_regulations["Value"]) - if exclusion_type.lower() == "structure height multiplier": - reg *= self.base_regulation_value - elif exclusion_type.lower() != "meters": - msg = ("Cannot create exclusions for {}, expecting " - '"Meters", but got {!r}' - .format(county_regulations["County"], exclusion_type)) - logger.warning(msg) - warn(msg) - return - return reg - - def _error_or_warn(name, replace): """If replace, throw warning, otherwise throw error. 
""" if not replace: diff --git a/reVX/utilities/regulations.py b/reVX/utilities/regulations.py new file mode 100644 index 000000000..ef4661e67 --- /dev/null +++ b/reVX/utilities/regulations.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +""" +Abstract generic+local regulations +""" +from abc import ABC, abstractmethod +import logging +import geopandas as gpd + +from rex.utilities import parse_table + + +logger = logging.getLogger(__name__) + + +class AbstractBaseRegulations(ABC): + """ABC for county regulation values. """ + + REQUIRED_COLUMNS = ["Feature Type", "Value Type", "Value", "FIPS"] + + def __init__(self, generic_regulation_value=None, regulations_fpath=None): + """ + Parameters + ---------- + generic_regulation_value : float | int | None, optional + A generic regulation value to be applied where local + regulations and/or ordinances are not given. A `None` value + signifies that no regulation should be applied for regions + without a local regulation. By default `None`. + regulations_fpath : str | None, optional + Path to regulations .csv or .gpkg file. At a minimum, this + file must contain the following columns: `Feature Type` + which labels the type of regulation that each row + represents, `Value Type`, which specifies the type of the + value (e.g. a multiplier or static height, etc.), `Value`, + which specifies the numeric value of the regulation, and + `FIPS`, which specifies a unique 5-digit code for each + county (this can be an integer - no leading zeros required). + A `None` value signifies that no local regulations should + be applied. By default `None`. + """ + + self._generic_regulation_value = generic_regulation_value + self._regulations_df = None + self._preflight_check(regulations_fpath) + + def _preflight_check(self, regulations_fpath): + """Apply preflight checks to the regulations path and multiplier. 
+ + Run preflight checks on setback inputs: + 1) Ensure either a regulations .csv or a generic regulation + value (or both) is provided + 2) Ensure regulations has county FIPS, map regulations to county + geometries from exclusions .h5 file + + Parameters + ---------- + regulations_fpath : str | None + Path to regulations .csv file, if `None`, create global + setbacks. + """ + if regulations_fpath: + try: + self.df = parse_table(regulations_fpath) + except ValueError: + self.df = gpd.read_file(regulations_fpath) + logger.debug('Found regulations provided in: {}' + .format(regulations_fpath)) + + if (regulations_fpath is None + and self._generic_regulation_value is None): + msg = ('Regulations require a local regulation.csv file ' + 'and/or a generic regulation value!') + logger.error(msg) + raise RuntimeError(msg) + + @property + def generic(self): + """float | None: Regulation value used for global regulations. """ + return self._generic_regulation_value + + @property + def df(self): + """geopandas.GeoDataFrame | None: Regulations table. """ + return self._regulations_df + + @df.setter + def df(self, regulations_df): + if regulations_df is None: + msg = "Cannot set df to `None`" + logger.error(msg) + raise ValueError(msg) + self._regulations_df = regulations_df + self._validate_regulations() + + def _validate_regulations(self): + """Perform several validations on regulations""" + + self._convert_cols_to_title() + self._check_for_req_missing_cols() + self._remove_nans_from_req_cols() + self._casefold(cols=['Feature Type', 'Value Type']) + + def _convert_cols_to_title(self): + """Convert column names in regulations DataFrame to str.title(). """ + new_col_names = {col: col.lower().title() + for col in self._regulations_df.columns + if col.lower() not in {"geometry", "fips"}} + self._regulations_df = self._regulations_df.rename(new_col_names, + axis=1) + + def _check_for_req_missing_cols(self): + """Check for missing (required) columns in regulations DataFrame. 
""" + missing = [col for col in self.REQUIRED_COLUMNS + if col not in self._regulations_df] + if any(missing): + msg = ('Regulations are missing the following required columns: {}' + .format(missing)) + logger.error(msg) + raise RuntimeError(msg) + + def _remove_nans_from_req_cols(self): + """Remove rows with NaN values from required columns. """ + for col in self.REQUIRED_COLUMNS: + na_rows = self._regulations_df[col].isna() + self._regulations_df = self._regulations_df[~na_rows] + + def _casefold(self, cols): + """Casefold column values. """ + for col in cols: + vals = self._regulations_df[col].str.strip().str.casefold() + self._regulations_df[col] = vals + + @property + def locals_exist(self): + """bool: Flag indicating wether local regulations exist. """ + return (self.df is not None and not self.df.empty) + + @property + def generic_exists(self): + """bool: Flag indicating wether generic regulations exist. """ + return self.generic is not None + + def __iter__(self): + if self._regulations_df is None: + return + for ind, county_regulations in self.df.iterrows(): + regulation = self._county_regulation_value(county_regulations) + if regulation is None: + continue + yield regulation, self.df.iloc[[ind]].copy() + + @abstractmethod + def _county_regulation_value(self, county_regulations): + """Retrieve county regulation value. 
""" + raise NotImplementedError diff --git a/tests/data/turbine_flicker/blue_creek_regs_value.csv b/tests/data/turbine_flicker/blue_creek_regs_value.csv new file mode 100644 index 000000000..1dc860790 --- /dev/null +++ b/tests/data/turbine_flicker/blue_creek_regs_value.csv @@ -0,0 +1,8 @@ +County,State,STATE_FIPS,CNTY_FIPS,FIPS,Feature Type,Value Type,Value,Comment +Bristol,Rhode Island,44,1,44001, wAter ,Meters,10, +Bristol,Rhode Island,44,1,44001,Property Line ,Meters,4.1, +Bristol,Rhode Island,44,1,44001, Highway,Meters,23, +Test1,Ohio,39,1,39001, wAter ,Meters,10, +Test2,Ohio,39,2,39002,Property Line ,Meters,4.1, +Test1,Ohio,39,1,39001,Shadow Flicker,hrs/Year,30, +Bristol,Rhode Island,44,1,44001,Shadow Flicker,hrs/Year,30, \ No newline at end of file diff --git a/tests/test_regulations.py b/tests/test_regulations.py new file mode 100644 index 000000000..3a38669aa --- /dev/null +++ b/tests/test_regulations.py @@ -0,0 +1,166 @@ +# -*- coding: utf-8 -*- +""" +Regulations tests +""" +import numpy as np +import pandas as pd +import os +import pytest +import tempfile + +from reVX import TESTDATADIR +from reVX.utilities.regulations import AbstractBaseRegulations + + +GENERIC_REG_VAL = 10 +REGS_FPATH = os.path.join(TESTDATADIR, 'setbacks', 'ri_wind_regs_fips.csv') + + +class TestRegulations(AbstractBaseRegulations): + """Implementation of AbstractBaseRegulations for testing only.""" + + def _county_regulation_value(self, __): + """Retrieve county regulation setback. """ + return 0 + + +def test_regulations_init(): + """Test initializing a normal regulations file. 
""" + regs = TestRegulations(generic_regulation_value=GENERIC_REG_VAL, + regulations_fpath=REGS_FPATH) + assert np.isclose(regs.generic, GENERIC_REG_VAL) + + for col in TestRegulations.REQUIRED_COLUMNS: + assert col in regs.df + assert not regs.df[col].isna().any() + + assert regs.df['Feature Type'].str.islower().all() + assert regs.df['Value Type'].str.islower().all() + + +def test_regulations_missing_init(): + """Test initializing base regulations with missing info. """ + with pytest.raises(RuntimeError) as excinfo: + TestRegulations() + + expected_err_msg = ('Regulations require a local regulation.csv file ' + 'and/or a generic regulation value!') + assert expected_err_msg in str(excinfo.value) + + +def test_regulations_non_capitalized_cols(): + """Test base regulations for csv with non-capitalized cols. """ + regs_path = os.path.join(TESTDATADIR, 'setbacks', 'non_standard_regs', + 'col_names_not_caps.csv') + + regs = TestRegulations(generic_regulation_value=GENERIC_REG_VAL, + regulations_fpath=regs_path) + for col in regs.df.columns: + if col.lower() not in {"geometry", "fips"}: + assert col.istitle() + + +def test_regulations_missing_cols(): + """Test base regulations for csv with missing cols. """ + expected_err_msg = 'Regulations are missing the following required columns' + + for fn in ['missing_ft.csv', 'missing_vt.csv', 'missing_vt.csv']: + regs_path = os.path.join(TESTDATADIR, 'setbacks', 'non_standard_regs', + fn) + + with pytest.raises(RuntimeError) as excinfo: + TestRegulations(generic_regulation_value=GENERIC_REG_VAL, + regulations_fpath=regs_path) + assert expected_err_msg in str(excinfo.value) + + +def test_regulations_na_cols(): + """Test base regulations for csv with cols containing NaN's. 
""" + + for fn in ['nan_feature_types.csv', 'nan_fips.csv', 'nan_value_types.csv', + 'nan_values.csv']: + regs_path = os.path.join(TESTDATADIR, 'setbacks', 'non_standard_regs', + fn) + regs_df = pd.read_csv(regs_path) + assert regs_df[TestRegulations.REQUIRED_COLUMNS].isna().values.any() + + regs = TestRegulations(generic_regulation_value=GENERIC_REG_VAL, + regulations_fpath=regs_path) + for col in TestRegulations.REQUIRED_COLUMNS: + assert not regs.df[col].isna().any() + + +def test_regulations_iter(): + """Test base regulations iterator. """ + regs_path = os.path.join(TESTDATADIR, 'setbacks', + 'ri_parcel_regs_multiplier_solar.csv') + + regs = TestRegulations(generic_regulation_value=GENERIC_REG_VAL, + regulations_fpath=regs_path) + for ind, (setback, cnty) in enumerate(regs): + assert np.isclose(setback, 0) + assert regs.df.iloc[[ind]].equals(cnty) + + regs = TestRegulations(generic_regulation_value=GENERIC_REG_VAL, + regulations_fpath=None) + assert len(list(regs)) == 0 + + +def test_regulations_set_to_none(): + """Test setting regulations to `None` not allowed. """ + regs = TestRegulations(generic_regulation_value=GENERIC_REG_VAL, + regulations_fpath=REGS_FPATH) + with pytest.raises(ValueError): + regs.df = None + + +def test_regulations_locals_exist(): + """Test locals_exist property. """ + regs = TestRegulations(generic_regulation_value=GENERIC_REG_VAL, + regulations_fpath=REGS_FPATH) + assert regs.locals_exist + regs = TestRegulations(generic_regulation_value=GENERIC_REG_VAL, + regulations_fpath=None) + assert not regs.locals_exist + + with tempfile.TemporaryDirectory() as td: + regs = pd.read_csv(REGS_FPATH).iloc[0:0] + regs_fpath = os.path.basename(REGS_FPATH) + regs_fpath = os.path.join(td, regs_fpath) + regs.to_csv(regs_fpath, index=False) + regs = TestRegulations(generic_regulation_value=GENERIC_REG_VAL, + regulations_fpath=regs_fpath) + assert not regs.locals_exist + + +def test_regulations_generic_exists(): + """Test generic_exists property. 
""" + regs = TestRegulations(generic_regulation_value=GENERIC_REG_VAL, + regulations_fpath=REGS_FPATH) + assert regs.generic_exists + regs = TestRegulations(generic_regulation_value=GENERIC_REG_VAL, + regulations_fpath=None) + assert regs.generic_exists + regs = TestRegulations(generic_regulation_value=None, + regulations_fpath=REGS_FPATH) + assert not regs.generic_exists + + +def execute_pytest(capture='all', flags='-rapP'): + """Execute module as pytest with detailed summary report. + + Parameters + ---------- + capture : str + Log or stdout/stderr capture option. ex: log (only logger), + all (includes stdout/stderr) + flags : str + Which tests to show logs and results for. + """ + + fname = os.path.basename(__file__) + pytest.main(['-q', '--show-capture={}'.format(capture), fname, flags]) + + +if __name__ == '__main__': + execute_pytest() diff --git a/tests/test_setbacks.py b/tests/test_setbacks.py index cfa9caf2d..25434c1ba 100644 --- a/tests/test_setbacks.py +++ b/tests/test_setbacks.py @@ -20,9 +20,10 @@ from reVX import TESTDATADIR from reVX.handlers.geotiff import Geotiff -from reVX.setbacks.regulations import (Regulations, WindRegulations, - validate_regulations_input, - select_regulations) +from reVX.setbacks.regulations import (SetbackRegulations, + WindSetbackRegulations, + validate_setback_regulations_input, + select_setback_regulations) from reVX.setbacks import (ParcelSetbacks, RailSetbacks, StructureSetbacks, WaterSetbacks, SETBACKS) from reVX.setbacks.setbacks_cli import main @@ -65,118 +66,69 @@ def runner(): @pytest.fixture def generic_wind_regulations(): """Wind regulations with multiplier. """ - return WindRegulations(HUB_HEIGHT, ROTOR_DIAMETER, multiplier=MULTIPLIER) + return WindSetbackRegulations(HUB_HEIGHT, ROTOR_DIAMETER, + multiplier=MULTIPLIER) @pytest.fixture def county_wind_regulations(): """Wind regulations with multiplier. 
""" - return WindRegulations(HUB_HEIGHT, ROTOR_DIAMETER, - regulations_fpath=REGS_FPATH) + return WindSetbackRegulations(HUB_HEIGHT, ROTOR_DIAMETER, + regulations_fpath=REGS_FPATH) @pytest.fixture def county_wind_regulations_gpkg(): """Wind regulations with multiplier. """ - return WindRegulations(HUB_HEIGHT, ROTOR_DIAMETER, - regulations_fpath=REGS_GPKG) + return WindSetbackRegulations(HUB_HEIGHT, ROTOR_DIAMETER, + regulations_fpath=REGS_GPKG) -def test_regulations_init(): +def test_setback_regulations_init(): """Test initializing a normal regulations file. """ - regs = Regulations(10, regulations_fpath=REGS_FPATH, multiplier=1.1) + regs = SetbackRegulations(10, regulations_fpath=REGS_FPATH, multiplier=1.1) assert regs.base_setback_dist == 10 assert np.isclose(regs.generic, 10 * 1.1) assert np.isclose(regs.multiplier, 1.1) - for col in Regulations.REQUIRED_COLUMNS: - assert col in regs.regulations - assert not regs.regulations[col].isna().any() - - assert regs.regulations['Feature Type'].str.islower().all() - - regs = Regulations(10, regulations_fpath=REGS_FPATH, multiplier=None) + regs = SetbackRegulations(10, regulations_fpath=REGS_FPATH, + multiplier=None) assert regs.generic is None -def test_regulations_missing_init(): - """Test initializing `Regulations` with missing info. """ +def test_setback_regulations_missing_init(): + """Test initializing `SetbackRegulations` with missing info. """ with pytest.raises(RuntimeError) as excinfo: - Regulations(10) + SetbackRegulations(10) expected_err_msg = ('Computing setbacks requires a regulations ' '.csv file and/or a generic multiplier!') assert expected_err_msg in str(excinfo.value) -def test_regulations_non_capitalized_cols(): - """Test `Regulations` for csv with non-capitalized cols. 
""" - regs_path = os.path.join(TESTDATADIR, 'setbacks', 'non_standard_regs', - 'col_names_not_caps.csv') - - regs = Regulations(10, regulations_fpath=regs_path, multiplier=1.1) - for col in regs.regulations.columns: - if col.lower() not in {"geometry", "fips"}: - assert col.istitle() - - -def test_regulations_missing_cols(): - """Test `Regulations` for csv with missing cols. """ - expected_err_msg = 'Regulations are missing the following required columns' - - for fn in ['missing_ft.csv', 'missing_vt.csv', 'missing_vt.csv']: - regs_path = os.path.join(TESTDATADIR, 'setbacks', 'non_standard_regs', - fn) - - with pytest.raises(RuntimeError) as excinfo: - Regulations(10, regulations_fpath=regs_path, multiplier=1.1) - assert expected_err_msg in str(excinfo.value) - - -def test_regulations_na_cols(): - """Test `Regulations` for csv with cols containing NaN's. """ - - for fn in ['nan_feature_types.csv', 'nan_fips.csv', 'nan_value_types.csv', - 'nan_values.csv']: - regs_path = os.path.join(TESTDATADIR, 'setbacks', 'non_standard_regs', - fn) - regs_df = pd.read_csv(regs_path) - assert regs_df[Regulations.REQUIRED_COLUMNS].isna().values.any() - - regs = Regulations(10, regulations_fpath=regs_path, multiplier=1.1) - for col in Regulations.REQUIRED_COLUMNS: - assert not regs.regulations[col].isna().any() - - -def test_regulations_iter(): - """Test `Regulations` iterator. """ +def test_setback_regulations_iter(): + """Test `SetbackRegulations` iterator. 
""" expected_setbacks = [20, 23] regs_path = os.path.join(TESTDATADIR, 'setbacks', 'ri_parcel_regs_multiplier_solar.csv') - regs = Regulations(10, regulations_fpath=regs_path, multiplier=1.1) + regs = SetbackRegulations(10, regulations_fpath=regs_path, multiplier=1.1) for ind, (setback, cnty) in enumerate(regs): assert np.isclose(setback, expected_setbacks[ind]) - assert regs.regulations.iloc[[ind]].equals(cnty) + assert regs.df.iloc[[ind]].equals(cnty) - regs = Regulations(10, regulations_fpath=None, multiplier=1.1) + regs = SetbackRegulations(10, regulations_fpath=None, multiplier=1.1) assert len(list(regs)) == 0 -def test_regulations_set_to_none(): - """Test setting regulations to `None` not allowed. """ - regs = Regulations(10, regulations_fpath=REGS_FPATH, multiplier=1.1) - with pytest.raises(ValueError): - regs.regulations = None - - -def test_regulations_locals_exist(): +def test_setback_regulations_locals_exist(): """Test locals_exist property. """ - regs = Regulations(10, regulations_fpath=REGS_FPATH, multiplier=1.1) + regs = SetbackRegulations(10, regulations_fpath=REGS_FPATH, multiplier=1.1) assert regs.locals_exist - regs = Regulations(10, regulations_fpath=REGS_FPATH, multiplier=None) + regs = SetbackRegulations(10, regulations_fpath=REGS_FPATH, + multiplier=None) assert regs.locals_exist - regs = Regulations(10, regulations_fpath=None, multiplier=1.1) + regs = SetbackRegulations(10, regulations_fpath=None, multiplier=1.1) assert not regs.locals_exist with tempfile.TemporaryDirectory() as td: @@ -184,60 +136,63 @@ def test_regulations_locals_exist(): regs_fpath = os.path.basename(REGS_FPATH) regs_fpath = os.path.join(td, regs_fpath) regs.to_csv(regs_fpath, index=False) - regs = Regulations(10, regulations_fpath=regs_fpath, multiplier=1.1) + regs = SetbackRegulations(10, regulations_fpath=regs_fpath, + multiplier=1.1) assert not regs.locals_exist - regs = Regulations(10, regulations_fpath=regs_fpath, multiplier=None) + regs = SetbackRegulations(10, 
regulations_fpath=regs_fpath, + multiplier=None) assert not regs.locals_exist -def test_regulations_generic_exists(): +def test_setback_regulations_generic_exists(): """Test locals_exist property. """ - regs = Regulations(10, regulations_fpath=REGS_FPATH, multiplier=1.1) + regs = SetbackRegulations(10, regulations_fpath=REGS_FPATH, multiplier=1.1) assert regs.generic_exists - regs = Regulations(10, regulations_fpath=None, multiplier=1.1) + regs = SetbackRegulations(10, regulations_fpath=None, multiplier=1.1) assert regs.generic_exists - regs = Regulations(10, regulations_fpath=REGS_FPATH, multiplier=None) + regs = SetbackRegulations(10, regulations_fpath=REGS_FPATH, + multiplier=None) assert not regs.generic_exists -def test_regulations_wind(): - """Test `WindRegulations` initialization and iteration. """ +def test_setback_regulations_wind(): + """Test `WindSetbackRegulations` initialization and iteration. """ expected_setbacks = [250, 23] regs_path = os.path.join(TESTDATADIR, 'setbacks', 'ri_parcel_regs_multiplier_wind.csv') - regs = WindRegulations(hub_height=100, rotor_diameter=50, - regulations_fpath=regs_path, multiplier=1.1) + regs = WindSetbackRegulations(hub_height=100, rotor_diameter=50, + regulations_fpath=regs_path, multiplier=1.1) assert regs.hub_height == 100 assert regs.rotor_diameter == 50 for ind, (setback, cnty) in enumerate(regs): assert np.isclose(setback, expected_setbacks[ind]) - assert regs.regulations.iloc[[ind]].equals(cnty) + assert regs.df.iloc[[ind]].equals(cnty) -def test_validate_regulations_input(): - """Test that `validate_regulations_input` throws for incorrect input. """ +def test_validate_setback_regulations_input(): + """Test that `validate_setback_regulations_input` throws for bad input. 
""" with pytest.raises(RuntimeError): - validate_regulations_input() + validate_setback_regulations_input() with pytest.raises(RuntimeError): - validate_regulations_input(1, 2, 3) + validate_setback_regulations_input(1, 2, 3) -def test_select_regulations(): - """Test that `test_select_regulations` returns correct class. """ +def test_select_setback_regulations(): + """Test that `select_setback_regulations` returns correct class. """ with pytest.raises(RuntimeError): - select_regulations() + select_setback_regulations() with pytest.raises(RuntimeError): - select_regulations(1, 2, 3) + select_setback_regulations(1, 2, 3) - assert isinstance(select_regulations(None, 2, 3, None, 1.1), - WindRegulations) + assert isinstance(select_setback_regulations(None, 2, 3, None, 1.1), + WindSetbackRegulations) - assert isinstance(select_regulations(1, None, None, None, 1.1), - Regulations) + assert isinstance(select_setback_regulations(1, None, None, None, 1.1), + SetbackRegulations) @pytest.mark.parametrize('setbacks_class', SETBACKS.values()) @@ -249,7 +204,7 @@ def test_setbacks_no_computation(setbacks_class): regs_fpath = os.path.basename(REGS_FPATH) regs_fpath = os.path.join(td, regs_fpath) regs.to_csv(regs_fpath, index=False) - regs = Regulations(10, regulations_fpath=regs_fpath) + regs = SetbackRegulations(10, regulations_fpath=regs_fpath) setbacks = setbacks_class(EXCL_H5, regs) with pytest.raises(ValueError): setbacks.compute_exclusions("RhodeIsland.file") @@ -264,7 +219,7 @@ def test_setbacks_no_computation(setbacks_class): os.path.join(TESTDATADIR, 'setbacks', 'Rhode_Island_Water.gpkg'))]) def test_setbacks_no_generic_value(setbacks_class, feature_file): """Test setbacks computation for invalid input. 
""" - regs = Regulations(0, regulations_fpath=None, multiplier=1) + regs = SetbackRegulations(0, regulations_fpath=None, multiplier=1) setbacks = setbacks_class(EXCL_H5, regs) out = setbacks.compute_exclusions(feature_file) assert np.isclose(out, 0).all() @@ -274,7 +229,7 @@ def test_setbacks_saving_tiff_h5(): """Test setbacks saves to tiff and h5. """ feature_file = os.path.join(TESTDATADIR, 'setbacks', 'RI_Parcels', 'Rhode_Island.gpkg') - regs = Regulations(0, regulations_fpath=None, multiplier=1) + regs = SetbackRegulations(0, regulations_fpath=None, multiplier=1) with tempfile.TemporaryDirectory() as td: assert not os.path.exists(os.path.join(td, "Rhode_Island.tif")) @@ -392,11 +347,11 @@ def test_generic_parcels(): parcel_path = os.path.join(TESTDATADIR, 'setbacks', 'RI_Parcels', 'Rhode_Island.gpkg') - regulations_x1 = Regulations(BASE_SETBACK_DIST, multiplier=1) + regulations_x1 = SetbackRegulations(BASE_SETBACK_DIST, multiplier=1) setbacks_x1 = ParcelSetbacks(EXCL_H5, regulations_x1) test_x1 = setbacks_x1.compute_exclusions(parcel_path) - regulations_x100 = Regulations(BASE_SETBACK_DIST, multiplier=100) + regulations_x100 = SetbackRegulations(BASE_SETBACK_DIST, multiplier=100) setbacks_x100 = ParcelSetbacks(EXCL_H5, regulations_x100) test_x100 = setbacks_x100.compute_exclusions(parcel_path) @@ -417,7 +372,7 @@ def test_generic_parcels_with_invalid_shape_input(): parcel_path = os.path.join(TESTDATADIR, 'setbacks', 'RI_Parcels', 'invalid', 'Rhode_Island.gpkg') - regulations = Regulations(BASE_SETBACK_DIST, multiplier=100) + regulations = SetbackRegulations(BASE_SETBACK_DIST, multiplier=100) setbacks = ParcelSetbacks(EXCL_H5, regulations) # Ensure data we are using contains invalid shapes @@ -448,8 +403,8 @@ def test_local_parcels_solar(max_workers, regulations_fpath): regs_fpath = os.path.join(td, regs_fpath) shutil.copy(regulations_fpath, regs_fpath) - regulations = Regulations(BASE_SETBACK_DIST, - regulations_fpath=regs_fpath) + regulations = 
SetbackRegulations(BASE_SETBACK_DIST, + regulations_fpath=regs_fpath) setbacks = ParcelSetbacks(EXCL_H5, regulations) parcel_path = os.path.join(TESTDATADIR, 'setbacks', 'RI_Parcels', @@ -493,8 +448,9 @@ def test_local_parcels_wind(max_workers, regulations_fpath): regs_fpath = os.path.join(td, regs_fpath) shutil.copy(regulations_fpath, regs_fpath) - regulations = WindRegulations(hub_height=1.75, rotor_diameter=0.5, - regulations_fpath=regs_fpath) + regulations = WindSetbackRegulations(hub_height=1.75, + rotor_diameter=0.5, + regulations_fpath=regs_fpath) setbacks = ParcelSetbacks(EXCL_H5, regulations) parcel_path = os.path.join(TESTDATADIR, 'setbacks', 'RI_Parcels', @@ -530,11 +486,11 @@ def test_local_parcels_wind(max_workers, regulations_fpath): def test_generic_water_setbacks(water_path): """Test generic water setbacks. """ - regulations_x1 = Regulations(BASE_SETBACK_DIST, multiplier=1) + regulations_x1 = SetbackRegulations(BASE_SETBACK_DIST, multiplier=1) setbacks_x1 = WaterSetbacks(EXCL_H5, regulations_x1) test_x1 = setbacks_x1.compute_exclusions(water_path) - regulations_x100 = Regulations(BASE_SETBACK_DIST, multiplier=100) + regulations_x100 = SetbackRegulations(BASE_SETBACK_DIST, multiplier=100) setbacks_x100 = WaterSetbacks(EXCL_H5, regulations_x100) test_x100 = setbacks_x100.compute_exclusions(water_path) @@ -563,8 +519,8 @@ def test_local_water_solar(max_workers, regulations_fpath): regs_fpath = os.path.join(td, regs_fpath) shutil.copy(regulations_fpath, regs_fpath) - regulations = Regulations(BASE_SETBACK_DIST, - regulations_fpath=regs_fpath) + regulations = SetbackRegulations(BASE_SETBACK_DIST, + regulations_fpath=regs_fpath) setbacks = WaterSetbacks(EXCL_H5, regulations) water_path = os.path.join(TESTDATADIR, 'setbacks', 'RI_Water', @@ -603,8 +559,8 @@ def test_local_water_wind(max_workers, regulations_fpath): regs_fpath = os.path.join(td, regs_fpath) shutil.copy(regulations_fpath, regs_fpath) - regulations = WindRegulations(hub_height=4, 
rotor_diameter=2, - regulations_fpath=regs_fpath) + regulations = WindSetbackRegulations(hub_height=4, rotor_diameter=2, + regulations_fpath=regs_fpath) setbacks = WaterSetbacks(EXCL_H5, regulations) water_path = os.path.join(TESTDATADIR, 'setbacks', 'RI_Water', @@ -630,19 +586,17 @@ def test_local_water_wind(max_workers, regulations_fpath): def test_regulations_preflight_check(): - """ - Test Regulations preflight_checks - """ + """Test WindSetbackRegulations preflight_checks""" with pytest.raises(RuntimeError): - WindRegulations(HUB_HEIGHT, ROTOR_DIAMETER) + WindSetbackRegulations(HUB_HEIGHT, ROTOR_DIAMETER) def test_high_res_excl_array(): """Test the multiplier of the exclusion array is applied correctly. """ mult = 5 - regulations = Regulations(BASE_SETBACK_DIST, regulations_fpath=None, - multiplier=1) + regulations = SetbackRegulations(BASE_SETBACK_DIST, regulations_fpath=None, + multiplier=1) setbacks = ParcelSetbacks(EXCL_H5, regulations, weights_calculation_upscale_factor=mult) rasterizer = setbacks._rasterizer @@ -657,8 +611,8 @@ def test_aggregate_high_res(): """Test the aggregation of a high_resolution array. 
""" mult = 5 - regulations = Regulations(BASE_SETBACK_DIST, regulations_fpath=None, - multiplier=1) + regulations = SetbackRegulations(BASE_SETBACK_DIST, regulations_fpath=None, + multiplier=1) setbacks = ParcelSetbacks(EXCL_H5, regulations, weights_calculation_upscale_factor=mult) rasterizer = setbacks._rasterizer @@ -682,8 +636,8 @@ def test_partial_exclusions(): 'Rhode_Island.gpkg') mult = 5 - regulations = Regulations(BASE_SETBACK_DIST, regulations_fpath=None, - multiplier=10) + regulations = SetbackRegulations(BASE_SETBACK_DIST, regulations_fpath=None, + multiplier=10) setbacks = ParcelSetbacks(EXCL_H5, regulations,) setbacks_hr = ParcelSetbacks(EXCL_H5, regulations, weights_calculation_upscale_factor=mult) @@ -705,8 +659,8 @@ def test_partial_exclusions_upscale_factor_less_than_1(mult): parcel_path = os.path.join(TESTDATADIR, 'setbacks', 'RI_Parcels', 'Rhode_Island.gpkg') - regulations = Regulations(BASE_SETBACK_DIST, regulations_fpath=None, - multiplier=10) + regulations = SetbackRegulations(BASE_SETBACK_DIST, regulations_fpath=None, + multiplier=10) setbacks = ParcelSetbacks(EXCL_H5, regulations) setbacks_hr = ParcelSetbacks(EXCL_H5, regulations, weights_calculation_upscale_factor=mult) @@ -720,28 +674,28 @@ def test_partial_exclusions_upscale_factor_less_than_1(mult): @pytest.mark.parametrize( ('setbacks_class', 'regulations_class', 'features_path', 'regulations_fpath', 'generic_sum', 'local_sum', 'setback_distance'), - [(StructureSetbacks, WindRegulations, + [(StructureSetbacks, WindSetbackRegulations, os.path.join(TESTDATADIR, 'setbacks', 'RhodeIsland.gpkg'), - REGS_GPKG, 332_887, 142, [HUB_HEIGHT, ROTOR_DIAMETER]), - (RailSetbacks, WindRegulations, + REGS_GPKG, 332_887, 128, [HUB_HEIGHT, ROTOR_DIAMETER]), + (RailSetbacks, WindSetbackRegulations, os.path.join(TESTDATADIR, 'setbacks', 'Rhode_Island_Railroads.gpkg'), REGS_GPKG, 754_082, 9_276, [HUB_HEIGHT, ROTOR_DIAMETER]), - (ParcelSetbacks, WindRegulations, + (ParcelSetbacks, WindSetbackRegulations, 
os.path.join(TESTDATADIR, 'setbacks', 'RI_Parcels', 'Rhode_Island.gpkg'), PARCEL_REGS_FPATH_VALUE, 474, 3, [HUB_HEIGHT, ROTOR_DIAMETER]), - (WaterSetbacks, WindRegulations, + (WaterSetbacks, WindSetbackRegulations, os.path.join(TESTDATADIR, 'setbacks', 'Rhode_Island_Water.gpkg'), WATER_REGS_FPATH_VALUE, 1_159_266, 83, [HUB_HEIGHT, ROTOR_DIAMETER]), - (StructureSetbacks, Regulations, + (StructureSetbacks, SetbackRegulations, os.path.join(TESTDATADIR, 'setbacks', 'RhodeIsland.gpkg'), - REGS_FPATH, 260_963, 112, [BASE_SETBACK_DIST + 199]), - (RailSetbacks, Regulations, + REGS_FPATH, 260_963, 104, [BASE_SETBACK_DIST + 199]), + (RailSetbacks, SetbackRegulations, os.path.join(TESTDATADIR, 'setbacks', 'Rhode_Island_Railroads.gpkg'), REGS_FPATH, 5_355, 163, [BASE_SETBACK_DIST]), - (ParcelSetbacks, Regulations, + (ParcelSetbacks, SetbackRegulations, os.path.join(TESTDATADIR, 'setbacks', 'RI_Parcels', 'Rhode_Island.gpkg'), PARCEL_REGS_FPATH_VALUE, 438, 3, [BASE_SETBACK_DIST]), - (WaterSetbacks, Regulations, + (WaterSetbacks, SetbackRegulations, os.path.join(TESTDATADIR, 'setbacks', 'Rhode_Island_Water.gpkg'), WATER_REGS_FPATH_VALUE, 88_994, 83, [BASE_SETBACK_DIST])]) @pytest.mark.parametrize('sf', [None, 10]) @@ -817,28 +771,28 @@ def test_merged_setbacks(setbacks_class, regulations_class, features_path, @pytest.mark.parametrize( ('setbacks_class', 'regulations_class', 'features_path', 'regulations_fpath', 'generic_sum', 'setback_distance'), - [(StructureSetbacks, WindRegulations, + [(StructureSetbacks, WindSetbackRegulations, os.path.join(TESTDATADIR, 'setbacks', 'RhodeIsland.gpkg'), REGS_FPATH, 332_887, [HUB_HEIGHT, ROTOR_DIAMETER]), - (RailSetbacks, WindRegulations, + (RailSetbacks, WindSetbackRegulations, os.path.join(TESTDATADIR, 'setbacks', 'Rhode_Island_Railroads.gpkg'), REGS_FPATH, 754_082, [HUB_HEIGHT, ROTOR_DIAMETER]), - (ParcelSetbacks, WindRegulations, + (ParcelSetbacks, WindSetbackRegulations, os.path.join(TESTDATADIR, 'setbacks', 'RI_Parcels', 
'Rhode_Island.gpkg'), PARCEL_REGS_FPATH_VALUE, 474, [HUB_HEIGHT, ROTOR_DIAMETER]), - (WaterSetbacks, WindRegulations, + (WaterSetbacks, WindSetbackRegulations, os.path.join(TESTDATADIR, 'setbacks', 'Rhode_Island_Water.gpkg'), WATER_REGS_FPATH_VALUE, 1_159_266, [HUB_HEIGHT, ROTOR_DIAMETER]), - (StructureSetbacks, Regulations, + (StructureSetbacks, SetbackRegulations, os.path.join(TESTDATADIR, 'setbacks', 'RhodeIsland.gpkg'), REGS_FPATH, 260_963, [BASE_SETBACK_DIST + 199]), - (RailSetbacks, Regulations, + (RailSetbacks, SetbackRegulations, os.path.join(TESTDATADIR, 'setbacks', 'Rhode_Island_Railroads.gpkg'), REGS_FPATH, 5_355, [BASE_SETBACK_DIST]), - (ParcelSetbacks, Regulations, + (ParcelSetbacks, SetbackRegulations, os.path.join(TESTDATADIR, 'setbacks', 'RI_Parcels', 'Rhode_Island.gpkg'), PARCEL_REGS_FPATH_VALUE, 438, [BASE_SETBACK_DIST]), - (WaterSetbacks, Regulations, + (WaterSetbacks, SetbackRegulations, os.path.join(TESTDATADIR, 'setbacks', 'Rhode_Island_Water.gpkg'), WATER_REGS_FPATH_VALUE, 88_994, [BASE_SETBACK_DIST])]) def test_merged_setbacks_missing_local(setbacks_class, regulations_class, diff --git a/tests/test_turbine_flicker.py b/tests/test_turbine_flicker.py index 95a9bb77b..980ddb434 100644 --- a/tests/test_turbine_flicker.py +++ b/tests/test_turbine_flicker.py @@ -16,7 +16,6 @@ from reV.handlers.exclusions import ExclusionLayers from reVX import TESTDATADIR from reVX.turbine_flicker.turbine_flicker import ( - FlickerRegulations, TurbineFlicker, load_building_layer, flicker_fn_out, @@ -25,8 +24,10 @@ _get_flicker_excl_shifts, _invert_shadow_flicker_arr ) +from reVX.turbine_flicker.regulations import FlickerRegulations from reVX.turbine_flicker.turbine_flicker_cli import main from reVX.handlers.geotiff import Geotiff +from reVX.utilities import ExclusionsConverter pytest.importorskip('hybrid.flicker') @@ -46,6 +47,21 @@ def runner(): return CliRunner() +def test_flicker_regulations(): + """Test `WindSetbackRegulations` initialization and iteration. 
""" + + regs_path = os.path.join(TESTDATADIR, 'turbine_flicker', + 'blue_creek_regs_value.csv') + regs = FlickerRegulations(hub_height=100, rotor_diameter=50, + flicker_threshold=30, + regulations_fpath=regs_path) + assert regs.hub_height == 100 + assert regs.rotor_diameter == 50 + + for flicker_threshold, __ in regs: + assert np.isclose(flicker_threshold, 30) + + @pytest.mark.parametrize('shadow_loc', [(2, 2), (-2, -2), @@ -154,7 +170,8 @@ def test_turbine_flicker(max_workers): with ExclusionLayers(EXCL_H5) as f: baseline = f[BASELINE] - regulations = FlickerRegulations(HUB_HEIGHT, ROTOR_DIAMETER) + regulations = FlickerRegulations(HUB_HEIGHT, ROTOR_DIAMETER, + flicker_threshold=30) building_layer = load_building_layer(EXCL_H5, BLD_LAYER) tf = TurbineFlicker(EXCL_H5, RES_H5, building_layer, regulations, resolution=64, tm_dset='techmap_wind', @@ -163,11 +180,86 @@ def test_turbine_flicker(max_workers): assert np.allclose(baseline, test) +def test_local_turbine_flicker(): + """ + Test Turbine Flicker for local regulations + """ + regs_fpath = os.path.join(TESTDATADIR, 'turbine_flicker', + 'blue_creek_regs_value.csv') + regulations = FlickerRegulations(HUB_HEIGHT, ROTOR_DIAMETER, + regulations_fpath=regs_fpath) + building_layer = load_building_layer(EXCL_H5, BLD_LAYER) + with tempfile.TemporaryDirectory() as td: + excl_h5 = os.path.join(td, os.path.basename(EXCL_H5)) + shutil.copy(EXCL_H5, excl_h5) + with ExclusionLayers(EXCL_H5) as f: + fips = np.zeros(f.shape, dtype=np.uint32) + fips[:10] = 39001 + ExclusionsConverter._write_layer(excl_h5, 'cnty_fips', f.profile, + fips, chunks=f.chunks) + + tf = TurbineFlicker(excl_h5, RES_H5, building_layer, regulations, + resolution=64, tm_dset='techmap_wind', + max_flicker_exclusion_range=4540) + test = tf.compute_exclusions(None, max_workers=1) + + with ExclusionLayers(EXCL_H5) as f: + baseline = f[BASELINE] + + assert np.allclose(baseline[:10], test[:10]) + assert not np.allclose(baseline[10:], test[10:]) + assert 
np.allclose(test[10:], 1) + + +def test_local_and_generic_turbine_flicker(): + """ + Test Turbine Flicker for local + generic regulations + """ + regs_fpath = os.path.join(TESTDATADIR, 'turbine_flicker', + 'blue_creek_regs_value.csv') + regulations = FlickerRegulations(HUB_HEIGHT, ROTOR_DIAMETER, + flicker_threshold=100, + regulations_fpath=regs_fpath) + regulations_generic_only = FlickerRegulations(HUB_HEIGHT, ROTOR_DIAMETER, + flicker_threshold=100, + regulations_fpath=None) + building_layer = load_building_layer(EXCL_H5, BLD_LAYER) + + tf = TurbineFlicker(EXCL_H5, RES_H5, building_layer, + regulations_generic_only, + resolution=64, tm_dset='techmap_wind', + max_flicker_exclusion_range=4540) + generic_flicker = tf.compute_exclusions(None, max_workers=1) + + with tempfile.TemporaryDirectory() as td: + excl_h5 = os.path.join(td, os.path.basename(EXCL_H5)) + shutil.copy(EXCL_H5, excl_h5) + with ExclusionLayers(EXCL_H5) as f: + fips = np.zeros(f.shape, dtype=np.uint32) + fips[:10] = 39001 + ExclusionsConverter._write_layer(excl_h5, 'cnty_fips', f.profile, + fips, chunks=f.chunks) + + tf = TurbineFlicker(excl_h5, RES_H5, building_layer, regulations, + resolution=64, tm_dset='techmap_wind', + max_flicker_exclusion_range=4540) + test = tf.compute_exclusions(None, max_workers=1) + + with ExclusionLayers(EXCL_H5) as f: + baseline = f[BASELINE] + + assert np.allclose(baseline[:10], test[:10]) + assert not np.allclose(generic_flicker[:10], test[:10]) + assert np.allclose(generic_flicker[10:], test[10:]) + assert not np.allclose(baseline[10:], test[10:]) + + def test_turbine_flicker_bad_building_layer_input(): """ Test Turbine Flicker with bad input for max_flicker_exclusion_range """ - regulations = FlickerRegulations(HUB_HEIGHT, ROTOR_DIAMETER) + regulations = FlickerRegulations(HUB_HEIGHT, ROTOR_DIAMETER, + flicker_threshold=30) with pytest.raises(RuntimeError) as excinfo: TurbineFlicker(EXCL_H5, RES_H5, np.zeros((10, 10)), regulations) @@ -179,7 +271,8 @@ def 
test_turbine_flicker_bad_max_flicker_exclusion_range_input(): """ Test Turbine Flicker with bad input for max_flicker_exclusion_range """ - regulations = FlickerRegulations(HUB_HEIGHT, ROTOR_DIAMETER) + regulations = FlickerRegulations(HUB_HEIGHT, ROTOR_DIAMETER, + flicker_threshold=30) building_layer = load_building_layer(EXCL_H5, BLD_LAYER) with pytest.raises(TypeError) as excinfo: TurbineFlicker(EXCL_H5, RES_H5, building_layer, regulations, From 6d81972d42587a0b6d365189dfd5a1f642cedee7 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Fri, 30 Sep 2022 13:49:42 -0600 Subject: [PATCH 35/42] (WIP) Added tests for allow both building layer and features fpath input (WIP) --- reVX/setbacks/regulations.py | 2 - reVX/turbine_flicker/turbine_flicker.py | 6 +- tests/test_turbine_flicker.py | 110 ++++++++++++++++++++++++ 3 files changed, 112 insertions(+), 6 deletions(-) diff --git a/reVX/setbacks/regulations.py b/reVX/setbacks/regulations.py index a0b6a9c23..15757f8f5 100644 --- a/reVX/setbacks/regulations.py +++ b/reVX/setbacks/regulations.py @@ -4,9 +4,7 @@ """ from warnings import warn import logging -import geopandas as gpd -from rex.utilities import parse_table from reVX.utilities.regulations import AbstractBaseRegulations diff --git a/reVX/turbine_flicker/turbine_flicker.py b/reVX/turbine_flicker/turbine_flicker.py index 8623db067..f07c22d17 100644 --- a/reVX/turbine_flicker/turbine_flicker.py +++ b/reVX/turbine_flicker/turbine_flicker.py @@ -12,7 +12,6 @@ from reV.supply_curve.extent import SupplyCurveExtent from reV.supply_curve.tech_mapping import TechMapping from reVX.handlers.geotiff import Geotiff -from reVX.utilities.regulations import AbstractBaseRegulations from reVX.wind_dirs.mean_wind_dirs_point import MeanWindDirectionsPoint from reVX.utilities.exclusions import (ExclusionsConverter, AbstractBaseExclusionsMerger) @@ -827,8 +826,7 @@ def load_building_layer(excl_fpath, building_layer=None, features_path=None, If `building_layer` is not None but also does not 
exist in `excl_fpath` .h5 file. """ - - if building_layer is not None: + if building_layer is not None and features_path is None: with ExclusionLayers(excl_fpath, hsds=hsds) as f: if building_layer not in f.layers: msg = ("{} is not available in {}" @@ -839,7 +837,7 @@ def load_building_layer(excl_fpath, building_layer=None, features_path=None, .format(excl_fpath, building_layer)) return f[building_layer] - if features_path is not None: + if building_layer is None and features_path is not None: logger.debug("Loading building data from {}".format(features_path)) with Geotiff(features_path) as f: return f.values[0] diff --git a/tests/test_turbine_flicker.py b/tests/test_turbine_flicker.py index 980ddb434..59f7ca739 100644 --- a/tests/test_turbine_flicker.py +++ b/tests/test_turbine_flicker.py @@ -62,6 +62,32 @@ def test_flicker_regulations(): assert np.isclose(flicker_threshold, 30) +def test_load_building_layer(): + """Test the load building layer function. """ + building_layer = load_building_layer(EXCL_H5, BLD_LAYER) + with ExclusionLayers(EXCL_H5) as f: + baseline = f[BLD_LAYER] + profile = f.profile + + assert np.allclose(building_layer, baseline) + + with tempfile.TemporaryDirectory() as td: + tiff_fp = os.path.join(td, "temp.tiff") + ExclusionsConverter._write_geotiff(tiff_fp, profile, baseline) + building_layer = load_building_layer(EXCL_H5, features_path=tiff_fp) + assert np.allclose(building_layer, baseline) + + +@pytest.mark.parametrize('inputs', [[], [BLD_LAYER, "A fake path"]]) +def test_load_building_layer_bad_input(inputs): + """Test the load building layer function with bad inputs. """ + with pytest.raises(RuntimeError) as excinfo: + load_building_layer(EXCL_H5, *inputs) + + assert "Must provide either `features_path` or " in str(excinfo.value) + assert "`building_layer` (but not both)." 
in str(excinfo.value) + + @pytest.mark.parametrize('shadow_loc', [(2, 2), (-2, -2), @@ -375,6 +401,90 @@ def test_cli_tiff(runner): LOGGERS.clear() +def test_cli_tiff_input(runner): + """Test Turbine Flicker CLI with input building tiff. """ + + with ExclusionLayers(EXCL_H5) as f: + building_layer = f[BLD_LAYER] + profile = f.profile + baseline = f[BASELINE] + + with tempfile.TemporaryDirectory() as td: + tiff_fp = os.path.join(td, "temp.tiff") + ExclusionsConverter._write_geotiff(tiff_fp, profile, building_layer) + + excl_h5 = os.path.join(td, os.path.basename(EXCL_H5)) + shutil.copy(EXCL_H5, excl_h5) + # out_tiff = f"{BLD_LAYER}_{HUB_HEIGHT}hh_{ROTOR_DIAMETER}rd.tiff" + out_tiff = flicker_fn_out(HUB_HEIGHT, ROTOR_DIAMETER) + config = { + "log_directory": td, + "excl_fpath": excl_h5, + "execution_control": { + "option": "local", + }, + "hub_height": HUB_HEIGHT, + "rotor_diameter": ROTOR_DIAMETER, + "log_level": "INFO", + "res_fpath": RES_H5, + "features_path": tiff_fp, + "resolution": 64, + "tm_dset": "techmap_wind", + "max_flicker_exclusion_range": 4540 + } + config_path = os.path.join(td, 'config.json') + with open(config_path, 'w') as f: + json.dump(config, f) + + result = runner.invoke(main, ['from-config', '-c', config_path]) + msg = 'Failed with error {}'.format( + traceback.print_exception(*result.exc_info) + ) + assert result.exit_code == 0, msg + + with ExclusionLayers(excl_h5) as f: + assert out_tiff not in f.layers + assert out_tiff.split('.') not in f.layers + + with Geotiff(os.path.join(td, out_tiff)) as f: + test = f.values[0] + + assert np.allclose(baseline, test) + + LOGGERS.clear() + + +def test_cli_bad_input(runner): + """Test Turbine Flicker CLI with bad input. 
""" + + with tempfile.TemporaryDirectory() as td: + tiff_fp = os.path.join(td, "temp.tiff") + config = { + "log_directory": td, + "excl_fpath": EXCL_H5, + "execution_control": { + "option": "local", + }, + "hub_height": HUB_HEIGHT, + "rotor_diameter": ROTOR_DIAMETER, + "log_level": "INFO", + "res_fpath": RES_H5, + "building_layer": BLD_LAYER, + "features_path": tiff_fp, + "resolution": 64, + "tm_dset": "techmap_wind", + "max_flicker_exclusion_range": 4540 + } + config_path = os.path.join(td, 'config.json') + with open(config_path, 'w') as f: + json.dump(config, f) + + result = runner.invoke(main, ['from-config', '-c', config_path]) + assert result.exit_code == 1 + + LOGGERS.clear() + + def test_cli_max_flicker_exclusion_range(runner): """Test Turbine Flicker CLI with max_flicker_exclusion_range value. """ def_tiff_name = flicker_fn_out(HUB_HEIGHT, ROTOR_DIAMETER) From 95a12a7aa22d699839307e7871b79cf2eef15e8f Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Fri, 30 Sep 2022 14:02:47 -0600 Subject: [PATCH 36/42] Added missing docstring --- reVX/setbacks/regulations.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/reVX/setbacks/regulations.py b/reVX/setbacks/regulations.py index 15757f8f5..9edf4947e 100644 --- a/reVX/setbacks/regulations.py +++ b/reVX/setbacks/regulations.py @@ -278,8 +278,12 @@ def select_setback_regulations(base_setback_dist=None, hub_height=None, Parameters ---------- - base_setback_dist : _type_, optional - _description_. By default, `None`. + base_setback_dist : float | int + Base setback distance (m). This value will be used to calculate + the setback distance when a multiplier is provided either via + the `regulations_fpath` csv or the `multiplier` input. In these + cases, the setbacks will be calculated using + `base_setback_dist * multiplier`. By default, `None`. 
hub_height : float | int Turbine hub height (m), used along with rotor diameter to compute blade tip height which is used to determine setback From 40ffee522e63078affce9c5bbc0cbf54870c2628 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Fri, 30 Sep 2022 14:11:42 -0600 Subject: [PATCH 37/42] (WIP) Added test for empty local regulations (WIP) --- tests/test_turbine_flicker.py | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/tests/test_turbine_flicker.py b/tests/test_turbine_flicker.py index 59f7ca739..fbe5e2092 100644 --- a/tests/test_turbine_flicker.py +++ b/tests/test_turbine_flicker.py @@ -4,6 +4,7 @@ """ from click.testing import CliRunner import json +import pandas as pd import numpy as np import os import pytest @@ -237,6 +238,35 @@ def test_local_turbine_flicker(): assert np.allclose(test[10:], 1) +def test_local_flicker_empty_regs(): + """ + Test Turbine Flicker for empty local regulations + """ + regs_fpath = os.path.join(TESTDATADIR, 'turbine_flicker', + 'blue_creek_regs_value.csv') + building_layer = load_building_layer(EXCL_H5, BLD_LAYER) + with tempfile.TemporaryDirectory() as td: + regs = pd.read_csv(regs_fpath).iloc[0:0] + regs_fpath = os.path.basename(regs_fpath) + regs_fpath = os.path.join(td, regs_fpath) + regs.to_csv(regs_fpath, index=False) + regulations = FlickerRegulations(HUB_HEIGHT, ROTOR_DIAMETER, + regulations_fpath=regs_fpath) + + excl_h5 = os.path.join(td, os.path.basename(EXCL_H5)) + shutil.copy(EXCL_H5, excl_h5) + with ExclusionLayers(EXCL_H5) as f: + fips = np.zeros(f.shape, dtype=np.uint32) + ExclusionsConverter._write_layer(excl_h5, 'cnty_fips', f.profile, + fips, chunks=f.chunks) + + tf = TurbineFlicker(excl_h5, RES_H5, building_layer, regulations, + resolution=64, tm_dset='techmap_wind', + max_flicker_exclusion_range=4540) + with pytest.raises(ValueError): + tf.compute_exclusions(None, max_workers=1) + + def test_local_and_generic_turbine_flicker(): """ Test Turbine Flicker for local + generic 
regulations From a61edc86e468a5e0b19161f1c8e16c122f6996d4 Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Fri, 30 Sep 2022 14:11:59 -0600 Subject: [PATCH 38/42] (WIP) Added description property (WIP) --- reVX/setbacks/base.py | 11 +++++++++++ reVX/turbine_flicker/turbine_flicker.py | 9 +++++++++ 2 files changed, 20 insertions(+) diff --git a/reVX/setbacks/base.py b/reVX/setbacks/base.py index 75eed1f23..7da55ac2a 100644 --- a/reVX/setbacks/base.py +++ b/reVX/setbacks/base.py @@ -324,6 +324,17 @@ def parse_features(self): return (gpd.read_file(self._features_fpath) .to_crs(crs=self.profile['crs'])) + @property + def description(self): + """str: Description to be added to excl H5.""" + return ('{} computed with a base setback distance of {} and a ' + 'multiplier of {} for a total generic setback value of {} ' + '(local exclusions may differ).' + .format(self.__class__, + self._regulations.base_setback_dist, + self._regulations.multiplier, + self._regulations.generic)) + @property def no_exclusions_array(self): """np.array: Array representing no exclusions. """ diff --git a/reVX/turbine_flicker/turbine_flicker.py b/reVX/turbine_flicker/turbine_flicker.py index f07c22d17..4f3ab8fea 100644 --- a/reVX/turbine_flicker/turbine_flicker.py +++ b/reVX/turbine_flicker/turbine_flicker.py @@ -261,6 +261,15 @@ def _exclude_turbine_flicker(self, point, res_fpath, flicker_threshold): return flicker_shifts + @property + def description(self): + """str: Description to be added to excl H5.""" + return ('Pixels with value 0 are excluded as they will cause shadow ' + 'flicker on buildings. Shadow flicker is computed using a ' + '{}m hub height, {}m rotor diameter turbine.' 
+ .format(self._regulations.hub_height, + self._regulations.rotor_diameter)) + def compute_flicker_exclusions(self, flicker_threshold=30, fips=None, max_workers=None, out_layer=None, out_tiff=None): From bd667f24f40fb96bff0401d894894dd85509dc4c Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Fri, 30 Sep 2022 14:37:14 -0600 Subject: [PATCH 39/42] (WIP) Added test for TM mapping (WIP) --- tests/test_turbine_flicker.py | 48 +++++++++++++++++++++++++---------- 1 file changed, 34 insertions(+), 14 deletions(-) diff --git a/tests/test_turbine_flicker.py b/tests/test_turbine_flicker.py index fbe5e2092..ae352b252 100644 --- a/tests/test_turbine_flicker.py +++ b/tests/test_turbine_flicker.py @@ -38,6 +38,7 @@ ROTOR_DIAMETER = 108 BASELINE = 'turbine_flicker' BLD_LAYER = 'blue_creek_buildings' +TM = 'techmap_wind' @pytest.fixture(scope="module") @@ -114,7 +115,24 @@ def test_shadow_mapping(shadow_loc): assert np.allclose(baseline_col_idx, test_col_idx) -@pytest.mark.parametrize('flicker_threshold', [10, 30]) +def test_flicker_tech_mapping(): + """Tets that flicker runs tech mapping if it DNE. 
""" + building_layer = load_building_layer(EXCL_H5, BLD_LAYER) + regulations = FlickerRegulations(HUB_HEIGHT, ROTOR_DIAMETER, + flicker_threshold=30) + with tempfile.TemporaryDirectory() as td: + excl_h5 = os.path.join(td, os.path.basename(EXCL_H5)) + shutil.copy(EXCL_H5, excl_h5) + with ExclusionLayers(excl_h5) as f: + assert "techmap_wtk" not in f.layers + + TurbineFlicker(excl_h5, RES_H5, building_layer, regulations) + + with ExclusionLayers(excl_h5) as f: + assert "techmap_wtk" in f.layers + + +@pytest.mark.parametrize('flicker_threshold', [10]) def test_shadow_flicker(flicker_threshold): """ Test shadow_flicker @@ -125,7 +143,8 @@ def test_shadow_flicker(flicker_threshold): flicker_threshold=flicker_threshold) building_layer = load_building_layer(EXCL_H5, BLD_LAYER) tf = TurbineFlicker(EXCL_H5, RES_H5, building_layer, regulations, - grid_cell_size=90, max_flicker_exclusion_range=4_510) + grid_cell_size=90, max_flicker_exclusion_range=4_510, + tm_dset=TM) shadow_flicker = tf._compute_shadow_flicker(lat, lon, wind_dir) baseline = (shadow_flicker[::-1, ::-1].copy() @@ -201,7 +220,7 @@ def test_turbine_flicker(max_workers): flicker_threshold=30) building_layer = load_building_layer(EXCL_H5, BLD_LAYER) tf = TurbineFlicker(EXCL_H5, RES_H5, building_layer, regulations, - resolution=64, tm_dset='techmap_wind', + resolution=64, tm_dset=TM, max_flicker_exclusion_range=4540) test = tf.compute_flicker_exclusions(max_workers=max_workers) assert np.allclose(baseline, test) @@ -226,7 +245,7 @@ def test_local_turbine_flicker(): fips, chunks=f.chunks) tf = TurbineFlicker(excl_h5, RES_H5, building_layer, regulations, - resolution=64, tm_dset='techmap_wind', + resolution=64, tm_dset=TM, max_flicker_exclusion_range=4540) test = tf.compute_exclusions(None, max_workers=1) @@ -261,7 +280,7 @@ def test_local_flicker_empty_regs(): fips, chunks=f.chunks) tf = TurbineFlicker(excl_h5, RES_H5, building_layer, regulations, - resolution=64, tm_dset='techmap_wind', + resolution=64, 
tm_dset=TM, max_flicker_exclusion_range=4540) with pytest.raises(ValueError): tf.compute_exclusions(None, max_workers=1) @@ -283,7 +302,7 @@ def test_local_and_generic_turbine_flicker(): tf = TurbineFlicker(EXCL_H5, RES_H5, building_layer, regulations_generic_only, - resolution=64, tm_dset='techmap_wind', + resolution=64, tm_dset=TM, max_flicker_exclusion_range=4540) generic_flicker = tf.compute_exclusions(None, max_workers=1) @@ -297,7 +316,7 @@ def test_local_and_generic_turbine_flicker(): fips, chunks=f.chunks) tf = TurbineFlicker(excl_h5, RES_H5, building_layer, regulations, - resolution=64, tm_dset='techmap_wind', + resolution=64, tm_dset=TM, max_flicker_exclusion_range=4540) test = tf.compute_exclusions(None, max_workers=1) @@ -317,7 +336,8 @@ def test_turbine_flicker_bad_building_layer_input(): regulations = FlickerRegulations(HUB_HEIGHT, ROTOR_DIAMETER, flicker_threshold=30) with pytest.raises(RuntimeError) as excinfo: - TurbineFlicker(EXCL_H5, RES_H5, np.zeros((10, 10)), regulations) + TurbineFlicker(EXCL_H5, RES_H5, np.zeros((10, 10)), regulations, + tm_dset=TM) assert "Shape of building layer" in str(excinfo.value) assert "does not match shape of ExclusionLayers" in str(excinfo.value) @@ -332,7 +352,7 @@ def test_turbine_flicker_bad_max_flicker_exclusion_range_input(): building_layer = load_building_layer(EXCL_H5, BLD_LAYER) with pytest.raises(TypeError) as excinfo: TurbineFlicker(EXCL_H5, RES_H5, building_layer, regulations, - max_flicker_exclusion_range='abc') + tm_dset=TM, max_flicker_exclusion_range='abc') assert "max_flicker_exclusion_range must be numeric" in str(excinfo.value) @@ -359,7 +379,7 @@ def test_cli(runner): "log_level": "INFO", "res_fpath": RES_H5, "resolution": 64, - "tm_dset": "techmap_wind", + "tm_dset": TM, "max_flicker_exclusion_range": 4540 } config_path = os.path.join(td, 'config.json') @@ -403,7 +423,7 @@ def test_cli_tiff(runner): "log_level": "INFO", "res_fpath": RES_H5, "resolution": 64, - "tm_dset": "techmap_wind", + 
"tm_dset": TM, "max_flicker_exclusion_range": 4540 } config_path = os.path.join(td, 'config.json') @@ -459,7 +479,7 @@ def test_cli_tiff_input(runner): "res_fpath": RES_H5, "features_path": tiff_fp, "resolution": 64, - "tm_dset": "techmap_wind", + "tm_dset": TM, "max_flicker_exclusion_range": 4540 } config_path = os.path.join(td, 'config.json') @@ -502,7 +522,7 @@ def test_cli_bad_input(runner): "building_layer": BLD_LAYER, "features_path": tiff_fp, "resolution": 64, - "tm_dset": "techmap_wind", + "tm_dset": TM, "max_flicker_exclusion_range": 4540 } config_path = os.path.join(td, 'config.json') @@ -534,7 +554,7 @@ def test_cli_max_flicker_exclusion_range(runner): "log_level": "INFO", "res_fpath": RES_H5, "resolution": 64, - "tm_dset": "techmap_wind", + "tm_dset": TM, "max_flicker_exclusion_range": 4_540 } config_path = os.path.join(td, 'config.json') From 61d238d70668891c6f6232243d39db8caf8d230d Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Fri, 30 Sep 2022 15:28:11 -0600 Subject: [PATCH 40/42] Cleanup --- reVX/config/turbine_flicker.py | 6 - reVX/setbacks/setbacks_cli.py | 1 - reVX/turbine_flicker/turbine_flicker.py | 174 ++------------------ reVX/turbine_flicker/turbine_flicker_cli.py | 8 - reVX/utilities/exclusions.py | 13 +- tests/test_setbacks.py | 69 ++++---- tests/test_turbine_flicker.py | 16 +- 7 files changed, 65 insertions(+), 222 deletions(-) diff --git a/reVX/config/turbine_flicker.py b/reVX/config/turbine_flicker.py index 93ab2e40b..9b9ddcb69 100644 --- a/reVX/config/turbine_flicker.py +++ b/reVX/config/turbine_flicker.py @@ -117,12 +117,6 @@ def out_layer(self): """ return self.get('out_layer', None) - # @property - # def out_tiff(self): - # """str: Path to output tiff file where exclusions should be saved. 
""" - # out_tiff = self.get('out_tiff', None) - # return self.get('out_tiff', None) - @property def replace(self): """Get replace flag""" diff --git a/reVX/setbacks/setbacks_cli.py b/reVX/setbacks/setbacks_cli.py index 28d19f073..effac6a7c 100644 --- a/reVX/setbacks/setbacks_cli.py +++ b/reVX/setbacks/setbacks_cli.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- -# pylint: disable=all """ Setbacks CLI """ diff --git a/reVX/turbine_flicker/turbine_flicker.py b/reVX/turbine_flicker/turbine_flicker.py index 4f3ab8fea..fd56d1d05 100644 --- a/reVX/turbine_flicker/turbine_flicker.py +++ b/reVX/turbine_flicker/turbine_flicker.py @@ -13,8 +13,7 @@ from reV.supply_curve.tech_mapping import TechMapping from reVX.handlers.geotiff import Geotiff from reVX.wind_dirs.mean_wind_dirs_point import MeanWindDirectionsPoint -from reVX.utilities.exclusions import (ExclusionsConverter, - AbstractBaseExclusionsMerger) +from reVX.utilities.exclusions import AbstractBaseExclusionsMerger from rex.resource_extraction.resource_extraction import WindX from rex.utilities.execution import SpawnProcessPool from rex.utilities.loggers import log_mem @@ -270,9 +269,8 @@ def description(self): .format(self._regulations.hub_height, self._regulations.rotor_diameter)) - def compute_flicker_exclusions(self, flicker_threshold=30, fips=None, - max_workers=None, out_layer=None, - out_tiff=None): + def compute_flicker_exclusions(self, flicker_threshold, fips=None, + max_workers=None): """Compute turbine flicker exclusions. Exclude all pixels that will cause flicker exceeding the @@ -285,20 +283,14 @@ def compute_flicker_exclusions(self, flicker_threshold=30, fips=None, Parameters ---------- - flicker_threshold : int, optional - Maximum number of allowable flicker hours. By default, `30`. + flicker_threshold : int + Maximum number of allowable flicker hours. fips : int, optional If not `None`, only building indices within counties with the given FIPS code will be returned. By default, `None`. 
max_workers : int, optional Number of workers to use. If 1 run, in serial. If `None`, use all available cores. By default, `None`. - out_layer : str, optional - Layer to save exclusions under. Layer will be saved in - `excl_fpath`. By default, `None`. - out_tiff : str, optional - Path to output tiff file where exclusions should be saved. - By default, `None`. Returns ------- @@ -331,14 +323,6 @@ def compute_flicker_exclusions(self, flicker_threshold=30, fips=None, futures = {} for gid in gids: point = self._sc_points.iloc[gid] - - # row_idx, col_idx, shape = _get_building_indices( - # self._excl_fpath, self._bld_layer, point.name, - # resolution=self._res, fips=fips, - # building_threshold=self._building_threshold) - # if row_idx.size == 0: - # continue - future = exe.submit(self._exclude_turbine_flicker, point, self._res_h5, flicker_threshold) futures[future] = point @@ -349,7 +333,7 @@ def compute_flicker_exclusions(self, flicker_threshold=30, fips=None, row_idx, col_idx = _get_building_indices( self._bld_layer, point.name, - resolution=self._res, # fips=fips, + resolution=self._res, building_threshold=self._building_threshold) row_idx, col_idx = _create_excl_indices( (row_idx, col_idx), flicker_shifts, @@ -369,20 +353,12 @@ def compute_flicker_exclusions(self, flicker_threshold=30, fips=None, logger.info(msg) for i, gid in enumerate(gids): point = self._sc_points.iloc[gid] - # for i, (_, point) in enumerate(self._sc_points.iterrows()): - # row_idx, col_idx, shape = _get_building_indices( - # self._excl_fpath, self._bld_layer, point.name, - # resolution=self._res, fips=fips, - # building_threshold=self._building_threshold) - # if row_idx.size == 0: - # continue - flicker_shifts = self._exclude_turbine_flicker( point, self._res_h5, flicker_threshold) row_idx, col_idx = _get_building_indices( self._bld_layer, point.name, - resolution=self._res, # fips=fips, + resolution=self._res, building_threshold=self._building_threshold) row_idx, col_idx = 
_create_excl_indices((row_idx, col_idx), flicker_shifts, @@ -393,27 +369,6 @@ def compute_flicker_exclusions(self, flicker_threshold=30, fips=None, .format((i + 1), len(self._sc_points))) log_mem(logger) - # if out_layer: - # logger.info('Saving flicker inclusion layer to {} as {}' - # .format(self._excl_fpath, out_layer)) - # description = ( - # 'Pixels with value 0 are excluded as they will cause greater ' - # 'than {} hours of flicker on buildings in {}. Shadow flicker ' - # 'is computed using a {}m hub height, {}m rotor diameter ' - # 'turbine.' - # .format(flicker_threshold, self._bld_layer, - # self._regulations.hub_height, - # self._regulations.rotor_diameter) - # ) - # ExclusionsConverter._write_layer(self._excl_fpath, out_layer, - # self.profile, flicker_arr, - # description=description) - # if out_tiff: - # logger.info('Saving flicker inclusion layer to {}' - # .format(out_tiff)) - # ExclusionsConverter._write_geotiff(out_tiff, self.profile, - # flicker_arr) - return flicker_arr def _apply_regulations_mask(self): @@ -480,8 +435,8 @@ def compute_local_exclusions(self, regulation_value, cnty): cnty_fips = cnty.iloc[0]['FIPS'] logger.debug('- Computing flicker for county FIPS {}' .format(cnty_fips)) - return self.compute_flicker_exclusions( - flicker_threshold=regulation_value, fips=cnty_fips, max_workers=1) + return self.compute_flicker_exclusions(regulation_value, + fips=cnty_fips, max_workers=1) def compute_generic_exclusions(self, max_workers=None): """Compute generic flicker exclusions. 
@@ -507,26 +462,16 @@ def compute_generic_exclusions(self, max_workers=None): 'rotor diameter turbines' .format(ft, self._regulations.hub_height, self._regulations.rotor_diameter)) - return self.compute_flicker_exclusions(flicker_threshold=ft, fips=None, + return self.compute_flicker_exclusions(ft, fips=None, max_workers=max_workers) - def input_output_filenames(self, out_dir, features_fpath): + def input_output_filenames(self, out_dir, *__, **___): """Generate pairs of input/output file names. Parameters ---------- out_dir : str Path to output file directory. - features_fpath : : str - Path to features file. This path can contain - any pattern that can be used in the glob function. - For example, `/path/to/features/[A]*` would match - with all the features in the directory - `/path/to/features/` that start with "A". This input - can also be a directory, but that directory must ONLY - contain feature files. If your feature files are mixed - with other files or directories, use something like - `/path/to/features/*.geojson`. Yields ------ @@ -540,89 +485,8 @@ def input_output_filenames(self, out_dir, features_fpath): yield fpath, os.path.join(out_dir, geotiff) - - # @classmethod - # def run(cls, excl_fpath, res_fpath, building_layer, tm_dset='techmap_wtk', - # building_threshold=0, flicker_threshold=30, resolution=640, - # grid_cell_size=90, max_flicker_exclusion_range=10_000, - # max_workers=None, out_layer=None, out_tiff=None): - # """Run flicker exclusion layer generation. - - # Exclude all pixels that will cause flicker exceeding the - # "flicker_threshold" on any building in "building_layer". - # Buildings are defined as pixels with >= the "building_threshold - # value in "building_layer". Shadow flicker is computed at the - # supply curve point resolution based on a turbine with - # "hub_height" (m) and applied to all buildings within that supply - # curve point sub-array. - - # Parameters - # ---------- - # excl_fpath : str - # Filepath to exclusions h5 file. 
File must contain - # `building_layer` and `tm_dset`. - # res_fpath : str - # Filepath to wind resource .h5 file containing hourly wind - # direction data. - # building_layer : str - # Exclusion layer containing buildings from which turbine - # flicker exclusions will be computed. - # regulations : `FlickerRegulations` - # A `FlickerRegulations` object used to shadow flicker - # regulation values. - # tm_dset : str, optional - # Dataset / layer name for wind toolkit techmap. By default, - # `'techmap_wtk'`. - # building_threshold : float, optional - # Threshold for exclusion layer values to identify pixels with - # buildings, values are % of pixel containing a building. By - # default, `0`. - # flicker_threshold : int, optional - # Maximum number of allowable flicker hours. By default, `30`. - # resolution : int, optional - # SC resolution, must be input in combination with gid. - # By default, `640`. - # grid_cell_size : float, optional - # Length (m) of a side of each grid cell in `excl_fpath`. - # max_flicker_exclusion_range : float, optional - # Max distance (m) that flicker exclusions will extend in - # any of the cardinal directions. Note that increasing this - # value can lead to drastically instead memory requirements. - # This value may be increased slightly in order to yield - # odd exclusion array shapes. - # max_workers : int, optional - # Number of workers to use. If 1 run, in serial. If `None`, - # use all available cores. By default, `None`. - # out_layer : str, optional - # Layer to save exclusions under. Layer will be saved in - # `excl_fpath`. By default, `None`. - # out_tiff : str, optional - # Path to output tiff file where exclusions should be saved. - # By default, `None`. - - # Returns - # ------- - # flicker_arr : ndarray - # 2D inclusion array. 
Pixels to exclude (0) to prevent shadow - # flicker on buildings in "building_layer" - # """ - # flicker = cls(excl_fpath, res_fpath, building_layer, - # resolution=resolution, grid_cell_size=grid_cell_size, - # max_flicker_exclusion_range=max_flicker_exclusion_range, - # tm_dset=tm_dset) - # out_excl = flicker.compute_exclusions( - # building_threshold=building_threshold, - # flicker_threshold=flicker_threshold, - # max_workers=max_workers, - # out_layer=out_layer, - # out_tiff=out_tiff - # ) - # return out_excl - - def _get_building_indices(building_layer, gid, resolution=640, - building_threshold=0, - fips=None, fips_layer="cnty_fips", hsds=False): + building_threshold=0): """Find buildings exclusion indices Parameters @@ -639,14 +503,6 @@ def _get_building_indices(building_layer, gid, resolution=640, Threshold for exclusion layer values to identify pixels with buildings, values are % of pixel containing a building. By default, `0`. - fips : int, optional - If not `None`, only building indices within counties with the - given FIPS code will be returned. By default, `None`. - fips_layer : str, optional - Name of fips layer in `excl_fpath`. By default, `"cnty_fips"`. - hsds : bool, optional - Boolean flag to use h5pyd to handle .h5 'files' hosted on - AWS behind HSDS. By default `False`. 
Returns ------- @@ -666,11 +522,7 @@ def _get_building_indices(building_layer, gid, resolution=640, row_idx = np.array(range(*row_slice.indices(row_slice.stop))) col_idx = np.array(range(*col_slice.indices(col_slice.stop))) - bld_row_idx, bld_col_idx = np.where((sc_blds > building_threshold) - # & (fips_vals == fips - # if fips is not None else True) - ) - + bld_row_idx, bld_col_idx = np.where(sc_blds > building_threshold) return row_idx[bld_row_idx], col_idx[bld_col_idx] diff --git a/reVX/turbine_flicker/turbine_flicker_cli.py b/reVX/turbine_flicker/turbine_flicker_cli.py index e54549941..f155c0884 100644 --- a/reVX/turbine_flicker/turbine_flicker_cli.py +++ b/reVX/turbine_flicker/turbine_flicker_cli.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- -# pylint: disable=all """ Turbine Flicker Command Line Interface """ @@ -195,9 +194,6 @@ def local(ctx, excl_fpath, res_fpath, features_path, building_layer, """ Compute turbine flicker on local hardware """ - # if out_layer is None: - # out_layer = "{}-{}m".format(building_layer, hub_height) - if out_layer is not None: out_layers = {TurbineFlicker.DEFAULT_FEATURE_OUTFILE: out_layer} else: @@ -210,10 +206,6 @@ def local(ctx, excl_fpath, res_fpath, features_path, building_layer, log_modules = [__name__, 'reVX', 'reV', 'rex'] init_mult(name, log_dir, modules=log_modules, verbose=verbose) - # logger.info('Computing Turbine Flicker Exclusions, ' - # 'outputs to be save as a a new exclusions layer : {}' - # .format(out_layer)) - logger.info('Computing Turbine Flicker Exclusions from structures in {}' .format(building_layer)) logger.debug('Flicker to be computed with:\n' diff --git a/reVX/utilities/exclusions.py b/reVX/utilities/exclusions.py index 365f4cbf3..6ff47b5b1 100644 --- a/reVX/utilities/exclusions.py +++ b/reVX/utilities/exclusions.py @@ -18,7 +18,7 @@ from shapely.geometry import shape from rex import Outputs -from rex.utilities import SpawnProcessPool, log_mem, parse_table +from rex.utilities import SpawnProcessPool, 
log_mem from reV.handlers.exclusions import ExclusionLayers from reVX.handlers.geotiff import Geotiff from reVX.utilities.utilities import log_versions @@ -325,7 +325,7 @@ def compute_all_local_exclusions(self, max_workers=None): return exclusions - def compute_exclusions(self, features_fpath, max_workers=None, + def compute_exclusions(self, features_fpath=None, max_workers=None, out_layer=None, out_tiff=None, replace=False): """ Compute exclusions for all states either in serial or parallel. @@ -335,8 +335,10 @@ def compute_exclusions(self, features_fpath, max_workers=None, Parameters ---------- - features_fpath : str - Path to shape file with features to compute exclusions from + features_fpath : str, optional + Path to shape file with features to compute exclusions from. + Only required if the exclusions calculator requires it. + By default `None`. max_workers : int, optional Number of workers to use for exclusion computation, if 1 run in serial, if > 1 run in parallel with that many workers, @@ -483,7 +485,8 @@ def run(cls, excl_fpath, features_path, out_dir, regulations, logger.info("Computing exclusions from {} and saving " "to {}".format(f_in, f_out)) out_layer = out_layers.get(os.path.basename(f_in)) - exclusions.compute_exclusions(f_in, out_tiff=f_out, + exclusions.compute_exclusions(features_fpath=f_in, + out_tiff=f_out, out_layer=out_layer, max_workers=max_workers, replace=replace) diff --git a/tests/test_setbacks.py b/tests/test_setbacks.py index 25434c1ba..c1fd146bc 100644 --- a/tests/test_setbacks.py +++ b/tests/test_setbacks.py @@ -207,7 +207,7 @@ def test_setbacks_no_computation(setbacks_class): regs = SetbackRegulations(10, regulations_fpath=regs_fpath) setbacks = setbacks_class(EXCL_H5, regs) with pytest.raises(ValueError): - setbacks.compute_exclusions("RhodeIsland.file") + setbacks.compute_exclusions(features_fpath="RhodeIsland.file") @pytest.mark.parametrize( @@ -221,7 +221,7 @@ def test_setbacks_no_generic_value(setbacks_class, feature_file): 
"""Test setbacks computation for invalid input. """ regs = SetbackRegulations(0, regulations_fpath=None, multiplier=1) setbacks = setbacks_class(EXCL_H5, regs) - out = setbacks.compute_exclusions(feature_file) + out = setbacks.compute_exclusions(features_fpath=feature_file) assert np.isclose(out, 0).all() @@ -264,7 +264,7 @@ def test_generic_structure(generic_wind_regulations): setbacks = StructureSetbacks(EXCL_H5, generic_wind_regulations) structure_path = os.path.join(TESTDATADIR, 'setbacks', 'RhodeIsland.geojson') - test = setbacks.compute_exclusions(structure_path) + test = setbacks.compute_exclusions(features_fpath=structure_path) assert np.allclose(baseline, test) @@ -275,7 +275,7 @@ def test_generic_structure_gpkg(generic_wind_regulations): """ setbacks = StructureSetbacks(EXCL_H5, generic_wind_regulations) structure_path = os.path.join(TESTDATADIR, 'setbacks', 'RhodeIsland.gpkg') - test = setbacks.compute_exclusions(structure_path) + test = setbacks.compute_exclusions(features_fpath=structure_path) assert test.sum() == 6830 @@ -293,7 +293,8 @@ def test_local_structures(max_workers, county_wind_regulations_gpkg): setbacks = StructureSetbacks(EXCL_H5, county_wind_regulations_gpkg) structure_path = os.path.join(TESTDATADIR, 'setbacks', 'RhodeIsland.geojson') - test = setbacks.compute_exclusions(structure_path, max_workers=max_workers) + test = setbacks.compute_exclusions(features_fpath=structure_path, + max_workers=max_workers) # baseline was generated when code did not clip to county bounds, # so test should be a subset of baseline @@ -316,7 +317,7 @@ def test_generic_railroads(rail_path, generic_wind_regulations): baseline = tif.values setbacks = RailSetbacks(EXCL_H5, generic_wind_regulations) - test = setbacks.compute_exclusions(rail_path) + test = setbacks.compute_exclusions(features_fpath=rail_path) assert np.allclose(baseline, test) @@ -333,7 +334,8 @@ def test_local_railroads(max_workers, county_wind_regulations_gpkg): setbacks = RailSetbacks(EXCL_H5, 
county_wind_regulations_gpkg) rail_path = os.path.join(TESTDATADIR, 'setbacks', 'RI_Railroads', 'RI_Railroads.shp') - test = setbacks.compute_exclusions(rail_path, max_workers=max_workers) + test = setbacks.compute_exclusions(features_fpath=rail_path, + max_workers=max_workers) # baseline was generated when code did not clip to county bounds, # so test should be a subset of baseline @@ -349,11 +351,11 @@ def test_generic_parcels(): 'Rhode_Island.gpkg') regulations_x1 = SetbackRegulations(BASE_SETBACK_DIST, multiplier=1) setbacks_x1 = ParcelSetbacks(EXCL_H5, regulations_x1) - test_x1 = setbacks_x1.compute_exclusions(parcel_path) + test_x1 = setbacks_x1.compute_exclusions(features_fpath=parcel_path) regulations_x100 = SetbackRegulations(BASE_SETBACK_DIST, multiplier=100) setbacks_x100 = ParcelSetbacks(EXCL_H5, regulations_x100) - test_x100 = setbacks_x100.compute_exclusions(parcel_path) + test_x100 = setbacks_x100.compute_exclusions(features_fpath=parcel_path) # when the setbacks are so large that they span the entire parcels, # a total of 438 regions should be excluded for this particular @@ -381,7 +383,7 @@ def test_generic_parcels_with_invalid_shape_input(): assert not parcels.geometry.is_valid.any() # This code would throw an error if invalid shape not handled properly - test = setbacks.compute_exclusions(parcel_path) + test = setbacks.compute_exclusions(features_fpath=parcel_path) # add a test for expected output assert not test.any() @@ -409,7 +411,7 @@ def test_local_parcels_solar(max_workers, regulations_fpath): parcel_path = os.path.join(TESTDATADIR, 'setbacks', 'RI_Parcels', 'Rhode_Island.gpkg') - test = setbacks.compute_exclusions(parcel_path, + test = setbacks.compute_exclusions(features_fpath=parcel_path, max_workers=max_workers) assert test.sum() == 3 @@ -455,7 +457,7 @@ def test_local_parcels_wind(max_workers, regulations_fpath): parcel_path = os.path.join(TESTDATADIR, 'setbacks', 'RI_Parcels', 'Rhode_Island.gpkg') - test = 
setbacks.compute_exclusions(parcel_path, + test = setbacks.compute_exclusions(features_fpath=parcel_path, max_workers=max_workers) assert test.sum() == 3 @@ -488,11 +490,11 @@ def test_generic_water_setbacks(water_path): regulations_x1 = SetbackRegulations(BASE_SETBACK_DIST, multiplier=1) setbacks_x1 = WaterSetbacks(EXCL_H5, regulations_x1) - test_x1 = setbacks_x1.compute_exclusions(water_path) + test_x1 = setbacks_x1.compute_exclusions(features_fpath=water_path) regulations_x100 = SetbackRegulations(BASE_SETBACK_DIST, multiplier=100) setbacks_x100 = WaterSetbacks(EXCL_H5, regulations_x100) - test_x100 = setbacks_x100.compute_exclusions(water_path) + test_x100 = setbacks_x100.compute_exclusions(features_fpath=water_path) # A total of 88,994 regions should be excluded for this particular # Rhode Island subset @@ -525,7 +527,8 @@ def test_local_water_solar(max_workers, regulations_fpath): water_path = os.path.join(TESTDATADIR, 'setbacks', 'RI_Water', 'Rhode_Island.shp') - test = setbacks.compute_exclusions(water_path, max_workers=max_workers) + test = setbacks.compute_exclusions(features_fpath=water_path, + max_workers=max_workers) assert test.sum() == 83 @@ -565,7 +568,8 @@ def test_local_water_wind(max_workers, regulations_fpath): water_path = os.path.join(TESTDATADIR, 'setbacks', 'RI_Water', 'Rhode_Island.shp') - test = setbacks.compute_exclusions(water_path, max_workers=max_workers) + test = setbacks.compute_exclusions(features_fpath=water_path, + max_workers=max_workers) assert test.sum() == 83 @@ -642,8 +646,9 @@ def test_partial_exclusions(): setbacks_hr = ParcelSetbacks(EXCL_H5, regulations, weights_calculation_upscale_factor=mult) - exclusion_mask = setbacks.compute_exclusions(parcel_path) - inclusion_weights = setbacks_hr.compute_exclusions(parcel_path) + exclusion_mask = setbacks.compute_exclusions(features_fpath=parcel_path) + inclusion_weights = setbacks_hr.compute_exclusions( + features_fpath=parcel_path) assert exclusion_mask.shape == 
inclusion_weights.shape assert (inclusion_weights < 1).any() @@ -665,8 +670,9 @@ def test_partial_exclusions_upscale_factor_less_than_1(mult): setbacks_hr = ParcelSetbacks(EXCL_H5, regulations, weights_calculation_upscale_factor=mult) - exclusion_mask = setbacks.compute_exclusions(parcel_path) - inclusion_weights = setbacks_hr.compute_exclusions(parcel_path) + exclusion_mask = setbacks.compute_exclusions(features_fpath=parcel_path) + inclusion_weights = setbacks_hr.compute_exclusions( + features_fpath=parcel_path) assert np.isclose(exclusion_mask, inclusion_weights).all() @@ -708,8 +714,8 @@ def test_merged_setbacks(setbacks_class, regulations_class, features_path, multiplier=100) generic_setbacks = setbacks_class(EXCL_H5, regulations, weights_calculation_upscale_factor=sf) - generic_layer = generic_setbacks.compute_exclusions(features_path, - max_workers=1) + generic_layer = generic_setbacks.compute_exclusions( + features_fpath=features_path, max_workers=1) with tempfile.TemporaryDirectory() as td: regs_fpath = os.path.basename(regulations_fpath) @@ -722,16 +728,16 @@ def test_merged_setbacks(setbacks_class, regulations_class, features_path, local_setbacks = setbacks_class(EXCL_H5, regulations, weights_calculation_upscale_factor=sf) - local_layer = local_setbacks.compute_exclusions(features_path, - max_workers=1) + local_layer = local_setbacks.compute_exclusions( + features_fpath=features_path, max_workers=1) regulations = regulations_class(*setback_distance, regulations_fpath=regs_fpath, multiplier=100) merged_setbacks = setbacks_class(EXCL_H5, regulations, weights_calculation_upscale_factor=sf) - merged_layer = merged_setbacks.compute_exclusions(features_path, - max_workers=1) + merged_layer = merged_setbacks.compute_exclusions( + features_fpath=features_path, max_workers=1) local_setbacks.pre_process_regulations() feats = local_setbacks.regulations_table @@ -803,8 +809,8 @@ def test_merged_setbacks_missing_local(setbacks_class, regulations_class, regulations = 
regulations_class(*setback_distance, regulations_fpath=None, multiplier=100) generic_setbacks = setbacks_class(EXCL_H5, regulations) - generic_layer = generic_setbacks.compute_exclusions(features_path, - max_workers=1) + generic_layer = generic_setbacks.compute_exclusions( + features_fpath=features_path, max_workers=1) with tempfile.TemporaryDirectory() as td: regs = pd.read_csv(regulations_fpath).iloc[0:0] @@ -817,14 +823,15 @@ def test_merged_setbacks_missing_local(setbacks_class, regulations_class, multiplier=None) local_setbacks = setbacks_class(EXCL_H5, regulations) with pytest.raises(ValueError): - local_setbacks.compute_exclusions(features_path, max_workers=1) + local_setbacks.compute_exclusions(features_fpath=features_path, + max_workers=1) regulations = regulations_class(*setback_distance, regulations_fpath=regs_fpath, multiplier=100) merged_setbacks = setbacks_class(EXCL_H5, regulations) - merged_layer = merged_setbacks.compute_exclusions(features_path, - max_workers=1) + merged_layer = merged_setbacks.compute_exclusions( + features_fpath=features_path, max_workers=1) # make sure the comparison layers match what we expect assert generic_layer.sum() == generic_sum diff --git a/tests/test_turbine_flicker.py b/tests/test_turbine_flicker.py index ae352b252..88678e633 100644 --- a/tests/test_turbine_flicker.py +++ b/tests/test_turbine_flicker.py @@ -190,7 +190,7 @@ def test_get_building_indices(): assert (buildings[row_idx, col_idx] > 0).all() -# noqa: E201,E241 +# flake8: noqa def test_invert_shadow_flicker_arr(): """Test inverting the shadow flicker array. 
""" @@ -222,7 +222,7 @@ def test_turbine_flicker(max_workers): tf = TurbineFlicker(EXCL_H5, RES_H5, building_layer, regulations, resolution=64, tm_dset=TM, max_flicker_exclusion_range=4540) - test = tf.compute_flicker_exclusions(max_workers=max_workers) + test = tf.compute_flicker_exclusions(30, max_workers=max_workers) assert np.allclose(baseline, test) @@ -247,7 +247,7 @@ def test_local_turbine_flicker(): tf = TurbineFlicker(excl_h5, RES_H5, building_layer, regulations, resolution=64, tm_dset=TM, max_flicker_exclusion_range=4540) - test = tf.compute_exclusions(None, max_workers=1) + test = tf.compute_exclusions(max_workers=1) with ExclusionLayers(EXCL_H5) as f: baseline = f[BASELINE] @@ -283,7 +283,7 @@ def test_local_flicker_empty_regs(): resolution=64, tm_dset=TM, max_flicker_exclusion_range=4540) with pytest.raises(ValueError): - tf.compute_exclusions(None, max_workers=1) + tf.compute_exclusions(max_workers=1) def test_local_and_generic_turbine_flicker(): @@ -304,7 +304,7 @@ def test_local_and_generic_turbine_flicker(): regulations_generic_only, resolution=64, tm_dset=TM, max_flicker_exclusion_range=4540) - generic_flicker = tf.compute_exclusions(None, max_workers=1) + generic_flicker = tf.compute_exclusions(max_workers=1) with tempfile.TemporaryDirectory() as td: excl_h5 = os.path.join(td, os.path.basename(EXCL_H5)) @@ -318,7 +318,7 @@ def test_local_and_generic_turbine_flicker(): tf = TurbineFlicker(excl_h5, RES_H5, building_layer, regulations, resolution=64, tm_dset=TM, max_flicker_exclusion_range=4540) - test = tf.compute_exclusions(None, max_workers=1) + test = tf.compute_exclusions(max_workers=1) with ExclusionLayers(EXCL_H5) as f: baseline = f[BASELINE] @@ -409,7 +409,6 @@ def test_cli_tiff(runner): with tempfile.TemporaryDirectory() as td: excl_h5 = os.path.join(td, os.path.basename(EXCL_H5)) shutil.copy(EXCL_H5, excl_h5) - # out_tiff = f"{BLD_LAYER}_{HUB_HEIGHT}hh_{ROTOR_DIAMETER}rd.tiff" out_tiff = flicker_fn_out(HUB_HEIGHT, ROTOR_DIAMETER) config = 
{ "log_directory": td, @@ -465,7 +464,6 @@ def test_cli_tiff_input(runner): excl_h5 = os.path.join(td, os.path.basename(EXCL_H5)) shutil.copy(EXCL_H5, excl_h5) - # out_tiff = f"{BLD_LAYER}_{HUB_HEIGHT}hh_{ROTOR_DIAMETER}rd.tiff" out_tiff = flicker_fn_out(HUB_HEIGHT, ROTOR_DIAMETER) config = { "log_directory": td, @@ -570,7 +568,6 @@ def test_cli_max_flicker_exclusion_range(runner): os.path.join(td, out_tiff_def)) out_tiff_5k = f"{BLD_LAYER}_{HUB_HEIGHT}hh_{ROTOR_DIAMETER}rd_5k.tiff" - # config["out_tiff"] = os.path.join(td, out_tiff) config["max_flicker_exclusion_range"] = 5_000 config_path = os.path.join(td, 'config.json') with open(config_path, 'w') as f: @@ -585,7 +582,6 @@ def test_cli_max_flicker_exclusion_range(runner): os.path.join(td, out_tiff_5k)) out_tiff_20d = f"{BLD_LAYER}_{HUB_HEIGHT}hh_{ROTOR_DIAMETER}rd_5d.tiff" - # config["out_tiff"] = os.path.join(td, out_tiff_20d) config["max_flicker_exclusion_range"] = "20x" config_path = os.path.join(td, 'config.json') with open(config_path, 'w') as f: From 0e9d3376c3ec29f98ec037a0ae462a5f595bae9b Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Fri, 30 Sep 2022 15:32:32 -0600 Subject: [PATCH 41/42] Linter updates --- reVX/turbine_flicker/turbine_flicker.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/reVX/turbine_flicker/turbine_flicker.py b/reVX/turbine_flicker/turbine_flicker.py index fd56d1d05..b2073c126 100644 --- a/reVX/turbine_flicker/turbine_flicker.py +++ b/reVX/turbine_flicker/turbine_flicker.py @@ -256,7 +256,7 @@ def _exclude_turbine_flicker(self, point, res_fpath, flicker_threshold): wind_dir) flicker_shifts = _get_flicker_excl_shifts( - shadow_flicker,flicker_threshold=flicker_threshold) + shadow_flicker, flicker_threshold=flicker_threshold) return flicker_shifts @@ -704,7 +704,7 @@ def load_building_layer(excl_fpath, building_layer=None, features_path=None, return f.values[0] raise RuntimeError("Must provide either `features_path` or " - "`building_layer` (but not both).") + 
"`building_layer` (but not both).") def flicker_fn_out(hub_height, rotor_diameter): From e7fff820dc0cd52b4e905f0f5caf17231c071bec Mon Sep 17 00:00:00 2001 From: ppinchuk Date: Tue, 4 Oct 2022 08:18:44 -0600 Subject: [PATCH 42/42] Increment version number --- reVX/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/reVX/version.py b/reVX/version.py index d0f788ddc..24ad27771 100644 --- a/reVX/version.py +++ b/reVX/version.py @@ -3,4 +3,4 @@ reVX version number """ -__version__ = "0.3.44" +__version__ = "0.3.45"