Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Use Python3.6+ syntax #12

Merged
merged 1 commit into from Jun 20, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
4 changes: 2 additions & 2 deletions buzzard/_a_async_raster.py
Expand Up @@ -67,7 +67,7 @@ def queue_data(self, fps, channels=None, dst_nodata=None, interpolation='cv_area
"""
for fp in fps:
if not isinstance(fp, Footprint):
msg = 'element of `fps` parameter should be a Footprint (not {})'.format(fp) # pragma: no cover
msg = f'element of `fps` parameter should be a Footprint (not {fp})' # pragma: no cover
raise ValueError(msg)

return self._back.queue_data(
Expand Down Expand Up @@ -145,7 +145,7 @@ def queue_data(self, fps, channel_ids, dst_nodata, interpolation, max_queue_size
parent_uid, key_in_parent):
q = queue.Queue(max_queue_size)
self.back_ds.put_message(Msg(
'/Raster{}/QueriesHandler'.format(self.uid),
f'/Raster{self.uid}/QueriesHandler',
'new_query',
weakref.ref(q),
max_queue_size,
Expand Down
2 changes: 1 addition & 1 deletion buzzard/_a_emissary.py
Expand Up @@ -62,7 +62,7 @@ def __init__(self, driver, open_options, path, **kwargs):
self.driver = driver
self.open_options = open_options
self.path = path
super(ABackEmissary, self).__init__(**kwargs)
super().__init__(**kwargs)


def delete(self):
Expand Down
2 changes: 1 addition & 1 deletion buzzard/_a_emissary_vector.py
Expand Up @@ -17,5 +17,5 @@ class ABackEmissaryVector(ABackEmissary, ABackStoredVector):
"""Implementation of AEmissaryVector's specifications"""

def __init__(self, layer, **kwargs):
super(ABackEmissaryVector, self).__init__(**kwargs)
super().__init__(**kwargs)
self.layer = layer
4 changes: 2 additions & 2 deletions buzzard/_a_gdal_raster.py
Expand Up @@ -37,8 +37,8 @@ def get_data(self, fp, channel_ids, dst_nodata, interpolation):

def sample_bands_driver(self, fp, channel_ids, gdal_ds):
rtlx, rtly = self.fp.spatial_to_raster(fp.tl)
assert rtlx >= 0 and rtlx < self.fp.rsizex, '{} >= 0 and {} < {}'.format(rtlx, rtlx, self.fp.rsizex)
assert rtly >= 0 and rtly < self.fp.rsizey, '{} >= 0 and {} < {}'.format(rtly, rtly, self.fp.rsizey)
assert rtlx >= 0 and rtlx < self.fp.rsizex, f'{rtlx} >= 0 and {rtlx} < {self.fp.rsizex}'
assert rtly >= 0 and rtly < self.fp.rsizey, f'{rtly} >= 0 and {rtly} < {self.fp.rsizey}'

dstarray = np.empty(np.r_[fp.shape, len(channel_ids)], self.dtype)
for i, channel_id in enumerate(channel_ids):
Expand Down
4 changes: 2 additions & 2 deletions buzzard/_a_gdal_vector.py
Expand Up @@ -111,14 +111,14 @@ def iter_features_driver(slicing, mask_poly, mask_rect, lyr):
for i in indices:
ftr = lyr.GetNextFeature()
if ftr is None: # pragma: no cover
raise IndexError('Feature #{} not found'.format(i))
raise IndexError(f'Feature #{i} not found')
yield ftr
else:
for i in indices:
lyr.SetNextByIndex(i)
ftr = lyr.GetNextFeature()
if ftr is None: # pragma: no cover
raise IndexError('Feature #{} not found'.format(i))
raise IndexError(f'Feature #{i} not found')
yield ftr

# Necessary to prevent the old swig bug
Expand Down
6 changes: 3 additions & 3 deletions buzzard/_a_pooled_emissary.py
Expand Up @@ -40,7 +40,7 @@ class ABackPooledEmissary(ABackEmissary):

def __init__(self, uid, **kwargs):
self.uid = uid
super(ABackPooledEmissary, self).__init__(**kwargs)
super().__init__(**kwargs)

def activate(self):
self.back_ds.activate(self.uid, self.allocator)
Expand All @@ -62,7 +62,7 @@ def close(self):
- Should always be called
"""
self.back_ds.deactivate(self.uid)
super(ABackPooledEmissary, self).close()
super().close()

def delete(self):
"""Virtual method:
Expand All @@ -73,4 +73,4 @@ def delete(self):
Ideally a `close` should be performed before a delete.
"""
self.back_ds.deactivate(self.uid)
super(ABackPooledEmissary, self).delete()
super().delete()
4 changes: 2 additions & 2 deletions buzzard/_a_source.py
Expand Up @@ -4,7 +4,7 @@

from buzzard import _tools

class ASource(object):
class ASource:
"""Base abstract class defining the common behavior of all sources opened in the Dataset.

Features Defined
Expand Down Expand Up @@ -93,7 +93,7 @@ def __del__(self):
'0.4.4'
)

class ABackSource(object):
class ABackSource:
"""Implementation of ASource's specifications"""

def __init__(self, back_ds, wkt_stored, rect, **kwargs):
Expand Down
4 changes: 2 additions & 2 deletions buzzard/_a_source_raster.py
Expand Up @@ -153,7 +153,7 @@ def _band_to_channels(val):
if fp is None:
fp = self.fp
elif not isinstance(fp, Footprint): # pragma: no cover
raise ValueError('`fp` parameter should be a Footprint (not {})'.format(fp))
raise ValueError(f'`fp` parameter should be a Footprint (not {fp})')

# Normalize and check channels parameter
channel_ids, is_flat = _tools.normalize_channels_parameter(
Expand Down Expand Up @@ -202,7 +202,7 @@ class ABackSourceRaster(ABackSource, ABackSourceRasterRemapMixin):
"""Implementation of ASourceRaster's specifications"""

def __init__(self, channels_schema, dtype, fp_stored, **kwargs):
super(ABackSourceRaster, self).__init__(rect=fp_stored, **kwargs)
super().__init__(rect=fp_stored, **kwargs)

if self.to_work is not None:
fp = fp_stored.move(*self.to_work([
Expand Down
4 changes: 2 additions & 2 deletions buzzard/_a_source_raster_remap.py
Expand Up @@ -20,7 +20,7 @@
2. or that you want to perform a resampling operation and that you need `interpolation` to be a string.
"""

class ABackSourceRasterRemapMixin(object):
class ABackSourceRasterRemapMixin:
"""Raster Mixin containing remap subroutine"""

_REMAP_MASK_MODES = frozenset(['dilate', 'erode', ])
Expand Down Expand Up @@ -188,7 +188,7 @@ def _remap_interpolate(cls, src_fp, dst_fp, array, mask, src_nodata, dst_nodata,
mask_mode, interpolation):
if array is not None and array.dtype in [np.dtype('float64'), np.dtype('bool')]:
raise ValueError(
'dtype {!r} not handled by cv2.remap'.format(array.dtype)
f'dtype {array.dtype!r} not handled by cv2.remap'
) # pragma: no cover

mapx, mapy = dst_fp.meshgrid_raster_in(src_fp, dtype='float32')
Expand Down
6 changes: 3 additions & 3 deletions buzzard/_a_source_vector.py
Expand Up @@ -170,7 +170,7 @@ def get_data(self, index, fields=-1, geom_type='shapely', mask=None, clip=False)
for val in self.iter_data(fields, geom_type, mask, clip, slice(index, index + 1, 1)):
return val
else: # pragma: no cover
raise IndexError('Feature `{}` not found'.format(index))
raise IndexError(f'Feature `{index}` not found')

def iter_geojson(self, mask=None, clip=False, slicing=slice(0, None, 1)):
"""Create an iterator over vector's features
Expand Down Expand Up @@ -253,7 +253,7 @@ def get_geojson(self, index, mask=None, clip=False):
for val in self.iter_geojson(mask, clip, slice(index, index + 1, 1)):
return val
else: # pragma: no cover
raise IndexError('Feature `{}` not found'.format(index))
raise IndexError(f'Feature `{index}` not found')

@staticmethod
def _normalize_mask_parameter(mask):
Expand Down Expand Up @@ -281,7 +281,7 @@ class ABackSourceVector(ABackSource):
"""Implementation of ASourceVector's specifications"""

def __init__(self, type, fields, **kwargs):
super(ABackSourceVector, self).__init__(**kwargs)
super().__init__(**kwargs)
self.type = type
self.fields = fields
self.index_of_field_name = {
Expand Down
2 changes: 1 addition & 1 deletion buzzard/_a_stored.py
Expand Up @@ -19,4 +19,4 @@ class ABackStored(ABackSource):

def __init__(self, mode, **kwargs):
self.mode = mode
super(ABackStored, self).__init__(**kwargs)
super().__init__(**kwargs)
4 changes: 2 additions & 2 deletions buzzard/_a_stored_raster.py
Expand Up @@ -106,7 +106,7 @@ def _band_to_channels(val):
if fp is None:
fp = self.fp
elif not isinstance(fp, Footprint):
raise ValueError('`fp` parameter should be a Footprint (not {})'.format(fp)) # pragma: no cover
raise ValueError(f'`fp` parameter should be a Footprint (not {fp})') # pragma: no cover

# Normalize and check channels parameter
channel_ids, _ = _tools.normalize_channels_parameter(channels, len(self))
Expand All @@ -117,7 +117,7 @@ def _band_to_channels(val):
# Normalize and check array parameter
array = np.atleast_3d(array)
if array.ndim != 3: # pragma: no cover
raise ValueError('Input array should have 2 or 3 dimensions, not {}'.format(array.ndim))
raise ValueError(f'Input array should have 2 or 3 dimensions, not {array.ndim}')
if array.shape[:2] != tuple(fp.shape): # pragma: no cover
msg = 'Incompatible shape between input `array` ({}) and `fp` ({})'.format(
array.shape[:2], tuple(fp.shape)
Expand Down
4 changes: 2 additions & 2 deletions buzzard/_a_stored_vector.py
Expand Up @@ -76,7 +76,7 @@ def _normalize_field_values(self, fields):
lst[i] = self._back.type_of_field_index[i](v)
for defn, val in zip(self._back.fields, lst):
if val is None and defn['nullable'] is False: # pragma: no cover
raise ValueError('{} not nullable'.format(defn))
raise ValueError(f'{defn} not nullable')
return lst
elif isinstance(fields, collections.Iterable):
if len(fields) == 0 and self._back.all_nullable:
Expand All @@ -97,7 +97,7 @@ class ABackStoredVector(ABackStored, ABackSourceVector):
"""Implementation of AStoredRaster's specifications"""

def __init__(self, **kwargs):
super(ABackStoredVector, self).__init__(**kwargs)
super().__init__(**kwargs)

def insert_data(self, geom, geom_type, fields, index): # pragma: no cover
raise NotImplementedError('ABackStoredVector.insert_data is virtual pure')
9 changes: 4 additions & 5 deletions buzzard/_dataset.py
Expand Up @@ -385,7 +385,7 @@ def __init__(self, sr_work=None, sr_fallback=None, sr_forced=None,
ds_id=id(self),
debug_observers=debug_observers,
)
super(Dataset, self).__init__()
super().__init__()

# Raster entry points *********************************************************************** **
def open_raster(self, key, path, driver='GTiff', options=(), mode='r'):
Expand Down Expand Up @@ -1385,7 +1385,7 @@ def aopen_vector(self, path, layer=None, driver='ESRI Shapefile', options=(), mo

def create_vector(self, key, path, type, fields=(), layer=None,
driver='ESRI Shapefile', options=(), sr=None, ow=False):
"""Create an empty vector file and register it under `key` within this Dataset. Only metadata
r"""Create an empty vector file and register it under `key` within this Dataset. Only metadata
are kept in memory.

>>> help(GDALFileVector)
Expand Down Expand Up @@ -1645,8 +1645,7 @@ def items(self):
def keys(self):
"""Generate all source keys"""
for source, keys in self._keys_of_source.items():
for key in keys:
yield key
yield from keys

def values(self):
"""Generate all proxies"""
Expand Down Expand Up @@ -1823,5 +1822,5 @@ def wrap_numpy_raster(*args, **kwargs):

DataSource = deprecation_pool.wrap_class(Dataset, 'DataSource', '0.6.0')

class _AnonymousSentry(object):
class _AnonymousSentry:
"""Sentry object used to instanciate anonymous proxies"""
3 changes: 1 addition & 2 deletions buzzard/_dataset_back.py
@@ -1,4 +1,3 @@

from buzzard._dataset_back_conversions import BackDatasetConversionsMixin
from buzzard._dataset_back_activation_pool import BackDatasetActivationPoolMixin
from buzzard._dataset_back_scheduler import BackDatasetSchedulerMixin
Expand All @@ -14,4 +13,4 @@ def __init__(self, allow_none_geometry, allow_interpolation, **kwargs):
self.allow_interpolation = allow_interpolation
self.allow_none_geometry = allow_none_geometry
self.pools_container = PoolsContainer()
super(BackDataset, self).__init__(**kwargs)
super().__init__(**kwargs)
4 changes: 2 additions & 2 deletions buzzard/_dataset_back_activation_pool.py
Expand Up @@ -7,7 +7,7 @@
_ERR_FMT = 'Dataset is configured for a maximum of {} simultaneous active driver objects \
but there are already {} idle objects and {} used objects'

class BackDatasetActivationPoolMixin(object):
class BackDatasetActivationPoolMixin:
"""Private mixin for the Dataset class containing subroutines for proxies' driver
objects pooling"""

Expand All @@ -16,7 +16,7 @@ def __init__(self, max_active, **kwargs):
self._ap_lock = threading.Lock()
self._ap_idle = MultiOrderedDict()
self._ap_used = collections.Counter()
super(BackDatasetActivationPoolMixin, self).__init__(**kwargs)
super().__init__(**kwargs)

def activate(self, uid, allocator):
"""Make sure at least one driver object is idle or used for uid"""
Expand Down
4 changes: 2 additions & 2 deletions buzzard/_dataset_back_conversions.py
Expand Up @@ -4,7 +4,7 @@
from buzzard import srs
from buzzard._footprint import Footprint

class BackDatasetConversionsMixin(object):
class BackDatasetConversionsMixin:
"""Private mixin for the Dataset class containing the spatial coordinates
conversion subroutines"""

Expand All @@ -30,7 +30,7 @@ def __init__(self, wkt_work, wkt_fallback, wkt_forced, analyse_transformation, *
self.sr_fallback = sr_fallback
self.sr_forced = sr_forced
self.analyse_transformations = analyse_transformation
super(BackDatasetConversionsMixin, self).__init__(**kwargs)
super().__init__(**kwargs)

def get_transforms(self, sr_virtual, rect, rect_from='virtual'):
"""Retrieve the `to_work` and `to_virtual` conversion functions.
Expand Down
6 changes: 3 additions & 3 deletions buzzard/_dataset_back_scheduler.py
Expand Up @@ -9,7 +9,7 @@

VERBOSE = 0

class BackDatasetSchedulerMixin(object):
class BackDatasetSchedulerMixin:
"""TODO: docstring"""

def __init__(self, ds_id, debug_observers, **kwargs):
Expand All @@ -26,7 +26,7 @@ def ensure_scheduler_living(self):
if self._thread is None:
self._thread = threading.Thread(
target=self._exception_catcher,
name='Dataset{:#x}Scheduler'.format(self._ds_id),
name=f'Dataset{self._ds_id:#x}Scheduler',
daemon=True,
)
self._thread.start()
Expand Down Expand Up @@ -155,7 +155,7 @@ def _unregister_actor(a):
for dst_actor in _find_actors(msg.address, src_actor):
if dst_actor is None:
                    # This message may be discarded if DroppableMsg
                    assert isinstance(msg, DroppableMsg), f'\ndst_actor: {dst_actor}\n msg: {msg}\n'
assert isinstance(msg, DroppableMsg), f'\ndst_actor: {dst_actor}\n msg: {msg}\n'
else:
a = datetime.datetime.now()
met = getattr(dst_actor, title_prefix + msg.title)
Expand Down
2 changes: 1 addition & 1 deletion buzzard/_dataset_pools_container.py
Expand Up @@ -3,7 +3,7 @@
import multiprocessing as mp
import multiprocessing.pool

class PoolsContainer(object):
class PoolsContainer:
"""Manages thread/process pools and aliases for a Dataset"""

def __init__(self):
Expand Down
6 changes: 3 additions & 3 deletions buzzard/_dataset_register.py
@@ -1,19 +1,19 @@
import threading

class DatasetRegisterMixin(object):
class DatasetRegisterMixin:
"""Private mixin for the Dataset class containing subroutines for proxies registration"""

def __init__(self, **kwargs):
self._keys_of_source = {}
self._source_of_key = {}
self._register_lock = threading.Lock()
super(DatasetRegisterMixin, self).__init__(**kwargs)
super().__init__(**kwargs)

def _register(self, keys, prox):
with self._register_lock:
for key in keys:
if key in self.__dict__: # pragma: no cover
raise ValueError('key `{}` is already bound'.format(key))
raise ValueError(f'key `{key}` is already bound')
self._keys_of_source[prox] = keys
for key in keys:
self._source_of_key[key] = prox
Expand Down
4 changes: 2 additions & 2 deletions buzzard/_debug_observers_manager.py
@@ -1,6 +1,6 @@
import collections

class DebugObserversManager(object):
class DebugObserversManager:
"""Delivers the callbacks to the observers provided by user in the `debug_observers` parameters.
"""
def __init__(self, debug_observers):
Expand All @@ -16,7 +16,7 @@ def __init__(self, debug_observers):
self._obs = debug_observers

def __missing__(self, ename):
method_name = 'on_{}'.format(ename)
method_name = f'on_{ename}'
return [
getattr(o, method_name)
for o in self._obs
Expand Down
4 changes: 2 additions & 2 deletions buzzard/_env.py
Expand Up @@ -84,7 +84,7 @@ def __init__(self):
_LOCAL = _Storage()

# Env update ************************************************************************************ **
class Env(object):
class Env:
"""Context manager to update buzzard's states. Can also be used as a decorator.

Parameters
Expand Down Expand Up @@ -152,7 +152,7 @@ def f(*args, **kwargs):
return f

# Value retrieval ******************************************************************************* **
class _ThreadMapStackGetter(object):
class _ThreadMapStackGetter:
"""Getter for env attribute"""
def __init__(self, key):
self.key = key
Expand Down