Skip to content

Commit

Permalink
Added change to handle a single feature if one is provided to add_features
Browse files Browse the repository at this point in the history
Signed-off-by: Omkar Mestry <omkar.mestry@here.com>
  • Loading branch information
omanges committed Sep 1, 2020
1 parent e2a4bbb commit 01ed10c
Show file tree
Hide file tree
Showing 2 changed files with 47 additions and 35 deletions.
1 change: 1 addition & 0 deletions tests/space/test_space_objects.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,6 +92,7 @@ def test_add_feature(empty_space):
# add features
space.add_features(features=gj_countries)
feature = space.get_feature(feature_id="FRA")
space.add_features(features=feature)
assert type(feature) == GeoJSON
assert feature["id"] == "FRA"
del feature["id"]
Expand Down
81 changes: 46 additions & 35 deletions xyzspaces/spaces.py
Original file line number Diff line number Diff line change
Expand Up @@ -423,7 +423,7 @@ def add_features(
chunk_size: int = 1,
id_properties: Optional[List[str]] = None,
mutate: Optional[bool] = True,
) -> GeoJSON:
) -> GeoJSON: # noqa DAR401
"""
Add GeoJSON features to this space.
Expand All @@ -449,43 +449,54 @@ def add_features(
may help to improving performance.
:return: A GeoJSON representing a feature collection.
"""
if features.get("features"):

if len(features["features"]) == 0:
raise Exception("Invalid FeatureCollection with zero features")

if not mutate:
features = copy.deepcopy(features)

space_id = self._info["id"]
total = 0
ids_map: Dict[str, str] = dict()
if len(features["features"]) > features_size:
groups = grouper(features_size, features["features"])
part_func = partial(
self._upload_features,
ids_map=ids_map,
add_tags=add_tags,
remove_tags=remove_tags,
id_properties=id_properties,
)
with concurrent.futures.ProcessPoolExecutor() as executor:
for ft in executor.map(
part_func, groups, chunksize=chunk_size
):
logger.info(f"features processed: {ft}")
total += ft
logger.info(
f"{total} features are uploaded on space: {space_id}"
)
else:

if not mutate:
features = copy.deepcopy(features)

space_id = self._info["id"]
total = 0
ids_map: Dict[str, str] = dict()
if len(features["features"]) > features_size:
groups = grouper(features_size, features["features"])
part_func = partial(
self._upload_features,
ids_map=ids_map,
add_tags=add_tags,
remove_tags=remove_tags,
id_properties=id_properties,
)
with concurrent.futures.ProcessPoolExecutor() as executor:
for ft in executor.map(
part_func, groups, chunksize=chunk_size
):
logger.info(f"features processed: {ft}")
total += ft
logger.info(f"{total} features are uploaded on space: {space_id}")
features = self._process_features(
features["features"], id_properties, ids_map
)
feature_collection = dict(
type="FeatureCollection", features=features
)
res = self.api.put_space_features(
space_id=space_id,
data=feature_collection,
add_tags=add_tags,
remove_tags=remove_tags,
)
return GeoJSON(res)
else:
features = self._process_features(
features["features"], id_properties, ids_map
)
feature_collection = dict(
type="FeatureCollection", features=features
)
res = self.api.put_space_features(
space_id=space_id,
data=feature_collection,
add_tags=add_tags,
remove_tags=remove_tags,
return self.add_feature(
data=features, add_tags=add_tags, remove_tags=remove_tags
)
return GeoJSON(res)

def _upload_features(
self,
Expand Down

0 comments on commit 01ed10c

Please sign in to comment.