Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Bulk operations: more API tests & fixes #13935

Merged
merged 4 commits into from May 20, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
2 changes: 1 addition & 1 deletion lib/galaxy/managers/collections.py
Expand Up @@ -759,7 +759,7 @@ def __init_rule_data(self, elements, collection_type_description, parent_identif
return data, sources

def __get_history_collection_instance(self, trans, id, check_ownership=False, check_accessible=True):
instance_id = int(trans.app.security.decode_id(id))
instance_id = trans.app.security.decode_id(id) if isinstance(id, str) else id
collection_instance = trans.sa_session.query(trans.app.model.HistoryDatasetCollectionAssociation).get(
instance_id
)
Expand Down
37 changes: 22 additions & 15 deletions lib/galaxy/webapps/galaxy/services/history_contents.py
Expand Up @@ -221,7 +221,7 @@ def __init__(
self.hda_deserializer = hda_deserializer
self.hdca_serializer = hdca_serializer
self.history_contents_filters = history_contents_filters
self.item_operator = HistoryItemOperator(self.hda_manager, self.hdca_manager)
self.item_operator = HistoryItemOperator(self.hda_manager, self.hdca_manager, self.dataset_collection_manager)
self.short_term_storage_allocator = short_term_storage_allocator

def index(
Expand Down Expand Up @@ -533,7 +533,7 @@ def bulk_operation(
history,
filters,
)
errors = self._apply_bulk_operation(contents, payload.operation)
errors = self._apply_bulk_operation(contents, payload.operation, trans)
trans.sa_session.flush()
success_count = len(contents) - len(errors)
return HistoryContentBulkOperationResult.construct(success_count=success_count, errors=errors)
def _apply_bulk_operation(
    self,
    contents: Iterable[HistoryItemModel],
    operation: HistoryContentItemOperation,
    trans: ProvidesHistoryContext,
) -> List[BulkOperationItemError]:
    """Apply `operation` to every item in `contents`.

    Returns the list of per-item errors; successful items contribute
    nothing. A failure on one item does not stop processing of the rest
    (errors are captured per item by `_apply_operation_to_item`).
    """
    errors: List[BulkOperationItemError] = []
    for item in contents:
        # NOTE: the diff scrape left both the pre-change (no-trans) and
        # post-change call lines here; only the trans-aware call is kept.
        error = self._apply_operation_to_item(operation, item, trans)
        if error:
            errors.append(error)
    return errors
Expand All @@ -1313,9 +1314,10 @@ def _apply_operation_to_item(
self,
operation: HistoryContentItemOperation,
item: HistoryItemModel,
trans: ProvidesHistoryContext,
) -> Optional[BulkOperationItemError]:
try:
self.item_operator.apply(operation, item)
self.item_operator.apply(operation, item, trans)
return None
except BaseException as exc:
return BulkOperationItemError.construct(
Expand Down Expand Up @@ -1348,7 +1350,7 @@ def _get_contents_by_item_list(


class ItemOperation(Protocol):
    """Structural type of a single bulk item operation.

    Implementations apply one operation to `item`, using the request
    context `trans` where needed; they work by side effect and return
    nothing. (The scrape kept both the old no-trans signature and the
    new one; only the trans-aware signature is valid now.)
    """

    def __call__(self, item: HistoryItemModel, trans: ProvidesHistoryContext) -> None:
        ...


def __init__(
    self,
    hda_manager: hdas.HDAManager,
    hdca_manager: hdcas.HDCAManager,
    dataset_collection_manager: DatasetCollectionManager,
):
    """Set up the per-operation dispatch table.

    The collection manager is needed by `_purge` so that purging a
    collection also purges its contained datasets.
    """
    self.hda_manager = hda_manager
    self.hdca_manager = hdca_manager
    self.dataset_collection_manager = dataset_collection_manager
    # Bulk callers flush the session once at the end, not per item.
    self.flush = False
    # Every handler takes (item, trans) to satisfy ItemOperation; only
    # purge actually uses trans. The scrape had duplicated old one-arg
    # entries for the same keys — removed, keeping the two-arg form.
    self._operation_map: Dict[HistoryContentItemOperation, ItemOperation] = {
        HistoryContentItemOperation.hide: lambda item, trans: self._hide(item),
        HistoryContentItemOperation.unhide: lambda item, trans: self._unhide(item),
        HistoryContentItemOperation.delete: lambda item, trans: self._delete(item),
        HistoryContentItemOperation.undelete: lambda item, trans: self._undelete(item),
        HistoryContentItemOperation.purge: lambda item, trans: self._purge(item, trans),
    }

def apply(self, operation: HistoryContentItemOperation, item: HistoryItemModel, trans: ProvidesHistoryContext):
    """Look up and invoke the handler registered for `operation` on `item`.

    Raises KeyError for an unmapped operation; any exception from the
    handler propagates to the caller (which records it per item).
    """
    # The scrape retained the stale pre-change definition without `trans`;
    # only this trans-aware version is kept.
    self._operation_map[operation](item, trans)

def _get_item_manager(self, item: HistoryItemModel):
if isinstance(item, HistoryDatasetAssociation):
Expand All @@ -1390,9 +1394,12 @@ def _delete(self, item: HistoryItemModel):
manager.delete(item, flush=self.flush)

def _undelete(self, item: HistoryItemModel):
    """Restore a deleted item; purged items are left untouched."""
    # A purged item cannot come back, so skip it silently rather than fail.
    if getattr(item, "purged", False):
        return
    self._get_item_manager(item).undelete(item, flush=self.flush)

def _purge(self, item: HistoryItemModel, trans: ProvidesHistoryContext):
    """Purge `item`, recursively purging a collection's datasets.

    Collections go through the dataset collection manager with
    recursive=True/purge=True so their contents are purged too; plain
    datasets are purged directly by the HDA manager. (The scrape kept the
    stale pre-change generic-manager implementation as well; removed.)
    """
    if isinstance(item, HistoryDatasetCollectionAssociation):
        return self.dataset_collection_manager.delete(trans, "history", item.id, recursive=True, purge=True)
    self.hda_manager.purge(item, flush=self.flush)
70 changes: 67 additions & 3 deletions lib/galaxy_test/api/test_history_contents.py
Expand Up @@ -4,6 +4,7 @@
from typing import (
Any,
List,
Optional,
Tuple,
)

Expand Down Expand Up @@ -1110,6 +1111,53 @@ def test_bulk_operations(self):
bulk_operation_result = self._apply_bulk_operation(history_id, payload)
history_contents = self._get_history_contents(history_id)
self._assert_bulk_success(bulk_operation_result, expected_purged_count)
purged_dataset = self._get_dataset_with_id_from_history_contents(history_contents, datasets_ids[0])
assert purged_dataset["deleted"] is True
assert purged_dataset["purged"] is True
purged_collection = self._get_collection_with_id_from_history_contents(history_contents, collection_ids[0])
# collections don't have a `purged` attribute but they should be marked deleted on purge
assert purged_collection["deleted"] is True

# Un-deleting a purged dataset should not have any effect
payload = {
"operation": "undelete",
"items": [
{
"id": datasets_ids[0],
"history_content_type": "dataset",
},
],
}
bulk_operation_result = self._apply_bulk_operation(history_id, payload)
history_contents = self._get_history_contents(history_id)
self._assert_bulk_success(bulk_operation_result, 1)
purged_dataset = self._get_dataset_with_id_from_history_contents(history_contents, datasets_ids[0])
assert purged_dataset["deleted"] is True
assert purged_dataset["purged"] is True

def test_purging_collection_should_purge_contents(self):
    """Purging every collection in a history must also purge the datasets inside them."""
    with self.dataset_populator.test_history() as history_id:
        datasets_ids, collection_ids, history_contents = self._create_test_history_contents(history_id)

        # Filter the bulk purge down to collections only.
        payload = {"operation": "purge"}
        query = "q=history_content_type-eq&qv=dataset_collection"
        bulk_operation_result = self._apply_bulk_operation(history_id, payload, query)
        history_contents = self._get_history_contents(history_id)
        self._assert_bulk_success(bulk_operation_result, len(collection_ids))
        for item in history_contents:
            # Everything ends up deleted; datasets additionally report purged.
            assert item["deleted"] is True
            if item["history_content_type"] == "dataset":
                assert item["purged"] is True

def test_only_owner_can_apply_bulk_operations(self):
    """A user other than the history owner gets a 403 with an error message."""
    with self.dataset_populator.test_history() as history_id:
        self._create_test_history_contents(history_id)

        with self._different_user():
            bulk_operation_result = self._apply_bulk_operation(
                history_id, {"operation": "hide"}, expected_status_code=403
            )
            assert bulk_operation_result["err_msg"]

def test_index_returns_expected_total_matches(self):
with self.dataset_populator.test_history() as history_id:
Expand Down Expand Up @@ -1208,14 +1256,30 @@ def _get_history_contents(self, history_id: str):
def _get_hidden_items_from_history_contents(self, history_contents) -> List[Any]:
return [content for content in history_contents if not content["visible"]]

def _apply_bulk_operation(self, history_id: str, payload, query: str = ""):
def _get_collection_with_id_from_history_contents(self, history_contents, collection_id: str) -> Optional[Any]:
    """Find the dataset collection with `collection_id`, or None if absent."""
    content_type = "dataset_collection"
    return self._get_item_with_id_from_history_contents(history_contents, content_type, collection_id)

def _get_dataset_with_id_from_history_contents(self, history_contents, dataset_id: str) -> Optional[Any]:
    """Find the dataset with `dataset_id`, or None if absent."""
    content_type = "dataset"
    return self._get_item_with_id_from_history_contents(history_contents, content_type, dataset_id)

def _get_item_with_id_from_history_contents(
self, history_contents, history_content_type: str, dataset_id: str
) -> Optional[Any]:
for item in history_contents:
if item["history_content_type"] == history_content_type and item["id"] == dataset_id:
return item
return None

def _apply_bulk_operation(self, history_id: str, payload, query: str = "", expected_status_code: int = 200):
    """PUT `payload` to the history contents bulk endpoint and return the JSON body.

    `query` is an optional raw query string (without the leading '?') used
    to filter which contents the operation targets. Asserts the response
    status before decoding.
    """
    url = f"histories/{history_id}/contents/bulk"
    if query:
        url = f"{url}?{query}"
    response = self._put(url, data=payload, json=True)
    self._assert_status_code_is(response, expected_status_code)
    return response.json()

def _assert_bulk_success(self, bulk_operation_result, expected_success_count: int):
assert bulk_operation_result["success_count"] == expected_success_count, bulk_operation_result
Expand Down