@property
def deleted_at(self) -> Optional[str]:
    """Get the time when the volume was deleted (UTC)

    :return: deletion time, or None if the volume has not been deleted
    :rtype: Optional[str]
    """
    return self._deleted_at

@classmethod
def create_from_dict(cls, volume_dict: dict) -> 'Volume':
    """Create a Volume object from a dictionary.

    Only keys that match ``__init__`` parameters are forwarded, so a new
    field added to the API response does not break deserialization
    (the explicit key-by-key mapping this replaced had the same tolerance).
    Missing optional keys simply fall back to the constructor defaults.

    :param volume_dict: dictionary representing the volume
    :type volume_dict: dict
    :return: a new Volume instance
    :rtype: Volume
    """
    # Local import: only needed here, keeps the module's import block untouched.
    import inspect
    allowed = inspect.signature(cls.__init__).parameters
    return cls(**{key: value for key, value in volume_dict.items() if key in allowed})
Volume.create_from_dict(volume_dict) + + def get_in_trash(self) -> List[Volume]: + """Get all volumes that are in trash + + :return: list of volume details objects + :rtype: List[Volume] + """ + volumes_dicts = self._http_client.get( + VOLUMES_ENDPOINT + '/trash' + ).json() + + return list(map(Volume.create_from_dict, volumes_dicts)) def create(self, type: str, @@ -358,7 +367,7 @@ def increase_size(self, id_list: Union[List[str], str], size: int) -> None: self._http_client.put(VOLUMES_ENDPOINT, json=payload) return - def delete(self, id_list: Union[List[str], str]) -> None: + def delete(self, id_list: Union[List[str], str], is_permanent: bool = False) -> None: """Delete multiple volumes or single volume Note: if attached to any instances, they need to be shut-down (offline) @@ -368,6 +377,7 @@ def delete(self, id_list: Union[List[str], str]) -> None: payload = { "id": id_list, "action": VolumeActions.DELETE, + "is_permanent": is_permanent } self._http_client.put(VOLUMES_ENDPOINT, json=payload) diff --git a/examples/storage_volumes.py b/examples/storage_volumes.py index 6d2c0e4..72e4a2e 100644 --- a/examples/storage_volumes.py +++ b/examples/storage_volumes.py @@ -62,5 +62,11 @@ # clone multiple volumes at once datacrunch.volumes.clone([nvme_volume_id, hdd_volume_id]) -# delete volumes +# delete volumes (move to trash for 96h, not permanent) datacrunch.volumes.delete([nvme_volume_id, hdd_volume_id]) + +# get all volumes in trash +volumes_in_trash = datacrunch.volumes.get_in_trash() + +# delete volumes permanently +datacrunch.volumes.delete([nvme_volume_id, hdd_volume_id], is_permanent=True) diff --git a/pytest.ini b/pytest.ini index 064be9e..d5162a5 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,2 +1,4 @@ [pytest] -testpaths = tests/unit_tests \ No newline at end of file +testpaths = + tests/unit_tests + tests/integration_tests \ No newline at end of file diff --git a/tests/integration_tests/__init__.py b/tests/integration_tests/__init__.py new file mode 100644 
@pytest.mark.skipif(IN_GITHUB_ACTIONS, reason="Test doesn't work in Github Actions.")
@pytest.mark.withoutresponses
class TestVolumes():
    """Integration tests for volume trash / permanent deletion.

    Requires a running API server and valid credentials (see conftest.py);
    skipped on Github Actions where no server is available.
    """

    def test_get_volumes_from_trash(self, datacrunch_client: DataCrunchClient):
        # create new volume
        volume = datacrunch_client.volumes.create(
            type=datacrunch_client.constants.volume_types.NVMe, name="test_volume", size=100)

        # delete volume (non-permanent: moves it to trash)
        datacrunch_client.volumes.delete(volume.id)

        # get volumes from trash
        volumes = datacrunch_client.volumes.get_in_trash()

        # assert volume is in trash
        assert volume.id in [v.id for v in volumes]

        # cleaning: permanently delete the volume
        datacrunch_client.volumes.delete(volume.id, is_permanent=True)

    # FIX: first parameter was misspelled "seld". pytest binds the instance to
    # the first parameter regardless of its name, so the test ran, but the
    # typo was misleading and inconsistent; renamed to the conventional "self".
    def test_permanently_delete_detached_volumes(self, datacrunch_client: DataCrunchClient):
        # create new volume
        volume = datacrunch_client.volumes.create(
            type=datacrunch_client.constants.volume_types.NVMe, name="test_volume", size=100)

        # permanently delete the detached volume
        datacrunch_client.volumes.delete(volume.id, is_permanent=True)

        # make sure the volume is not in trash
        volumes = datacrunch_client.volumes.get_in_trash()

        # assert volume is not in trash
        assert volume.id not in [v.id for v in volumes]

        # get the volume
        volume = datacrunch_client.volumes.get_by_id(volume.id)

        # assert volume status is deleted
        assert volume.status == datacrunch_client.constants.volume_status.DELETED

    def test_permanently_delete_a_deleted_volume_from_trash(self, datacrunch_client: DataCrunchClient):
        # create new volume
        volume = datacrunch_client.volumes.create(
            type=datacrunch_client.constants.volume_types.NVMe, name="test_volume", size=100)

        # delete volume (to trash)
        datacrunch_client.volumes.delete(volume.id)

        # permanently delete the volume
        datacrunch_client.volumes.delete(volume.id, is_permanent=True)

        # get the volume
        volume = datacrunch_client.volumes.get_by_id(volume.id)

        # assert volume status is deleted
        assert volume.status == datacrunch_client.constants.volume_status.DELETED

        # make sure the volume is not in trash
        volumes = datacrunch_client.volumes.get_in_trash()

        # assert volume is not in trash
        assert volume.id not in [v.id for v in volumes]
"action": VolumeActions.DELETE, + "is_permanent": False }) ] )