From af0d40303fc51af071a3a31ee6bbbf4e43d05bd9 Mon Sep 17 00:00:00 2001 From: Tamir Date: Fri, 2 Jun 2023 15:08:31 +0300 Subject: [PATCH 01/12] added get from trash endpoint, is_permanent delete flag, is_deleted prop --- datacrunch/volumes/volumes.py | 45 ++++++++++++++++++++++++++++++++++- 1 file changed, 44 insertions(+), 1 deletion(-) diff --git a/datacrunch/volumes/volumes.py b/datacrunch/volumes/volumes.py index 8a9a28e..dd66d7b 100644 --- a/datacrunch/volumes/volumes.py +++ b/datacrunch/volumes/volumes.py @@ -20,6 +20,7 @@ def __init__(self, location: str = "FIN1", instance_id: str = None, ssh_key_ids: List[str] = [], + deleted_at: str = None, ) -> None: """Initialize the volume object @@ -45,6 +46,8 @@ def __init__(self, :type instance_id: str :param ssh_key_ids: list of ssh keys ids :type ssh_key_ids: List[str] + :param deleted_at: the time the volume was deleted (UTC), defaults to None + :type deleted_at: str, optional """ self._id = id self._status = status @@ -57,6 +60,7 @@ def __init__(self, self._location = location self._instance_id = instance_id self._ssh_key_ids = ssh_key_ids + self._deleted_at = deleted_at @property def id(self) -> str: @@ -157,6 +161,15 @@ def ssh_key_ids(self) -> List[str]: """ return self._ssh_key_ids + @property + def deleted_at(self) -> Optional[str]: + """Get the time when the volume was deleted (UTC) + + :return: time + :rtype: str + """ + return self._deleted_at + def __str__(self) -> str: """Returns a string of the json representation of the volume @@ -195,6 +208,7 @@ def get(self, status: str = None) -> List[Volume]: instance_id=volume_dict['instance_id'] if 'instance_id' in volume_dict else None, ssh_key_ids=volume_dict['ssh_key_ids'] if 'ssh_key_ids' in volume_dict else [ ], + deleted_at=volume_dict['deleted_at'] if 'deleted_at' in volume_dict else None, ), volumes_dict)) return volumes @@ -221,9 +235,37 @@ def get_by_id(self, id: str) -> Volume: instance_id=volume_dict['instance_id'] if 'instance_id' in volume_dict else 
None, ssh_key_ids=volume_dict['ssh_key_ids'] if 'ssh_key_ids' in volume_dict else [ ], + deleted_at=volume_dict['deleted_at'] if 'deleted_at' in volume_dict else None, ) return volume + def get_in_trash(self) -> List[Volume]: + """Get all volumes that are in trash + + :return: list of volume details objects + :rtype: List[Volume] + """ + volumes_dicts = self._http_client.get( + VOLUMES_ENDPOINT + '/trash' + ).json() + + volumes = list(map(lambda volume_dict: Volume( + id=volume_dict['id'], + status=volume_dict['status'], + name=volume_dict['name'], + size=volume_dict['size'], + type=volume_dict['type'], + is_os_volume=volume_dict['is_os_volume'], + created_at=volume_dict['created_at'], + target=volume_dict['target'] if 'target' in volume_dict else None, + location=volume_dict['location'], + instance_id=volume_dict['instance_id'] if 'instance_id' in volume_dict else None, + ssh_key_ids=volume_dict['ssh_key_ids'] if 'ssh_key_ids' in volume_dict else [ + ], + deleted_at=volume_dict['deleted_at'] if 'deleted_at' in volume_dict else None, + ), volumes_dicts)) + return volumes + def create(self, type: str, name: str, @@ -358,7 +400,7 @@ def increase_size(self, id_list: Union[List[str], str], size: int) -> None: self._http_client.put(VOLUMES_ENDPOINT, json=payload) return - def delete(self, id_list: Union[List[str], str]) -> None: + def delete(self, id_list: Union[List[str], str], is_permanent: bool = False) -> None: """Delete multiple volumes or single volume Note: if attached to any instances, they need to be shut-down (offline) @@ -368,6 +410,7 @@ def delete(self, id_list: Union[List[str], str]) -> None: payload = { "id": id_list, "action": VolumeActions.DELETE, + "is_permanent": is_permanent } self._http_client.put(VOLUMES_ENDPOINT, json=payload) From cbc00c05e6f7de685d81a4d74b7c3ab68cf5353a Mon Sep 17 00:00:00 2001 From: Tamir Date: Wed, 7 Jun 2023 14:49:24 +0300 Subject: [PATCH 02/12] Added a Volume class method that inits a new Volume instance from a dict --- 
CHANGELOG.rst | 3 ++ datacrunch/volumes/volumes.py | 65 +++++++++-------------------------- 2 files changed, 20 insertions(+), 48 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index dff9004..a5bffa0 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,6 +1,9 @@ Changelog ========= +* Added support for permanent deletion of volumes +* Added a Volume class method that inits a new Volume instance from a dict + v1.3.0 (2023-05-25) ------------------- diff --git a/datacrunch/volumes/volumes.py b/datacrunch/volumes/volumes.py index dd66d7b..2f8dc4a 100644 --- a/datacrunch/volumes/volumes.py +++ b/datacrunch/volumes/volumes.py @@ -170,6 +170,17 @@ def deleted_at(self) -> Optional[str]: """ return self._deleted_at + @classmethod + def create_from_dict(cls: 'Volume', volume_dict: dict) -> 'Volume': + """Create a Volume object from a dictionary + + :param volume_dict: dictionary representing the volume + :type volume_dict: dict + :return: Volume + :rtype: Volume + """ + return cls(**volume_dict) + def __str__(self) -> str: """Returns a string of the json representation of the volume @@ -195,22 +206,7 @@ def get(self, status: str = None) -> List[Volume]: """ volumes_dict = self._http_client.get( VOLUMES_ENDPOINT, params={'status': status}).json() - volumes = list(map(lambda volume_dict: Volume( - id=volume_dict['id'], - status=volume_dict['status'], - name=volume_dict['name'], - size=volume_dict['size'], - type=volume_dict['type'], - is_os_volume=volume_dict['is_os_volume'], - created_at=volume_dict['created_at'], - target=volume_dict['target'] if 'target' in volume_dict else None, - location=volume_dict['location'], - instance_id=volume_dict['instance_id'] if 'instance_id' in volume_dict else None, - ssh_key_ids=volume_dict['ssh_key_ids'] if 'ssh_key_ids' in volume_dict else [ - ], - deleted_at=volume_dict['deleted_at'] if 'deleted_at' in volume_dict else None, - ), volumes_dict)) - return volumes + return list(map(Volume.create_from_dict, volumes_dict)) def 
get_by_id(self, id: str) -> Volume: """Get a specific volume by its @@ -222,22 +218,8 @@ def get_by_id(self, id: str) -> Volume: """ volume_dict = self._http_client.get( VOLUMES_ENDPOINT + f'/{id}').json() - volume = Volume( - id=volume_dict['id'], - status=volume_dict['status'], - name=volume_dict['name'], - size=volume_dict['size'], - type=volume_dict['type'], - is_os_volume=volume_dict['is_os_volume'], - created_at=volume_dict['created_at'], - target=volume_dict['target'] if 'target' in volume_dict else None, - location=volume_dict['location'], - instance_id=volume_dict['instance_id'] if 'instance_id' in volume_dict else None, - ssh_key_ids=volume_dict['ssh_key_ids'] if 'ssh_key_ids' in volume_dict else [ - ], - deleted_at=volume_dict['deleted_at'] if 'deleted_at' in volume_dict else None, - ) - return volume + + return Volume.create_from_dict(volume_dict) def get_in_trash(self) -> List[Volume]: """Get all volumes that are in trash @@ -249,22 +231,9 @@ def get_in_trash(self) -> List[Volume]: VOLUMES_ENDPOINT + '/trash' ).json() - volumes = list(map(lambda volume_dict: Volume( - id=volume_dict['id'], - status=volume_dict['status'], - name=volume_dict['name'], - size=volume_dict['size'], - type=volume_dict['type'], - is_os_volume=volume_dict['is_os_volume'], - created_at=volume_dict['created_at'], - target=volume_dict['target'] if 'target' in volume_dict else None, - location=volume_dict['location'], - instance_id=volume_dict['instance_id'] if 'instance_id' in volume_dict else None, - ssh_key_ids=volume_dict['ssh_key_ids'] if 'ssh_key_ids' in volume_dict else [ - ], - deleted_at=volume_dict['deleted_at'] if 'deleted_at' in volume_dict else None, - ), volumes_dicts)) - return volumes + print(volumes_dicts) + + return list(map(Volume.create_from_dict, volumes_dicts)) def create(self, type: str, From 55c730a396372ea8f9279f3ecd7e762fc870ea6b Mon Sep 17 00:00:00 2001 From: Tamir Date: Wed, 7 Jun 2023 15:01:35 +0300 Subject: [PATCH 03/12] remove print --- 
datacrunch/volumes/volumes.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/datacrunch/volumes/volumes.py b/datacrunch/volumes/volumes.py index 2f8dc4a..f1928ee 100644 --- a/datacrunch/volumes/volumes.py +++ b/datacrunch/volumes/volumes.py @@ -231,8 +231,6 @@ def get_in_trash(self) -> List[Volume]: VOLUMES_ENDPOINT + '/trash' ).json() - print(volumes_dicts) - return list(map(Volume.create_from_dict, volumes_dicts)) def create(self, From 1cbcd26bc30efbb63759e43e771ff12079805ad5 Mon Sep 17 00:00:00 2001 From: Tamir Date: Wed, 7 Jun 2023 15:12:25 +0300 Subject: [PATCH 04/12] updated mock api call matcher --- tests/unit_tests/volumes/test_volumes.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/unit_tests/volumes/test_volumes.py b/tests/unit_tests/volumes/test_volumes.py index 2e06af3..b2908ef 100644 --- a/tests/unit_tests/volumes/test_volumes.py +++ b/tests/unit_tests/volumes/test_volumes.py @@ -467,7 +467,8 @@ def test_delete_volume_successful(self, volumes_service, endpoint): match=[ responses.json_params_matcher({ "id": NVME_VOL_ID, - "action": VolumeActions.DELETE + "action": VolumeActions.DELETE, + "is_permanent": False }) ] ) @@ -489,7 +490,8 @@ def test_delete_volume_failed(self, volumes_service, endpoint): match=[ responses.json_params_matcher({ "id": NVME_VOL_ID, - "action": VolumeActions.DELETE + "action": VolumeActions.DELETE, + "is_permanent": False }) ] ) From a68743cb6970387b4c65ba76e0046096ae7c2f4f Mon Sep 17 00:00:00 2001 From: Tamir Date: Wed, 14 Jun 2023 15:19:18 +0300 Subject: [PATCH 05/12] added some basic integration tests for permanent volume deletion --- pytest.ini | 4 +- tests/integration_tests/__init__.py | 0 tests/integration_tests/conftest.py | 22 +++++++++ tests/integration_tests/test_volumes.py | 66 +++++++++++++++++++++++++ 4 files changed, 91 insertions(+), 1 deletion(-) create mode 100644 tests/integration_tests/__init__.py create mode 100644 tests/integration_tests/conftest.py create mode 100644 
tests/integration_tests/test_volumes.py diff --git a/pytest.ini b/pytest.ini index 064be9e..d5162a5 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,2 +1,4 @@ [pytest] -testpaths = tests/unit_tests \ No newline at end of file +testpaths = + tests/unit_tests + tests/integration_tests \ No newline at end of file diff --git a/tests/integration_tests/__init__.py b/tests/integration_tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/integration_tests/conftest.py b/tests/integration_tests/conftest.py new file mode 100644 index 0000000..05c4784 --- /dev/null +++ b/tests/integration_tests/conftest.py @@ -0,0 +1,22 @@ +import os +import pytest +from dotenv import load_dotenv +from datacrunch.datacrunch import DataCrunchClient + +""" +Make sure to run the server and the account has enough balance before running the tests +""" + +BASE_URL = "http://localhost:3010/v1" + +# Load env variables, make sure there's an env file with valid client credentials +load_dotenv() +CLIENT_SECRET = os.getenv('DATACRUNCH_CLIENT_SECRET') +CLIENT_ID = os.getenv('DATACRUNCH_CLIENT_ID') + +print(CLIENT_SECRET) + + +@pytest.fixture +def datacrunch_client(): + return DataCrunchClient(CLIENT_ID, CLIENT_SECRET, BASE_URL) diff --git a/tests/integration_tests/test_volumes.py b/tests/integration_tests/test_volumes.py new file mode 100644 index 0000000..04eb0bd --- /dev/null +++ b/tests/integration_tests/test_volumes.py @@ -0,0 +1,66 @@ +import pytest +from datacrunch.datacrunch import DataCrunchClient + + +@pytest.mark.withoutresponses +class TestVolumes(): + + def test_get_volumes_from_trash(self, datacrunch_client: DataCrunchClient): + # create new volume + volume = datacrunch_client.volumes.create( + type=datacrunch_client.constants.volume_types.NVMe, name="test_volume", size=100) + + # delete volume + datacrunch_client.volumes.delete(volume.id) + + # get volumes from trash + volumes = datacrunch_client.volumes.get_in_trash() + + # assert volume is in trash + assert 
volume.id in [v.id for v in volumes] + + # cleaning: permanently delete the volume + datacrunch_client.volumes.delete(volume.id, is_permanent=True) + + def test_permanently_delete_detached_volumes(self, datacrunch_client): + # create new volume + volume = datacrunch_client.volumes.create( + type=datacrunch_client.constants.volume_types.NVMe, name="test_volume", size=100) + + # permanently delete the detached volume + datacrunch_client.volumes.delete(volume.id, is_permanent=True) + + # make sure the volume is not in trash + volumes = datacrunch_client.volumes.get_in_trash() + + # assert volume is not in trash + assert volume.id not in [v.id for v in volumes] + + # get the volume + volume = datacrunch_client.volumes.get_by_id(volume.id) + + # assert volume status is deleted + assert volume.status == datacrunch_client.constants.volume_status.DELETED + + def test_permanently_delete_a_deleted_volume_from_trash(self, datacrunch_client): + # create new volume + volume = datacrunch_client.volumes.create( + type=datacrunch_client.constants.volume_types.NVMe, name="test_volume", size=100) + + # delete volume + datacrunch_client.volumes.delete(volume.id) + + # permanently delete the volume + datacrunch_client.volumes.delete(volume.id, is_permanent=True) + + # get the volume + volume = datacrunch_client.volumes.get_by_id(volume.id) + + # assert volume status is deleted + assert volume.status == datacrunch_client.constants.volume_status.DELETED + + # make sure the volume is not in trash + volumes = datacrunch_client.volumes.get_in_trash() + + # assert volume is not in trash + assert volume.id not in [v.id for v in volumes] From 853cbc5d867f12008e2a3178891a7211d7bc1cc5 Mon Sep 17 00:00:00 2001 From: Tamir Date: Wed, 14 Jun 2023 15:21:42 +0300 Subject: [PATCH 06/12] try to run only unit tests in the github action --- .github/workflows/unit_tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/unit_tests.yml
b/.github/workflows/unit_tests.yml index eb70fe4..d15419e 100644 --- a/.github/workflows/unit_tests.yml +++ b/.github/workflows/unit_tests.yml @@ -28,7 +28,7 @@ jobs: - name: Test with pytest and coverage run: | - pytest --cov=datacrunch + pytest tests/unit_tests --cov=datacrunch - name: 'Upload coverage to Codecov' uses: codecov/codecov-action@v1 From ea3647d8b76aeb5b3e482d466a57e4125392402d Mon Sep 17 00:00:00 2001 From: Tamir Date: Wed, 14 Jun 2023 15:30:14 +0300 Subject: [PATCH 07/12] maybe now --- .github/workflows/unit_tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml index d15419e..ad6a752 100644 --- a/.github/workflows/unit_tests.yml +++ b/.github/workflows/unit_tests.yml @@ -28,7 +28,7 @@ jobs: - name: Test with pytest and coverage run: | - pytest tests/unit_tests --cov=datacrunch + pytest ./tests/unit_tests --cov=datacrunch - name: 'Upload coverage to Codecov' uses: codecov/codecov-action@v1 From 110ae32784069cf1161f2414cb62947a43acd158 Mon Sep 17 00:00:00 2001 From: Tamir Date: Wed, 14 Jun 2023 15:44:07 +0300 Subject: [PATCH 08/12] try skipping the test if it recognizes gh actions env var --- .github/workflows/unit_tests.yml | 2 +- tests/integration_tests/test_volumes.py | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml index ad6a752..eb70fe4 100644 --- a/.github/workflows/unit_tests.yml +++ b/.github/workflows/unit_tests.yml @@ -28,7 +28,7 @@ jobs: - name: Test with pytest and coverage run: | - pytest ./tests/unit_tests --cov=datacrunch + pytest --cov=datacrunch - name: 'Upload coverage to Codecov' uses: codecov/codecov-action@v1 diff --git a/tests/integration_tests/test_volumes.py b/tests/integration_tests/test_volumes.py index 04eb0bd..5ca6a3d 100644 --- a/tests/integration_tests/test_volumes.py +++ b/tests/integration_tests/test_volumes.py @@ -1,7 +1,11 @@ +import os 
import pytest from datacrunch.datacrunch import DataCrunchClient +IN_GITHUB_ACTIONS = os.getenv("GITHUB_ACTIONS") == "true" + +@pytest.mark.skipif(IN_GITHUB_ACTIONS, reason="Test doesn't work in Github Actions.") @pytest.mark.withoutresponses class TestVolumes(): From 7074e0560e7d11695e634f5d921bcab6b5d8f2a0 Mon Sep 17 00:00:00 2001 From: Tamir Date: Wed, 14 Jun 2023 15:49:05 +0300 Subject: [PATCH 09/12] try to add python-dotenv to prevent fails --- .github/workflows/unit_tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml index eb70fe4..ffcc486 100644 --- a/.github/workflows/unit_tests.yml +++ b/.github/workflows/unit_tests.yml @@ -24,7 +24,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - python -m pip install pytest pytest-cov pytest-responses responses + python -m pip install pytest pytest-cov pytest-responses responses python-dotenv - name: Test with pytest and coverage run: | From dced6070d602a30aef0f1ecac765ae3e0c0de932 Mon Sep 17 00:00:00 2001 From: Tamir Date: Wed, 14 Jun 2023 16:06:19 +0300 Subject: [PATCH 10/12] remove print --- tests/integration_tests/conftest.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/integration_tests/conftest.py b/tests/integration_tests/conftest.py index 05c4784..5ca0eaf 100644 --- a/tests/integration_tests/conftest.py +++ b/tests/integration_tests/conftest.py @@ -14,8 +14,6 @@ CLIENT_SECRET = os.getenv('DATACRUNCH_CLIENT_SECRET') CLIENT_ID = os.getenv('DATACRUNCH_CLIENT_ID') -print(CLIENT_SECRET) - @pytest.fixture def datacrunch_client(): From 683f70b4e6c1a74f67ad9b21fea0cf506312ac08 Mon Sep 17 00:00:00 2001 From: Tamir Date: Wed, 14 Jun 2023 16:10:37 +0300 Subject: [PATCH 11/12] updated the volumes example file --- examples/storage_volumes.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/examples/storage_volumes.py b/examples/storage_volumes.py index 
6d2c0e4..72e4a2e 100644 --- a/examples/storage_volumes.py +++ b/examples/storage_volumes.py @@ -62,5 +62,11 @@ # clone multiple volumes at once datacrunch.volumes.clone([nvme_volume_id, hdd_volume_id]) -# delete volumes +# delete volumes (move to trash for 96h, not permanent) datacrunch.volumes.delete([nvme_volume_id, hdd_volume_id]) + +# get all volumes in trash +volumes_in_trash = datacrunch.volumes.get_in_trash() + +# delete volumes permanently +datacrunch.volumes.delete([nvme_volume_id, hdd_volume_id], is_permanent=True) From 8b6f7ee46c5921b42ad9db7796772ab3de2398ed Mon Sep 17 00:00:00 2001 From: Tamir Date: Wed, 14 Jun 2023 16:12:42 +0300 Subject: [PATCH 12/12] don't fail ci if codecov fails, its not that important --- .github/workflows/unit_tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml index ffcc486..1197577 100644 --- a/.github/workflows/unit_tests.yml +++ b/.github/workflows/unit_tests.yml @@ -33,4 +33,4 @@ jobs: - name: 'Upload coverage to Codecov' uses: codecov/codecov-action@v1 with: - fail_ci_if_error: true + fail_ci_if_error: false