Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions .github/workflows/unit_tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip
python -m pip install pytest pytest-cov pytest-responses responses
python -m pip install pytest pytest-cov pytest-responses responses python-dotenv

- name: Test with pytest and coverage
run: |
Expand All @@ -33,4 +33,4 @@ jobs:
- name: 'Upload coverage to Codecov'
uses: codecov/codecov-action@v1
with:
fail_ci_if_error: true
fail_ci_if_error: false
3 changes: 3 additions & 0 deletions CHANGELOG.rst
Original file line number Diff line number Diff line change
@@ -1,6 +1,9 @@
Changelog
=========

* Added support for permanent deletion of volumes
* Added a Volume class method that inits a new Volume instance from a dict

v1.3.0 (2023-05-25)
-------------------

Expand Down
72 changes: 41 additions & 31 deletions datacrunch/volumes/volumes.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ def __init__(self,
location: str = "FIN1",
instance_id: str = None,
ssh_key_ids: List[str] = [],
deleted_at: str = None,
) -> None:
"""Initialize the volume object

Expand All @@ -45,6 +46,8 @@ def __init__(self,
:type instance_id: str
:param ssh_key_ids: list of ssh keys ids
:type ssh_key_ids: List[str]
:param deleted_at: the time the volume was deleted (UTC), defaults to None
:type deleted_at: str, optional
"""
self._id = id
self._status = status
Expand All @@ -57,6 +60,7 @@ def __init__(self,
self._location = location
self._instance_id = instance_id
self._ssh_key_ids = ssh_key_ids
self._deleted_at = deleted_at

@property
def id(self) -> str:
Expand Down Expand Up @@ -157,6 +161,26 @@ def ssh_key_ids(self) -> List[str]:
"""
return self._ssh_key_ids

@property
def deleted_at(self) -> Optional[str]:
    """Get the time when the volume was deleted (UTC).

    :return: deletion time, or None if the volume has not been deleted
    :rtype: str, optional
    """
    return self._deleted_at

@classmethod
def create_from_dict(cls, volume_dict: dict) -> 'Volume':
    """Create a Volume object from a dictionary (e.g. a deserialized API response).

    The dictionary keys must match the Volume constructor's parameter names;
    they are passed through as keyword arguments.

    :param volume_dict: dictionary representing the volume
    :type volume_dict: dict
    :return: a new Volume instance
    :rtype: Volume
    """
    # NOTE: annotating ``cls`` as 'Volume' was incorrect — ``cls`` is the
    # class object itself (Type[Volume]), not an instance — so the
    # annotation is omitted, which is the conventional form.
    return cls(**volume_dict)

def __str__(self) -> str:
"""Returns a string of the json representation of the volume

Expand All @@ -182,21 +206,7 @@ def get(self, status: str = None) -> List[Volume]:
"""
volumes_dict = self._http_client.get(
VOLUMES_ENDPOINT, params={'status': status}).json()
volumes = list(map(lambda volume_dict: Volume(
id=volume_dict['id'],
status=volume_dict['status'],
name=volume_dict['name'],
size=volume_dict['size'],
type=volume_dict['type'],
is_os_volume=volume_dict['is_os_volume'],
created_at=volume_dict['created_at'],
target=volume_dict['target'] if 'target' in volume_dict else None,
location=volume_dict['location'],
instance_id=volume_dict['instance_id'] if 'instance_id' in volume_dict else None,
ssh_key_ids=volume_dict['ssh_key_ids'] if 'ssh_key_ids' in volume_dict else [
],
), volumes_dict))
return volumes
return list(map(Volume.create_from_dict, volumes_dict))

def get_by_id(self, id: str) -> Volume:
"""Get a specific volume by its
Expand All @@ -208,21 +218,20 @@ def get_by_id(self, id: str) -> Volume:
"""
volume_dict = self._http_client.get(
VOLUMES_ENDPOINT + f'/{id}').json()
volume = Volume(
id=volume_dict['id'],
status=volume_dict['status'],
name=volume_dict['name'],
size=volume_dict['size'],
type=volume_dict['type'],
is_os_volume=volume_dict['is_os_volume'],
created_at=volume_dict['created_at'],
target=volume_dict['target'] if 'target' in volume_dict else None,
location=volume_dict['location'],
instance_id=volume_dict['instance_id'] if 'instance_id' in volume_dict else None,
ssh_key_ids=volume_dict['ssh_key_ids'] if 'ssh_key_ids' in volume_dict else [
],
)
return volume

return Volume.create_from_dict(volume_dict)

def get_in_trash(self) -> List[Volume]:
    """Return every volume that is currently in the trash.

    :return: list of volume details objects
    :rtype: List[Volume]
    """
    response = self._http_client.get(VOLUMES_ENDPOINT + '/trash')
    return [Volume.create_from_dict(volume_dict) for volume_dict in response.json()]

def create(self,
type: str,
Expand Down Expand Up @@ -358,7 +367,7 @@ def increase_size(self, id_list: Union[List[str], str], size: int) -> None:
self._http_client.put(VOLUMES_ENDPOINT, json=payload)
return

def delete(self, id_list: Union[List[str], str]) -> None:
def delete(self, id_list: Union[List[str], str], is_permanent: bool = False) -> None:
"""Delete multiple volumes or single volume
Note: if attached to any instances, they need to be shut-down (offline)

Expand All @@ -368,6 +377,7 @@ def delete(self, id_list: Union[List[str], str]) -> None:
payload = {
"id": id_list,
"action": VolumeActions.DELETE,
"is_permanent": is_permanent
}

self._http_client.put(VOLUMES_ENDPOINT, json=payload)
Expand Down
8 changes: 7 additions & 1 deletion examples/storage_volumes.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,5 +62,11 @@
# clone multiple volumes at once
datacrunch.volumes.clone([nvme_volume_id, hdd_volume_id])

# delete volumes (soft delete: moved to trash for 96h, recoverable until then)
datacrunch.volumes.delete([nvme_volume_id, hdd_volume_id])

# list all volumes currently sitting in the trash
volumes_in_trash = datacrunch.volumes.get_in_trash()

# delete volumes permanently (bypasses/empties trash; cannot be recovered)
datacrunch.volumes.delete([nvme_volume_id, hdd_volume_id], is_permanent=True)
4 changes: 3 additions & 1 deletion pytest.ini
Original file line number Diff line number Diff line change
@@ -1,2 +1,4 @@
[pytest]
testpaths = tests/unit_tests
testpaths =
tests/unit_tests
tests/integration_tests
Empty file.
20 changes: 20 additions & 0 deletions tests/integration_tests/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
import os

import pytest
from dotenv import load_dotenv

from datacrunch.datacrunch import DataCrunchClient

# NOTE: the triple-quoted string that used to sit here was a no-op expression
# statement (it came after the imports, so it was not a module docstring);
# its content is preserved as a real comment instead:
# Make sure to run the server and the account has enough balance before
# running the tests.

BASE_URL = "http://localhost:3010/v1"

# Load env variables, make sure there's an env file with valid client credentials
load_dotenv()
CLIENT_SECRET = os.getenv('DATACRUNCH_CLIENT_SECRET')
CLIENT_ID = os.getenv('DATACRUNCH_CLIENT_ID')


@pytest.fixture
def datacrunch_client():
    """Return a DataCrunchClient wired to the local test server (BASE_URL)."""
    return DataCrunchClient(CLIENT_ID, CLIENT_SECRET, BASE_URL)
70 changes: 70 additions & 0 deletions tests/integration_tests/test_volumes.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
import os
import pytest
from datacrunch.datacrunch import DataCrunchClient

IN_GITHUB_ACTIONS = os.getenv("GITHUB_ACTIONS") == "true"


@pytest.mark.skipif(IN_GITHUB_ACTIONS, reason="Test doesn't work in Github Actions.")
@pytest.mark.withoutresponses
class TestVolumes():
    """Integration tests for volume trash and permanent-deletion behavior.

    Requires a running local API server and an account with sufficient
    balance (see tests/integration_tests/conftest.py).
    """

    def test_get_volumes_from_trash(self, datacrunch_client: DataCrunchClient):
        # create new volume
        volume = datacrunch_client.volumes.create(
            type=datacrunch_client.constants.volume_types.NVMe, name="test_volume", size=100)

        # delete volume (soft delete - moves it to trash)
        datacrunch_client.volumes.delete(volume.id)

        # get volumes from trash
        volumes = datacrunch_client.volumes.get_in_trash()

        # assert volume is in trash
        assert volume.id in [v.id for v in volumes]

        # cleaning: permanently delete the volume
        datacrunch_client.volumes.delete(volume.id, is_permanent=True)

    # BUG FIX: the first parameter was misspelled 'seld'; pytest still ran the
    # test (the fixture bound positionally), but 'self' was shadowed.
    def test_permanently_delete_detached_volumes(self, datacrunch_client):
        # create new volume
        volume = datacrunch_client.volumes.create(
            type=datacrunch_client.constants.volume_types.NVMe, name="test_volume", size=100)

        # permanently delete the detached volume
        datacrunch_client.volumes.delete(volume.id, is_permanent=True)

        # make sure the volume is not in trash
        volumes = datacrunch_client.volumes.get_in_trash()

        # assert volume is not in trash
        assert volume.id not in [v.id for v in volumes]

        # get the volume
        volume = datacrunch_client.volumes.get_by_id(volume.id)

        # assert volume status is deleted
        assert volume.status == datacrunch_client.constants.volume_status.DELETED

    def test_permanently_delete_a_deleted_volume_from_trash(self, datacrunch_client):
        # create new volume
        volume = datacrunch_client.volumes.create(
            type=datacrunch_client.constants.volume_types.NVMe, name="test_volume", size=100)

        # delete volume (soft delete first)
        datacrunch_client.volumes.delete(volume.id)

        # permanently delete the volume while it is in trash
        datacrunch_client.volumes.delete(volume.id, is_permanent=True)

        # get the volume
        volume = datacrunch_client.volumes.get_by_id(volume.id)

        # assert volume status is deleted
        assert volume.status == datacrunch_client.constants.volume_status.DELETED

        # make sure the volume is not in trash
        volumes = datacrunch_client.volumes.get_in_trash()

        # assert volume is not in trash
        assert volume.id not in [v.id for v in volumes]
6 changes: 4 additions & 2 deletions tests/unit_tests/volumes/test_volumes.py
Original file line number Diff line number Diff line change
Expand Up @@ -467,7 +467,8 @@ def test_delete_volume_successful(self, volumes_service, endpoint):
match=[
responses.json_params_matcher({
"id": NVME_VOL_ID,
"action": VolumeActions.DELETE
"action": VolumeActions.DELETE,
"is_permanent": False
})
]
)
Expand All @@ -489,7 +490,8 @@ def test_delete_volume_failed(self, volumes_service, endpoint):
match=[
responses.json_params_matcher({
"id": NVME_VOL_ID,
"action": VolumeActions.DELETE
"action": VolumeActions.DELETE,
"is_permanent": False
})
]
)
Expand Down