/
create_dataset.py
133 lines (107 loc) · 3.91 KB
/
create_dataset.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
# Copyright 2015 ClusterHQ Inc. See LICENSE file for details.
"""
Operation to create a dataset.
"""
from functools import partial
import random
from uuid import uuid4
from pyrsistent import PClass, field
from zope.interface import implementer
from flocker.common import loop_until
from .._interfaces import IProbe, IOperation
class EmptyClusterError(Exception):
    """
    Raised when an operation requires at least one cluster node but the
    cluster reports none.
    """
@implementer(IProbe)
class CreateDatasetProbe(PClass):
    """
    Probe that creates a Flocker dataset and waits until the cluster
    reports it as mounted on its primary node.
    """

    reactor = field(mandatory=True)
    control_service = field(mandatory=True)
    primary = field(mandatory=True)
    dataset_id = field(mandatory=True)
    volume_size = field(mandatory=True)

    @classmethod
    def setup(cls, reactor, control_service, dataset_id, volume_size):
        """
        Build a probe, choosing a primary node from the live cluster.

        :param reactor: Twisted Reactor.
        :param control_service: Benchmark control service.
        :param UUID dataset_id: UUID for created dataset.
        :param int volume_size: Size of created volume, in bytes.
        :return: Deferred firing with a new probe.
        """
        def choose_primary(nodes):
            # Guard clause: an empty cluster cannot host a dataset.
            if not nodes:
                raise EmptyClusterError("Cluster contains no nodes.")
            return random.choice(nodes)

        def build_probe(node):
            # Assemble the immutable probe around the chosen primary.
            return cls(
                reactor=reactor,
                control_service=control_service,
                primary=node,
                dataset_id=dataset_id,
                volume_size=volume_size,
            )

        listing = control_service.list_nodes()
        listing.addCallback(choose_primary)
        listing.addCallback(build_probe)
        return listing

    def _converged(self, expected):
        """
        Check whether the expected dataset has appeared in cluster state.

        :param flocker.apiclient.Dataset expected: A dataset to match against
            the results of ``list_datasets_state``.
        :return: a Deferred that fires True if the expected dataset exists in
            the cluster, or False otherwise.
        """
        def any_match(states):
            # A dataset counts as converged once it matches on id and
            # primary and has been assigned a mount path.
            return any(
                expected.dataset_id == state.dataset_id and
                expected.primary == state.primary and
                state.path is not None
                for state in states
            )

        querying = self.control_service.list_datasets_state()
        querying.addCallback(any_match)
        return querying

    def run(self):
        """
        Create a dataset, then poll until the cluster converges on it.
        """
        creating = self.control_service.create_dataset(
            primary=self.primary.uuid,
            maximum_size=self.volume_size,
            dataset_id=self.dataset_id,
        )
        # Once the create request is accepted, poll cluster state until
        # the dataset shows up mounted.
        creating.addCallback(
            lambda expected: loop_until(
                self.reactor, partial(self._converged, expected)
            )
        )
        return creating

    def cleanup(self):
        """
        Delete the dataset created by the probe.
        """
        return self.control_service.delete_dataset(dataset_id=self.dataset_id)
@implementer(IOperation)
class CreateDataset(object):
def __init__(self, reactor, cluster, volume_size=None):
self.reactor = reactor
self.control_service = cluster.get_control_service(reactor)
if volume_size is None:
self.volume_size = cluster.default_volume_size()
else:
self.volume_size = volume_size
def get_probe(self):
return CreateDatasetProbe.setup(
self.reactor, self.control_service, uuid4(), self.volume_size
)