Update __init__.py
r0h4n committed Mar 31, 2017
1 parent c6d7dee commit 7077122
Showing 1 changed file with 17 additions and 17 deletions.
34 changes: 17 additions & 17 deletions tendrl/commons/flows/create_cluster/__init__.py
@@ -17,9 +17,14 @@
class CreateCluster(flows.BaseFlow):
    def run(self):
        integration_id = self.parameters['TendrlContext.integration_id']
        NS.tendrl_context = NS.tendrl_context.load()
        NS.tendrl_context.integration_id = integration_id
        NS.tendrl_context.save()
        _node_context = NS.node_context.load()
        for tag in json.loads(_node_context.tags):
            # The Ceph provisioner's tendrl-node-agent should not participate
            # in the cluster, hence it is removed from the node list
            provisioner_tag = NS.compiled_definitions.get_parsed_defs()[
                'namespace.tendrl']['tags']['ceph-provisioner']
            if provisioner_tag in tag:
                self.parameters['Node[]'].remove(_node_context.node_id)

        ssh_job_ids = []
        if "ceph" in self.parameters["TendrlContext.sds_name"]:
            ssh_job_ids = utils.ceph_create_ssh_setup_jobs(self.parameters)
@@ -74,19 +79,12 @@ def run(self):

        gluster_help.create_gluster(self.parameters)

        # Start jobs for importing cluster
        node_list = self.parameters['Node[]']
        try:
            node_list.remove(NS.node_context.node_id)
        except ValueError:
            # key not found, ignore
            pass

        # Wait till DetectedCluster is populated for the nodes
        all_nodes_have_detected_cluster = False
        while not all_nodes_have_detected_cluster:
            all_status = []
            for node in node_list:
            for node in self.parameters['Node[]']:
                try:
                    NS.etcd_orm.client.read("/nodes/%s/DetectedCluster" % node)
                    all_status.append(True)
@@ -97,28 +95,29 @@ def run(self):

        # Create the params list for import cluster flow
        new_params = {}
        new_params['Node[]'] = node_list
        new_params['Node[]'] = self.parameters['Node[]']
        new_params['TendrlContext.integration_id'] = integration_id

        # Get node context for one of the nodes from list
        sds_pkg_name = NS.etcd_orm.client.read(
            "nodes/%s/DetectedCluster/sds_pkg_name" % node_list[0]
            "nodes/%s/DetectedCluster/sds_pkg_name" % self.parameters['Node[]'][0]
        ).value
        sds_pkg_version = NS.etcd_orm.client.read(
            "nodes/%s/DetectedCluster/sds_pkg_version" % node_list[0]
            "nodes/%s/DetectedCluster/sds_pkg_version" % self.parameters['Node[]'][0]
        ).value
        new_params['DetectedCluster.sds_pkg_name'] = \
            sds_pkg_name
        new_params['DetectedCluster.sds_pkg_version'] = \
            sds_pkg_version
        payload = {"node_ids": node_list,
        payload = {"node_ids": self.parameters['Node[]'],
                   "run": "tendrl.flows.ImportCluster",
                   "status": "new",
                   "parameters": new_params,
                   "parent": self.parameters['job_id'],
                   "type": "node"
                   }
        Job(job_id=str(uuid.uuid4()),
        _job_id = str(uuid.uuid4())
        Job(job_id=_job_id,
            status="new",
            payload=json.dumps(payload)).save()
        Event(
@@ -127,7 +126,8 @@ def run(self):
            flow_id = self.parameters['flow_id'],
            priority="info",
            publisher=NS.publisher_id,
            payload={"message": "Importing newly created %s Storage Cluster %s" % (sds_pkg_name,
            payload={"message": "Importing (job_id: %s) newly created %s Storage Cluster %s" % (_job_id,
                                 sds_pkg_name,
                                 integration_id)
                     }
        )
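
The new block at the top of run() drops the provisioner node from Node[] by inspecting its tags, and the later hunks then use self.parameters['Node[]'] directly instead of the removed node_list copy. Below is a minimal, standalone sketch of that tag-based filtering; the helper name drop_provisioner_node, the example tag strings, and the plain-list stand-ins for the NS node context are illustrative assumptions, not part of the tendrl API.

    # Illustrative sketch only: mirrors the tag check added in this commit,
    # with a plain function in place of tendrl's NS namespace objects.
    import json

    def drop_provisioner_node(node_ids, node_tags_json, node_id, provisioner_tag):
        """Remove node_id from node_ids if any of its tags contains provisioner_tag."""
        for tag in json.loads(node_tags_json):
            if provisioner_tag in tag:
                if node_id in node_ids:
                    node_ids.remove(node_id)
                break  # a second .remove() of the same id would raise ValueError
        return node_ids

    # Hypothetical values for demonstration; real tags come from NS.node_context.tags
    # and the provisioner tag from the compiled tendrl definitions.
    nodes = ["node-1", "node-2", "node-3"]
    tags = json.dumps(["tendrl/node", "provisioner/ceph"])
    print(drop_provisioner_node(nodes, tags, "node-2", "provisioner/ceph"))
    # -> ['node-1', 'node-3']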
