Skip to content

Commit

Permalink
[DM] Fabric Job uve and Prouter Job uve
Browse files Browse the repository at this point in the history
1. Fabric Job uve creation moved to api server
2. Percentage update is from job mgr to Fabric Job uve
3. Prouter Job uve creation for ztp job, is from job mgr
4. Prouter Job uve creation for non-ztp jobs, moved to api-server
5. job status for each prouter and prouter state gets updated in Prouter Job uve
6. Made a few optimization changes, removing some markers and filters

Change-Id: Id42cb8e8983c7c092d16f06be9b47bd90431044a
Closes-Bug: #1781499
Closes-Bug: #1781517
Closes-Bug: #1767460
  • Loading branch information
root authored and jnpr-tjiang committed Jul 19, 2018
1 parent 6cc3642 commit 9b88f58
Show file tree
Hide file tree
Showing 33 changed files with 689 additions and 493 deletions.
Expand Up @@ -55,6 +55,14 @@ def test_execute_job(self):
job_template_id = self._vnc_lib.job_template_create(
job_template_object)

# create test fabric object
fabric_obj = Fabric(
name="fab_name",
fq_name=["default-global-system-config", "fab_name"],
parent_type='global-system-config'
)
fabric_uuid = self._vnc_lib.fabric_create(fabric_obj)

# create test device object
phy_router_obj = PhysicalRouter(
parent_type='global-system-config',
Expand All @@ -67,6 +75,8 @@ def test_execute_job(self):
physical_router_device_family='juniper-mx')
pr_uuid = self._vnc_lib.physical_router_create(phy_router_obj)

self._vnc_lib.ref_update("physical_router", pr_uuid, "fabric", fabric_uuid, None, "ADD")

execute_job_body = json.dumps({'job_template_id': str(job_template_id),
'input':
{'data': 'Playbook input data'},
Expand Down
166 changes: 158 additions & 8 deletions src/config/api-server/vnc_cfg_api_server/vnc_cfg_api_server.py
Expand Up @@ -51,6 +51,11 @@
from cfgm_common.uve.vnc_api.ttypes import VncApiLatencyStats, VncApiLatencyStatsLog
logger = logging.getLogger(__name__)

import time
import requests
import xml.etree.ElementTree as etree
from functools import partial

"""
Following is needed to silence warnings on every request when keystone
auth_token middleware + Sandesh is used. Keystone or Sandesh alone
Expand Down Expand Up @@ -82,6 +87,8 @@
from cfgm_common import ignore_exceptions
from cfgm_common.uve.vnc_api.ttypes import VncApiCommon, VncApiConfigLog,\
VncApiDebug, VncApiInfo, VncApiNotice, VncApiError
from cfgm_common.uve.vnc_api.ttypes import FabricJobExecution, FabricJobUve, \
PhysicalRouterJobExecution, PhysicalRouterJobUve
from cfgm_common import illegal_xml_chars_RE
from sandesh_common.vns.ttypes import Module
from sandesh_common.vns.constants import ModuleNames, Module2NodeType,\
Expand Down Expand Up @@ -341,7 +348,7 @@ def validate_execute_job_input_params(self, request_params):
try:
job_template_fqname = self._db_conn.uuid_to_fq_name(
job_template_id)
request_params['job_template_fqname'] = job_template_fqname
request_params['job_template_fq_name'] = job_template_fqname
except NoIdError as no_id_exec:
raise cfgm_common.exceptions.HttpError(404, str(no_id_exec))
except Exception as e:
Expand Down Expand Up @@ -382,6 +389,99 @@ def validate_execute_job_input_params(self, request_params):

return device_list

def job_mgr_signal_handler(self, signal_var, signalnum, frame):
    """Handle SIGCHLD for a finished job-manager subprocess.

    Queries the analytics API for the job's object logs to determine the
    final job status, publishes the FabricJobExecution UVE for the fabric,
    and publishes one PhysicalRouterJobExecution UVE per onboarded device.

    :param signal_var: dict with 'fabric_name', 'start_time' (epoch seconds)
        and 'exec_id' captured when the job subprocess was launched
    :param signalnum: signal number delivered by the OS (unused)
    :param frame: interrupted stack frame (unused)
    """
    # Jobs with no fabric context use the "__DEFAULT__" marker and do not
    # maintain a fabric job UVE.  NOTE: original code compared with `is`,
    # which is identity, not equality — fixed to `==`.
    if signal_var.get('fabric_name') == "__DEFAULT__":
        return

    # Window the analytics queries to the job's own lifetime.
    elapsed_time = time.time() - signal_var.get('start_time')
    url = "http://localhost:8081/analytics/query"

    # Read the job object log for this particular job to check whether it
    # succeeded: a "<exec_id>:SUCCESS" object-log entry exists only on
    # success.
    jobObjLog_payload = {
        'start_time': 'now-%ds' % (elapsed_time),
        'end_time': 'now',
        'select_fields': ['MessageTS', 'Messagetype', 'ObjectLog'],
        'table': 'ObjectJobExecutionTable',
        'where': [
            [
                {
                    'name': 'ObjectId',
                    'value': '%s:SUCCESS' % signal_var.get('exec_id'),
                    'op': 1
                }
            ]
        ]
    }

    # Default to FAILURE so a non-200 analytics response (previously left
    # `status` unassigned -> NameError) is reported as a failed job.
    status = 'FAILURE'
    resp = requests.post(url, json=jobObjLog_payload)
    if resp.status_code == 200 and resp.json().get('value') is not None:
        status = 'SUCCESS'

    # Publish the terminal fabric job UVE (job is complete either way).
    job_execution_data = FabricJobExecution(
        name=signal_var.get('fabric_name'),
        job_status=status,
        percentage_completed=100)
    job_execution_uve = FabricJobUve(data=job_execution_data,
                                     sandesh=self._sandesh)
    job_execution_uve.send(sandesh=self._sandesh)

    # Read the last PRouter onboarding state for all prouters touched by
    # this execution, sorted by message timestamp.
    payload = {
        'sort': 1,
        'start_time': 'now-%ds' % (elapsed_time),
        'sort_fields': ['MessageTS'],
        'end_time': 'now',
        'select_fields': ['MessageTS', 'Messagetype', 'ObjectLog'],
        'table': 'ObjectJobExecutionTable',
        'where': [
            [
                {
                    'name': 'Messagetype',
                    'value': 'PRouterOnboardingLog',
                    'op': 1
                },
                {
                    'name': 'ObjectId',
                    'value': '%s' % signal_var.get('exec_id'),
                    'op': 1
                }
            ]
        ]
    }

    resp = requests.post(url, json=payload)
    if resp.status_code != 200:
        return
    # Analytics returns null 'value' when there are no matching rows;
    # guard it so the loop does not raise TypeError.
    PRouterOnboardingLog = resp.json().get('value') or []
    for PRObjectLog in PRouterOnboardingLog:
        xmlresp = etree.fromstring(PRObjectLog.get('ObjectLog'))
        device_fqname = None
        onboarding_state = None
        for ele in xmlresp.iter():
            if ele.tag == 'name':
                device_fqname = ele.text
            elif ele.tag == 'onboarding_state':
                onboarding_state = ele.text
        if device_fqname is None:
            # Malformed object log: no device name to key the UVE on.
            continue

        prouter_uve_name = device_fqname + ":" + \
            signal_var.get('fabric_name')

        prouter_job_data = PhysicalRouterJobExecution(
            name=prouter_uve_name,
            execution_id=signal_var.get('exec_id'),
            # UVE timestamps are in milliseconds.
            job_start_ts=int(round(signal_var.get('start_time') * 1000)),
            prouter_state=onboarding_state
        )

        prouter_job_uve = PhysicalRouterJobUve(data=prouter_job_data,
                                               sandesh=self._sandesh)
        prouter_job_uve.send(sandesh=self._sandesh)

def execute_job_http_post(self):
''' Payload of execute_job
job_template_id (Mandatory if no job_template_fq_name): <uuid> of
Expand Down Expand Up @@ -441,9 +541,55 @@ def execute_job_http_post(self):
}
request_params['args'] = json.dumps(job_args)

fabric_job_uve_name = ''
# create job manager execution status uve
if request_params.get('fabric_fq_name') is not "__DEFAULT__":
fabric_job_name = request_params.get('job_template_fq_name')
fabric_job_name.insert(0, request_params.get('fabric_fq_name'))
fabric_job_uve_name = ':'.join(map(str, fabric_job_name))

job_execution_data = FabricJobExecution(
name=fabric_job_uve_name,
execution_id=request_params.get('job_execution_id'),
job_start_ts=int(round(time.time() * 1000)),
job_status="STARTING",
percentage_completed=0.0
)

job_execution_uve = FabricJobUve(data=job_execution_data,
sandesh=self._sandesh)
job_execution_uve.send(sandesh=self._sandesh)

if device_list:
for device_id in device_list:
device_fqname = request_params.get(
'device_json').get(device_id).get('device_fqname')
device_fqname = ':'.join(map(str, device_fqname))
prouter_uve_name = device_fqname + ":" + \
fabric_job_uve_name

prouter_job_data = PhysicalRouterJobExecution(
name=prouter_uve_name,
execution_id=request_params.get('job_execution_id'),
job_start_ts=int(round(time.time() * 1000))
)

prouter_job_uve = PhysicalRouterJobUve(
data=prouter_job_data, sandesh=self._sandesh)
prouter_job_uve.send(sandesh=self._sandesh)

start_time = time.time()
signal_var = {
'fabric_name': fabric_job_uve_name ,
'start_time': start_time ,
'exec_id': request_params.get('job_execution_id')
}

# handle process exit signal
signal.signal(signal.SIGCHLD, partial(self.job_mgr_signal_handler, signal_var))

# create job manager subprocess
job_mgr_path = os.path.dirname(
__file__) + "/../job_manager/job_mgr.py"
job_mgr_path = os.path.dirname(__file__) + "/../job_manager/job_mgr.py"
job_process = subprocess.Popen(["python", job_mgr_path, "-i",
json.dumps(request_params)],
cwd="/", close_fds=True)
Expand Down Expand Up @@ -474,8 +620,12 @@ def read_fabric_data(self, request_params):
elif request_params.get('input').get('fabric_fq_name'):
fabric_fq_name = request_params.get('input').get('fabric_fq_name')
else:
err_msg = "Missing fabric details in the job input"
raise cfgm_common.exceptions.HttpError(400, err_msg)
if "device_deletion_template" in request_params.get(
'job_template_fq_name'):
fabric_fq_name = "__DEFAULT__"
else:
err_msg = "Missing fabric details in the job input"
raise cfgm_common.exceptions.HttpError(400, err_msg)
if fabric_fq_name:
fabric_fq_name_str = ':'.join(map(str, fabric_fq_name))
request_params['fabric_fq_name'] = fabric_fq_name_str
Expand All @@ -492,7 +642,7 @@ def read_device_data(self, device_list, request_params):
'physical_router_device_family',
'physical_router_vendor_name',
'physical_router_product_name',
'fabric_back_refs'])
'fabric_refs'])
if not ok:
self.config_object_error(device_id, None,
"physical-router ",
Expand Down Expand Up @@ -521,9 +671,9 @@ def read_device_data(self, device_list, request_params):

device_data.update({device_id: device_json})

fabric_refs = result.get('fabric_back_refs')
fabric_refs = result.get('fabric_refs')
if fabric_refs and len(fabric_refs) > 0:
fabric_fq_name = result.get('fabric_back_refs')[0].get('to')
fabric_fq_name = result.get('fabric_refs')[0].get('to')
fabric_fq_name_str = ':'.join(map(str, fabric_fq_name))
request_params['fabric_fq_name'] = fabric_fq_name_str

Expand Down
8 changes: 0 additions & 8 deletions src/config/fabric-ansible/ansible-playbooks/delete_fabric.yml
Expand Up @@ -149,11 +149,3 @@
vars:
current_index: 3
jl_message: "Successfully deleted fabric: {{ playbook_input.input.fabric_fq_name }}"

- name: insert/update in /tmp/<exec-id> file
set_fact:
write_resp: "{{job_ctx | report_pb_output(output)}}"

- name: insert/update in /tmp/<exec-id> file
set_fact:
write_resp: "{{job_ctx | report_end_of_playbook()}}"
Expand Up @@ -23,11 +23,3 @@
job_ctx: "{{ job_ctx }}"
status: "{{ JOBLOG_STATUS.IN_PROGRESS }}"
message: "Successfully deleted fabric: {{ playbook_input.input.fabric_fq_name }}"

- name: insert/update in /tmp/<exec-id> file
set_fact:
write_resp: "{{job_ctx | report_pb_output(output)}}"

- name: insert/update in /tmp/<exec-id> file
set_fact:
write_resp: "{{job_ctx | report_end_of_playbook()}}"
4 changes: 0 additions & 4 deletions src/config/fabric-ansible/ansible-playbooks/device_import.yml
Expand Up @@ -29,7 +29,3 @@
job_ctx: "{{ job_ctx }}"
status: "{{ JOBLOG_STATUS.IN_PROGRESS }}"
message: "Import Device: {{prouter_name}} succeeded importing {{bulk_create_phy_int_resp_set|length}} PhysicalInterfaces and {{bulk_create_log_int_resp_set|length}} Logical Interfaces"

- name: insert/update in /tmp/<exec-id> file
set_fact:
write_resp: "{{job_ctx | report_end_of_playbook()}}"
Expand Up @@ -33,11 +33,7 @@
post_tasks:
- name: set output parameter
set_fact:
output: "{{ output|combine({'status':'SUCCESS', 'message':'Discover playbook successfully executed'})}}"
output: "{{ output|combine({'status':'SUCCESS', 'message':'Discover playbook successfully executed', 'device_json': discovery_results.device_info})}}"

- name: print output
debug: var=output verbosity=1

- name: insert/update in /tmp/<exec-id> file
set_fact:
write_resp: "{{job_ctx | report_end_of_playbook()}}"
4 changes: 0 additions & 4 deletions src/config/fabric-ansible/ansible-playbooks/discover_role.yml
Expand Up @@ -18,7 +18,3 @@
vars:
current_index: 2
jl_message: "Role Discovery succeeded discovering roles for the prouter {{prouter_name}} in the fabric"

- name: insert/update in /tmp/<exec-id> file
set_fact:
write_resp: "{{job_ctx | report_end_of_playbook()}}"
Expand Up @@ -25,15 +25,3 @@
job_ctx: "{{ job_ctx }}"
status: "{{ JOBLOG_STATUS.IN_PROGRESS }}"
message: "Successfully onboarded fabric: {{ playbook_input.input.fabric_fq_name }}"

- set_fact:
pb_output:
fabric_uuid: "{{output.fabric_uuid}}"

- name: insert/update in /tmp/<exec-id> file
set_fact:
write_resp: "{{job_ctx | report_pb_output(pb_output)}}"

- name: insert/update in /tmp/<exec-id> file
set_fact:
write_resp: "{{job_ctx | report_end_of_playbook()}}"
25 changes: 8 additions & 17 deletions src/config/fabric-ansible/ansible-playbooks/fabric_onboard.yml
Expand Up @@ -5,6 +5,9 @@
connection: local

tasks:
- set_fact:
job_ctx: "{{job_ctx|combine({'total_task_count':1, 'task_weightage_array':[100] })}}"

- name: onboard fabric
set_fact:
output: "{{ job_ctx | onboard_fabric }}"
Expand All @@ -20,20 +23,8 @@
results:
fabric_uuid: "{{ output.fabric_uuid }}"

- name: Update Job log with fabric onboarding job status
job_objectlog:
job_ctx: "{{ job_ctx }}"
status: "{{ JOBLOG_STATUS.IN_PROGRESS }}"
message: "Successfully onboarded fabric: {{ playbook_input.input.fabric_fq_name }}"

- set_fact:
pb_output:
fabric_uuid: "{{output.fabric_uuid}}"

- name: insert/update in /tmp/<exec-id> file
set_fact:
write_resp: "{{job_ctx | report_pb_output(pb_output)}}"

- name: insert/update in /tmp/<exec-id> file
set_fact:
write_resp: "{{job_ctx | report_end_of_playbook()}}"
- name: Update job log with percentage
include_tasks: percentage_update.yml
vars:
current_index: 1
jl_message: "Successfully onboarded fabric: {{ playbook_input.input.fabric_fq_name }}"

0 comments on commit 9b88f58

Please sign in to comment.