Skip to content

Commit

Permalink
Few sanity fixes and changes in K8s.
Browse files Browse the repository at this point in the history
The description of the changes is mentioned in the associated bug.

Change-Id: Ie352feedd8bef002673cb2d1a269dadf85470765
Closes-bug: #1776460
  • Loading branch information
pulkitt committed Jun 12, 2018
1 parent c15ebbf commit f117253
Show file tree
Hide file tree
Showing 4 changed files with 14 additions and 8 deletions.
2 changes: 1 addition & 1 deletion fixtures/k8s/deployment.py
Expand Up @@ -90,7 +90,7 @@ def delete(self):
return self.k8s_client.delete_deployment(self.namespace, self.name)
# end delete

@retry(delay=5, tries=40)
@retry(delay=5, tries=60)
def verify_deployment_in_k8s(self):
self.read()
self.logger.debug('Replicas: %s, Available: %s' %(
Expand Down
Expand Up @@ -7,7 +7,7 @@
from tcutils.util import get_random_name
import test
import time

from tcutils.contrail_status_check import ContrailStatusChecker

class TestFabricSNATRestarts(BaseK8sTest):

Expand Down Expand Up @@ -207,7 +207,6 @@ def test_snat_with_kubelet_restart_on_master(self):
client3, client4)
#end test_snat_with_kubelet_restart_on_master

@test.attr(type=['k8s_sanity'])
@preposttest_wrapper
def test_snat_with_docker_restart_on_master(self):
"""
Expand All @@ -222,7 +221,10 @@ def test_snat_with_docker_restart_on_master(self):
client3, client4)
self.inputs.restart_service(service_name = "docker",
host_ips = [self.inputs.k8s_master_ip])
time.sleep(60) # Wait timer for all contrail service to come up.
time.sleep(30)
cluster_status, error_nodes = ContrailStatusChecker(self.inputs).wait_till_contrail_cluster_stable()
assert cluster_status, 'All nodes and services not up. Failure nodes are: %s' % (
error_nodes)
self.verify_ping_between_pods_across_namespaces_and_public_network(client1, client2,
client3, client4)
#end test_snat_with_docker_restart
Expand Down
11 changes: 8 additions & 3 deletions serial_scripts/k8s_scripts/test_ingress.py
Expand Up @@ -268,15 +268,14 @@ def test_ingress_fanout_with_vrouter_agent_restart(self):
assert self.validate_nginx_lb([pod3, pod4], ingress.cluster_ip,
test_pod=pod5, path=path2, host=host2)

@test.attr(type=['k8s_sanity'])
@skip_because(mx_gw = False)
@preposttest_wrapper
def test_ingress_fanout_with_node_reboot(self):
'''Creating a fanout ingress with 2 different host having 2 different path along with a default backend
This host are supported by repective service. Service has required backend pod with required path
mentioned in ingress rule. From the local node, do a wget on the ingress public ip
Validate that service and its loadbalancing works.
Reboot the nodes
Reboot the compute nodes
Re verify the loadbalancing works after the nodes reboot
'''
app1 = 'http_test1'
Expand Down Expand Up @@ -354,7 +353,13 @@ def test_ingress_fanout_with_node_reboot(self):
# Now validate ingress from public network
assert self.validate_nginx_lb([pod1, pod2], ingress.external_ips[0], path=path1, host=host1)
assert self.validate_nginx_lb([pod3, pod4], ingress.external_ips[0], path=path2, host=host2)
self.restart_vrouter_agent()
for node in self.inputs.k8s_slave_ips:
self.inputs.reboot(node)
time.sleep(60) # Noticed that services are not ready after reboot. Thus, giving some time for service
# like docker, kubelet and contrail services to start
cluster_status, error_nodes = ContrailStatusChecker(self.inputs).wait_till_contrail_cluster_stable(
nodes=self.inputs.compute_ips, roles="vrouter")
assert cluster_status, 'Cluster is not stable after restart'
assert self.validate_nginx_lb([pod1, pod2], ingress.cluster_ip,
test_pod=pod5, path=path1, host=host1)
assert self.validate_nginx_lb([pod3, pod4], ingress.cluster_ip,
Expand Down
1 change: 0 additions & 1 deletion serial_scripts/k8s_scripts/test_policy.py
Expand Up @@ -664,7 +664,6 @@ def test_policy_kubelet_restart_on_slave(self):
self.verify_policy_post_modification_common(pod_list_ns1, pod_list_ns2, pod_list_ns3)
#end test_policy_kubelet_restart_on_slave

@test.attr(type=['k8s_sanity'])
@preposttest_wrapper
def test_policy_kubelet_restart_on_master(self):
"""
Expand Down

0 comments on commit f117253

Please sign in to comment.