From 11d204924d6901a437ab18f4513f3227db99d5fb Mon Sep 17 00:00:00 2001
From: nickyinluo
Date: Fri, 11 Nov 2022 17:14:16 +0800
Subject: [PATCH] fix tke endpoint and node pool e2e case

---
 .../resource_tc_kubernetes_cluster_endpoint_test.go   | 6 +++---
 tencentcloud/resource_tc_kubernetes_node_pool_test.go | 8 ++++----
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/tencentcloud/resource_tc_kubernetes_cluster_endpoint_test.go b/tencentcloud/resource_tc_kubernetes_cluster_endpoint_test.go
index 7f4c0ea92c..3c12b6e4f3 100644
--- a/tencentcloud/resource_tc_kubernetes_cluster_endpoint_test.go
+++ b/tencentcloud/resource_tc_kubernetes_cluster_endpoint_test.go
@@ -63,12 +63,12 @@ func TestAccTencentCloudTkeClusterEndpoint(t *testing.T) {
 }
 
 const testAccTkeClusterEndpointNewSG = `
-resource "tencentcloud_security_group" "new_sg" {
-  name = "test-endpoint"
+data "tencentcloud_security_groups" "new_sg" {
+  name = "keep-tke-ep-sg-fwf8zdkx"
 }
 
 locals {
-  new_sg = tencentcloud_security_group.new_sg.id
+  new_sg = data.tencentcloud_security_groups.new_sg.security_groups.0.security_group_id
 }
 `
 
diff --git a/tencentcloud/resource_tc_kubernetes_node_pool_test.go b/tencentcloud/resource_tc_kubernetes_node_pool_test.go
index 431dc859ca..65e20abb32 100644
--- a/tencentcloud/resource_tc_kubernetes_node_pool_test.go
+++ b/tencentcloud/resource_tc_kubernetes_node_pool_test.go
@@ -94,7 +94,7 @@ func TestAccTencentCloudTkeNodePoolResourceBasic(t *testing.T) {
 					resource.TestCheckResourceAttr(testTkeClusterNodePoolResourceKey, "desired_capacity", "1"),
 					resource.TestCheckResourceAttr(testTkeClusterNodePoolResourceKey, "name", "mynodepool"),
 					resource.TestCheckResourceAttr(testTkeClusterNodePoolResourceKey, "unschedulable", "0"),
-					resource.TestCheckResourceAttr(testTkeClusterNodePoolResourceKey, "scaling_group_name", "basic_group"),
+					resource.TestCheckResourceAttr(testTkeClusterNodePoolResourceKey, "scaling_group_name", "asg_np_test"),
 					resource.TestCheckResourceAttr(testTkeClusterNodePoolResourceKey, "default_cooldown", "400"),
 					resource.TestCheckResourceAttr(testTkeClusterNodePoolResourceKey, "termination_policies.#", "1"),
 					resource.TestCheckResourceAttr(testTkeClusterNodePoolResourceKey, "termination_policies.0", "OLDEST_INSTANCE"),
@@ -129,7 +129,7 @@ func TestAccTencentCloudTkeNodePoolResourceBasic(t *testing.T) {
 					resource.TestCheckResourceAttr(testTkeClusterNodePoolResourceKey, "name", "mynodepoolupdate"),
 					resource.TestCheckResourceAttr(testTkeClusterNodePoolResourceKey, "node_os", defaultTkeOSImageName),
 					resource.TestCheckResourceAttr(testTkeClusterNodePoolResourceKey, "unschedulable", "0"),
-					resource.TestCheckResourceAttr(testTkeClusterNodePoolResourceKey, "scaling_group_name", "basic_group_test"),
+					resource.TestCheckResourceAttr(testTkeClusterNodePoolResourceKey, "scaling_group_name", "asg_np_test_changed"),
 					resource.TestCheckResourceAttr(testTkeClusterNodePoolResourceKey, "default_cooldown", "350"),
 					resource.TestCheckResourceAttr(testTkeClusterNodePoolResourceKey, "termination_policies.#", "1"),
 					resource.TestCheckResourceAttr(testTkeClusterNodePoolResourceKey, "termination_policies.0", "NEWEST_INSTANCE"),
@@ -264,7 +264,7 @@ resource "tencentcloud_kubernetes_node_pool" "np_test" {
   retry_policy             = "INCREMENTAL_INTERVALS"
   desired_capacity         = 1
   enable_auto_scale        = true
-  scaling_group_name       = "basic_group"
+  scaling_group_name       = "asg_np_test"
   default_cooldown         = 400
   termination_policies     = ["OLDEST_INSTANCE"]
   scaling_group_project_id = var.default_project
@@ -329,7 +329,7 @@ resource "tencentcloud_kubernetes_node_pool" "np_test" {
   node_os                  = var.default_img
   scaling_group_project_id = var.default_project
   delete_keep_instance     = false
-  scaling_group_name       = "basic_group_test"
+  scaling_group_name       = "asg_np_test_changed"
   default_cooldown         = 350
   termination_policies     = ["NEWEST_INSTANCE"]
   multi_zone_subnet_policy = "EQUALITY"