116 changes: 116 additions & 0 deletions test/unit/unit_test.go
@@ -305,3 +305,119 @@ func dummyRequiredVariables() map[string]interface{} {
"location": "eastus",
}
}

// TestScaleDownDelayAfterDeleteNotSetShouldUseCustomScanInterval validates that when
// auto_scaler_profile_scale_down_delay_after_delete is not set, it inherits from
// auto_scaler_profile_scan_interval even when scan interval is customized.
func TestScaleDownDelayAfterDeleteNotSetShouldUseCustomScanInterval(t *testing.T) {
t.Parallel()
vars := dummyRequiredVariables()
// Customize scan interval to verify fallback uses this value
vars["auto_scaler_profile_scan_interval"] = "30s"

test_helper.RunUnitTest(t, "../../", "unit-test-fixture", terraform.Options{
Upgrade: false,
Vars: vars,
}, func(t *testing.T, output test_helper.TerraformOutput) {
t.Helper()
scaleDownDelayAfterDelete, ok := output["auto_scaler_profile_scale_down_delay_after_delete"].(string)
assert.True(t, ok, "expected output auto_scaler_profile_scale_down_delay_after_delete to be a string")
scanInterval, ok := output["auto_scaler_profile_scan_interval"].(string)
assert.True(t, ok, "expected output auto_scaler_profile_scan_interval to be a string")
assert.Equal(t, "30s", scanInterval)
assert.Equal(t, scanInterval, scaleDownDelayAfterDelete)
})
}

// TestScaleDownDelayAfterDeleteOverridesScanInterval ensures an explicitly set value is respected
// even when auto_scaler_profile_scan_interval is also set.
func TestScaleDownDelayAfterDeleteOverridesScanInterval(t *testing.T) {
t.Parallel()
vars := dummyRequiredVariables()
vars["auto_scaler_profile_scan_interval"] = "30s"
vars["auto_scaler_profile_scale_down_delay_after_delete"] = "10s"

test_helper.RunUnitTest(t, "../../", "unit-test-fixture", terraform.Options{
Upgrade: false,
Vars: vars,
}, func(t *testing.T, output test_helper.TerraformOutput) {
t.Helper()
val, ok := output["auto_scaler_profile_scale_down_delay_after_delete"].(string)
assert.True(t, ok)
assert.Equal(t, "10s", val)
// Also confirm scan interval remains the configured value
scan, ok := output["auto_scaler_profile_scan_interval"].(string)
assert.True(t, ok)
assert.Equal(t, "30s", scan)
})
}

// TestDisabledLogAnalyticsWithProvidedSolutionShouldNotCreate validates that even if a solution map
// is provided, disabling the workspace prevents solution creation.
func TestDisabledLogAnalyticsWithProvidedSolutionShouldNotCreate(t *testing.T) {
t.Parallel()
vars := dummyRequiredVariables()
vars["log_analytics_workspace_enabled"] = false
vars["log_analytics_solution"] = map[string]interface{}{
"id": "dummySolutionId",
}

test_helper.RunUnitTest(t, "../../", "unit-test-fixture", terraform.Options{
Upgrade: false,
Vars: vars,
}, func(t *testing.T, output test_helper.TerraformOutput) {
t.Helper()
createSolution, ok := output["create_analytics_solution"].(bool)
assert.True(t, ok)
assert.False(t, createSolution, "solution must not be created when workspace is disabled")
createWorkspace, ok := output["create_analytics_workspace"].(bool)
assert.True(t, ok)
assert.False(t, createWorkspace, "workspace must not be created when disabled")
})
}

// Test_LogAnalyticsWorkspaceWithEmptyLocation_ShouldQueryDS ensures that an explicitly empty location
// triggers the same behavior as not specifying a location at all.
func Test_LogAnalyticsWorkspaceWithEmptyLocation_ShouldQueryDS(t *testing.T) {
vars := dummyRequiredVariables()
vars["log_analytics_workspace_enabled"] = true
vars["log_analytics_workspace"] = map[string]any{
"id": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1",
"name": "dummyName",
"location": "",
}

varFile := test_helper.VarsToFile(t, vars)
defer func() { _ = os.Remove(varFile) }()

test_helper.RunUnitTest(t, "../../", "unit-test-fixture", terraform.Options{
Upgrade: false,
VarFiles: []string{varFile},
}, func(t *testing.T, output test_helper.TerraformOutput) {
t.Helper()
r, ok := output["query_datasource_for_log_analytics_workspace_location"].(bool)
assert.True(t, ok, "expected output query_datasource_for_log_analytics_workspace_location to be a bool")
assert.True(t, r, "empty location should require querying the data source")
})
}

// TestInvalidAutomaticChannelUpgradeValue guards against unexpected inputs by ensuring
// the check is not considered passing for unknown channels.
func TestInvalidAutomaticChannelUpgradeValue(t *testing.T) {
t.Parallel()
vars := map[string]interface{}{
"prefix": "foo",
"resource_group_name": "bar",
"location": "eastus",
"automatic_channel_upgrade": "unknown", // defensive test for invalid input
}

test_helper.RunUnitTest(t, "../../", "unit-test-fixture", terraform.Options{
Upgrade: false,
Vars: vars,
}, func(t *testing.T, output test_helper.TerraformOutput) {
t.Helper()
upgrades, ok := output["automatic_channel_upgrade_check"].(bool)
assert.True(t, ok)
assert.False(t, upgrades, "unknown automatic_channel_upgrade should not pass the check")
})
}
Comment on lines +403 to +423

⚠️ Potential issue

🧩 Analysis chain

Consider verifying the fixture handles invalid values gracefully.

The test assumes that the fixture properly handles the "unknown" value for automatic_channel_upgrade. If the fixture doesn't validate this input, the test might pass for the wrong reasons (e.g., due to a panic or error rather than deliberate validation).


🏁 Script executed:

#!/bin/bash
# Check if the fixture validates automatic_channel_upgrade values
fd -e tf -e hcl . --exec grep -l "automatic_channel_upgrade" | head -5 | xargs grep -A5 -B5 "automatic_channel_upgrade"

Length of output: 6973


Fix test: fixture rejects unknown automatic_channel_upgrade — update test or input

unit-test-fixture/variables.tf contains a validation that only allows "patch", "stable", "rapid", "node-image", so passing "unknown" will trigger Terraform variable validation. Update TestInvalidAutomaticChannelUpgradeValue (test/unit/unit_test.go lines 403–423) to either expect a validation error or use an allowed value.
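
A minimal sketch of the first option (expecting the validation error). It assumes the test lives in the same package as the others, so dummyRequiredVariables and the existing terratest/testify imports are available; the fixture path and the asserted error substring are assumptions, not verified here:

func TestInvalidAutomaticChannelUpgradeValueIsRejected(t *testing.T) {
	t.Parallel()
	vars := dummyRequiredVariables()
	vars["automatic_channel_upgrade"] = "unknown"

	// InitAndPlanE returns the plan output and any error instead of failing
	// the test, so we can assert that variable validation rejects the value.
	_, err := terraform.InitAndPlanE(t, &terraform.Options{
		TerraformDir: "../../unit-test-fixture", // assumed fixture location
		Upgrade:      false,
		Vars:         vars,
	})
	if assert.Error(t, err, "plan should fail variable validation for an unknown channel") {
		// Terraform validation errors typically name the offending variable.
		assert.Contains(t, err.Error(), "automatic_channel_upgrade")
	}
}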

🤖 Prompt for AI Agents
test/unit/unit_test.go lines 403-423: the test currently sets
automatic_channel_upgrade to "unknown" which the fixture's variables.tf
validation rejects; either change the input to one of the allowed values
("patch", "stable", "rapid", "node-image") and update the assertions to match
the expected output for that allowed value, or modify the test to assert that
Terraform variable validation fails (i.e., expect and assert a validation error
from the test helper instead of treating the run as successful).

16 changes: 4 additions & 12 deletions variables.tf
@@ -479,23 +479,15 @@ variable "data_collection_settings" {
EOT
}

variable "default_node_pool_fips_enabled" {
type = bool
default = null
description = " (Optional) Should the nodes in this Node Pool have Federal Information Processing Standard enabled? Changing this forces a new resource to be created."
}


variable "disk_encryption_set_id" {
type = string
default = null
description = "(Optional) The ID of the Disk Encryption Set which should be used for the Nodes and Volumes. More information [can be found in the documentation](https://docs.microsoft.com/azure/aks/azure-disk-customer-managed-keys). Changing this forces a new resource to be created."
}

variable "dns_prefix_private_cluster" {
type = string
default = null
description = "(Optional) Specifies the DNS prefix to use with private clusters. Only one of `var.prefix,var.dns_prefix_private_cluster` can be specified. Changing this forces a new resource to be created."
}


variable "ebpf_data_plane" {
type = string
@@ -1353,7 +1345,7 @@ variable "prefix" {
description = "(Optional) The prefix for the resources created in the specified Azure Resource Group. Omitting this variable requires both `var.cluster_log_analytics_workspace_name` and `var.cluster_name` have been set. Only one of `var.prefix,var.dns_prefix_private_cluster` can be specified."
}

variable "private_cluster_enabled" {
variable "why_private_cluster_enabled" {
type = bool
default = false
description = "If true cluster API server will be exposed only on internal IP address and available only in cluster vnet."
@@ -1573,4 +1565,4 @@ variable "upgrade_override" {
`force_upgrade_enabled` - (Required) Whether to force upgrade the cluster. Possible values are `true` or `false`.
`effective_until` - (Optional) Specifies the duration, in RFC 3339 format (e.g., `2025-10-01T13:00:00Z`), the upgrade_override values are effective. This field must be set for the `upgrade_override` values to take effect. The date-time must be within the next 30 days.
EOT
}
}