Skip to content

Commit

Permalink
Merge pull request #1199 from shreyabiradar07/test_list_metadata
Browse files Browse the repository at this point in the history
Add testcases for GET /dsmetadata API
  • Loading branch information
dinogun committed Jun 18, 2024
2 parents 5772a27 + da34086 commit cfadfe0
Show file tree
Hide file tree
Showing 12 changed files with 893 additions and 5 deletions.
35 changes: 34 additions & 1 deletion tests/scripts/helpers/kruize.py
Original file line number Diff line number Diff line change
Expand Up @@ -330,4 +330,37 @@ def delete_metadata(input_json_file, invalid_header=False):

print(response)
print("Response status code = ", response.status_code)
return response
return response


# Description: This function obtains the metadata from Kruize Autotune using GET dsmetadata API
# Input Parameters: datasource name, cluster name, namespace, verbose - flag indicating granularity of data to be listed
# Description: This function obtains the metadata from Kruize Autotune using GET dsmetadata API
# Input Parameters: datasource - datasource name to filter on
#                   cluster_name - cluster name to filter on
#                   namespace - namespace to filter on
#                   verbose - flag indicating granularity of data to be listed
#                   logging - when True, print the full response body
# Returns: the requests.Response object from the GET /dsmetadata call
def list_metadata(datasource=None, cluster_name=None, namespace=None, verbose=None, logging=True):
    # local import so this helper stays self-contained
    from urllib.parse import urlencode

    print("\nListing the metadata...")

    # Only include the query parameters the caller actually supplied
    query_params = {}

    if datasource is not None:
        query_params['datasource'] = datasource
    if cluster_name is not None:
        query_params['cluster_name'] = cluster_name
    if namespace is not None:
        query_params['namespace'] = namespace
    if verbose is not None:
        query_params['verbose'] = verbose

    # urlencode percent-escapes special characters; the previous manual
    # f-string join produced an invalid URL for values containing '&', '=', etc.
    query_string = urlencode(query_params)

    url = URL + "/dsmetadata"
    if query_string:
        url += "?" + query_string
    print("URL = ", url)
    print("PARAMS = ", query_params)
    response = requests.get(url)

    print("Response status code = ", response.status_code)
    if logging:
        print("\n************************************************************")
        print(response.text)
        print("\n************************************************************")
    return response
2 changes: 1 addition & 1 deletion tests/scripts/helpers/list_datasources_json_schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
"format": "uri"
}
},
"required": ["name", "provider", "serviceName", "namespace", "url"]
"required": ["name", "provider", "url"]
}
}
},
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
# JSON schema for GET /dsmetadata responses filtered by cluster name with
# verbose disabled: datasource -> clusters -> namespaces, with no workload or
# container detail. Map keys (datasource/cluster/namespace names) are matched
# via patternProperties.
list_metadata_json_cluster_name_without_verbose_schema = {
    "type": "object",
    "properties": {
        "datasources": {
            "type": "object",
            # one entry per datasource, keyed by datasource name
            "patternProperties": {
                "^[a-zA-Z0-9_-]+$": {
                    "type": "object",
                    "properties": {
                        "datasource_name": {
                            "type": "string",
                            "pattern": "^[a-zA-Z0-9_-]+$"
                        },
                        "clusters": {
                            "type": "object",
                            # one entry per cluster, keyed by cluster name
                            "patternProperties": {
                                "^[a-zA-Z0-9_-]+$": {
                                    "type": "object",
                                    "properties": {
                                        "cluster_name": {
                                            "type": "string",
                                            "pattern": "^[a-zA-Z0-9_-]+$"
                                        },
                                        "namespaces": {
                                            "type": "object",
                                            # one entry per namespace, keyed by namespace name
                                            "patternProperties": {
                                                "^[a-zA-Z0-9_-]+$": {
                                                    "type": "object",
                                                    "properties": {
                                                        "namespace": {
                                                            "type": "string",
                                                            "pattern": "^[a-zA-Z0-9_-]+$"
                                                        }
                                                    },
                                                    "required": ["namespace"]
                                                }
                                            }
                                        }
                                    },
                                    # namespaces must be present at this granularity
                                    "required": ["cluster_name", "namespaces"]
                                }
                            }
                        }
                    },
                    "required": ["datasource_name", "clusters"]
                }
            }
        }
    },
    "required": ["datasources"]
}
36 changes: 36 additions & 0 deletions tests/scripts/helpers/list_metadata_json_schema.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
# Default JSON schema for GET /dsmetadata responses (no verbose flag):
# datasource -> clusters only; namespace/workload/container levels are not
# required at this granularity.
list_metadata_json_schema = {
    "type": "object",
    "properties": {
        "datasources": {
            "type": "object",
            # one entry per datasource, keyed by datasource name
            "patternProperties": {
                "^[a-zA-Z0-9_-]+$": {
                    "type": "object",
                    "properties": {
                        "datasource_name": {
                            "type": "string",
                            "pattern": "^[a-zA-Z0-9_-]+$"
                        },
                        "clusters": {
                            "type": "object",
                            # one entry per cluster, keyed by cluster name
                            "patternProperties": {
                                "^[a-zA-Z0-9_-]+$": {
                                    "type": "object",
                                    "properties": {
                                        "cluster_name": {
                                            "type": "string",
                                            "pattern": "^[a-zA-Z0-9_-]+$"
                                        }
                                    },
                                    "required": ["cluster_name"]
                                }
                            }
                        }
                    },
                    "required": ["datasource_name", "clusters"]
                }
            }
        }
    },
    "required": ["datasources"]
}
68 changes: 68 additions & 0 deletions tests/scripts/helpers/list_metadata_json_validate.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,68 @@
"""
Copyright (c) 2024, 2024 Red Hat, IBM Corporation and others.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import json
import jsonschema
from jsonschema import FormatChecker
from jsonschema.exceptions import ValidationError
from helpers.import_metadata_json_schema import import_metadata_json_schema

# Substrings jsonschema puts in ValidationError messages for type mismatches;
# used to detect null/empty required values
JSON_NULL_VALUES = ("is not of type 'string'", "is not of type 'integer'", "is not of type 'number'")
# Suffix appended to "Parameters" when a required value is empty or null
VALUE_MISSING = " cannot be empty or null!"

def validate_list_metadata_json(list_metadata_json, json_schema):
    """
    Validate a GET /dsmetadata response against the given JSON schema.

    Parameters:
        list_metadata_json: parsed JSON response to validate
        json_schema: the jsonschema dict to validate against

    Returns:
        - a list of jsonschema ValidationError objects (plus one custom error
          built from the empty-value checks) when schema validation fails, or
        - the empty-value check message string ("" when everything is valid).
    """
    errorMsg = ""
    try:
        # create a validator with the format checker
        print("Validating json against the json schema...")
        validator = jsonschema.Draft7Validator(json_schema, format_checker=FormatChecker())

        # validate the JSON data against the schema; collect all errors
        # instead of stopping at the first one
        errors = list(validator.iter_errors(list_metadata_json))
        print("Validating json against the json schema...done")
        errorMsg = validate_list_metadata_json_values(list_metadata_json)

        if errors:
            custom_err = ValidationError(errorMsg)
            errors.append(custom_err)
            return errors
        else:
            return errorMsg
    except ValidationError as err:
        print("Received a ValidationError")

        # Check if the exception is due to empty or null required parameters and prepare the response accordingly
        if any(word in err.message for word in JSON_NULL_VALUES):
            errorMsg = "Parameters" + VALUE_MISSING
            return errorMsg
        # Modify the error response in case of additional properties error:
        # keep only the text before the parenthesized schema details
        elif '(' in str(err.message):
            errorMsg = str(err.message).split('(')
            return errorMsg[0]
        else:
            return err.message

def validate_list_metadata_json_values(metadata):
    """Return a comma-joined error string for every top-level metadata value
    whose string form is empty or whitespace-only; "" when all values are set."""
    error_parts = []

    for value in metadata.values():
        # A value counts as missing when its string form is empty or blank
        text = str(value)
        if not text or not text.strip():
            error_parts.append("Parameters" + VALUE_MISSING)

    return ",".join(error_parts)

88 changes: 88 additions & 0 deletions tests/scripts/helpers/list_metadata_json_verbose_true_schema.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,88 @@
# JSON schema for GET /dsmetadata responses with verbose=true: the full
# hierarchy datasource -> clusters -> namespaces -> workloads -> containers.
# Map keys at every level are matched via patternProperties.
list_metadata_json_verbose_true_schema = {
    "type": "object",
    "properties": {
        "datasources": {
            "type": "object",
            # one entry per datasource, keyed by datasource name
            "patternProperties": {
                "^[a-zA-Z0-9_-]+$": {
                    "type": "object",
                    "properties": {
                        "datasource_name": {
                            "type": "string",
                            "pattern": "^[a-zA-Z0-9_-]+$"
                        },
                        "clusters": {
                            "type": "object",
                            # one entry per cluster, keyed by cluster name
                            "patternProperties": {
                                "^[a-zA-Z0-9_-]+$": {
                                    "type": "object",
                                    "properties": {
                                        "cluster_name": {
                                            "type": "string",
                                            "pattern": "^[a-zA-Z0-9_-]+$"
                                        },
                                        "namespaces": {
                                            "type": "object",
                                            # one entry per namespace, keyed by namespace name
                                            "patternProperties": {
                                                "^[a-zA-Z0-9_-]+$": {
                                                    "type": "object",
                                                    "properties": {
                                                        "namespace": {
                                                            "type": "string",
                                                            "pattern": "^[a-zA-Z0-9_-]+$"
                                                        },
                                                        "workloads": {
                                                            "type": "object",
                                                            # one entry per workload, keyed by workload name
                                                            "patternProperties": {
                                                                "^[a-zA-Z0-9_-]+$": {
                                                                    "type": "object",
                                                                    "properties": {
                                                                        "workload_name": {
                                                                            "type": "string",
                                                                            "pattern": "^[a-zA-Z0-9_-]+$"
                                                                        },
                                                                        "workload_type": {
                                                                            "type": "string"
                                                                        },
                                                                        "containers": {
                                                                            "type": "object",
                                                                            # one entry per container, keyed by container name
                                                                            "patternProperties": {
                                                                                "^[a-zA-Z0-9_-]+$": {
                                                                                    "type": "object",
                                                                                    "properties": {
                                                                                        "container_name": {
                                                                                            "type": "string",
                                                                                            "pattern": "^[a-zA-Z0-9_-]+$"
                                                                                        },
                                                                                        "container_image_name": {
                                                                                            "type": "string"
                                                                                        }
                                                                                    },
                                                                                    "required": ["container_name",
                                                                                                 "container_image_name"]
                                                                                }
                                                                            }
                                                                        }
                                                                    },
                                                                    "required": ["workload_name", "workload_type", "containers"]
                                                                }
                                                            }
                                                        }
                                                    },
                                                    # workloads not required: a namespace may have none
                                                    "required": ["namespace"]
                                                }
                                            }
                                        }
                                    },
                                    "required": ["cluster_name", "namespaces"]
                                }
                            }
                        }
                    },
                    "required": ["datasource_name", "clusters"]
                }
            }
        }
    },
    "required": ["datasources"]
}
73 changes: 72 additions & 1 deletion tests/scripts/helpers/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import time
import math
from datetime import datetime, timedelta
from kubernetes import client, config

SUCCESS_STATUS_CODE = 201
SUCCESS_200_STATUS_CODE = 200
Expand Down Expand Up @@ -49,7 +50,11 @@
COST_RECOMMENDATIONS_AVAILABLE = "Cost Recommendations Available"
PERFORMANCE_RECOMMENDATIONS_AVAILABLE = "Performance Recommendations Available"
CONTAINER_AND_EXPERIMENT_NAME = " for container : %s for experiment: %s.]"
LIST_DATASOURCES_ERROR_MSG = "Given datasource name - \" %s \" either does not exist or is not valid"
LIST_DATASOURCES_ERROR_MSG = "Given datasource name - %s either does not exist or is not valid"
LIST_METADATA_DATASOURCE_NAME_ERROR_MSG = "Metadata for a given datasource name - %s either does not exist or is not valid"
LIST_METADATA_ERROR_MSG = ("Metadata for a given datasource - %s, cluster name - %s, namespace - %s "
"either does not exist or is not valid")
LIST_METADATA_DATASOURCE_NAME_CLUSTER_NAME_ERROR_MSG = "Metadata for a given datasource name - %s, cluster_name - %s either does not exist or is not valid"

# Kruize Recommendations Notification codes
NOTIFICATION_CODE_FOR_RECOMMENDATIONS_AVAILABLE = "111000"
Expand Down Expand Up @@ -864,6 +869,72 @@ def validate_recommendation_for_cpu_mem_optimised(recommendations: dict, current
assert recommendations["recommendation_engines"][profile]["config"]["limits"]["memory"]["amount"] == current["limits"]["memory"]["amount"]


def validate_list_metadata_parameters(import_metadata_json, list_metadata_json, cluster_name=None, namespace=None):
    """
    Cross-check a list metadata (GET /dsmetadata) response against the
    metadata originally imported.

    Parameters:
        import_metadata_json: dict holding the imported 'datasource_name'
        list_metadata_json: parsed GET /dsmetadata response
        cluster_name: optional cluster name expected in the response
        namespace: optional namespace expected in the response

    Returns:
        False when the response does not contain exactly one datasource;
        otherwise validates via assertions (raising AssertionError on
        mismatch) and returns None.
    """
    datasources = list_metadata_json.get('datasources', {})

    # Exactly one datasource is expected in the response
    if len(datasources) != 1:
        return False

    # Loop through the datasources dictionary
    for key, value in datasources.items():
        assert import_metadata_json['datasource_name'] == value.get('datasource_name')

        if cluster_name is not None:
            # Extract clusters from the current datasource
            clusters = value.get('clusters', {})

            for clusters_key, clusters_value in clusters.items():
                assert cluster_name == clusters_value.get('cluster_name'), f"Invalid cluster name: {cluster_name}"

                # If namespace is provided, perform namespace validation.
                # Use the current cluster entry directly rather than indexing
                # clusters[cluster_name], which raised KeyError whenever the
                # dict key differed from the cluster_name field.
                if namespace is not None:
                    namespaces = clusters_value.get('namespaces', {})

                    assert namespace in [ns.get('namespace') for ns in namespaces.values()], f"Invalid namespace: {namespace}"


def create_namespace(namespace_name):
    """Create the given Kubernetes namespace; an already-existing namespace
    (HTTP 409) is reported as such, and other API errors are printed."""
    # Load kube config
    config.load_kube_config()

    # Build the namespace manifest and an API client
    ns_body = client.V1Namespace(
        metadata=client.V1ObjectMeta(name=namespace_name)
    )
    core_api = client.CoreV1Api()

    # Create the namespace, tolerating the "already exists" conflict
    try:
        core_api.create_namespace(ns_body)
        print(f"Namespace '{namespace_name}' created successfully.")
    except client.exceptions.ApiException as e:
        if e.status != 409:
            print(f"Error creating namespace: {e}")
        else:
            print(f"Namespace '{namespace_name}' already exists.")


def delete_namespace(namespace_name):
    """Delete the given Kubernetes namespace; a missing namespace (HTTP 404)
    is reported rather than raised, and other API errors are printed."""
    # Load kube config
    config.load_kube_config()

    core_api = client.CoreV1Api()

    # Delete the namespace, tolerating the "not found" case
    try:
        core_api.delete_namespace(name=namespace_name)
        print(f"Namespace '{namespace_name}' deleted successfully.")
    except client.exceptions.ApiException as e:
        if e.status != 404:
            print(f"Exception deleting namespace: {e}")
        else:
            print(f"Namespace '{namespace_name}' not found.")

# validate duration_in_hours decimal precision
def validate_duration_in_hours_decimal_precision(duration_in_hours):
"""
Expand Down
Loading

0 comments on commit cfadfe0

Please sign in to comment.