diff --git a/config/main.py b/config/main.py index a068a1b7f4..8f3b7245bd 100644 --- a/config/main.py +++ b/config/main.py @@ -19,7 +19,7 @@ from jsonpatch import JsonPatchConflict from jsonpointer import JsonPointerException from collections import OrderedDict -from generic_config_updater.generic_updater import GenericUpdater, ConfigFormat +from generic_config_updater.generic_updater import GenericUpdater, ConfigFormat, extract_scope from minigraph import parse_device_desc_xml, minigraph_encoder from natsort import natsorted from portconfig import get_child_ports @@ -1152,6 +1152,24 @@ def validate_gre_type(ctx, _, value): return gre_type_value except ValueError: raise click.UsageError("{} is not a valid GRE type".format(value)) + +# Function to apply patch for a single ASIC. +def apply_patch_for_scope(scope_changes, results, config_format, verbose, dry_run, ignore_non_yang_tables, ignore_path): + scope, changes = scope_changes + # Replace localhost to DEFAULT_NAMESPACE which is db definition of Host + if scope.lower() == "localhost" or scope == "": + scope = multi_asic.DEFAULT_NAMESPACE + + scope_for_log = scope if scope else "localhost" + try: + # Call apply_patch with the ASIC-specific changes and predefined parameters + GenericUpdater(namespace=scope).apply_patch(jsonpatch.JsonPatch(changes), config_format, verbose, dry_run, ignore_non_yang_tables, ignore_path) + results[scope_for_log] = {"success": True, "message": "Success"} + log.log_notice(f"'apply-patch' executed successfully for {scope_for_log} by {changes}") + except Exception as e: + results[scope_for_log] = {"success": False, "message": str(e)} + log.log_error(f"'apply-patch' executed failed for {scope_for_log} by {changes} due to {str(e)}") + # This is our main entrypoint - the main 'config' command @click.group(cls=clicommon.AbbreviationGroup, context_settings=CONTEXT_SETTINGS) @@ -1357,12 +1375,47 @@ def apply_patch(ctx, patch_file_path, format, dry_run, ignore_non_yang_tables, i patch_as_json = 
json.loads(text) patch = jsonpatch.JsonPatch(patch_as_json) + results = {} config_format = ConfigFormat[format.upper()] - GenericUpdater().apply_patch(patch, config_format, verbose, dry_run, ignore_non_yang_tables, ignore_path) + # Initialize a dictionary to hold changes categorized by scope + changes_by_scope = {} + + # Iterate over each change in the JSON Patch + for change in patch: + scope, modified_path = extract_scope(change["path"]) + + # Modify the 'path' in the change to remove the scope + change["path"] = modified_path + + # Check if the scope is already in our dictionary, if not, initialize it + if scope not in changes_by_scope: + changes_by_scope[scope] = [] + # Add the modified change to the appropriate list based on scope + changes_by_scope[scope].append(change) + + # Empty case to force validate YANG model. + if not changes_by_scope: + asic_list = [multi_asic.DEFAULT_NAMESPACE] + asic_list.extend(multi_asic.get_namespace_list()) + for asic in asic_list: + changes_by_scope[asic] = [] + + # Apply changes for each scope + for scope_changes in changes_by_scope.items(): + apply_patch_for_scope(scope_changes, results, config_format, verbose, dry_run, ignore_non_yang_tables, ignore_path) + + # Check if any updates failed + failures = [scope for scope, result in results.items() if not result['success']] + + if failures: + failure_messages = '\n'.join([f"- {failed_scope}: {results[failed_scope]['message']}" for failed_scope in failures]) + raise Exception(f"Failed to apply patch on the following scopes:\n{failure_messages}") + + log.log_notice(f"Patch applied successfully for {patch}.") click.secho("Patch applied successfully.", fg="cyan", underline=True) except Exception as ex: - click.secho("Failed to apply patch", fg="red", underline=True, err=True) + click.secho("Failed to apply patch due to: {}".format(ex), fg="red", underline=True, err=True) ctx.fail(ex) @config.command() @@ -2078,7 +2131,7 @@ def synchronous_mode(sync_mode): if ADHOC_VALIDATION: if 
sync_mode != 'enable' and sync_mode != 'disable': raise click.BadParameter("Error: Invalid argument %s, expect either enable or disable" % sync_mode) - + config_db = ValidatedConfigDBConnector(ConfigDBConnector()) config_db.connect() try: @@ -2086,7 +2139,7 @@ def synchronous_mode(sync_mode): except ValueError as e: ctx = click.get_current_context() ctx.fail("Error: Invalid argument %s, expect either enable or disable" % sync_mode) - + click.echo("""Wrote %s synchronous mode into CONFIG_DB, swss restart required to apply the configuration: \n Option 1. config save -y \n config reload -y \n @@ -2152,7 +2205,7 @@ def portchannel(db, ctx, namespace): @click.pass_context def add_portchannel(ctx, portchannel_name, min_links, fallback, fast_rate): """Add port channel""" - + fvs = { 'admin_status': 'up', 'mtu': '9100', @@ -2164,7 +2217,7 @@ def add_portchannel(ctx, portchannel_name, min_links, fallback, fast_rate): fvs['min_links'] = str(min_links) if fallback != 'false': fvs['fallback'] = 'true' - + db = ValidatedConfigDBConnector(ctx.obj['db']) if ADHOC_VALIDATION: if is_portchannel_name_valid(portchannel_name) != True: @@ -2172,18 +2225,18 @@ def add_portchannel(ctx, portchannel_name, min_links, fallback, fast_rate): .format(portchannel_name, CFG_PORTCHANNEL_PREFIX, CFG_PORTCHANNEL_NO)) if is_portchannel_present_in_db(db, portchannel_name): ctx.fail("{} already exists!".format(portchannel_name)) # TODO: MISSING CONSTRAINT IN YANG MODEL - + try: db.set_entry('PORTCHANNEL', portchannel_name, fvs) except ValueError: ctx.fail("{} is invalid!, name should have prefix '{}' and suffix '{}'".format(portchannel_name, CFG_PORTCHANNEL_PREFIX, CFG_PORTCHANNEL_NO)) - + @portchannel.command('del') @click.argument('portchannel_name', metavar='', required=True) @click.pass_context def remove_portchannel(ctx, portchannel_name): """Remove port channel""" - + db = ValidatedConfigDBConnector(ctx.obj['db']) if ADHOC_VALIDATION: if is_portchannel_name_valid(portchannel_name) != True: @@ 
-2201,7 +2254,7 @@ def remove_portchannel(ctx, portchannel_name): if len([(k, v) for k, v in db.get_table('PORTCHANNEL_MEMBER') if k == portchannel_name]) != 0: # TODO: MISSING CONSTRAINT IN YANG MODEL ctx.fail("Error: Portchannel {} contains members. Remove members before deleting Portchannel!".format(portchannel_name)) - + try: db.set_entry('PORTCHANNEL', portchannel_name, None) except JsonPatchConflict: @@ -2219,7 +2272,7 @@ def portchannel_member(ctx): def add_portchannel_member(ctx, portchannel_name, port_name): """Add member to port channel""" db = ValidatedConfigDBConnector(ctx.obj['db']) - + if ADHOC_VALIDATION: if clicommon.is_port_mirror_dst_port(db, port_name): ctx.fail("{} is configured as mirror destination port".format(port_name)) # TODO: MISSING CONSTRAINT IN YANG MODEL @@ -2236,7 +2289,7 @@ def add_portchannel_member(ctx, portchannel_name, port_name): # Dont proceed if the port channel does not exist if is_portchannel_present_in_db(db, portchannel_name) is False: ctx.fail("{} is not present.".format(portchannel_name)) - + # Don't allow a port to be member of port channel if it is configured with an IP address for key,value in db.get_table('INTERFACE').items(): if type(key) == tuple: @@ -2274,7 +2327,7 @@ def add_portchannel_member(ctx, portchannel_name, port_name): member_port_speed = member_port_entry.get(PORT_SPEED) port_speed = port_entry.get(PORT_SPEED) # TODO: MISSING CONSTRAINT IN YANG MODEL - if member_port_speed != port_speed: + if member_port_speed != port_speed: ctx.fail("Port speed of {} is different than the other members of the portchannel {}" .format(port_name, portchannel_name)) @@ -2347,7 +2400,7 @@ def del_portchannel_member(ctx, portchannel_name, port_name): # Dont proceed if the the port is not an existing member of the port channel if not is_port_member_of_this_portchannel(db, port_name, portchannel_name): ctx.fail("{} is not a member of portchannel {}".format(port_name, portchannel_name)) - + try: 
db.set_entry('PORTCHANNEL_MEMBER', portchannel_name + '|' + port_name, None) except JsonPatchConflict: @@ -2534,7 +2587,7 @@ def add_erspan(session_name, src_ip, dst_ip, dscp, ttl, gre_type, queue, policer if not namespaces['front_ns']: config_db = ValidatedConfigDBConnector(ConfigDBConnector()) config_db.connect() - if ADHOC_VALIDATION: + if ADHOC_VALIDATION: if validate_mirror_session_config(config_db, session_name, None, src_port, direction) is False: return try: @@ -3504,7 +3557,7 @@ def del_community(db, community): if community not in snmp_communities: click.echo("SNMP community {} is not configured".format(community)) sys.exit(1) - + config_db = ValidatedConfigDBConnector(db.cfgdb) try: config_db.set_entry('SNMP_COMMUNITY', community, None) @@ -4562,7 +4615,7 @@ def fec(ctx, interface_name, interface_fec, verbose): def ip(ctx): """Set IP interface attributes""" pass - + def validate_vlan_exists(db,text): data = db.get_table('VLAN') keys = list(data.keys()) @@ -4630,12 +4683,12 @@ def add(ctx, interface_name, ip_addr, gw): table_name = get_interface_table_name(interface_name) if table_name == "": ctx.fail("'interface_name' is not valid. Valid names [Ethernet/PortChannel/Vlan/Loopback]") - + if table_name == "VLAN_INTERFACE": if not validate_vlan_exists(config_db, interface_name): ctx.fail(f"Error: {interface_name} does not exist. 
Vlan must be created before adding an IP address") return - + interface_entry = config_db.get_entry(table_name, interface_name) if len(interface_entry) == 0: if table_name == "VLAN_SUB_INTERFACE": @@ -5057,7 +5110,7 @@ def cable_length(ctx, interface_name, length): if not is_dynamic_buffer_enabled(config_db): ctx.fail("This command can only be supported on a system with dynamic buffer enabled") - + if ADHOC_VALIDATION: # Check whether port is legal ports = config_db.get_entry("PORT", interface_name) @@ -5402,7 +5455,7 @@ def unbind(ctx, interface_name): config_db.set_entry(table_name, interface_name, subintf_entry) else: config_db.set_entry(table_name, interface_name, None) - + click.echo("Interface {} IP disabled and address(es) removed due to unbinding VRF.".format(interface_name)) # # 'ipv6' subgroup ('config interface ipv6 ...') @@ -6580,7 +6633,7 @@ def add_loopback(ctx, loopback_name): lo_intfs = [k for k, v in config_db.get_table('LOOPBACK_INTERFACE').items() if type(k) != tuple] if loopback_name in lo_intfs: ctx.fail("{} already exists".format(loopback_name)) # TODO: MISSING CONSTRAINT IN YANG VALIDATION - + try: config_db.set_entry('LOOPBACK_INTERFACE', loopback_name, {"NULL" : "NULL"}) except ValueError: @@ -6604,7 +6657,7 @@ def del_loopback(ctx, loopback_name): ips = [ k[1] for k in lo_config_db if type(k) == tuple and k[0] == loopback_name ] for ip in ips: config_db.set_entry('LOOPBACK_INTERFACE', (loopback_name, ip), None) - + try: config_db.set_entry('LOOPBACK_INTERFACE', loopback_name, None) except JsonPatchConflict: @@ -6662,9 +6715,9 @@ def ntp(ctx): def add_ntp_server(ctx, ntp_ip_address): """ Add NTP server IP """ if ADHOC_VALIDATION: - if not clicommon.is_ipaddress(ntp_ip_address): + if not clicommon.is_ipaddress(ntp_ip_address): ctx.fail('Invalid IP address') - db = ValidatedConfigDBConnector(ctx.obj['db']) + db = ValidatedConfigDBConnector(ctx.obj['db']) ntp_servers = db.get_table("NTP_SERVER") if ntp_ip_address in ntp_servers: 
click.echo("NTP server {} is already configured".format(ntp_ip_address)) @@ -6675,7 +6728,7 @@ def add_ntp_server(ctx, ntp_ip_address): {'resolve_as': ntp_ip_address, 'association_type': 'server'}) except ValueError as e: - ctx.fail("Invalid ConfigDB. Error: {}".format(e)) + ctx.fail("Invalid ConfigDB. Error: {}".format(e)) click.echo("NTP server {} added to configuration".format(ntp_ip_address)) try: click.echo("Restarting ntp-config service...") @@ -6691,7 +6744,7 @@ def del_ntp_server(ctx, ntp_ip_address): if ADHOC_VALIDATION: if not clicommon.is_ipaddress(ntp_ip_address): ctx.fail('Invalid IP address') - db = ValidatedConfigDBConnector(ctx.obj['db']) + db = ValidatedConfigDBConnector(ctx.obj['db']) ntp_servers = db.get_table("NTP_SERVER") if ntp_ip_address in ntp_servers: try: @@ -7019,19 +7072,19 @@ def add(ctx, name, ipaddr, port, vrf): if not is_valid_collector_info(name, ipaddr, port, vrf): return - config_db = ValidatedConfigDBConnector(ctx.obj['db']) + config_db = ValidatedConfigDBConnector(ctx.obj['db']) collector_tbl = config_db.get_table('SFLOW_COLLECTOR') if (collector_tbl and name not in collector_tbl and len(collector_tbl) == 2): click.echo("Only 2 collectors can be configured, please delete one") return - + try: config_db.mod_entry('SFLOW_COLLECTOR', name, {"collector_ip": ipaddr, "collector_port": port, "collector_vrf": vrf}) except ValueError as e: - ctx.fail("Invalid ConfigDB. Error: {}".format(e)) + ctx.fail("Invalid ConfigDB. 
Error: {}".format(e)) return # @@ -7364,7 +7417,7 @@ def add_subinterface(ctx, subinterface_name, vid): if vid is not None: subintf_dict.update({"vlan" : vid}) subintf_dict.update({"admin_status" : "up"}) - + try: config_db.set_entry('VLAN_SUB_INTERFACE', subinterface_name, subintf_dict) except ValueError as e: diff --git a/generic_config_updater/change_applier.py b/generic_config_updater/change_applier.py index d0818172f8..32a356bf9a 100644 --- a/generic_config_updater/change_applier.py +++ b/generic_config_updater/change_applier.py @@ -1,12 +1,14 @@ import copy import json +import subprocess import jsondiff import importlib import os import tempfile from collections import defaultdict from swsscommon.swsscommon import ConfigDBConnector -from .gu_common import genericUpdaterLogging +from sonic_py_common import multi_asic +from .gu_common import GenericConfigUpdaterError, genericUpdaterLogging SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__)) UPDATER_CONF_FILE = f"{SCRIPT_DIR}/gcu_services_validator.conf.json" @@ -32,12 +34,11 @@ def log_error(m): logger.log(logger.LOG_PRIORITY_ERROR, m, print_to_console) -def get_config_db(): - config_db = ConfigDBConnector() +def get_config_db(namespace=multi_asic.DEFAULT_NAMESPACE): + config_db = ConfigDBConnector(use_unix_socket_path=True, namespace=namespace) config_db.connect() return config_db - def set_config(config_db, tbl, key, data): config_db.set_entry(tbl, key, data) @@ -73,8 +74,9 @@ class ChangeApplier: updater_conf = None - def __init__(self): - self.config_db = get_config_db() + def __init__(self, namespace=multi_asic.DEFAULT_NAMESPACE): + self.namespace = namespace + self.config_db = get_config_db(self.namespace) self.backend_tables = [ "BUFFER_PG", "BUFFER_PROFILE", @@ -160,18 +162,32 @@ def apply(self, change): log_error("Failed to apply Json change") return ret - def remove_backend_tables_from_config(self, data): for key in self.backend_tables: data.pop(key, None) - def _get_running_config(self): - (_, 
fname) = tempfile.mkstemp(suffix="_changeApplier") - os.system("sonic-cfggen -d --print-data > {}".format(fname)) - run_data = {} - with open(fname, "r") as s: - run_data = json.load(s) - if os.path.isfile(fname): + _, fname = tempfile.mkstemp(suffix="_changeApplier") + + if self.namespace: + cmd = ['sonic-cfggen', '-d', '--print-data', '-n', self.namespace] + else: + cmd = ['sonic-cfggen', '-d', '--print-data'] + + with open(fname, "w") as file: + result = subprocess.Popen(cmd, stdout=file, stderr=subprocess.PIPE, text=True) + _, err = result.communicate() + + return_code = result.returncode + if return_code: os.remove(fname) + raise GenericConfigUpdaterError(f"Failed to get running config for namespace: {self.namespace}, Return code: {return_code}, Error: {err}") + + run_data = {} + try: + with open(fname, "r") as file: + run_data = json.load(file) + finally: + if os.path.isfile(fname): + os.remove(fname) return run_data diff --git a/generic_config_updater/generic_updater.py b/generic_config_updater/generic_updater.py index f9aab82336..b75939749c 100644 --- a/generic_config_updater/generic_updater.py +++ b/generic_config_updater/generic_updater.py @@ -1,4 +1,5 @@ import json +import jsonpointer import os from enum import Enum from .gu_common import GenericConfigUpdaterError, EmptyTableError, ConfigWrapper, \ @@ -6,10 +7,37 @@ from .patch_sorter import StrictPatchSorter, NonStrictPatchSorter, ConfigSplitter, \ TablesWithoutYangConfigSplitter, IgnorePathsFromYangConfigSplitter from .change_applier import ChangeApplier, DryRunChangeApplier +from sonic_py_common import multi_asic CHECKPOINTS_DIR = "/etc/sonic/checkpoints" CHECKPOINT_EXT = ".cp.json" +def extract_scope(path): + if not path: + raise Exception("Wrong patch with empty path.") + + try: + pointer = jsonpointer.JsonPointer(path) + parts = pointer.parts + except Exception as e: + raise Exception(f"Error resolving path: '{path}' due to {e}") + + if not parts: + raise Exception("Wrong patch with empty path.") 
+ if parts[0].startswith("asic"): + if not parts[0][len("asic"):].isnumeric(): + raise Exception(f"Error resolving path: '{path}' due to incorrect ASIC number.") + scope = parts[0] + remainder = "/" + "/".join(parts[1:]) + elif parts[0] == "localhost": + scope = "localhost" + remainder = "/" + "/".join(parts[1:]) + else: + scope = "" + remainder = path + + return scope, remainder + class ConfigLock: def acquire_lock(self): # TODO: Implement ConfigLock @@ -29,77 +57,82 @@ def __init__(self, patchsorter=None, changeapplier=None, config_wrapper=None, - patch_wrapper=None): + patch_wrapper=None, + namespace=multi_asic.DEFAULT_NAMESPACE): + self.namespace = namespace self.logger = genericUpdaterLogging.get_logger(title="Patch Applier", print_all_to_console=True) - self.config_wrapper = config_wrapper if config_wrapper is not None else ConfigWrapper() - self.patch_wrapper = patch_wrapper if patch_wrapper is not None else PatchWrapper() + self.config_wrapper = config_wrapper if config_wrapper is not None else ConfigWrapper(namespace=self.namespace) + self.patch_wrapper = patch_wrapper if patch_wrapper is not None else PatchWrapper(namespace=self.namespace) self.patchsorter = patchsorter if patchsorter is not None else StrictPatchSorter(self.config_wrapper, self.patch_wrapper) - self.changeapplier = changeapplier if changeapplier is not None else ChangeApplier() + self.changeapplier = changeapplier if changeapplier is not None else ChangeApplier(namespace=self.namespace) def apply(self, patch, sort=True): - self.logger.log_notice("Patch application starting.") - self.logger.log_notice(f"Patch: {patch}") + scope = self.namespace if self.namespace else 'localhost' + self.logger.log_notice(f"{scope}: Patch application starting.") + self.logger.log_notice(f"{scope}: Patch: {patch}") # Get old config - self.logger.log_notice("Getting current config db.") + self.logger.log_notice(f"{scope} getting current config db.") old_config = self.config_wrapper.get_config_db_as_json() # 
Generate target config - self.logger.log_notice("Simulating the target full config after applying the patch.") + self.logger.log_notice(f"{scope}: simulating the target full config after applying the patch.") target_config = self.patch_wrapper.simulate_patch(patch, old_config) - + # Validate all JsonPatch operations on specified fields - self.logger.log_notice("Validating all JsonPatch operations are permitted on the specified fields") + self.logger.log_notice(f"{scope}: validating all JsonPatch operations are permitted on the specified fields") self.config_wrapper.validate_field_operation(old_config, target_config) # Validate target config does not have empty tables since they do not show up in ConfigDb - self.logger.log_notice("Validating target config does not have empty tables, " \ + self.logger.log_notice(f"{scope}: validating target config does not have empty tables, " \ "since they do not show up in ConfigDb.") empty_tables = self.config_wrapper.get_empty_tables(target_config) if empty_tables: # if there are empty tables empty_tables_txt = ", ".join(empty_tables) - raise EmptyTableError("Given patch is not valid because it will result in empty tables " \ + raise EmptyTableError(f"{scope}: given patch is not valid because it will result in empty tables " \ "which is not allowed in ConfigDb. 
" \ f"Table{'s' if len(empty_tables) != 1 else ''}: {empty_tables_txt}") # Generate list of changes to apply if sort: - self.logger.log_notice("Sorting patch updates.") + self.logger.log_notice(f"{scope}: sorting patch updates.") changes = self.patchsorter.sort(patch) else: - self.logger.log_notice("Converting patch to JsonChange.") + self.logger.log_notice(f"{scope}: converting patch to JsonChange.") changes = [JsonChange(jsonpatch.JsonPatch([element])) for element in patch] - + changes_len = len(changes) - self.logger.log_notice(f"The patch was converted into {changes_len} " \ + self.logger.log_notice(f"The {scope} patch was converted into {changes_len} " \ f"change{'s' if changes_len != 1 else ''}{':' if changes_len > 0 else '.'}") for change in changes: self.logger.log_notice(f" * {change}") # Apply changes in order - self.logger.log_notice(f"Applying {changes_len} change{'s' if changes_len != 1 else ''} " \ + self.logger.log_notice(f"{scope}: applying {changes_len} change{'s' if changes_len != 1 else ''} " \ f"in order{':' if changes_len > 0 else '.'}") for change in changes: self.logger.log_notice(f" * {change}") self.changeapplier.apply(change) # Validate config updated successfully - self.logger.log_notice("Verifying patch updates are reflected on ConfigDB.") + self.logger.log_notice(f"{scope}: verifying patch updates are reflected on ConfigDB.") new_config = self.config_wrapper.get_config_db_as_json() self.changeapplier.remove_backend_tables_from_config(target_config) self.changeapplier.remove_backend_tables_from_config(new_config) if not(self.patch_wrapper.verify_same_json(target_config, new_config)): - raise GenericConfigUpdaterError(f"After applying patch to config, there are still some parts not updated") + raise GenericConfigUpdaterError(f"{scope}: after applying patch to config, there are still some parts not updated") + + self.logger.log_notice(f"{scope} patch application completed.") - self.logger.log_notice("Patch application completed.") class 
ConfigReplacer: - def __init__(self, patch_applier=None, config_wrapper=None, patch_wrapper=None): + def __init__(self, patch_applier=None, config_wrapper=None, patch_wrapper=None, namespace=multi_asic.DEFAULT_NAMESPACE): + self.namespace = namespace self.logger = genericUpdaterLogging.get_logger(title="Config Replacer", print_all_to_console=True) - self.patch_applier = patch_applier if patch_applier is not None else PatchApplier() - self.config_wrapper = config_wrapper if config_wrapper is not None else ConfigWrapper() - self.patch_wrapper = patch_wrapper if patch_wrapper is not None else PatchWrapper() + self.patch_applier = patch_applier if patch_applier is not None else PatchApplier(namespace=self.namespace) + self.config_wrapper = config_wrapper if config_wrapper is not None else ConfigWrapper(namespace=self.namespace) + self.patch_wrapper = patch_wrapper if patch_wrapper is not None else PatchWrapper(namespace=self.namespace) def replace(self, target_config): self.logger.log_notice("Config replacement starting.") @@ -122,15 +155,18 @@ def replace(self, target_config): self.logger.log_notice("Config replacement completed.") + class FileSystemConfigRollbacker: def __init__(self, checkpoints_dir=CHECKPOINTS_DIR, config_replacer=None, - config_wrapper=None): + config_wrapper=None, + namespace=multi_asic.DEFAULT_NAMESPACE): + self.namespace = namespace self.logger = genericUpdaterLogging.get_logger(title="Config Rollbacker", print_all_to_console=True) self.checkpoints_dir = checkpoints_dir - self.config_replacer = config_replacer if config_replacer is not None else ConfigReplacer() - self.config_wrapper = config_wrapper if config_wrapper is not None else ConfigWrapper() + self.config_replacer = config_replacer if config_replacer is not None else ConfigReplacer(namespace=self.namespace) + self.config_wrapper = config_wrapper if config_wrapper is not None else ConfigWrapper(namespace=self.namespace) def rollback(self, checkpoint_name): self.logger.log_notice("Config 
rollbacking starting.") @@ -168,7 +204,7 @@ def checkpoint(self, checkpoint_name): def list_checkpoints(self): self.logger.log_info("Listing checkpoints starting.") - + self.logger.log_info(f"Verifying checkpoints directory '{self.checkpoints_dir}' exists.") if not self._checkpoints_dir_exist(): self.logger.log_info("Checkpoints directory is empty, returning empty checkpoints list.") @@ -236,12 +272,13 @@ def _delete_checkpoint(self, name): path = self._get_checkpoint_full_path(name) return os.remove(path) + class Decorator(PatchApplier, ConfigReplacer, FileSystemConfigRollbacker): - def __init__(self, decorated_patch_applier=None, decorated_config_replacer=None, decorated_config_rollbacker=None): + def __init__(self, decorated_patch_applier=None, decorated_config_replacer=None, decorated_config_rollbacker=None, namespace=multi_asic.DEFAULT_NAMESPACE): # initing base classes to make LGTM happy - PatchApplier.__init__(self) - ConfigReplacer.__init__(self) - FileSystemConfigRollbacker.__init__(self) + PatchApplier.__init__(self, namespace=namespace) + ConfigReplacer.__init__(self, namespace=namespace) + FileSystemConfigRollbacker.__init__(self, namespace=namespace) self.decorated_patch_applier = decorated_patch_applier self.decorated_config_replacer = decorated_config_replacer @@ -265,10 +302,12 @@ def list_checkpoints(self): def delete_checkpoint(self, checkpoint_name): self.decorated_config_rollbacker.delete_checkpoint(checkpoint_name) + class SonicYangDecorator(Decorator): - def __init__(self, patch_wrapper, config_wrapper, decorated_patch_applier=None, decorated_config_replacer=None): - Decorator.__init__(self, decorated_patch_applier, decorated_config_replacer) + def __init__(self, patch_wrapper, config_wrapper, decorated_patch_applier=None, decorated_config_replacer=None, namespace=multi_asic.DEFAULT_NAMESPACE): + Decorator.__init__(self, decorated_patch_applier, decorated_config_replacer, namespace=namespace) + self.namespace = namespace self.patch_wrapper = 
patch_wrapper self.config_wrapper = config_wrapper @@ -280,13 +319,15 @@ def replace(self, target_config): config_db_target_config = self.config_wrapper.convert_sonic_yang_to_config_db(target_config) Decorator.replace(self, config_db_target_config) + class ConfigLockDecorator(Decorator): def __init__(self, decorated_patch_applier=None, decorated_config_replacer=None, decorated_config_rollbacker=None, - config_lock = ConfigLock()): - Decorator.__init__(self, decorated_patch_applier, decorated_config_replacer, decorated_config_rollbacker) + config_lock=ConfigLock(), + namespace=multi_asic.DEFAULT_NAMESPACE): + Decorator.__init__(self, decorated_patch_applier, decorated_config_replacer, decorated_config_rollbacker, namespace=namespace) self.config_lock = config_lock @@ -307,28 +348,35 @@ def execute_write_action(self, action, *args): action(*args) self.config_lock.release_lock() + class GenericUpdateFactory: + def __init__(self, namespace=multi_asic.DEFAULT_NAMESPACE): + self.namespace = namespace + def create_patch_applier(self, config_format, verbose, dry_run, ignore_non_yang_tables, ignore_paths): self.init_verbose_logging(verbose) config_wrapper = self.get_config_wrapper(dry_run) change_applier = self.get_change_applier(dry_run, config_wrapper) - patch_wrapper = PatchWrapper(config_wrapper) + patch_wrapper = PatchWrapper(config_wrapper, namespace=self.namespace) patch_sorter = self.get_patch_sorter(ignore_non_yang_tables, ignore_paths, config_wrapper, patch_wrapper) patch_applier = PatchApplier(config_wrapper=config_wrapper, patchsorter=patch_sorter, patch_wrapper=patch_wrapper, - changeapplier=change_applier) + changeapplier=change_applier, + namespace=self.namespace) if config_format == ConfigFormat.CONFIGDB: pass elif config_format == ConfigFormat.SONICYANG: - patch_applier = SonicYangDecorator( - decorated_patch_applier = patch_applier, patch_wrapper=patch_wrapper, config_wrapper=config_wrapper) + patch_applier = 
SonicYangDecorator(decorated_patch_applier=patch_applier, + patch_wrapper=patch_wrapper, + config_wrapper=config_wrapper, + namespace=self.namespace) else: raise ValueError(f"config-format '{config_format}' is not supported") if not dry_run: - patch_applier = ConfigLockDecorator(decorated_patch_applier = patch_applier) + patch_applier = ConfigLockDecorator(decorated_patch_applier=patch_applier, namespace=self.namespace) return patch_applier @@ -337,24 +385,27 @@ def create_config_replacer(self, config_format, verbose, dry_run, ignore_non_yan config_wrapper = self.get_config_wrapper(dry_run) change_applier = self.get_change_applier(dry_run, config_wrapper) - patch_wrapper = PatchWrapper(config_wrapper) + patch_wrapper = PatchWrapper(config_wrapper, namespace=self.namespace) patch_sorter = self.get_patch_sorter(ignore_non_yang_tables, ignore_paths, config_wrapper, patch_wrapper) patch_applier = PatchApplier(config_wrapper=config_wrapper, patchsorter=patch_sorter, patch_wrapper=patch_wrapper, - changeapplier=change_applier) + changeapplier=change_applier, + namespace=self.namespace) - config_replacer = ConfigReplacer(patch_applier=patch_applier, config_wrapper=config_wrapper) + config_replacer = ConfigReplacer(patch_applier=patch_applier, config_wrapper=config_wrapper, namespace=self.namespace) if config_format == ConfigFormat.CONFIGDB: pass elif config_format == ConfigFormat.SONICYANG: - config_replacer = SonicYangDecorator( - decorated_config_replacer = config_replacer, patch_wrapper=patch_wrapper, config_wrapper=config_wrapper) + config_replacer = SonicYangDecorator(decorated_config_replacer=config_replacer, + patch_wrapper=patch_wrapper, + config_wrapper=config_wrapper, + namespace=self.namespace) else: raise ValueError(f"config-format '{config_format}' is not supported") if not dry_run: - config_replacer = ConfigLockDecorator(decorated_config_replacer = config_replacer) + config_replacer = ConfigLockDecorator(decorated_config_replacer=config_replacer, 
namespace=self.namespace) return config_replacer @@ -363,18 +414,19 @@ def create_config_rollbacker(self, verbose, dry_run=False, ignore_non_yang_table config_wrapper = self.get_config_wrapper(dry_run) change_applier = self.get_change_applier(dry_run, config_wrapper) - patch_wrapper = PatchWrapper(config_wrapper) + patch_wrapper = PatchWrapper(config_wrapper, namespace=self.namespace) patch_sorter = self.get_patch_sorter(ignore_non_yang_tables, ignore_paths, config_wrapper, patch_wrapper) patch_applier = PatchApplier(config_wrapper=config_wrapper, patchsorter=patch_sorter, patch_wrapper=patch_wrapper, - changeapplier=change_applier) + changeapplier=change_applier, + namespace=self.namespace) - config_replacer = ConfigReplacer(config_wrapper=config_wrapper, patch_applier=patch_applier) - config_rollbacker = FileSystemConfigRollbacker(config_wrapper = config_wrapper, config_replacer = config_replacer) + config_replacer = ConfigReplacer(config_wrapper=config_wrapper, patch_applier=patch_applier, namespace=self.namespace) + config_rollbacker = FileSystemConfigRollbacker(config_wrapper=config_wrapper, config_replacer=config_replacer, namespace=self.namespace) if not dry_run: - config_rollbacker = ConfigLockDecorator(decorated_config_rollbacker = config_rollbacker) + config_rollbacker = ConfigLockDecorator(decorated_config_rollbacker=config_rollbacker, namespace=self.namespace) return config_rollbacker @@ -383,15 +435,15 @@ def init_verbose_logging(self, verbose): def get_config_wrapper(self, dry_run): if dry_run: - return DryRunConfigWrapper() + return DryRunConfigWrapper(namespace=self.namespace) else: - return ConfigWrapper() + return ConfigWrapper(namespace=self.namespace) def get_change_applier(self, dry_run, config_wrapper): if dry_run: return DryRunChangeApplier(config_wrapper) else: - return ChangeApplier() + return ChangeApplier(namespace=self.namespace) def get_patch_sorter(self, ignore_non_yang_tables, ignore_paths, config_wrapper, patch_wrapper): if not 
ignore_non_yang_tables and not ignore_paths: @@ -408,10 +460,11 @@ def get_patch_sorter(self, ignore_non_yang_tables, ignore_paths, config_wrapper, return NonStrictPatchSorter(config_wrapper, patch_wrapper, config_splitter) + class GenericUpdater: - def __init__(self, generic_update_factory=None): + def __init__(self, generic_update_factory=None, namespace=multi_asic.DEFAULT_NAMESPACE): self.generic_update_factory = \ - generic_update_factory if generic_update_factory is not None else GenericUpdateFactory() + generic_update_factory if generic_update_factory is not None else GenericUpdateFactory(namespace=namespace) def apply_patch(self, patch, config_format, verbose, dry_run, ignore_non_yang_tables, ignore_paths, sort=True): patch_applier = self.generic_update_factory.create_patch_applier(config_format, verbose, dry_run, ignore_non_yang_tables, ignore_paths) diff --git a/generic_config_updater/gu_common.py b/generic_config_updater/gu_common.py index a6cb8de094..974c540c07 100644 --- a/generic_config_updater/gu_common.py +++ b/generic_config_updater/gu_common.py @@ -9,7 +9,7 @@ import copy import re import os -from sonic_py_common import logger +from sonic_py_common import logger, multi_asic from enum import Enum YANG_DIR = "/usr/local/yang-models" @@ -52,7 +52,8 @@ def __eq__(self, other): return False class ConfigWrapper: - def __init__(self, yang_dir = YANG_DIR): + def __init__(self, yang_dir=YANG_DIR, namespace=multi_asic.DEFAULT_NAMESPACE): + self.namespace = namespace self.yang_dir = YANG_DIR self.sonic_yang_with_loaded_models = None @@ -63,13 +64,16 @@ def get_config_db_as_json(self): return config_db_json def _get_config_db_as_text(self): - # TODO: Getting configs from CLI is very slow, need to get it from sonic-cffgen directly - cmd = "show runningconfiguration all" - result = subprocess.Popen(cmd, shell=True, text=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + if self.namespace is not None and self.namespace != multi_asic.DEFAULT_NAMESPACE: + cmd 
= ['sonic-cfggen', '-d', '--print-data', '-n', self.namespace] + else: + cmd = ['sonic-cfggen', '-d', '--print-data'] + + result = subprocess.Popen(cmd, shell=False, text=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) text, err = result.communicate() return_code = result.returncode if return_code: # non-zero means failure - raise GenericConfigUpdaterError(f"Failed to get running config, Return code: {return_code}, Error: {err}") + raise GenericConfigUpdaterError(f"Failed to get running config for namespace: {self.namespace}, Return code: {return_code}, Error: {err}") return text def get_sonic_yang_as_json(self): @@ -147,12 +151,12 @@ def validate_config_db_config(self, config_db_as_json): def validate_field_operation(self, old_config, target_config): """ - Some fields in ConfigDB are restricted and may not allow third-party addition, replacement, or removal. - Because YANG only validates state and not transitions, this method helps to JsonPatch operations/transitions for the specified fields. + Some fields in ConfigDB are restricted and may not allow third-party addition, replacement, or removal. + Because YANG only validates state and not transitions, this method helps to JsonPatch operations/transitions for the specified fields. 
""" patch = jsonpatch.JsonPatch.from_diff(old_config, target_config) - - # illegal_operations_to_fields_map['remove'] yields a list of fields for which `remove` is an illegal operation + + # illegal_operations_to_fields_map['remove'] yields a list of fields for which `remove` is an illegal operation illegal_operations_to_fields_map = { 'add':[], 'replace': [], @@ -180,7 +184,7 @@ def _invoke_validating_function(cmd, jsonpatch_element): with open(GCU_FIELD_OP_CONF_FILE, "r") as s: gcu_field_operation_conf = json.load(s) else: - raise GenericConfigUpdaterError("GCU field operation validators config file not found") + raise GenericConfigUpdaterError("GCU field operation validators config file not found") for element in patch: path = element["path"] @@ -296,8 +300,8 @@ def create_sonic_yang_with_loaded_models(self): class DryRunConfigWrapper(ConfigWrapper): # This class will simulate all read/write operations to ConfigDB on a virtual storage unit. - def __init__(self, initial_imitated_config_db = None): - super().__init__() + def __init__(self, initial_imitated_config_db = None, namespace=multi_asic.DEFAULT_NAMESPACE): + super().__init__(namespace=namespace) self.logger = genericUpdaterLogging.get_logger(title="** DryRun", print_all_to_console=True) self.imitated_config_db = copy.deepcopy(initial_imitated_config_db) @@ -317,8 +321,9 @@ def _init_imitated_config_db_if_none(self): class PatchWrapper: - def __init__(self, config_wrapper=None): - self.config_wrapper = config_wrapper if config_wrapper is not None else ConfigWrapper() + def __init__(self, config_wrapper=None, namespace=multi_asic.DEFAULT_NAMESPACE): + self.namespace = namespace + self.config_wrapper = config_wrapper if config_wrapper is not None else ConfigWrapper(self.namespace) self.path_addressing = PathAddressing(self.config_wrapper) def validate_config_db_patch_has_yang_models(self, patch): diff --git a/tests/config_test.py b/tests/config_test.py index cc0ac22e98..1054a52a33 100644 --- 
a/tests/config_test.py +++ b/tests/config_test.py @@ -15,7 +15,7 @@ import click from click.testing import CliRunner -from sonic_py_common import device_info +from sonic_py_common import device_info, multi_asic from utilities_common.db import Db from utilities_common.general import load_module_from_source from mock import call, patch, mock_open, MagicMock @@ -1699,7 +1699,7 @@ def test_config_load_mgmt_config_ipv6_only(self, get_cmd_module, setup_single_br } } self.check_output(get_cmd_module, device_desc_result, load_mgmt_config_command_ipv6_only_output, 7) - + def test_config_load_mgmt_config_ipv4_ipv6(self, get_cmd_module, setup_single_broadcom_asic): device_desc_result = { 'DEVICE_METADATA': { @@ -1931,19 +1931,19 @@ def test_warm_restart_neighsyncd_timer_yang_validation(self): print(result.output) assert result.exit_code != 0 assert "Invalid ConfigDB. Error" in result.output - + def test_warm_restart_neighsyncd_timer(self): config.ADHOC_VALIDATION = True runner = CliRunner() db = Db() obj = {'db':db.cfgdb} - + result = runner.invoke(config.config.commands["warm_restart"].commands["neighsyncd_timer"], ["0"], obj=obj) print(result.exit_code) print(result.output) assert result.exit_code != 0 assert "neighsyncd warm restart timer must be in range 1-9999" in result.output - + @patch("validated_config_db_connector.device_info.is_yang_config_validation_enabled", mock.Mock(return_value=True)) @patch("config.validated_config_db_connector.ValidatedConfigDBConnector.validated_mod_entry", mock.Mock(side_effect=ValueError)) def test_warm_restart_bgp_timer_yang_validation(self): @@ -1957,7 +1957,7 @@ def test_warm_restart_bgp_timer_yang_validation(self): print(result.output) assert result.exit_code != 0 assert "Invalid ConfigDB. 
Error" in result.output - + def test_warm_restart_bgp_timer(self): config.ADHOC_VALIDATION = True runner = CliRunner() @@ -1969,7 +1969,7 @@ def test_warm_restart_bgp_timer(self): print(result.output) assert result.exit_code != 0 assert "bgp warm restart timer must be in range 1-3600" in result.output - + @patch("validated_config_db_connector.device_info.is_yang_config_validation_enabled", mock.Mock(return_value=True)) @patch("config.validated_config_db_connector.ValidatedConfigDBConnector.validated_mod_entry", mock.Mock(side_effect=ValueError)) def test_warm_restart_teamsyncd_timer_yang_validation(self): @@ -1995,7 +1995,7 @@ def test_warm_restart_teamsyncd_timer(self): print(result.output) assert result.exit_code != 0 assert "teamsyncd warm restart timer must be in range 1-3600" in result.output - + @patch("validated_config_db_connector.device_info.is_yang_config_validation_enabled", mock.Mock(return_value=True)) @patch("config.validated_config_db_connector.ValidatedConfigDBConnector.validated_mod_entry", mock.Mock(side_effect=ValueError)) def test_warm_restart_bgp_eoiu_yang_validation(self): @@ -2052,7 +2052,7 @@ def test_add_cablelength_invalid_yang_validation(self): print(result.output) assert result.exit_code != 0 assert "Invalid ConfigDB. 
Error" in result.output - + @patch("config.main.ConfigDBConnector.get_entry", mock.Mock(return_value="Port Info")) @patch("config.main.is_dynamic_buffer_enabled", mock.Mock(return_value=True)) def test_add_cablelength_with_invalid_name_invalid_length(self): @@ -2078,7 +2078,7 @@ def setup_class(cls): print("SETUP") import config.main importlib.reload(config.main) - + @patch("validated_config_db_connector.device_info.is_yang_config_validation_enabled", mock.Mock(return_value=True)) @patch("config.validated_config_db_connector.ValidatedConfigDBConnector.validated_set_entry", mock.Mock(side_effect=ValueError)) def test_add_loopback_with_invalid_name_yang_validation(self): @@ -2116,7 +2116,7 @@ def test_del_nonexistent_loopback_adhoc_validation(self): print(result.output) assert result.exit_code != 0 assert "Loopback12 does not exist" in result.output - + def test_del_nonexistent_loopback_adhoc_validation(self): config.ADHOC_VALIDATION = True runner = CliRunner() @@ -2128,7 +2128,7 @@ def test_del_nonexistent_loopback_adhoc_validation(self): print(result.output) assert result.exit_code != 0 assert "Loopbax1 is invalid, name should have prefix 'Loopback' and suffix '<0-999>'" in result.output - + @patch("config.validated_config_db_connector.ValidatedConfigDBConnector.validated_set_entry", mock.Mock(return_value=True)) @patch("validated_config_db_connector.device_info.is_yang_config_validation_enabled", mock.Mock(return_value=True)) def test_add_loopback_yang_validation(self): @@ -2152,7 +2152,7 @@ def test_add_loopback_adhoc_validation(self): print(result.exit_code) print(result.output) assert result.exit_code == 0 - + @classmethod def teardown_class(cls): print("TEARDOWN") @@ -2635,3 +2635,110 @@ def test_date_bad(self): @classmethod def teardown_class(cls): print('TEARDOWN') + + +class TestApplyPatchMultiAsic(unittest.TestCase): + def setUp(self): + os.environ['UTILITIES_UNIT_TESTING'] = "2" + os.environ["UTILITIES_UNIT_TESTING_TOPOLOGY"] = "multi_asic" + import 
config.main + importlib.reload(config.main) + # change to multi asic config + from .mock_tables import dbconnector + from .mock_tables import mock_multi_asic + importlib.reload(mock_multi_asic) + dbconnector.load_namespace_config() + + self.runner = CliRunner() + self.patch_file_path = 'path/to/patch.json' + self.patch_content = [ + { + "op": "add", + "path": "/localhost/ACL_TABLE/NEW_ACL_TABLE", + "value": { + "policy_desc": "New ACL Table", + "ports": ["Ethernet1", "Ethernet2"], + "stage": "ingress", + "type": "L3" + } + }, + { + "op": "add", + "path": "/asic0/ACL_TABLE/NEW_ACL_TABLE", + "value": { + "policy_desc": "New ACL Table", + "ports": ["Ethernet3", "Ethernet4"], + "stage": "ingress", + "type": "L3" + } + }, + { + "op": "replace", + "path": "/asic1/PORT/Ethernet1/mtu", + "value": "9200" + } + ] + + def test_apply_patch_multiasic(self): + # Mock open to simulate file reading + with patch('builtins.open', mock_open(read_data=json.dumps(self.patch_content)), create=True) as mocked_open: + # Mock GenericUpdater to avoid actual patch application + with patch('config.main.GenericUpdater') as mock_generic_updater: + mock_generic_updater.return_value.apply_patch = MagicMock() + + print("Multi ASIC: {}".format(multi_asic.is_multi_asic())) + # Invocation of the command with the CliRunner + result = self.runner.invoke(config.config.commands["apply-patch"], [self.patch_file_path], catch_exceptions=True) + + print("Exit Code: {}, output: {}".format(result.exit_code, result.output)) + # Assertions and verifications + self.assertEqual(result.exit_code, 0, "Command should succeed") + self.assertIn("Patch applied successfully.", result.output) + + # Verify mocked_open was called as expected + mocked_open.assert_called_with(self.patch_file_path, 'r') + + def test_apply_patch_dryrun_multiasic(self): + # Mock open to simulate file reading + with patch('builtins.open', mock_open(read_data=json.dumps(self.patch_content)), create=True) as mocked_open: + # Mock GenericUpdater to 
avoid actual patch application + with patch('config.main.GenericUpdater') as mock_generic_updater: + mock_generic_updater.return_value.apply_patch = MagicMock() + + # Mock ConfigDBConnector to ensure it's not called during dry-run + with patch('config.main.ConfigDBConnector') as mock_config_db_connector: + + print("Multi ASIC: {}".format(multi_asic.is_multi_asic())) + # Invocation of the command with the CliRunner + result = self.runner.invoke(config.config.commands["apply-patch"], + [self.patch_file_path, + "--format", ConfigFormat.SONICYANG.name, + "--dry-run", + "--ignore-non-yang-tables", + "--ignore-path", "/ANY_TABLE", + "--ignore-path", "/ANY_OTHER_TABLE/ANY_FIELD", + "--ignore-path", "", + "--verbose"], + catch_exceptions=False) + + print("Exit Code: {}, output: {}".format(result.exit_code, result.output)) + # Assertions and verifications + self.assertEqual(result.exit_code, 0, "Command should succeed") + self.assertIn("Patch applied successfully.", result.output) + + # Verify mocked_open was called as expected + mocked_open.assert_called_with(self.patch_file_path, 'r') + + # Ensure ConfigDBConnector was never instantiated or called + mock_config_db_connector.assert_not_called() + + @classmethod + def teardown_class(cls): + print("TEARDOWN") + os.environ['UTILITIES_UNIT_TESTING'] = "0" + os.environ["UTILITIES_UNIT_TESTING_TOPOLOGY"] = "" + # change back to single asic config + from .mock_tables import dbconnector + from .mock_tables import mock_single_asic + importlib.reload(mock_single_asic) + dbconnector.load_database_config() \ No newline at end of file diff --git a/tests/generic_config_updater/change_applier_test.py b/tests/generic_config_updater/change_applier_test.py index afe166b008..4c9b33c3a4 100644 --- a/tests/generic_config_updater/change_applier_test.py +++ b/tests/generic_config_updater/change_applier_test.py @@ -74,16 +74,28 @@ def debug_print(msg): # Mimics os.system call for sonic-cfggen -d --print-data > filename -# -def 
os_system_cfggen(cmd): +def subprocess_Popen_cfggen(cmd, *args, **kwargs): global running_config - fname = cmd.split(">")[-1].strip() + # Extract file name from kwargs if 'stdout' is a file object + stdout = kwargs.get('stdout') + if hasattr(stdout, 'name'): + fname = stdout.name + else: + raise ValueError("stdout is not a file") + + # Write the running configuration to the file specified in stdout with open(fname, "w") as s: - s.write(json.dumps(running_config, indent=4)) - debug_print("File created {} type={} cfg={}".format(fname, - type(running_config), json.dumps(running_config)[1:40])) - return 0 + json.dump(running_config, s, indent=4) + + class MockPopen: + def __init__(self): + self.returncode = 0 # Simulate successful command execution + + def communicate(self): + return "", "" # Simulate empty stdout and stderr + + return MockPopen() # mimics config_db.set_entry @@ -213,14 +225,14 @@ def vlan_validate(old_cfg, new_cfg, keys): class TestChangeApplier(unittest.TestCase): - @patch("generic_config_updater.change_applier.os.system") + @patch("generic_config_updater.change_applier.subprocess.Popen") @patch("generic_config_updater.change_applier.get_config_db") @patch("generic_config_updater.change_applier.set_config") - def test_change_apply(self, mock_set, mock_db, mock_os_sys): + def test_change_apply(self, mock_set, mock_db, mock_subprocess_Popen): global read_data, running_config, json_changes, json_change_index global start_running_config - mock_os_sys.side_effect = os_system_cfggen + mock_subprocess_Popen.side_effect = subprocess_Popen_cfggen mock_db.return_value = DB_HANDLE mock_set.side_effect = set_entry diff --git a/tests/generic_config_updater/multiasic_change_applier_test.py b/tests/generic_config_updater/multiasic_change_applier_test.py new file mode 100644 index 0000000000..e8b277618f --- /dev/null +++ b/tests/generic_config_updater/multiasic_change_applier_test.py @@ -0,0 +1,172 @@ +import unittest +from importlib import reload +from 
unittest.mock import patch, MagicMock +from generic_config_updater.generic_updater import extract_scope +import generic_config_updater.change_applier +import generic_config_updater.services_validator +import generic_config_updater.gu_common + + +class TestMultiAsicChangeApplier(unittest.TestCase): + + def test_extract_scope(self): + test_paths_expectedresults = { + "/asic0/PORTCHANNEL/PortChannel102/admin_status": (True, "asic0", "/PORTCHANNEL/PortChannel102/admin_status"), + "/asic01/PORTCHANNEL/PortChannel102/admin_status": (True, "asic01", "/PORTCHANNEL/PortChannel102/admin_status"), + "/asic123456789/PORTCHANNEL/PortChannel102/admin_status": (True, "asic123456789", "/PORTCHANNEL/PortChannel102/admin_status"), + "/asic0123456789/PORTCHANNEL/PortChannel102/admin_status": (True, "asic0123456789", "/PORTCHANNEL/PortChannel102/admin_status"), + "/localhost/BGP_DEVICE_GLOBAL/STATE/tsa_enabled": (True, "localhost", "/BGP_DEVICE_GLOBAL/STATE/tsa_enabled"), + "/asic1/BGP_DEVICE_GLOBAL/STATE/tsa_enabled": (True, "asic1", "/BGP_DEVICE_GLOBAL/STATE/tsa_enabled"), + "/sometable/data": (True, "", "/sometable/data"), + "": (False, "", ""), + "localhostabc/BGP_DEVICE_GLOBAL/STATE/tsa_enabled": (False, "", ""), + "/asic77": (False, "", ""), + "/Asic0/PORTCHANNEL/PortChannel102/admin_status": (False, "", ""), + "/ASIC1/PORTCHANNEL/PortChannel102/admin_status": (False, "", ""), + "/Localhost/PORTCHANNEL/PortChannel102/admin_status": (False, "", ""), + "/LocalHost/PORTCHANNEL/PortChannel102/admin_status": (False, "", ""), + "/asci1/PORTCHANNEL/PortChannel102/admin_status": (False, "", ""), + "/asicx/PORTCHANNEL/PortChannel102/admin_status": (False, "", ""), + "/asic-12/PORTCHANNEL/PortChannel102/admin_status": (False, "", ""), + } + + for test_path, (result, expectedscope, expectedremainder) in test_paths_expectedresults.items(): + try: + scope, remainder = extract_scope(test_path) + assert(scope == expectedscope) + assert(remainder == expectedremainder) + except Exception as e: + 
assert(result == False) + + @patch('generic_config_updater.change_applier.ChangeApplier._get_running_config', autospec=True) + @patch('generic_config_updater.change_applier.ConfigDBConnector', autospec=True) + def test_apply_change_default_namespace(self, mock_ConfigDBConnector, mock_get_running_config): + # Setup mock for ConfigDBConnector + mock_db = MagicMock() + mock_ConfigDBConnector.return_value = mock_db + + # Setup mock for json.load to return some running configuration + mock_get_running_config.return_value = { + "tables": { + "ACL_TABLE": { + "services_to_validate": ["aclservice"], + "validate_commands": ["acl_loader show table"] + }, + "PORT": { + "services_to_validate": ["portservice"], + "validate_commands": ["show interfaces status"] + } + }, + "services": { + "aclservice": { + "validate_commands": ["acl_loader show table"] + }, + "portservice": { + "validate_commands": ["show interfaces status"] + } + } + } + + # Instantiate ChangeApplier with the default namespace + applier = generic_config_updater.change_applier.ChangeApplier() + + # Prepare a change object or data that applier.apply would use + change = MagicMock() + + # Call the apply method with the change object + applier.apply(change) + + # Assert ConfigDBConnector called with the correct namespace + mock_ConfigDBConnector.assert_called_once_with(use_unix_socket_path=True, namespace="") + + @patch('generic_config_updater.change_applier.ChangeApplier._get_running_config', autospec=True) + @patch('generic_config_updater.change_applier.ConfigDBConnector', autospec=True) + def test_apply_change_given_namespace(self, mock_ConfigDBConnector, mock_get_running_config): + # Setup mock for ConfigDBConnector + mock_db = MagicMock() + mock_ConfigDBConnector.return_value = mock_db + + # Setup mock for json.load to return some running configuration + mock_get_running_config.return_value = { + "tables": { + "ACL_TABLE": { + "services_to_validate": ["aclservice"], + "validate_commands": ["acl_loader show 
table"] + }, + "PORT": { + "services_to_validate": ["portservice"], + "validate_commands": ["show interfaces status"] + } + }, + "services": { + "aclservice": { + "validate_commands": ["acl_loader show table"] + }, + "portservice": { + "validate_commands": ["show interfaces status"] + } + } + } + + # Instantiate ChangeApplier with the default namespace + applier = generic_config_updater.change_applier.ChangeApplier(namespace="asic0") + + # Prepare a change object or data that applier.apply would use + change = MagicMock() + + # Call the apply method with the change object + applier.apply(change) + + # Assert ConfigDBConnector called with the correct namespace + mock_ConfigDBConnector.assert_called_once_with(use_unix_socket_path=True, namespace="asic0") + + @patch('generic_config_updater.change_applier.ChangeApplier._get_running_config', autospec=True) + @patch('generic_config_updater.change_applier.ConfigDBConnector', autospec=True) + def test_apply_change_failure(self, mock_ConfigDBConnector, mock_get_running_config): + # Setup mock for ConfigDBConnector + mock_db = MagicMock() + mock_ConfigDBConnector.return_value = mock_db + + # Setup mock for json.load to return some running configuration + mock_get_running_config.side_effect = Exception("Failed to get running config") + # Instantiate ChangeApplier with a specific namespace to simulate applying changes in a multi-asic environment + namespace = "asic0" + applier = generic_config_updater.change_applier.ChangeApplier(namespace=namespace) + + # Prepare a change object or data that applier.apply would use + change = MagicMock() + + # Test the behavior when os.system fails + with self.assertRaises(Exception) as context: + applier.apply(change) + + self.assertTrue('Failed to get running config' in str(context.exception)) + + @patch('generic_config_updater.change_applier.ChangeApplier._get_running_config', autospec=True) + @patch('generic_config_updater.change_applier.ConfigDBConnector', autospec=True) + def 
test_apply_patch_with_empty_tables_failure(self, mock_ConfigDBConnector, mock_get_running_config): + # Setup mock for ConfigDBConnector + mock_db = MagicMock() + mock_ConfigDBConnector.return_value = mock_db + + # Setup mock for json.load to simulate configuration where crucial tables are unexpectedly empty + mock_get_running_config.return_value = { + "tables": { + # Simulate empty tables or missing crucial configuration + }, + "services": { + # Normally, services would be listed here + } + } + + # Instantiate ChangeApplier with a specific namespace to simulate applying changes in a multi-asic environment + applier = generic_config_updater.change_applier.ChangeApplier(namespace="asic0") + + # Prepare a change object or data that applier.apply would use, simulating a patch that requires non-empty tables + change = MagicMock() + + # Apply the patch + try: + assert(applier.apply(change) != 0) + except Exception: + pass diff --git a/tests/generic_config_updater/multiasic_generic_updater_test.py b/tests/generic_config_updater/multiasic_generic_updater_test.py new file mode 100644 index 0000000000..4a55eb98be --- /dev/null +++ b/tests/generic_config_updater/multiasic_generic_updater_test.py @@ -0,0 +1,167 @@ +import json +import jsonpatch +import unittest +from unittest.mock import patch, MagicMock + +import generic_config_updater.change_applier +import generic_config_updater.generic_updater +import generic_config_updater.services_validator +import generic_config_updater.gu_common + +# import sys +# sys.path.insert(0,'../../generic_config_updater') +# import generic_updater as gu + +class TestMultiAsicPatchApplier(unittest.TestCase): + + @patch('generic_config_updater.gu_common.ConfigWrapper.get_empty_tables', return_value=[]) + @patch('generic_config_updater.gu_common.ConfigWrapper.get_config_db_as_json') + @patch('generic_config_updater.gu_common.PatchWrapper.simulate_patch') + @patch('generic_config_updater.generic_updater.ChangeApplier') + def 
test_apply_patch_specific_namespace(self, mock_ChangeApplier, mock_simulate_patch, mock_get_config, mock_get_empty_tables): + namespace = "asic0" + patch_data = jsonpatch.JsonPatch([ + { + "op": "add", + "path": "/ACL_TABLE/NEW_ACL_TABLE", + "value": { + "policy_desc": "New ACL Table", + "ports": ["Ethernet1", "Ethernet2"], + "stage": "ingress", + "type": "L3" + } + }, + { + "op": "replace", + "path": "/PORT/Ethernet1/mtu", + "value": "9200" + } + ]) + + original_config = { + "ACL_TABLE": { + "MY_ACL_TABLE": { + "policy_desc": "My ACL", + "ports": ["Ethernet1", "Ethernet2"], + "stage": "ingress", + "type": "L3" + } + }, + "PORT": { + "Ethernet1": { + "alias": "fortyGigE0/0", + "description": "fortyGigE0/0", + "index": "0", + "lanes": "29,30,31,32", + "mtu": "9100", + "pfc_asym": "off", + "speed": "40000" + }, + "Ethernet2": { + "alias": "fortyGigE0/100", + "description": "fortyGigE0/100", + "index": "25", + "lanes": "125,126,127,128", + "mtu": "9100", + "pfc_asym": "off", + "speed": "40000" + } + } + } + + applied_config = { + "ACL_TABLE": { + "MY_ACL_TABLE": { + "policy_desc": "My ACL", + "ports": ["Ethernet1", "Ethernet2"], + "stage": "ingress", + "type": "L3" + }, + "NEW_ACL_TABLE": { + "policy_desc": "New ACL Table", + "ports": [ + "Ethernet1", + "Ethernet2" + ], + "stage": "ingress", + "type": "L3" + } + }, + "PORT": { + "Ethernet1": { + "alias": "fortyGigE0/0", + "description": "fortyGigE0/0", + "index": "0", + "lanes": "29,30,31,32", + "mtu": "9200", + "pfc_asym": "off", + "speed": "40000" + }, + "Ethernet2": { + "alias": "fortyGigE0/100", + "description": "fortyGigE0/100", + "index": "25", + "lanes": "125,126,127,128", + "mtu": "9100", + "pfc_asym": "off", + "speed": "40000" + } + } + } + + mock_get_config.side_effect = [ + original_config, + original_config, + original_config, + applied_config + ] + + mock_simulate_patch.return_value = { + "ACL_TABLE": { + "MY_ACL_TABLE": { + "policy_desc": "My ACL", + "ports": [ + "Ethernet1", "Ethernet2" + ], + "stage": 
"ingress", + "type": "L3" + }, + "NEW_ACL_TABLE": { + "policy_desc": "New ACL Table", + "ports": [ + "Ethernet1", + "Ethernet2" + ], + "stage": "ingress", + "type": "L3" + } + }, + "PORT": { + "Ethernet1": { + "alias": "fortyGigE0/0", + "description": "fortyGigE0/0", + "index": "0", + "lanes": "29,30,31,32", + "mtu": "9200", + "pfc_asym": "off", + "speed": "40000" + }, + "Ethernet2": { + "alias": "fortyGigE0/100", + "description": "fortyGigE0/100", + "index": "25", + "lanes": "125,126,127,128", + "mtu": "9100", + "pfc_asym": "off", + "speed": "40000" + } + } + } + + patch_applier = generic_config_updater.generic_updater.PatchApplier(namespace=namespace) + + # Apply the patch and verify + patch_applier.apply(patch_data) + + # Assertions to ensure the namespace is correctly used in underlying calls + mock_ChangeApplier.assert_called_once_with(namespace=namespace)