diff --git a/README.md b/README.md index 11cf3367..509a6161 100644 --- a/README.md +++ b/README.md @@ -94,22 +94,25 @@ class BackendA(DSync): ## Update Remote system on Sync -To update a remote system, you need to extend your DSync object to define your own `create`, `update` or `delete` method for each model. The DSync engine will automatically look for `create_`, `update_` or `delete_` depending on the status of the object. -Each method need to accept 2 dict, `keys` and `params` and should return a DSyncModel object: -- `keys` dict containing all identifier for this object -- `params` dict containing all attributes for this object +To update a remote system, you need to extend your DSyncModel class(es) to define your own `create`, `update` and/or `delete` methods for each model. +A DSyncModel instance stores a reference to its parent DSync class in case you need to use it to look up other model instances from the DSync's cache. ```python -class BackendA(DSync): +class Device(DSyncModel): [...] - def create_device(self, keys, params): + @classmethod + def create(cls, dsync, ids, attrs): ## TODO add your own logic here to create the device on the remote system - item = self.default_create(object_type="device", keys=keys, params=params) - return item + return cls(**ids, **attrs) - def update_device(self, keys, params): + def update(self, attrs): ## TODO add your own logic here to update the device on the remote system - item = self.default_update(object_type="device", keys=keys, params=params) - return item + for attr, value in attrs.items(): + setattr(self, attr, value) + return self + + def delete(self): + ## TODO add your own logic here to delete the device on the remote system + return self ``` diff --git a/dsync/__init__.py b/dsync/__init__.py index 5f9e8b30..a73548c2 100644 --- a/dsync/__init__.py +++ b/dsync/__init__.py @@ -11,11 +11,12 @@ See the License for the specific language governing permissions and limitations under the License. 
""" -from inspect import isclass -import logging from collections import defaultdict from collections.abc import Iterable as ABCIterable, Mapping as ABCMapping -from typing import Iterable, List, Mapping, Optional, Tuple, Type, Union +import enum +from inspect import isclass +import logging +from typing import ClassVar, Iterable, List, Mapping, MutableMapping, Optional, Tuple, Type, Union from pydantic import BaseModel @@ -34,6 +35,29 @@ logger = logging.getLogger(__name__) +class DSyncFlags(enum.Flag): + """Flags that can be passed to a sync_* or diff_* call to affect its behavior.""" + + NONE = 0 + + CONTINUE_ON_FAILURE = enum.auto() + """Continue synchronizing even if failures are encountered when syncing individual models.""" + + SKIP_UNMATCHED_SRC = enum.auto() + """Ignore objects that only exist in the source/"from" DSync when determining diffs and syncing. + + If this flag is set, no new objects will be created in the target/"to" DSync. + """ + + SKIP_UNMATCHED_DST = enum.auto() + """Ignore objects that only exist in the target/"to" DSync when determining diffs and syncing. + + If this flag is set, no objects will be deleted from the target/"to" DSync. + """ + + SKIP_UNMATCHED_BOTH = SKIP_UNMATCHED_SRC | SKIP_UNMATCHED_DST + + class DSyncModel(BaseModel): """Base class for all DSync object models. @@ -47,26 +71,26 @@ class DSyncModel(BaseModel): be included in **at most** one of these three tuples. """ - _modelname: str = "dsyncmodel" + _modelname: ClassVar[str] = "dsyncmodel" """Name of this model, used by DSync to store and look up instances of this model or its equivalents. Lowercase by convention; typically corresponds to the class name, but that is not enforced. """ - _identifiers: Tuple[str, ...] = () + _identifiers: ClassVar[Tuple[str, ...]] = () """List of model fields which together uniquely identify an instance of this model. This identifier MUST be globally unique among all instances of this class. """ - _shortname: Tuple[str, ...] 
= () + _shortname: ClassVar[Tuple[str, ...]] = () """Optional: list of model fields that together form a shorter identifier of an instance. This MUST be locally unique (e.g., interface shortnames MUST be unique among all interfaces on a given device), but does not need to be guaranteed to be globally unique among all instances. """ - _attributes: Tuple[str, ...] = () + _attributes: ClassVar[Tuple[str, ...]] = () """Optional: list of additional model fields (beyond those in `_identifiers`) that are relevant to this model. Only the fields in `_attributes` (as well as any `_children` fields, see below) will be considered @@ -77,12 +101,21 @@ class DSyncModel(BaseModel): Note: inclusion in `_attributes` is mutually exclusive from inclusion in `_identifiers`; a field cannot be in both! """ - _children: Mapping[str, str] = {} + _children: ClassVar[Mapping[str, str]] = {} """Optional: dict of `{_modelname: field_name}` entries describing how to store "child" models in this model. When calculating a Diff or performing a sync, DSync will automatically recurse into these child models. """ + dsync: Optional["DSync"] = None + """Optional: the DSync instance that owns this model instance.""" + + class Config: # pylint: disable=too-few-public-methods + """Pydantic class configuration.""" + + # Let us have a DSync as an instance variable even though DSync is not a Pydantic model itself. + arbitrary_types_allowed = True + def __init_subclass__(cls): """Validate that the various class attribute declarations correspond to actual instance fields. @@ -120,6 +153,54 @@ def __repr__(self): def __str__(self): return self.get_unique_id() + @classmethod + def create(cls, dsync: "DSync", ids: dict, attrs: dict) -> Optional["DSyncModel"]: + """Instantiate this class, along with any platform-specific data creation. 
+ + Args: + dsync: The master data store for other DSyncModel instances that we might need to reference + ids: Dictionary of unique-identifiers needed to create the new object + attrs: Dictionary of additional attributes to set on the new object + + Returns: + DSyncModel: instance of this class, if all data was successfully created. + None: if data creation failed in such a way that child objects of this model should not be created. + + Raises: + ObjectNotCreated: if an error occurred. + """ + return cls(**ids, dsync=dsync, **attrs) + + def update(self, attrs: dict) -> Optional["DSyncModel"]: + """Update the attributes of this instance, along with any platform-specific data updates. + + Args: + attrs: Dictionary of attributes to update on the object + + Returns: + DSyncModel: this instance, if all data was successfully updated. + None: if data updates failed in such a way that child objects of this model should not be modified. + + Raises: + ObjectNotUpdated: if an error occurred. + """ + for attr, value in attrs.items(): + # TODO: enforce that only attrs in self._attributes can be updated in this way? + setattr(self, attr, value) + return self + + def delete(self) -> Optional["DSyncModel"]: + """Delete any platform-specific data corresponding to this instance. + + Returns: + DSyncModel: this instance, if all data was successfully deleted. + None: if data deletion failed in such a way that child objects of this model should not be deleted. + + Raises: + ObjectNotDeleted: if an error occurred. 
+ """ + return self + @classmethod def get_type(cls): """Return the type AKA modelname of the object or the class @@ -236,23 +317,25 @@ def remove_child(self, child: "DSyncModel"): class DSync: """Class for storing a group of DSyncModel instances and diffing or synchronizing to another DSync instance.""" - # Add mapping to objects here: + # Add mapping of names to specific model classes here: # modelname1 = MyModelClass1 # modelname2 = MyModelClass2 - top_level: List[str] = [] + top_level: ClassVar[List[str]] = [] """List of top-level modelnames to begin from when diffing or synchronizing.""" + _data: MutableMapping[str, MutableMapping[str, DSyncModel]] + """Defaultdict storing model instances. + + `self._data[modelname][unique_id] == model_instance` + """ + def __init__(self): """Generic initialization function. Subclasses should be careful to call super().__init__() if they override this method. """ self._data = defaultdict(dict) - """Defaultdict storing model instances. - - `self._data[modelname][unique_id] == model_instance` - """ def __init_subclass__(cls): """Validate that references to specific DSyncModels use the correct modelnames. @@ -270,109 +353,160 @@ def load(self): """Load all desired data from whatever backend data source into this instance.""" # No-op in this generic class - def sync_from(self, source: "DSync"): + # ------------------------------------------------------------------------------ + # Synchronization between DSync instances + # ------------------------------------------------------------------------------ + + def sync_from(self, source: "DSync", diff_class: Type[Diff] = Diff, flags: DSyncFlags = DSyncFlags.NONE): """Synchronize data from the given source DSync object into the current DSync object. Args: source (DSync): object to sync data from into this one + diff_class (class): Diff or subclass thereof to use to calculate the diffs to use for synchronization + flags (DSyncFlags): Flags influencing the behavior of this sync. 
""" - diff = self.diff_from(source) + diff = self.diff_from(source, diff_class=diff_class, flags=flags) + logger.info("Beginning sync") for child in diff.get_children(): - self._sync_from_diff_element(child) + self._sync_from_diff_element(child, flags=flags) + logger.info("Sync complete") - def sync_to(self, target: "DSync"): + def sync_to(self, target: "DSync", diff_class: Type[Diff] = Diff, flags: DSyncFlags = DSyncFlags.NONE): """Synchronize data from the current DSync object into the given target DSync object. Args: target (DSync): object to sync data into from this one. + diff_class (class): Diff or subclass thereof to use to calculate the diffs to use for synchronization + flags (DSyncFlags): Flags influencing the behavior of this sync. """ - target.sync_from(self) + target.sync_from(self, diff_class=diff_class, flags=flags) - def _sync_from_diff_element(self, element: DiffElement, parent_model: DSyncModel = None) -> bool: + def _sync_from_diff_element( + self, element: DiffElement, flags: DSyncFlags = DSyncFlags.NONE, parent_model: DSyncModel = None, + ): """Synchronize a given DiffElement (and its children, if any) into this DSync. Helper method for `sync_from`/`sync_to`; this generally shouldn't be called on its own. Args: element: DiffElement to synchronize diffs from + flags (DSyncFlags): Flags influencing the behavior of this sync. parent_model: Parent object to update (`add_child`/`remove_child`) if the sync creates/deletes an object. 
- - Returns: - bool: Return False if there is nothing to sync """ - if not element.has_diffs(): - return False - - if element.source_attrs is None: - obj = self.delete_object(object_type=element.type, keys=element.keys) - if parent_model: - parent_model.remove_child(obj) - elif element.dest_attrs is None: - obj = self.create_object( - object_type=element.type, - keys=element.keys, - params={attr_key: element.source_attrs[attr_key] for attr_key in element.get_attrs_keys()}, + # pylint: disable=too-many-branches + # GFM: I made a few attempts at refactoring this to reduce the branching, but found that it was less readable. + # So let's live with the slightly too high number of branches (14/12) for now. + object_class = getattr(self, element.type) + obj = self.get(object_class, element.keys) + attrs = ( + {attr_key: element.source_attrs[attr_key] for attr_key in element.get_attrs_keys()} + if element.source_attrs is not None + else {} + ) + + try: + if element.action == "create": + if obj: + raise ObjectNotCreated(f"Failed to create {object_class.get_type()} {element.keys} - it exists!") + logger.info("Creating %s %s (%s)", object_class.get_type(), element.keys, attrs) + obj = object_class.create(dsync=self, ids=element.keys, attrs=attrs) + elif element.action == "update": + if not obj: + raise ObjectNotUpdated(f"Failed to update {object_class.get_type()} {element.keys} - not found!") + logger.info("Updating %s %s with %s", object_class.get_type(), element.keys, attrs) + obj = obj.update(attrs=attrs) + elif element.action == "delete": + if not obj: + raise ObjectNotDeleted(f"Failed to delete {object_class.get_type()} {element.keys} - not found!") + logger.info("Deleting %s %s", object_class.get_type(), element.keys) + obj = obj.delete() + except ObjectCrudException as exception: + logger.error( + "Error during %s of %s %s (%s): %s", + element.action, + object_class.get_type(), + element.keys, + attrs, + exception, ) + if not flags & DSyncFlags.CONTINUE_ON_FAILURE: + 
raise + + if obj is None: + logger.warning("Not syncing children of %s %s", element.type, element.keys) + return + + if element.action == "create": + self.add(obj) if parent_model: parent_model.add_child(obj) - elif element.source_attrs != element.dest_attrs: - obj = self.update_object( - object_type=element.type, - keys=element.keys, - params={attr_key: element.source_attrs[attr_key] for attr_key in element.get_attrs_keys()}, - ) - else: - object_class = getattr(self, element.type) - uid = object_class.create_unique_id(**element.keys) - obj = self.get(element.type, uid) + elif element.action == "delete": + self.remove(obj) + if parent_model: + parent_model.remove_child(obj) for child in element.get_children(): - self._sync_from_diff_element(child, parent_model=obj) + self._sync_from_diff_element(child, flags=flags, parent_model=obj) - return True + # ------------------------------------------------------------------------------ + # Diff calculation and construction + # ------------------------------------------------------------------------------ - def diff_from(self, source: "DSync") -> Diff: + def diff_from(self, source: "DSync", diff_class: Type[Diff] = Diff, flags: DSyncFlags = DSyncFlags.NONE) -> Diff: """Generate a Diff describing the difference from the other DSync to this one. Args: source (DSync): Object to diff against. + diff_class (class): Diff or subclass thereof to use for diff calculation and storage. + flags (DSyncFlags): Flags influencing the behavior of this diff operation. 
""" - diff = Diff() + logger.info("Beginning diff") + diff = diff_class() for obj_type in intersection(self.top_level, source.top_level): diff_elements = self._diff_objects( - source=source.get_all(obj_type), dest=self.get_all(obj_type), source_root=source, + source=source.get_all(obj_type), dest=self.get_all(obj_type), source_root=source, flags=flags, ) for diff_element in diff_elements: diff.add(diff_element) + # Notify the diff that it has been fully populated, in case it wishes to print, save to a file, etc. + logger.info("Diff complete") + diff.complete() return diff - def diff_to(self, target: "DSync") -> Diff: + def diff_to(self, target: "DSync", diff_class: Type[Diff] = Diff, flags: DSyncFlags = DSyncFlags.NONE) -> Diff: """Generate a Diff describing the difference from this DSync to another one. Args: target (DSync): Object to diff against. + diff_class (class): Diff or subclass thereof to use for diff calculation and storage. + flags (DSyncFlags): Flags influencing the behavior of this diff operation. """ - return target.diff_from(self) + return target.diff_from(self, diff_class=diff_class, flags=flags) def _diff_objects( - self, source: Iterable[DSyncModel], dest: Iterable[DSyncModel], source_root: "DSync" + self, + source: Iterable[DSyncModel], + dest: Iterable[DSyncModel], + source_root: "DSync", + flags: DSyncFlags = DSyncFlags.NONE, ) -> List[DiffElement]: """Generate a list of DiffElement between the given lists of objects. Helper method for `diff_from`/`diff_to`; this generally shouldn't be called on its own. Args: - source: DSyncModel instances retrieved from another DSync instance - dest: DSyncModel instances retrieved from this DSync instance - source_root (DSync): The other DSync object being diffed against (owner of the `source` models). 
+ source: DSyncModel instances retrieved from another DSync instance + dest: DSyncModel instances retrieved from this DSync instance + source_root (DSync): The other DSync object being diffed against (owner of the `source` models, if any) + flags (DSyncFlags): Flags influencing the behavior of this diff operation. Raises: - TypeError: if the source and dest args are not the same type, or if that type is unsupported + TypeError: if the source and dest args are not the same type, or if that type is unsupported """ diffs = [] @@ -395,6 +529,13 @@ def _diff_objects( for uid in combined_dict: src_obj, dst_obj = combined_dict[uid] + if flags & DSyncFlags.SKIP_UNMATCHED_SRC and not dst_obj: + logger.debug("Skipping unmatched source object %s", src_obj) + continue + if flags & DSyncFlags.SKIP_UNMATCHED_DST and not src_obj: + logger.debug("Skipping unmatched dest object %s", dst_obj) + continue + if src_obj: diff_element = DiffElement( obj_type=src_obj.get_type(), name=src_obj.get_shortname(), keys=src_obj.get_identifiers() @@ -413,7 +554,7 @@ def _diff_objects( diff_element.add_attrs(source=None, dest=dst_obj.get_attrs()) # Recursively diff the children of src_obj and dst_obj and attach the resulting diffs to the diff_element - self._diff_child_objects(diff_element, src_obj, dst_obj, source_root) + self._diff_child_objects(diff_element, src_obj, dst_obj, source_root, flags) diffs.append(diff_element) @@ -430,6 +571,7 @@ def _validate_objects_for_diff(combined_dict: Mapping[str, Tuple[Optional[DSyncM ValueError: If any pair of objects in the dict have differing get_shortname() or get_identifiers() values. """ for uid in combined_dict: + # TODO: should we check/enforce whether all source models have the same DSync, whether all dest likewise? + # TODO: should we check/enforce whether ALL DSyncModels in this dict have the same get_type() output? 
src_obj, dst_obj = combined_dict[uid] if src_obj and dst_obj: @@ -440,12 +582,13 @@ def _validate_objects_for_diff(combined_dict: Mapping[str, Tuple[Optional[DSyncM if src_obj.get_identifiers() != dst_obj.get_identifiers(): raise ValueError(f"Keys mismatch: {src_obj.get_identifiers()} vs {dst_obj.get_identifiers()}") - def _diff_child_objects( + def _diff_child_objects( # pylint: disable=too-many-arguments self, diff_element: DiffElement, src_obj: Optional[DSyncModel], dst_obj: Optional[DSyncModel], source_root: "DSync", + flags: DSyncFlags, ): """For all children of the given DSyncModel pair, diff them recursively, adding diffs to the given diff_element. @@ -469,152 +612,43 @@ def _diff_child_objects( raise RuntimeError("Called with neither src_obj nor dest_obj??") for child_type, child_fieldname in children_mapping.items(): - src_uids: List[str] = getattr(src_obj, child_fieldname) if src_obj else [] - dst_uids: List[str] = getattr(dst_obj, child_fieldname) if dst_obj else [] + # for example, child_type == "device" and child_fieldname == "devices" + + # for example, getattr(src_obj, "devices") --> list of device uids + # --> src_dsync.get_by_uids(, "device") --> list of device instances + src_objs = source_root.get_by_uids(getattr(src_obj, child_fieldname), child_type) if src_obj else [] + dst_objs = self.get_by_uids(getattr(dst_obj, child_fieldname), child_type) if dst_obj else [] + for child_diff_element in self._diff_objects( - source=source_root.get_by_uids(src_uids, child_type), - dest=self.get_by_uids(dst_uids, child_type), - source_root=source_root, + source=src_objs, dest=dst_objs, source_root=source_root, flags=flags ): diff_element.add_child(child_diff_element) - def create_object(self, object_type, keys, params): - """TODO: move to a `create` method on DSyncModel class.""" - return self._crud_change(action="create", keys=keys, object_type=object_type, params=params) - - def update_object(self, object_type, keys, params): - """TODO: move to a `update` 
method on DSyncModel class.""" - return self._crud_change(action="update", object_type=object_type, keys=keys, params=params) - - def delete_object(self, object_type, keys, params=None): - """TODO: move to a `delete` method on DSyncModel class.""" - if not params: - params = {} - return self._crud_change(action="delete", object_type=object_type, keys=keys, params=params) - - def _crud_change(self, action: str, object_type: str, keys: dict, params: dict) -> DSyncModel: - """Dispatcher function to Create, Update or Delete an object. - - Based on the type of the action and the type of the object, - we'll first try to execute a function named after the object type and the action - "{action}_{object_type}" update_interface or delete_device ... - if such function is not available, the default function will be executed instead - default_create, default_update or default_delete - - The goal is to all each DSync class to insert its own logic per object type when we manipulate these objects - - TODO: move to DSyncModel class? - - Args: - action (str): type of action, must be create, update or delete - object_type (str): Attribute name on this class describing the desired DSyncModel subclass. 
- keys (dict): Dictionary containing the primary attributes of an object and their value - params (dict): Dictionary containing the attributes of an object and their value - - Raises: - ObjectCrudException: Object type does not exist in this class - - Returns: - DSyncModel: object created/updated/deleted - """ - - if not hasattr(self, object_type): - if action == "create": - raise ObjectNotCreated(f"Unable to find object type {object_type}") - if action == "update": - raise ObjectNotUpdated(f"Unable to find object type {object_type}") - if action == "delete": - raise ObjectNotDeleted(f"Unable to find object type {object_type}") - raise ObjectCrudException(f"Unable to find object type {object_type}") - - # Check if a specific crud function is available - # update_interface or create_device etc ... - # If not apply the default one - - if hasattr(self, f"{action}_{object_type}"): - item = getattr(self, f"{action}_{object_type}")(keys=keys, params=params) - logger.debug("%sd %s - %s", action, object_type, params) - else: - item = getattr(self, f"default_{action}")(object_type=object_type, keys=keys, params=params) - logger.debug("%sd %s = %s - %s (default)", action, object_type, keys, params) - return item - - # ---------------------------------------------------------------------------- - def default_create(self, object_type, keys, params): - """Default function to create a new object in the local storage. 
- - This function will be called if a more specific function of type create_ is not defined - - Args: - object_type (str): Attribute name on this class identifying the DSyncModel subclass of the object - keys (dict): Dictionary containing the primary attributes of an object and their value - params (dict): Dictionary containing the attributes of an object and their value - - Returns: - DSyncModel: Return the newly created object - """ - object_class = getattr(self, object_type) - item = object_class(**keys, **params) - self.add(item) - return item - - def default_update(self, object_type, keys, params): - """Update an object locally based on its primary keys and attributes. - - This function will be called if a more specific function of type update_ is not defined - - Args: - object_type (str): Attribute name on this class identifying the DSyncModel subclass of the object - keys (dict): Dictionary containing the primary attributes of an object and their value - params (dict): Dictionary containing the attributes of an object and their value - - Returns: - DSyncModel: Return the object after update - """ - object_class = getattr(self, object_type) - uid = object_class.create_unique_id(**keys) - item = self.get(object_type, uid) - - for attr, value in params.items(): - setattr(item, attr, value) - - return item - - def default_delete(self, object_type, keys, params): # pylint: disable=unused-argument - """Delete an object locally based on its primary keys and attributes. - - This function will be called if a more specific function of type delete_ is not defined - - Args: - object_type (str): Attribute name on this class identifying the DSyncModel subclass of the object - keys (dict): Dictionnary containings the primary attributes of an object and their value - params: Unused argument included only for parallels to the other APIs. 
- - Returns: - DSyncModel: Return the object that has been deleted - """ - object_class = getattr(self, object_type) - uid = object_class.create_unique_id(**keys) - item = self.get(object_type, uid) - self.remove(item) - return item - # ------------------------------------------------------------------------------ # Object Storage Management # ------------------------------------------------------------------------------ - def get(self, obj: Union[str, DSyncModel, Type[DSyncModel]], uid: str) -> Optional[DSyncModel]: + def get(self, obj: Union[str, DSyncModel, Type[DSyncModel]], identifier: Union[str, dict]) -> Optional[DSyncModel]: """Get one object from the data store based on its unique id. Args: obj (class, DSyncModel, str): DSyncModel class or DSyncModel instance or modelname string - uid (str): Unique identifier of the object to retrieve + identifier (str, dict): Unique ID of the object to retrieve, or dict of unique identifier keys/values """ if isinstance(obj, str): modelname = obj + if not hasattr(self, obj): + return None + object_class = getattr(self, obj) else: + object_class = obj modelname = obj.get_type() + if isinstance(identifier, str): + uid = identifier + else: + uid = object_class.create_unique_id(**identifier) + return self._data[modelname].get(uid) def get_all(self, obj): @@ -667,6 +701,9 @@ def add(self, obj: DSyncModel): if uid in self._data[modelname]: raise ObjectAlreadyExists(f"Object {uid} already present") + if not obj.dsync: + obj.dsync = self + self._data[modelname][uid] = obj def remove(self, obj: DSyncModel): @@ -684,4 +721,11 @@ def remove(self, obj: DSyncModel): if uid not in self._data[modelname]: raise ObjectNotFound(f"Object {uid} not present") + if obj.dsync is self: + obj.dsync = None + del self._data[modelname][uid] + + +# DSyncModel references DSync and DSync references DSyncModel. 
Break the typing loop: +DSyncModel.update_forward_refs() diff --git a/dsync/diff.py b/dsync/diff.py index 9acf801e..48f4dd3c 100644 --- a/dsync/diff.py +++ b/dsync/diff.py @@ -31,6 +31,13 @@ def __init__(self): `self.children[group][unique_id] == DiffElement(...)` """ + def complete(self): + """Method to call when this Diff has been fully populated with data and is "complete". + + The default implementation does nothing, but a subclass could use this, for example, to save + the completed Diff to a file or database record. + """ + def add(self, element: "DiffElement"): """Add a new DiffElement to the changeset of this Diff. @@ -134,6 +141,26 @@ def __str__(self): """Basic string representation of a DiffElement.""" return f"{self.type} : {self.name} : {self.keys} : {self.source_attrs} : {self.dest_attrs}" + @property + def action(self) -> Optional[str]: + """Action, if any, that should be taken to remediate the diffs described by this element. + + Returns: + str: "create", "update", "delete", or None + """ + if self.source_attrs is not None and self.dest_attrs is None: + return "create" + if self.source_attrs is None and self.dest_attrs is not None: + return "delete" + if ( + self.source_attrs is not None + and self.dest_attrs is not None + and any(self.source_attrs[attr_key] != self.dest_attrs[attr_key] for attr_key in self.get_attrs_keys()) + ): + return "update" + + return None + # TODO: separate into set_source_attrs() and set_dest_attrs() methods, or just use direct property access instead? def add_attrs(self, source: Optional[dict] = None, dest: Optional[dict] = None): """Set additional attributes of a source and/or destination item that may result in diffs.""" diff --git a/examples/example1/README.md b/examples/example1/README.md index bba42372..cdf43dc7 100644 --- a/examples/example1/README.md +++ b/examples/example1/README.md @@ -1,6 +1,6 @@ -This is a simple example to show how dsync can be used to compare and syncronize multiple data sources. 
+This is a simple example to show how dsync can be used to compare and synchronize multiple data sources. For this example, we have a shared model for Device and Interface defined in `models.py` And we have 3 instances of DSync based on the same model but with different values (BackendA, BackendB & BackendC). @@ -46,4 +46,4 @@ diff_a_b = a.diff_to(b) diff_a_b.print_detailed() ``` -> In the Device model, the role is not defined as an attribute so it's not showned with we are comparing the different objects, even if the value is different. +> In the Device model, the role is not defined as an attribute so it's not shown when we are comparing the different objects, even if the value is different. diff --git a/poetry.lock b/poetry.lock index ae4425c9..6129af59 100644 --- a/poetry.lock +++ b/poetry.lock @@ -130,6 +130,17 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" version = "0.4.3" +[[package]] +category = "dev" +description = "Code coverage measurement for Python" +name = "coverage" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +version = "5.3" + +[package.extras] +toml = ["toml"] + [[package]] category = "dev" description = "Decorators for Humans" @@ -525,6 +536,21 @@ version = ">=0.12" checkqa_mypy = ["mypy (0.780)"] testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] +[[package]] +category = "dev" +description = "Pytest plugin for measuring coverage." 
+name = "pytest-cov" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "2.10.1" + +[package.dependencies] +coverage = ">=4.4" +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests (2.0.2)", "six", "pytest-xdist", "virtualenv"] + [[package]] category = "dev" description = "YAML parser and emitter for Python" @@ -707,7 +733,7 @@ docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] testing = ["jaraco.itertools", "func-timeout"] [metadata] -content-hash = "a0fae8962cbb37a012c040f98321dc5506ca698896694e5634996c3d47941dc8" +content-hash = "bea8d554afbb65ce4e439f8b4c026abcbef2a308a587a6f5bd1ad4dbdb8da4c6" python-versions = "^3.7" [metadata.files] @@ -759,6 +785,42 @@ colorama = [ {file = "colorama-0.4.3-py2.py3-none-any.whl", hash = "sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff"}, {file = "colorama-0.4.3.tar.gz", hash = "sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1"}, ] +coverage = [ + {file = "coverage-5.3-cp27-cp27m-macosx_10_13_intel.whl", hash = "sha256:bd3166bb3b111e76a4f8e2980fa1addf2920a4ca9b2b8ca36a3bc3dedc618270"}, + {file = "coverage-5.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:9342dd70a1e151684727c9c91ea003b2fb33523bf19385d4554f7897ca0141d4"}, + {file = "coverage-5.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:63808c30b41f3bbf65e29f7280bf793c79f54fb807057de7e5238ffc7cc4d7b9"}, + {file = "coverage-5.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:4d6a42744139a7fa5b46a264874a781e8694bb32f1d76d8137b68138686f1729"}, + {file = "coverage-5.3-cp27-cp27m-win32.whl", hash = "sha256:86e9f8cd4b0cdd57b4ae71a9c186717daa4c5a99f3238a8723f416256e0b064d"}, + {file = "coverage-5.3-cp27-cp27m-win_amd64.whl", hash = "sha256:7858847f2d84bf6e64c7f66498e851c54de8ea06a6f96a32a1d192d846734418"}, + {file = "coverage-5.3-cp27-cp27mu-manylinux1_i686.whl", hash = 
"sha256:530cc8aaf11cc2ac7430f3614b04645662ef20c348dce4167c22d99bec3480e9"}, + {file = "coverage-5.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:381ead10b9b9af5f64646cd27107fb27b614ee7040bb1226f9c07ba96625cbb5"}, + {file = "coverage-5.3-cp35-cp35m-macosx_10_13_x86_64.whl", hash = "sha256:71b69bd716698fa62cd97137d6f2fdf49f534decb23a2c6fc80813e8b7be6822"}, + {file = "coverage-5.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:1d44bb3a652fed01f1f2c10d5477956116e9b391320c94d36c6bf13b088a1097"}, + {file = "coverage-5.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:1c6703094c81fa55b816f5ae542c6ffc625fec769f22b053adb42ad712d086c9"}, + {file = "coverage-5.3-cp35-cp35m-win32.whl", hash = "sha256:cedb2f9e1f990918ea061f28a0f0077a07702e3819602d3507e2ff98c8d20636"}, + {file = "coverage-5.3-cp35-cp35m-win_amd64.whl", hash = "sha256:7f43286f13d91a34fadf61ae252a51a130223c52bfefb50310d5b2deb062cf0f"}, + {file = "coverage-5.3-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:c851b35fc078389bc16b915a0a7c1d5923e12e2c5aeec58c52f4aa8085ac8237"}, + {file = "coverage-5.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:aac1ba0a253e17889550ddb1b60a2063f7474155465577caa2a3b131224cfd54"}, + {file = "coverage-5.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2b31f46bf7b31e6aa690d4c7a3d51bb262438c6dcb0d528adde446531d0d3bb7"}, + {file = "coverage-5.3-cp36-cp36m-win32.whl", hash = "sha256:c5f17ad25d2c1286436761b462e22b5020d83316f8e8fcb5deb2b3151f8f1d3a"}, + {file = "coverage-5.3-cp36-cp36m-win_amd64.whl", hash = "sha256:aef72eae10b5e3116bac6957de1df4d75909fc76d1499a53fb6387434b6bcd8d"}, + {file = "coverage-5.3-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:e8caf961e1b1a945db76f1b5fa9c91498d15f545ac0ababbe575cfab185d3bd8"}, + {file = "coverage-5.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:29a6272fec10623fcbe158fdf9abc7a5fa032048ac1d8631f14b50fbfc10d17f"}, + {file = "coverage-5.3-cp37-cp37m-manylinux1_x86_64.whl", hash = 
"sha256:2d43af2be93ffbad25dd959899b5b809618a496926146ce98ee0b23683f8c51c"}, + {file = "coverage-5.3-cp37-cp37m-win32.whl", hash = "sha256:c3888a051226e676e383de03bf49eb633cd39fc829516e5334e69b8d81aae751"}, + {file = "coverage-5.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9669179786254a2e7e57f0ecf224e978471491d660aaca833f845b72a2df3709"}, + {file = "coverage-5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0203acd33d2298e19b57451ebb0bed0ab0c602e5cf5a818591b4918b1f97d516"}, + {file = "coverage-5.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:582ddfbe712025448206a5bc45855d16c2e491c2dd102ee9a2841418ac1c629f"}, + {file = "coverage-5.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:0f313707cdecd5cd3e217fc68c78a960b616604b559e9ea60cc16795c4304259"}, + {file = "coverage-5.3-cp38-cp38-win32.whl", hash = "sha256:78e93cc3571fd928a39c0b26767c986188a4118edc67bc0695bc7a284da22e82"}, + {file = "coverage-5.3-cp38-cp38-win_amd64.whl", hash = "sha256:8f264ba2701b8c9f815b272ad568d555ef98dfe1576802ab3149c3629a9f2221"}, + {file = "coverage-5.3-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:50691e744714856f03a86df3e2bff847c2acede4c191f9a1da38f088df342978"}, + {file = "coverage-5.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:9361de40701666b034c59ad9e317bae95c973b9ff92513dd0eced11c6adf2e21"}, + {file = "coverage-5.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:c1b78fb9700fc961f53386ad2fd86d87091e06ede5d118b8a50dea285a071c24"}, + {file = "coverage-5.3-cp39-cp39-win32.whl", hash = "sha256:cb7df71de0af56000115eafd000b867d1261f786b5eebd88a0ca6360cccfaca7"}, + {file = "coverage-5.3-cp39-cp39-win_amd64.whl", hash = "sha256:47a11bdbd8ada9b7ee628596f9d97fbd3851bd9999d398e9436bd67376dbece7"}, + {file = "coverage-5.3.tar.gz", hash = "sha256:280baa8ec489c4f542f8940f9c4c2181f0306a8ee1a54eceba071a449fb870a0"}, +] decorator = [ {file = "decorator-4.4.2-py2.py3-none-any.whl", hash = "sha256:41fa54c2a0cc4ba648be4fd43cff00aedf5b9465c9bf18d64325bc225f08f760"}, {file = 
"decorator-4.4.2.tar.gz", hash = "sha256:e3a62f0520172440ca0dcc823749319382e377f37f140a0b99ef45fecb84bfe7"}, @@ -942,6 +1004,10 @@ pytest = [ {file = "pytest-6.1.0-py3-none-any.whl", hash = "sha256:1cd09785c0a50f9af72220dd12aa78cfa49cbffc356c61eab009ca189e018a33"}, {file = "pytest-6.1.0.tar.gz", hash = "sha256:d010e24666435b39a4cf48740b039885642b6c273a3f77be3e7e03554d2806b7"}, ] +pytest-cov = [ + {file = "pytest-cov-2.10.1.tar.gz", hash = "sha256:47bd0ce14056fdd79f93e1713f88fad7bdcc583dcd7783da86ef2f085a0bb88e"}, + {file = "pytest_cov-2.10.1-py2.py3-none-any.whl", hash = "sha256:45ec2d5182f89a81fc3eb29e3d1ed3113b9e9a873bcddb2a71faaab066110191"}, +] pyyaml = [ {file = "PyYAML-5.3.1-cp27-cp27m-win32.whl", hash = "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f"}, {file = "PyYAML-5.3.1-cp27-cp27m-win_amd64.whl", hash = "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76"}, diff --git a/pyproject.toml b/pyproject.toml index d4c9c1be..1c12d475 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,6 +21,7 @@ invoke = "^1.4.1" flake8 = "^3.8.3" ipython = "^7.18.1" mypy = "^0.782" +pytest-cov = "^2.10.1" [tool.poetry.scripts] dsync = 'dsync.cli:main' @@ -60,15 +61,15 @@ notes = """, XXX, """ +[tool.pylint.similarities] +# There's a lot of duplicate code in the examples/backend_*.py files - don't complain about it for now +min-similarity-lines = 20 + [tool.pytest.ini_options] testpaths = [ "tests" ] -[tool.pylint.similarities] -# There's a lot of duplicate code in the examples/backend_*.py files - don't complain about it for now -min-similarity-lines = 20 - [build-system] requires = ["poetry>=0.12"] build-backend = "poetry.masonry.api" diff --git a/tasks.py b/tasks.py index e9140d84..c3c312b1 100644 --- a/tasks.py +++ b/tasks.py @@ -107,7 +107,10 @@ def pytest(context, name=NAME, python_ver=PYTHON_VER): # https://docs.pyinvoke.org/en/latest/api/runners.html - Search for pty for more information # Install python module docker = 
f"docker run -it -v {PWD}:/local {name}-{python_ver}:latest" - context.run(f"{docker} /bin/bash -c 'poetry install && pytest -vv'", pty=True) + context.run( + f"{docker} /bin/bash -c 'poetry install && pytest --cov=dsync --cov-report html --cov-report term -vv'", + pty=True, + ) @task @@ -152,7 +155,7 @@ def mypy(context, name=NAME, python_ver=PYTHON_VER): # pty is set to true to properly run the docker commands due to the invocation process of docker # https://docs.pyinvoke.org/en/latest/api/runners.html - Search for pty for more information docker = f"docker run -it -v {PWD}:/local {name}-{python_ver}:latest" - context.run(f"{docker} sh -c \"find . -name '*.py' | xargs mypy\"", pty=True) + context.run(f"{docker} sh -c \"find . -name '*.py' | xargs mypy --show-error-codes \"", pty=True) @task diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index a6cfb5aa..65e84c8e 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -1,9 +1,11 @@ """Used to setup fixtures to be used through tests""" -from typing import List, Optional, Tuple +from typing import ClassVar, List, Optional, Tuple import pytest from dsync import DSync, DSyncModel +from dsync.diff import Diff +from dsync.exceptions import ObjectNotCreated, ObjectNotUpdated, ObjectNotDeleted @pytest.fixture() @@ -23,6 +25,36 @@ def generic_dsync_model(): return DSyncModel() +class ErrorProneModel(DSyncModel): + """Test class that sometimes throws exceptions when creating/updating/deleting instances.""" + + _counter: ClassVar[int] = 0 + + @classmethod + def create(cls, dsync: DSync, ids: dict, attrs: dict): + """As DSyncModel.create(), but periodically throw exceptions.""" + cls._counter += 1 + if not cls._counter % 3: + raise ObjectNotCreated("Random creation error!") + return super().create(dsync, ids, attrs) + + def update(self, attrs: dict): + """As DSyncModel.update(), but periodically throw exceptions.""" + # pylint: disable=protected-access + self.__class__._counter += 1 + if not 
self.__class__._counter % 3: + raise ObjectNotUpdated("Random update error!") + return super().update(attrs) + + def delete(self): + """As DSyncModel.delete(), but periodically throw exceptions.""" + # pylint: disable=protected-access + self.__class__._counter += 1 + if not self.__class__._counter % 3: + raise ObjectNotDeleted("Random deletion error!") + return super().delete() + + class Site(DSyncModel): """Concrete DSyncModel subclass representing a site or location that contains devices.""" @@ -52,7 +84,7 @@ class Device(DSyncModel): _modelname = "device" _identifiers = ("name",) - _attributes: Tuple[str, ...] = ("role",) + _attributes: ClassVar[Tuple[str, ...]] = ("role",) _children = {"interface": "interfaces"} name: str @@ -195,6 +227,34 @@ def backend_a(): return dsync +class ErrorProneSiteA(ErrorProneModel, SiteA): + """A Site that sometimes throws exceptions.""" + + +class ErrorProneDeviceA(ErrorProneModel, DeviceA): + """A Device that sometimes throws exceptions.""" + + +class ErrorProneInterface(ErrorProneModel, Interface): + """An Interface that sometimes throws exceptions.""" + + +class ErrorProneBackendA(BackendA): + """A variant of BackendA that sometimes fails to create/update/delete objects.""" + + site = ErrorProneSiteA + device = ErrorProneDeviceA + interface = ErrorProneInterface + + +@pytest.fixture +def error_prone_backend_a(): + """Provide an instance of ErrorProneBackendA subclass of DSync.""" + dsync = ErrorProneBackendA() + dsync.load() + return dsync + + class SiteB(Site): """Extend Site with a `places` list.""" @@ -256,3 +316,13 @@ def backend_b(): dsync = BackendB() dsync.load() return dsync + + +class TrackedDiff(Diff): + """Subclass of Diff that knows when it's completed.""" + + is_complete: bool = False + + def complete(self): + """Function called when the Diff has been fully constructed and populated with data.""" + self.is_complete = True diff --git a/tests/unit/test_dsync.py b/tests/unit/test_dsync.py index 3363daae..4e0d10ef 
100644 --- a/tests/unit/test_dsync.py +++ b/tests/unit/test_dsync.py @@ -1,11 +1,13 @@ """Unit tests for the DSync class.""" +import logging + import pytest -from dsync import DSync, DSyncModel -from dsync.exceptions import ObjectAlreadyExists, ObjectNotFound, ObjectNotCreated, ObjectNotUpdated, ObjectNotDeleted +from dsync import DSync, DSyncModel, DSyncFlags +from dsync.exceptions import ObjectAlreadyExists, ObjectNotFound, ObjectCrudException -from .conftest import Site, Device, Interface +from .conftest import Site, Device, Interface, TrackedDiff, BackendA def test_generic_dsync_methods(generic_dsync, generic_dsync_model): @@ -37,7 +39,8 @@ def test_generic_dsync_methods(generic_dsync, generic_dsync_model): # The generic_dsync_model has an empty identifier/unique-id assert generic_dsync.get(DSyncModel, "") == generic_dsync_model - assert generic_dsync.get(DSyncModel.get_type(), "") == generic_dsync_model + # DSync doesn't know what a "dsyncmodel" is + assert generic_dsync.get(DSyncModel.get_type(), "") is None # Wrong object-type - no match assert generic_dsync.get("", "") is None # Wrong unique-id - no match @@ -63,21 +66,11 @@ def test_generic_dsync_methods(generic_dsync, generic_dsync_model): assert list(generic_dsync.get_all(DSyncModel)) == [] assert generic_dsync.get_by_uids([""], DSyncModel) == [] - # Default (empty) DSync class doesn't know how to create any objects - with pytest.raises(ObjectNotCreated): - generic_dsync.create_object("dsyncmodel", {}, {}) - with pytest.raises(ObjectNotUpdated): - generic_dsync.update_object("dsyncmodel", {}, {}) - with pytest.raises(ObjectNotDeleted): - generic_dsync.delete_object("dsyncmodel", {}, {}) - diff_elements = generic_dsync._diff_objects( # pylint: disable=protected-access [generic_dsync_model], [generic_dsync_model], generic_dsync, ) assert len(diff_elements) == 1 assert not diff_elements[0].has_diffs() - # No-op as diff_element.has_diffs() is False - generic_dsync._sync_from_diff_element(diff_elements[0]) # 
pylint: disable=protected-access def test_dsync_subclass_validation(): @@ -117,6 +110,7 @@ def test_dsync_subclass_methods_diff_sync(backend_a, backend_b): ) assert len(diff_elements) == 4 # atl, nyc, sfo, rdu for diff_element in diff_elements: + diff_element.print_detailed() assert diff_element.has_diffs() # We don't inspect the contents of the diff elements in detail here - see test_diff_element.py for that @@ -132,7 +126,7 @@ def test_dsync_subclass_methods_diff_sync(backend_a, backend_b): check_diff_symmetry(diff_ab, diff_ba) # Perform sync of one subtree of diffs - assert backend_a._sync_from_diff_element(diff_elements[0]) is True # pylint: disable=protected-access + backend_a._sync_from_diff_element(diff_elements[0]) # pylint: disable=protected-access # Make sure the sync descended through the diff element all the way to the leafs assert backend_a.get(Interface, "nyc-spine1__eth0").description == "Interface 0/0" # was initially Interface 0 # Recheck diffs @@ -151,9 +145,12 @@ def test_dsync_subclass_methods_diff_sync(backend_a, backend_b): backend_a.sync_from(backend_b) # Make sure the sync descended through the diff elements to their children assert backend_a.get(Device, "sfo-spine1").role == "leaf" # was initially "spine" - # Recheck diffs - backend_a.diff_from(backend_b).print_detailed() - assert backend_a.diff_from(backend_b).has_diffs() is False + # Recheck diffs, using a custom Diff subclass this time. 
+ diff_ba = backend_a.diff_from(backend_b, diff_class=TrackedDiff) + assert isinstance(diff_ba, TrackedDiff) + assert diff_ba.is_complete is True + diff_ba.print_detailed() + assert diff_ba.has_diffs() is False # site_nyc and site_sfo should be updated, site_atl should be created, site_rdu should be deleted site_nyc_a = backend_a.get(Site, "nyc") @@ -201,30 +198,80 @@ def test_dsync_subclass_methods_crud(backend_a): with pytest.raises(ObjectNotFound): backend_a.remove(site_atl_a) - # Test low-level default_* CRUD APIs - backend_a.default_create("interface", {"device_name": "nyc-spine1", "name": "eth2"}, {"description": "Interface 2"}) - new_interface = backend_a.get("interface", "nyc-spine1__eth2") - assert new_interface.description == "Interface 2" - - backend_a.default_update("interface", {"device_name": "nyc-spine1", "name": "eth2"}, {"description": "Intf 2"}) - new_interface_2 = backend_a.get("interface", "nyc-spine1__eth2") - assert new_interface.description == "Intf 2" - assert new_interface_2 is new_interface - - backend_a.default_delete("interface", {"device_name": "nyc-spine1", "name": "eth2"}, {}) - assert backend_a.get("interface", "nyc-spine1__eth2") is None - - # Test higher-level *_object CRUD APIs - backend_a.create_object("device", {"name": "new_device"}, {"role": "new_role", "site_name": "nyc"}) - new_device = backend_a.get("device", "new_device") - assert new_device.role == "new_role" - assert new_device.site_name == "nyc" - - backend_a.update_object("device", {"name": "new_device"}, {"role": "another_role"}) - new_device_2 = backend_a.get(Device, "new_device") - assert new_device_2 is new_device - assert new_device.role == "another_role" - - backend_a.delete_object("device", {"name": "new_device"}) - new_device_3 = backend_a.get("device", "new_device") - assert new_device_3 is None + +def test_dsync_subclass_methods_sync_exceptions(caplog, error_prone_backend_a, backend_b): + """Test handling of exceptions during a sync.""" + 
caplog.set_level(logging.INFO) + with pytest.raises(ObjectCrudException): + error_prone_backend_a.sync_from(backend_b) + + error_prone_backend_a.sync_from(backend_b, flags=DSyncFlags.CONTINUE_ON_FAILURE) + # Not all sync operations succeeded on the first try + remaining_diffs = error_prone_backend_a.diff_from(backend_b) + remaining_diffs.print_detailed() + assert remaining_diffs.has_diffs() + + # Some ERROR messages should have been logged + assert [record.message for record in caplog.records if record.levelname == "ERROR"] != [] + caplog.clear() + + # Retry up to 10 times, we should sync successfully eventually + for i in range(10): + print(f"Sync retry #{i}") + error_prone_backend_a.sync_from(backend_b, flags=DSyncFlags.CONTINUE_ON_FAILURE) + remaining_diffs = error_prone_backend_a.diff_from(backend_b) + remaining_diffs.print_detailed() + if remaining_diffs.has_diffs(): + # If we still have diffs, some ERROR messages should have been logged + assert [record.message for record in caplog.records if record.levelname == "ERROR"] != [] + caplog.clear() + else: + # No error messages should have been logged on the last, fully successful attempt + assert [record.message for record in caplog.records if record.levelname == "ERROR"] == [] + break + else: + pytest.fail("Sync was still incomplete after 10 retries") + + +def test_dsync_subclass_methods_diff_sync_skip_flags(): + """Test diff and sync behavior when using the SKIP_UNMATCHED_* flags.""" + baseline = BackendA() + baseline.load() + + extra_models = BackendA() + extra_models.load() + extra_site = extra_models.site(name="lax") + extra_models.add(extra_site) + extra_device = extra_models.device(name="nyc-spine3", site_name="nyc", role="spine") + extra_models.get(extra_models.site, "nyc").add_child(extra_device) + extra_models.add(extra_device) + + missing_models = BackendA() + missing_models.load() + missing_models.remove(missing_models.get(missing_models.site, "rdu")) + missing_device = 
missing_models.get(missing_models.device, "sfo-spine2") + missing_models.get(missing_models.site, "sfo").remove_child(missing_device) + missing_models.remove(missing_device) + + assert baseline.diff_from(extra_models).has_diffs() + assert baseline.diff_to(missing_models).has_diffs() + + # SKIP_UNMATCHED_SRC should mean that extra models in the src are not flagged for creation in the dest + assert not baseline.diff_from(extra_models, flags=DSyncFlags.SKIP_UNMATCHED_SRC).has_diffs() + # SKIP_UNMATCHED_DST should mean that missing models in the src are not flagged for deletion from the dest + assert not baseline.diff_from(missing_models, flags=DSyncFlags.SKIP_UNMATCHED_DST).has_diffs() + # SKIP_UNMATCHED_BOTH means, well, both + assert not extra_models.diff_from(missing_models, flags=DSyncFlags.SKIP_UNMATCHED_BOTH).has_diffs() + assert not extra_models.diff_to(missing_models, flags=DSyncFlags.SKIP_UNMATCHED_BOTH).has_diffs() + + baseline.sync_from(extra_models, flags=DSyncFlags.SKIP_UNMATCHED_SRC) + # New objects should not have been created + assert baseline.get(baseline.site, "lax") is None + assert baseline.get(baseline.device, "nyc-spine3") is None + assert "nyc-spine3" not in baseline.get(baseline.site, "nyc").devices + + baseline.sync_from(missing_models, flags=DSyncFlags.SKIP_UNMATCHED_DST) + # Objects should not have been deleted + assert baseline.get(baseline.site, "rdu") is not None + assert baseline.get(baseline.device, "sfo-spine2") is not None + assert "sfo-spine2" in baseline.get(baseline.site, "sfo").devices diff --git a/tests/unit/test_dsync_model.py b/tests/unit/test_dsync_model.py index 3dd4a903..aec01110 100644 --- a/tests/unit/test_dsync_model.py +++ b/tests/unit/test_dsync_model.py @@ -5,7 +5,9 @@ import pytest from dsync import DSyncModel -from dsync.exceptions import ObjectStoreWrongType, ObjectAlreadyExists +from dsync.exceptions import ObjectStoreWrongType, ObjectAlreadyExists, ObjectNotFound + +from .conftest import Device, Interface def 
test_generic_dsync_model_methods(generic_dsync_model, make_site): @@ -23,7 +25,7 @@ def test_generic_dsync_model_methods(generic_dsync_model, make_site): generic_dsync_model.add_child(make_site()) -def test_dsync_model_subclass_methods(make_site, make_device, make_interface): +def test_dsync_model_subclass_getters(make_site, make_device, make_interface): """Check that the DSyncModel APIs work correctly for various subclasses.""" site1 = make_site() device1 = make_device() @@ -61,6 +63,13 @@ def test_dsync_model_subclass_methods(make_site, make_device, make_interface): assert device1.get_shortname() == "device1" assert device1_eth0.get_shortname() == "eth0" + +def test_dsync_model_subclass_add_remove(make_site, make_device, make_interface): + """Check that the DSyncModel add_child/remove_child APIs work correctly for various subclasses.""" + site1 = make_site() + device1 = make_device() + device1_eth0 = make_interface() + assert site1.devices == [] site1.add_child(device1) assert site1.devices == ["device1"] @@ -69,6 +78,13 @@ def test_dsync_model_subclass_methods(make_site, make_device, make_interface): with pytest.raises(ObjectAlreadyExists): site1.add_child(device1) + site1.remove_child(device1) + assert site1.devices == [] + with pytest.raises(ObjectStoreWrongType): + site1.remove_child(device1_eth0) + with pytest.raises(ObjectNotFound): + site1.remove_child(device1) + assert device1.interfaces == [] device1.add_child(device1_eth0) assert device1.interfaces == ["device1__eth0"] @@ -77,6 +93,51 @@ def test_dsync_model_subclass_methods(make_site, make_device, make_interface): with pytest.raises(ObjectAlreadyExists): device1.add_child(device1_eth0) + device1.remove_child(device1_eth0) + assert device1.interfaces == [] + with pytest.raises(ObjectStoreWrongType): + device1.remove_child(site1) + with pytest.raises(ObjectNotFound): + device1.remove_child(device1_eth0) + + +def test_dsync_model_subclass_crud(generic_dsync): + """Test basic CRUD operations on generic 
DSyncModel subclasses.""" + device1 = Device.create(generic_dsync, {"name": "device1"}, {"role": "spine"}) + assert isinstance(device1, Device) + assert device1.dsync == generic_dsync + assert device1.name == "device1" + assert device1.role == "spine" + + device1_eth0 = Interface.create( + generic_dsync, {"name": "eth0", "device_name": "device1"}, {"description": "some description"}, + ) + assert isinstance(device1_eth0, Interface) + assert device1_eth0.dsync == generic_dsync + assert device1_eth0.name == "eth0" + assert device1_eth0.device_name == "device1" + assert device1_eth0.description == "some description" + + device1 = device1.update({"site_name": "site1", "role": "leaf"}) + assert isinstance(device1, Device) + assert device1.name == "device1" + assert device1.site_name == "site1" + assert device1.role == "leaf" + + device1_eth0 = device1_eth0.update({"description": ""}) + assert isinstance(device1_eth0, Interface) + assert device1_eth0.name == "eth0" + assert device1_eth0.device_name == "device1" + assert device1_eth0.description == "" + + # TODO: negative tests - try to update identifiers with update(), for example + + device1 = device1.delete() + assert isinstance(device1, Device) + + device1_eth0.delete() + assert isinstance(device1_eth0, Interface) + def test_dsync_model_subclass_validation(): """Verify that invalid subclasses of DSyncModel are detected at declaration time."""