From cae961cf10ac7a37af291b93f42af16e506dfea9 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Thu, 20 Feb 2025 14:57:55 +0100 Subject: [PATCH 01/30] fix: scope support on apply change set (#64) Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/api/urls.py | 3 ++- netbox_diode_plugin/api/views.py | 21 +++++++++++++++++++++ 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/netbox_diode_plugin/api/urls.py b/netbox_diode_plugin/api/urls.py index 9fff272..6e25625 100644 --- a/netbox_diode_plugin/api/urls.py +++ b/netbox_diode_plugin/api/urls.py @@ -5,12 +5,13 @@ from django.urls import include, path from netbox.api.routers import NetBoxRouter -from .views import ApplyChangeSetView, ObjectStateView +from .views import ApplyChangeSetView, ObjectStateView, GenerateDiffView router = NetBoxRouter() urlpatterns = [ path("object-state/", ObjectStateView.as_view()), path("apply-change-set/", ApplyChangeSetView.as_view()), + path("generate-diff/", GenerateDiffView.as_view()), path("", include(router.urls)), ] diff --git a/netbox_diode_plugin/api/views.py b/netbox_diode_plugin/api/views.py index e791ab8..a8f7358 100644 --- a/netbox_diode_plugin/api/views.py +++ b/netbox_diode_plugin/api/views.py @@ -673,3 +673,24 @@ class ApplyChangeSetException(Exception): """ApplyChangeSetException used to cause atomic transaction rollback.""" pass + +##### + +import logging +logger = logging.getLogger("netbox.diode_data") + + +class GenerateDiffView(views.APIView): + """GenerateDiff view.""" + + permission_classes = [IsAuthenticated, IsDiodeWriter] + + def post(self, request, *args, **kwargs): + """Generate diff for entity.""" + + entity = request.data.get("entity") + object_type = request.data.get("object_type") + + logger.error(f"generate diff called with entity: {entity} and object_type: {object_type}") + + return Response({}, status=status.HTTP_500_INTERNAL_SERVER_ERROR) \ No newline at end of file From 1f0d1df98eefddbebd336329115fcc7314b42ae4 Mon Sep 17 
00:00:00 2001 From: Luke Tucker Date: Thu, 27 Mar 2025 02:13:55 -0400 Subject: [PATCH 02/30] wip diff api --- docker/netbox/env/netbox.env | 1 + docker/netbox/local_settings.py | 3 +- netbox_diode_plugin/api/differ.py | 209 ++++++ netbox_diode_plugin/api/matcher.py | 379 ++++++++++ netbox_diode_plugin/api/plugin_utils.py | 757 ++++++++++++++++++++ netbox_diode_plugin/api/supported_models.py | 292 ++++++++ netbox_diode_plugin/api/transformer.py | 224 ++++++ netbox_diode_plugin/api/urls.py | 2 +- netbox_diode_plugin/api/views.py | 65 +- 9 files changed, 1925 insertions(+), 7 deletions(-) create mode 100644 netbox_diode_plugin/api/differ.py create mode 100644 netbox_diode_plugin/api/matcher.py create mode 100644 netbox_diode_plugin/api/plugin_utils.py create mode 100644 netbox_diode_plugin/api/supported_models.py create mode 100644 netbox_diode_plugin/api/transformer.py diff --git a/docker/netbox/env/netbox.env b/docker/netbox/env/netbox.env index 45993fc..38a0211 100644 --- a/docker/netbox/env/netbox.env +++ b/docker/netbox/env/netbox.env @@ -41,3 +41,4 @@ DIODE_TO_NETBOX_API_KEY=1368dbad13e418d5a443d93cf255edde03a2a754 NETBOX_TO_DIODE_API_KEY=1e99338b8cab5fc637bc55f390bda1446f619c42 DIODE_API_KEY=5a52c45ee8231156cb620d193b0291912dd15433 BASE_PATH=netbox/ +DEBUG=True \ No newline at end of file diff --git a/docker/netbox/local_settings.py b/docker/netbox/local_settings.py index 6ab2063..0542c56 100644 --- a/docker/netbox/local_settings.py +++ b/docker/netbox/local_settings.py @@ -1,4 +1,5 @@ from netbox_branching.utilities import DynamicSchemaDict + from .configuration import DATABASE # Wrap DATABASES with DynamicSchemaDict for dynamic schema support @@ -9,4 +10,4 @@ # Employ our custom database router DATABASE_ROUTERS = [ 'netbox_branching.database.BranchAwareRouter', -] \ No newline at end of file +] diff --git a/netbox_diode_plugin/api/differ.py b/netbox_diode_plugin/api/differ.py new file mode 100644 index 0000000..8e1ae7c --- /dev/null +++ 
b/netbox_diode_plugin/api/differ.py @@ -0,0 +1,209 @@ +"""Differ.""" + +import copy +import decimal +import json +import logging +import re +import uuid +from dataclasses import dataclass, field +from enum import Enum +from functools import lru_cache +from dataclasses import dataclass, field +from enum import Enum +import copy +import uuid + +from django.db import models +from django.contrib.contenttypes.models import ContentType +from django.core.exceptions import ValidationError +from django.utils.text import slugify +from utilities.data import shallow_compare_dict +from vpn.constants import L2VPN_ASSIGNMENT_MODELS + +from .supported_models import extract_supported_models +from .transformer import transform_proto_json, cleanup_unresolved_references + + +logger = logging.getLogger(__name__) + +SUPPORTED_MODELS = extract_supported_models() + +class ChangeType(Enum): + """Change type enum.""" + + CREATE = "create" + UPDATE = "update" + NOOP = "noop" + + +@dataclass +class Change: + """A change to a model instance.""" + + change_type: ChangeType + object_type: str + object_id: str + object_primary_value: str + id: str = field(default_factory=lambda: str(uuid.uuid4())) + before: dict | None = field(default=None) + data: dict | None = field(default=None) + new_refs: list[str] = field(default_factory=list) + + def to_dict(self) -> dict: + """Convert the change to a dictionary.""" + return { + "id": self.id, + "change_type": self.change_type.value, + "object_type": self.object_type, + "object_id": self.object_id, + "object_primary_value": self.object_primary_value, + "before": self.before, + "data": self.data, + "new_refs": self.new_refs, + } + + +@dataclass +class ChangeSet: + """A set of changes to a model instance.""" + + id: str = field(default_factory=lambda: str(uuid.uuid4())) + changes: list[Change] = field(default_factory=list) + branch: dict[str, str] | None = field(default=None) # {"id": str, "name": str} + + def to_dict(self) -> dict: + """Convert the change 
set to a dictionary.""" + return { + "id": self.id, + "changes": [change.to_dict() for change in self.changes], + "branch": self.branch, + } + +def prechange_data_from_instance(instance) -> dict: + """Convert model instance data to a dictionary format for comparison.""" + prechange_data = {} + + if instance is None: + return prechange_data + + model_class = instance.__class__ + + model = SUPPORTED_MODELS.get(model_class.__name__) + if not model: + raise ValidationError(f"Model {model_class.__name__} is not supported") + + fields = model.get("fields", {}) + if not fields: + raise ValidationError(f"Model {model_class.__name__} has no fields") + + for field_name, field_info in fields.items(): + if not hasattr(instance, field_name): + continue + + value = getattr(instance, field_name) + if hasattr(value, "all"): # Handle many-to-many and many-to-one relationships + # For any relationship that has an 'all' method, get all related objects' primary keys + prechange_data[field_name] = ( + [item.pk for item in value.all()] if value is not None else [] + ) + elif hasattr( + value, "pk" + ): # Handle regular related fields (ForeignKey, OneToOne) + # Handle ContentType fields + if isinstance(value, ContentType): + prechange_data[field_name] = f"{value.app_label}.{value.model}" + else: + # For regular related fields, get the primary key + prechange_data[field_name] = value.pk if value is not None else None + else: + prechange_data[field_name] = value + + return prechange_data + + +def clean_diff_data(data: dict, exclude_empty_values: bool = True) -> dict: + """Clean diff data by removing null values.""" + result = {} + for k, v in data.items(): + if exclude_empty_values: + if v is None: + continue + if isinstance(v, list) and len(v) == 0: + continue + if isinstance(v, dict) and len(v) == 0: + continue + if isinstance(v, str) and v == "": + continue + result[k] = v + return result + + +def diff_to_change( + object_type: str, + prechange_data: dict, + postchange_data: dict, + 
changed_attrs: list[str], + unresolved_references: list[str], +) -> Change: + """Convert a diff to a change.""" + change_type = ChangeType.UPDATE if prechange_data.get("id") else ChangeType.CREATE + if change_type == ChangeType.UPDATE and not len(changed_attrs) > 0: + change_type = ChangeType.NOOP + + change = Change( + change_type=change_type, + object_type=object_type, + object_id=prechange_data.get("id") or postchange_data.get("id"), + object_primary_value="__PLACEHOLDER__", # TODO: get primary value + new_refs=unresolved_references, + ) + + postchange_data_clean = clean_diff_data(postchange_data) + change.data = postchange_data_clean + + if change_type == ChangeType.UPDATE: + # remove null values + prechange_data_clean = clean_diff_data(prechange_data) + + merged_data = copy.deepcopy(prechange_data_clean) + + merged_data.update({ + attr: postchange_data_clean[attr] + for attr in changed_attrs + if attr in postchange_data_clean + }) + change.before = prechange_data_clean + change.data = merged_data + + return change + +def generate_changeset(entity: dict, object_type: str) -> ChangeSet: + """Generate a changeset for an entity.""" + change_set = ChangeSet() + + entities = transform_proto_json(entity, object_type) + for entity in entities: + prechange_data = {} + changed_attrs = [] + new_refs = cleanup_unresolved_references(entity) + object_type = entity.pop("_object_type") + _ = entity.pop("_uuid") + instance = entity.pop("_instance", None) + + if instance: + prechange_data = prechange_data_from_instance(instance) + changed_data = shallow_compare_dict( + prechange_data, entity, + ) + changed_attrs = sorted(changed_data.keys()) + change = diff_to_change( + object_type, + prechange_data, + entity, + changed_attrs, + new_refs, + ) + change_set.changes.append(change) + logger.error(f"change_set: {json.dumps(change_set.to_dict(), default=str, indent=4)}") + return change_set diff --git a/netbox_diode_plugin/api/matcher.py b/netbox_diode_plugin/api/matcher.py new file 
mode 100644 index 0000000..200eb8a --- /dev/null +++ b/netbox_diode_plugin/api/matcher.py @@ -0,0 +1,379 @@ +"""Object matching utilities.""" + +import logging +from functools import cache, lru_cache +from dataclasses import dataclass +from typing import List, Optional, Type + +from core.models import ObjectType as NetBoxType +from django.db import models +from django.db.models import F +from django.db.models.lookups import Exact +from django.db.models.query_utils import Q + +logger = logging.getLogger(__name__) + + +# +# TODO: add special cases for things that lack any unique constraints, +# but may have logical pre-existing matches ... eg an ip address in +# a certain context ... etc ? possibly mac address also ? +# + +@dataclass +class ObjectMatchCriteria: + """ + Defines criteria for identifying a specific object. + + This matcher expects a fully 'transformed' and resolved + set of fields. ie field names are snake case and match + the model fields and any references to another object + specify a specific id in the appropriate field name. 
+ eg device_id=123 etc and for any generic references, + both the type and idshould be specified, eg: + scope_type="dcim.site" and scope_id=123 + """ + + fields: tuple[str] | None = None + expressions: tuple | None = None + condition: Q | None = None + model_class: Type[models.Model] | None = None + name: str | None = None + + def __hash__(self): + return hash((self.fields, self.expressions, self.condition, self.model_class.__name__, self.name)) + + def has_required_fields(self, data) -> bool: + """Returns True if the data given contains a value for all fields referenced by the constraint.""" + return all(field in data for field in self.get_refs()) + + @cache + def get_refs(self) -> set[str]: + """Returns a set of all field names referenced by the constraint.""" + refs = set() + if self.fields: + refs.update(self.fields) + elif self.expressions: + for expr in self.expressions: + refs |= _get_refs(expr) + return frozenset(refs) + + @cache + def get_insensitive_refs(self) -> set[str]: + """ + Returns a set of all field names that should be compared in a case insensitive manner. + + best effort, doesn't handle things being nested in a complex way. + """ + refs = set() + if self.expressions: + for expr in self.expressions: + # TODO be more careful here + if expr.__class__.__name__ == "Lower": + for source_expr in getattr(expr, "source_expressions", []): + if hasattr(source_expr, "name"): + refs.add(source_expr.name) + return refs + + def fingerprint(self, data: dict) -> str|None: + """ + Returns a fingerprint of the data based on these criteria. + + These criteria that can be used to determine if two + data structs roughly match. + + This is a best effort based on the referenced fields + and some interrogation of case sensitivity. The + real criteria are potentially complex... 
+ """ + if not self.has_required_fields(data): + return None + + if self.condition: + if not self._check_condition(data): + return None + + # sort the fields by name + sorted_fields = sorted(self.get_refs()) + insensitive = self.get_insensitive_refs() + values = [] + for field in sorted_fields: + value = data[field] + if field in insensitive: + value = value.lower() + values.append(value) + # logger.error(f"fingerprint {self}: {data} -> values: {tuple(values)}") + + return hash(tuple(values)) + + def _check_condition(self, data) -> bool: + if self.condition is None: + return True + # TODO: handle evaluating complex conditions, + # there are only simple ones currently + if self.condition.connector != Q.AND: + logger.error(f"Unhandled condition {self.condition}") + return False + + if len(self.condition.children) != 1: + logger.error(f"Unhandled condition {self.condition}") + return False + + if len(self.condition.children[0]) != 2: + logger.error(f"Unhandled condition {self.condition}") + return False + + k, v = self.condition.children[0] + result = False + if k.endswith("__isnull"): + k = k[:-8] + result = k not in data or data[k] is None + else: + result = k in data and data[k] == v + + if self.condition.negated: + result = not result + + return result + + def build_queryset(self, data) -> models.QuerySet: + """Builds a queryset for the constraint with the given data.""" + if self.fields and len(self.fields) > 0: + return self._build_fields_queryset(data) + if self.expressions and len(self.expressions) > 0: + return self._build_expressions_queryset(data) + raise ValueError("No fields or expressions to build queryset from") + + def _build_fields_queryset(self, data) -> models.QuerySet: + """Builds a queryset for a simple set-of-fields constraint.""" + lookup_kwargs = {} + for field_name in self.fields: + field = self.model_class._meta.get_field(field_name) + attribute = field.attname + if attribute not in data: + return None # cannot match, missing field data + 
lookup_value = data.get(field.attname) + lookup_kwargs[field.name] = lookup_value + + logger.error(f" * query kwargs: {lookup_kwargs}") + qs = self.model_class.objects.filter(**lookup_kwargs) + if self.condition: + qs = qs.filter(self.condition) + return qs + + def _build_expressions_queryset(self, data) -> models.QuerySet: + """Builds a queryset for the constraint with the given data.""" + replacements = { + F(field): value + for field, value in data.items() + } + + filters = [] + for expr in self.expressions: + if hasattr(expr, "get_expression_for_validation"): + expr = expr.get_expression_for_validation() + + refs = _get_refs(expr) + for ref in refs: + if ref not in replacements: + return None # cannot match, missing field data + + rhs = expr.replace_expressions(replacements) + condition = Exact(expr, rhs) + filters.append(condition) + + qs = self.model_class.objects.filter(*filters) + if self.condition: + qs = qs.filter(self.condition) + return qs + +@lru_cache(maxsize=256) +def get_model_matchers(model_class) -> list[ObjectMatchCriteria]: + """Extract unique constraints from a Django model.""" + constraints = [] + + # collect single fields that are unique + for field in model_class._meta.fields: + if field.name == "id": + # TODO(ltucker): more django-general detection of pk field? 
+ continue + + if field.unique: + constraints.append( + ObjectMatchCriteria( + model_class=model_class, + fields=(field.name,), + name=f"unique_{field.name}", + ) + ) + + # collect UniqueConstraint constraints + for constraint in model_class._meta.constraints: + if not _is_supported_constraint(constraint, model_class): + continue + if len(constraint.fields) > 0: + constraints.append( + ObjectMatchCriteria( + model_class=model_class, + fields=tuple(constraint.fields), + condition=constraint.condition, + name=constraint.name, + ) + ) + elif len(constraint.expressions) > 0: + constraints.append( + ObjectMatchCriteria( + model_class=model_class, + expressions=tuple(constraint.expressions), + condition=constraint.condition, + name=constraint.name, + ) + ) + else: + logger.error( + f"Constraint {constraint.name} on {model_class.__name__} had no fields or expressions (skipped)" + ) + # (this shouldn't happen / enforced by django) + continue + + return constraints + +def _is_supported_constraint(constraint, model_class) -> bool: + if not isinstance(constraint, models.UniqueConstraint): + return False + + if len(constraint.opclasses) > 0: + logger.warning(f"Constraint {constraint.name} on {model_class.__name__} had opclasses (skipped)") + return False + + if constraint.nulls_distinct is not None and constraint.nulls_distinct is True: + logger.warning(f"Constraint {constraint.name} on {model_class.__name__} had nulls_distinct (skipped)") + return False + + for field_name in constraint.fields: + field = model_class._meta.get_field(field_name) + if field.generated: + logger.warning( + f"Constraint {constraint.name} on {model_class.__name__} had" + f" generated field {field_name} (skipped)" + ) + return False + + return True + +def _get_refs(expr) -> set[str]: + refs = set() + if isinstance(expr, str): + refs.add(expr) + elif isinstance(expr, F): + refs.add(expr.name) + elif hasattr(expr, "get_source_expressions"): + for subexpr in expr.get_source_expressions(): + refs |= 
_get_refs(subexpr) + else: + logger.warning(f"Unhandled expression type for _get_refs: {type(expr)}") + return refs + +def _fingerprint_all(data: dict) -> str: + """ + Returns a fingerprint of the data based on all fields. + + Data should be a (flattened) dictionary of field values. + This ignores any fields that start with an underscore. + """ + if data is None: + return None + + values = [] + for k, v in sorted(data.items()): + if k.startswith("_"): + continue + values.append(k) + if isinstance(v, (list, tuple)): + values.extend(sorted(v)) + # TODO: handle dicts + else: + values.append(v) + # logger.error(f"_fingerprint_all: {data} -> values: {tuple(values)}") + + return hash(tuple(values)) + +def fingerprint(data: dict, object_type: str) -> str: + """ + Fingerprint a data structure. + + This uses the first matcher that has all + required fields or else uses all fields. + + TODO: This means there are pathological? cases where + the same object is being referenced but by + different unique constraints in the same diff... + this could lead to some unexpected behavior. + """ + if data is None: + return None + + model_class = get_object_type_model(object_type) + # check any known match criteria + for matcher in get_model_matchers(model_class): + fp = matcher.fingerprint(data) + if fp is not None: + return fp + # fall back to fingerprinting all the data + return _fingerprint_all(data) + +def find_existing_object(data: dict, object_type: str): + """ + Find an existing object that matches the given data. + + Uses all object match criteria to look for an existing + object. Returns the first match found. + + Returns the object if found, otherwise None. 
+ """ + logger.error(f"resolving {data}") + model_class = get_object_type_model(object_type) + for matcher in get_model_matchers(model_class): + if not matcher.has_required_fields(data): + logger.error(f" * skipped matcher {matcher.name} (missing fields)") + continue + q = matcher.build_queryset(data) + if q is None: + logger.error(f" * skipped matcher {matcher.name} (no queryset)") + continue + try: + logger.error(f" * trying query {q.query}") + existing = q.get() + logger.error(f" -> Found object {existing} via {matcher.name}") + return existing + except model_class.DoesNotExist: + logger.error(f" -> No object found for matcher {matcher.name}") + continue + logger.error(" * No matchers found an existing object") + return None + +@lru_cache(maxsize=256) +def get_object_type_model(object_type: str) -> Type[models.Model]: + """Get the model class for a given object type.""" + app_label, model_name = object_type.split(".") + object_content_type = NetBoxType.objects.get_by_natural_key(app_label, model_name) + return object_content_type.model_class() + +def merge_data(a: dict, b: dict) -> dict: + """ + Merges two structures. + + If there are any conflicts, an error is raised. + Ignores conflicts in fields that start with an underscore, + preferring a's value. + """ + if a is None or b is None: + raise ValueError("Cannot merge None values") + merged = a.copy() + for k, v in b.items(): + if k.startswith("_"): + continue + if k in merged and merged[k] != v: + raise ValueError(f"Conflict merging {a} and {b} on {k}: {merged[k]} and {v}") + merged[k] = v + return merged diff --git a/netbox_diode_plugin/api/plugin_utils.py b/netbox_diode_plugin/api/plugin_utils.py new file mode 100644 index 0000000..6b4a96a --- /dev/null +++ b/netbox_diode_plugin/api/plugin_utils.py @@ -0,0 +1,757 @@ +"""Diode plugin helpers.""" + +# Generated code. DO NOT EDIT. 
+# Timestamp: 2025-03-26 20:52:02Z + +from dataclasses import dataclass +from typing import Type + +from django.contrib.contenttypes.models import ContentType +from django.db import models + + +@dataclass +class RefInfo: + object_type: str + field_name: str + is_generic: bool = False + +_REF_INFO = { + 'ipam.asn': { + 'rir': RefInfo(object_type='ipam.rir', field_name='rir'), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'ipam.asnrange': { + 'rir': RefInfo(object_type='ipam.rir', field_name='rir'), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'ipam.aggregate': { + 'rir': RefInfo(object_type='ipam.rir', field_name='rir'), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.cable': { + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.cabletermination': { + 'cable': RefInfo(object_type='dcim.cable', field_name='cable'), + 'terminationCircuitTermination': RefInfo(object_type='circuits.circuittermination', field_name='termination', is_generic=True), + 'terminationConsolePort': RefInfo(object_type='dcim.consoleport', field_name='termination', is_generic=True), + 'terminationConsoleServerPort': RefInfo(object_type='dcim.consoleserverport', field_name='termination', is_generic=True), + 'terminationFrontPort': RefInfo(object_type='dcim.frontport', field_name='termination', is_generic=True), + 'terminationInterface': RefInfo(object_type='dcim.interface', field_name='termination', is_generic=True), + 'terminationPowerFeed': RefInfo(object_type='dcim.powerfeed', field_name='termination', is_generic=True), + 'terminationPowerOutlet': RefInfo(object_type='dcim.poweroutlet', 
field_name='termination', is_generic=True), + 'terminationPowerPort': RefInfo(object_type='dcim.powerport', field_name='termination', is_generic=True), + 'terminationRearPort': RefInfo(object_type='dcim.rearport', field_name='termination', is_generic=True), + }, + 'circuits.circuit': { + 'provider': RefInfo(object_type='circuits.provider', field_name='provider'), + 'providerAccount': RefInfo(object_type='circuits.provideraccount', field_name='provider_account'), + 'type': RefInfo(object_type='circuits.circuittype', field_name='type'), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'assignments': RefInfo(object_type='circuits.circuitgroupassignment', field_name='assignments'), + }, + 'circuits.circuitgroup': { + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'circuits.circuitgroupassignment': { + 'group': RefInfo(object_type='circuits.circuitgroup', field_name='group'), + 'memberCircuit': RefInfo(object_type='circuits.circuit', field_name='member', is_generic=True), + 'memberVirtualCircuit': RefInfo(object_type='circuits.virtualcircuit', field_name='member', is_generic=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'circuits.circuittermination': { + 'circuit': RefInfo(object_type='circuits.circuit', field_name='circuit'), + 'terminationLocation': RefInfo(object_type='dcim.location', field_name='termination', is_generic=True), + 'terminationProviderNetwork': RefInfo(object_type='circuits.providernetwork', field_name='termination', is_generic=True), + 'terminationRegion': RefInfo(object_type='dcim.region', field_name='termination', is_generic=True), + 'terminationSite': RefInfo(object_type='dcim.site', field_name='termination', is_generic=True), + 'terminationSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='termination', is_generic=True), + 
'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'circuits.circuittype': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'virtualization.cluster': { + 'type': RefInfo(object_type='virtualization.clustertype', field_name='type'), + 'group': RefInfo(object_type='virtualization.clustergroup', field_name='group'), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'scopeLocation': RefInfo(object_type='dcim.location', field_name='scope', is_generic=True), + 'scopeRegion': RefInfo(object_type='dcim.region', field_name='scope', is_generic=True), + 'scopeSite': RefInfo(object_type='dcim.site', field_name='scope', is_generic=True), + 'scopeSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='scope', is_generic=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'virtualization.clustergroup': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'virtualization.clustertype': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.consoleport': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'module': RefInfo(object_type='dcim.module', field_name='module'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.consoleserverport': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'module': RefInfo(object_type='dcim.module', field_name='module'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'tenancy.contact': { + 'group': RefInfo(object_type='tenancy.contactgroup', field_name='group'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'tenancy.contactassignment': { + 'objectAsn': RefInfo(object_type='ipam.asn', field_name='object', is_generic=True), + 'objectAsnRange': RefInfo(object_type='ipam.asnrange', field_name='object', is_generic=True), + 'objectAggregate': RefInfo(object_type='ipam.aggregate', 
field_name='object', is_generic=True), + 'objectCable': RefInfo(object_type='dcim.cable', field_name='object', is_generic=True), + 'objectCablePath': RefInfo(object_type='dcim.cablepath', field_name='object', is_generic=True), + 'objectCableTermination': RefInfo(object_type='dcim.cabletermination', field_name='object', is_generic=True), + 'objectCircuit': RefInfo(object_type='circuits.circuit', field_name='object', is_generic=True), + 'objectCircuitGroup': RefInfo(object_type='circuits.circuitgroup', field_name='object', is_generic=True), + 'objectCircuitGroupAssignment': RefInfo(object_type='circuits.circuitgroupassignment', field_name='object', is_generic=True), + 'objectCircuitTermination': RefInfo(object_type='circuits.circuittermination', field_name='object', is_generic=True), + 'objectCircuitType': RefInfo(object_type='circuits.circuittype', field_name='object', is_generic=True), + 'objectCluster': RefInfo(object_type='virtualization.cluster', field_name='object', is_generic=True), + 'objectClusterGroup': RefInfo(object_type='virtualization.clustergroup', field_name='object', is_generic=True), + 'objectClusterType': RefInfo(object_type='virtualization.clustertype', field_name='object', is_generic=True), + 'objectConsolePort': RefInfo(object_type='dcim.consoleport', field_name='object', is_generic=True), + 'objectConsoleServerPort': RefInfo(object_type='dcim.consoleserverport', field_name='object', is_generic=True), + 'objectContact': RefInfo(object_type='tenancy.contact', field_name='object', is_generic=True), + 'objectContactAssignment': RefInfo(object_type='tenancy.contactassignment', field_name='object', is_generic=True), + 'objectContactGroup': RefInfo(object_type='tenancy.contactgroup', field_name='object', is_generic=True), + 'objectContactRole': RefInfo(object_type='tenancy.contactrole', field_name='object', is_generic=True), + 'objectDevice': RefInfo(object_type='dcim.device', field_name='object', is_generic=True), + 'objectDeviceBay': 
RefInfo(object_type='dcim.devicebay', field_name='object', is_generic=True), + 'objectDeviceRole': RefInfo(object_type='dcim.devicerole', field_name='object', is_generic=True), + 'objectDeviceType': RefInfo(object_type='dcim.devicetype', field_name='object', is_generic=True), + 'objectFhrpGroup': RefInfo(object_type='ipam.fhrpgroup', field_name='object', is_generic=True), + 'objectFhrpGroupAssignment': RefInfo(object_type='ipam.fhrpgroupassignment', field_name='object', is_generic=True), + 'objectFrontPort': RefInfo(object_type='dcim.frontport', field_name='object', is_generic=True), + 'objectIkePolicy': RefInfo(object_type='vpn.ikepolicy', field_name='object', is_generic=True), + 'objectIkeProposal': RefInfo(object_type='vpn.ikeproposal', field_name='object', is_generic=True), + 'objectIpAddress': RefInfo(object_type='ipam.ipaddress', field_name='object', is_generic=True), + 'objectIpRange': RefInfo(object_type='ipam.iprange', field_name='object', is_generic=True), + 'objectIpSecPolicy': RefInfo(object_type='vpn.ipsecpolicy', field_name='object', is_generic=True), + 'objectIpSecProfile': RefInfo(object_type='vpn.ipsecprofile', field_name='object', is_generic=True), + 'objectIpSecProposal': RefInfo(object_type='vpn.ipsecproposal', field_name='object', is_generic=True), + 'objectInterface': RefInfo(object_type='dcim.interface', field_name='object', is_generic=True), + 'objectInventoryItem': RefInfo(object_type='dcim.inventoryitem', field_name='object', is_generic=True), + 'objectInventoryItemRole': RefInfo(object_type='dcim.inventoryitemrole', field_name='object', is_generic=True), + 'objectL2Vpn': RefInfo(object_type='vpn.l2vpn', field_name='object', is_generic=True), + 'objectL2VpnTermination': RefInfo(object_type='vpn.l2vpntermination', field_name='object', is_generic=True), + 'objectLocation': RefInfo(object_type='dcim.location', field_name='object', is_generic=True), + 'objectMacAddress': RefInfo(object_type='dcim.macaddress', field_name='object', 
is_generic=True), + 'objectManufacturer': RefInfo(object_type='dcim.manufacturer', field_name='object', is_generic=True), + 'objectModule': RefInfo(object_type='dcim.module', field_name='object', is_generic=True), + 'objectModuleBay': RefInfo(object_type='dcim.modulebay', field_name='object', is_generic=True), + 'objectModuleType': RefInfo(object_type='dcim.moduletype', field_name='object', is_generic=True), + 'objectPlatform': RefInfo(object_type='dcim.platform', field_name='object', is_generic=True), + 'objectPowerFeed': RefInfo(object_type='dcim.powerfeed', field_name='object', is_generic=True), + 'objectPowerOutlet': RefInfo(object_type='dcim.poweroutlet', field_name='object', is_generic=True), + 'objectPowerPanel': RefInfo(object_type='dcim.powerpanel', field_name='object', is_generic=True), + 'objectPowerPort': RefInfo(object_type='dcim.powerport', field_name='object', is_generic=True), + 'objectPrefix': RefInfo(object_type='ipam.prefix', field_name='object', is_generic=True), + 'objectProvider': RefInfo(object_type='circuits.provider', field_name='object', is_generic=True), + 'objectProviderAccount': RefInfo(object_type='circuits.provideraccount', field_name='object', is_generic=True), + 'objectProviderNetwork': RefInfo(object_type='circuits.providernetwork', field_name='object', is_generic=True), + 'objectRir': RefInfo(object_type='ipam.rir', field_name='object', is_generic=True), + 'objectRack': RefInfo(object_type='dcim.rack', field_name='object', is_generic=True), + 'objectRackReservation': RefInfo(object_type='dcim.rackreservation', field_name='object', is_generic=True), + 'objectRackRole': RefInfo(object_type='dcim.rackrole', field_name='object', is_generic=True), + 'objectRackType': RefInfo(object_type='dcim.racktype', field_name='object', is_generic=True), + 'objectRearPort': RefInfo(object_type='dcim.rearport', field_name='object', is_generic=True), + 'objectRegion': RefInfo(object_type='dcim.region', field_name='object', is_generic=True), + 
'objectRole': RefInfo(object_type='ipam.role', field_name='object', is_generic=True), + 'objectRouteTarget': RefInfo(object_type='ipam.routetarget', field_name='object', is_generic=True), + 'objectService': RefInfo(object_type='ipam.service', field_name='object', is_generic=True), + 'objectSite': RefInfo(object_type='dcim.site', field_name='object', is_generic=True), + 'objectSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='object', is_generic=True), + 'objectTag': RefInfo(object_type='extras.tag', field_name='object', is_generic=True), + 'objectTenant': RefInfo(object_type='tenancy.tenant', field_name='object', is_generic=True), + 'objectTenantGroup': RefInfo(object_type='tenancy.tenantgroup', field_name='object', is_generic=True), + 'objectTunnel': RefInfo(object_type='vpn.tunnel', field_name='object', is_generic=True), + 'objectTunnelGroup': RefInfo(object_type='vpn.tunnelgroup', field_name='object', is_generic=True), + 'objectTunnelTermination': RefInfo(object_type='vpn.tunneltermination', field_name='object', is_generic=True), + 'objectVlan': RefInfo(object_type='ipam.vlan', field_name='object', is_generic=True), + 'objectVlanGroup': RefInfo(object_type='ipam.vlangroup', field_name='object', is_generic=True), + 'objectVlanTranslationPolicy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='object', is_generic=True), + 'objectVlanTranslationRule': RefInfo(object_type='ipam.vlantranslationrule', field_name='object', is_generic=True), + 'objectVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='object', is_generic=True), + 'objectVrf': RefInfo(object_type='ipam.vrf', field_name='object', is_generic=True), + 'objectVirtualChassis': RefInfo(object_type='dcim.virtualchassis', field_name='object', is_generic=True), + 'objectVirtualCircuit': RefInfo(object_type='circuits.virtualcircuit', field_name='object', is_generic=True), + 'objectVirtualCircuitTermination': RefInfo(object_type='circuits.virtualcircuittermination', 
field_name='object', is_generic=True), + 'objectVirtualCircuitType': RefInfo(object_type='circuits.virtualcircuittype', field_name='object', is_generic=True), + 'objectVirtualDeviceContext': RefInfo(object_type='dcim.virtualdevicecontext', field_name='object', is_generic=True), + 'objectVirtualDisk': RefInfo(object_type='virtualization.virtualdisk', field_name='object', is_generic=True), + 'objectVirtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='object', is_generic=True), + 'objectWirelessLan': RefInfo(object_type='wireless.wirelesslan', field_name='object', is_generic=True), + 'objectWirelessLanGroup': RefInfo(object_type='wireless.wirelesslangroup', field_name='object', is_generic=True), + 'objectWirelessLink': RefInfo(object_type='wireless.wirelesslink', field_name='object', is_generic=True), + 'contact': RefInfo(object_type='tenancy.contact', field_name='contact'), + 'role': RefInfo(object_type='tenancy.contactrole', field_name='role'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'tenancy.contactgroup': { + 'parent': RefInfo(object_type='tenancy.contactgroup', field_name='parent'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'tenancy.contactrole': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.device': { + 'deviceType': RefInfo(object_type='dcim.devicetype', field_name='device_type'), + 'role': RefInfo(object_type='dcim.devicerole', field_name='role'), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'platform': RefInfo(object_type='dcim.platform', field_name='platform'), + 'site': RefInfo(object_type='dcim.site', field_name='site'), + 'location': RefInfo(object_type='dcim.location', field_name='location'), + 'rack': RefInfo(object_type='dcim.rack', field_name='rack'), + 'primaryIp4': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip4'), + 'primaryIp6': RefInfo(object_type='ipam.ipaddress', 
field_name='primary_ip6'), + 'oobIp': RefInfo(object_type='ipam.ipaddress', field_name='oob_ip'), + 'cluster': RefInfo(object_type='virtualization.cluster', field_name='cluster'), + 'virtualChassis': RefInfo(object_type='dcim.virtualchassis', field_name='virtual_chassis'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.devicebay': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'installedDevice': RefInfo(object_type='dcim.device', field_name='installed_device'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.devicerole': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.devicetype': { + 'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='manufacturer'), + 'defaultPlatform': RefInfo(object_type='dcim.platform', field_name='default_platform'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'ipam.fhrpgroup': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'ipam.fhrpgroupassignment': { + 'group': RefInfo(object_type='ipam.fhrpgroup', field_name='group'), + 'interfaceAsn': RefInfo(object_type='ipam.asn', field_name='interface', is_generic=True), + 'interfaceAsnRange': RefInfo(object_type='ipam.asnrange', field_name='interface', is_generic=True), + 'interfaceAggregate': RefInfo(object_type='ipam.aggregate', field_name='interface', is_generic=True), + 'interfaceCable': RefInfo(object_type='dcim.cable', field_name='interface', is_generic=True), + 'interfaceCablePath': RefInfo(object_type='dcim.cablepath', field_name='interface', is_generic=True), + 'interfaceCableTermination': RefInfo(object_type='dcim.cabletermination', field_name='interface', is_generic=True), + 'interfaceCircuit': RefInfo(object_type='circuits.circuit', field_name='interface', is_generic=True), + 'interfaceCircuitGroup': RefInfo(object_type='circuits.circuitgroup', field_name='interface', is_generic=True), + 
'interfaceCircuitGroupAssignment': RefInfo(object_type='circuits.circuitgroupassignment', field_name='interface', is_generic=True), + 'interfaceCircuitTermination': RefInfo(object_type='circuits.circuittermination', field_name='interface', is_generic=True), + 'interfaceCircuitType': RefInfo(object_type='circuits.circuittype', field_name='interface', is_generic=True), + 'interfaceCluster': RefInfo(object_type='virtualization.cluster', field_name='interface', is_generic=True), + 'interfaceClusterGroup': RefInfo(object_type='virtualization.clustergroup', field_name='interface', is_generic=True), + 'interfaceClusterType': RefInfo(object_type='virtualization.clustertype', field_name='interface', is_generic=True), + 'interfaceConsolePort': RefInfo(object_type='dcim.consoleport', field_name='interface', is_generic=True), + 'interfaceConsoleServerPort': RefInfo(object_type='dcim.consoleserverport', field_name='interface', is_generic=True), + 'interfaceContact': RefInfo(object_type='tenancy.contact', field_name='interface', is_generic=True), + 'interfaceContactAssignment': RefInfo(object_type='tenancy.contactassignment', field_name='interface', is_generic=True), + 'interfaceContactGroup': RefInfo(object_type='tenancy.contactgroup', field_name='interface', is_generic=True), + 'interfaceContactRole': RefInfo(object_type='tenancy.contactrole', field_name='interface', is_generic=True), + 'interfaceDevice': RefInfo(object_type='dcim.device', field_name='interface', is_generic=True), + 'interfaceDeviceBay': RefInfo(object_type='dcim.devicebay', field_name='interface', is_generic=True), + 'interfaceDeviceRole': RefInfo(object_type='dcim.devicerole', field_name='interface', is_generic=True), + 'interfaceDeviceType': RefInfo(object_type='dcim.devicetype', field_name='interface', is_generic=True), + 'interfaceFhrpGroup': RefInfo(object_type='ipam.fhrpgroup', field_name='interface', is_generic=True), + 'interfaceFhrpGroupAssignment': RefInfo(object_type='ipam.fhrpgroupassignment', 
field_name='interface', is_generic=True), + 'interfaceFrontPort': RefInfo(object_type='dcim.frontport', field_name='interface', is_generic=True), + 'interfaceIkePolicy': RefInfo(object_type='vpn.ikepolicy', field_name='interface', is_generic=True), + 'interfaceIkeProposal': RefInfo(object_type='vpn.ikeproposal', field_name='interface', is_generic=True), + 'interfaceIpAddress': RefInfo(object_type='ipam.ipaddress', field_name='interface', is_generic=True), + 'interfaceIpRange': RefInfo(object_type='ipam.iprange', field_name='interface', is_generic=True), + 'interfaceIpSecPolicy': RefInfo(object_type='vpn.ipsecpolicy', field_name='interface', is_generic=True), + 'interfaceIpSecProfile': RefInfo(object_type='vpn.ipsecprofile', field_name='interface', is_generic=True), + 'interfaceIpSecProposal': RefInfo(object_type='vpn.ipsecproposal', field_name='interface', is_generic=True), + 'interfaceInterface': RefInfo(object_type='dcim.interface', field_name='interface', is_generic=True), + 'interfaceInventoryItem': RefInfo(object_type='dcim.inventoryitem', field_name='interface', is_generic=True), + 'interfaceInventoryItemRole': RefInfo(object_type='dcim.inventoryitemrole', field_name='interface', is_generic=True), + 'interfaceL2Vpn': RefInfo(object_type='vpn.l2vpn', field_name='interface', is_generic=True), + 'interfaceL2VpnTermination': RefInfo(object_type='vpn.l2vpntermination', field_name='interface', is_generic=True), + 'interfaceLocation': RefInfo(object_type='dcim.location', field_name='interface', is_generic=True), + 'interfaceMacAddress': RefInfo(object_type='dcim.macaddress', field_name='interface', is_generic=True), + 'interfaceManufacturer': RefInfo(object_type='dcim.manufacturer', field_name='interface', is_generic=True), + 'interfaceModule': RefInfo(object_type='dcim.module', field_name='interface', is_generic=True), + 'interfaceModuleBay': RefInfo(object_type='dcim.modulebay', field_name='interface', is_generic=True), + 'interfaceModuleType': 
RefInfo(object_type='dcim.moduletype', field_name='interface', is_generic=True), + 'interfacePlatform': RefInfo(object_type='dcim.platform', field_name='interface', is_generic=True), + 'interfacePowerFeed': RefInfo(object_type='dcim.powerfeed', field_name='interface', is_generic=True), + 'interfacePowerOutlet': RefInfo(object_type='dcim.poweroutlet', field_name='interface', is_generic=True), + 'interfacePowerPanel': RefInfo(object_type='dcim.powerpanel', field_name='interface', is_generic=True), + 'interfacePowerPort': RefInfo(object_type='dcim.powerport', field_name='interface', is_generic=True), + 'interfacePrefix': RefInfo(object_type='ipam.prefix', field_name='interface', is_generic=True), + 'interfaceProvider': RefInfo(object_type='circuits.provider', field_name='interface', is_generic=True), + 'interfaceProviderAccount': RefInfo(object_type='circuits.provideraccount', field_name='interface', is_generic=True), + 'interfaceProviderNetwork': RefInfo(object_type='circuits.providernetwork', field_name='interface', is_generic=True), + 'interfaceRir': RefInfo(object_type='ipam.rir', field_name='interface', is_generic=True), + 'interfaceRack': RefInfo(object_type='dcim.rack', field_name='interface', is_generic=True), + 'interfaceRackReservation': RefInfo(object_type='dcim.rackreservation', field_name='interface', is_generic=True), + 'interfaceRackRole': RefInfo(object_type='dcim.rackrole', field_name='interface', is_generic=True), + 'interfaceRackType': RefInfo(object_type='dcim.racktype', field_name='interface', is_generic=True), + 'interfaceRearPort': RefInfo(object_type='dcim.rearport', field_name='interface', is_generic=True), + 'interfaceRegion': RefInfo(object_type='dcim.region', field_name='interface', is_generic=True), + 'interfaceRole': RefInfo(object_type='ipam.role', field_name='interface', is_generic=True), + 'interfaceRouteTarget': RefInfo(object_type='ipam.routetarget', field_name='interface', is_generic=True), + 'interfaceService': 
RefInfo(object_type='ipam.service', field_name='interface', is_generic=True), + 'interfaceSite': RefInfo(object_type='dcim.site', field_name='interface', is_generic=True), + 'interfaceSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='interface', is_generic=True), + 'interfaceTag': RefInfo(object_type='extras.tag', field_name='interface', is_generic=True), + 'interfaceTenant': RefInfo(object_type='tenancy.tenant', field_name='interface', is_generic=True), + 'interfaceTenantGroup': RefInfo(object_type='tenancy.tenantgroup', field_name='interface', is_generic=True), + 'interfaceTunnel': RefInfo(object_type='vpn.tunnel', field_name='interface', is_generic=True), + 'interfaceTunnelGroup': RefInfo(object_type='vpn.tunnelgroup', field_name='interface', is_generic=True), + 'interfaceTunnelTermination': RefInfo(object_type='vpn.tunneltermination', field_name='interface', is_generic=True), + 'interfaceVlan': RefInfo(object_type='ipam.vlan', field_name='interface', is_generic=True), + 'interfaceVlanGroup': RefInfo(object_type='ipam.vlangroup', field_name='interface', is_generic=True), + 'interfaceVlanTranslationPolicy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='interface', is_generic=True), + 'interfaceVlanTranslationRule': RefInfo(object_type='ipam.vlantranslationrule', field_name='interface', is_generic=True), + 'interfaceVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='interface', is_generic=True), + 'interfaceVrf': RefInfo(object_type='ipam.vrf', field_name='interface', is_generic=True), + 'interfaceVirtualChassis': RefInfo(object_type='dcim.virtualchassis', field_name='interface', is_generic=True), + 'interfaceVirtualCircuit': RefInfo(object_type='circuits.virtualcircuit', field_name='interface', is_generic=True), + 'interfaceVirtualCircuitTermination': RefInfo(object_type='circuits.virtualcircuittermination', field_name='interface', is_generic=True), + 'interfaceVirtualCircuitType': 
RefInfo(object_type='circuits.virtualcircuittype', field_name='interface', is_generic=True), + 'interfaceVirtualDeviceContext': RefInfo(object_type='dcim.virtualdevicecontext', field_name='interface', is_generic=True), + 'interfaceVirtualDisk': RefInfo(object_type='virtualization.virtualdisk', field_name='interface', is_generic=True), + 'interfaceVirtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='interface', is_generic=True), + 'interfaceWirelessLan': RefInfo(object_type='wireless.wirelesslan', field_name='interface', is_generic=True), + 'interfaceWirelessLanGroup': RefInfo(object_type='wireless.wirelesslangroup', field_name='interface', is_generic=True), + 'interfaceWirelessLink': RefInfo(object_type='wireless.wirelesslink', field_name='interface', is_generic=True), + }, + 'dcim.frontport': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'module': RefInfo(object_type='dcim.module', field_name='module'), + 'rearPort': RefInfo(object_type='dcim.rearport', field_name='rear_port'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'vpn.ikepolicy': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'vpn.ikeproposal': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'ipam.ipaddress': { + 'vrf': RefInfo(object_type='ipam.vrf', field_name='vrf'), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'assignedObjectFhrpGroup': RefInfo(object_type='ipam.fhrpgroup', field_name='assigned_object', is_generic=True), + 'assignedObjectInterface': RefInfo(object_type='dcim.interface', field_name='assigned_object', is_generic=True), + 'assignedObjectVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='assigned_object', is_generic=True), + 'natInside': RefInfo(object_type='ipam.ipaddress', field_name='nat_inside'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'ipam.iprange': { + 'vrf': 
RefInfo(object_type='ipam.vrf', field_name='vrf'), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'role': RefInfo(object_type='ipam.role', field_name='role'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'vpn.ipsecpolicy': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'vpn.ipsecprofile': { + 'ikePolicy': RefInfo(object_type='vpn.ikepolicy', field_name='ike_policy'), + 'ipsecPolicy': RefInfo(object_type='vpn.ipsecpolicy', field_name='ipsec_policy'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'vpn.ipsecproposal': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.interface': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'module': RefInfo(object_type='dcim.module', field_name='module'), + 'parent': RefInfo(object_type='dcim.interface', field_name='parent'), + 'bridge': RefInfo(object_type='dcim.interface', field_name='bridge'), + 'lag': RefInfo(object_type='dcim.interface', field_name='lag'), + 'primaryMacAddress': RefInfo(object_type='dcim.macaddress', field_name='primary_mac_address'), + 'untaggedVlan': RefInfo(object_type='ipam.vlan', field_name='untagged_vlan'), + 'qinqSvlan': RefInfo(object_type='ipam.vlan', field_name='qinq_svlan'), + 'vlanTranslationPolicy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='vlan_translation_policy'), + 'vrf': RefInfo(object_type='ipam.vrf', field_name='vrf'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.inventoryitem': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'parent': RefInfo(object_type='dcim.inventoryitem', field_name='parent'), + 'role': RefInfo(object_type='dcim.inventoryitemrole', field_name='role'), + 'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='manufacturer'), + 'componentConsolePort': RefInfo(object_type='dcim.consoleport', field_name='component', 
is_generic=True), + 'componentConsoleServerPort': RefInfo(object_type='dcim.consoleserverport', field_name='component', is_generic=True), + 'componentFrontPort': RefInfo(object_type='dcim.frontport', field_name='component', is_generic=True), + 'componentInterface': RefInfo(object_type='dcim.interface', field_name='component', is_generic=True), + 'componentPowerOutlet': RefInfo(object_type='dcim.poweroutlet', field_name='component', is_generic=True), + 'componentPowerPort': RefInfo(object_type='dcim.powerport', field_name='component', is_generic=True), + 'componentRearPort': RefInfo(object_type='dcim.rearport', field_name='component', is_generic=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.inventoryitemrole': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'vpn.l2vpn': { + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'vpn.l2vpntermination': { + 'l2Vpn': RefInfo(object_type='vpn.l2vpn', field_name='l2vpn'), + 'assignedObjectInterface': RefInfo(object_type='dcim.interface', field_name='assigned_object', is_generic=True), + 'assignedObjectVlan': RefInfo(object_type='ipam.vlan', field_name='assigned_object', is_generic=True), + 'assignedObjectVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='assigned_object', is_generic=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.location': { + 'site': RefInfo(object_type='dcim.site', field_name='site'), + 'parent': RefInfo(object_type='dcim.location', field_name='parent'), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.macaddress': { + 'assignedObjectInterface': RefInfo(object_type='dcim.interface', field_name='assigned_object', is_generic=True), + 'assignedObjectVmInterface': 
RefInfo(object_type='virtualization.vminterface', field_name='assigned_object', is_generic=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.manufacturer': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.module': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'moduleBay': RefInfo(object_type='dcim.modulebay', field_name='module_bay'), + 'moduleType': RefInfo(object_type='dcim.moduletype', field_name='module_type'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.modulebay': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'module': RefInfo(object_type='dcim.module', field_name='module'), + 'installedModule': RefInfo(object_type='dcim.module', field_name='installed_module'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.moduletype': { + 'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='manufacturer'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.platform': { + 'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='manufacturer'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.powerfeed': { + 'powerPanel': RefInfo(object_type='dcim.powerpanel', field_name='power_panel'), + 'rack': RefInfo(object_type='dcim.rack', field_name='rack'), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.poweroutlet': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'module': RefInfo(object_type='dcim.module', field_name='module'), + 'powerPort': RefInfo(object_type='dcim.powerport', field_name='power_port'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.powerpanel': { + 'site': RefInfo(object_type='dcim.site', field_name='site'), + 'location': 
RefInfo(object_type='dcim.location', field_name='location'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.powerport': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'module': RefInfo(object_type='dcim.module', field_name='module'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'ipam.prefix': { + 'vrf': RefInfo(object_type='ipam.vrf', field_name='vrf'), + 'scopeLocation': RefInfo(object_type='dcim.location', field_name='scope', is_generic=True), + 'scopeRegion': RefInfo(object_type='dcim.region', field_name='scope', is_generic=True), + 'scopeSite': RefInfo(object_type='dcim.site', field_name='scope', is_generic=True), + 'scopeSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='scope', is_generic=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'vlan': RefInfo(object_type='ipam.vlan', field_name='vlan'), + 'role': RefInfo(object_type='ipam.role', field_name='role'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'circuits.provider': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'circuits.provideraccount': { + 'provider': RefInfo(object_type='circuits.provider', field_name='provider'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'circuits.providernetwork': { + 'provider': RefInfo(object_type='circuits.provider', field_name='provider'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'ipam.rir': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.rack': { + 'site': RefInfo(object_type='dcim.site', field_name='site'), + 'location': RefInfo(object_type='dcim.location', field_name='location'), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'role': RefInfo(object_type='dcim.rackrole', field_name='role'), + 'rackType': RefInfo(object_type='dcim.racktype', field_name='rack_type'), + 'tags': 
RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.rackreservation': { + 'rack': RefInfo(object_type='dcim.rack', field_name='rack'), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.rackrole': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.racktype': { + 'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='manufacturer'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.rearport': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'module': RefInfo(object_type='dcim.module', field_name='module'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.region': { + 'parent': RefInfo(object_type='dcim.region', field_name='parent'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'ipam.role': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'ipam.routetarget': { + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'ipam.service': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'virtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='virtual_machine'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.site': { + 'region': RefInfo(object_type='dcim.region', field_name='region'), + 'group': RefInfo(object_type='dcim.sitegroup', field_name='group'), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.sitegroup': { + 'parent': RefInfo(object_type='dcim.sitegroup', field_name='parent'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'tenancy.tenant': { + 'group': RefInfo(object_type='tenancy.tenantgroup', 
field_name='group'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'tenancy.tenantgroup': { + 'parent': RefInfo(object_type='tenancy.tenantgroup', field_name='parent'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'vpn.tunnel': { + 'group': RefInfo(object_type='vpn.tunnelgroup', field_name='group'), + 'ipsecProfile': RefInfo(object_type='vpn.ipsecprofile', field_name='ipsec_profile'), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'vpn.tunnelgroup': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'vpn.tunneltermination': { + 'tunnel': RefInfo(object_type='vpn.tunnel', field_name='tunnel'), + 'terminationAsn': RefInfo(object_type='ipam.asn', field_name='termination', is_generic=True), + 'terminationAsnRange': RefInfo(object_type='ipam.asnrange', field_name='termination', is_generic=True), + 'terminationAggregate': RefInfo(object_type='ipam.aggregate', field_name='termination', is_generic=True), + 'terminationCable': RefInfo(object_type='dcim.cable', field_name='termination', is_generic=True), + 'terminationCablePath': RefInfo(object_type='dcim.cablepath', field_name='termination', is_generic=True), + 'terminationCableTermination': RefInfo(object_type='dcim.cabletermination', field_name='termination', is_generic=True), + 'terminationCircuit': RefInfo(object_type='circuits.circuit', field_name='termination', is_generic=True), + 'terminationCircuitGroup': RefInfo(object_type='circuits.circuitgroup', field_name='termination', is_generic=True), + 'terminationCircuitGroupAssignment': RefInfo(object_type='circuits.circuitgroupassignment', field_name='termination', is_generic=True), + 'terminationCircuitTermination': RefInfo(object_type='circuits.circuittermination', field_name='termination', is_generic=True), + 'terminationCircuitType': RefInfo(object_type='circuits.circuittype', 
field_name='termination', is_generic=True), + 'terminationCluster': RefInfo(object_type='virtualization.cluster', field_name='termination', is_generic=True), + 'terminationClusterGroup': RefInfo(object_type='virtualization.clustergroup', field_name='termination', is_generic=True), + 'terminationClusterType': RefInfo(object_type='virtualization.clustertype', field_name='termination', is_generic=True), + 'terminationConsolePort': RefInfo(object_type='dcim.consoleport', field_name='termination', is_generic=True), + 'terminationConsoleServerPort': RefInfo(object_type='dcim.consoleserverport', field_name='termination', is_generic=True), + 'terminationContact': RefInfo(object_type='tenancy.contact', field_name='termination', is_generic=True), + 'terminationContactAssignment': RefInfo(object_type='tenancy.contactassignment', field_name='termination', is_generic=True), + 'terminationContactGroup': RefInfo(object_type='tenancy.contactgroup', field_name='termination', is_generic=True), + 'terminationContactRole': RefInfo(object_type='tenancy.contactrole', field_name='termination', is_generic=True), + 'terminationDevice': RefInfo(object_type='dcim.device', field_name='termination', is_generic=True), + 'terminationDeviceBay': RefInfo(object_type='dcim.devicebay', field_name='termination', is_generic=True), + 'terminationDeviceRole': RefInfo(object_type='dcim.devicerole', field_name='termination', is_generic=True), + 'terminationDeviceType': RefInfo(object_type='dcim.devicetype', field_name='termination', is_generic=True), + 'terminationFhrpGroup': RefInfo(object_type='ipam.fhrpgroup', field_name='termination', is_generic=True), + 'terminationFhrpGroupAssignment': RefInfo(object_type='ipam.fhrpgroupassignment', field_name='termination', is_generic=True), + 'terminationFrontPort': RefInfo(object_type='dcim.frontport', field_name='termination', is_generic=True), + 'terminationIkePolicy': RefInfo(object_type='vpn.ikepolicy', field_name='termination', is_generic=True), + 
'terminationIkeProposal': RefInfo(object_type='vpn.ikeproposal', field_name='termination', is_generic=True), + 'terminationIpAddress': RefInfo(object_type='ipam.ipaddress', field_name='termination', is_generic=True), + 'terminationIpRange': RefInfo(object_type='ipam.iprange', field_name='termination', is_generic=True), + 'terminationIpSecPolicy': RefInfo(object_type='vpn.ipsecpolicy', field_name='termination', is_generic=True), + 'terminationIpSecProfile': RefInfo(object_type='vpn.ipsecprofile', field_name='termination', is_generic=True), + 'terminationIpSecProposal': RefInfo(object_type='vpn.ipsecproposal', field_name='termination', is_generic=True), + 'terminationInterface': RefInfo(object_type='dcim.interface', field_name='termination', is_generic=True), + 'terminationInventoryItem': RefInfo(object_type='dcim.inventoryitem', field_name='termination', is_generic=True), + 'terminationInventoryItemRole': RefInfo(object_type='dcim.inventoryitemrole', field_name='termination', is_generic=True), + 'terminationL2Vpn': RefInfo(object_type='vpn.l2vpn', field_name='termination', is_generic=True), + 'terminationL2VpnTermination': RefInfo(object_type='vpn.l2vpntermination', field_name='termination', is_generic=True), + 'terminationLocation': RefInfo(object_type='dcim.location', field_name='termination', is_generic=True), + 'terminationMacAddress': RefInfo(object_type='dcim.macaddress', field_name='termination', is_generic=True), + 'terminationManufacturer': RefInfo(object_type='dcim.manufacturer', field_name='termination', is_generic=True), + 'terminationModule': RefInfo(object_type='dcim.module', field_name='termination', is_generic=True), + 'terminationModuleBay': RefInfo(object_type='dcim.modulebay', field_name='termination', is_generic=True), + 'terminationModuleType': RefInfo(object_type='dcim.moduletype', field_name='termination', is_generic=True), + 'terminationPlatform': RefInfo(object_type='dcim.platform', field_name='termination', is_generic=True), + 
'terminationPowerFeed': RefInfo(object_type='dcim.powerfeed', field_name='termination', is_generic=True), + 'terminationPowerOutlet': RefInfo(object_type='dcim.poweroutlet', field_name='termination', is_generic=True), + 'terminationPowerPanel': RefInfo(object_type='dcim.powerpanel', field_name='termination', is_generic=True), + 'terminationPowerPort': RefInfo(object_type='dcim.powerport', field_name='termination', is_generic=True), + 'terminationPrefix': RefInfo(object_type='ipam.prefix', field_name='termination', is_generic=True), + 'terminationProvider': RefInfo(object_type='circuits.provider', field_name='termination', is_generic=True), + 'terminationProviderAccount': RefInfo(object_type='circuits.provideraccount', field_name='termination', is_generic=True), + 'terminationProviderNetwork': RefInfo(object_type='circuits.providernetwork', field_name='termination', is_generic=True), + 'terminationRir': RefInfo(object_type='ipam.rir', field_name='termination', is_generic=True), + 'terminationRack': RefInfo(object_type='dcim.rack', field_name='termination', is_generic=True), + 'terminationRackReservation': RefInfo(object_type='dcim.rackreservation', field_name='termination', is_generic=True), + 'terminationRackRole': RefInfo(object_type='dcim.rackrole', field_name='termination', is_generic=True), + 'terminationRackType': RefInfo(object_type='dcim.racktype', field_name='termination', is_generic=True), + 'terminationRearPort': RefInfo(object_type='dcim.rearport', field_name='termination', is_generic=True), + 'terminationRegion': RefInfo(object_type='dcim.region', field_name='termination', is_generic=True), + 'terminationRole': RefInfo(object_type='ipam.role', field_name='termination', is_generic=True), + 'terminationRouteTarget': RefInfo(object_type='ipam.routetarget', field_name='termination', is_generic=True), + 'terminationService': RefInfo(object_type='ipam.service', field_name='termination', is_generic=True), + 'terminationSite': RefInfo(object_type='dcim.site', 
field_name='termination', is_generic=True), + 'terminationSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='termination', is_generic=True), + 'terminationTag': RefInfo(object_type='extras.tag', field_name='termination', is_generic=True), + 'terminationTenant': RefInfo(object_type='tenancy.tenant', field_name='termination', is_generic=True), + 'terminationTenantGroup': RefInfo(object_type='tenancy.tenantgroup', field_name='termination', is_generic=True), + 'terminationTunnel': RefInfo(object_type='vpn.tunnel', field_name='termination', is_generic=True), + 'terminationTunnelGroup': RefInfo(object_type='vpn.tunnelgroup', field_name='termination', is_generic=True), + 'terminationTunnelTermination': RefInfo(object_type='vpn.tunneltermination', field_name='termination', is_generic=True), + 'terminationVlan': RefInfo(object_type='ipam.vlan', field_name='termination', is_generic=True), + 'terminationVlanGroup': RefInfo(object_type='ipam.vlangroup', field_name='termination', is_generic=True), + 'terminationVlanTranslationPolicy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='termination', is_generic=True), + 'terminationVlanTranslationRule': RefInfo(object_type='ipam.vlantranslationrule', field_name='termination', is_generic=True), + 'terminationVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='termination', is_generic=True), + 'terminationVrf': RefInfo(object_type='ipam.vrf', field_name='termination', is_generic=True), + 'terminationVirtualChassis': RefInfo(object_type='dcim.virtualchassis', field_name='termination', is_generic=True), + 'terminationVirtualCircuit': RefInfo(object_type='circuits.virtualcircuit', field_name='termination', is_generic=True), + 'terminationVirtualCircuitTermination': RefInfo(object_type='circuits.virtualcircuittermination', field_name='termination', is_generic=True), + 'terminationVirtualCircuitType': RefInfo(object_type='circuits.virtualcircuittype', field_name='termination', 
is_generic=True), + 'terminationVirtualDeviceContext': RefInfo(object_type='dcim.virtualdevicecontext', field_name='termination', is_generic=True), + 'terminationVirtualDisk': RefInfo(object_type='virtualization.virtualdisk', field_name='termination', is_generic=True), + 'terminationVirtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='termination', is_generic=True), + 'terminationWirelessLan': RefInfo(object_type='wireless.wirelesslan', field_name='termination', is_generic=True), + 'terminationWirelessLanGroup': RefInfo(object_type='wireless.wirelesslangroup', field_name='termination', is_generic=True), + 'terminationWirelessLink': RefInfo(object_type='wireless.wirelesslink', field_name='termination', is_generic=True), + 'outsideIp': RefInfo(object_type='ipam.ipaddress', field_name='outside_ip'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'ipam.vlan': { + 'site': RefInfo(object_type='dcim.site', field_name='site'), + 'group': RefInfo(object_type='ipam.vlangroup', field_name='group'), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'role': RefInfo(object_type='ipam.role', field_name='role'), + 'qinqSvlan': RefInfo(object_type='ipam.vlan', field_name='qinq_svlan'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'ipam.vlangroup': { + 'scopeCluster': RefInfo(object_type='virtualization.cluster', field_name='scope', is_generic=True), + 'scopeClusterGroup': RefInfo(object_type='virtualization.clustergroup', field_name='scope', is_generic=True), + 'scopeLocation': RefInfo(object_type='dcim.location', field_name='scope', is_generic=True), + 'scopeRack': RefInfo(object_type='dcim.rack', field_name='scope', is_generic=True), + 'scopeRegion': RefInfo(object_type='dcim.region', field_name='scope', is_generic=True), + 'scopeSite': RefInfo(object_type='dcim.site', field_name='scope', is_generic=True), + 'scopeSiteGroup': RefInfo(object_type='dcim.sitegroup', 
field_name='scope', is_generic=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'ipam.vlantranslationrule': { + 'policy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='policy'), + }, + 'virtualization.vminterface': { + 'virtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='virtual_machine'), + 'parent': RefInfo(object_type='virtualization.vminterface', field_name='parent'), + 'bridge': RefInfo(object_type='virtualization.vminterface', field_name='bridge'), + 'primaryMacAddress': RefInfo(object_type='dcim.macaddress', field_name='primary_mac_address'), + 'untaggedVlan': RefInfo(object_type='ipam.vlan', field_name='untagged_vlan'), + 'qinqSvlan': RefInfo(object_type='ipam.vlan', field_name='qinq_svlan'), + 'vlanTranslationPolicy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='vlan_translation_policy'), + 'vrf': RefInfo(object_type='ipam.vrf', field_name='vrf'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'ipam.vrf': { + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.virtualchassis': { + 'master': RefInfo(object_type='dcim.device', field_name='master'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'circuits.virtualcircuit': { + 'providerNetwork': RefInfo(object_type='circuits.providernetwork', field_name='provider_network'), + 'providerAccount': RefInfo(object_type='circuits.provideraccount', field_name='provider_account'), + 'type': RefInfo(object_type='circuits.virtualcircuittype', field_name='type'), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'circuits.virtualcircuittermination': { + 'virtualCircuit': RefInfo(object_type='circuits.virtualcircuit', field_name='virtual_circuit'), + 'interface': RefInfo(object_type='dcim.interface', 
field_name='interface'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'circuits.virtualcircuittype': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'dcim.virtualdevicecontext': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'primaryIp4': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip4'), + 'primaryIp6': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip6'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'virtualization.virtualdisk': { + 'virtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='virtual_machine'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'virtualization.virtualmachine': { + 'site': RefInfo(object_type='dcim.site', field_name='site'), + 'cluster': RefInfo(object_type='virtualization.cluster', field_name='cluster'), + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'role': RefInfo(object_type='dcim.devicerole', field_name='role'), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'platform': RefInfo(object_type='dcim.platform', field_name='platform'), + 'primaryIp4': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip4'), + 'primaryIp6': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip6'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'wireless.wirelesslan': { + 'group': RefInfo(object_type='wireless.wirelesslangroup', field_name='group'), + 'vlan': RefInfo(object_type='ipam.vlan', field_name='vlan'), + 'scopeLocation': RefInfo(object_type='dcim.location', field_name='scope', is_generic=True), + 'scopeRegion': RefInfo(object_type='dcim.region', field_name='scope', is_generic=True), + 'scopeSite': RefInfo(object_type='dcim.site', field_name='scope', is_generic=True), + 'scopeSiteGroup': 
RefInfo(object_type='dcim.sitegroup', field_name='scope', is_generic=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'wireless.wirelesslangroup': { + 'parent': RefInfo(object_type='wireless.wirelesslangroup', field_name='parent'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, + 'wireless.wirelesslink': { + 'interfaceA': RefInfo(object_type='dcim.interface', field_name='interface_a'), + 'interfaceB': RefInfo(object_type='dcim.interface', field_name='interface_b'), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + }, +} + +def get_json_ref_info(object_type: str|Type[models.Model], json_field_name: str) -> RefInfo|None: + if isinstance(object_type, models.Model): + content_type = ContentType.objects.get_for_model(object_type) + object_type = content_type.app_label + '.' + content_type.model + return _REF_INFO.get(object_type, {}).get(json_field_name) diff --git a/netbox_diode_plugin/api/supported_models.py b/netbox_diode_plugin/api/supported_models.py new file mode 100644 index 0000000..ac90caf --- /dev/null +++ b/netbox_diode_plugin/api/supported_models.py @@ -0,0 +1,292 @@ +#!/usr/bin/env python +# Copyright 2025 NetBox Labs Inc +"""NetBox Diode Data - API supported models.""" + +import importlib +import logging +import time +from functools import lru_cache +from typing import List, Type + +from django.apps import apps +from django.db import models +from django.db.models import ManyToOneRel +from django.db.models.fields import NOT_PROVIDED +from rest_framework import serializers +from utilities.api import get_serializer_for_model as netbox_get_serializer_for_model + +logger = logging.getLogger(__name__) + +# Supported apps +SUPPORTED_APPS = [ + "circuits", + "dcim", + "extras", + "ipam", + "virtualization", + "vpn", + "wireless", + "tenancy", +] + +# Models 
that are not supported +EXCLUDED_MODELS = [ + "TaggedItem", + "Subscription", + "ScriptModule", + "Dashboard", + "Notification", +] + + +def extract_supported_models() -> dict[str, dict]: + """Extract supported models from NetBox.""" + supported_models = discover_models(SUPPORTED_APPS) + processed_models = set() + + logger.debug(f"Supported models: {supported_models}") + + models_to_process = supported_models + extracted_models: dict[str, dict] = {} + + start_ts = time.time() + while models_to_process: + model = models_to_process.pop() + try: + fields, related_models = get_model_fields(model) + if not fields: + continue + + prerequisites = get_prerequisites(model, fields) + extracted_models[model.__name__] = { + "fields": fields, + "prerequisites": prerequisites, + } + processed_models.add(model.__name__) + for related_model in related_models: + if ( + related_model.__name__ not in extracted_models + and related_model not in models_to_process + ): + models_to_process.append(related_model) + except Exception as e: + logger.error(f"extract_supported_models: {model.__name__} error: {e}") + + finish_ts = time.time() + lapsed_millis = (finish_ts - start_ts) * 1000 + logger.info( + f"done extracting supported models in {lapsed_millis:.2f} milliseconds - extracted_models: {len(extracted_models)}" + ) + + return extracted_models + + +def get_prerequisites(model_class, fields) -> List[dict[str, str]]: + """Get the prerequisite models for the model.""" + prerequisites: List[dict[str, str]] = [] + prerequisite_models = getattr(model_class, "prerequisite_models", []) + + for prereq in prerequisite_models: + prereq_model = apps.get_model(prereq) + + for field_name, field_info in fields.items(): + related_model = field_info.get("related_model") + prerequisite_info = { + "field_name": field_name, + "prerequisite_model": prereq_model, + } + if ( + prerequisite_info not in prerequisites + and related_model + and related_model.get("model_class_name") == prereq_model.__name__ + ): + 
prerequisites.append(prerequisite_info) + break + + return prerequisites + + +@lru_cache(maxsize=128) +def get_model_fields(model_class) -> tuple[dict, list]: + """Get the fields for the model ordered as they are in the serializer.""" + related_models_to_process = [] + + # Skip unsupported apps and excluded models + if ( + model_class._meta.app_label not in SUPPORTED_APPS + or model_class.__name__ in EXCLUDED_MODELS + ): + return {}, [] + + try: + # Get serializer fields to maintain order + serializer_class = get_serializer_for_model(model_class) + serializer_fields = serializer_class().get_fields() + serializer_fields_names = list(serializer_fields.keys()) + except Exception as e: + logger.error(f"Error getting serializer fields for model {model_class}: {e}") + return {}, [] + + # Get all model fields + model_fields = { + field.name: field + for field in model_class._meta.get_fields() + if field.__class__.__name__ != "CounterCacheField" + } + + # Reorder fields to match serializer order + ordered_fields = { + field_name: model_fields[field_name] + for field_name in serializer_fields_names + if field_name in model_fields + } + + # Add remaining fields + ordered_fields.update( + { + field_name: field + for field_name, field in model_fields.items() + if field_name not in ordered_fields + } + ) + + fields_info = {} + + for field_name, field in ordered_fields.items(): + field_info = { + "type": field.get_internal_type(), + "required": not field.null and not field.blank, + "is_many_to_one_rel": isinstance(field, ManyToOneRel), + "is_numeric": field.get_internal_type() + in [ + "IntegerField", + "FloatField", + "DecimalField", + "PositiveIntegerField", + "PositiveSmallIntegerField", + "SmallIntegerField", + "BigIntegerField", + ], + } + + # Handle default values + default_value = None + if hasattr(field, "default"): + default_value = ( + field.default if field.default not in (NOT_PROVIDED, dict) else None + ) + field_info["default"] = default_value + + # Handle related 
fields + if field.is_relation: + related_model = field.related_model + if related_model: + related_model_key = ( + f"{related_model._meta.app_label}.{related_model._meta.model_name}" + ) + related_model_info = { + "app_label": related_model._meta.app_label, + "model_name": related_model._meta.model_name, + "model_class_name": related_model.__name__, + "object_type": related_model_key, + "filters": get_field_filters(model_class, field_name), + } + field_info["related_model"] = related_model_info + if ( + related_model.__name__ not in EXCLUDED_MODELS + and related_model not in related_models_to_process + ): + related_models_to_process.append(related_model) + + fields_info[field_name] = field_info + + return fields_info, related_models_to_process + + +@lru_cache(maxsize=128) +def get_field_filters(model_class, field_name): + """Get filters for a field.""" + if hasattr(model_class, "_netbox_private"): + return None + + try: + filterset_name = f"{model_class.__name__}FilterSet" + filterset_module = importlib.import_module( + f"{model_class._meta.app_label}.filtersets" + ) + filterset_class = getattr(filterset_module, filterset_name) + + _filters = set() + field_filters = [] + for filter_name, filter_instance in filterset_class.get_filters().items(): + filter_by = getattr(filter_instance, "field_name", None) + filter_field_extra = getattr(filter_instance, "extra", None) + + if not filter_name.startswith(field_name) or filter_by.endswith("_id"): + continue + + if filter_by and filter_by not in _filters: + _filters.add(filter_by) + field_filters.append( + { + "filter_by": filter_by, + "filter_to_field_name": ( + filter_field_extra.get("to_field_name", None) + if filter_field_extra + else None + ), + } + ) + return list(field_filters) if field_filters else None + except Exception as e: + logger.error( + f"Error getting field filters for model {model_class.__name__} and field {field_name}: {e}" + ) + return None + + +@lru_cache(maxsize=128) +def 
get_serializer_for_model(model, prefix=""): + """Cached wrapper for NetBox's get_serializer_for_model function.""" + return netbox_get_serializer_for_model(model, prefix) + + +def discover_models(root_packages: List[str]) -> list[Type[models.Model]]: + """Discovers all model classes in specified root packages.""" + discovered_models = [] + + # Look through all modules that might contain serializers + module_names = [ + "api.serializers", + ] + + for root_package in root_packages: + logger.debug(f"Searching in root package: {root_package}") + + for module_name in module_names: + full_module_path = f"{root_package}.{module_name}" + try: + module = __import__(full_module_path, fromlist=["*"]) + except ImportError: + logger.error(f"Could not import {full_module_path}") + continue + + # Find all serializer classes in the module + for serializer_name in dir(module): + serializer = getattr(module, serializer_name) + if ( + isinstance(serializer, type) + and issubclass(serializer, serializers.Serializer) + and serializer != serializers.Serializer + and serializer != serializers.ModelSerializer + and hasattr(serializer, "Meta") + and hasattr(serializer.Meta, "model") + ): + model = serializer.Meta.model + if model not in discovered_models: + discovered_models.append(model) + logger.debug( + f"Discovered model: {model.__module__}.{model.__name__}" + ) + + return discovered_models diff --git a/netbox_diode_plugin/api/transformer.py b/netbox_diode_plugin/api/transformer.py new file mode 100644 index 0000000..9b36863 --- /dev/null +++ b/netbox_diode_plugin/api/transformer.py @@ -0,0 +1,224 @@ +"""Object resolution for diffing.""" +from collections import defaultdict +import copy +from dataclasses import dataclass +from functools import lru_cache +import json +import logging +import re +from uuid import uuid4 + +from .plugin_utils import get_json_ref_info +from .matcher import fingerprint, merge_data, find_existing_object + + +logger = logging.getLogger("netbox.diode_data") + 
+@dataclass +class UnresolvedReference: + """unresolved reference to an object.""" + + object_type: str + uuid: str + + def __str__(self): + return f"new_object:{self.object_type}:{self.uuid}" + + def __eq__(self, other): + return self.object_type == other.object_type and self.uuid == other.uuid + + def __hash__(self): + return hash((self.object_type, self.uuid)) + + def __lt__(self, other): + return self.object_type < other.object_type or (self.object_type == other.object_type and self.uuid < other.uuid) + + +@lru_cache(maxsize=128) +def _camel_to_snake_case(name): + """Convert camelCase string to snake_case.""" + name = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name) + return re.sub("([a-z0-9])([A-Z])", r"\1_\2", name).lower() + + +# These are cases that imply a circular reference / implied parentage. +# TODO: Can we detect these cases ? +_NESTED_CONTEXT = { + "dcim.interface": { + # interface.primary_mac_address -> mac_address.assigned_object = interface + "primary_mac_address": lambda object_type, uuid: { + "assigned_object_type": object_type, + "assigned_object_id": UnresolvedReference(object_type=object_type, uuid=uuid), + }, + }, +} + +# these fields cannot be assigned until both objects are saved already. +_IS_CIRCULAR = { + "dcim.interface": {"primary_mac_address", }, +} + +def _no_context(object_type, uuid): + return None + +def _nested_context(object_type, uuid, field_name): + return _NESTED_CONTEXT.get(object_type, {}).get(field_name, _no_context)(object_type, uuid) + +def _is_circular(object_type, field_name): + return field_name in _IS_CIRCULAR.get(object_type, set()) + +def transform_proto_json(proto_json: dict, object_type: str) -> list[dict]: + """ + Transform keys of proto json dict to flattened dictionaries with model field keys. + + This also handles placing `_type` fields for generic references, + a certain form of deduplication and resolution of existing objects. 
+ """ + entities = _transform_proto_json_1(proto_json, object_type) + logger.error(f"_transform_proto_json_1: {json.dumps(entities, default=lambda o: str(o), indent=4)}") + deduplicated = _fingerprint_dedupe(entities) + logger.error(f"_fingerprint_dedupe: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") + # TODO defaulting + # TODO autoslug + resolved = _resolve_existing_references(deduplicated) + logger.error(f"_resolve_references: {json.dumps(resolved, default=lambda o: str(o), indent=4)}") + + return resolved + +def _transform_proto_json_1(proto_json: dict, object_type: str, context=None, existing=None) -> list[dict]: + uuid = str(uuid4()) + transformed = { + "_object_type": object_type, + "_uuid": uuid, + } + if context is not None: + transformed.update(context) + existing = existing or {} + entities = [transformed] + for key, value in proto_json.items(): + ref_info = get_json_ref_info(object_type, key) + if ref_info is None: + transformed[_camel_to_snake_case(key)] = copy.deepcopy(value) + continue + + nested_context = _nested_context(object_type, uuid, ref_info.field_name) + + # nested reference + field_name = ref_info.field_name + + if ref_info.is_generic: + transformed[field_name + "_type"] = ref_info.object_type + field_name = field_name + "_id" + + if isinstance(value, list): + ref_values = [] + for item in value: + nested_refs = _transform_proto_json_1(item, ref_info.object_type, nested_context) + ref = nested_refs[-1] + ref_values.append(UnresolvedReference( + object_type=ref_info.object_type, + uuid=ref['_uuid'], + )) + entities = nested_refs + entities + transformed[field_name] = ref_values + else: + nested_refs = _transform_proto_json_1(value, ref_info.object_type, nested_context) + ref = nested_refs[-1] + transformed[field_name] = UnresolvedReference( + object_type=ref_info.object_type, + uuid=ref['_uuid'], + ) + entities = nested_refs + entities + return entities + +def _fingerprint_dedupe(entities: list[dict]) -> list[dict]: + 
by_fp = {} + deduplicated = [] + new_refs = {} # uuid -> uuid + + for entity in entities: + fp = fingerprint(entity, entity['_object_type']) + existing = by_fp.get(fp) + if existing is None: + logger.error(" * entity is new.") + new_entity = copy.deepcopy(entity) + _update_unresolved_refs(new_entity, new_refs) + by_fp[fp] = new_entity + deduplicated.append(fp) + else: + logger.error(" * entity already exists.") + new_refs[entity['_uuid']] = existing['_uuid'] + merged = merge_data(existing, entity) + _update_unresolved_refs(merged, new_refs) + by_fp[fp] = merged + + return [by_fp[fp] for fp in deduplicated] + +def _update_unresolved_refs(entity, new_refs): + for k, v in entity.items(): + if isinstance(v, UnresolvedReference) and v.uuid in new_refs: + v.uuid = new_refs[v.uuid] + elif isinstance(v, (list, tuple)): + for item in v: + if isinstance(item, UnresolvedReference) and item.uuid in new_refs: + item.uuid = new_refs[item.uuid] + # TODO maps ... + +def _resolve_existing_references(entities: list[dict]) -> list[dict]: + seen = {} + new_refs = {} + resolved = [] + for data in entities: + object_type = data['_object_type'] + data = copy.deepcopy(data) + existing = find_existing_object(data, object_type) + if existing is not None: + logger.error(f"existing {data} -> {existing}") + fp = (object_type, existing.id) + if fp in seen: + logger.warning(f"objects resolved to the same existing id after deduplication: {seen[fp]} and {data}") + else: + seen[fp] = data + data['id'] = existing.id + data['_instance'] = existing + new_refs[data['_uuid']] = existing.id + _update_resolved_refs(data, new_refs) + resolved.append(data) + else: + data['id'] = UnresolvedReference(object_type, data['_uuid']) + _update_resolved_refs(data, new_refs) + resolved.append(data) + return resolved + +def _update_resolved_refs(data, new_refs): + for k, v in data.items(): + if isinstance(v, UnresolvedReference) and v.uuid in new_refs: + data[k] = new_refs[v.uuid] + elif isinstance(v, (list, tuple)): 
+ new_items = [] + for item in v: + if isinstance(item, UnresolvedReference) and item.uuid in new_refs: + new_items.append(new_refs[item.uuid]) + else: + new_items.append(item) + data[k] = new_items + # TODO maps ... + +def cleanup_unresolved_references(data: dict) -> list[str]: + """Find and stringify unresolved references in fields.""" + unresolved = set() + for k, v in data.items(): + if isinstance(v, UnresolvedReference): + unresolved.add(k) + data[k] = str(v) + elif isinstance(v, (list, tuple)): + items = [] + for item in v: + if isinstance(item, UnresolvedReference): + unresolved.add(k) + items.append(str(item)) + else: + items.append(item) + data[k] = items + # TODO maps + return sorted(unresolved) diff --git a/netbox_diode_plugin/api/urls.py b/netbox_diode_plugin/api/urls.py index 6e25625..aa0cf62 100644 --- a/netbox_diode_plugin/api/urls.py +++ b/netbox_diode_plugin/api/urls.py @@ -5,7 +5,7 @@ from django.urls import include, path from netbox.api.routers import NetBoxRouter -from .views import ApplyChangeSetView, ObjectStateView, GenerateDiffView +from .views import ApplyChangeSetView, GenerateDiffView, ObjectStateView router = NetBoxRouter() diff --git a/netbox_diode_plugin/api/views.py b/netbox_diode_plugin/api/views.py index a8f7358..1b9fef5 100644 --- a/netbox_diode_plugin/api/views.py +++ b/netbox_diode_plugin/api/views.py @@ -1,6 +1,8 @@ #!/usr/bin/env python # Copyright 2024 NetBox Labs Inc """Diode NetBox Plugin - API Views.""" +import json +import logging from typing import Any, Dict, Optional from django.apps import apps @@ -24,7 +26,19 @@ from netbox_diode_plugin.api.permissions import IsDiodeReader, IsDiodeWriter from netbox_diode_plugin.api.serializers import ApplyChangeSetRequestSerializer, ObjectStateSerializer +from netbox_diode_plugin.api.differ import generate_changeset + +logger = logging.getLogger("netbox.diode_data") +# Try to import Branch model at module level +Branch = None +try: + if apps.is_installed("netbox_branching"): + from 
netbox_branching.models import Branch +except ImportError: + logger.warning( + "netbox_branching plugin is installed but models could not be imported" + ) def dynamic_import(name): """Dynamically import a class from an absolute path string.""" @@ -674,10 +688,18 @@ class ApplyChangeSetException(Exception): pass -##### -import logging -logger = logging.getLogger("netbox.diode_data") +def pascal_to_lower_camel_case(name): + """Convert PascalCase to lowerCamelCase.""" + return name[0].lower() + name[1:] + +def get_entity_key(model_name): + """Get the entity key for a model name.""" + # Use a dictionary for special cases instead of match-case + special_cases = {"VMInterface": "vminterface", "IPAddress": "ipAddress"} + + # Return from special cases if present, otherwise convert to lowerCamelCase + return special_cases.get(model_name, pascal_to_lower_camel_case(model_name)) class GenerateDiffView(views.APIView): @@ -687,10 +709,43 @@ class GenerateDiffView(views.APIView): def post(self, request, *args, **kwargs): """Generate diff for entity.""" + try: + return self._post(request, *args, **kwargs) + except Exception: + import traceback + traceback.print_exc() + raise + def _post(self, request, *args, **kwargs): entity = request.data.get("entity") object_type = request.data.get("object_type") - logger.error(f"generate diff called with entity: {entity} and object_type: {object_type}") + if not entity: + raise ValidationError("Entity is required") + if not object_type: + raise ValidationError("Object type is required") + + app_label, model_name = object_type.split(".") + model_class = apps.get_model(app_label, model_name) + + # Convert model name to lowerCamelCase for entity lookup + entity_key = get_entity_key(model_class.__name__) + original_entity_data = entity.get(entity_key) + + if original_entity_data is None: + raise ValidationError(f"No data found for {entity_key} in entity") + + change_set = generate_changeset(original_entity_data, object_type) + + branch_id = 
request.headers.get("X-NetBox-Branch") + + # If branch ID is provided and branching plugin is installed, get branch name + if branch_id and Branch is not None: + try: + branch = Branch.objects.get(id=branch_id) + change_set.branch = {"id": branch.id, "name": branch.name} + except Branch.DoesNotExist: + logger.warning(f"Branch with ID {branch_id} does not exist") - return Response({}, status=status.HTTP_500_INTERNAL_SERVER_ERROR) \ No newline at end of file + logger.info(f"change_set: {json.dumps(change_set.to_dict(), default=str)}") + return Response(change_set.to_dict(), status=status.HTTP_200_OK) From a433f3cdb5c4a0e5d5777b37f79933dbabc8af30 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Thu, 27 Mar 2025 12:37:34 +0100 Subject: [PATCH 03/30] set default values and missing slugs Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/api/differ.py | 5 +-- netbox_diode_plugin/api/supported_models.py | 10 +++--- netbox_diode_plugin/api/transformer.py | 38 +++++++++++++++++++-- 3 files changed, 43 insertions(+), 10 deletions(-) diff --git a/netbox_diode_plugin/api/differ.py b/netbox_diode_plugin/api/differ.py index 8e1ae7c..0077d93 100644 --- a/netbox_diode_plugin/api/differ.py +++ b/netbox_diode_plugin/api/differ.py @@ -88,8 +88,9 @@ def prechange_data_from_instance(instance) -> dict: return prechange_data model_class = instance.__class__ + object_type = f"{model_class._meta.app_label}.{model_class.__name__}" - model = SUPPORTED_MODELS.get(model_class.__name__) + model = SUPPORTED_MODELS.get(object_type) if not model: raise ValidationError(f"Model {model_class.__name__} is not supported") @@ -182,7 +183,7 @@ def generate_changeset(entity: dict, object_type: str) -> ChangeSet: """Generate a changeset for an entity.""" change_set = ChangeSet() - entities = transform_proto_json(entity, object_type) + entities = transform_proto_json(entity, object_type, SUPPORTED_MODELS) for entity in entities: prechange_data = {} changed_attrs = [] diff --git 
a/netbox_diode_plugin/api/supported_models.py b/netbox_diode_plugin/api/supported_models.py index ac90caf..4c016bd 100644 --- a/netbox_diode_plugin/api/supported_models.py +++ b/netbox_diode_plugin/api/supported_models.py @@ -42,7 +42,6 @@ def extract_supported_models() -> dict[str, dict]: """Extract supported models from NetBox.""" supported_models = discover_models(SUPPORTED_APPS) - processed_models = set() logger.debug(f"Supported models: {supported_models}") @@ -58,15 +57,16 @@ def extract_supported_models() -> dict[str, dict]: continue prerequisites = get_prerequisites(model, fields) - extracted_models[model.__name__] = { + object_type = f"{model._meta.app_label}.{model._meta.model_name}" + extracted_models[object_type] = { "fields": fields, "prerequisites": prerequisites, } - processed_models.add(model.__name__) for related_model in related_models: + related_object_type = f"{related_model._meta.app_label}.{related_model._meta.model_name}" if ( - related_model.__name__ not in extracted_models - and related_model not in models_to_process + related_object_type not in extracted_models + and related_object_type not in models_to_process ): models_to_process.append(related_model) except Exception as e: diff --git a/netbox_diode_plugin/api/transformer.py b/netbox_diode_plugin/api/transformer.py index 9b36863..919a83f 100644 --- a/netbox_diode_plugin/api/transformer.py +++ b/netbox_diode_plugin/api/transformer.py @@ -7,6 +7,8 @@ import logging import re from uuid import uuid4 +from django.core.exceptions import ValidationError +from django.utils.text import slugify from .plugin_utils import get_json_ref_info from .matcher import fingerprint, merge_data, find_existing_object @@ -14,6 +16,13 @@ logger = logging.getLogger("netbox.diode_data") +_DEFAULT_SLUG_SOURCE_FIELD_NAME = "name" + +_OBJECT_TYPE_SLUG_FIELD_MAP = { + "dcim.devicetype": "model", + "dcim.racktype": "model", +} + @dataclass class UnresolvedReference: """unresolved reference to an object.""" @@ -67,7 
+76,7 @@ def _nested_context(object_type, uuid, field_name): def _is_circular(object_type, field_name): return field_name in _IS_CIRCULAR.get(object_type, set()) -def transform_proto_json(proto_json: dict, object_type: str) -> list[dict]: +def transform_proto_json(proto_json: dict, object_type: str, supported_models: dict) -> list[dict]: """ Transform keys of proto json dict to flattened dictionaries with model field keys. @@ -78,8 +87,7 @@ def transform_proto_json(proto_json: dict, object_type: str) -> list[dict]: logger.error(f"_transform_proto_json_1: {json.dumps(entities, default=lambda o: str(o), indent=4)}") deduplicated = _fingerprint_dedupe(entities) logger.error(f"_fingerprint_dedupe: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") - # TODO defaulting - # TODO autoslug + deduplicated = _set_defaults(deduplicated, supported_models) resolved = _resolve_existing_references(deduplicated) logger.error(f"_resolve_references: {json.dumps(resolved, default=lambda o: str(o), indent=4)}") @@ -131,6 +139,30 @@ def _transform_proto_json_1(proto_json: dict, object_type: str, context=None, ex entities = nested_refs + entities return entities +def _set_defaults(entities: list[dict], supported_models: dict): + for entity in entities: + model_fields = supported_models.get(entity['_object_type']) + if model_fields is None: + raise ValidationError(f"Model for object type {entity['_object_type']} is not supported") + + for field_name, field_info in model_fields.get('fields', {}).items(): + if entity.get(field_name) is None and field_info.get("default") is not None: + entity[field_name] = field_info["default"] + elif field_info["type"] == "SlugField" and entity.get(field_name) is None: + entity[field_name] = _generate_slug(entity['_object_type'], entity) + +def _generate_slug(object_type, data): + """Generate a slug for a model instance.""" + source_field = get_field_to_slugify(object_type) + if source_field in data and data[source_field]: + return 
slugify(str(data[source_field])) + + return None + +def get_field_to_slugify(object_type): + """Get the field to use as the source for the slug.""" + return _OBJECT_TYPE_SLUG_FIELD_MAP.get(object_type, _DEFAULT_SLUG_SOURCE_FIELD_NAME) + def _fingerprint_dedupe(entities: list[dict]) -> list[dict]: by_fp = {} deduplicated = [] From 4a932b7dd8b1ce6a07550b0511689f42a4074865 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Thu, 27 Mar 2025 12:40:19 +0100 Subject: [PATCH 04/30] tidy up setting defaults Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/api/transformer.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/netbox_diode_plugin/api/transformer.py b/netbox_diode_plugin/api/transformer.py index 919a83f..6e245e0 100644 --- a/netbox_diode_plugin/api/transformer.py +++ b/netbox_diode_plugin/api/transformer.py @@ -87,7 +87,8 @@ def transform_proto_json(proto_json: dict, object_type: str, supported_models: d logger.error(f"_transform_proto_json_1: {json.dumps(entities, default=lambda o: str(o), indent=4)}") deduplicated = _fingerprint_dedupe(entities) logger.error(f"_fingerprint_dedupe: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") - deduplicated = _set_defaults(deduplicated, supported_models) + # TODO: do we want to set defaults and slugs before resolving or after, as it may affect search scope? 
+ _set_defaults(deduplicated, supported_models) resolved = _resolve_existing_references(deduplicated) logger.error(f"_resolve_references: {json.dumps(resolved, default=lambda o: str(o), indent=4)}") From c5529e41a1ddfad154f685d3f38738d08e2f8998 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Thu, 27 Mar 2025 13:08:12 +0100 Subject: [PATCH 05/30] remove unused imports Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/api/differ.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/netbox_diode_plugin/api/differ.py b/netbox_diode_plugin/api/differ.py index 0077d93..dbe658a 100644 --- a/netbox_diode_plugin/api/differ.py +++ b/netbox_diode_plugin/api/differ.py @@ -1,25 +1,19 @@ """Differ.""" import copy -import decimal import json import logging -import re import uuid from dataclasses import dataclass, field from enum import Enum -from functools import lru_cache from dataclasses import dataclass, field from enum import Enum import copy import uuid -from django.db import models from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ValidationError -from django.utils.text import slugify from utilities.data import shallow_compare_dict -from vpn.constants import L2VPN_ASSIGNMENT_MODELS from .supported_models import extract_supported_models from .transformer import transform_proto_json, cleanup_unresolved_references From 0c730644a047e44d23fde79cd5e4880fe8008d6c Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Thu, 27 Mar 2025 14:23:56 +0100 Subject: [PATCH 06/30] fix constructor of object type Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/api/differ.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/netbox_diode_plugin/api/differ.py b/netbox_diode_plugin/api/differ.py index dbe658a..a4197b3 100644 --- a/netbox_diode_plugin/api/differ.py +++ b/netbox_diode_plugin/api/differ.py @@ -82,7 +82,7 @@ def prechange_data_from_instance(instance) -> dict: return prechange_data model_class = 
instance.__class__ - object_type = f"{model_class._meta.app_label}.{model_class.__name__}" + object_type = f"{model_class._meta.app_label}.{model_class._meta.model_name}" model = SUPPORTED_MODELS.get(object_type) if not model: From 0b33e8e3ca9cf531952fb5ab094ea0c45bb4eea2 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Thu, 27 Mar 2025 14:32:42 +0100 Subject: [PATCH 07/30] set slugs (if not present) after resolving existing instances Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/api/transformer.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/netbox_diode_plugin/api/transformer.py b/netbox_diode_plugin/api/transformer.py index 6e245e0..9f730a7 100644 --- a/netbox_diode_plugin/api/transformer.py +++ b/netbox_diode_plugin/api/transformer.py @@ -87,10 +87,12 @@ def transform_proto_json(proto_json: dict, object_type: str, supported_models: d logger.error(f"_transform_proto_json_1: {json.dumps(entities, default=lambda o: str(o), indent=4)}") deduplicated = _fingerprint_dedupe(entities) logger.error(f"_fingerprint_dedupe: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") - # TODO: do we want to set defaults and slugs before resolving or after, as it may affect search scope? 
- _set_defaults(deduplicated, supported_models) + _set_slugs(deduplicated, supported_models) + logger.error(f"_set_slugs: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") resolved = _resolve_existing_references(deduplicated) logger.error(f"_resolve_references: {json.dumps(resolved, default=lambda o: str(o), indent=4)}") + _set_defaults(resolved, supported_models) + logger.error(f"_set_defaults: {json.dumps(resolved, default=lambda o: str(o), indent=4)}") return resolved @@ -149,7 +151,15 @@ def _set_defaults(entities: list[dict], supported_models: dict): for field_name, field_info in model_fields.get('fields', {}).items(): if entity.get(field_name) is None and field_info.get("default") is not None: entity[field_name] = field_info["default"] - elif field_info["type"] == "SlugField" and entity.get(field_name) is None: + +def _set_slugs(entities: list[dict], supported_models: dict): + for entity in entities: + model_fields = supported_models.get(entity['_object_type']) + if model_fields is None: + raise ValidationError(f"Model for object type {entity['_object_type']} is not supported") + + for field_name, field_info in model_fields.get('fields', {}).items(): + if field_info["type"] == "SlugField" and entity.get(field_name) is None: entity[field_name] = _generate_slug(entity['_object_type'], entity) def _generate_slug(object_type, data): From ceabfefcde9fd11e864ea16065daa273b454e15a Mon Sep 17 00:00:00 2001 From: Luke Tucker Date: Thu, 27 Mar 2025 11:23:06 -0400 Subject: [PATCH 08/30] emit ref_id instead of variable object_id field for new objects --- netbox_diode_plugin/api/differ.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/netbox_diode_plugin/api/differ.py b/netbox_diode_plugin/api/differ.py index a4197b3..359f43b 100644 --- a/netbox_diode_plugin/api/differ.py +++ b/netbox_diode_plugin/api/differ.py @@ -37,8 +37,9 @@ class Change: change_type: ChangeType object_type: str - object_id: str + object_id: int | None 
object_primary_value: str + ref_id: str | None = field(default=None) id: str = field(default_factory=lambda: str(uuid.uuid4())) before: dict | None = field(default=None) data: dict | None = field(default=None) @@ -51,6 +52,7 @@ def to_dict(self) -> dict: "change_type": self.change_type.value, "object_type": self.object_type, "object_id": self.object_id, + "ref_id": self.ref_id, "object_primary_value": self.object_primary_value, "before": self.before, "data": self.data, @@ -149,10 +151,12 @@ def diff_to_change( change = Change( change_type=change_type, object_type=object_type, - object_id=prechange_data.get("id") or postchange_data.get("id"), + object_id=prechange_data.get("id"), object_primary_value="__PLACEHOLDER__", # TODO: get primary value new_refs=unresolved_references, ) + if change.object_id is None: + change.ref_id = postchange_data.get("id") postchange_data_clean = clean_diff_data(postchange_data) change.data = postchange_data_clean From 839b6847a9e9d67853bc668e252ef8d15e0723f8 Mon Sep 17 00:00:00 2001 From: Luke Tucker Date: Thu, 27 Mar 2025 11:38:21 -0400 Subject: [PATCH 09/30] improve entity field mapping coverage --- netbox_diode_plugin/api/views.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/netbox_diode_plugin/api/views.py b/netbox_diode_plugin/api/views.py index 1b9fef5..37f4a6d 100644 --- a/netbox_diode_plugin/api/views.py +++ b/netbox_diode_plugin/api/views.py @@ -3,6 +3,7 @@ """Diode NetBox Plugin - API Views.""" import json import logging +import re from typing import Any, Dict, Optional from django.apps import apps @@ -689,17 +690,14 @@ class ApplyChangeSetException(Exception): pass -def pascal_to_lower_camel_case(name): - """Convert PascalCase to lowerCamelCase.""" - return name[0].lower() + name[1:] def get_entity_key(model_name): """Get the entity key for a model name.""" - # Use a dictionary for special cases instead of match-case - special_cases = {"VMInterface": "vminterface", "IPAddress": 
"ipAddress"} - - # Return from special cases if present, otherwise convert to lowerCamelCase - return special_cases.get(model_name, pascal_to_lower_camel_case(model_name)) + s = re.sub(r'([A-Z0-9]{2,})([A-Z])([a-z])', r'\1_\2\3', model_name) + s = re.sub(r'([a-z])([A-Z])', r'\1_\2', s) + s = re.sub(r'_+', '_', s.lower()) # snake + s = ''.join([word.capitalize() for word in s.split("_")]) # upperCamelCase + return s[0].lower() + s[1:] # lowerCamelCase class GenerateDiffView(views.APIView): @@ -733,7 +731,7 @@ def _post(self, request, *args, **kwargs): original_entity_data = entity.get(entity_key) if original_entity_data is None: - raise ValidationError(f"No data found for {entity_key} in entity") + raise ValidationError(f"No data found for {entity_key} in entity got: {entity.keys()}") change_set = generate_changeset(original_entity_data, object_type) From ab9bbf349a3502abf72da78cb23e244cd3d74617 Mon Sep 17 00:00:00 2001 From: Luke Tucker Date: Thu, 27 Mar 2025 12:39:12 -0400 Subject: [PATCH 10/30] fill in primary value mapping, use primary value for slug --- netbox_diode_plugin/api/differ.py | 8 ++- netbox_diode_plugin/api/plugin_utils.py | 88 ++++++++++++++++++++++++- netbox_diode_plugin/api/transformer.py | 13 ++-- 3 files changed, 96 insertions(+), 13 deletions(-) diff --git a/netbox_diode_plugin/api/differ.py b/netbox_diode_plugin/api/differ.py index 359f43b..81f44e5 100644 --- a/netbox_diode_plugin/api/differ.py +++ b/netbox_diode_plugin/api/differ.py @@ -17,7 +17,7 @@ from .supported_models import extract_supported_models from .transformer import transform_proto_json, cleanup_unresolved_references - +from .plugin_utils import get_primary_value logger = logging.getLogger(__name__) @@ -148,11 +148,15 @@ def diff_to_change( if change_type == ChangeType.UPDATE and not len(changed_attrs) > 0: change_type = ChangeType.NOOP + primary_value = get_primary_value(postchange_data, object_type) + if primary_value is None: + primary_value = "(unnamed)" + change = Change( 
change_type=change_type, object_type=object_type, object_id=prechange_data.get("id"), - object_primary_value="__PLACEHOLDER__", # TODO: get primary value + object_primary_value=primary_value, new_refs=unresolved_references, ) if change.object_id is None: diff --git a/netbox_diode_plugin/api/plugin_utils.py b/netbox_diode_plugin/api/plugin_utils.py index 6b4a96a..16ac1c6 100644 --- a/netbox_diode_plugin/api/plugin_utils.py +++ b/netbox_diode_plugin/api/plugin_utils.py @@ -1,7 +1,7 @@ """Diode plugin helpers.""" # Generated code. DO NOT EDIT. -# Timestamp: 2025-03-26 20:52:02Z +# Timestamp: 2025-03-27 16:35:12Z from dataclasses import dataclass from typing import Type @@ -9,7 +9,6 @@ from django.contrib.contenttypes.models import ContentType from django.db import models - @dataclass class RefInfo: object_type: str @@ -755,3 +754,88 @@ def get_json_ref_info(object_type: str|Type[models.Model], json_field_name: str) content_type = ContentType.objects.get_for_model(object_type) object_type = content_type.app_label + '.' 
+ content_type.model return _REF_INFO.get(object_type, {}).get(json_field_name) + +_OBJECT_TYPE_PRIMARY_VALUE_FIELD_MAP = { + 'ipam.asn': 'asn', + 'dcim.devicetype': 'model', + 'circuits.circuit': 'cid', + 'ipam.ipaddress': 'address', + 'dcim.macaddress': 'mac_address', + 'dcim.moduletype': 'model', + 'ipam.prefix': 'prefix', + 'dcim.racktype': 'model', + 'circuits.virtualcircuit': 'cid', + 'wireless.wirelesslan': 'ssid', + 'ipam.asnrange': 'name', + 'circuits.circuitgroup': 'name', + 'circuits.circuittype': 'name', + 'virtualization.cluster': 'name', + 'virtualization.clustergroup': 'name', + 'virtualization.clustertype': 'name', + 'dcim.consoleport': 'name', + 'dcim.consoleserverport': 'name', + 'tenancy.contact': 'name', + 'tenancy.contactgroup': 'name', + 'tenancy.contactrole': 'name', + 'dcim.device': 'name', + 'dcim.devicebay': 'name', + 'dcim.devicerole': 'name', + 'ipam.fhrpgroup': 'name', + 'dcim.frontport': 'name', + 'vpn.ikepolicy': 'name', + 'vpn.ikeproposal': 'name', + 'vpn.ipsecpolicy': 'name', + 'vpn.ipsecprofile': 'name', + 'vpn.ipsecproposal': 'name', + 'dcim.interface': 'name', + 'dcim.inventoryitem': 'name', + 'dcim.inventoryitemrole': 'name', + 'vpn.l2vpn': 'name', + 'dcim.location': 'name', + 'dcim.manufacturer': 'name', + 'dcim.modulebay': 'name', + 'dcim.platform': 'name', + 'dcim.powerfeed': 'name', + 'dcim.poweroutlet': 'name', + 'dcim.powerpanel': 'name', + 'dcim.powerport': 'name', + 'circuits.provider': 'name', + 'circuits.provideraccount': 'name', + 'circuits.providernetwork': 'name', + 'ipam.rir': 'name', + 'dcim.rack': 'name', + 'dcim.rackrole': 'name', + 'dcim.rearport': 'name', + 'dcim.region': 'name', + 'ipam.role': 'name', + 'ipam.routetarget': 'name', + 'ipam.service': 'name', + 'dcim.site': 'name', + 'dcim.sitegroup': 'name', + 'extras.tag': 'name', + 'tenancy.tenant': 'name', + 'tenancy.tenantgroup': 'name', + 'vpn.tunnel': 'name', + 'vpn.tunnelgroup': 'name', + 'ipam.vlan': 'name', + 'ipam.vlangroup': 'name', + 
'ipam.vlantranslationpolicy': 'name', + 'virtualization.vminterface': 'name', + 'ipam.vrf': 'name', + 'dcim.virtualchassis': 'name', + 'circuits.virtualcircuittype': 'name', + 'dcim.virtualdevicecontext': 'name', + 'virtualization.virtualdisk': 'name', + 'virtualization.virtualmachine': 'name', + 'wireless.wirelesslangroup': 'name', +} + +def get_primary_value(data: dict, object_type: str) -> str|None: + field = _OBJECT_TYPE_PRIMARY_VALUE_FIELD_MAP.get(object_type) + if field is None: + return None + return data.get(field) + + +def get_primary_value_field(object_type: str, default=None) -> str: + return _OBJECT_TYPE_PRIMARY_VALUE_FIELD_MAP.get(object_type, default) diff --git a/netbox_diode_plugin/api/transformer.py b/netbox_diode_plugin/api/transformer.py index 9f730a7..74434d8 100644 --- a/netbox_diode_plugin/api/transformer.py +++ b/netbox_diode_plugin/api/transformer.py @@ -10,7 +10,7 @@ from django.core.exceptions import ValidationError from django.utils.text import slugify -from .plugin_utils import get_json_ref_info +from .plugin_utils import get_json_ref_info, get_primary_value_field from .matcher import fingerprint, merge_data, find_existing_object @@ -18,11 +18,6 @@ _DEFAULT_SLUG_SOURCE_FIELD_NAME = "name" -_OBJECT_TYPE_SLUG_FIELD_MAP = { - "dcim.devicetype": "model", - "dcim.racktype": "model", -} - @dataclass class UnresolvedReference: """unresolved reference to an object.""" @@ -147,7 +142,7 @@ def _set_defaults(entities: list[dict], supported_models: dict): model_fields = supported_models.get(entity['_object_type']) if model_fields is None: raise ValidationError(f"Model for object type {entity['_object_type']} is not supported") - + for field_name, field_info in model_fields.get('fields', {}).items(): if entity.get(field_name) is None and field_info.get("default") is not None: entity[field_name] = field_info["default"] @@ -157,7 +152,7 @@ def _set_slugs(entities: list[dict], supported_models: dict): model_fields = 
supported_models.get(entity['_object_type']) if model_fields is None: raise ValidationError(f"Model for object type {entity['_object_type']} is not supported") - + for field_name, field_info in model_fields.get('fields', {}).items(): if field_info["type"] == "SlugField" and entity.get(field_name) is None: entity[field_name] = _generate_slug(entity['_object_type'], entity) @@ -172,7 +167,7 @@ def _generate_slug(object_type, data): def get_field_to_slugify(object_type): """Get the field to use as the source for the slug.""" - return _OBJECT_TYPE_SLUG_FIELD_MAP.get(object_type, _DEFAULT_SLUG_SOURCE_FIELD_NAME) + return get_primary_value_field(object_type, _DEFAULT_SLUG_SOURCE_FIELD_NAME) def _fingerprint_dedupe(entities: list[dict]) -> list[dict]: by_fp = {} From 753fd69b8fb80f95188ce4f0c2911bf062d3e1cb Mon Sep 17 00:00:00 2001 From: Luke Tucker Date: Thu, 27 Mar 2025 13:57:27 -0400 Subject: [PATCH 11/30] use canonical field ordering in change dicts --- netbox_diode_plugin/api/differ.py | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/netbox_diode_plugin/api/differ.py b/netbox_diode_plugin/api/differ.py index 81f44e5..3a8a287 100644 --- a/netbox_diode_plugin/api/differ.py +++ b/netbox_diode_plugin/api/differ.py @@ -163,7 +163,6 @@ def diff_to_change( change.ref_id = postchange_data.get("id") postchange_data_clean = clean_diff_data(postchange_data) - change.data = postchange_data_clean if change_type == ChangeType.UPDATE: # remove null values @@ -176,11 +175,23 @@ def diff_to_change( for attr in changed_attrs if attr in postchange_data_clean }) - change.before = prechange_data_clean - change.data = merged_data + change.before = sort_dict_recursively(prechange_data_clean) + change.data = sort_dict_recursively(merged_data) + else: + change.data = sort_dict_recursively(postchange_data_clean) return change +def sort_dict_recursively(d): + """Recursively sorts a dictionary by keys.""" + if isinstance(d, dict): + return {k: 
sort_dict_recursively(v) for k, v in sorted(d.items())} + if isinstance(d, list): + return sorted([sort_dict_recursively(item) for item in d]) + return d + + + def generate_changeset(entity: dict, object_type: str) -> ChangeSet: """Generate a changeset for an entity.""" change_set = ChangeSet() From b59b06a7306f1a751d81fc0bb1b65e63b4eb399d Mon Sep 17 00:00:00 2001 From: Luke Tucker Date: Thu, 27 Mar 2025 16:59:15 -0400 Subject: [PATCH 12/30] first pass at certain common circular refs --- netbox_diode_plugin/api/differ.py | 12 +++- netbox_diode_plugin/api/matcher.py | 18 +++--- netbox_diode_plugin/api/transformer.py | 88 ++++++++++++++++++++++---- 3 files changed, 95 insertions(+), 23 deletions(-) diff --git a/netbox_diode_plugin/api/differ.py b/netbox_diode_plugin/api/differ.py index 3a8a287..1cb6af2 100644 --- a/netbox_diode_plugin/api/differ.py +++ b/netbox_diode_plugin/api/differ.py @@ -152,10 +152,11 @@ def diff_to_change( if primary_value is None: primary_value = "(unnamed)" + prior_id = prechange_data.get("id") change = Change( change_type=change_type, object_type=object_type, - object_id=prechange_data.get("id"), + object_id=prior_id if isinstance(prior_id, int) else None, object_primary_value=primary_value, new_refs=unresolved_references, ) @@ -197,6 +198,7 @@ def generate_changeset(entity: dict, object_type: str) -> ChangeSet: change_set = ChangeSet() entities = transform_proto_json(entity, object_type, SUPPORTED_MODELS) + by_uuid = {x['_uuid']: x for x in entities} for entity in entities: prechange_data = {} changed_attrs = [] @@ -206,7 +208,13 @@ def generate_changeset(entity: dict, object_type: str) -> ChangeSet: instance = entity.pop("_instance", None) if instance: - prechange_data = prechange_data_from_instance(instance) + # the prior state is another new object... 
+ if isinstance(instance, str): + prechange_data = copy.deepcopy(by_uuid[instance]) + # prior state is a model instance + else: + prechange_data = prechange_data_from_instance(instance) + changed_data = shallow_compare_dict( prechange_data, entity, ) diff --git a/netbox_diode_plugin/api/matcher.py b/netbox_diode_plugin/api/matcher.py index 200eb8a..cf40c4c 100644 --- a/netbox_diode_plugin/api/matcher.py +++ b/netbox_diode_plugin/api/matcher.py @@ -102,7 +102,7 @@ def fingerprint(self, data: dict) -> str|None: if field in insensitive: value = value.lower() values.append(value) - # logger.error(f"fingerprint {self}: {data} -> values: {tuple(values)}") + # logger.debug(f"fingerprint {self}: {data} -> values: {tuple(values)}") return hash(tuple(values)) @@ -155,7 +155,7 @@ def _build_fields_queryset(self, data) -> models.QuerySet: lookup_value = data.get(field.attname) lookup_kwargs[field.name] = lookup_value - logger.error(f" * query kwargs: {lookup_kwargs}") + # logger.error(f" * query kwargs: {lookup_kwargs}") qs = self.model_class.objects.filter(**lookup_kwargs) if self.condition: qs = qs.filter(self.condition) @@ -331,25 +331,25 @@ def find_existing_object(data: dict, object_type: str): Returns the object if found, otherwise None. 
""" - logger.error(f"resolving {data}") + logger.debug(f"resolving {data}") model_class = get_object_type_model(object_type) for matcher in get_model_matchers(model_class): if not matcher.has_required_fields(data): - logger.error(f" * skipped matcher {matcher.name} (missing fields)") + logger.debug(f" * skipped matcher {matcher.name} (missing fields)") continue q = matcher.build_queryset(data) if q is None: - logger.error(f" * skipped matcher {matcher.name} (no queryset)") + logger.debug(f" * skipped matcher {matcher.name} (no queryset)") continue try: - logger.error(f" * trying query {q.query}") + logger.debug(f" * trying query {q.query}") existing = q.get() - logger.error(f" -> Found object {existing} via {matcher.name}") + logger.debug(f" -> Found object {existing} via {matcher.name}") return existing except model_class.DoesNotExist: - logger.error(f" -> No object found for matcher {matcher.name}") + logger.debug(f" -> No object found for matcher {matcher.name}") continue - logger.error(" * No matchers found an existing object") + logger.debug(" * No matchers found an existing object") return None @lru_cache(maxsize=256) diff --git a/netbox_diode_plugin/api/transformer.py b/netbox_diode_plugin/api/transformer.py index 74434d8..9e085f2 100644 --- a/netbox_diode_plugin/api/transformer.py +++ b/netbox_diode_plugin/api/transformer.py @@ -29,6 +29,8 @@ def __str__(self): return f"new_object:{self.object_type}:{self.uuid}" def __eq__(self, other): + if not isinstance(other, UnresolvedReference): + return False return self.object_type == other.object_type and self.uuid == other.uuid def __hash__(self): @@ -45,8 +47,7 @@ def _camel_to_snake_case(name): return re.sub("([a-z0-9])([A-Z])", r"\1_\2", name).lower() -# These are cases that imply a circular reference / implied parentage. -# TODO: Can we detect these cases ? +# these are implied values pushed down to referenced objects. 
_NESTED_CONTEXT = { "dcim.interface": { # interface.primary_mac_address -> mac_address.assigned_object = interface @@ -55,11 +56,13 @@ def _camel_to_snake_case(name): "assigned_object_id": UnresolvedReference(object_type=object_type, uuid=uuid), }, }, -} - -# these fields cannot be assigned until both objects are saved already. -_IS_CIRCULAR = { - "dcim.interface": {"primary_mac_address", }, + "virtualization.vminterface": { + # interface.primary_mac_address -> mac_address.assigned_object = vinterface + "primary_mac_address": lambda object_type, uuid: { + "assigned_object_type": object_type, + "assigned_object_id": UnresolvedReference(object_type=object_type, uuid=uuid), + }, + }, } def _no_context(object_type, uuid): @@ -68,8 +71,13 @@ def _no_context(object_type, uuid): def _nested_context(object_type, uuid, field_name): return _NESTED_CONTEXT.get(object_type, {}).get(field_name, _no_context)(object_type, uuid) -def _is_circular(object_type, field_name): - return field_name in _IS_CIRCULAR.get(object_type, set()) +_IS_CIRCULAR_REFERENCE = { + "dcim.interface": frozenset(["primary_mac_address"]), + "virtualization.vminterface": frozenset(["primary_mac_address"]), +} + +def _is_circular_reference(object_type, field_name): + return field_name in _IS_CIRCULAR_REFERENCE.get(object_type, frozenset()) def transform_proto_json(proto_json: dict, object_type: str, supported_models: dict) -> list[dict]: """ @@ -88,8 +96,11 @@ def transform_proto_json(proto_json: dict, object_type: str, supported_models: d logger.error(f"_resolve_references: {json.dumps(resolved, default=lambda o: str(o), indent=4)}") _set_defaults(resolved, supported_models) logger.error(f"_set_defaults: {json.dumps(resolved, default=lambda o: str(o), indent=4)}") + output = _move_if_unresolved(resolved) + logger.error(f"_move_if_unresolved: {json.dumps(output, default=lambda o: str(o), indent=4)}") - return resolved + _check_unresolved_refs(output) + return output def _transform_proto_json_1(proto_json: 
dict, object_type: str, context=None, existing=None) -> list[dict]: uuid = str(uuid4()) @@ -101,6 +112,9 @@ def _transform_proto_json_1(proto_json: dict, object_type: str, context=None, ex transformed.update(context) existing = existing or {} entities = [transformed] + + move_if_unresolved = defaultdict(list) + for key, value in proto_json.items(): ref_info = get_json_ref_info(object_type, key) if ref_info is None: @@ -112,6 +126,10 @@ def _transform_proto_json_1(proto_json: dict, object_type: str, context=None, ex # nested reference field_name = ref_info.field_name + # if this is potentially a circular reference, we need to mark this for + # later checking. + is_circular = _is_circular_reference(object_type, field_name) + if ref_info.is_generic: transformed[field_name + "_type"] = ref_info.object_type field_name = field_name + "_id" @@ -121,6 +139,8 @@ def _transform_proto_json_1(proto_json: dict, object_type: str, context=None, ex for item in value: nested_refs = _transform_proto_json_1(item, ref_info.object_type, nested_context) ref = nested_refs[-1] + if is_circular: + move_if_unresolved[field_name].append(ref['_uuid']) ref_values.append(UnresolvedReference( object_type=ref_info.object_type, uuid=ref['_uuid'], @@ -130,11 +150,15 @@ def _transform_proto_json_1(proto_json: dict, object_type: str, context=None, ex else: nested_refs = _transform_proto_json_1(value, ref_info.object_type, nested_context) ref = nested_refs[-1] + if is_circular: + move_if_unresolved[field_name].append(ref['_uuid']) transformed[field_name] = UnresolvedReference( object_type=ref_info.object_type, uuid=ref['_uuid'], ) entities = nested_refs + entities + if len(move_if_unresolved) > 0: + transformed['_move_if_unresolved'] = move_if_unresolved return entities def _set_defaults(entities: list[dict], supported_models: dict): @@ -178,13 +202,13 @@ def _fingerprint_dedupe(entities: list[dict]) -> list[dict]: fp = fingerprint(entity, entity['_object_type']) existing = by_fp.get(fp) if existing 
is None: - logger.error(" * entity is new.") + logger.debug(" * entity is new.") new_entity = copy.deepcopy(entity) _update_unresolved_refs(new_entity, new_refs) by_fp[fp] = new_entity deduplicated.append(fp) else: - logger.error(" * entity already exists.") + logger.debug(" * entity already exists.") new_refs[entity['_uuid']] = existing['_uuid'] merged = merge_data(existing, entity) _update_unresolved_refs(merged, new_refs) @@ -260,3 +284,43 @@ def cleanup_unresolved_references(data: dict) -> list[str]: data[k] = items # TODO maps return sorted(unresolved) + +def _move_if_unresolved(entities: list[dict]) -> list[str]: + min_index = {} + by_uuid = {x['_uuid']: x for x in entities} + + cur = 1 + for entity in entities: + min_index[entity['_uuid']] = cur + cur += 1 + + moves = entity.pop('_move_if_unresolved', None) + if moves is None or entity.get('_instance') is not None: + continue + + logger.debug(f" * {entity} needs circular reference moves: {moves}") + entity2 = entity.copy() + entity2['_uuid'] = str(uuid4()) + by_uuid[entity2['_uuid']] = entity2 + for field_name, uuids in moves.items(): + entity.pop(field_name, None) + for uuid in uuids: + min_index[uuid] = cur + cur += 1 + + entity2['_instance'] = entity['_uuid'] + min_index[entity2['_uuid']] = cur + cur += 1 + + in_order = sorted((min_index[x], x) for x in min_index) + return [by_uuid[x[1]] for x in in_order] + + +def _check_unresolved_refs(entities: list[dict]) -> list[str]: + seen = set() + for e in entities: + seen.add((e['_object_type'], e['_uuid'])) + for k, v in e.items(): + if isinstance(v, UnresolvedReference): + if (v.object_type, v.uuid) not in seen: + raise ValueError(f"Unresolved reference {v} in {e} does not refer to a prior created object (circular reference?)") From 1a9289e02090a4833a9042042a1d94a1503be823 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Thu, 27 Mar 2025 23:03:20 +0100 Subject: [PATCH 13/30] remove ref id to itself Signed-off-by: Michal Fiedorowicz --- 
netbox_diode_plugin/api/differ.py | 9 +++------ netbox_diode_plugin/api/transformer.py | 3 ++- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/netbox_diode_plugin/api/differ.py b/netbox_diode_plugin/api/differ.py index 1cb6af2..a986b6b 100644 --- a/netbox_diode_plugin/api/differ.py +++ b/netbox_diode_plugin/api/differ.py @@ -6,10 +6,6 @@ import uuid from dataclasses import dataclass, field from enum import Enum -from dataclasses import dataclass, field -from enum import Enum -import copy -import uuid from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ValidationError @@ -37,8 +33,8 @@ class Change: change_type: ChangeType object_type: str - object_id: int | None - object_primary_value: str + object_id: int | None = field(default=None) + object_primary_value: str | None = field(default=None) ref_id: str | None = field(default=None) id: str = field(default_factory=lambda: str(uuid.uuid4())) before: dict | None = field(default=None) @@ -162,6 +158,7 @@ def diff_to_change( ) if change.object_id is None: change.ref_id = postchange_data.get("id") + _ = postchange_data.pop("id", None) postchange_data_clean = clean_diff_data(postchange_data) diff --git a/netbox_diode_plugin/api/transformer.py b/netbox_diode_plugin/api/transformer.py index 9e085f2..424f6f6 100644 --- a/netbox_diode_plugin/api/transformer.py +++ b/netbox_diode_plugin/api/transformer.py @@ -271,7 +271,8 @@ def cleanup_unresolved_references(data: dict) -> list[str]: unresolved = set() for k, v in data.items(): if isinstance(v, UnresolvedReference): - unresolved.add(k) + if k != 'id': + unresolved.add(k) data[k] = str(v) elif isinstance(v, (list, tuple)): items = [] From a47924655358bfe76a3215b0837db02c92aff721 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Thu, 27 Mar 2025 23:08:31 +0100 Subject: [PATCH 14/30] tidy up Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/api/differ.py | 4 +++- netbox_diode_plugin/api/matcher.py | 4 +++- 
netbox_diode_plugin/api/transformer.py | 5 ++++- 3 files changed, 10 insertions(+), 3 deletions(-) diff --git a/netbox_diode_plugin/api/differ.py b/netbox_diode_plugin/api/differ.py index a986b6b..3119881 100644 --- a/netbox_diode_plugin/api/differ.py +++ b/netbox_diode_plugin/api/differ.py @@ -1,4 +1,6 @@ -"""Differ.""" +#!/usr/bin/env python +# Copyright 2024 NetBox Labs Inc +"""Diode NetBox Plugin - API - Differ.""" import copy import json diff --git a/netbox_diode_plugin/api/matcher.py b/netbox_diode_plugin/api/matcher.py index cf40c4c..4f3f931 100644 --- a/netbox_diode_plugin/api/matcher.py +++ b/netbox_diode_plugin/api/matcher.py @@ -1,4 +1,6 @@ -"""Object matching utilities.""" +#!/usr/bin/env python +# Copyright 2024 NetBox Labs Inc +"""Diode NetBox Plugin - API - Object matching utilities.""" import logging from functools import cache, lru_cache diff --git a/netbox_diode_plugin/api/transformer.py b/netbox_diode_plugin/api/transformer.py index 424f6f6..fa85158 100644 --- a/netbox_diode_plugin/api/transformer.py +++ b/netbox_diode_plugin/api/transformer.py @@ -1,4 +1,7 @@ -"""Object resolution for diffing.""" +#!/usr/bin/env python +# Copyright 2024 NetBox Labs Inc +"""Diode NetBox Plugin - API - Object resolution for diffing.""" + from collections import defaultdict import copy from dataclasses import dataclass From db56ef623f94f51e446ea9784d7231728d76c98f Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Fri, 28 Mar 2025 00:41:20 +0100 Subject: [PATCH 15/30] add applier Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/api/applier.py | 100 ++++ netbox_diode_plugin/api/differ.py | 1 - netbox_diode_plugin/api/urls.py | 3 +- netbox_diode_plugin/api/views.py | 731 +++-------------------------- 4 files changed, 165 insertions(+), 670 deletions(-) create mode 100644 netbox_diode_plugin/api/applier.py diff --git a/netbox_diode_plugin/api/applier.py b/netbox_diode_plugin/api/applier.py new file mode 100644 index 0000000..033fe03 --- /dev/null +++ 
b/netbox_diode_plugin/api/applier.py @@ -0,0 +1,100 @@ +#!/usr/bin/env python +# Copyright 2024 NetBox Labs Inc +"""Diode NetBox Plugin - API - Applier.""" + + +import logging +from dataclasses import dataclass, field + +from django.apps import apps +from django.db import models + +from .differ import Change, ChangeSet, ChangeType + +logger = logging.getLogger(__name__) + + +@dataclass +class ApplyChangeSetResult: + """A result of applying a change set.""" + + id: str + success: bool + errors: dict | None = field(default=None) + + def to_dict(self) -> dict: + """Convert the result to a dictionary.""" + return { + "id": self.id, + "success": self.success, + "errors": self.errors, + } + + +class ApplyChangeSetException(Exception): + """ApplyChangeSetException is raised when an error occurs while applying a change set.""" + + def __init__(self, message, errors=None): + super().__init__(message) + self.message = message + self.errors = errors or {} + + def __str__(self): + if self.errors: + return f"{self.message}: {self.errors}" + return self.message + + +def apply_changeset(change_set: ChangeSet) -> ApplyChangeSetResult: + """Apply a change set.""" + + created = {} + + for change in change_set.changes: + change_type = change.change_type + object_type = change.object_type + data = change.data + new_refs = change.new_refs + + app_label, model_name = object_type.split(".") + model_class = apps.get_model(app_label, model_name) + + fk_fields = { + field.name: field.related_model + for field in model_class._meta.get_fields() + if field.is_relation + } + + for ref_field in new_refs: + data[ref_field] = created[data[ref_field]] + + # get model fields matching data keys if foreign key + for key, value in data.items(): + if fk_model := fk_fields.get(key): + if isinstance(value, int): + data[key] = fk_model.objects.get(id=value) + elif isinstance(value, models.Model): + data[key] = value + + if change_type == ChangeType.CREATE.value: + new_object = 
model_class.objects.create(**data) + created[change.ref_id] = new_object + + elif change_type == ChangeType.UPDATE.value: + object_id = change.object_id + if object_id is None: + raise ApplyChangeSetException(f"Object ID is required for update") + + model_class.objects.filter(id=object_id).update(**data) + elif change_type == ChangeType.NOOP.value: + pass + + else: + raise ApplyChangeSetException(f"Unknown change type: {change.type}") + + return ApplyChangeSetResult( + id=change_set.id, + success=True, + errors=None, + ) + diff --git a/netbox_diode_plugin/api/differ.py b/netbox_diode_plugin/api/differ.py index 3119881..61772d1 100644 --- a/netbox_diode_plugin/api/differ.py +++ b/netbox_diode_plugin/api/differ.py @@ -191,7 +191,6 @@ def sort_dict_recursively(d): return d - def generate_changeset(entity: dict, object_type: str) -> ChangeSet: """Generate a changeset for an entity.""" change_set = ChangeSet() diff --git a/netbox_diode_plugin/api/urls.py b/netbox_diode_plugin/api/urls.py index aa0cf62..cb6b3d4 100644 --- a/netbox_diode_plugin/api/urls.py +++ b/netbox_diode_plugin/api/urls.py @@ -5,12 +5,11 @@ from django.urls import include, path from netbox.api.routers import NetBoxRouter -from .views import ApplyChangeSetView, GenerateDiffView, ObjectStateView +from .views import ApplyChangeSetView, GenerateDiffView router = NetBoxRouter() urlpatterns = [ - path("object-state/", ObjectStateView.as_view()), path("apply-change-set/", ApplyChangeSetView.as_view()), path("generate-diff/", GenerateDiffView.as_view()), path("", include(router.urls)), diff --git a/netbox_diode_plugin/api/views.py b/netbox_diode_plugin/api/views.py index 37f4a6d..dc4ffab 100644 --- a/netbox_diode_plugin/api/views.py +++ b/netbox_diode_plugin/api/views.py @@ -4,30 +4,21 @@ import json import logging import re -from typing import Any, Dict, Optional from django.apps import apps -from django.conf import settings -from packaging import version - -if version.parse(settings.VERSION).major >= 4: - 
from core.models import ObjectType as NetBoxType -else: - from django.contrib.contenttypes.models import ContentType as NetBoxType - -from django.core.exceptions import FieldError -from django.core.exceptions import ValidationError as DjangoValidationError -from django.db import models, transaction -from django.db.models import Q +from django.db import transaction from rest_framework import status, views from rest_framework.exceptions import ValidationError from rest_framework.permissions import IsAuthenticated from rest_framework.response import Response -from utilities.api import get_serializer_for_model -from netbox_diode_plugin.api.permissions import IsDiodeReader, IsDiodeWriter -from netbox_diode_plugin.api.serializers import ApplyChangeSetRequestSerializer, ObjectStateSerializer -from netbox_diode_plugin.api.differ import generate_changeset +from netbox_diode_plugin.api.applier import ( + ApplyChangeSetException, + ApplyChangeSetResult, + apply_changeset, +) +from netbox_diode_plugin.api.differ import Change, ChangeSet, generate_changeset +from netbox_diode_plugin.api.permissions import IsDiodeWriter logger = logging.getLogger("netbox.diode_data") @@ -41,655 +32,6 @@ "netbox_branching plugin is installed but models could not be imported" ) -def dynamic_import(name): - """Dynamically import a class from an absolute path string.""" - components = name.split(".") - mod = __import__(components[0]) - for comp in components[1:]: - mod = getattr(mod, comp) - return mod - - -def _get_index_class_fields(object_type: str | NetBoxType): - """ - Given an object type name (e.g., 'dcim.site'), dynamically find and return the corresponding Index class fields. - - :param object_type: Object type name in the format 'app_label.model_name' - :return: The corresponding model and its Index class (e.g., SiteIndex) field names or None. 
- """ - try: - if isinstance(object_type, str): - app_label, model_name = object_type.split('.') - else: - app_label, model_name = object_type.app_label, object_type.model - - model = apps.get_model(app_label, model_name) - - if app_label == "extras" and model_name == "tag": - app_label = "netbox_diode_plugin" - - index_module = dynamic_import(f"{app_label}.search.{model.__name__}Index") - fields = getattr(index_module, "fields", None) - field_names = [field[0] for field in fields] - - return model, field_names - - except (LookupError, ModuleNotFoundError, AttributeError, ValueError): - return None, None - -def _validate_model_instance_fields(instance, fields, value): - """ - Validate the model instance fields against the value. - - :param instance: The model instance. - :param fields: The fields of the model instance. - :param value: The value to validate against the model instance fields. - :return: fields list passed validation - """ - errors = {} - - # Set provided values to the instance fields - for field in fields: - if hasattr(instance, field): - # get the field type - field_cls = instance._meta.get_field(field).__class__ - - field_value = _convert_field_value(field_cls, value) - setattr(instance, field, field_value) - - # Attempt to validate the instance - try: - instance.clean_fields() - except DjangoValidationError as e: - errors = e.message_dict - return errors - -def _convert_field_value(field_cls, value): - """Return the converted field value based on the field type.""" - if value is None: - return value - - try: - if issubclass(field_cls, (models.FloatField, models.DecimalField)): - return float(value) - if issubclass(field_cls, models.IntegerField): - return int(value) - except (ValueError, TypeError): - pass - - return value - - -class ObjectStateView(views.APIView): - """ObjectState view.""" - - permission_classes = [IsAuthenticated, IsDiodeReader] - - def _get_lookups(self, object_type_model: str) -> tuple: - """ - This method returns a tuple of 
related object lookups based on the provided object type model. - - Args: - ---- - object_type_model (str): The name of the object type model. - - Returns: - ------- - tuple: A tuple of related object lookups. The tuple is empty if the object type model does not match any - of the specified models. - - """ - if "'ipam.models.ip.ipaddress'" in object_type_model: - return ( - "assigned_object", - "assigned_object__device", - "assigned_object__device__site", - ) - if "'dcim.models.device_components.interface'" in object_type_model: - return "device", "device__site" - if "'dcim.models.devices.device'" in object_type_model: - return ("site",) - return () - - def _search_queryset(self, request): - """Search for objects according to object type using search index classes.""" - object_type = request.GET.get("object_type", None) - object_id = request.GET.get("id", None) - query = request.GET.get("q", None) - - if not object_type: - raise ValidationError("object_type parameter is required") - - if not object_id and not query: - raise ValidationError("id or q parameter is required") - - model, fields = _get_index_class_fields(object_type) - - if object_id: - queryset = model.objects.filter(id=object_id) - else: - q = Q() - - invalid_fields = _validate_model_instance_fields(model(), fields, query) - - fields = [field for field in fields if field not in invalid_fields] - - for field in fields: - q |= Q(**{f"{field}__exact": query}) # Exact match - - try: - queryset = model.objects.filter(q) - except DjangoValidationError: - queryset = model.objects.none() - pass - - lookups = self._get_lookups(str(model).lower()) - - if lookups: - queryset = queryset.prefetch_related(*lookups) - - additional_attributes_query_filter = ( - self._additional_attributes_query_filter() - ) - - if additional_attributes_query_filter: - queryset = queryset.filter(**additional_attributes_query_filter) - - return queryset - - def get(self, request, *args, **kwargs): - """ - Return a JSON with object_type, 
object_change_id, and object. - - Search for objects according to object type. - If the obj_type parameter is not in the parameters, raise a ValidationError. - When object ID is provided in the request, search using it in the model specified by object type. - If ID is not provided, use the q parameter for searching. - Lookup is iexact - """ - try: - queryset = self._search_queryset(request) - except (FieldError, ValueError): - return Response( - {"errors": ["invalid additional attributes provided"]}, - status=status.HTTP_400_BAD_REQUEST, - ) - - self.check_object_permissions(request, queryset) - - object_type = request.GET.get("object_type", None) - - serializer = ObjectStateSerializer( - queryset, - many=True, - context={ - "request": request, - "object_type": f"{object_type}", - }, - ) - - try: - if len(serializer.data) > 0: - return Response(serializer.data[0]) - return Response({}) - except AttributeError as e: - return Response( - {"errors": [f"Serializer error: {e.args[0]}"]}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def _additional_attributes_query_filter(self): - """Get the additional attributes query filter.""" - additional_attributes = {} - for attr in self.request.query_params: - if attr not in ["object_type", "id", "q", "_branch"]: - additional_attributes[attr] = self.request.query_params.get(attr) - - return dict(additional_attributes.items()) - - -class ApplyChangeSetView(views.APIView): - """ApplyChangeSet view.""" - - permission_classes = [IsAuthenticated, IsDiodeWriter] - - @staticmethod - def _get_object_type_model(object_type: str | NetBoxType): - """Get the object type model from object_type.""" - if isinstance(object_type, str): - app_label, model_name = object_type.split(".") - object_content_type = NetBoxType.objects.get_by_natural_key(app_label, model_name) - else: - object_content_type = object_type - return object_content_type, object_content_type.model_class() - - def _get_assigned_object_type(self, model_name: str): - """Get the 
object type model from applied IPAddress assigned object.""" - assignable_object_types = { - "interface": "dcim.interface", - } - return assignable_object_types.get(model_name.lower(), None) - - def _add_nested_opts(self, fields, key, value): - if isinstance(value, dict): - for nested_key, nested_value in value.items(): - self._add_nested_opts(fields, f"{key}__{nested_key}", nested_value) - elif not isinstance(value, list): - fields[key] = value - - def _get_serializer( - self, - change_type: str, - object_id: int, - object_type: str, - object_data: dict, - ): - """Get the serializer for the object type.""" - _, object_type_model_class = self._get_object_type_model(object_type) - - if change_type == "create": - return self._get_serializer_to_create(object_data, object_type, object_type_model_class) - - if change_type == "update": - return self._get_serializer_to_update(object_data, object_id, object_type, object_type_model_class) - - raise ValidationError("Invalid change_type") - - def _get_serializer_to_create(self, object_data, object_type, object_type_model_class): - # Get object data fields that are not dictionaries or lists - fields = self._get_fields_to_find_existing_objects(object_data, object_type) - # Check if the object already exists - try: - instance = object_type_model_class.objects.get(**fields) - return get_serializer_for_model(object_type_model_class)( - instance, data=object_data, context={"request": self.request, "pk": instance.pk} - ) - except object_type_model_class.DoesNotExist: - pass - serializer = get_serializer_for_model(object_type_model_class)( - data=object_data, context={"request": self.request} - ) - return serializer - - def _get_serializer_to_update(self, object_data, object_id, object_type, object_type_model_class): - lookups = () - fields = {} - primary_ip_to_set: Optional[dict] = None - if object_id: - fields["id"] = object_id - elif object_type == "dcim.device" and any( - object_data.get(attr) for attr in ("primary_ip4", 
"primary_ip6") - ): - ip_address = self._retrieve_primary_ip_address( - "primary_ip4", object_data - ) - - if ip_address is None: - ip_address = self._retrieve_primary_ip_address( - "primary_ip6", object_data - ) - - if ip_address is None: - raise ValidationError("primary IP not found") - - if ip_address: - primary_ip_to_set = { - "id": ip_address.id, - "family": ip_address.family, - } - - lookups = ("site",) - fields["name"] = object_data.get("name") - fields["site__name"] = object_data.get("site").get("name") - else: - raise ValidationError("object_id parameter is required") - try: - instance = object_type_model_class.objects.prefetch_related(*lookups).get(**fields) - if object_type == "dcim.device" and primary_ip_to_set: - object_data = { - "id": instance.id, - "device_type": instance.device_type.id, - "role": instance.role.id, - "site": instance.site.id, - f'primary_ip{primary_ip_to_set.get("family")}': primary_ip_to_set.get( - "id" - ), - } - except object_type_model_class.DoesNotExist: - raise ValidationError(f"object with id {object_id} does not exist") - serializer = get_serializer_for_model(object_type_model_class)( - instance, data=object_data, context={"request": self.request} - ) - return serializer - - def _get_fields_to_find_existing_objects(self, object_data, object_type): - fields = {} - for key, value in object_data.items(): - self._add_nested_opts(fields, key, value) - - match object_type: - case "dcim.interface" | "virtualization.vminterface": - mac_address = fields.pop("mac_address", None) - if mac_address is not None: - fields["primary_mac_address__mac_address"] = mac_address - case "ipam.ipaddress": - fields.pop("assigned_object_type") - fields["assigned_object_type_id"] = fields.pop("assigned_object_id") - case "ipam.prefix" | "virtualization.cluster": - if scope_type := object_data.get("scope_type"): - scope_type_model, _ = self._get_object_type_model(scope_type) - fields["scope_type"] = scope_type_model - case 
"virtualization.virtualmachine": - if cluster_scope_type := fields.get("cluster__scope_type"): - cluster_scope_type_model, _ = self._get_object_type_model(cluster_scope_type) - fields["cluster__scope_type"] = cluster_scope_type_model - case "virtualization.vminterface": - if cluster_scope_type := fields.get("virtual_machine__cluster__scope_type"): - cluster_scope_type_model, _ = self._get_object_type_model(cluster_scope_type) - fields["virtual_machine__cluster__scope_type"] = cluster_scope_type_model - - return fields - - def _retrieve_primary_ip_address(self, primary_ip_attr: str, object_data: dict): - """Retrieve the primary IP address object.""" - ip_address = object_data.get(primary_ip_attr) - if ip_address is None: - return None - - ipaddress_assigned_object = object_data.get(primary_ip_attr, {}).get( - "assigned_object", None - ) - if ipaddress_assigned_object is None: - return None - - interface = ipaddress_assigned_object.get("interface") - if interface is None: - return None - - interface_device = interface.get("device") - if interface_device is None: - return None - object_type_mode, object_type_model_class = self._get_object_type_model("ipam.ipaddress") - ip_address_object = object_type_model_class.objects.get( - address=ip_address.get("address"), - interface__name=interface.get("name"), - interface__device__name=interface_device.get("name"), - interface__device__site__name=interface_device.get("site").get("name"), - ) - return ip_address_object - - @staticmethod - def _get_error_response(change_set_id, error): - """Get the error response.""" - return Response( - { - "change_set_id": change_set_id, - "result": "failed", - "errors": error, - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - def _retrieve_assigned_object_interface_device_lookup_args( - self, device: dict - ) -> dict: - """ - This method retrieves the lookup arguments for the interface device of an assigned object. 
- - Args: - ---- - device (dict): A dictionary containing the details of the device. It should contain either 'id' or 'name' - of the device and 'site' which is another dictionary containing either 'id' or 'name' of the site. - - Returns: - ------- - dict: A dictionary containing the lookup arguments for the interface device. - - Raises: - ------ - ValidationError: If neither 'id' nor 'name' is provided for the device or the site. - - """ - args = {} - if device.get("id"): - args["device__id"] = device.get("id") - elif device.get("name"): - args["device__name"] = device.get("name") - else: - raise ValidationError( - "Interface device needs to have either id or name provided" - ) - - site = device.get("site", {}) - if site: - if site.get("id"): - args["device__site__id"] = site.get("id") - elif site.get("name"): - args["device__site__name"] = site.get("name") - else: - raise ValidationError( - "Interface device site needs to have either id or name provided" - ) - return args - - def _handle_ipaddress_assigned_object(self, object_data: dict) -> Optional[Dict[str, Any]]: - """Handle IPAM IP address assigned object.""" - ipaddress_assigned_object = object_data.get("assigned_object", None) - - if ipaddress_assigned_object is not None: - assigned_object_keys = list(ipaddress_assigned_object.keys()) - model_name = assigned_object_keys[0] - assigned_object_type = self._get_assigned_object_type(model_name) - assigned_object_model, object_type_model_class = self._get_object_type_model(assigned_object_type) - assigned_object_properties_dict = dict( - ipaddress_assigned_object[model_name].items() - ) - - if len(assigned_object_properties_dict) == 0: - return {"assigned_object": f"properties not provided for {model_name}"} - - try: - lookups = ( - ("device", "device__site") if model_name == "interface" else () - ) - args = {} - - if model_name == "interface": - if assigned_object_properties_dict.get("id"): - args["id"] = assigned_object_properties_dict.get("id") - elif 
assigned_object_properties_dict.get("name"): - try: - device = assigned_object_properties_dict.get("device", {}) - args = self._retrieve_assigned_object_interface_device_lookup_args( - device - ) - args["name"] = assigned_object_properties_dict.get("name") - except ValidationError as e: - return {"assigned_object": str(e)} - else: - error = f"provided properties '{assigned_object_properties_dict}' not sufficient to retrieve {model_name}" - return {"assigned_object": error} - - assigned_object_instance = ( - object_type_model_class.objects.prefetch_related(*lookups).get(**args) - ) - except object_type_model_class.DoesNotExist: - return { - "assigned_object": f"Assigned object with name {ipaddress_assigned_object[model_name]} does not exist" - } - - object_data.pop("assigned_object") - object_data["assigned_object_type"] = assigned_object_type - object_data["assigned_object_id"] = assigned_object_instance.id - return None - - def _handle_interface_mac_address_compat(self, instance, object_type: str, object_data: dict) -> Optional[Dict[str, Any]]: - """Handle interface mac address backward compatibility.""" - # TODO(ltucker): deprecate. 
- if object_type != "dcim.interface" and object_type != "virtualization.vminterface": - return None - - if object_data.get("mac_address"): - mac_address_value = object_data.pop("mac_address") - mac_address_instance, _ = instance.mac_addresses.get_or_create( - mac_address=mac_address_value, - ) - instance.primary_mac_address = mac_address_instance - instance.save() - return None - - def _handle_scope(self, object_data: dict, is_nested: bool = False) -> Optional[Dict[str, Any]]: - """Handle scope object.""" - if object_data.get("site"): - site = object_data.pop("site") - scope_type = "dcim.site" - object_type_model, object_type_model_class = self._get_object_type_model(scope_type) - # Scope type of the nested object happens to be resolved differently than in the top-level object - # and is expected to be a content type object instead of "app_label.model_name" string format - if is_nested: - object_data["scope_type"] = object_type_model - else: - object_data["scope_type"] = scope_type - site_id = site.get("id", None) - if site_id is None: - try: - site = object_type_model_class.objects.get( - name=site.get("name") - ) - site_id = site.id - except object_type_model_class.DoesNotExist: - return {"site": f"site with name {site.get('name')} does not exist"} - - object_data["scope_id"] = site_id - - return None - - def _transform_object_data(self, object_type: str, object_data: dict) -> Optional[Dict[str, Any]]: - """Transform object data.""" - errors = None - - match object_type: - case "ipam.ipaddress": - errors = self._handle_ipaddress_assigned_object(object_data) - case "ipam.prefix": - errors = self._handle_scope(object_data, False) - case "virtualization.cluster": - errors = self._handle_scope(object_data, False) - case "virtualization.virtualmachine": - if cluster_object_data := object_data.get("cluster"): - errors = self._handle_scope(cluster_object_data, True) - object_data["cluster"] = cluster_object_data - case "virtualization.vminterface": - cluster_object_data 
= object_data.get("virtual_machine", {}).get("cluster") - if cluster_object_data is not None: - errors = self._handle_scope(cluster_object_data, True) - object_data["virtual_machine"]["cluster"] = cluster_object_data - case _: - pass - - return errors - - def post(self, request, *args, **kwargs): - """ - Create a new change set and apply it to the current state. - - The request body should contain a list of changes to be applied. - """ - serializer_errors = [] - - request_serializer = ApplyChangeSetRequestSerializer(data=request.data) - - change_set_id = self.request.data.get("change_set_id", None) - - if not request_serializer.is_valid(): - for field_error_name in request_serializer.errors: - self._extract_serializer_errors( - field_error_name, request_serializer, serializer_errors - ) - - return self._get_error_response(change_set_id, serializer_errors) - - change_set = request_serializer.data.get("change_set", None) - - try: - with transaction.atomic(): - for change in change_set: - change_id = change.get("change_id", None) - change_type = change.get("change_type", None) - object_type = change.get("object_type", None) - object_data = change.get("data", None) - object_id = change.get("object_id", None) - - errors = self._transform_object_data(object_type, object_data) - - if errors is not None: - serializer_errors.append({"change_id": change_id, **errors}) - continue - - serializer = self._get_serializer(change_type, object_id, object_type, object_data) - - # Skip creating an object if it already exists - if change_type == "create" and serializer.context.get("pk"): - continue - - if serializer.is_valid(): - serializer.save() - else: - errors_dict = { - field_name: f"{field_name}: {str(field_errors[0])}" - for field_name, field_errors in serializer.errors.items() - } - - serializer_errors.append( - {"change_id": change_id, **errors_dict} - ) - continue - - errors = self._handle_interface_mac_address_compat(serializer.instance, object_type, object_data) - if errors 
is not None: - serializer_errors.append({"change_id": change_id, **errors}) - continue - if len(serializer_errors) > 0: - raise ApplyChangeSetException - except ApplyChangeSetException: - return self._get_error_response(change_set_id, serializer_errors) - - data = {"change_set_id": change_set_id, "result": "success"} - return Response(data, status=status.HTTP_200_OK) - - def _extract_serializer_errors( - self, field_error_name, request_serializer, serializer_errors - ): - """Extract serializer errors.""" - if isinstance(request_serializer.errors[field_error_name], dict): - for error_index, error_values in request_serializer.errors[ - field_error_name - ].items(): - errors_dict = { - "change_id": request_serializer.data.get("change_set")[ - error_index - ].get("change_id") - } - - for field_name, field_errors in error_values.items(): - errors_dict[field_name] = f"{str(field_errors[0])}" - - serializer_errors.append(errors_dict) - else: - errors = { - field_error_name: f"{str(field_errors)}" - for field_errors in request_serializer.errors[field_error_name] - } - - serializer_errors.append(errors) - - -class ApplyChangeSetException(Exception): - """ApplyChangeSetException used to cause atomic transaction rollback.""" - - pass - - def get_entity_key(model_name): """Get the entity key for a model name.""" @@ -731,7 +73,9 @@ def _post(self, request, *args, **kwargs): original_entity_data = entity.get(entity_key) if original_entity_data is None: - raise ValidationError(f"No data found for {entity_key} in entity got: {entity.keys()}") + raise ValidationError( + f"No data found for {entity_key} in entity got: {entity.keys()}" + ) change_set = generate_changeset(original_entity_data, object_type) @@ -747,3 +91,56 @@ def _post(self, request, *args, **kwargs): logger.info(f"change_set: {json.dumps(change_set.to_dict(), default=str)}") return Response(change_set.to_dict(), status=status.HTTP_200_OK) + + +class ApplyChangeSetView(views.APIView): + """ApplyChangeSet view.""" + + 
permission_classes = [IsAuthenticated, IsDiodeWriter] + + def post(self, request, *args, **kwargs): + """Apply change set for entity.""" + try: + return self._post(request, *args, **kwargs) + except Exception: + import traceback + + traceback.print_exc() + raise + + def _post(self, request, *args, **kwargs): + data = request.data.copy() + + if 'changes' in data: + data['changes'] = [Change(**change) for change in data['changes']] + change_set = ChangeSet(**data) + + if not change_set.id: + raise ValidationError("Change set ID is required") + if not change_set.changes: + raise ValidationError("Changes are required") + + try: + with transaction.atomic(): + result = apply_changeset(change_set) + except ApplyChangeSetException as e: + logger.error(f"Error applying change set: {e}") + result = ApplyChangeSetResult( + id=change_set.id, + success=False, + errors=e.errors, + ) + + return Response(result.to_dict(), status=status.HTTP_200_OK) + + @staticmethod + def _get_error_response(change_set_id, errors): + """Get the error response.""" + return Response( + { + "change_set_id": change_set_id, + "result": "failed", + "errors": errors, + }, + status=status.HTTP_400_BAD_REQUEST, + ) From 108604e323fb066091ba798d30fefc79ae5fe3bb Mon Sep 17 00:00:00 2001 From: Luke Tucker Date: Thu, 27 Mar 2025 20:39:52 -0400 Subject: [PATCH 16/30] fix resolve ref before lookup, use field name directly, not field attr --- netbox_diode_plugin/api/matcher.py | 22 ++++++++++++---------- netbox_diode_plugin/api/transformer.py | 2 +- 2 files changed, 13 insertions(+), 11 deletions(-) diff --git a/netbox_diode_plugin/api/matcher.py b/netbox_diode_plugin/api/matcher.py index 4f3f931..ad427ff 100644 --- a/netbox_diode_plugin/api/matcher.py +++ b/netbox_diode_plugin/api/matcher.py @@ -151,10 +151,11 @@ def _build_fields_queryset(self, data) -> models.QuerySet: lookup_kwargs = {} for field_name in self.fields: field = self.model_class._meta.get_field(field_name) - attribute = field.attname - if 
attribute not in data: + # attribute = field.attname (we just use field name, since not using the model instances...) + if field_name not in data: + logger.error(f" * cannot build fields queryset for {self.name} (missing field {field_name})") return None # cannot match, missing field data - lookup_value = data.get(field.attname) + lookup_value = data.get(field_name) lookup_kwargs[field.name] = lookup_value # logger.error(f" * query kwargs: {lookup_kwargs}") @@ -178,6 +179,7 @@ def _build_expressions_queryset(self, data) -> models.QuerySet: refs = _get_refs(expr) for ref in refs: if ref not in replacements: + logger.error(f" * cannot build expr queryset for {self.name} (missing field {ref})") return None # cannot match, missing field data rhs = expr.replace_expressions(replacements) @@ -333,25 +335,25 @@ def find_existing_object(data: dict, object_type: str): Returns the object if found, otherwise None. """ - logger.debug(f"resolving {data}") + logger.error(f"resolving {data}") model_class = get_object_type_model(object_type) for matcher in get_model_matchers(model_class): if not matcher.has_required_fields(data): - logger.debug(f" * skipped matcher {matcher.name} (missing fields)") + logger.error(f" * skipped matcher {matcher.name} (missing fields)") continue q = matcher.build_queryset(data) if q is None: - logger.debug(f" * skipped matcher {matcher.name} (no queryset)") + logger.error(f" * skipped matcher {matcher.name} (no queryset)") continue try: - logger.debug(f" * trying query {q.query}") + logger.error(f" * trying query {q.query}") existing = q.get() - logger.debug(f" -> Found object {existing} via {matcher.name}") + logger.error(f" -> Found object {existing} via {matcher.name}") return existing except model_class.DoesNotExist: - logger.debug(f" -> No object found for matcher {matcher.name}") + logger.error(f" -> No object found for matcher {matcher.name}") continue - logger.debug(" * No matchers found an existing object") + logger.error(" * No matchers 
found an existing object") return None @lru_cache(maxsize=256) diff --git a/netbox_diode_plugin/api/transformer.py b/netbox_diode_plugin/api/transformer.py index fa85158..6836c41 100644 --- a/netbox_diode_plugin/api/transformer.py +++ b/netbox_diode_plugin/api/transformer.py @@ -236,6 +236,7 @@ def _resolve_existing_references(entities: list[dict]) -> list[dict]: for data in entities: object_type = data['_object_type'] data = copy.deepcopy(data) + _update_resolved_refs(data, new_refs) existing = find_existing_object(data, object_type) if existing is not None: logger.error(f"existing {data} -> {existing}") @@ -247,7 +248,6 @@ def _resolve_existing_references(entities: list[dict]) -> list[dict]: data['id'] = existing.id data['_instance'] = existing new_refs[data['_uuid']] = existing.id - _update_resolved_refs(data, new_refs) resolved.append(data) else: data['id'] = UnresolvedReference(object_type, data['_uuid']) From 6f795c239a06f342c759da4b8ba4c97a60c4cd7f Mon Sep 17 00:00:00 2001 From: Luke Tucker Date: Thu, 27 Mar 2025 20:58:05 -0400 Subject: [PATCH 17/30] don't query with unresolved references --- netbox_diode_plugin/api/common.py | 27 ++++++++++++++++++++++++++ netbox_diode_plugin/api/matcher.py | 9 ++++++++- netbox_diode_plugin/api/transformer.py | 23 +--------------------- 3 files changed, 36 insertions(+), 23 deletions(-) create mode 100644 netbox_diode_plugin/api/common.py diff --git a/netbox_diode_plugin/api/common.py b/netbox_diode_plugin/api/common.py new file mode 100644 index 0000000..d582443 --- /dev/null +++ b/netbox_diode_plugin/api/common.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python +# Copyright 2024 NetBox Labs Inc +"""Diode NetBox Plugin - API - Common types and utilities.""" + +from dataclasses import dataclass + + +@dataclass +class UnresolvedReference: + """unresolved reference to an object.""" + + object_type: str + uuid: str + + def __str__(self): + return f"new_object:{self.object_type}:{self.uuid}" + + def __eq__(self, other): + if not 
isinstance(other, UnresolvedReference): + return False + return self.object_type == other.object_type and self.uuid == other.uuid + + def __hash__(self): + return hash((self.object_type, self.uuid)) + + def __lt__(self, other): + return self.object_type < other.object_type or (self.object_type == other.object_type and self.uuid < other.uuid) diff --git a/netbox_diode_plugin/api/matcher.py b/netbox_diode_plugin/api/matcher.py index ad427ff..d93e1d2 100644 --- a/netbox_diode_plugin/api/matcher.py +++ b/netbox_diode_plugin/api/matcher.py @@ -13,8 +13,9 @@ from django.db.models.lookups import Exact from django.db.models.query_utils import Q -logger = logging.getLogger(__name__) +from .common import UnresolvedReference +logger = logging.getLogger(__name__) # # TODO: add special cases for things that lack any unique constraints, @@ -156,6 +157,9 @@ def _build_fields_queryset(self, data) -> models.QuerySet: logger.error(f" * cannot build fields queryset for {self.name} (missing field {field_name})") return None # cannot match, missing field data lookup_value = data.get(field_name) + if isinstance(lookup_value, UnresolvedReference): + logger.error(f" * cannot build fields queryset for {self.name} ({field_name} is unresolved reference)") + return None # cannot match, missing field data lookup_kwargs[field.name] = lookup_value # logger.error(f" * query kwargs: {lookup_kwargs}") @@ -181,6 +185,9 @@ def _build_expressions_queryset(self, data) -> models.QuerySet: if ref not in replacements: logger.error(f" * cannot build expr queryset for {self.name} (missing field {ref})") return None # cannot match, missing field data + if isinstance(replacements[ref], UnresolvedReference): + logger.error(f" * cannot build expr queryset for {self.name} ({ref} is unresolved reference)") + return None # cannot match, missing field data rhs = expr.replace_expressions(replacements) condition = Exact(expr, rhs) diff --git a/netbox_diode_plugin/api/transformer.py 
b/netbox_diode_plugin/api/transformer.py index 6836c41..00149a1 100644 --- a/netbox_diode_plugin/api/transformer.py +++ b/netbox_diode_plugin/api/transformer.py @@ -13,6 +13,7 @@ from django.core.exceptions import ValidationError from django.utils.text import slugify +from .common import UnresolvedReference from .plugin_utils import get_json_ref_info, get_primary_value_field from .matcher import fingerprint, merge_data, find_existing_object @@ -21,28 +22,6 @@ _DEFAULT_SLUG_SOURCE_FIELD_NAME = "name" -@dataclass -class UnresolvedReference: - """unresolved reference to an object.""" - - object_type: str - uuid: str - - def __str__(self): - return f"new_object:{self.object_type}:{self.uuid}" - - def __eq__(self, other): - if not isinstance(other, UnresolvedReference): - return False - return self.object_type == other.object_type and self.uuid == other.uuid - - def __hash__(self): - return hash((self.object_type, self.uuid)) - - def __lt__(self, other): - return self.object_type < other.object_type or (self.object_type == other.object_type and self.uuid < other.uuid) - - @lru_cache(maxsize=128) def _camel_to_snake_case(name): """Convert camelCase string to snake_case.""" From 370bf26b49c023cf0d39bda060cdff27f60b13b1 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Fri, 28 Mar 2025 13:06:21 +0100 Subject: [PATCH 18/30] fix _build_expressions_queryset Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/api/matcher.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/netbox_diode_plugin/api/matcher.py b/netbox_diode_plugin/api/matcher.py index d93e1d2..8ee5303 100644 --- a/netbox_diode_plugin/api/matcher.py +++ b/netbox_diode_plugin/api/matcher.py @@ -5,11 +5,11 @@ import logging from functools import cache, lru_cache from dataclasses import dataclass -from typing import List, Optional, Type +from typing import Type from core.models import ObjectType as NetBoxType from django.db import models -from django.db.models import F +from 
django.db.models import F, Value from django.db.models.lookups import Exact from django.db.models.query_utils import Q @@ -171,7 +171,7 @@ def _build_fields_queryset(self, data) -> models.QuerySet: def _build_expressions_queryset(self, data) -> models.QuerySet: """Builds a queryset for the constraint with the given data.""" replacements = { - F(field): value + F(field): Value(value) if isinstance(value, (str, int, float, bool)) else value for field, value in data.items() } @@ -180,7 +180,7 @@ def _build_expressions_queryset(self, data) -> models.QuerySet: if hasattr(expr, "get_expression_for_validation"): expr = expr.get_expression_for_validation() - refs = _get_refs(expr) + refs = [F(ref) for ref in _get_refs(expr)] for ref in refs: if ref not in replacements: logger.error(f" * cannot build expr queryset for {self.name} (missing field {ref})") From 33b2f24222fa8713ee513d58be12dc98548d95c3 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Fri, 28 Mar 2025 13:09:19 +0100 Subject: [PATCH 19/30] resolve lint issues Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/api/applier.py | 11 ++++++----- netbox_diode_plugin/api/matcher.py | 3 ++- netbox_diode_plugin/api/transformer.py | 9 ++++----- 3 files changed, 12 insertions(+), 11 deletions(-) diff --git a/netbox_diode_plugin/api/applier.py b/netbox_diode_plugin/api/applier.py index 033fe03..b2329cb 100644 --- a/netbox_diode_plugin/api/applier.py +++ b/netbox_diode_plugin/api/applier.py @@ -35,11 +35,13 @@ class ApplyChangeSetException(Exception): """ApplyChangeSetException is raised when an error occurs while applying a change set.""" def __init__(self, message, errors=None): + """Initialize the exception.""" super().__init__(message) self.message = message self.errors = errors or {} def __str__(self): + """Return the string representation of the exception.""" if self.errors: return f"{self.message}: {self.errors}" return self.message @@ -47,7 +49,6 @@ def __str__(self): def apply_changeset(change_set: 
ChangeSet) -> ApplyChangeSetResult: """Apply a change set.""" - created = {} for change in change_set.changes: @@ -58,7 +59,7 @@ def apply_changeset(change_set: ChangeSet) -> ApplyChangeSetResult: app_label, model_name = object_type.split(".") model_class = apps.get_model(app_label, model_name) - + fk_fields = { field.name: field.related_model for field in model_class._meta.get_fields() @@ -79,16 +80,16 @@ def apply_changeset(change_set: ChangeSet) -> ApplyChangeSetResult: if change_type == ChangeType.CREATE.value: new_object = model_class.objects.create(**data) created[change.ref_id] = new_object - + elif change_type == ChangeType.UPDATE.value: object_id = change.object_id if object_id is None: - raise ApplyChangeSetException(f"Object ID is required for update") + raise ApplyChangeSetException("Object ID is required for update") model_class.objects.filter(id=object_id).update(**data) elif change_type == ChangeType.NOOP.value: pass - + else: raise ApplyChangeSetException(f"Unknown change type: {change.type}") diff --git a/netbox_diode_plugin/api/matcher.py b/netbox_diode_plugin/api/matcher.py index 8ee5303..eef75b7 100644 --- a/netbox_diode_plugin/api/matcher.py +++ b/netbox_diode_plugin/api/matcher.py @@ -3,8 +3,8 @@ """Diode NetBox Plugin - API - Object matching utilities.""" import logging -from functools import cache, lru_cache from dataclasses import dataclass +from functools import cache, lru_cache from typing import Type from core.models import ObjectType as NetBoxType @@ -44,6 +44,7 @@ class ObjectMatchCriteria: name: str | None = None def __hash__(self): + """Hash the object match criteria.""" return hash((self.fields, self.expressions, self.condition, self.model_class.__name__, self.name)) def has_required_fields(self, data) -> bool: diff --git a/netbox_diode_plugin/api/transformer.py b/netbox_diode_plugin/api/transformer.py index 00149a1..88961af 100644 --- a/netbox_diode_plugin/api/transformer.py +++ b/netbox_diode_plugin/api/transformer.py @@ -2,21 
+2,20 @@ # Copyright 2024 NetBox Labs Inc """Diode NetBox Plugin - API - Object resolution for diffing.""" -from collections import defaultdict import copy -from dataclasses import dataclass -from functools import lru_cache import json import logging import re +from collections import defaultdict +from functools import lru_cache from uuid import uuid4 + from django.core.exceptions import ValidationError from django.utils.text import slugify from .common import UnresolvedReference +from .matcher import find_existing_object, fingerprint, merge_data from .plugin_utils import get_json_ref_info, get_primary_value_field -from .matcher import fingerprint, merge_data, find_existing_object - logger = logging.getLogger("netbox.diode_data") From 35e56b07f3a8ff7b245c0944d44aea26b6b46709 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Fri, 28 Mar 2025 16:05:01 +0100 Subject: [PATCH 20/30] exclude fields with GenericRelation type Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/api/supported_models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/netbox_diode_plugin/api/supported_models.py b/netbox_diode_plugin/api/supported_models.py index 4c016bd..3ec47ce 100644 --- a/netbox_diode_plugin/api/supported_models.py +++ b/netbox_diode_plugin/api/supported_models.py @@ -131,7 +131,7 @@ def get_model_fields(model_class) -> tuple[dict, list]: model_fields = { field.name: field for field in model_class._meta.get_fields() - if field.__class__.__name__ != "CounterCacheField" + if field.__class__.__name__ not in ["CounterCacheField", "GenericRelation"] } # Reorder fields to match serializer order From d01627cdee555b0a36bb8ebc8cdfe335aca9d3c4 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Fri, 28 Mar 2025 16:41:33 +0100 Subject: [PATCH 21/30] fix sorting dict we may get ints strings etc Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/api/differ.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git 
a/netbox_diode_plugin/api/differ.py b/netbox_diode_plugin/api/differ.py index 61772d1..1547b1a 100644 --- a/netbox_diode_plugin/api/differ.py +++ b/netbox_diode_plugin/api/differ.py @@ -187,7 +187,8 @@ def sort_dict_recursively(d): if isinstance(d, dict): return {k: sort_dict_recursively(v) for k, v in sorted(d.items())} if isinstance(d, list): - return sorted([sort_dict_recursively(item) for item in d]) + # Convert all items to strings for comparison + return sorted([sort_dict_recursively(item) for item in d], key=str) return d From aaed4f89d67219226e10d6fe11eae5894d77fc34 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Fri, 28 Mar 2025 16:42:03 +0100 Subject: [PATCH 22/30] rework applier logic Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/api/applier.py | 78 ++++++++++++++++++++++++------ 1 file changed, 62 insertions(+), 16 deletions(-) diff --git a/netbox_diode_plugin/api/applier.py b/netbox_diode_plugin/api/applier.py index b2329cb..452155a 100644 --- a/netbox_diode_plugin/api/applier.py +++ b/netbox_diode_plugin/api/applier.py @@ -51,47 +51,93 @@ def apply_changeset(change_set: ChangeSet) -> ApplyChangeSetResult: """Apply a change set.""" created = {} - for change in change_set.changes: - change_type = change.change_type - object_type = change.object_type - data = change.data - new_refs = change.new_refs + def pre_apply(model_class: models.Model, change: Change) -> tuple[dict, list]: + """Pre-apply the data.""" - app_label, model_name = object_type.split(".") - model_class = apps.get_model(app_label, model_name) + data = change.data.copy() + # get foreign key fields with model fk_fields = { field.name: field.related_model for field in model_class._meta.get_fields() if field.is_relation } - - for ref_field in new_refs: + + # resolve foreign key references + for ref_field in change.new_refs: + if isinstance(data[ref_field], (list, tuple)): + ref_list = [] + for ref in data[ref_field]: + if isinstance(ref, str): + 
ref_list.append(created[ref]) + elif isinstance(ref, models.Model): + ref_list.append(ref) + data[ref_field] = ref_list + else: data[ref_field] = created[data[ref_field]] - + + tags = data.pop("tags", None) + if tags: + tags_model_class = fk_fields.get("tags") + if isinstance(tags, list) and isinstance(tags[0], models.Model): + tags = [tag.pk for tag in tags] + tags = tags_model_class.objects.filter(id__in=tags) + # get model fields matching data keys if foreign key + # TODO: consider use of existing model serializers accepting PKs for key, value in data.items(): if fk_model := fk_fields.get(key): if isinstance(value, int): + # ensure the value is an integer data[key] = fk_model.objects.get(id=value) + elif isinstance(value, list) and len(value) > 0 and isinstance(value[0], models.Model): + data[key] = [ref.pk for ref in value] elif isinstance(value, models.Model): data[key] = value + return data, tags + + def post_apply(instance: models.Model, tags: list[models.Model]): + """Post-apply the data.""" + + # set tags + if tags and hasattr(instance, "tags"): + instance.tags.set(tags) + + for change in change_set.changes: + change_type = change.change_type + object_type = change.object_type + + app_label, model_name = object_type.split(".") + model_class = apps.get_model(app_label, model_name) + + data, tags = pre_apply(model_class, change) + instance = None + if change_type == ChangeType.CREATE.value: - new_object = model_class.objects.create(**data) - created[change.ref_id] = new_object + instance = model_class.objects.create(**data) + created[change.ref_id] = instance elif change_type == ChangeType.UPDATE.value: - object_id = change.object_id - if object_id is None: - raise ApplyChangeSetException("Object ID is required for update") + if object_id := change.object_id: + model_class.objects.filter(id=object_id).update(**data) + instance = model_class.objects.get(id=object_id) + + # # MACAddress case (create and update in a same change set) + # elif instance := 
created[change.ref_id]: + # instance.update(**data) + # if tags: + # instance.tags.set(tags) + else: + raise ApplyChangeSetException("Object ID or ref_id is required for update") - model_class.objects.filter(id=object_id).update(**data) elif change_type == ChangeType.NOOP.value: pass else: raise ApplyChangeSetException(f"Unknown change type: {change.type}") + + post_apply(instance, tags) return ApplyChangeSetResult( id=change_set.id, From f84158acaaf12f60819ef06da5b2a0593fe09a96 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Fri, 28 Mar 2025 17:17:52 +0100 Subject: [PATCH 23/30] applier with content type fields Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/api/applier.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/netbox_diode_plugin/api/applier.py b/netbox_diode_plugin/api/applier.py index 452155a..7c30063 100644 --- a/netbox_diode_plugin/api/applier.py +++ b/netbox_diode_plugin/api/applier.py @@ -7,6 +7,7 @@ from dataclasses import dataclass, field from django.apps import apps +from django.contrib.contenttypes.models import ContentType from django.db import models from .differ import Change, ChangeSet, ChangeType @@ -82,6 +83,17 @@ def pre_apply(model_class: models.Model, change: Change) -> tuple[dict, list]: if isinstance(tags, list) and isinstance(tags[0], models.Model): tags = [tag.pk for tag in tags] tags = tags_model_class.objects.filter(id__in=tags) + + # resolve contenttype fields + for key, value in data.items(): + field_type = fk_fields.get(key) + if field_type and field_type == ContentType: + data[key] = ContentType.objects.get(app_label=value.split(".")[0], model=value.split(".")[1]) + # If the field name ends with _type, extract the base field name for the ID field + content_type_id_field = f"{key[:-5]}_id" + content_type_id_value = data[content_type_id_field] + if isinstance(content_type_id_value, str): + data[content_type_id_field] = int(content_type_id_value) # get model fields matching data keys if foreign 
key # TODO: consider use of existing model serializers accepting PKs From 97693699f371695b04c9167ecc40e67fd94d9996 Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Fri, 28 Mar 2025 17:35:37 +0100 Subject: [PATCH 24/30] fix content type related existing value Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/api/applier.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/netbox_diode_plugin/api/applier.py b/netbox_diode_plugin/api/applier.py index 7c30063..5621da1 100644 --- a/netbox_diode_plugin/api/applier.py +++ b/netbox_diode_plugin/api/applier.py @@ -94,6 +94,8 @@ def pre_apply(model_class: models.Model, change: Change) -> tuple[dict, list]: content_type_id_value = data[content_type_id_field] if isinstance(content_type_id_value, str): data[content_type_id_field] = int(content_type_id_value) + elif isinstance(content_type_id_value, models.Model): + data[content_type_id_field] = content_type_id_value.pk # get model fields matching data keys if foreign key # TODO: consider use of existing model serializers accepting PKs From 2016dc4542d120a5ae34fb461d5e1d429a964f1f Mon Sep 17 00:00:00 2001 From: Michal Fiedorowicz Date: Fri, 28 Mar 2025 18:06:11 +0100 Subject: [PATCH 25/30] exclude foreign key fields with many to one rel Signed-off-by: Michal Fiedorowicz --- netbox_diode_plugin/api/differ.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/netbox_diode_plugin/api/differ.py b/netbox_diode_plugin/api/differ.py index 1547b1a..a33350e 100644 --- a/netbox_diode_plugin/api/differ.py +++ b/netbox_diode_plugin/api/differ.py @@ -96,6 +96,9 @@ def prechange_data_from_instance(instance) -> dict: if not hasattr(instance, field_name): continue + if field_info["type"] == "ForeignKey" and field_info.get("is_many_to_one_rel", False): + continue + value = getattr(instance, field_name) if hasattr(value, "all"): # Handle many-to-many and many-to-one relationships # For any relationship that has an 'all' method, get all related objects' primary keys From 
0e48f7b20d9ed1f33fb7667fafaecef3faf0b9ab Mon Sep 17 00:00:00 2001 From: Luke Tucker <64618+ltucker@users.noreply.github.com> Date: Wed, 2 Apr 2025 10:00:32 -0400 Subject: [PATCH 26/30] fix: support for post create updates eg (primary mac address) (#68) * fix: support for post create updates eg (primary mac address) * use serializers, fix relevent tests * linting * filter fields in the prior state * add some basic smoke tests for diff / diff+apply --- docker/netbox/configuration/configuration.py | 9 +- netbox_diode_plugin/api/applier.py | 188 ++- netbox_diode_plugin/api/common.py | 4 + netbox_diode_plugin/api/differ.py | 40 +- netbox_diode_plugin/api/matcher.py | 82 +- netbox_diode_plugin/api/plugin_utils.py | 1067 +++++++++-------- netbox_diode_plugin/api/transformer.py | 115 +- netbox_diode_plugin/api/views.py | 27 +- .../tests/test_api_apply_change_set.py | 737 +++++------- .../tests/test_api_diff_and_apply.py | 97 ++ .../tests/test_api_generate_diff.py | 107 ++ .../tests/test_api_object_state.py | 391 ------ pyproject.toml | 2 + 13 files changed, 1323 insertions(+), 1543 deletions(-) create mode 100644 netbox_diode_plugin/tests/test_api_diff_and_apply.py create mode 100644 netbox_diode_plugin/tests/test_api_generate_diff.py delete mode 100644 netbox_diode_plugin/tests/test_api_object_state.py diff --git a/docker/netbox/configuration/configuration.py b/docker/netbox/configuration/configuration.py index cc51c59..d459441 100644 --- a/docker/netbox/configuration/configuration.py +++ b/docker/netbox/configuration/configuration.py @@ -44,9 +44,12 @@ def _environ_get_and_map(variable_name: str, default: str | None = None, return map_fn(env_value) -_AS_BOOL = lambda value: value.lower() == 'true' -_AS_INT = lambda value: int(value) -_AS_LIST = lambda value: list(filter(None, value.split(' '))) +def _AS_BOOL(value): + return value.lower() == 'true' +def _AS_INT(value): + return int(value) +def _AS_LIST(value): + return list(filter(None, value.split(' '))) _BASE_DIR = 
dirname(dirname(abspath(__file__))) diff --git a/netbox_diode_plugin/api/applier.py b/netbox_diode_plugin/api/applier.py index 5621da1..eed793b 100644 --- a/netbox_diode_plugin/api/applier.py +++ b/netbox_diode_plugin/api/applier.py @@ -8,9 +8,13 @@ from django.apps import apps from django.contrib.contenttypes.models import ContentType +from django.core.exceptions import ObjectDoesNotExist from django.db import models +from rest_framework.exceptions import ValidationError as ValidationError from .differ import Change, ChangeSet, ChangeType +from .plugin_utils import get_object_type_model, legal_fields +from .supported_models import get_serializer_for_model logger = logging.getLogger(__name__) @@ -50,108 +54,26 @@ def __str__(self): def apply_changeset(change_set: ChangeSet) -> ApplyChangeSetResult: """Apply a change set.""" - created = {} - - def pre_apply(model_class: models.Model, change: Change) -> tuple[dict, list]: - """Pre-apply the data.""" + _validate_change_set(change_set) - data = change.data.copy() - - # get foreign key fields with model - fk_fields = { - field.name: field.related_model - for field in model_class._meta.get_fields() - if field.is_relation - } - - # resolve foreign key references - for ref_field in change.new_refs: - if isinstance(data[ref_field], (list, tuple)): - ref_list = [] - for ref in data[ref_field]: - if isinstance(ref, str): - ref_list.append(created[ref]) - elif isinstance(ref, models.Model): - ref_list.append(ref) - data[ref_field] = ref_list - else: - data[ref_field] = created[data[ref_field]] - - tags = data.pop("tags", None) - if tags: - tags_model_class = fk_fields.get("tags") - if isinstance(tags, list) and isinstance(tags[0], models.Model): - tags = [tag.pk for tag in tags] - tags = tags_model_class.objects.filter(id__in=tags) - - # resolve contenttype fields - for key, value in data.items(): - field_type = fk_fields.get(key) - if field_type and field_type == ContentType: - data[key] = 
ContentType.objects.get(app_label=value.split(".")[0], model=value.split(".")[1]) - # If the field name ends with _type, extract the base field name for the ID field - content_type_id_field = f"{key[:-5]}_id" - content_type_id_value = data[content_type_id_field] - if isinstance(content_type_id_value, str): - data[content_type_id_field] = int(content_type_id_value) - elif isinstance(content_type_id_value, models.Model): - data[content_type_id_field] = content_type_id_value.pk - - # get model fields matching data keys if foreign key - # TODO: consider use of existing model serializers accepting PKs - for key, value in data.items(): - if fk_model := fk_fields.get(key): - if isinstance(value, int): - # ensure the value is an integer - data[key] = fk_model.objects.get(id=value) - elif isinstance(value, list) and len(value) > 0 and isinstance(value[0], models.Model): - data[key] = [ref.pk for ref in value] - elif isinstance(value, models.Model): - data[key] = value - - return data, tags - - def post_apply(instance: models.Model, tags: list[models.Model]): - """Post-apply the data.""" - - # set tags - if tags and hasattr(instance, "tags"): - instance.tags.set(tags) - - for change in change_set.changes: + created = {} + for i, change in enumerate(change_set.changes): change_type = change.change_type object_type = change.object_type - app_label, model_name = object_type.split(".") - model_class = apps.get_model(app_label, model_name) - - data, tags = pre_apply(model_class, change) - instance = None - - if change_type == ChangeType.CREATE.value: - instance = model_class.objects.create(**data) - created[change.ref_id] = instance + if change_type == ChangeType.NOOP.value: + continue - elif change_type == ChangeType.UPDATE.value: - if object_id := change.object_id: - model_class.objects.filter(id=object_id).update(**data) - instance = model_class.objects.get(id=object_id) - - # # MACAddress case (create and update in a same change set) - # elif instance := 
created[change.ref_id]: - # instance.update(**data) - # if tags: - # instance.tags.set(tags) - else: - raise ApplyChangeSetException("Object ID or ref_id is required for update") - - elif change_type == ChangeType.NOOP.value: - pass - - else: - raise ApplyChangeSetException(f"Unknown change type: {change.type}") - - post_apply(instance, tags) + try: + model_class = get_object_type_model(object_type) + data = _pre_apply(model_class, change, created) + _apply_change(data, model_class, change, created) + except ValidationError as e: + raise _err_from_validation_error(e, f"changes[{i}]") + except ObjectDoesNotExist: + raise _err(f"{object_type} with id {change.object_id} does not exist", f"changes[{i}].object_id") + # ConstraintViolationError ? + # ... return ApplyChangeSetResult( id=change_set.id, @@ -159,3 +81,75 @@ def post_apply(instance: models.Model, tags: list[models.Model]): errors=None, ) +def _apply_change(data: dict, model_class: models.Model, change: Change, created: dict): + serializer_class = get_serializer_for_model(model_class) + change_type = change.change_type + if change_type == ChangeType.CREATE.value: + serializer = serializer_class(data=data) + serializer.is_valid(raise_exception=True) + instance = serializer.save() + created[change.ref_id] = instance + + elif change_type == ChangeType.UPDATE.value: + if object_id := change.object_id: + instance = model_class.objects.get(id=object_id) + serializer = serializer_class(instance, data=data, partial=True) + serializer.is_valid(raise_exception=True) + serializer.save() + # create and update in a same change set + elif change.ref_id and (instance := created[change.ref_id]): + serializer = serializer_class(instance, data=data, partial=True) + serializer.is_valid(raise_exception=True) + serializer.save() + +def _pre_apply(model_class: models.Model, change: Change, created: dict): + data = change.data.copy() + + # resolve foreign key references to new objects + for ref_field in change.new_refs: + if 
isinstance(data[ref_field], (list, tuple)): + ref_list = [] + for ref in data[ref_field]: + if isinstance(ref, str): + ref_list.append(created[ref].pk) + elif isinstance(ref, int): + ref_list.append(ref) + data[ref_field] = ref_list + else: + data[ref_field] = created[data[ref_field]].pk + + # ignore? fields that are not in the data model (error?) + allowed_fields = legal_fields(model_class) + for key in list(data.keys()): + if key not in allowed_fields: + logger.warning(f"Field {key} is not in the diode data model, ignoring.") + data.pop(key) + + return data + +def _validate_change_set(change_set: ChangeSet): + if not change_set.id: + raise _err("Change set ID is required", "id") + if not change_set.changes: + raise _err("Changes are required", "changes") + + for i, change in enumerate(change_set.changes): + if change.object_id is None and change.ref_id is None: + raise _err("Object ID or Ref ID must be provided", f"changes[{i}]") + if change.change_type not in ChangeType: + raise _err(f"Unsupported change type '{change.change_type}'", f"changes[{i}].change_type") + +def _err(message, field): + return ApplyChangeSetException(message, errors={field: [message]}) + +def _err_from_validation_error(e, prefix): + errors = {} + if e.detail: + if isinstance(e.detail, dict): + for k, v in e.detail.items(): + errors[f"{prefix}.{k}"] = v + elif isinstance(e.detail, (list, tuple)): + errors[prefix] = e.detail + else: + errors[prefix] = [e.detail] + return ApplyChangeSetException("validation error", errors=errors) diff --git a/netbox_diode_plugin/api/common.py b/netbox_diode_plugin/api/common.py index d582443..41011b4 100644 --- a/netbox_diode_plugin/api/common.py +++ b/netbox_diode_plugin/api/common.py @@ -13,15 +13,19 @@ class UnresolvedReference: uuid: str def __str__(self): + """String representation of the unresolved reference.""" return f"new_object:{self.object_type}:{self.uuid}" def __eq__(self, other): + """Equality operator.""" if not isinstance(other, 
UnresolvedReference): return False return self.object_type == other.object_type and self.uuid == other.uuid def __hash__(self): + """Hash function.""" return hash((self.object_type, self.uuid)) def __lt__(self, other): + """Less than operator.""" return self.object_type < other.object_type or (self.object_type == other.object_type and self.uuid < other.uuid) diff --git a/netbox_diode_plugin/api/differ.py b/netbox_diode_plugin/api/differ.py index a33350e..84b6848 100644 --- a/netbox_diode_plugin/api/differ.py +++ b/netbox_diode_plugin/api/differ.py @@ -13,9 +13,9 @@ from django.core.exceptions import ValidationError from utilities.data import shallow_compare_dict +from .plugin_utils import get_primary_value, legal_fields from .supported_models import extract_supported_models -from .transformer import transform_proto_json, cleanup_unresolved_references -from .plugin_utils import get_primary_value +from .transformer import cleanup_unresolved_references, transform_proto_json logger = logging.getLogger(__name__) @@ -74,7 +74,7 @@ def to_dict(self) -> dict: "branch": self.branch, } -def prechange_data_from_instance(instance) -> dict: +def prechange_data_from_instance(instance) -> dict: # noqa: C901 """Convert model instance data to a dictionary format for comparison.""" prechange_data = {} @@ -92,7 +92,13 @@ def prechange_data_from_instance(instance) -> dict: if not fields: raise ValidationError(f"Model {model_class.__name__} has no fields") + diode_fields = legal_fields(model_class) + for field_name, field_info in fields.items(): + # permit only diode fields and the primary key + if field_name not in diode_fields and field_name != "id": + continue + if not hasattr(instance, field_name): continue @@ -145,43 +151,35 @@ def diff_to_change( unresolved_references: list[str], ) -> Change: """Convert a diff to a change.""" - change_type = ChangeType.UPDATE if prechange_data.get("id") else ChangeType.CREATE + change_type = ChangeType.UPDATE if len(prechange_data) > 0 else 
ChangeType.CREATE if change_type == ChangeType.UPDATE and not len(changed_attrs) > 0: change_type = ChangeType.NOOP - primary_value = get_primary_value(postchange_data, object_type) + primary_value = get_primary_value(prechange_data | postchange_data, object_type) if primary_value is None: primary_value = "(unnamed)" prior_id = prechange_data.get("id") + ref_id = None + if prior_id is None: + ref_id = postchange_data.pop("id", None) + change = Change( change_type=change_type, object_type=object_type, object_id=prior_id if isinstance(prior_id, int) else None, + ref_id=ref_id, object_primary_value=primary_value, new_refs=unresolved_references, ) - if change.object_id is None: - change.ref_id = postchange_data.get("id") - _ = postchange_data.pop("id", None) - - postchange_data_clean = clean_diff_data(postchange_data) if change_type == ChangeType.UPDATE: # remove null values prechange_data_clean = clean_diff_data(prechange_data) - - merged_data = copy.deepcopy(prechange_data_clean) - - merged_data.update({ - attr: postchange_data_clean[attr] - for attr in changed_attrs - if attr in postchange_data_clean - }) change.before = sort_dict_recursively(prechange_data_clean) - change.data = sort_dict_recursively(merged_data) - else: - change.data = sort_dict_recursively(postchange_data_clean) + + postchange_data_clean = clean_diff_data(postchange_data) + change.data = sort_dict_recursively(postchange_data_clean) return change diff --git a/netbox_diode_plugin/api/matcher.py b/netbox_diode_plugin/api/matcher.py index eef75b7..7d6973d 100644 --- a/netbox_diode_plugin/api/matcher.py +++ b/netbox_diode_plugin/api/matcher.py @@ -2,26 +2,43 @@ # Copyright 2024 NetBox Labs Inc """Diode NetBox Plugin - API - Object matching utilities.""" +import copy import logging from dataclasses import dataclass from functools import cache, lru_cache from typing import Type from core.models import ObjectType as NetBoxType +from django.contrib.contenttypes.fields import ContentType +from 
django.core.exceptions import FieldDoesNotExist from django.db import models from django.db.models import F, Value from django.db.models.lookups import Exact from django.db.models.query_utils import Q from .common import UnresolvedReference +from .plugin_utils import content_type_id, get_object_type, get_object_type_model logger = logging.getLogger(__name__) # -# TODO: add special cases for things that lack any unique constraints, -# but may have logical pre-existing matches ... eg an ip address in -# a certain context ... etc ? possibly mac address also ? +# these matchers are not driven by netbox unique constraints, +# but are logical criteria that may be used to match objects. +# These should represent the likely intent of a user when +# matching existing objects. # +_LOGICAL_MATCHERS = { + "dcim.macaddress": lambda: [ + ObjectMatchCriteria( + # consider a matching mac address within the same parent object + # to be the same object although not technically required to be. + fields=("mac_address", "assigned_object_type", "assigned_object_id"), + name="logical_mac_address_within_parent", + model_class=get_object_type_model("dcim.macaddress"), + ), + ], +} + @dataclass class ObjectMatchCriteria: @@ -103,6 +120,9 @@ def fingerprint(self, data: dict) -> str|None: values = [] for field in sorted_fields: value = data[field] + if isinstance(value, (dict, UnresolvedReference)): + logger.warning(f"unexpected value type for fingerprinting: {value}") + return None if field in insensitive: value = value.lower() values.append(value) @@ -150,10 +170,10 @@ def build_queryset(self, data) -> models.QuerySet: def _build_fields_queryset(self, data) -> models.QuerySet: """Builds a queryset for a simple set-of-fields constraint.""" + data = self._prepare_data(data) lookup_kwargs = {} for field_name in self.fields: field = self.model_class._meta.get_field(field_name) - # attribute = field.attname (we just use field name, since not using the model instances...) 
if field_name not in data: logger.error(f" * cannot build fields queryset for {self.name} (missing field {field_name})") return None # cannot match, missing field data @@ -161,6 +181,9 @@ def _build_fields_queryset(self, data) -> models.QuerySet: if isinstance(lookup_value, UnresolvedReference): logger.error(f" * cannot build fields queryset for {self.name} ({field_name} is unresolved reference)") return None # cannot match, missing field data + if isinstance(lookup_value, dict): + logger.error(f" * cannot build fields queryset for {self.name} ({field_name} is dict)") + return None # cannot match, missing field data lookup_kwargs[field.name] = lookup_value # logger.error(f" * query kwargs: {lookup_kwargs}") @@ -171,6 +194,7 @@ def _build_fields_queryset(self, data) -> models.QuerySet: def _build_expressions_queryset(self, data) -> models.QuerySet: """Builds a queryset for the constraint with the given data.""" + data = self._prepare_data(data) replacements = { F(field): Value(value) if isinstance(value, (str, int, float, bool)) else value for field, value in data.items() @@ -199,10 +223,29 @@ def _build_expressions_queryset(self, data) -> models.QuerySet: qs = qs.filter(self.condition) return qs + def _prepare_data(self, data: dict) -> dict: + prepared = {} + for field_name, value in data.items(): + try: + field = self.model_class._meta.get_field(field_name) + # special handling for object type -> content type id + if field.is_relation and hasattr(field, "related_model") and field.related_model == ContentType: + prepared[field_name] = content_type_id(value) + else: + logger.error("no.") + prepared[field_name] = value + logger.error(f"field: {field_name} -> {value}") + + except FieldDoesNotExist: + continue + logger.error(f"prepared data: {data} -> {prepared}") + return prepared + @lru_cache(maxsize=256) def get_model_matchers(model_class) -> list[ObjectMatchCriteria]: """Extract unique constraints from a Django model.""" - constraints = [] + object_type = 
get_object_type(model_class) + matchers = _LOGICAL_MATCHERS.get(object_type, lambda: [])() # collect single fields that are unique for field in model_class._meta.fields: @@ -211,7 +254,7 @@ def get_model_matchers(model_class) -> list[ObjectMatchCriteria]: continue if field.unique: - constraints.append( + matchers.append( ObjectMatchCriteria( model_class=model_class, fields=(field.name,), @@ -224,7 +267,7 @@ def get_model_matchers(model_class) -> list[ObjectMatchCriteria]: if not _is_supported_constraint(constraint, model_class): continue if len(constraint.fields) > 0: - constraints.append( + matchers.append( ObjectMatchCriteria( model_class=model_class, fields=tuple(constraint.fields), @@ -233,7 +276,7 @@ def get_model_matchers(model_class) -> list[ObjectMatchCriteria]: ) ) elif len(constraint.expressions) > 0: - constraints.append( + matchers.append( ObjectMatchCriteria( model_class=model_class, expressions=tuple(constraint.expressions), @@ -248,7 +291,8 @@ def get_model_matchers(model_class) -> list[ObjectMatchCriteria]: # (this shouldn't happen / enforced by django) continue - return constraints + return matchers + def _is_supported_constraint(constraint, model_class) -> bool: if not isinstance(constraint, models.UniqueConstraint): @@ -303,7 +347,8 @@ def _fingerprint_all(data: dict) -> str: values.append(k) if isinstance(v, (list, tuple)): values.extend(sorted(v)) - # TODO: handle dicts + if isinstance(v, dict): + values.append(_fingerprint_all(v)) else: values.append(v) # logger.error(f"_fingerprint_all: {data} -> values: {tuple(values)}") @@ -353,24 +398,15 @@ def find_existing_object(data: dict, object_type: str): if q is None: logger.error(f" * skipped matcher {matcher.name} (no queryset)") continue - try: - logger.error(f" * trying query {q.query}") - existing = q.get() + logger.error(f" * trying query {q.query}") + existing = q.order_by('pk').first() + if existing is not None: logger.error(f" -> Found object {existing} via {matcher.name}") return existing 
- except model_class.DoesNotExist: - logger.error(f" -> No object found for matcher {matcher.name}") - continue + logger.error(f" -> No object found for matcher {matcher.name}") logger.error(" * No matchers found an existing object") return None -@lru_cache(maxsize=256) -def get_object_type_model(object_type: str) -> Type[models.Model]: - """Get the model class for a given object type.""" - app_label, model_name = object_type.split(".") - object_content_type = NetBoxType.objects.get_by_natural_key(app_label, model_name) - return object_content_type.model_class() - def merge_data(a: dict, b: dict) -> dict: """ Merges two structures. diff --git a/netbox_diode_plugin/api/plugin_utils.py b/netbox_diode_plugin/api/plugin_utils.py index 16ac1c6..b526a5c 100644 --- a/netbox_diode_plugin/api/plugin_utils.py +++ b/netbox_diode_plugin/api/plugin_utils.py @@ -1,242 +1,320 @@ """Diode plugin helpers.""" # Generated code. DO NOT EDIT. -# Timestamp: 2025-03-27 16:35:12Z +# Timestamp: 2025-04-01 21:05:16Z from dataclasses import dataclass +from functools import lru_cache from typing import Type +from core.models import ObjectType as NetBoxType from django.contrib.contenttypes.models import ContentType from django.db import models + +@lru_cache(maxsize=256) +def get_object_type_model(object_type: str) -> Type[models.Model]: + """Get the model class for a given object type.""" + app_label, model_name = object_type.split('.') + object_content_type = NetBoxType.objects.get_by_natural_key(app_label, model_name) + return object_content_type.model_class() + +@lru_cache(maxsize=256) +def get_object_type(model_class: Type[models.Model]) -> str: + """Get the object type for a given model class.""" + content_type = ContentType.objects.get_for_model(model_class) + return content_type.app_label + '.' 
+ content_type.model + +@lru_cache(maxsize=256) +def content_type_id(object_type: str) -> int: + """Get the content type id for a given object type.""" + app_label, model_name = object_type.split('.') + object_content_type = NetBoxType.objects.get_by_natural_key(app_label, model_name) + return object_content_type.id + @dataclass class RefInfo: object_type: str field_name: str is_generic: bool = False + is_many: bool = False -_REF_INFO = { - 'ipam.asn': { - 'rir': RefInfo(object_type='ipam.rir', field_name='rir'), - 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'ipam.asnrange': { - 'rir': RefInfo(object_type='ipam.rir', field_name='rir'), - 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'ipam.aggregate': { - 'rir': RefInfo(object_type='ipam.rir', field_name='rir'), - 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'dcim.cable': { - 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'dcim.cabletermination': { - 'cable': RefInfo(object_type='dcim.cable', field_name='cable'), - 'terminationCircuitTermination': RefInfo(object_type='circuits.circuittermination', field_name='termination', is_generic=True), - 'terminationConsolePort': RefInfo(object_type='dcim.consoleport', field_name='termination', is_generic=True), - 'terminationConsoleServerPort': RefInfo(object_type='dcim.consoleserverport', field_name='termination', is_generic=True), - 'terminationFrontPort': RefInfo(object_type='dcim.frontport', field_name='termination', is_generic=True), - 'terminationInterface': RefInfo(object_type='dcim.interface', field_name='termination', is_generic=True), - 'terminationPowerFeed': 
RefInfo(object_type='dcim.powerfeed', field_name='termination', is_generic=True), - 'terminationPowerOutlet': RefInfo(object_type='dcim.poweroutlet', field_name='termination', is_generic=True), - 'terminationPowerPort': RefInfo(object_type='dcim.powerport', field_name='termination', is_generic=True), - 'terminationRearPort': RefInfo(object_type='dcim.rearport', field_name='termination', is_generic=True), - }, +_JSON_REF_INFO = { 'circuits.circuit': { + 'assignments': RefInfo(object_type='circuits.circuitgroupassignment', field_name='assignments', is_many=True), 'provider': RefInfo(object_type='circuits.provider', field_name='provider'), 'providerAccount': RefInfo(object_type='circuits.provideraccount', field_name='provider_account'), - 'type': RefInfo(object_type='circuits.circuittype', field_name='type'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - 'assignments': RefInfo(object_type='circuits.circuitgroupassignment', field_name='assignments'), + 'type': RefInfo(object_type='circuits.circuittype', field_name='type'), }, 'circuits.circuitgroup': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), }, 'circuits.circuitgroupassignment': { 'group': RefInfo(object_type='circuits.circuitgroup', field_name='group'), 'memberCircuit': RefInfo(object_type='circuits.circuit', field_name='member', is_generic=True), 'memberVirtualCircuit': RefInfo(object_type='circuits.virtualcircuit', field_name='member', is_generic=True), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, 'circuits.circuittermination': { 'circuit': RefInfo(object_type='circuits.circuit', 
field_name='circuit'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), 'terminationLocation': RefInfo(object_type='dcim.location', field_name='termination', is_generic=True), 'terminationProviderNetwork': RefInfo(object_type='circuits.providernetwork', field_name='termination', is_generic=True), 'terminationRegion': RefInfo(object_type='dcim.region', field_name='termination', is_generic=True), 'terminationSite': RefInfo(object_type='dcim.site', field_name='termination', is_generic=True), 'terminationSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='termination', is_generic=True), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), }, 'circuits.circuittype': { - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, - 'virtualization.cluster': { - 'type': RefInfo(object_type='virtualization.clustertype', field_name='type'), - 'group': RefInfo(object_type='virtualization.clustergroup', field_name='group'), + 'circuits.provider': { + 'accounts': RefInfo(object_type='circuits.provideraccount', field_name='accounts', is_many=True), + 'asns': RefInfo(object_type='ipam.asn', field_name='asns', is_many=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'circuits.provideraccount': { + 'provider': RefInfo(object_type='circuits.provider', field_name='provider'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'circuits.providernetwork': { + 'provider': RefInfo(object_type='circuits.provider', field_name='provider'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'circuits.virtualcircuit': { + 'providerAccount': RefInfo(object_type='circuits.provideraccount', field_name='provider_account'), + 'providerNetwork': RefInfo(object_type='circuits.providernetwork', field_name='provider_network'), + 'tags': 
RefInfo(object_type='extras.tag', field_name='tags', is_many=True), 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), - 'scopeLocation': RefInfo(object_type='dcim.location', field_name='scope', is_generic=True), - 'scopeRegion': RefInfo(object_type='dcim.region', field_name='scope', is_generic=True), - 'scopeSite': RefInfo(object_type='dcim.site', field_name='scope', is_generic=True), - 'scopeSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='scope', is_generic=True), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'type': RefInfo(object_type='circuits.virtualcircuittype', field_name='type'), }, - 'virtualization.clustergroup': { - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'circuits.virtualcircuittermination': { + 'interface': RefInfo(object_type='dcim.interface', field_name='interface'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'virtualCircuit': RefInfo(object_type='circuits.virtualcircuit', field_name='virtual_circuit'), }, - 'virtualization.clustertype': { - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'circuits.virtualcircuittype': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.cable': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'dcim.cabletermination': { + 'cable': RefInfo(object_type='dcim.cable', field_name='cable'), + 'terminationCircuitTermination': RefInfo(object_type='circuits.circuittermination', field_name='termination', is_generic=True), + 'terminationConsolePort': RefInfo(object_type='dcim.consoleport', field_name='termination', is_generic=True), + 'terminationConsoleServerPort': RefInfo(object_type='dcim.consoleserverport', field_name='termination', is_generic=True), + 'terminationFrontPort': RefInfo(object_type='dcim.frontport', field_name='termination', 
is_generic=True), + 'terminationInterface': RefInfo(object_type='dcim.interface', field_name='termination', is_generic=True), + 'terminationPowerFeed': RefInfo(object_type='dcim.powerfeed', field_name='termination', is_generic=True), + 'terminationPowerOutlet': RefInfo(object_type='dcim.poweroutlet', field_name='termination', is_generic=True), + 'terminationPowerPort': RefInfo(object_type='dcim.powerport', field_name='termination', is_generic=True), + 'terminationRearPort': RefInfo(object_type='dcim.rearport', field_name='termination', is_generic=True), }, 'dcim.consoleport': { 'device': RefInfo(object_type='dcim.device', field_name='device'), 'module': RefInfo(object_type='dcim.module', field_name='module'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, 'dcim.consoleserverport': { 'device': RefInfo(object_type='dcim.device', field_name='device'), 'module': RefInfo(object_type='dcim.module', field_name='module'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'tenancy.contact': { - 'group': RefInfo(object_type='tenancy.contactgroup', field_name='group'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'tenancy.contactassignment': { - 'objectAsn': RefInfo(object_type='ipam.asn', field_name='object', is_generic=True), - 'objectAsnRange': RefInfo(object_type='ipam.asnrange', field_name='object', is_generic=True), - 'objectAggregate': RefInfo(object_type='ipam.aggregate', field_name='object', is_generic=True), - 'objectCable': RefInfo(object_type='dcim.cable', field_name='object', is_generic=True), - 'objectCablePath': RefInfo(object_type='dcim.cablepath', field_name='object', is_generic=True), - 'objectCableTermination': RefInfo(object_type='dcim.cabletermination', field_name='object', is_generic=True), - 'objectCircuit': RefInfo(object_type='circuits.circuit', field_name='object', is_generic=True), - 'objectCircuitGroup': 
RefInfo(object_type='circuits.circuitgroup', field_name='object', is_generic=True), - 'objectCircuitGroupAssignment': RefInfo(object_type='circuits.circuitgroupassignment', field_name='object', is_generic=True), - 'objectCircuitTermination': RefInfo(object_type='circuits.circuittermination', field_name='object', is_generic=True), - 'objectCircuitType': RefInfo(object_type='circuits.circuittype', field_name='object', is_generic=True), - 'objectCluster': RefInfo(object_type='virtualization.cluster', field_name='object', is_generic=True), - 'objectClusterGroup': RefInfo(object_type='virtualization.clustergroup', field_name='object', is_generic=True), - 'objectClusterType': RefInfo(object_type='virtualization.clustertype', field_name='object', is_generic=True), - 'objectConsolePort': RefInfo(object_type='dcim.consoleport', field_name='object', is_generic=True), - 'objectConsoleServerPort': RefInfo(object_type='dcim.consoleserverport', field_name='object', is_generic=True), - 'objectContact': RefInfo(object_type='tenancy.contact', field_name='object', is_generic=True), - 'objectContactAssignment': RefInfo(object_type='tenancy.contactassignment', field_name='object', is_generic=True), - 'objectContactGroup': RefInfo(object_type='tenancy.contactgroup', field_name='object', is_generic=True), - 'objectContactRole': RefInfo(object_type='tenancy.contactrole', field_name='object', is_generic=True), - 'objectDevice': RefInfo(object_type='dcim.device', field_name='object', is_generic=True), - 'objectDeviceBay': RefInfo(object_type='dcim.devicebay', field_name='object', is_generic=True), - 'objectDeviceRole': RefInfo(object_type='dcim.devicerole', field_name='object', is_generic=True), - 'objectDeviceType': RefInfo(object_type='dcim.devicetype', field_name='object', is_generic=True), - 'objectFhrpGroup': RefInfo(object_type='ipam.fhrpgroup', field_name='object', is_generic=True), - 'objectFhrpGroupAssignment': RefInfo(object_type='ipam.fhrpgroupassignment', field_name='object', 
is_generic=True), - 'objectFrontPort': RefInfo(object_type='dcim.frontport', field_name='object', is_generic=True), - 'objectIkePolicy': RefInfo(object_type='vpn.ikepolicy', field_name='object', is_generic=True), - 'objectIkeProposal': RefInfo(object_type='vpn.ikeproposal', field_name='object', is_generic=True), - 'objectIpAddress': RefInfo(object_type='ipam.ipaddress', field_name='object', is_generic=True), - 'objectIpRange': RefInfo(object_type='ipam.iprange', field_name='object', is_generic=True), - 'objectIpSecPolicy': RefInfo(object_type='vpn.ipsecpolicy', field_name='object', is_generic=True), - 'objectIpSecProfile': RefInfo(object_type='vpn.ipsecprofile', field_name='object', is_generic=True), - 'objectIpSecProposal': RefInfo(object_type='vpn.ipsecproposal', field_name='object', is_generic=True), - 'objectInterface': RefInfo(object_type='dcim.interface', field_name='object', is_generic=True), - 'objectInventoryItem': RefInfo(object_type='dcim.inventoryitem', field_name='object', is_generic=True), - 'objectInventoryItemRole': RefInfo(object_type='dcim.inventoryitemrole', field_name='object', is_generic=True), - 'objectL2Vpn': RefInfo(object_type='vpn.l2vpn', field_name='object', is_generic=True), - 'objectL2VpnTermination': RefInfo(object_type='vpn.l2vpntermination', field_name='object', is_generic=True), - 'objectLocation': RefInfo(object_type='dcim.location', field_name='object', is_generic=True), - 'objectMacAddress': RefInfo(object_type='dcim.macaddress', field_name='object', is_generic=True), - 'objectManufacturer': RefInfo(object_type='dcim.manufacturer', field_name='object', is_generic=True), - 'objectModule': RefInfo(object_type='dcim.module', field_name='object', is_generic=True), - 'objectModuleBay': RefInfo(object_type='dcim.modulebay', field_name='object', is_generic=True), - 'objectModuleType': RefInfo(object_type='dcim.moduletype', field_name='object', is_generic=True), - 'objectPlatform': RefInfo(object_type='dcim.platform', 
field_name='object', is_generic=True), - 'objectPowerFeed': RefInfo(object_type='dcim.powerfeed', field_name='object', is_generic=True), - 'objectPowerOutlet': RefInfo(object_type='dcim.poweroutlet', field_name='object', is_generic=True), - 'objectPowerPanel': RefInfo(object_type='dcim.powerpanel', field_name='object', is_generic=True), - 'objectPowerPort': RefInfo(object_type='dcim.powerport', field_name='object', is_generic=True), - 'objectPrefix': RefInfo(object_type='ipam.prefix', field_name='object', is_generic=True), - 'objectProvider': RefInfo(object_type='circuits.provider', field_name='object', is_generic=True), - 'objectProviderAccount': RefInfo(object_type='circuits.provideraccount', field_name='object', is_generic=True), - 'objectProviderNetwork': RefInfo(object_type='circuits.providernetwork', field_name='object', is_generic=True), - 'objectRir': RefInfo(object_type='ipam.rir', field_name='object', is_generic=True), - 'objectRack': RefInfo(object_type='dcim.rack', field_name='object', is_generic=True), - 'objectRackReservation': RefInfo(object_type='dcim.rackreservation', field_name='object', is_generic=True), - 'objectRackRole': RefInfo(object_type='dcim.rackrole', field_name='object', is_generic=True), - 'objectRackType': RefInfo(object_type='dcim.racktype', field_name='object', is_generic=True), - 'objectRearPort': RefInfo(object_type='dcim.rearport', field_name='object', is_generic=True), - 'objectRegion': RefInfo(object_type='dcim.region', field_name='object', is_generic=True), - 'objectRole': RefInfo(object_type='ipam.role', field_name='object', is_generic=True), - 'objectRouteTarget': RefInfo(object_type='ipam.routetarget', field_name='object', is_generic=True), - 'objectService': RefInfo(object_type='ipam.service', field_name='object', is_generic=True), - 'objectSite': RefInfo(object_type='dcim.site', field_name='object', is_generic=True), - 'objectSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='object', is_generic=True), - 
'objectTag': RefInfo(object_type='extras.tag', field_name='object', is_generic=True), - 'objectTenant': RefInfo(object_type='tenancy.tenant', field_name='object', is_generic=True), - 'objectTenantGroup': RefInfo(object_type='tenancy.tenantgroup', field_name='object', is_generic=True), - 'objectTunnel': RefInfo(object_type='vpn.tunnel', field_name='object', is_generic=True), - 'objectTunnelGroup': RefInfo(object_type='vpn.tunnelgroup', field_name='object', is_generic=True), - 'objectTunnelTermination': RefInfo(object_type='vpn.tunneltermination', field_name='object', is_generic=True), - 'objectVlan': RefInfo(object_type='ipam.vlan', field_name='object', is_generic=True), - 'objectVlanGroup': RefInfo(object_type='ipam.vlangroup', field_name='object', is_generic=True), - 'objectVlanTranslationPolicy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='object', is_generic=True), - 'objectVlanTranslationRule': RefInfo(object_type='ipam.vlantranslationrule', field_name='object', is_generic=True), - 'objectVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='object', is_generic=True), - 'objectVrf': RefInfo(object_type='ipam.vrf', field_name='object', is_generic=True), - 'objectVirtualChassis': RefInfo(object_type='dcim.virtualchassis', field_name='object', is_generic=True), - 'objectVirtualCircuit': RefInfo(object_type='circuits.virtualcircuit', field_name='object', is_generic=True), - 'objectVirtualCircuitTermination': RefInfo(object_type='circuits.virtualcircuittermination', field_name='object', is_generic=True), - 'objectVirtualCircuitType': RefInfo(object_type='circuits.virtualcircuittype', field_name='object', is_generic=True), - 'objectVirtualDeviceContext': RefInfo(object_type='dcim.virtualdevicecontext', field_name='object', is_generic=True), - 'objectVirtualDisk': RefInfo(object_type='virtualization.virtualdisk', field_name='object', is_generic=True), - 'objectVirtualMachine': RefInfo(object_type='virtualization.virtualmachine', 
field_name='object', is_generic=True), - 'objectWirelessLan': RefInfo(object_type='wireless.wirelesslan', field_name='object', is_generic=True), - 'objectWirelessLanGroup': RefInfo(object_type='wireless.wirelesslangroup', field_name='object', is_generic=True), - 'objectWirelessLink': RefInfo(object_type='wireless.wirelesslink', field_name='object', is_generic=True), - 'contact': RefInfo(object_type='tenancy.contact', field_name='contact'), - 'role': RefInfo(object_type='tenancy.contactrole', field_name='role'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'tenancy.contactgroup': { - 'parent': RefInfo(object_type='tenancy.contactgroup', field_name='parent'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'tenancy.contactrole': { - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, 'dcim.device': { + 'cluster': RefInfo(object_type='virtualization.cluster', field_name='cluster'), 'deviceType': RefInfo(object_type='dcim.devicetype', field_name='device_type'), - 'role': RefInfo(object_type='dcim.devicerole', field_name='role'), - 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), - 'platform': RefInfo(object_type='dcim.platform', field_name='platform'), - 'site': RefInfo(object_type='dcim.site', field_name='site'), 'location': RefInfo(object_type='dcim.location', field_name='location'), - 'rack': RefInfo(object_type='dcim.rack', field_name='rack'), + 'oobIp': RefInfo(object_type='ipam.ipaddress', field_name='oob_ip'), + 'platform': RefInfo(object_type='dcim.platform', field_name='platform'), 'primaryIp4': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip4'), 'primaryIp6': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip6'), - 'oobIp': RefInfo(object_type='ipam.ipaddress', field_name='oob_ip'), - 'cluster': RefInfo(object_type='virtualization.cluster', field_name='cluster'), + 'rack': 
RefInfo(object_type='dcim.rack', field_name='rack'), + 'role': RefInfo(object_type='dcim.devicerole', field_name='role'), + 'site': RefInfo(object_type='dcim.site', field_name='site'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), 'virtualChassis': RefInfo(object_type='dcim.virtualchassis', field_name='virtual_chassis'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), }, 'dcim.devicebay': { 'device': RefInfo(object_type='dcim.device', field_name='device'), 'installedDevice': RefInfo(object_type='dcim.device', field_name='installed_device'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, 'dcim.devicerole': { - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, 'dcim.devicetype': { - 'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='manufacturer'), 'defaultPlatform': RefInfo(object_type='dcim.platform', field_name='default_platform'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='manufacturer'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.frontport': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'module': RefInfo(object_type='dcim.module', field_name='module'), + 'rearPort': RefInfo(object_type='dcim.rearport', field_name='rear_port'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.interface': { + 'bridge': RefInfo(object_type='dcim.interface', field_name='bridge'), + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'lag': RefInfo(object_type='dcim.interface', field_name='lag'), + 'module': 
RefInfo(object_type='dcim.module', field_name='module'), + 'parent': RefInfo(object_type='dcim.interface', field_name='parent'), + 'primaryMacAddress': RefInfo(object_type='dcim.macaddress', field_name='primary_mac_address'), + 'qinqSvlan': RefInfo(object_type='ipam.vlan', field_name='qinq_svlan'), + 'taggedVlans': RefInfo(object_type='ipam.vlan', field_name='tagged_vlans', is_many=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'untaggedVlan': RefInfo(object_type='ipam.vlan', field_name='untagged_vlan'), + 'vdcs': RefInfo(object_type='dcim.virtualdevicecontext', field_name='vdcs', is_many=True), + 'vlanTranslationPolicy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='vlan_translation_policy'), + 'vrf': RefInfo(object_type='ipam.vrf', field_name='vrf'), + 'wirelessLans': RefInfo(object_type='wireless.wirelesslan', field_name='wireless_lans', is_many=True), + }, + 'dcim.inventoryitem': { + 'componentConsolePort': RefInfo(object_type='dcim.consoleport', field_name='component', is_generic=True), + 'componentConsoleServerPort': RefInfo(object_type='dcim.consoleserverport', field_name='component', is_generic=True), + 'componentFrontPort': RefInfo(object_type='dcim.frontport', field_name='component', is_generic=True), + 'componentInterface': RefInfo(object_type='dcim.interface', field_name='component', is_generic=True), + 'componentPowerOutlet': RefInfo(object_type='dcim.poweroutlet', field_name='component', is_generic=True), + 'componentPowerPort': RefInfo(object_type='dcim.powerport', field_name='component', is_generic=True), + 'componentRearPort': RefInfo(object_type='dcim.rearport', field_name='component', is_generic=True), + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='manufacturer'), + 'parent': RefInfo(object_type='dcim.inventoryitem', field_name='parent'), + 'role': RefInfo(object_type='dcim.inventoryitemrole', 
field_name='role'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.inventoryitemrole': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.location': { + 'parent': RefInfo(object_type='dcim.location', field_name='parent'), + 'site': RefInfo(object_type='dcim.site', field_name='site'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'dcim.macaddress': { + 'assignedObjectInterface': RefInfo(object_type='dcim.interface', field_name='assigned_object', is_generic=True), + 'assignedObjectVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='assigned_object', is_generic=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.manufacturer': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.module': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'moduleBay': RefInfo(object_type='dcim.modulebay', field_name='module_bay'), + 'moduleType': RefInfo(object_type='dcim.moduletype', field_name='module_type'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.modulebay': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'installedModule': RefInfo(object_type='dcim.module', field_name='installed_module'), + 'module': RefInfo(object_type='dcim.module', field_name='module'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.moduletype': { + 'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='manufacturer'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.platform': { + 'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='manufacturer'), + 'tags': RefInfo(object_type='extras.tag', 
field_name='tags', is_many=True), + }, + 'dcim.powerfeed': { + 'powerPanel': RefInfo(object_type='dcim.powerpanel', field_name='power_panel'), + 'rack': RefInfo(object_type='dcim.rack', field_name='rack'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'dcim.poweroutlet': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'module': RefInfo(object_type='dcim.module', field_name='module'), + 'powerPort': RefInfo(object_type='dcim.powerport', field_name='power_port'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.powerpanel': { + 'location': RefInfo(object_type='dcim.location', field_name='location'), + 'site': RefInfo(object_type='dcim.site', field_name='site'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.powerport': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'module': RefInfo(object_type='dcim.module', field_name='module'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.rack': { + 'location': RefInfo(object_type='dcim.location', field_name='location'), + 'rackType': RefInfo(object_type='dcim.racktype', field_name='rack_type'), + 'role': RefInfo(object_type='dcim.rackrole', field_name='role'), + 'site': RefInfo(object_type='dcim.site', field_name='site'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'dcim.rackreservation': { + 'rack': RefInfo(object_type='dcim.rack', field_name='rack'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'dcim.rackrole': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.racktype': { + 
'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='manufacturer'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.rearport': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'module': RefInfo(object_type='dcim.module', field_name='module'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.region': { + 'parent': RefInfo(object_type='dcim.region', field_name='parent'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.site': { + 'asns': RefInfo(object_type='ipam.asn', field_name='asns', is_many=True), + 'group': RefInfo(object_type='dcim.sitegroup', field_name='group'), + 'region': RefInfo(object_type='dcim.region', field_name='region'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'dcim.sitegroup': { + 'parent': RefInfo(object_type='dcim.sitegroup', field_name='parent'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.virtualchassis': { + 'master': RefInfo(object_type='dcim.device', field_name='master'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.virtualdevicecontext': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'primaryIp4': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip4'), + 'primaryIp6': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip6'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'ipam.aggregate': { + 'rir': RefInfo(object_type='ipam.rir', field_name='rir'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'ipam.asn': { + 
'rir': RefInfo(object_type='ipam.rir', field_name='rir'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'ipam.asnrange': { + 'rir': RefInfo(object_type='ipam.rir', field_name='rir'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), }, 'ipam.fhrpgroup': { - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, 'ipam.fhrpgroupassignment': { 'group': RefInfo(object_type='ipam.fhrpgroup', field_name='group'), @@ -329,236 +407,274 @@ class RefInfo: 'interfaceWirelessLanGroup': RefInfo(object_type='wireless.wirelesslangroup', field_name='interface', is_generic=True), 'interfaceWirelessLink': RefInfo(object_type='wireless.wirelesslink', field_name='interface', is_generic=True), }, - 'dcim.frontport': { - 'device': RefInfo(object_type='dcim.device', field_name='device'), - 'module': RefInfo(object_type='dcim.module', field_name='module'), - 'rearPort': RefInfo(object_type='dcim.rearport', field_name='rear_port'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'vpn.ikepolicy': { - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'vpn.ikeproposal': { - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, 'ipam.ipaddress': { - 'vrf': RefInfo(object_type='ipam.vrf', field_name='vrf'), - 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), 'assignedObjectFhrpGroup': RefInfo(object_type='ipam.fhrpgroup', field_name='assigned_object', is_generic=True), 'assignedObjectInterface': RefInfo(object_type='dcim.interface', field_name='assigned_object', is_generic=True), 'assignedObjectVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='assigned_object', is_generic=True), 'natInside': 
RefInfo(object_type='ipam.ipaddress', field_name='nat_inside'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'vrf': RefInfo(object_type='ipam.vrf', field_name='vrf'), }, 'ipam.iprange': { - 'vrf': RefInfo(object_type='ipam.vrf', field_name='vrf'), - 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), 'role': RefInfo(object_type='ipam.role', field_name='role'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'vpn.ipsecpolicy': { - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'vpn.ipsecprofile': { - 'ikePolicy': RefInfo(object_type='vpn.ikepolicy', field_name='ike_policy'), - 'ipsecPolicy': RefInfo(object_type='vpn.ipsecpolicy', field_name='ipsec_policy'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'vpn.ipsecproposal': { - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'vrf': RefInfo(object_type='ipam.vrf', field_name='vrf'), }, - 'dcim.interface': { - 'device': RefInfo(object_type='dcim.device', field_name='device'), - 'module': RefInfo(object_type='dcim.module', field_name='module'), - 'parent': RefInfo(object_type='dcim.interface', field_name='parent'), - 'bridge': RefInfo(object_type='dcim.interface', field_name='bridge'), - 'lag': RefInfo(object_type='dcim.interface', field_name='lag'), - 'primaryMacAddress': RefInfo(object_type='dcim.macaddress', field_name='primary_mac_address'), - 'untaggedVlan': RefInfo(object_type='ipam.vlan', field_name='untagged_vlan'), - 'qinqSvlan': RefInfo(object_type='ipam.vlan', field_name='qinq_svlan'), - 'vlanTranslationPolicy': RefInfo(object_type='ipam.vlantranslationpolicy', 
field_name='vlan_translation_policy'), + 'ipam.prefix': { + 'role': RefInfo(object_type='ipam.role', field_name='role'), + 'scopeLocation': RefInfo(object_type='dcim.location', field_name='scope', is_generic=True), + 'scopeRegion': RefInfo(object_type='dcim.region', field_name='scope', is_generic=True), + 'scopeSite': RefInfo(object_type='dcim.site', field_name='scope', is_generic=True), + 'scopeSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='scope', is_generic=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'vlan': RefInfo(object_type='ipam.vlan', field_name='vlan'), 'vrf': RefInfo(object_type='ipam.vrf', field_name='vrf'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), }, - 'dcim.inventoryitem': { - 'device': RefInfo(object_type='dcim.device', field_name='device'), - 'parent': RefInfo(object_type='dcim.inventoryitem', field_name='parent'), - 'role': RefInfo(object_type='dcim.inventoryitemrole', field_name='role'), - 'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='manufacturer'), - 'componentConsolePort': RefInfo(object_type='dcim.consoleport', field_name='component', is_generic=True), - 'componentConsoleServerPort': RefInfo(object_type='dcim.consoleserverport', field_name='component', is_generic=True), - 'componentFrontPort': RefInfo(object_type='dcim.frontport', field_name='component', is_generic=True), - 'componentInterface': RefInfo(object_type='dcim.interface', field_name='component', is_generic=True), - 'componentPowerOutlet': RefInfo(object_type='dcim.poweroutlet', field_name='component', is_generic=True), - 'componentPowerPort': RefInfo(object_type='dcim.powerport', field_name='component', is_generic=True), - 'componentRearPort': RefInfo(object_type='dcim.rearport', field_name='component', is_generic=True), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'ipam.rir': { + 'tags': 
RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, - 'dcim.inventoryitemrole': { - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'ipam.role': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, - 'vpn.l2vpn': { + 'ipam.routetarget': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), }, - 'vpn.l2vpntermination': { - 'l2Vpn': RefInfo(object_type='vpn.l2vpn', field_name='l2vpn'), - 'assignedObjectInterface': RefInfo(object_type='dcim.interface', field_name='assigned_object', is_generic=True), - 'assignedObjectVlan': RefInfo(object_type='ipam.vlan', field_name='assigned_object', is_generic=True), - 'assignedObjectVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='assigned_object', is_generic=True), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'ipam.service': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'ipaddresses': RefInfo(object_type='ipam.ipaddress', field_name='ipaddresses', is_many=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'virtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='virtual_machine'), }, - 'dcim.location': { + 'ipam.vlan': { + 'group': RefInfo(object_type='ipam.vlangroup', field_name='group'), + 'qinqSvlan': RefInfo(object_type='ipam.vlan', field_name='qinq_svlan'), + 'role': RefInfo(object_type='ipam.role', field_name='role'), 'site': RefInfo(object_type='dcim.site', field_name='site'), - 'parent': RefInfo(object_type='dcim.location', field_name='parent'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), }, - 
'dcim.macaddress': { - 'assignedObjectInterface': RefInfo(object_type='dcim.interface', field_name='assigned_object', is_generic=True), - 'assignedObjectVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='assigned_object', is_generic=True), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'dcim.manufacturer': { - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'ipam.vlangroup': { + 'scopeCluster': RefInfo(object_type='virtualization.cluster', field_name='scope', is_generic=True), + 'scopeClusterGroup': RefInfo(object_type='virtualization.clustergroup', field_name='scope', is_generic=True), + 'scopeLocation': RefInfo(object_type='dcim.location', field_name='scope', is_generic=True), + 'scopeRack': RefInfo(object_type='dcim.rack', field_name='scope', is_generic=True), + 'scopeRegion': RefInfo(object_type='dcim.region', field_name='scope', is_generic=True), + 'scopeSite': RefInfo(object_type='dcim.site', field_name='scope', is_generic=True), + 'scopeSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='scope', is_generic=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, - 'dcim.module': { - 'device': RefInfo(object_type='dcim.device', field_name='device'), - 'moduleBay': RefInfo(object_type='dcim.modulebay', field_name='module_bay'), - 'moduleType': RefInfo(object_type='dcim.moduletype', field_name='module_type'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'ipam.vlantranslationrule': { + 'policy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='policy'), }, - 'dcim.modulebay': { - 'device': RefInfo(object_type='dcim.device', field_name='device'), - 'module': RefInfo(object_type='dcim.module', field_name='module'), - 'installedModule': RefInfo(object_type='dcim.module', field_name='installed_module'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'ipam.vrf': { + 'exportTargets': 
RefInfo(object_type='ipam.routetarget', field_name='export_targets', is_many=True), + 'importTargets': RefInfo(object_type='ipam.routetarget', field_name='import_targets', is_many=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), }, - 'dcim.moduletype': { - 'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='manufacturer'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'tenancy.contact': { + 'group': RefInfo(object_type='tenancy.contactgroup', field_name='group'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, - 'dcim.platform': { - 'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='manufacturer'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'tenancy.contactassignment': { + 'contact': RefInfo(object_type='tenancy.contact', field_name='contact'), + 'objectAsn': RefInfo(object_type='ipam.asn', field_name='object', is_generic=True), + 'objectAsnRange': RefInfo(object_type='ipam.asnrange', field_name='object', is_generic=True), + 'objectAggregate': RefInfo(object_type='ipam.aggregate', field_name='object', is_generic=True), + 'objectCable': RefInfo(object_type='dcim.cable', field_name='object', is_generic=True), + 'objectCablePath': RefInfo(object_type='dcim.cablepath', field_name='object', is_generic=True), + 'objectCableTermination': RefInfo(object_type='dcim.cabletermination', field_name='object', is_generic=True), + 'objectCircuit': RefInfo(object_type='circuits.circuit', field_name='object', is_generic=True), + 'objectCircuitGroup': RefInfo(object_type='circuits.circuitgroup', field_name='object', is_generic=True), + 'objectCircuitGroupAssignment': RefInfo(object_type='circuits.circuitgroupassignment', field_name='object', is_generic=True), + 'objectCircuitTermination': RefInfo(object_type='circuits.circuittermination', field_name='object', 
is_generic=True), + 'objectCircuitType': RefInfo(object_type='circuits.circuittype', field_name='object', is_generic=True), + 'objectCluster': RefInfo(object_type='virtualization.cluster', field_name='object', is_generic=True), + 'objectClusterGroup': RefInfo(object_type='virtualization.clustergroup', field_name='object', is_generic=True), + 'objectClusterType': RefInfo(object_type='virtualization.clustertype', field_name='object', is_generic=True), + 'objectConsolePort': RefInfo(object_type='dcim.consoleport', field_name='object', is_generic=True), + 'objectConsoleServerPort': RefInfo(object_type='dcim.consoleserverport', field_name='object', is_generic=True), + 'objectContact': RefInfo(object_type='tenancy.contact', field_name='object', is_generic=True), + 'objectContactAssignment': RefInfo(object_type='tenancy.contactassignment', field_name='object', is_generic=True), + 'objectContactGroup': RefInfo(object_type='tenancy.contactgroup', field_name='object', is_generic=True), + 'objectContactRole': RefInfo(object_type='tenancy.contactrole', field_name='object', is_generic=True), + 'objectDevice': RefInfo(object_type='dcim.device', field_name='object', is_generic=True), + 'objectDeviceBay': RefInfo(object_type='dcim.devicebay', field_name='object', is_generic=True), + 'objectDeviceRole': RefInfo(object_type='dcim.devicerole', field_name='object', is_generic=True), + 'objectDeviceType': RefInfo(object_type='dcim.devicetype', field_name='object', is_generic=True), + 'objectFhrpGroup': RefInfo(object_type='ipam.fhrpgroup', field_name='object', is_generic=True), + 'objectFhrpGroupAssignment': RefInfo(object_type='ipam.fhrpgroupassignment', field_name='object', is_generic=True), + 'objectFrontPort': RefInfo(object_type='dcim.frontport', field_name='object', is_generic=True), + 'objectIkePolicy': RefInfo(object_type='vpn.ikepolicy', field_name='object', is_generic=True), + 'objectIkeProposal': RefInfo(object_type='vpn.ikeproposal', field_name='object', is_generic=True), + 
'objectIpAddress': RefInfo(object_type='ipam.ipaddress', field_name='object', is_generic=True), + 'objectIpRange': RefInfo(object_type='ipam.iprange', field_name='object', is_generic=True), + 'objectIpSecPolicy': RefInfo(object_type='vpn.ipsecpolicy', field_name='object', is_generic=True), + 'objectIpSecProfile': RefInfo(object_type='vpn.ipsecprofile', field_name='object', is_generic=True), + 'objectIpSecProposal': RefInfo(object_type='vpn.ipsecproposal', field_name='object', is_generic=True), + 'objectInterface': RefInfo(object_type='dcim.interface', field_name='object', is_generic=True), + 'objectInventoryItem': RefInfo(object_type='dcim.inventoryitem', field_name='object', is_generic=True), + 'objectInventoryItemRole': RefInfo(object_type='dcim.inventoryitemrole', field_name='object', is_generic=True), + 'objectL2Vpn': RefInfo(object_type='vpn.l2vpn', field_name='object', is_generic=True), + 'objectL2VpnTermination': RefInfo(object_type='vpn.l2vpntermination', field_name='object', is_generic=True), + 'objectLocation': RefInfo(object_type='dcim.location', field_name='object', is_generic=True), + 'objectMacAddress': RefInfo(object_type='dcim.macaddress', field_name='object', is_generic=True), + 'objectManufacturer': RefInfo(object_type='dcim.manufacturer', field_name='object', is_generic=True), + 'objectModule': RefInfo(object_type='dcim.module', field_name='object', is_generic=True), + 'objectModuleBay': RefInfo(object_type='dcim.modulebay', field_name='object', is_generic=True), + 'objectModuleType': RefInfo(object_type='dcim.moduletype', field_name='object', is_generic=True), + 'objectPlatform': RefInfo(object_type='dcim.platform', field_name='object', is_generic=True), + 'objectPowerFeed': RefInfo(object_type='dcim.powerfeed', field_name='object', is_generic=True), + 'objectPowerOutlet': RefInfo(object_type='dcim.poweroutlet', field_name='object', is_generic=True), + 'objectPowerPanel': RefInfo(object_type='dcim.powerpanel', field_name='object', 
is_generic=True), + 'objectPowerPort': RefInfo(object_type='dcim.powerport', field_name='object', is_generic=True), + 'objectPrefix': RefInfo(object_type='ipam.prefix', field_name='object', is_generic=True), + 'objectProvider': RefInfo(object_type='circuits.provider', field_name='object', is_generic=True), + 'objectProviderAccount': RefInfo(object_type='circuits.provideraccount', field_name='object', is_generic=True), + 'objectProviderNetwork': RefInfo(object_type='circuits.providernetwork', field_name='object', is_generic=True), + 'objectRir': RefInfo(object_type='ipam.rir', field_name='object', is_generic=True), + 'objectRack': RefInfo(object_type='dcim.rack', field_name='object', is_generic=True), + 'objectRackReservation': RefInfo(object_type='dcim.rackreservation', field_name='object', is_generic=True), + 'objectRackRole': RefInfo(object_type='dcim.rackrole', field_name='object', is_generic=True), + 'objectRackType': RefInfo(object_type='dcim.racktype', field_name='object', is_generic=True), + 'objectRearPort': RefInfo(object_type='dcim.rearport', field_name='object', is_generic=True), + 'objectRegion': RefInfo(object_type='dcim.region', field_name='object', is_generic=True), + 'objectRole': RefInfo(object_type='ipam.role', field_name='object', is_generic=True), + 'objectRouteTarget': RefInfo(object_type='ipam.routetarget', field_name='object', is_generic=True), + 'objectService': RefInfo(object_type='ipam.service', field_name='object', is_generic=True), + 'objectSite': RefInfo(object_type='dcim.site', field_name='object', is_generic=True), + 'objectSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='object', is_generic=True), + 'objectTag': RefInfo(object_type='extras.tag', field_name='object', is_generic=True), + 'objectTenant': RefInfo(object_type='tenancy.tenant', field_name='object', is_generic=True), + 'objectTenantGroup': RefInfo(object_type='tenancy.tenantgroup', field_name='object', is_generic=True), + 'objectTunnel': 
RefInfo(object_type='vpn.tunnel', field_name='object', is_generic=True), + 'objectTunnelGroup': RefInfo(object_type='vpn.tunnelgroup', field_name='object', is_generic=True), + 'objectTunnelTermination': RefInfo(object_type='vpn.tunneltermination', field_name='object', is_generic=True), + 'objectVlan': RefInfo(object_type='ipam.vlan', field_name='object', is_generic=True), + 'objectVlanGroup': RefInfo(object_type='ipam.vlangroup', field_name='object', is_generic=True), + 'objectVlanTranslationPolicy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='object', is_generic=True), + 'objectVlanTranslationRule': RefInfo(object_type='ipam.vlantranslationrule', field_name='object', is_generic=True), + 'objectVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='object', is_generic=True), + 'objectVrf': RefInfo(object_type='ipam.vrf', field_name='object', is_generic=True), + 'objectVirtualChassis': RefInfo(object_type='dcim.virtualchassis', field_name='object', is_generic=True), + 'objectVirtualCircuit': RefInfo(object_type='circuits.virtualcircuit', field_name='object', is_generic=True), + 'objectVirtualCircuitTermination': RefInfo(object_type='circuits.virtualcircuittermination', field_name='object', is_generic=True), + 'objectVirtualCircuitType': RefInfo(object_type='circuits.virtualcircuittype', field_name='object', is_generic=True), + 'objectVirtualDeviceContext': RefInfo(object_type='dcim.virtualdevicecontext', field_name='object', is_generic=True), + 'objectVirtualDisk': RefInfo(object_type='virtualization.virtualdisk', field_name='object', is_generic=True), + 'objectVirtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='object', is_generic=True), + 'objectWirelessLan': RefInfo(object_type='wireless.wirelesslan', field_name='object', is_generic=True), + 'objectWirelessLanGroup': RefInfo(object_type='wireless.wirelesslangroup', field_name='object', is_generic=True), + 'objectWirelessLink': 
RefInfo(object_type='wireless.wirelesslink', field_name='object', is_generic=True), + 'role': RefInfo(object_type='tenancy.contactrole', field_name='role'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, - 'dcim.powerfeed': { - 'powerPanel': RefInfo(object_type='dcim.powerpanel', field_name='power_panel'), - 'rack': RefInfo(object_type='dcim.rack', field_name='rack'), - 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'tenancy.contactgroup': { + 'parent': RefInfo(object_type='tenancy.contactgroup', field_name='parent'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, - 'dcim.poweroutlet': { - 'device': RefInfo(object_type='dcim.device', field_name='device'), - 'module': RefInfo(object_type='dcim.module', field_name='module'), - 'powerPort': RefInfo(object_type='dcim.powerport', field_name='power_port'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'tenancy.contactrole': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, - 'dcim.powerpanel': { - 'site': RefInfo(object_type='dcim.site', field_name='site'), - 'location': RefInfo(object_type='dcim.location', field_name='location'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'tenancy.tenant': { + 'group': RefInfo(object_type='tenancy.tenantgroup', field_name='group'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, - 'dcim.powerport': { - 'device': RefInfo(object_type='dcim.device', field_name='device'), - 'module': RefInfo(object_type='dcim.module', field_name='module'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'tenancy.tenantgroup': { + 'parent': RefInfo(object_type='tenancy.tenantgroup', field_name='parent'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, - 'ipam.prefix': { - 'vrf': 
RefInfo(object_type='ipam.vrf', field_name='vrf'), + 'virtualization.cluster': { + 'group': RefInfo(object_type='virtualization.clustergroup', field_name='group'), 'scopeLocation': RefInfo(object_type='dcim.location', field_name='scope', is_generic=True), 'scopeRegion': RefInfo(object_type='dcim.region', field_name='scope', is_generic=True), 'scopeSite': RefInfo(object_type='dcim.site', field_name='scope', is_generic=True), 'scopeSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='scope', is_generic=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), - 'vlan': RefInfo(object_type='ipam.vlan', field_name='vlan'), - 'role': RefInfo(object_type='ipam.role', field_name='role'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'circuits.provider': { - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'type': RefInfo(object_type='virtualization.clustertype', field_name='type'), }, - 'circuits.provideraccount': { - 'provider': RefInfo(object_type='circuits.provider', field_name='provider'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'virtualization.clustergroup': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, - 'circuits.providernetwork': { - 'provider': RefInfo(object_type='circuits.provider', field_name='provider'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'virtualization.clustertype': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, - 'ipam.rir': { - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'virtualization.virtualdisk': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'virtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='virtual_machine'), }, - 'dcim.rack': { + 'virtualization.virtualmachine': { + 'cluster': 
RefInfo(object_type='virtualization.cluster', field_name='cluster'), + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'platform': RefInfo(object_type='dcim.platform', field_name='platform'), + 'primaryIp4': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip4'), + 'primaryIp6': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip6'), + 'role': RefInfo(object_type='dcim.devicerole', field_name='role'), 'site': RefInfo(object_type='dcim.site', field_name='site'), - 'location': RefInfo(object_type='dcim.location', field_name='location'), - 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), - 'role': RefInfo(object_type='dcim.rackrole', field_name='role'), - 'rackType': RefInfo(object_type='dcim.racktype', field_name='rack_type'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'dcim.rackreservation': { - 'rack': RefInfo(object_type='dcim.rack', field_name='rack'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'dcim.rackrole': { - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), }, - 'dcim.racktype': { - 'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='manufacturer'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'virtualization.vminterface': { + 'bridge': RefInfo(object_type='virtualization.vminterface', field_name='bridge'), + 'parent': RefInfo(object_type='virtualization.vminterface', field_name='parent'), + 'primaryMacAddress': RefInfo(object_type='dcim.macaddress', field_name='primary_mac_address'), + 'qinqSvlan': RefInfo(object_type='ipam.vlan', field_name='qinq_svlan'), + 'taggedVlans': RefInfo(object_type='ipam.vlan', field_name='tagged_vlans', is_many=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'untaggedVlan': 
RefInfo(object_type='ipam.vlan', field_name='untagged_vlan'), + 'virtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='virtual_machine'), + 'vlanTranslationPolicy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='vlan_translation_policy'), + 'vrf': RefInfo(object_type='ipam.vrf', field_name='vrf'), }, - 'dcim.rearport': { - 'device': RefInfo(object_type='dcim.device', field_name='device'), - 'module': RefInfo(object_type='dcim.module', field_name='module'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'vpn.ikepolicy': { + 'proposals': RefInfo(object_type='vpn.ikeproposal', field_name='proposals', is_many=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, - 'dcim.region': { - 'parent': RefInfo(object_type='dcim.region', field_name='parent'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'vpn.ikeproposal': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, - 'ipam.role': { - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'vpn.ipsecpolicy': { + 'proposals': RefInfo(object_type='vpn.ipsecproposal', field_name='proposals', is_many=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, - 'ipam.routetarget': { - 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'vpn.ipsecprofile': { + 'ikePolicy': RefInfo(object_type='vpn.ikepolicy', field_name='ike_policy'), + 'ipsecPolicy': RefInfo(object_type='vpn.ipsecpolicy', field_name='ipsec_policy'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, - 'ipam.service': { - 'device': RefInfo(object_type='dcim.device', field_name='device'), - 'virtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='virtual_machine'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 
'vpn.ipsecproposal': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, - 'dcim.site': { - 'region': RefInfo(object_type='dcim.region', field_name='region'), - 'group': RefInfo(object_type='dcim.sitegroup', field_name='group'), + 'vpn.l2vpn': { + 'exportTargets': RefInfo(object_type='ipam.routetarget', field_name='export_targets', is_many=True), + 'importTargets': RefInfo(object_type='ipam.routetarget', field_name='import_targets', is_many=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'dcim.sitegroup': { - 'parent': RefInfo(object_type='dcim.sitegroup', field_name='parent'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'tenancy.tenant': { - 'group': RefInfo(object_type='tenancy.tenantgroup', field_name='group'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), }, - 'tenancy.tenantgroup': { - 'parent': RefInfo(object_type='tenancy.tenantgroup', field_name='parent'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'vpn.l2vpntermination': { + 'assignedObjectInterface': RefInfo(object_type='dcim.interface', field_name='assigned_object', is_generic=True), + 'assignedObjectVlan': RefInfo(object_type='ipam.vlan', field_name='assigned_object', is_generic=True), + 'assignedObjectVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='assigned_object', is_generic=True), + 'l2Vpn': RefInfo(object_type='vpn.l2vpn', field_name='l2vpn'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, 'vpn.tunnel': { 'group': RefInfo(object_type='vpn.tunnelgroup', field_name='group'), 'ipsecProfile': RefInfo(object_type='vpn.ipsecprofile', field_name='ipsec_profile'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), 'tenant': 
RefInfo(object_type='tenancy.tenant', field_name='tenant'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), }, 'vpn.tunnelgroup': { - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, 'vpn.tunneltermination': { - 'tunnel': RefInfo(object_type='vpn.tunnel', field_name='tunnel'), + 'outsideIp': RefInfo(object_type='ipam.ipaddress', field_name='outside_ip'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), 'terminationAsn': RefInfo(object_type='ipam.asn', field_name='termination', is_generic=True), 'terminationAsnRange': RefInfo(object_type='ipam.asnrange', field_name='termination', is_generic=True), 'terminationAggregate': RefInfo(object_type='ipam.aggregate', field_name='termination', is_generic=True), @@ -647,113 +763,130 @@ class RefInfo: 'terminationWirelessLan': RefInfo(object_type='wireless.wirelesslan', field_name='termination', is_generic=True), 'terminationWirelessLanGroup': RefInfo(object_type='wireless.wirelesslangroup', field_name='termination', is_generic=True), 'terminationWirelessLink': RefInfo(object_type='wireless.wirelesslink', field_name='termination', is_generic=True), - 'outsideIp': RefInfo(object_type='ipam.ipaddress', field_name='outside_ip'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'ipam.vlan': { - 'site': RefInfo(object_type='dcim.site', field_name='site'), - 'group': RefInfo(object_type='ipam.vlangroup', field_name='group'), - 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), - 'role': RefInfo(object_type='ipam.role', field_name='role'), - 'qinqSvlan': RefInfo(object_type='ipam.vlan', field_name='qinq_svlan'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'ipam.vlangroup': { - 'scopeCluster': RefInfo(object_type='virtualization.cluster', field_name='scope', is_generic=True), - 'scopeClusterGroup': 
RefInfo(object_type='virtualization.clustergroup', field_name='scope', is_generic=True), - 'scopeLocation': RefInfo(object_type='dcim.location', field_name='scope', is_generic=True), - 'scopeRack': RefInfo(object_type='dcim.rack', field_name='scope', is_generic=True), - 'scopeRegion': RefInfo(object_type='dcim.region', field_name='scope', is_generic=True), - 'scopeSite': RefInfo(object_type='dcim.site', field_name='scope', is_generic=True), - 'scopeSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='scope', is_generic=True), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'ipam.vlantranslationrule': { - 'policy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='policy'), - }, - 'virtualization.vminterface': { - 'virtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='virtual_machine'), - 'parent': RefInfo(object_type='virtualization.vminterface', field_name='parent'), - 'bridge': RefInfo(object_type='virtualization.vminterface', field_name='bridge'), - 'primaryMacAddress': RefInfo(object_type='dcim.macaddress', field_name='primary_mac_address'), - 'untaggedVlan': RefInfo(object_type='ipam.vlan', field_name='untagged_vlan'), - 'qinqSvlan': RefInfo(object_type='ipam.vlan', field_name='qinq_svlan'), - 'vlanTranslationPolicy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='vlan_translation_policy'), - 'vrf': RefInfo(object_type='ipam.vrf', field_name='vrf'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'ipam.vrf': { - 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'dcim.virtualchassis': { - 'master': RefInfo(object_type='dcim.device', field_name='master'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'circuits.virtualcircuit': { - 'providerNetwork': RefInfo(object_type='circuits.providernetwork', field_name='provider_network'), - 
'providerAccount': RefInfo(object_type='circuits.provideraccount', field_name='provider_account'), - 'type': RefInfo(object_type='circuits.virtualcircuittype', field_name='type'), - 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'circuits.virtualcircuittermination': { - 'virtualCircuit': RefInfo(object_type='circuits.virtualcircuit', field_name='virtual_circuit'), - 'interface': RefInfo(object_type='dcim.interface', field_name='interface'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'circuits.virtualcircuittype': { - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'dcim.virtualdevicecontext': { - 'device': RefInfo(object_type='dcim.device', field_name='device'), - 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), - 'primaryIp4': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip4'), - 'primaryIp6': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip6'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'virtualization.virtualdisk': { - 'virtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='virtual_machine'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), - }, - 'virtualization.virtualmachine': { - 'site': RefInfo(object_type='dcim.site', field_name='site'), - 'cluster': RefInfo(object_type='virtualization.cluster', field_name='cluster'), - 'device': RefInfo(object_type='dcim.device', field_name='device'), - 'role': RefInfo(object_type='dcim.devicerole', field_name='role'), - 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), - 'platform': RefInfo(object_type='dcim.platform', field_name='platform'), - 'primaryIp4': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip4'), - 'primaryIp6': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip6'), - 'tags': RefInfo(object_type='extras.tag', 
field_name='tags'), + 'tunnel': RefInfo(object_type='vpn.tunnel', field_name='tunnel'), }, 'wireless.wirelesslan': { 'group': RefInfo(object_type='wireless.wirelesslangroup', field_name='group'), - 'vlan': RefInfo(object_type='ipam.vlan', field_name='vlan'), 'scopeLocation': RefInfo(object_type='dcim.location', field_name='scope', is_generic=True), 'scopeRegion': RefInfo(object_type='dcim.region', field_name='scope', is_generic=True), 'scopeSite': RefInfo(object_type='dcim.site', field_name='scope', is_generic=True), 'scopeSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='scope', is_generic=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'vlan': RefInfo(object_type='ipam.vlan', field_name='vlan'), }, 'wireless.wirelesslangroup': { 'parent': RefInfo(object_type='wireless.wirelesslangroup', field_name='parent'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), }, 'wireless.wirelesslink': { 'interfaceA': RefInfo(object_type='dcim.interface', field_name='interface_a'), 'interfaceB': RefInfo(object_type='dcim.interface', field_name='interface_b'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), - 'tags': RefInfo(object_type='extras.tag', field_name='tags'), }, } def get_json_ref_info(object_type: str|Type[models.Model], json_field_name: str) -> RefInfo|None: - if isinstance(object_type, models.Model): - content_type = ContentType.objects.get_for_model(object_type) - object_type = content_type.app_label + '.' 
+ content_type.model - return _REF_INFO.get(object_type, {}).get(json_field_name) + if not isinstance(object_type, str): + object_type = get_object_type(object_type) + return _JSON_REF_INFO.get(object_type, {}).get(json_field_name) + +_LEGAL_FIELDS = { + 'circuits.circuit': frozenset(['assignments', 'cid', 'comments', 'commit_rate', 'custom_fields', 'description', 'distance', 'distance_unit', 'install_date', 'provider', 'provider_account', 'status', 'tags', 'tenant', 'termination_date', 'type']), + 'circuits.circuitgroup': frozenset(['custom_fields', 'description', 'name', 'slug', 'tags', 'tenant']), + 'circuits.circuitgroupassignment': frozenset(['group', 'member_id', 'member_type', 'priority', 'tags']), + 'circuits.circuittermination': frozenset(['circuit', 'custom_fields', 'description', 'mark_connected', 'port_speed', 'pp_info', 'tags', 'term_side', 'termination_id', 'termination_type', 'upstream_speed', 'xconnect_id']), + 'circuits.circuittype': frozenset(['color', 'custom_fields', 'description', 'name', 'slug', 'tags']), + 'circuits.provider': frozenset(['accounts', 'asns', 'comments', 'custom_fields', 'description', 'name', 'slug', 'tags']), + 'circuits.provideraccount': frozenset(['account', 'comments', 'custom_fields', 'description', 'name', 'provider', 'tags']), + 'circuits.providernetwork': frozenset(['comments', 'custom_fields', 'description', 'name', 'provider', 'service_id', 'tags']), + 'circuits.virtualcircuit': frozenset(['cid', 'comments', 'custom_fields', 'description', 'provider_account', 'provider_network', 'status', 'tags', 'tenant', 'type']), + 'circuits.virtualcircuittermination': frozenset(['custom_fields', 'description', 'interface', 'role', 'tags', 'virtual_circuit']), + 'circuits.virtualcircuittype': frozenset(['color', 'custom_fields', 'description', 'name', 'slug', 'tags']), + 'dcim.cable': frozenset(['a_terminations', 'b_terminations', 'color', 'comments', 'custom_fields', 'description', 'label', 'length', 'length_unit', 'status', 
'tags', 'tenant', 'type']), + 'dcim.cablepath': frozenset(['is_active', 'is_complete', 'is_split']), + 'dcim.cabletermination': frozenset(['cable', 'cable_end', 'termination_id', 'termination_type']), + 'dcim.consoleport': frozenset(['custom_fields', 'description', 'device', 'label', 'mark_connected', 'module', 'name', 'speed', 'tags', 'type']), + 'dcim.consoleserverport': frozenset(['custom_fields', 'description', 'device', 'label', 'mark_connected', 'module', 'name', 'speed', 'tags', 'type']), + 'dcim.device': frozenset(['airflow', 'asset_tag', 'cluster', 'comments', 'custom_fields', 'description', 'device_type', 'face', 'latitude', 'location', 'longitude', 'name', 'oob_ip', 'platform', 'position', 'primary_ip4', 'primary_ip6', 'rack', 'role', 'serial', 'site', 'status', 'tags', 'tenant', 'vc_position', 'vc_priority', 'virtual_chassis']), + 'dcim.devicebay': frozenset(['custom_fields', 'description', 'device', 'installed_device', 'label', 'name', 'tags']), + 'dcim.devicerole': frozenset(['color', 'custom_fields', 'description', 'name', 'slug', 'tags', 'vm_role']), + 'dcim.devicetype': frozenset(['airflow', 'comments', 'custom_fields', 'default_platform', 'description', 'exclude_from_utilization', 'is_full_depth', 'manufacturer', 'model', 'part_number', 'slug', 'subdevice_role', 'tags', 'u_height', 'weight', 'weight_unit']), + 'dcim.frontport': frozenset(['color', 'custom_fields', 'description', 'device', 'label', 'mark_connected', 'module', 'name', 'rear_port', 'rear_port_position', 'tags', 'type']), + 'dcim.interface': frozenset(['bridge', 'custom_fields', 'description', 'device', 'duplex', 'enabled', 'label', 'lag', 'mark_connected', 'mgmt_only', 'mode', 'module', 'mtu', 'name', 'parent', 'poe_mode', 'poe_type', 'primary_mac_address', 'qinq_svlan', 'rf_channel', 'rf_channel_frequency', 'rf_channel_width', 'rf_role', 'speed', 'tagged_vlans', 'tags', 'tx_power', 'type', 'untagged_vlan', 'vdcs', 'vlan_translation_policy', 'vrf', 'wireless_lans', 'wwn']), + 
'dcim.inventoryitem': frozenset(['asset_tag', 'component_id', 'component_type', 'custom_fields', 'description', 'device', 'discovered', 'label', 'manufacturer', 'name', 'parent', 'part_id', 'role', 'serial', 'status', 'tags']), + 'dcim.inventoryitemrole': frozenset(['color', 'custom_fields', 'description', 'name', 'slug', 'tags']), + 'dcim.location': frozenset(['custom_fields', 'description', 'facility', 'name', 'parent', 'site', 'slug', 'status', 'tags', 'tenant']), + 'dcim.macaddress': frozenset(['assigned_object_id', 'assigned_object_type', 'comments', 'custom_fields', 'description', 'mac_address', 'tags']), + 'dcim.manufacturer': frozenset(['custom_fields', 'description', 'name', 'slug', 'tags']), + 'dcim.module': frozenset(['asset_tag', 'comments', 'custom_fields', 'description', 'device', 'module_bay', 'module_type', 'serial', 'status', 'tags']), + 'dcim.modulebay': frozenset(['custom_fields', 'description', 'device', 'installed_module', 'label', 'module', 'name', 'position', 'tags']), + 'dcim.moduletype': frozenset(['airflow', 'comments', 'custom_fields', 'description', 'manufacturer', 'model', 'part_number', 'tags', 'weight', 'weight_unit']), + 'dcim.platform': frozenset(['custom_fields', 'description', 'manufacturer', 'name', 'slug', 'tags']), + 'dcim.powerfeed': frozenset(['amperage', 'comments', 'custom_fields', 'description', 'mark_connected', 'max_utilization', 'name', 'phase', 'power_panel', 'rack', 'status', 'supply', 'tags', 'tenant', 'type', 'voltage']), + 'dcim.poweroutlet': frozenset(['color', 'custom_fields', 'description', 'device', 'feed_leg', 'label', 'mark_connected', 'module', 'name', 'power_port', 'tags', 'type']), + 'dcim.powerpanel': frozenset(['comments', 'custom_fields', 'description', 'location', 'name', 'site', 'tags']), + 'dcim.powerport': frozenset(['allocated_draw', 'custom_fields', 'description', 'device', 'label', 'mark_connected', 'maximum_draw', 'module', 'name', 'tags', 'type']), + 'dcim.rack': frozenset(['airflow', 
'asset_tag', 'comments', 'custom_fields', 'desc_units', 'description', 'facility_id', 'form_factor', 'location', 'max_weight', 'mounting_depth', 'name', 'outer_depth', 'outer_unit', 'outer_width', 'rack_type', 'role', 'serial', 'site', 'starting_unit', 'status', 'tags', 'tenant', 'u_height', 'weight', 'weight_unit', 'width']), + 'dcim.rackreservation': frozenset(['comments', 'custom_fields', 'description', 'rack', 'tags', 'tenant', 'units']), + 'dcim.rackrole': frozenset(['color', 'custom_fields', 'description', 'name', 'slug', 'tags']), + 'dcim.racktype': frozenset(['comments', 'custom_fields', 'desc_units', 'description', 'form_factor', 'manufacturer', 'max_weight', 'model', 'mounting_depth', 'outer_depth', 'outer_unit', 'outer_width', 'slug', 'starting_unit', 'tags', 'u_height', 'weight', 'weight_unit', 'width']), + 'dcim.rearport': frozenset(['color', 'custom_fields', 'description', 'device', 'label', 'mark_connected', 'module', 'name', 'positions', 'tags', 'type']), + 'dcim.region': frozenset(['custom_fields', 'description', 'name', 'parent', 'slug', 'tags']), + 'dcim.site': frozenset(['asns', 'comments', 'custom_fields', 'description', 'facility', 'group', 'latitude', 'longitude', 'name', 'physical_address', 'region', 'shipping_address', 'slug', 'status', 'tags', 'tenant', 'time_zone']), + 'dcim.sitegroup': frozenset(['custom_fields', 'description', 'name', 'parent', 'slug', 'tags']), + 'dcim.virtualchassis': frozenset(['comments', 'custom_fields', 'description', 'domain', 'master', 'name', 'tags']), + 'dcim.virtualdevicecontext': frozenset(['comments', 'custom_fields', 'description', 'device', 'identifier', 'name', 'primary_ip4', 'primary_ip6', 'status', 'tags', 'tenant']), + 'extras.tag': frozenset(['color', 'name', 'slug']), + 'ipam.aggregate': frozenset(['comments', 'custom_fields', 'date_added', 'description', 'prefix', 'rir', 'tags', 'tenant']), + 'ipam.asn': frozenset(['asn', 'comments', 'custom_fields', 'description', 'rir', 'tags', 'tenant']), + 
'ipam.asnrange': frozenset(['custom_fields', 'description', 'end', 'name', 'rir', 'slug', 'start', 'tags', 'tenant']), + 'ipam.fhrpgroup': frozenset(['auth_key', 'auth_type', 'comments', 'custom_fields', 'description', 'group_id', 'name', 'protocol', 'tags']), + 'ipam.fhrpgroupassignment': frozenset(['group', 'interface_id', 'interface_type', 'priority']), + 'ipam.ipaddress': frozenset(['address', 'assigned_object_id', 'assigned_object_type', 'comments', 'custom_fields', 'description', 'dns_name', 'nat_inside', 'role', 'status', 'tags', 'tenant', 'vrf']), + 'ipam.iprange': frozenset(['comments', 'custom_fields', 'description', 'end_address', 'mark_utilized', 'role', 'start_address', 'status', 'tags', 'tenant', 'vrf']), + 'ipam.prefix': frozenset(['comments', 'custom_fields', 'description', 'is_pool', 'mark_utilized', 'prefix', 'role', 'scope_id', 'scope_type', 'status', 'tags', 'tenant', 'vlan', 'vrf']), + 'ipam.rir': frozenset(['custom_fields', 'description', 'is_private', 'name', 'slug', 'tags']), + 'ipam.role': frozenset(['custom_fields', 'description', 'name', 'slug', 'tags', 'weight']), + 'ipam.routetarget': frozenset(['comments', 'custom_fields', 'description', 'name', 'tags', 'tenant']), + 'ipam.service': frozenset(['comments', 'custom_fields', 'description', 'device', 'ipaddresses', 'name', 'ports', 'protocol', 'tags', 'virtual_machine']), + 'ipam.vlan': frozenset(['comments', 'custom_fields', 'description', 'group', 'name', 'qinq_role', 'qinq_svlan', 'role', 'site', 'status', 'tags', 'tenant', 'vid']), + 'ipam.vlangroup': frozenset(['custom_fields', 'description', 'name', 'scope_id', 'scope_type', 'slug', 'tags', 'vid_ranges']), + 'ipam.vlantranslationpolicy': frozenset(['description', 'name']), + 'ipam.vlantranslationrule': frozenset(['description', 'local_vid', 'policy', 'remote_vid']), + 'ipam.vrf': frozenset(['comments', 'custom_fields', 'description', 'enforce_unique', 'export_targets', 'import_targets', 'name', 'rd', 'tags', 'tenant']), + 
'tenancy.contact': frozenset(['address', 'comments', 'custom_fields', 'description', 'email', 'group', 'link', 'name', 'phone', 'tags', 'title']), + 'tenancy.contactassignment': frozenset(['contact', 'custom_fields', 'object_id', 'object_type', 'priority', 'role', 'tags']), + 'tenancy.contactgroup': frozenset(['custom_fields', 'description', 'name', 'parent', 'slug', 'tags']), + 'tenancy.contactrole': frozenset(['custom_fields', 'description', 'name', 'slug', 'tags']), + 'tenancy.tenant': frozenset(['comments', 'custom_fields', 'description', 'group', 'name', 'slug', 'tags']), + 'tenancy.tenantgroup': frozenset(['custom_fields', 'description', 'name', 'parent', 'slug', 'tags']), + 'virtualization.cluster': frozenset(['comments', 'custom_fields', 'description', 'group', 'name', 'scope_id', 'scope_type', 'status', 'tags', 'tenant', 'type']), + 'virtualization.clustergroup': frozenset(['custom_fields', 'description', 'name', 'slug', 'tags']), + 'virtualization.clustertype': frozenset(['custom_fields', 'description', 'name', 'slug', 'tags']), + 'virtualization.virtualdisk': frozenset(['custom_fields', 'description', 'name', 'size', 'tags', 'virtual_machine']), + 'virtualization.virtualmachine': frozenset(['cluster', 'comments', 'custom_fields', 'description', 'device', 'disk', 'memory', 'name', 'platform', 'primary_ip4', 'primary_ip6', 'role', 'serial', 'site', 'status', 'tags', 'tenant', 'vcpus']), + 'virtualization.vminterface': frozenset(['bridge', 'custom_fields', 'description', 'enabled', 'mode', 'mtu', 'name', 'parent', 'primary_mac_address', 'qinq_svlan', 'tagged_vlans', 'tags', 'untagged_vlan', 'virtual_machine', 'vlan_translation_policy', 'vrf']), + 'vpn.ikepolicy': frozenset(['comments', 'custom_fields', 'description', 'mode', 'name', 'preshared_key', 'proposals', 'tags', 'version']), + 'vpn.ikeproposal': frozenset(['authentication_algorithm', 'authentication_method', 'comments', 'custom_fields', 'description', 'encryption_algorithm', 'group', 'name', 
'sa_lifetime', 'tags']), + 'vpn.ipsecpolicy': frozenset(['comments', 'custom_fields', 'description', 'name', 'pfs_group', 'proposals', 'tags']), + 'vpn.ipsecprofile': frozenset(['comments', 'custom_fields', 'description', 'ike_policy', 'ipsec_policy', 'mode', 'name', 'tags']), + 'vpn.ipsecproposal': frozenset(['authentication_algorithm', 'comments', 'custom_fields', 'description', 'encryption_algorithm', 'name', 'sa_lifetime_data', 'sa_lifetime_seconds', 'tags']), + 'vpn.l2vpn': frozenset(['comments', 'custom_fields', 'description', 'export_targets', 'identifier', 'import_targets', 'name', 'slug', 'tags', 'tenant', 'type']), + 'vpn.l2vpntermination': frozenset(['assigned_object_id', 'assigned_object_type', 'custom_fields', 'l2vpn', 'tags']), + 'vpn.tunnel': frozenset(['comments', 'custom_fields', 'description', 'encapsulation', 'group', 'ipsec_profile', 'name', 'status', 'tags', 'tenant', 'tunnel_id']), + 'vpn.tunnelgroup': frozenset(['custom_fields', 'description', 'name', 'slug', 'tags']), + 'vpn.tunneltermination': frozenset(['custom_fields', 'outside_ip', 'role', 'tags', 'termination_id', 'termination_type', 'tunnel']), + 'wireless.wirelesslan': frozenset(['auth_cipher', 'auth_psk', 'auth_type', 'comments', 'custom_fields', 'description', 'group', 'scope_id', 'scope_type', 'ssid', 'status', 'tags', 'tenant', 'vlan']), + 'wireless.wirelesslangroup': frozenset(['custom_fields', 'description', 'name', 'parent', 'slug', 'tags']), + 'wireless.wirelesslink': frozenset(['auth_cipher', 'auth_psk', 'auth_type', 'comments', 'custom_fields', 'description', 'distance', 'distance_unit', 'interface_a', 'interface_b', 'ssid', 'status', 'tags', 'tenant']), +} + +def legal_fields(object_type: str|Type[models.Model]) -> frozenset[str]: + if not isinstance(object_type, str): + object_type = get_object_type(object_type) + return _LEGAL_FIELDS.get(object_type, frozenset()) _OBJECT_TYPE_PRIMARY_VALUE_FIELD_MAP = { 'ipam.asn': 'asn', @@ -766,76 +899,8 @@ def 
get_json_ref_info(object_type: str|Type[models.Model], json_field_name: str) 'dcim.racktype': 'model', 'circuits.virtualcircuit': 'cid', 'wireless.wirelesslan': 'ssid', - 'ipam.asnrange': 'name', - 'circuits.circuitgroup': 'name', - 'circuits.circuittype': 'name', - 'virtualization.cluster': 'name', - 'virtualization.clustergroup': 'name', - 'virtualization.clustertype': 'name', - 'dcim.consoleport': 'name', - 'dcim.consoleserverport': 'name', - 'tenancy.contact': 'name', - 'tenancy.contactgroup': 'name', - 'tenancy.contactrole': 'name', - 'dcim.device': 'name', - 'dcim.devicebay': 'name', - 'dcim.devicerole': 'name', - 'ipam.fhrpgroup': 'name', - 'dcim.frontport': 'name', - 'vpn.ikepolicy': 'name', - 'vpn.ikeproposal': 'name', - 'vpn.ipsecpolicy': 'name', - 'vpn.ipsecprofile': 'name', - 'vpn.ipsecproposal': 'name', - 'dcim.interface': 'name', - 'dcim.inventoryitem': 'name', - 'dcim.inventoryitemrole': 'name', - 'vpn.l2vpn': 'name', - 'dcim.location': 'name', - 'dcim.manufacturer': 'name', - 'dcim.modulebay': 'name', - 'dcim.platform': 'name', - 'dcim.powerfeed': 'name', - 'dcim.poweroutlet': 'name', - 'dcim.powerpanel': 'name', - 'dcim.powerport': 'name', - 'circuits.provider': 'name', - 'circuits.provideraccount': 'name', - 'circuits.providernetwork': 'name', - 'ipam.rir': 'name', - 'dcim.rack': 'name', - 'dcim.rackrole': 'name', - 'dcim.rearport': 'name', - 'dcim.region': 'name', - 'ipam.role': 'name', - 'ipam.routetarget': 'name', - 'ipam.service': 'name', - 'dcim.site': 'name', - 'dcim.sitegroup': 'name', - 'extras.tag': 'name', - 'tenancy.tenant': 'name', - 'tenancy.tenantgroup': 'name', - 'vpn.tunnel': 'name', - 'vpn.tunnelgroup': 'name', - 'ipam.vlan': 'name', - 'ipam.vlangroup': 'name', - 'ipam.vlantranslationpolicy': 'name', - 'virtualization.vminterface': 'name', - 'ipam.vrf': 'name', - 'dcim.virtualchassis': 'name', - 'circuits.virtualcircuittype': 'name', - 'dcim.virtualdevicecontext': 'name', - 'virtualization.virtualdisk': 'name', - 
'virtualization.virtualmachine': 'name', - 'wireless.wirelesslangroup': 'name', } def get_primary_value(data: dict, object_type: str) -> str|None: - field = _OBJECT_TYPE_PRIMARY_VALUE_FIELD_MAP.get(object_type) - if field is None: - return None - return data.get(field) - - -def get_primary_value_field(object_type: str, default=None) -> str: - return _OBJECT_TYPE_PRIMARY_VALUE_FIELD_MAP.get(object_type, default) + field = _OBJECT_TYPE_PRIMARY_VALUE_FIELD_MAP.get(object_type, 'name') + return data.get(field) \ No newline at end of file diff --git a/netbox_diode_plugin/api/transformer.py b/netbox_diode_plugin/api/transformer.py index 88961af..5f0e699 100644 --- a/netbox_diode_plugin/api/transformer.py +++ b/netbox_diode_plugin/api/transformer.py @@ -15,12 +15,10 @@ from .common import UnresolvedReference from .matcher import find_existing_object, fingerprint, merge_data -from .plugin_utils import get_json_ref_info, get_primary_value_field +from .plugin_utils import get_json_ref_info, get_primary_value logger = logging.getLogger("netbox.diode_data") -_DEFAULT_SLUG_SOURCE_FIELD_NAME = "name" - @lru_cache(maxsize=128) def _camel_to_snake_case(name): """Convert camelCase string to snake_case.""" @@ -77,8 +75,8 @@ def transform_proto_json(proto_json: dict, object_type: str, supported_models: d logger.error(f"_resolve_references: {json.dumps(resolved, default=lambda o: str(o), indent=4)}") _set_defaults(resolved, supported_models) logger.error(f"_set_defaults: {json.dumps(resolved, default=lambda o: str(o), indent=4)}") - output = _move_if_unresolved(resolved) - logger.error(f"_move_if_unresolved: {json.dumps(output, default=lambda o: str(o), indent=4)}") + output = _handle_post_creates(resolved) + logger.error(f"_merge_post_creates: {json.dumps(output, default=lambda o: str(o), indent=4)}") _check_unresolved_refs(output) return output @@ -94,7 +92,7 @@ def _transform_proto_json_1(proto_json: dict, object_type: str, context=None, ex existing = existing or {} entities = 
[transformed] - move_if_unresolved = defaultdict(list) + post_create = {} for key, value in proto_json.items(): ref_info = get_json_ref_info(object_type, key) @@ -103,43 +101,53 @@ def _transform_proto_json_1(proto_json: dict, object_type: str, context=None, ex continue nested_context = _nested_context(object_type, uuid, ref_info.field_name) - - # nested reference field_name = ref_info.field_name - - # if this is potentially a circular reference, we need to mark this for - # later checking. is_circular = _is_circular_reference(object_type, field_name) if ref_info.is_generic: transformed[field_name + "_type"] = ref_info.object_type field_name = field_name + "_id" + nested_refs = [] + ref_value = None if isinstance(value, list): - ref_values = [] + ref_value = [] for item in value: - nested_refs = _transform_proto_json_1(item, ref_info.object_type, nested_context) - ref = nested_refs[-1] - if is_circular: - move_if_unresolved[field_name].append(ref['_uuid']) - ref_values.append(UnresolvedReference( + nested = _transform_proto_json_1(item, ref_info.object_type, nested_context) + nested_refs += nested + ref = nested[-1] + ref_value.append(UnresolvedReference( object_type=ref_info.object_type, uuid=ref['_uuid'], )) - entities = nested_refs + entities - transformed[field_name] = ref_values else: nested_refs = _transform_proto_json_1(value, ref_info.object_type, nested_context) ref = nested_refs[-1] - if is_circular: - move_if_unresolved[field_name].append(ref['_uuid']) - transformed[field_name] = UnresolvedReference( + ref_value = UnresolvedReference( object_type=ref_info.object_type, uuid=ref['_uuid'], ) + if is_circular: + post_create[field_name] = ref_value + entities = entities + nested_refs + else: + transformed[field_name] = ref_value entities = nested_refs + entities - if len(move_if_unresolved) > 0: - transformed['_move_if_unresolved'] = move_if_unresolved + + # if there are fields that must be deferred until after the object is created, + # add a new entity with 
the post-create data. eg a child object that references + # this object and is also referenced by this object such as primary mac address + # on an interface. + # if this object already exists, two steps are not needed, and this will be + # simplified in a later pass. + if len(post_create) > 0: + post_create_uuid = str(uuid4()) + post_create['_uuid'] = post_create_uuid + post_create['_instance'] = uuid + post_create['_object_type'] = object_type + transformed['_post_create'] = post_create_uuid + entities.append(post_create) + return entities def _set_defaults(entities: list[dict], supported_models: dict): @@ -164,16 +172,11 @@ def _set_slugs(entities: list[dict], supported_models: dict): def _generate_slug(object_type, data): """Generate a slug for a model instance.""" - source_field = get_field_to_slugify(object_type) - if source_field in data and data[source_field]: - return slugify(str(data[source_field])) - + source_value = get_primary_value(data, object_type) + if source_value is not None: + return slugify(str(source_value)) return None -def get_field_to_slugify(object_type): - """Get the field to use as the source for the slug.""" - return get_primary_value_field(object_type, _DEFAULT_SLUG_SOURCE_FIELD_NAME) - def _fingerprint_dedupe(entities: list[dict]) -> list[dict]: by_fp = {} deduplicated = [] @@ -211,10 +214,12 @@ def _resolve_existing_references(entities: list[dict]) -> list[dict]: seen = {} new_refs = {} resolved = [] + for data in entities: object_type = data['_object_type'] data = copy.deepcopy(data) _update_resolved_refs(data, new_refs) + existing = find_existing_object(data, object_type) if existing is not None: logger.error(f"existing {data} -> {existing}") @@ -267,36 +272,30 @@ def cleanup_unresolved_references(data: dict) -> list[str]: # TODO maps return sorted(unresolved) -def _move_if_unresolved(entities: list[dict]) -> list[str]: - min_index = {} +def _handle_post_creates(entities: list[dict]) -> list[str]: + """Merges any unnecessary 
post-create steps for existing objects.""" by_uuid = {x['_uuid']: x for x in entities} - - cur = 1 + out = [] for entity in entities: - min_index[entity['_uuid']] = cur - cur += 1 - - moves = entity.pop('_move_if_unresolved', None) - if moves is None or entity.get('_instance') is not None: + post_create = entity.pop('_post_create', None) + if post_create is None: + out.append(entity) continue - logger.debug(f" * {entity} needs circular reference moves: {moves}") - entity2 = entity.copy() - entity2['_uuid'] = str(uuid4()) - by_uuid[entity2['_uuid']] = entity2 - for field_name, uuids in moves.items(): - entity.pop(field_name, None) - for uuid in uuids: - min_index[uuid] = cur - cur += 1 - - entity2['_instance'] = entity['_uuid'] - min_index[entity2['_uuid']] = cur - cur += 1 - - in_order = sorted((min_index[x], x) for x in min_index) - return [by_uuid[x[1]] for x in in_order] - + post_create = by_uuid[post_create] + if entity.get('_instance') is not None: + # this entity has a post-create, but it has already been + # created. in this case we can just merge this entity into + # the post-create entity and skip it without worrying about + # references to it. + post_create.update(entity) + else: + # this entity will be created. + # in this case we need to fix up the identifier in the post-create + # to refer to the created object. 
+ post_create['id'] = entity['id'] + out.append(entity) + return out def _check_unresolved_refs(entities: list[dict]) -> list[str]: seen = set() diff --git a/netbox_diode_plugin/api/views.py b/netbox_diode_plugin/api/views.py index dc4ffab..5539db0 100644 --- a/netbox_diode_plugin/api/views.py +++ b/netbox_diode_plugin/api/views.py @@ -17,7 +17,7 @@ ApplyChangeSetResult, apply_changeset, ) -from netbox_diode_plugin.api.differ import Change, ChangeSet, generate_changeset +from netbox_diode_plugin.api.differ import Change, ChangeSet, ChangeType, generate_changeset from netbox_diode_plugin.api.permissions import IsDiodeWriter logger = logging.getLogger("netbox.diode_data") @@ -111,15 +111,23 @@ def post(self, request, *args, **kwargs): def _post(self, request, *args, **kwargs): data = request.data.copy() + changes = [] if 'changes' in data: - data['changes'] = [Change(**change) for change in data['changes']] - change_set = ChangeSet(**data) - - if not change_set.id: - raise ValidationError("Change set ID is required") - if not change_set.changes: - raise ValidationError("Changes are required") - + changes = [ + Change( + change_type=change.get('change_type'), + object_type=change.get('object_type'), + object_id=change.get('object_id'), + ref_id=change.get('ref_id'), + data=change.get('data'), + before=change.get('before'), + new_refs=change.get('new_refs', []), + ) for change in data['changes'] + ] + change_set = ChangeSet( + id=data.get('id'), + changes=changes, + ) try: with transaction.atomic(): result = apply_changeset(change_set) @@ -130,6 +138,7 @@ def _post(self, request, *args, **kwargs): success=False, errors=e.errors, ) + return Response(result.to_dict(), status=status.HTTP_400_BAD_REQUEST) return Response(result.to_dict(), status=status.HTTP_200_OK) diff --git a/netbox_diode_plugin/tests/test_api_apply_change_set.py b/netbox_diode_plugin/tests/test_api_apply_change_set.py index 62950d4..8fe4d15 100644 --- 
a/netbox_diode_plugin/tests/test_api_apply_change_set.py +++ b/netbox_diode_plugin/tests/test_api_apply_change_set.py @@ -178,19 +178,20 @@ class ApplyChangeSetTestCase(BaseApplyChangeSet): @staticmethod def get_change_id(payload, index): """Get change_id from payload.""" - return payload.get("change_set")[index].get("change_id") + return payload.get("changes")[index].get("change_id") def test_change_type_create_return_200(self): """Test create change_type with successful.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "dcim.site", "object_id": None, + "ref_id": "1", "data": { "name": "Site A", "slug": "site-a", @@ -208,6 +209,7 @@ def test_change_type_create_return_200(self): "object_version": None, "object_type": "dcim.interface", "object_id": None, + "ref_id": "2", "data": { "name": "Interface 1", "device": self.devices[1].pk, @@ -220,11 +222,11 @@ def test_change_type_create_return_200(self): "object_version": None, "object_type": "ipam.ipaddress", "object_id": None, + "ref_id": "3", "data": { "address": "192.163.2.1/24", - "assigned_object": { - "interface": {"id": self.interfaces[2].pk}, - }, + "assigned_object_type": "dcim.interface", + "assigned_object_id": self.interfaces[2].pk }, }, ], @@ -232,13 +234,13 @@ def test_change_type_create_return_200(self): response = self.send_request(payload) - self.assertEqual(response.json().get("result"), "success") + self.assertEqual(response.json().get("success"), True) def test_change_type_update_return_200(self): """Test update change_type with successful.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "update", @@ -265,20 +267,21 @@ def test_change_type_update_return_200(self): site_updated = Site.objects.get(id=20) - 
self.assertEqual(response.json().get("result"), "success") + self.assertEqual(response.json().get("success"), True) self.assertEqual(site_updated.name, "Site A") def test_change_type_create_with_error_return_400(self): """Test create change_type with wrong payload.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "dcim.site", "object_id": None, + "ref_id": "1", "data": { "name": "Site A", "slug": "site-a", @@ -297,22 +300,18 @@ def test_change_type_create_with_error_return_400(self): site_created = Site.objects.filter(name="Site A") - self.assertEqual(response.json().get("result"), "failed") - self.assertEqual( - response.json().get("errors")[0].get("change_id"), - self.get_change_id(payload, 0), - ) + self.assertEqual(response.json().get("success"), False) self.assertIn( 'Expected a list of items but got type "int".', - response.json().get("errors")[0].get("asns"), + response.json().get("errors", {}).get("changes[0].asns", []), ) self.assertFalse(site_created.exists()) def test_change_type_update_with_error_return_400(self): """Test update change_type with wrong payload.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "update", @@ -337,28 +336,25 @@ def test_change_type_update_with_error_return_400(self): site_updated = Site.objects.get(id=20) - self.assertEqual(response.json().get("result"), "failed") - self.assertEqual( - response.json().get("errors")[0].get("change_id"), - self.get_change_id(payload, 0), - ) + self.assertEqual(response.json().get("success"), False) self.assertIn( 'Expected a list of items but got type "int".', - response.json().get("errors")[0].get("asns"), + response.json().get("errors", {}).get("changes[0].asns", []), ) self.assertEqual(site_updated.name, "Site 2") def 
test_change_type_create_with_multiples_objects_return_200(self): """Test create change type with two objects.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "dcim.site", "object_id": None, + "ref_id": "1", "data": { "name": "Site Z", "slug": "site-z", @@ -376,6 +372,7 @@ def test_change_type_create_with_multiples_objects_return_200(self): "object_version": None, "object_type": "dcim.device", "object_id": None, + "ref_id": "2", "data": { "device_type": self.device_types[1].pk, "role": self.roles[1].pk, @@ -390,13 +387,13 @@ def test_change_type_create_with_multiples_objects_return_200(self): response = self.send_request(payload) - self.assertEqual(response.json().get("result"), "success") + self.assertEqual(response.json().get("success"), True) def test_change_type_update_with_multiples_objects_return_200(self): """Test update change type with two objects.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "update", @@ -437,21 +434,22 @@ def test_change_type_update_with_multiples_objects_return_200(self): site_updated = Site.objects.get(id=20) device_updated = Device.objects.get(id=10) - self.assertEqual(response.json().get("result"), "success") + self.assertEqual(response.json().get("success"), True) self.assertEqual(site_updated.name, "Site A") self.assertEqual(device_updated.name, "Test Device 3") def test_change_type_create_and_update_with_error_in_one_object_return_400(self): """Test create and update change type with one object with error.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "dcim.site", "object_id": None, + "ref_id": 
"1", "data": { "name": "Site Z", "slug": "site-z", @@ -486,14 +484,10 @@ def test_change_type_create_and_update_with_error_in_one_object_return_400(self) site_created = Site.objects.filter(name="Site Z") device_created = Device.objects.filter(name="Test Device 4") - self.assertEqual(response.json().get("result"), "failed") - self.assertEqual( - response.json().get("errors")[0].get("change_id"), - self.get_change_id(payload, 1), - ) + self.assertEqual(response.json().get("success"), False) self.assertIn( - "Related object not found using the provided numeric ID", - response.json().get("errors")[0].get("device_type"), + "Related object not found using the provided numeric ID: 3", + response.json().get("errors", {}).get("changes[1].device_type", []), ) self.assertFalse(site_created.exists()) self.assertFalse(device_created.exists()) @@ -501,14 +495,15 @@ def test_change_type_create_and_update_with_error_in_one_object_return_400(self) def test_multiples_create_type_error_in_two_objects_return_400(self): """Test create with error in two objects.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "dcim.site", "object_id": None, + "ref_id": "1", "data": { "name": "Site Z", "slug": "site-z", @@ -526,6 +521,7 @@ def test_multiples_create_type_error_in_two_objects_return_400(self): "object_version": None, "object_type": "dcim.device", "object_id": None, + "ref_id": "2", "data": { "device_type": 3, "role": self.roles[1].pk, @@ -541,6 +537,7 @@ def test_multiples_create_type_error_in_two_objects_return_400(self): "object_version": None, "object_type": "dcim.device", "object_id": None, + "ref_id": "3", "data": { "device_type": 100, "role": 10, @@ -558,24 +555,11 @@ def test_multiples_create_type_error_in_two_objects_return_400(self): site_created = Site.objects.filter(name="Site Z") device_created = 
Device.objects.filter(name="Test Device 4") - self.assertEqual(response.json().get("result"), "failed") + self.assertEqual(response.json().get("success"), False) - self.assertEqual( - response.json().get("errors")[0].get("change_id"), - self.get_change_id(payload, 1), - ) - self.assertIn( - "Related object not found using the provided numeric ID", - response.json().get("errors")[0].get("device_type"), - ) - - self.assertEqual( - response.json().get("errors")[1].get("change_id"), - self.get_change_id(payload, 2), - ) self.assertIn( - "Related object not found using the provided numeric ID", - response.json().get("errors")[1].get("device_type"), + "Related object not found using the provided numeric ID: 3", + response.json().get("errors", {}).get("changes[1].device_type", []), ) self.assertFalse(site_created.exists()) @@ -584,8 +568,8 @@ def test_multiples_create_type_error_in_two_objects_return_400(self): def test_change_type_update_with_object_id_not_exist_return_400(self): """Test update object with nonexistent object_id.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "update", @@ -612,14 +596,17 @@ def test_change_type_update_with_object_id_not_exist_return_400(self): site_updated = Site.objects.get(id=20) - self.assertEqual(response.json()[0], "object with id 30 does not exist") + self.assertIn( + "dcim.site with id 30 does not exist", + response.json().get("errors", {}).get("changes[0].object_id", []), + ) self.assertEqual(site_updated.name, "Site 2") def test_change_set_id_field_not_provided_return_400(self): """Test update object with change_set_id incorrect.""" payload = { - "change_set_id": None, - "change_set": [ + "id": None, + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "update", @@ -642,21 +629,21 @@ def test_change_set_id_field_not_provided_return_400(self): response = self.send_request(payload, 
status_code=status.HTTP_400_BAD_REQUEST) - self.assertIsNone(response.json().get("errors")[0].get("change_id")) + self.assertIsNone(response.json().get("errors", {}).get("change_id", None)) self.assertEqual( - response.json().get("errors")[0].get("change_set_id"), - "This field may not be null.", + response.json().get("errors", {}).get("id", []), + ["Change set ID is required"], ) - def test_change_set_id_change_id_and_change_type_field_not_provided_return_400( + def test_change_type_field_not_provided_return_400( self, ): - """Test update object with change_set_id, change_id, and change_type incorrect.""" + """Test update object with change_type incorrect.""" payload = { - "change_set_id": "", - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { - "change_id": "", + "change_id": str(uuid.uuid4()), "change_type": "", "object_version": None, "object_type": "dcim.site", @@ -677,35 +664,23 @@ def test_change_set_id_change_id_and_change_type_field_not_provided_return_400( response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - self.assertEqual( - response.json().get("errors")[0].get("change_set_id"), - "Must be a valid UUID.", - ) - self.assertEqual( - response.json().get("errors")[1].get("change_id"), - "Must be a valid UUID.", - ) - self.assertEqual( - response.json().get("errors")[1].get("change_type"), - "This field may not be blank.", + self.assertIn( + "Unsupported change type ''", + response.json().get("errors", {}).get("changes[0].change_type", []), ) def test_change_set_id_field_and_change_set_not_provided_return_400(self): """Test update object with change_set_id and change_set incorrect.""" payload = { - "change_set_id": "", - "change_set": [], + "id": "", + "changes": [], } response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - self.assertEqual( - response.json().get("errors")[0].get("change_set_id"), - "Must be a valid UUID.", - ) - self.assertEqual( - 
response.json().get("errors")[1].get("change_set"), - "This list may not be empty.", + self.assertIn( + "Change set ID is required", + response.json().get("errors", {}).get("id", []), ) def test_change_type_and_object_type_provided_return_400( @@ -713,14 +688,15 @@ def test_change_type_and_object_type_provided_return_400( ): """Test change_type and object_type incorrect.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": None, "object_version": None, "object_type": "", "object_id": None, + "ref_id": "1", "data": { "name": "Site A", "slug": "site-a", @@ -737,6 +713,7 @@ def test_change_type_and_object_type_provided_return_400( "object_version": None, "object_type": "dcim.site", "object_id": None, + "ref_id": "2", "data": { "name": "Site Z", "slug": "site-z", @@ -752,240 +729,225 @@ def test_change_type_and_object_type_provided_return_400( response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - # First item of change_set - self.assertEqual( - response.json().get("errors")[0].get("change_id"), - self.get_change_id(payload, 0), - ) - self.assertEqual( - response.json().get("errors")[0].get("change_type"), - "This field may not be null.", - ) - self.assertEqual( - response.json().get("errors")[0].get("object_type"), - "This field may not be blank.", - ) - - # Second item of change_set - self.assertEqual( - response.json().get("errors")[1].get("change_id"), - self.get_change_id(payload, 1), - ) - self.assertEqual( - response.json().get("errors")[1].get("change_type"), - "This field may not be blank.", + self.assertIn( + "Unsupported change type 'None'", + response.json().get("errors", {}).get("changes[0].change_type", []), ) + # self.assertEqual( + # response.json().get("errors")[0].get("change_type"), + # "This field may not be null.", + # ) + # self.assertEqual( + # response.json().get("errors")[0].get("object_type"), + # "This field 
may not be blank.", + # ) + + # # Second item of change_set + # self.assertEqual( + # response.json().get("errors")[1].get("change_id"), + # self.get_change_id(payload, 1), + # ) + # self.assertEqual( + # response.json().get("errors")[1].get("change_type"), + # "This field may not be blank.", + # ) def test_create_ip_address_return_200(self): """Test create ip_address with successful.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "ipam.ipaddress", "object_id": None, + "ref_id": "1", "data": { "address": "192.161.3.1/24", - "assigned_object": { - "interface": { - "name": self.interfaces[3].name, - "device": { - "name": self.devices[0].name, - "site": {"name": self.sites[0].name}, - }, - }, - }, + "assigned_object_id": self.interfaces[3].pk, + "assigned_object_type": "dcim.interface", }, }, ], } response = self.send_request(payload) - self.assertEqual(response.json().get("result"), "success") - - def test_create_ip_address_return_400(self): - """Test create ip_address with missing interface name.""" - payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ - { - "change_id": str(uuid.uuid4()), - "change_type": "create", - "object_version": None, - "object_type": "ipam.ipaddress", - "object_id": None, - "data": { - "address": "192.161.3.1/24", - "assigned_object": { - "interface": { - # Forcing to miss the name of the interface - "device": { - "name": self.devices[0].name, - "site": {"name": self.sites[0].name}, - }, - }, - }, - }, - }, - ], - } - response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - - self.assertIn( - "not sufficient to retrieve interface", - response.json().get("errors")[0].get("assigned_object"), - ) - - def test_create_ip_address_not_exist_interface_return_400(self): - """Test create ip_address with not valid interface.""" - payload = { - 
"change_set_id": str(uuid.uuid4()), - "change_set": [ - { - "change_id": str(uuid.uuid4()), - "change_type": "create", - "object_version": None, - "object_type": "ipam.ipaddress", - "object_id": None, - "data": { - "address": "192.161.3.1/24", - "assigned_object": { - "interface": { - "name": "not_exist", - "device": { - "name": self.devices[0].name, - "site": {"name": self.sites[0].name}, - }, - }, - }, - }, - }, - ], - } - response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - - self.assertIn( - "does not exist", - response.json().get("errors")[0].get("assigned_object"), - ) - - def test_create_ip_address_missing_device_interface_return_400(self): - """Test create ip_address with missing device interface name.""" - payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ - { - "change_id": str(uuid.uuid4()), - "change_type": "create", - "object_version": None, - "object_type": "ipam.ipaddress", - "object_id": None, - "data": { - "address": "192.161.3.1/24", - "assigned_object": { - "interface": { - "name": "not_exist", - "device": { - "site": {"name": self.sites[0].name}, - }, - }, - }, - }, - }, - ], - } - response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - - self.assertIn( - "Interface device needs to have either id or name provided", - response.json().get("errors")[0].get("assigned_object"), - ) - - def test_create_ip_address_missing_interface_device_site_return_400(self): - """Test create ip_address with missing interface device site name.""" - payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ - { - "change_id": str(uuid.uuid4()), - "change_type": "create", - "object_version": None, - "object_type": "ipam.ipaddress", - "object_id": None, - "data": { - "address": "192.161.3.1/24", - "assigned_object": { - "interface": { - "name": "not_exist", - "device": { - "name": self.devices[0].name, - "site": {"facility": "Betha"}, - }, - }, - }, - }, - }, - ], - } - response = 
self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - - self.assertIn( - "Interface device site needs to have either id or name provided", - response.json().get("errors")[0].get("assigned_object"), - ) - - def test_primary_ip_address_not_found_return_400(self): - """Test update primary ip address with site name.""" - payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ - { - "change_id": str(uuid.uuid4()), - "change_type": "update", - "object_version": None, - "object_type": "dcim.device", - "data": { - "name": self.devices[0].name, - "site": {"name": self.sites[0].name}, - "primary_ip6": { - "address": "2001:DB8:0000:0000:244:17FF:FEB6:D37D/64", - }, - }, - }, - ], - } - response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - - self.assertEqual(response.json()[0], "primary IP not found") + self.assertEqual(response.json().get("success"), True) + + # def test_create_ip_address_return_400(self): + # """Test create ip_address with missing interface name.""" + # payload = { + # "id": str(uuid.uuid4()), + # "change_set": [ + # { + # "change_id": str(uuid.uuid4()), + # "change_type": "create", + # "object_version": None, + # "object_type": "ipam.ipaddress", + # "object_id": None, + # "data": { + # "address": "192.161.3.1/24", + # "assigned_object": { + # "interface": { + # # Forcing to miss the name of the interface + # "device": { + # "name": self.devices[0].name, + # "site": {"name": self.sites[0].name}, + # }, + # }, + # }, + # }, + # }, + # ], + # } + # response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) + + # self.assertIn( + # "not sufficient to retrieve interface", + # response.json().get("errors")[0].get("assigned_object"), + # ) + + # def test_create_ip_address_not_exist_interface_return_400(self): + # """Test create ip_address with not valid interface.""" + # payload = { + # "id": str(uuid.uuid4()), + # "changes": [ + # { + # "change_id": str(uuid.uuid4()), + # "change_type": 
"create", + # "object_version": None, + # "object_type": "ipam.ipaddress", + # "object_id": None, + # "data": { + # "address": "192.161.3.1/24", + # "assigned_object": { + # "interface": { + # "name": "not_exist", + # "device": { + # "name": self.devices[0].name, + # "site": {"name": self.sites[0].name}, + # }, + # }, + # }, + # }, + # }, + # ], + # } + # response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) + + # self.assertIn( + # "does not exist", + # response.json().get("errors")[0].get("assigned_object"), + # ) + + # def test_create_ip_address_missing_device_interface_return_400(self): + # """Test create ip_address with missing device interface name.""" + # payload = { + # "id": str(uuid.uuid4()), + # "changes": [ + # { + # "change_id": str(uuid.uuid4()), + # "change_type": "create", + # "object_version": None, + # "object_type": "ipam.ipaddress", + # "object_id": None, + # "ref_id": "1", + # "data": { + # "address": "192.161.3.1/24", + # "assigned_object": { + # "interface": { + # "name": "not_exist", + # "device": { + # "site": {"name": self.sites[0].name}, + # }, + # }, + # }, + # }, + # }, + # ], + # } + # response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) + + # self.assertIn( + # "Interface device needs to have either id or name provided", + # response.json().get("errors", {}) # .get("assigned_object"), + # ) + + # def test_create_ip_address_missing_interface_device_site_return_400(self): + # """Test create ip_address with missing interface device site name.""" + # payload = { + # "id": str(uuid.uuid4()), + # "changes": [ + # { + # "change_id": str(uuid.uuid4()), + # "change_type": "create", + # "object_version": None, + # "object_type": "ipam.ipaddress", + # "object_id": None, + # "ref_id": "1", + # "data": { + # "address": "192.161.3.1/24", + # "assigned_object": { + # "interface": { + # "name": "not_exist", + # "device": { + # "name": self.devices[0].name, + # "site": {"facility": "Betha"}, + # }, + 
# }, + # }, + # }, + # }, + # ], + # } + # response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) + + # self.assertIn( + # "Interface device site needs to have either id or name provided", + # response.json().get("errors")[0].get("assigned_object"), + # ) + + # def test_primary_ip_address_not_found_return_400(self): + # """Test update primary ip address with site name.""" + # payload = { + # "id": str(uuid.uuid4()), + # "changes": [ + # { + # "change_id": str(uuid.uuid4()), + # "change_type": "update", + # "object_version": None, + # "object_type": "dcim.device", + # "data": { + # "name": self.devices[0].name, + # "site": {"name": self.sites[0].name}, + # "primary_ip6": { + # "address": "2001:DB8:0000:0000:244:17FF:FEB6:D37D/64", + # }, + # }, + # }, + # ], + # } + # response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) + + # self.assertEqual(response.json()[0], "primary IP not found") def test_add_primary_ip_address_to_device(self): """Add primary ip address to device.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "update", "object_version": None, "object_type": "dcim.device", + "object_id": self.devices[0].pk, "data": { "name": self.devices[0].name, "site": {"name": self.sites[0].name}, - "primary_ip4": { - "address": str(self.ip_addresses[0].address), - "assigned_object": { - "interface": { - "name": self.interfaces[0].name, - "device": { - "name": self.devices[0].name, - "site": {"name": self.sites[0].name}, - }, - }, - }, - }, + "primary_ip4": self.ip_addresses[0].pk }, }, ], @@ -995,229 +957,124 @@ def test_add_primary_ip_address_to_device(self): device_updated = Device.objects.get(id=10) - self.assertEqual(response.json().get("result"), "success") + self.assertEqual(response.json().get("success"), True) self.assertEqual(device_updated.name, self.devices[0].name) 
self.assertEqual(device_updated.primary_ip4, self.ip_addresses[0]) - def test_create_and_update_interface_with_compat_mac_address_field(self): - """Test create interface using backward compatible mac_address field.""" - payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ - { - "change_id": str(uuid.uuid4()), - "change_type": "create", - "object_version": None, - "object_type": "dcim.interface", - "object_id": None, - "data": { - "name": "Interface 6", - "type": "virtual", - "mac_address": "00:00:00:00:00:01", - "device": { - "id": self.devices[1].pk, - }, - }, - }, - ], - } - - response = self.send_request(payload) - self.assertEqual(response.json().get("result"), "success") - self.assertEqual(Interface.objects.count(), 6) - interface_id = Interface.objects.order_by('-id').first().id - self.assertEqual(Interface.objects.get(id=interface_id).mac_address, "00:00:00:00:00:01") - - payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ - { - "change_id": str(uuid.uuid4()), - "change_type": "update", - "object_version": None, - "object_type": "dcim.interface", - "object_id": interface_id, - "data": { - "name": "Interface 6", - "mac_address": "00:00:00:00:00:02", - "type": "virtual", - "device": { - "id": self.devices[1].pk, - }, - }, - }, - ], - } - response = self.send_request(payload) - self.assertEqual(response.json().get("result"), "success") - self.assertEqual(response.json().get("result"), "success") - self.assertEqual(Interface.objects.count(), 6) - self.assertEqual(Interface.objects.get(id=interface_id).mac_address, "00:00:00:00:00:02") - - def test_create_and_update_vminterface_with_compat_mac_address_field(self): - """Test create vminterface using backward compatible mac_address field.""" - payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ - { - "change_id": str(uuid.uuid4()), - "change_type": "create", - "object_version": None, - "object_type": "virtualization.vminterface", - "object_id": None, - "data": { - 
"name": "VM Interface 1", - "mac_address": "00:00:00:00:00:01", - "virtual_machine": { - "id": self.virtual_machines[0].pk, - }, - }, - }, - ], - } - - response = self.send_request(payload) - self.assertEqual(response.json().get("result"), "success") - self.assertEqual(VMInterface.objects.count(), 1) - interface_id = VMInterface.objects.order_by('-id').first().id - self.assertEqual(VMInterface.objects.get(id=interface_id).mac_address, "00:00:00:00:00:01") - - payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ - { - "change_id": str(uuid.uuid4()), - "change_type": "update", - "object_version": None, - "object_type": "virtualization.vminterface", - "object_id": interface_id, - "data": { - "name": "VM Interface 1", - "mac_address": "00:00:00:00:00:02", - "virtual_machine": { - "id": self.virtual_machines[0].pk, - }, - }, - }, - ], - } - response = self.send_request(payload) - self.assertEqual(response.json().get("result"), "success") - self.assertEqual(VMInterface.objects.count(), 1) - self.assertEqual(VMInterface.objects.get(id=interface_id).mac_address, "00:00:00:00:00:02") - def test_create_prefix_with_site_stored_as_scope(self): """Test create prefix with site stored as scope.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "ipam.prefix", "object_id": None, + "ref_id": "1", "data": { "prefix": "192.168.0.0/24", - "site": { - "name": self.sites[0].name, - }, + "scope_id": self.sites[0].pk, + "scope_type": "dcim.site", }, }, ], } response = self.send_request(payload) - self.assertEqual(response.json().get("result"), "success") + self.assertEqual(response.json().get("success"), True) self.assertEqual(Prefix.objects.get(prefix="192.168.0.0/24").scope, self.sites[0]) def test_create_prefix_with_unknown_site_fails(self): """Test create prefix with unknown site fails.""" payload = { - 
"change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "ipam.prefix", "object_id": None, + "ref_id": "1", "data": { "prefix": "192.168.0.0/24", - "site": { - "name": "unknown site" - }, + "scope_id": 99, + "scope_type": "dcim.site", }, }, ], } response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - self.assertEqual(response.json().get("result"), "failed") + self.assertEqual(response.json().get("success"), False) self.assertIn( - 'site with name unknown site does not exist', - response.json().get("errors")[0].get("site"), + 'Please select a site.', + response.json().get("errors", {}).get("changes[0].scope", []), ) self.assertFalse(Prefix.objects.filter(prefix="192.168.0.0/24").exists()) def test_create_virtualization_cluster_with_site_stored_as_scope(self): """Test create cluster with site stored as scope.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "virtualization.cluster", "object_id": None, + "ref_id": "1", "data": { "name": "Cluster 3", "type": { "name": self.cluster_types[0].name, }, - "site": { - "name": self.sites[0].name, - }, + "scope_id": self.sites[0].pk, + "scope_type": "dcim.site", }, }, ], } response = self.send_request(payload) - self.assertEqual(response.json().get("result"), "success") + self.assertEqual(response.json().get("success"), True) self.assertEqual(Cluster.objects.get(name="Cluster 3").scope, self.sites[0]) def test_create_virtualmachine_with_cluster_site_stored_as_scope(self): """Test create virtualmachine with cluster site stored as scope.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ + { + "change_id": str(uuid.uuid4()), + "change_type": 
"update", + "object_version": None, + "object_type": "virtualization.cluster", + "object_id": self.clusters[0].pk, + "data": { + "scope_id": self.sites[0].pk, + "scope_type": "dcim.site", + }, + }, { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "virtualization.virtualmachine", "object_id": None, + "ref_id": "1", "data": { "name": "VM foobar", - "site": { - "name": self.sites[0].name, - }, - "cluster": { - "name": self.clusters[0].name, - "type": { - "name": self.cluster_types[0].name, - }, - "site": { - "name": self.sites[0].name, - }, - }, + "site": self.sites[0].pk, + "cluster": self.clusters[0].pk }, }, ], } response = self.send_request(payload) - self.assertEqual(response.json().get("result"), "success") + self.assertEqual(response.json().get("success"), True) self.assertEqual(VirtualMachine.objects.get(name="VM foobar", site_id=self.sites[0].id).cluster.scope, self.sites[0]) diff --git a/netbox_diode_plugin/tests/test_api_diff_and_apply.py b/netbox_diode_plugin/tests/test_api_diff_and_apply.py new file mode 100644 index 0000000..6aae051 --- /dev/null +++ b/netbox_diode_plugin/tests/test_api_diff_and_apply.py @@ -0,0 +1,97 @@ +#!/usr/bin/env python +# Copyright 2024 NetBox Labs Inc +"""Diode NetBox Plugin - Tests.""" + +from dcim.models import Interface, Site +from django.contrib.auth import get_user_model +from rest_framework import status +from users.models import Token +from utilities.testing import APITestCase + +User = get_user_model() + + +class GenerateDiffAndApplyTestCase(APITestCase): + """GenerateDiff -> ApplyChangeSet test cases.""" + + def setUp(self): + """Set up the test case.""" + self.diff_url = "/netbox/api/plugins/diode/generate-diff/" + self.apply_url = "/netbox/api/plugins/diode/apply-change-set/" + self.user = User.objects.create_user(username="testcommonuser") + self.user_token = Token.objects.create(user=self.user) + self.user_header = {"HTTP_AUTHORIZATION": f"Token 
{self.user_token.key}"} + + self.add_permissions("netbox_diode_plugin.add_diode") + + def test_generate_diff_and_apply_create_site(self): + """Test generate diff and apply create site.""" + """Test generate diff create site.""" + payload = { + "timestamp": 1, + "object_type": "dcim.site", + "entity": { + "site": { + "name": "Generate Diff and Apply Site", + "slug": "generate-diff-and-apply-site", + }, + } + } + + _, response = self.diff_and_apply(payload) + self.assertEqual(response.json().get("success"), True) + + new_site = Site.objects.get(name="Generate Diff and Apply Site") + self.assertEqual(new_site.slug, "generate-diff-and-apply-site") + + def test_generate_diff_and_apply_create_interface_with_primay_mac_address(self): + """Test generate diff and apply create interface with primary mac address.""" + payload = { + "timestamp": 1, + "object_type": "dcim.interface", + "entity": { + "interface": { + "name": "Interface 1x", + "type": "1000base-t", + "device": { + "name": "Device 1x", + "role": { + "Name": "Role ABC", + }, + "site": { + "Name": "Site ABC", + }, + "deviceType": { + "manufacturer": { + "Name": "Manufacturer A", + }, + "model": "Device Type A", + }, + }, + "primaryMacAddress": { + "mac_address": "00:00:00:00:00:01", + }, + }, + } + } + + _, response = self.diff_and_apply(payload) + self.assertEqual(response.json().get("success"), True) + + new_interface = Interface.objects.get(name="Interface 1x") + self.assertEqual(new_interface.primary_mac_address.mac_address, "00:00:00:00:00:01") + + + def diff_and_apply(self, payload): + """Diff and apply the payload.""" + response1 = self.client.post( + self.diff_url, data=payload, format="json", **self.user_header + ) + self.assertEqual(response1.status_code, status.HTTP_200_OK) + diff = response1.json() + + response2 = self.client.post( + self.apply_url, data=diff, format="json", **self.user_header + ) + self.assertEqual(response2.status_code, status.HTTP_200_OK) + return (response1, response2) diff --git 
class GenerateDiffTestCase(APITestCase):
    """GenerateDiff endpoint test cases."""

    def setUp(self):
        """Create the requesting user, its token, and a pre-existing site."""
        self.url = "/netbox/api/plugins/diode/generate-diff/"

        self.user = User.objects.create_user(username="testcommonuser")
        self.add_permissions("netbox_diode_plugin.add_diode")
        self.user_token = Token.objects.create(user=self.user)

        self.user_header = {"HTTP_AUTHORIZATION": f"Token {self.user_token.key}"}

        # Existing site used by the "update" scenario: posting an entity that
        # matches it should yield an update diff rather than a create.
        self.site = Site.objects.create(
            name="Site Generate Diff 1",
            slug="site-generate-diff-1",
            facility="Alpha",
            description="First test site",
            physical_address="123 Fake St Lincoln NE 68588",
            shipping_address="123 Fake St Lincoln NE 68588",
            comments="Lorem ipsum etcetera",
        )

    def test_generate_diff_create_site(self):
        """An unknown site entity produces a single 'create' change carrying a ref_id."""
        payload = {
            "timestamp": 1,
            "object_type": "dcim.site",
            "entity": {
                "site": {
                    "name": "A New Site",
                    "slug": "a-new-site",
                },
            }
        }

        response = self.send_request(payload)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        cs = response.json()
        self.assertIsNotNone(cs.get("id"))
        changes = cs.get("changes", [])
        self.assertEqual(len(changes), 1)
        change = changes[0]
        self.assertEqual(change.get("object_type"), "dcim.site")
        self.assertEqual(change.get("change_type"), "create")
        # A create has no concrete object_id yet, only a placeholder ref_id.
        self.assertEqual(change.get("object_id"), None)
        self.assertIsNotNone(change.get("ref_id"))

        data = change.get("data", {})
        self.assertEqual(data.get("name"), "A New Site")
        self.assertEqual(data.get("slug"), "a-new-site")

    def test_generate_diff_update_site(self):
        """A known site posted with a changed field produces a single 'update' change."""
        payload = {
            "timestamp": 1,
            "object_type": "dcim.site",
            "entity": {
                "site": {
                    "name": "Site Generate Diff 1",
                    "slug": "site-generate-diff-1",
                    "comments": "An updated comment",
                },
            }
        }

        response = self.send_request(payload)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        cs = response.json()
        self.assertIsNotNone(cs.get("id"))
        changes = cs.get("changes", [])
        self.assertEqual(len(changes), 1)
        change = changes[0]
        self.assertEqual(change.get("object_type"), "dcim.site")
        self.assertEqual(change.get("change_type"), "update")
        # An update targets the existing row, so object_id is set and ref_id is not.
        self.assertEqual(change.get("object_id"), self.site.id)
        self.assertEqual(change.get("ref_id"), None)

        data = change.get("data", {})
        self.assertEqual(data.get("name"), "Site Generate Diff 1")
        self.assertEqual(data.get("slug"), "site-generate-diff-1")
        self.assertEqual(data.get("comments"), "An updated comment")

    def send_request(self, payload, status_code=status.HTTP_200_OK):
        """Post the payload to the url, assert the expected status and return the response."""
        response = self.client.post(
            self.url, data=payload, format="json", **self.user_header
        )
        self.assertEqual(response.status_code, status_code)
        return response
django.contrib.auth import get_user_model -from ipam.models import IPAddress -from netaddr import IPNetwork -from rest_framework import status -from users.models import Token -from utilities.testing import APITestCase -from virtualization.models import Cluster, ClusterType - -User = get_user_model() - - -class ObjectStateTestCase(APITestCase): - """ObjectState test cases.""" - - @classmethod - def setUpClass(cls): - """Set up class.""" - super().setUpClass() - - cls.sites = ( - Site( - name="Site 1", - slug="site-1", - facility="Alpha", - description="First test site", - physical_address="123 Fake St Lincoln NE 68588", - shipping_address="123 Fake St Lincoln NE 68588", - comments="Lorem ipsum etcetera", - ), - Site( - name="Site 2", - slug="site-2", - facility="Bravo", - description="Second test site", - physical_address="725 Cyrus Valleys Suite 761 Douglasfort NE 57761", - shipping_address="725 Cyrus Valleys Suite 761 Douglasfort NE 57761", - comments="Lorem ipsum etcetera", - ), - Site( - name="Site 3", - slug="site-3", - facility="Charlie", - description="Third test site", - physical_address="2321 Dovie Dale East Cristobal AK 71959", - shipping_address="2321 Dovie Dale East Cristobal AK 71959", - comments="Lorem ipsum etcetera", - ), - ) - Site.objects.bulk_create(cls.sites) - - cls.manufacturer = ( - Manufacturer(name="Cisco", slug="cisco"), - Manufacturer(name="Manufacturer 2", slug="manufacturer-2"), - ) - - Manufacturer.objects.bulk_create(cls.manufacturer) - - cls.device_types = ( - DeviceType( - manufacturer=cls.manufacturer[0], - model="ISR4321", - slug="isr4321", - ), - DeviceType( - manufacturer=cls.manufacturer[1], - model="ISR4321", - slug="isr4321", - ), - DeviceType( - manufacturer=cls.manufacturer[1], - model="Device Type 2", - slug="device-type-2", - u_height=2, - ), - ) - DeviceType.objects.bulk_create(cls.device_types) - - cls.roles = ( - DeviceRole(name="Device Role 1", slug="device-role-1", color="ff0000"), - DeviceRole(name="Device Role 2", 
slug="device-role-2", color="00ff00"), - ) - DeviceRole.objects.bulk_create(cls.roles) - - cls.racks = ( - Rack(name="Rack 1", site=cls.sites[0]), - Rack(name="Rack 2", site=cls.sites[1]), - ) - Rack.objects.bulk_create(cls.racks) - - cluster_type = ClusterType.objects.create( - name="Cluster Type 1", slug="cluster-type-1" - ) - - cls.clusters = ( - Cluster(name="Cluster 1", type=cluster_type), - Cluster(name="Cluster 2", type=cluster_type), - ) - Cluster.objects.bulk_create(cls.clusters) - - cls.devices = ( - Device( - id=10, - device_type=cls.device_types[0], - role=cls.roles[0], - name="Device 1", - site=cls.sites[0], - rack=cls.racks[0], - cluster=cls.clusters[0], - local_context_data={"A": 1}, - ), - Device( - id=20, - device_type=cls.device_types[0], - role=cls.roles[0], - name="Device 2", - site=cls.sites[0], - rack=cls.racks[0], - cluster=cls.clusters[0], - local_context_data={"B": 2}, - ), - ) - Device.objects.bulk_create(cls.devices) - - cls.interfaces = ( - Interface(name="Interface 1", device=cls.devices[0], type="1000baset"), - Interface(name="Interface 2", device=cls.devices[0], type="1000baset"), - Interface(name="Interface 3", device=cls.devices[0], type="1000baset"), - Interface(name="Interface 4", device=cls.devices[0], type="1000baset"), - Interface(name="Interface 5", device=cls.devices[0], type="1000baset"), - ) - Interface.objects.bulk_create(cls.interfaces) - - cls.ip_addresses = ( - IPAddress( - address=IPNetwork("10.0.0.1/24"), assigned_object=cls.interfaces[0] - ), - IPAddress( - address=IPNetwork("192.0.2.1/24"), assigned_object=cls.interfaces[1] - ), - ) - IPAddress.objects.bulk_create(cls.ip_addresses) - - def setUp(self): - """Set up test.""" - self.root_user = User.objects.create_user( - username="root_user", is_staff=True, is_superuser=True - ) - self.root_token = Token.objects.create(user=self.root_user) - - self.user = User.objects.create_user(username="testcommonuser") - self.add_permissions("netbox_diode_plugin.view_diode") - 
self.user_token = Token.objects.create(user=self.user) - - # another_user does not have permission. - self.another_user = User.objects.create_user(username="another_user") - self.another_user_token = Token.objects.create(user=self.another_user) - - self.root_header = {"HTTP_AUTHORIZATION": f"Token {self.root_token.key}"} - self.user_header = {"HTTP_AUTHORIZATION": f"Token {self.user_token.key}"} - self.another_user_header = { - "HTTP_AUTHORIZATION": f"Token {self.another_user_token.key}" - } - - self.url = "/netbox/api/plugins/diode/object-state/" - - def test_return_object_state_using_id(self): - """Test searching using id parameter - Root User.""" - site_id = Site.objects.get(name=self.sites[0].name).id - query_parameters = {"id": site_id, "object_type": "dcim.site"} - - response = self.client.get(self.url, query_parameters, **self.root_header) - - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.json().get("object").get("name"), "Site 1") - - def test_return_object_state_using_q(self): - """Test searching using q parameter - Root User.""" - query_parameters = {"q": "Site 2", "object_type": "dcim.site"} - - response = self.client.get(self.url, query_parameters, **self.root_header) - - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.json().get("object").get("name"), "Site 2") - - def test_object_not_found_return_empty(self): - """Test empty searching - Root User.""" - query_parameters = {"q": "Site 10", "object_type": "dcim.site"} - - response = self.client.get(self.url, query_parameters, **self.root_header) - - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.json(), {}) - - def test_missing_object_type_return_400(self): - """Test API behavior with missing object type - Root User.""" - query_parameters = {"q": "Site 10", "object_type": ""} - - response = self.client.get(self.url, query_parameters, **self.root_header) - - 
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - - def test_missing_q_and_id_parameters_return_400(self): - """Test API behavior with missing q and ID parameters - Root User.""" - query_parameters = {"object_type": "dcim.site"} - - response = self.client.get(self.url, query_parameters, **self.root_header) - - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - - def test_request_user_not_authenticated_return_403(self): - """Test API behavior with user unauthenticated.""" - query_parameters = {"id": 1, "object_type": "dcim.site"} - - response = self.client.get(self.url, query_parameters) - - self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - - def test_common_user_with_permissions_get_object_state_using_id(self): - """Test searching using id parameter for Common User with permission.""" - site_id = Site.objects.get(name=self.sites[0]).id - query_parameters = {"id": site_id, "object_type": "dcim.site"} - - response = self.client.get(self.url, query_parameters, **self.user_header) - - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.json().get("object").get("name"), "Site 1") - - def test_common_user_without_permissions_get_object_state_using_id_return_403(self): - """ - Test searching using id parameter for Common User without permission. - - User has no permissions. 
- """ - query_parameters = {"id": 1, "object_type": "dcim.device"} - - response = self.client.get( - self.url, query_parameters, **self.another_user_header - ) - - self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - - def test_return_object_state_using_q_objects_with_different_manufacturer_return_cisco_manufacturer( - self, - ): - """Test searching using q parameter - DevicesTypes with different manufacturer.""" - query_parameters = { - "q": "ISR4321", - "object_type": "dcim.devicetype", - "manufacturer__name": "Cisco", - } - - response = self.client.get(self.url, query_parameters, **self.root_header) - - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.json().get("object").get("model"), "ISR4321") - self.assertEqual( - response.json().get("object").get("manufacturer").get("name"), "Cisco" - ) - - def test_invalid_object_state_using_q_objects_and_wrong_additional_attributes_return_400( - self, - ): - """Test searching using q parameter - invalid additional attributes.""" - query_parameters = { - "q": "ISR4321", - "object_type": "dcim.devicetype", - "attr_name": "manufacturer.name", - "attr_value": "Cisco", - } - - response = self.client.get(self.url, query_parameters, **self.root_header) - - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - - def test_common_user_with_permissions_get_ip_state_using_id(self): - """Test searching for ip using id.""" - query_parameters = { - "id": self.ip_addresses[0].id, - "object_type": "ipam.ipaddress", - } - - response = self.client.get(self.url, query_parameters, **self.user_header) - self.assertEqual(response.status_code, status.HTTP_200_OK) - - self.assertEqual(response.json().get("object_type"), "ipam.ipaddress") - self.assertEqual( - response.json().get("object").get("address"), - self.ip_addresses[0].address.__str__(), - ) - self.assertEqual( - response.json() - .get("object") - .get("assigned_object") - .get("interface") - .get("name"), - 
self.interfaces[0].name, - ) - - def test_common_user_with_permissions_get_device_state_using_q_objects(self): - """Test searching for device using q parameter.""" - query_parameters = { - "q": self.devices[0].name, - "object_type": "dcim.device", - "site": self.sites[0].id, - } - - response = self.client.get(self.url, query_parameters, **self.user_header) - self.assertEqual(response.status_code, status.HTTP_200_OK) - - self.assertEqual(response.json().get("object_type"), "dcim.device") - self.assertEqual( - response.json().get("object").get("name"), self.devices[0].name - ) - self.assertEqual( - response.json().get("object").get("site").get("name"), self.sites[0].name - ) - - def test_common_user_with_permissions_get_interface_state_using_q_objects(self): - """Test searching for interface using q parameter.""" - query_parameters = { - "q": self.interfaces[0].name, - "object_type": "dcim.interface", - "device": self.devices[0].id, - "device__site": self.sites[0].id, - } - - response = self.client.get(self.url, query_parameters, **self.user_header) - self.assertEqual(response.status_code, status.HTTP_200_OK) - - self.assertEqual(response.json().get("object_type"), "dcim.interface") - self.assertEqual( - response.json().get("object").get("name"), self.interfaces[0].name - ) - self.assertEqual( - response.json().get("object").get("device").get("name"), - self.devices[0].name, - ) - - def test_common_user_with_permissions_get_ip_state_using_q_objects(self): - """Test searching for ip using q parameter.""" - query_parameters = { - "q": self.ip_addresses[0].address.__str__(), - "object_type": "ipam.ipaddress", - "interface": self.interfaces[0].id, - "interface__device": self.devices[0].id, - "interface__device__site": self.sites[0].id, - } - - response = self.client.get(self.url, query_parameters, **self.user_header) - self.assertEqual(response.status_code, status.HTTP_200_OK) - - self.assertEqual(response.json().get("object_type"), "ipam.ipaddress") - self.assertEqual( 
- response.json().get("object").get("address"), - self.ip_addresses[0].address.__str__(), - ) - self.assertEqual( - response.json() - .get("object") - .get("assigned_object") - .get("interface") - .get("name"), - self.interfaces[0].name, - ) - - def test_common_user_get_object_state_with_branch_parameter_specified(self): - """Test searching accepts _branch parameter with additional attributes specified.""" - query_parameters = { - "q": self.ip_addresses[0].address.__str__(), - "object_type": "ipam.ipaddress", - "interface": self.interfaces[0].id, - "_branch": "" - } - - response = self.client.get(self.url, query_parameters, **self.user_header) - - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.json().get("object_type"), "ipam.ipaddress") diff --git a/pyproject.toml b/pyproject.toml index 35f4dea..2a99b4e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -56,6 +56,8 @@ build-backend = "setuptools.build_meta" line-length = 140 exclude = [ "*_pb2*", + "netbox_diode_plugin/api/plugin_utils.py", + "docker/*", ] [tool.ruff.format] From 8d6c5d1761c59cbf5c267f136638d2a1c780e9f8 Mon Sep 17 00:00:00 2001 From: Luke Tucker <64618+ltucker@users.noreply.github.com> Date: Wed, 2 Apr 2025 16:38:18 -0400 Subject: [PATCH 27/30] Change set validation (#69) * add change set validation Signed-off-by: Michal Fiedorowicz * remove redundant serializers Signed-off-by: Michal Fiedorowicz * remove redundant serializers tests Signed-off-by: Michal Fiedorowicz * tidy up Signed-off-by: Michal Fiedorowicz * adjust tests, lighter validation of refs on diff, handle generic --------- Signed-off-by: Michal Fiedorowicz Co-authored-by: Michal Fiedorowicz --- netbox_diode_plugin/api/applier.py | 71 +--- netbox_diode_plugin/api/common.py | 157 +++++++- netbox_diode_plugin/api/differ.py | 71 +--- netbox_diode_plugin/api/matcher.py | 6 +- netbox_diode_plugin/api/serializers.py | 368 ------------------ netbox_diode_plugin/api/views.py | 47 +-- 
.../tests/test_api_apply_change_set.py | 69 ++-- .../tests/test_api_diff_and_apply.py | 8 +- .../tests/test_api_serializers.py | 32 -- 9 files changed, 232 insertions(+), 597 deletions(-) delete mode 100644 netbox_diode_plugin/tests/test_api_serializers.py diff --git a/netbox_diode_plugin/api/applier.py b/netbox_diode_plugin/api/applier.py index eed793b..101f30f 100644 --- a/netbox_diode_plugin/api/applier.py +++ b/netbox_diode_plugin/api/applier.py @@ -4,7 +4,6 @@ import logging -from dataclasses import dataclass, field from django.apps import apps from django.contrib.contenttypes.models import ContentType @@ -12,47 +11,14 @@ from django.db import models from rest_framework.exceptions import ValidationError as ValidationError -from .differ import Change, ChangeSet, ChangeType +from .common import NON_FIELD_ERRORS, Change, ChangeSet, ChangeSetException, ChangeSetResult, ChangeType from .plugin_utils import get_object_type_model, legal_fields from .supported_models import get_serializer_for_model logger = logging.getLogger(__name__) -@dataclass -class ApplyChangeSetResult: - """A result of applying a change set.""" - - id: str - success: bool - errors: dict | None = field(default=None) - - def to_dict(self) -> dict: - """Convert the result to a dictionary.""" - return { - "id": self.id, - "success": self.success, - "errors": self.errors, - } - - -class ApplyChangeSetException(Exception): - """ApplyChangeSetException is raised when an error occurs while applying a change set.""" - - def __init__(self, message, errors=None): - """Initialize the exception.""" - super().__init__(message) - self.message = message - self.errors = errors or {} - - def __str__(self): - """Return the string representation of the exception.""" - if self.errors: - return f"{self.message}: {self.errors}" - return self.message - - -def apply_changeset(change_set: ChangeSet) -> ApplyChangeSetResult: +def apply_changeset(change_set: ChangeSet) -> ChangeSetResult: """Apply a change set.""" 
def _validate_change_set(change_set: ChangeSet):
    """Check the structural validity of a change set before it is applied.

    Raises a ChangeSetException (built by ``_err``) on the first problem
    found; returns None when the change set passes.
    """
    if not change_set.id:
        raise _err("Change set ID is required", "changeset", "id")
    if not change_set.changes:
        raise _err("Changes are required", "changeset", "changes")

    for index, change in enumerate(change_set.changes):
        location = f"changes[{index}]"
        # Every change must target something: either an existing object
        # (object_id) or an object created earlier in this set (ref_id).
        has_target = change.object_id is not None or change.ref_id is not None
        if not has_target:
            raise _err("Object ID or Ref ID must be provided", location, NON_FIELD_ERRORS)
        if change.change_type not in ChangeType:
            raise _err(f"Unsupported change type '{change.change_type}'", location, "change_type")


def _err(message, object_name, field):
    """Build a ChangeSetException whose errors map is {object_name: {field: [message]}}."""
    return ChangeSetException(message, errors={object_name: {field: [message]}})
class ChangeType(Enum):
    """Change type enum."""

    CREATE = "create"
    UPDATE = "update"
    NOOP = "noop"


@dataclass
class Change:
    """A change to a model instance.

    Exactly one of ``object_id`` (existing object) or ``ref_id`` (placeholder
    for an object created earlier in the same change set) identifies the
    target; ``before`` holds the prior field values and ``data`` the new ones.
    """

    change_type: ChangeType
    object_type: str
    object_id: int | None = field(default=None)
    object_primary_value: str | None = field(default=None)
    ref_id: str | None = field(default=None)
    id: str = field(default_factory=lambda: str(uuid.uuid4()))
    before: dict | None = field(default=None)
    data: dict | None = field(default=None)
    new_refs: list[str] = field(default_factory=list)

    def to_dict(self) -> dict:
        """Convert the change to a dictionary."""
        return {
            "id": self.id,
            "change_type": self.change_type.value,
            "object_type": self.object_type,
            "object_id": self.object_id,
            "ref_id": self.ref_id,
            "object_primary_value": self.object_primary_value,
            "before": self.before,
            "data": self.data,
            "new_refs": self.new_refs,
        }


@dataclass
class ChangeSet:
    """A set of changes to a model instance."""

    id: str = field(default_factory=lambda: str(uuid.uuid4()))
    changes: list[Change] = field(default_factory=list)
    branch: dict[str, str] | None = field(default=None)  # {"id": str, "name": str}

    def to_dict(self) -> dict:
        """Convert the change set to a dictionary."""
        return {
            "id": self.id,
            "changes": [change.to_dict() for change in self.changes],
            "branch": self.branch,
        }

    def validate(self) -> dict | None:
        """Validate basics of the change set data.

        Returns a mapping of object_type -> {field: [messages]} for every
        problem found, or None when all changes pass.
        """
        errors = defaultdict(dict)

        for change in self.changes:
            model = apps.get_model(change.object_type)

            change_data = change.data.copy()
            if change.before:
                # NOTE(review): ``before`` values overwrite ``data`` here, so
                # clean_fields() below validates the *old* values where both
                # are present — confirm this ordering is intentional.
                change_data.update(change.before)

            # check that there is some value for every required
            # reference field, but don't validate the actual reference.
            excluded_relation_fields = []
            rel_errors = defaultdict(list)
            for f in model._meta.get_fields():
                if isinstance(f, (GenericRelation, GenericForeignKey)):
                    excluded_relation_fields.append(f.name)
                    continue
                if not f.is_relation:
                    continue
                field_name = f.name
                excluded_relation_fields.append(field_name)

                if hasattr(f, "related_model") and f.related_model == ContentType:
                    # Generic FK pair: the ContentType field is ignored and the
                    # companion id field supplies the value.
                    # Assumes the field is named "<base>_type" — TODO confirm.
                    change_data.pop(field_name, None)
                    base_field = field_name[:-5]
                    excluded_relation_fields.append(base_field + "_id")
                    value = change_data.pop(base_field + "_id", None)
                else:
                    value = change_data.pop(field_name, None)

                if not f.null and not f.blank and not f.many_to_many:
                    # this field is a required relation...
                    if value is None:
                        rel_errors[f.name].append(f"Field {f.name} is required")
            if rel_errors:
                errors[change.object_type] = rel_errors

            try:
                # Relation fields were popped above, so the instance is built
                # from scalar data only and relations are excluded from cleaning.
                instance = model(**change_data)
                instance.clean_fields(exclude=excluded_relation_fields)
            except ValidationError as e:
                errors[change.object_type].update(e.error_dict)

        return errors or None


@dataclass
class ChangeSetResult:
    """A result of applying a change set."""

    id: str | None = field(default_factory=lambda: str(uuid.uuid4()))
    change_set: ChangeSet | None = field(default=None)
    errors: dict | None = field(default=None)

    def to_dict(self) -> dict:
        """Convert the result to a dictionary.

        When a change set is attached, its serialized form *is* the result.
        """
        if self.change_set:
            return self.change_set.to_dict()

        return {
            "id": self.id,
            "errors": self.errors,
        }

    def get_status_code(self) -> int:
        """Get the status code for the result: 200 on success, 400 on errors."""
        return status.HTTP_200_OK if not self.errors else status.HTTP_400_BAD_REQUEST


class ChangeSetException(Exception):
    """ChangeSetException is raised when an error occurs while generating or applying a change set."""

    def __init__(self, message, errors=None):
        """Initialize the exception with a message and an optional errors mapping."""
        super().__init__(message)
        self.message = message
        self.errors = errors or {}

    def __str__(self):
        """Return the string representation of the exception."""
        if self.errors:
            return f"{self.message}: {self.errors}"
        return self.message
"new_refs": self.new_refs, - } - - -@dataclass -class ChangeSet: - """A set of changes to a model instance.""" - - id: str = field(default_factory=lambda: str(uuid.uuid4())) - changes: list[Change] = field(default_factory=list) - branch: dict[str, str] | None = field(default=None) # {"id": str, "name": str} - - def to_dict(self) -> dict: - """Convert the change set to a dictionary.""" - return { - "id": self.id, - "changes": [change.to_dict() for change in self.changes], - "branch": self.branch, - } def prechange_data_from_instance(instance) -> dict: # noqa: C901 """Convert model instance data to a dictionary format for comparison.""" @@ -193,7 +138,7 @@ def sort_dict_recursively(d): return d -def generate_changeset(entity: dict, object_type: str) -> ChangeSet: +def generate_changeset(entity: dict, object_type: str) -> ChangeSetResult: """Generate a changeset for an entity.""" change_set = ChangeSet() @@ -227,5 +172,11 @@ def generate_changeset(entity: dict, object_type: str) -> ChangeSet: new_refs, ) change_set.changes.append(change) - logger.error(f"change_set: {json.dumps(change_set.to_dict(), default=str, indent=4)}") - return change_set + + if errors := change_set.validate(): + raise ChangeSetException("Invalid change set", errors) + + return ChangeSetResult( + id=change_set.id, + change_set=change_set, + ) diff --git a/netbox_diode_plugin/api/matcher.py b/netbox_diode_plugin/api/matcher.py index 7d6973d..5f098c0 100644 --- a/netbox_diode_plugin/api/matcher.py +++ b/netbox_diode_plugin/api/matcher.py @@ -120,7 +120,7 @@ def fingerprint(self, data: dict) -> str|None: values = [] for field in sorted_fields: value = data[field] - if isinstance(value, (dict, UnresolvedReference)): + if isinstance(value, dict): logger.warning(f"unexpected value type for fingerprinting: {value}") return None if field in insensitive: @@ -232,13 +232,11 @@ def _prepare_data(self, data: dict) -> dict: if field.is_relation and hasattr(field, "related_model") and field.related_model == 
ContentType: prepared[field_name] = content_type_id(value) else: - logger.error("no.") prepared[field_name] = value - logger.error(f"field: {field_name} -> {value}") except FieldDoesNotExist: continue - logger.error(f"prepared data: {data} -> {prepared}") + # logger.error(f"prepared data: {data} -> {prepared}") return prepared @lru_cache(maxsize=256) diff --git a/netbox_diode_plugin/api/serializers.py b/netbox_diode_plugin/api/serializers.py index 838f8d3..60e2860 100644 --- a/netbox_diode_plugin/api/serializers.py +++ b/netbox_diode_plugin/api/serializers.py @@ -2,131 +2,10 @@ # Copyright 2024 NetBox Labs Inc """Diode NetBox Plugin - Serializers.""" -import logging - -from dcim.api.serializers import ( - DeviceRoleSerializer, - DeviceSerializer, - DeviceTypeSerializer, - InterfaceSerializer, - ManufacturerSerializer, - PlatformSerializer, - SiteSerializer, -) -from django.conf import settings from netbox.api.serializers import NetBoxModelSerializer -from packaging import version from netbox_diode_plugin.models import Setting -if version.parse(version.parse(settings.VERSION).base_version) >= version.parse("4.1"): - from core.models import ObjectChange -else: - from extras.models import ObjectChange -from ipam.api.serializers import IPAddressSerializer, PrefixSerializer -from rest_framework import serializers -from utilities.api import get_serializer_for_model -from virtualization.api.serializers import ( - ClusterGroupSerializer, - ClusterSerializer, - ClusterTypeSerializer, - VirtualDiskSerializer, - VirtualMachineSerializer, - VMInterfaceSerializer, -) - -logger = logging.getLogger("netbox.netbox_diode_plugin.api.serializers") - - -def dynamic_import(name): - """Dynamically import a class from an absolute path string.""" - components = name.split(".") - mod = __import__(components[0]) - for comp in components[1:]: - mod = getattr(mod, comp) - return mod - - -def get_diode_serializer(instance): - """Get the Diode serializer based on instance model.""" - serializer 
= get_serializer_for_model(instance) - - serializer_name = f"netbox_diode_plugin.api.serializers.Diode{serializer.__name__}" - - try: - serializer = dynamic_import(serializer_name) - except AttributeError: - logger.warning(f"Could not find serializer for {serializer_name}") - pass - - return serializer - - -class ObjectStateSerializer(serializers.Serializer): - """Object State Serializer.""" - - object_type = serializers.SerializerMethodField(read_only=True) - object_change_id = serializers.SerializerMethodField(read_only=True) - object = serializers.SerializerMethodField(read_only=True) - - def get_object_type(self, instance): - """ - Get the object type from context sent from view. - - Return a string with the format "app.model". - """ - return self.context.get("object_type") - - def get_object_change_id(self, instance): - """ - Get the object changed based on instance ID. - - Return the ID of last change. - """ - object_changed = ( - ObjectChange.objects.filter(changed_object_id=instance.id) - .order_by("-id") - .values_list("id", flat=True) - ) - return object_changed[0] if len(object_changed) > 0 else None - - def get_object(self, instance): - """ - Get the serializer based on instance model. - - Get the data from the model according to its ID. - Return the object according to serializer defined in the NetBox. 
- """ - serializer = get_diode_serializer(instance) - - object_data = instance.__class__.objects.filter(id=instance.id) - - context = {"request": self.context.get("request")} - - data = serializer(object_data, context=context, many=True).data[0] - - return data - - -class ChangeSerialiazer(serializers.Serializer): - """ChangeSet Serializer.""" - - change_id = serializers.UUIDField(required=True) - change_type = serializers.CharField(required=True) - object_version = serializers.IntegerField(required=False, allow_null=True) - object_type = serializers.CharField(required=True) - object_id = serializers.IntegerField(required=False, allow_null=True) - data = serializers.DictField(required=True) - - -class ApplyChangeSetRequestSerializer(serializers.Serializer): - """ApplyChangeSet request Serializer.""" - - change_set_id = serializers.UUIDField(required=True) - change_set = serializers.ListField( - child=ChangeSerialiazer(), required=True, allow_empty=False - ) - class SettingSerializer(NetBoxModelSerializer): """Setting Serializer.""" @@ -142,250 +21,3 @@ class Meta: "created", "last_updated", ) - - -class DiodeIPAddressSerializer(IPAddressSerializer): - """Diode IP Address Serializer.""" - - class Meta: - """Meta class.""" - - model = IPAddressSerializer.Meta.model - fields = IPAddressSerializer.Meta.fields - - def get_assigned_object(self, obj): - """Get the assigned object based on the instance model.""" - if obj.assigned_object is None: - return None - - serializer = get_diode_serializer(obj.assigned_object) - - context = {"request": self.context["request"]} - assigned_object = serializer(obj.assigned_object, context=context).data - - if assigned_object.get("device"): - device_serializer = get_diode_serializer(obj.assigned_object.device) - device = device_serializer(obj.assigned_object.device, context=context).data - assigned_object["device"] = device - - if serializer.__name__.endswith("InterfaceSerializer"): - assigned_object = {"interface": assigned_object} - - 
return assigned_object - - -class DiodeSiteSerializer(SiteSerializer): - """Diode Site Serializer.""" - - status = serializers.CharField() - - class Meta: - """Meta class.""" - - model = SiteSerializer.Meta.model - fields = SiteSerializer.Meta.fields - - -class DiodeDeviceRoleSerializer(DeviceRoleSerializer): - """Diode Device Role Serializer.""" - - class Meta: - """Meta class.""" - - model = DeviceRoleSerializer.Meta.model - fields = DeviceRoleSerializer.Meta.fields - - -class DiodeManufacturerSerializer(ManufacturerSerializer): - """Diode Manufacturer Serializer.""" - - class Meta: - """Meta class.""" - - model = ManufacturerSerializer.Meta.model - fields = ManufacturerSerializer.Meta.fields - - -class DiodePlatformSerializer(PlatformSerializer): - """Diode Platform Serializer.""" - - manufacturer = DiodeManufacturerSerializer(required=False, allow_null=True) - - class Meta: - """Meta class.""" - - model = PlatformSerializer.Meta.model - fields = PlatformSerializer.Meta.fields - - -class DiodeDeviceTypeSerializer(DeviceTypeSerializer): - """Diode Device Type Serializer.""" - - default_platform = DiodePlatformSerializer(required=False, allow_null=True) - manufacturer = DiodeManufacturerSerializer(required=False, allow_null=True) - - class Meta: - """Meta class.""" - - model = DeviceTypeSerializer.Meta.model - fields = DeviceTypeSerializer.Meta.fields - - -class DiodeDeviceSerializer(DeviceSerializer): - """Diode Device Serializer.""" - - site = DiodeSiteSerializer() - device_type = DiodeDeviceTypeSerializer() - role = DiodeDeviceRoleSerializer() - platform = DiodePlatformSerializer(required=False, allow_null=True) - status = serializers.CharField() - - class Meta: - """Meta class.""" - - model = DeviceSerializer.Meta.model - fields = DeviceSerializer.Meta.fields - - -class DiodeNestedInterfaceSerializer(InterfaceSerializer): - """Diode Nested Interface Serializer.""" - - class Meta: - """Meta class.""" - - model = InterfaceSerializer.Meta.model - fields = 
InterfaceSerializer.Meta.fields - - -class DiodeInterfaceSerializer(InterfaceSerializer): - """Diode Interface Serializer.""" - - device = DiodeDeviceSerializer() - parent = DiodeNestedInterfaceSerializer() - type = serializers.CharField() - mode = serializers.CharField() - - class Meta: - """Meta class.""" - - model = InterfaceSerializer.Meta.model - fields = InterfaceSerializer.Meta.fields - - -class DiodePrefixSerializer(PrefixSerializer): - """Diode Prefix Serializer.""" - - status = serializers.CharField() - site = serializers.SerializerMethodField(read_only=True) - - class Meta: - """Meta class.""" - - model = PrefixSerializer.Meta.model - fields = PrefixSerializer.Meta.fields + ["site"] - - def get_site(self, obj): - """Get the site from the instance scope.""" - if obj.scope is None: - return None - - scope_model_meta = obj.scope_type.model_class()._meta - if scope_model_meta.app_label == "dcim" and scope_model_meta.model_name == "site": - serializer = get_serializer_for_model(obj.scope) - context = {'request': self.context['request']} - return serializer(obj.scope, nested=True, context=context).data - - return None - - -class DiodeClusterGroupSerializer(ClusterGroupSerializer): - """Diode Cluster Group Serializer.""" - - class Meta: - """Meta class.""" - - model = ClusterGroupSerializer.Meta.model - fields = ClusterGroupSerializer.Meta.fields - - -class DiodeClusterTypeSerializer(ClusterTypeSerializer): - """Diode Cluster Type Serializer.""" - - class Meta: - """Meta class.""" - - model = ClusterTypeSerializer.Meta.model - fields = ClusterTypeSerializer.Meta.fields - - -class DiodeClusterSerializer(ClusterSerializer): - """Diode Cluster Serializer.""" - - type = DiodeClusterTypeSerializer() - group = DiodeClusterGroupSerializer() - status = serializers.CharField() - site = serializers.SerializerMethodField(read_only=True) - - class Meta: - """Meta class.""" - - model = ClusterSerializer.Meta.model - fields = ClusterSerializer.Meta.fields + ["site"] - - def 
get_site(self, obj): - """Get the site from the instance scope.""" - if obj.scope is None: - return None - - scope_model_meta = obj.scope_type.model_class()._meta - if scope_model_meta.app_label == "dcim" and scope_model_meta.model_name == "site": - serializer = get_serializer_for_model(obj.scope) - context = {'request': self.context['request']} - return serializer(obj.scope, nested=True, context=context).data - - return None - - -class DiodeVirtualMachineSerializer(VirtualMachineSerializer): - """Diode Virtual Machine Serializer.""" - - status = serializers.CharField() - site = DiodeSiteSerializer() - cluster = DiodeClusterSerializer() - device = DiodeDeviceSerializer() - role = DiodeDeviceRoleSerializer() - tenant = serializers.CharField() - platform = DiodePlatformSerializer() - primary_ip = DiodeIPAddressSerializer() - primary_ip4 = DiodeIPAddressSerializer() - primary_ip6 = DiodeIPAddressSerializer() - - class Meta: - """Meta class.""" - - model = VirtualMachineSerializer.Meta.model - fields = VirtualMachineSerializer.Meta.fields - - -class DiodeVirtualDiskSerializer(VirtualDiskSerializer): - """Diode Virtual Disk Serializer.""" - - virtual_machine = DiodeVirtualMachineSerializer() - - class Meta: - """Meta class.""" - - model = VirtualDiskSerializer.Meta.model - fields = VirtualDiskSerializer.Meta.fields - - -class DiodeVMInterfaceSerializer(VMInterfaceSerializer): - """Diode VM Interface Serializer.""" - - virtual_machine = DiodeVirtualMachineSerializer() - - class Meta: - """Meta class.""" - - model = VMInterfaceSerializer.Meta.model - fields = VMInterfaceSerializer.Meta.fields diff --git a/netbox_diode_plugin/api/views.py b/netbox_diode_plugin/api/views.py index 5539db0..5f6d004 100644 --- a/netbox_diode_plugin/api/views.py +++ b/netbox_diode_plugin/api/views.py @@ -7,17 +7,14 @@ from django.apps import apps from django.db import transaction -from rest_framework import status, views +from rest_framework import views from rest_framework.exceptions import 
ValidationError from rest_framework.permissions import IsAuthenticated from rest_framework.response import Response -from netbox_diode_plugin.api.applier import ( - ApplyChangeSetException, - ApplyChangeSetResult, - apply_changeset, -) -from netbox_diode_plugin.api.differ import Change, ChangeSet, ChangeType, generate_changeset +from netbox_diode_plugin.api.applier import apply_changeset +from netbox_diode_plugin.api.common import Change, ChangeSet, ChangeSetException, ChangeSetResult +from netbox_diode_plugin.api.differ import generate_changeset from netbox_diode_plugin.api.permissions import IsDiodeWriter logger = logging.getLogger("netbox.diode_data") @@ -77,7 +74,14 @@ def _post(self, request, *args, **kwargs): f"No data found for {entity_key} in entity got: {entity.keys()}" ) - change_set = generate_changeset(original_entity_data, object_type) + try: + result = generate_changeset(original_entity_data, object_type) + except ChangeSetException as e: + logger.error(f"Error generating change set: {e}") + result = ChangeSetResult( + errors=e.errors, + ) + return Response(result.to_dict(), status=result.get_status_code()) branch_id = request.headers.get("X-NetBox-Branch") @@ -85,12 +89,11 @@ def _post(self, request, *args, **kwargs): if branch_id and Branch is not None: try: branch = Branch.objects.get(id=branch_id) - change_set.branch = {"id": branch.id, "name": branch.name} + result.branch = {"id": branch.id, "name": branch.name} except Branch.DoesNotExist: logger.warning(f"Branch with ID {branch_id} does not exist") - logger.info(f"change_set: {json.dumps(change_set.to_dict(), default=str)}") - return Response(change_set.to_dict(), status=status.HTTP_200_OK) + return Response(result.to_dict(), status=result.get_status_code()) class ApplyChangeSetView(views.APIView): @@ -131,25 +134,11 @@ def _post(self, request, *args, **kwargs): try: with transaction.atomic(): result = apply_changeset(change_set) - except ApplyChangeSetException as e: + except ChangeSetException 
as e: logger.error(f"Error applying change set: {e}") - result = ApplyChangeSetResult( + result = ChangeSetResult( id=change_set.id, - success=False, errors=e.errors, ) - return Response(result.to_dict(), status=status.HTTP_400_BAD_REQUEST) - - return Response(result.to_dict(), status=status.HTTP_200_OK) - - @staticmethod - def _get_error_response(change_set_id, errors): - """Get the error response.""" - return Response( - { - "change_set_id": change_set_id, - "result": "failed", - "errors": errors, - }, - status=status.HTTP_400_BAD_REQUEST, - ) + + return Response(result.to_dict(), status=result.get_status_code()) diff --git a/netbox_diode_plugin/tests/test_api_apply_change_set.py b/netbox_diode_plugin/tests/test_api_apply_change_set.py index 8fe4d15..b2d27c0 100644 --- a/netbox_diode_plugin/tests/test_api_apply_change_set.py +++ b/netbox_diode_plugin/tests/test_api_apply_change_set.py @@ -29,6 +29,8 @@ User = get_user_model() +def _get_error(response, object_name, field): + return response.json().get("errors", {}).get(object_name, {}).get(field, []) class BaseApplyChangeSet(APITestCase): """Base ApplyChangeSet test case.""" @@ -232,9 +234,7 @@ def test_change_type_create_return_200(self): ], } - response = self.send_request(payload) - - self.assertEqual(response.json().get("success"), True) + _ = self.send_request(payload) def test_change_type_update_return_200(self): """Test update change_type with successful.""" @@ -261,13 +261,12 @@ def test_change_type_update_return_200(self): ], } - response = self.client.post( + _ = self.client.post( self.url, payload, format="json", **self.user_header ) site_updated = Site.objects.get(id=20) - self.assertEqual(response.json().get("success"), True) self.assertEqual(site_updated.name, "Site A") def test_change_type_create_with_error_return_400(self): @@ -297,13 +296,11 @@ def test_change_type_create_with_error_return_400(self): } response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - site_created = 
Site.objects.filter(name="Site A") - self.assertEqual(response.json().get("success"), False) self.assertIn( 'Expected a list of items but got type "int".', - response.json().get("errors", {}).get("changes[0].asns", []), + _get_error(response, "changes[0]", "asns"), ) self.assertFalse(site_created.exists()) @@ -335,11 +332,9 @@ def test_change_type_update_with_error_return_400(self): response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) site_updated = Site.objects.get(id=20) - - self.assertEqual(response.json().get("success"), False) self.assertIn( 'Expected a list of items but got type "int".', - response.json().get("errors", {}).get("changes[0].asns", []), + _get_error(response, "changes[0]", "asns") ) self.assertEqual(site_updated.name, "Site 2") @@ -385,9 +380,7 @@ def test_change_type_create_with_multiples_objects_return_200(self): ], } - response = self.send_request(payload) - - self.assertEqual(response.json().get("success"), True) + _ = self.send_request(payload) def test_change_type_update_with_multiples_objects_return_200(self): """Test update change type with two objects.""" @@ -429,12 +422,11 @@ def test_change_type_update_with_multiples_objects_return_200(self): ], } - response = self.send_request(payload) + _ = self.send_request(payload) site_updated = Site.objects.get(id=20) device_updated = Device.objects.get(id=10) - self.assertEqual(response.json().get("success"), True) self.assertEqual(site_updated.name, "Site A") self.assertEqual(device_updated.name, "Test Device 3") @@ -484,10 +476,9 @@ def test_change_type_create_and_update_with_error_in_one_object_return_400(self) site_created = Site.objects.filter(name="Site Z") device_created = Device.objects.filter(name="Test Device 4") - self.assertEqual(response.json().get("success"), False) self.assertIn( "Related object not found using the provided numeric ID: 3", - response.json().get("errors", {}).get("changes[1].device_type", []), + _get_error(response, "changes[1]", 
"device_type"), ) self.assertFalse(site_created.exists()) self.assertFalse(device_created.exists()) @@ -555,11 +546,9 @@ def test_multiples_create_type_error_in_two_objects_return_400(self): site_created = Site.objects.filter(name="Site Z") device_created = Device.objects.filter(name="Test Device 4") - self.assertEqual(response.json().get("success"), False) - self.assertIn( "Related object not found using the provided numeric ID: 3", - response.json().get("errors", {}).get("changes[1].device_type", []), + _get_error(response, "changes[1]", "device_type"), ) self.assertFalse(site_created.exists()) @@ -598,7 +587,7 @@ def test_change_type_update_with_object_id_not_exist_return_400(self): self.assertIn( "dcim.site with id 30 does not exist", - response.json().get("errors", {}).get("changes[0].object_id", []), + _get_error(response, "changes[0]", "object_id"), ) self.assertEqual(site_updated.name, "Site 2") @@ -630,9 +619,9 @@ def test_change_set_id_field_not_provided_return_400(self): response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) self.assertIsNone(response.json().get("errors", {}).get("change_id", None)) - self.assertEqual( - response.json().get("errors", {}).get("id", []), - ["Change set ID is required"], + self.assertIn( + "Change set ID is required", + _get_error(response, "changeset", "id"), ) def test_change_type_field_not_provided_return_400( @@ -666,7 +655,7 @@ def test_change_type_field_not_provided_return_400( self.assertIn( "Unsupported change type ''", - response.json().get("errors", {}).get("changes[0].change_type", []), + _get_error(response, "changes[0]", "change_type"), ) def test_change_set_id_field_and_change_set_not_provided_return_400(self): @@ -680,7 +669,7 @@ def test_change_set_id_field_and_change_set_not_provided_return_400(self): self.assertIn( "Change set ID is required", - response.json().get("errors", {}).get("id", []), + _get_error(response, "changeset", "id"), ) def 
test_change_type_and_object_type_provided_return_400( @@ -731,7 +720,7 @@ def test_change_type_and_object_type_provided_return_400( self.assertIn( "Unsupported change type 'None'", - response.json().get("errors", {}).get("changes[0].change_type", []), + _get_error(response, "changes[0]", "change_type"), ) # self.assertEqual( # response.json().get("errors")[0].get("change_type"), @@ -772,9 +761,7 @@ def test_create_ip_address_return_200(self): }, ], } - response = self.send_request(payload) - - self.assertEqual(response.json().get("success"), True) + _ = self.send_request(payload) # def test_create_ip_address_return_400(self): # """Test create ip_address with missing interface name.""" @@ -953,11 +940,9 @@ def test_add_primary_ip_address_to_device(self): ], } - response = self.send_request(payload) - + _ = self.send_request(payload) device_updated = Device.objects.get(id=10) - self.assertEqual(response.json().get("success"), True) self.assertEqual(device_updated.name, self.devices[0].name) self.assertEqual(device_updated.primary_ip4, self.ip_addresses[0]) @@ -981,9 +966,7 @@ def test_create_prefix_with_site_stored_as_scope(self): }, ], } - response = self.send_request(payload) - - self.assertEqual(response.json().get("success"), True) + _ = self.send_request(payload) self.assertEqual(Prefix.objects.get(prefix="192.168.0.0/24").scope, self.sites[0]) def test_create_prefix_with_unknown_site_fails(self): @@ -1007,11 +990,9 @@ def test_create_prefix_with_unknown_site_fails(self): ], } response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - - self.assertEqual(response.json().get("success"), False) self.assertIn( 'Please select a site.', - response.json().get("errors", {}).get("changes[0].scope", []), + _get_error(response, "changes[0]", "scope"), ) self.assertFalse(Prefix.objects.filter(prefix="192.168.0.0/24").exists()) @@ -1038,9 +1019,7 @@ def test_create_virtualization_cluster_with_site_stored_as_scope(self): }, ], } - response = 
self.send_request(payload) - - self.assertEqual(response.json().get("success"), True) + _ = self.send_request(payload) self.assertEqual(Cluster.objects.get(name="Cluster 3").scope, self.sites[0]) def test_create_virtualmachine_with_cluster_site_stored_as_scope(self): @@ -1074,7 +1053,5 @@ def test_create_virtualmachine_with_cluster_site_stored_as_scope(self): }, ], } - response = self.send_request(payload) - - self.assertEqual(response.json().get("success"), True) + _ = self.send_request(payload) self.assertEqual(VirtualMachine.objects.get(name="VM foobar", site_id=self.sites[0].id).cluster.scope, self.sites[0]) diff --git a/netbox_diode_plugin/tests/test_api_diff_and_apply.py b/netbox_diode_plugin/tests/test_api_diff_and_apply.py index 6aae051..19793c5 100644 --- a/netbox_diode_plugin/tests/test_api_diff_and_apply.py +++ b/netbox_diode_plugin/tests/test_api_diff_and_apply.py @@ -2,12 +2,16 @@ # Copyright 2024 NetBox Labs Inc """Diode NetBox Plugin - Tests.""" +import logging + from dcim.models import Interface, Site from django.contrib.auth import get_user_model from rest_framework import status from users.models import Token from utilities.testing import APITestCase +logger = logging.getLogger(__name__) + User = get_user_model() @@ -39,8 +43,6 @@ def test_generate_diff_and_apply_create_site(self): } _, response = self.diff_and_apply(payload) - self.assertEqual(response.json().get("success"), True) - new_site = Site.objects.get(name="Generate Diff and Apply Site") self.assertEqual(new_site.slug, "generate-diff-and-apply-site") @@ -76,8 +78,6 @@ def test_generate_diff_and_apply_create_interface_with_primay_mac_address(self): } _, response = self.diff_and_apply(payload) - self.assertEqual(response.json().get("success"), True) - new_interface = Interface.objects.get(name="Interface 1x") self.assertEqual(new_interface.primary_mac_address.mac_address, "00:00:00:00:00:01") diff --git a/netbox_diode_plugin/tests/test_api_serializers.py 
b/netbox_diode_plugin/tests/test_api_serializers.py deleted file mode 100644 index 00e9547..0000000 --- a/netbox_diode_plugin/tests/test_api_serializers.py +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc -"""Diode NetBox Plugin - Tests.""" -from unittest.mock import MagicMock - -from dcim.models import Site -from django.test import TestCase -from extras.api.serializers import TagSerializer -from extras.models import Tag - -from netbox_diode_plugin.api.serializers import DiodeIPAddressSerializer, DiodeSiteSerializer, get_diode_serializer - - -class SerializersTestCase(TestCase): - """Test case for the serializers.""" - - def test_get_diode_serializer(self): - """Check the diode serializer is found.""" - site = Site.objects.create(name="test") - assert get_diode_serializer(site) == DiodeSiteSerializer - - tag = Tag.objects.create(name="test") - assert get_diode_serializer(tag) == TagSerializer - - - def test_get_assigned_object_returns_none_if_no_assigned_object(self): - """Check the assigned object is None if not provided.""" - obj = MagicMock() - obj.assigned_object = None - serializer = DiodeIPAddressSerializer() - result = serializer.get_assigned_object(obj) - self.assertIsNone(result) From 8f6abfd636bf7715f72091c60501589a44ac9cdb Mon Sep 17 00:00:00 2001 From: Luke Tucker <64618+ltucker@users.noreply.github.com> Date: Thu, 3 Apr 2025 13:02:59 -0400 Subject: [PATCH 28/30] fix: expand support for cycle breaking, add additional logical matchers (#70) --- netbox_diode_plugin/api/common.py | 61 +++--- netbox_diode_plugin/api/matcher.py | 57 +++-- netbox_diode_plugin/api/transformer.py | 203 +++++++++++++----- .../tests/test_api_diff_and_apply.py | 42 +++- 4 files changed, 257 insertions(+), 106 deletions(-) diff --git a/netbox_diode_plugin/api/common.py b/netbox_diode_plugin/api/common.py index e0152d5..9bcb6b2 100644 --- a/netbox_diode_plugin/api/common.py +++ b/netbox_diode_plugin/api/common.py @@ -2,16 +2,17 @@ # Copyright 
2025 NetBox Labs Inc """Diode NetBox Plugin - API - Common types and utilities.""" -from collections import defaultdict import logging import uuid +from collections import defaultdict from dataclasses import dataclass, field from enum import Enum from django.apps import apps -from django.contrib.contenttypes.fields import GenericRelation, GenericForeignKey +from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ValidationError +from django.db import models from rest_framework import status logger = logging.getLogger("netbox.diode_data") @@ -108,31 +109,7 @@ def validate(self) -> dict[str, list[str]]: if change.before: change_data.update(change.before) - # check that there is some value for every required - # reference field, but don't validate the actual reference. - excluded_relation_fields = [] - rel_errors = defaultdict(list) - for f in model._meta.get_fields(): - if isinstance(f, (GenericRelation, GenericForeignKey)): - excluded_relation_fields.append(f.name) - continue - if not f.is_relation: - continue - field_name = f.name - excluded_relation_fields.append(field_name) - - if hasattr(f, "related_model") and f.related_model == ContentType: - change_data.pop(field_name, None) - base_field = field_name[:-5] - excluded_relation_fields.append(base_field + "_id") - value = change_data.pop(base_field + "_id", None) - else: - value = change_data.pop(field_name, None) - - if not f.null and not f.blank and not f.many_to_many: - # this field is a required relation... 
- if value is None: - rel_errors[f.name].append(f"Field {f.name} is required") + excluded_relation_fields, rel_errors = self._validate_relations(change_data, model) if rel_errors: errors[change.object_type] = rel_errors @@ -144,6 +121,36 @@ def validate(self) -> dict[str, list[str]]: return errors or None + def _validate_relations(self, change_data: dict, model: models.Model) -> tuple[list[str], dict]: + # check that there is some value for every required + # reference field, but don't validate the actual reference. + # the fields are removed from the change_data so that other + # fields can be validated by instantiating the model. + excluded_relation_fields = [] + rel_errors = defaultdict(list) + for f in model._meta.get_fields(): + if isinstance(f, (GenericRelation, GenericForeignKey)): + excluded_relation_fields.append(f.name) + continue + if not f.is_relation: + continue + field_name = f.name + excluded_relation_fields.append(field_name) + + if hasattr(f, "related_model") and f.related_model == ContentType: + change_data.pop(field_name, None) + base_field = field_name[:-5] + excluded_relation_fields.append(base_field + "_id") + value = change_data.pop(base_field + "_id", None) + else: + value = change_data.pop(field_name, None) + + if not f.null and not f.blank and not f.many_to_many: + # this field is a required relation... 
+ if value is None: + rel_errors[f.name].append(f"Field {f.name} is required") + return excluded_relation_fields, rel_errors + @dataclass class ChangeSetResult: diff --git a/netbox_diode_plugin/api/matcher.py b/netbox_diode_plugin/api/matcher.py index 5f098c0..e4c8e62 100644 --- a/netbox_diode_plugin/api/matcher.py +++ b/netbox_diode_plugin/api/matcher.py @@ -9,6 +9,7 @@ from typing import Type from core.models import ObjectType as NetBoxType +from django.conf import settings from django.contrib.contenttypes.fields import ContentType from django.core.exceptions import FieldDoesNotExist from django.db import models @@ -30,11 +31,44 @@ _LOGICAL_MATCHERS = { "dcim.macaddress": lambda: [ ObjectMatchCriteria( - # consider a matching mac address within the same parent object - # to be the same object although not technically required to be. fields=("mac_address", "assigned_object_type", "assigned_object_id"), name="logical_mac_address_within_parent", model_class=get_object_type_model("dcim.macaddress"), + condition=Q(assigned_object_id__isnull=False), + ), + ObjectMatchCriteria( + fields=("mac_address", "assigned_object_type", "assigned_object_id"), + name="logical_mac_address_within_parent", + model_class=get_object_type_model("dcim.macaddress"), + condition=Q(assigned_object_id__isnull=True), + ), + ], + "ipam.ipaddress": lambda: [ + ObjectMatchCriteria( + fields=("address", ), + name="logical_ip_address_global_no_vrf", + model_class=get_object_type_model("ipam.ipaddress"), + condition=Q(vrf__isnull=True), + ), + ObjectMatchCriteria( + fields=("address", "assigned_object_type", "assigned_object_id"), + name="logical_ip_address_within_vrf", + model_class=get_object_type_model("ipam.ipaddress"), + condition=Q(vrf__isnull=False) + ), + ], + "ipam.prefix": lambda: [ + ObjectMatchCriteria( + fields=("prefix",), + name="logical_prefix_global_no_vrf", + model_class=get_object_type_model("ipam.prefix"), + condition=Q(vrf__isnull=True), + ), + ObjectMatchCriteria( + 
fields=("prefix", "vrf_id"), + name="logical_prefix_within_vrf", + model_class=get_object_type_model("ipam.prefix"), + condition=Q(vrf__isnull=False), ), ], } @@ -404,22 +438,3 @@ def find_existing_object(data: dict, object_type: str): logger.error(f" -> No object found for matcher {matcher.name}") logger.error(" * No matchers found an existing object") return None - -def merge_data(a: dict, b: dict) -> dict: - """ - Merges two structures. - - If there are any conflicts, an error is raised. - Ignores conflicts in fields that start with an underscore, - preferring a's value. - """ - if a is None or b is None: - raise ValueError("Cannot merge None values") - merged = a.copy() - for k, v in b.items(): - if k.startswith("_"): - continue - if k in merged and merged[k] != v: - raise ValueError(f"Conflict merging {a} and {b} on {k}: {merged[k]} and {v}") - merged[k] = v - return merged diff --git a/netbox_diode_plugin/api/transformer.py b/netbox_diode_plugin/api/transformer.py index 5f0e699..12e3518 100644 --- a/netbox_diode_plugin/api/transformer.py +++ b/netbox_diode_plugin/api/transformer.py @@ -10,11 +10,12 @@ from functools import lru_cache from uuid import uuid4 +import graphlib from django.core.exceptions import ValidationError from django.utils.text import slugify -from .common import UnresolvedReference -from .matcher import find_existing_object, fingerprint, merge_data +from .common import ChangeSetException, UnresolvedReference +from .matcher import find_existing_object, fingerprint from .plugin_utils import get_json_ref_info, get_primary_value logger = logging.getLogger("netbox.diode_data") @@ -53,6 +54,9 @@ def _nested_context(object_type, uuid, field_name): _IS_CIRCULAR_REFERENCE = { "dcim.interface": frozenset(["primary_mac_address"]), "virtualization.vminterface": frozenset(["primary_mac_address"]), + "dcim.device": frozenset(["primary_ip4", "primary_ip6"]), + "dcim.virtualdevicecontext": frozenset(["primary_ip4", "primary_ip6"]), + 
"virtualization.virtualmachine": frozenset(["primary_ip4", "primary_ip6"]), } def _is_circular_reference(object_type, field_name): @@ -66,38 +70,52 @@ def transform_proto_json(proto_json: dict, object_type: str, supported_models: d a certain form of deduplication and resolution of existing objects. """ entities = _transform_proto_json_1(proto_json, object_type) - logger.error(f"_transform_proto_json_1: {json.dumps(entities, default=lambda o: str(o), indent=4)}") + logger.error(f"_transform_proto_json_1 entities: {json.dumps(entities, default=lambda o: str(o), indent=4)}") + entities = _topo_sort(entities) + logger.error(f"_topo_sort: {json.dumps(entities, default=lambda o: str(o), indent=4)}") deduplicated = _fingerprint_dedupe(entities) logger.error(f"_fingerprint_dedupe: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") + deduplicated = _topo_sort(deduplicated) + logger.error(f"_topo_sort: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") _set_slugs(deduplicated, supported_models) logger.error(f"_set_slugs: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") resolved = _resolve_existing_references(deduplicated) logger.error(f"_resolve_references: {json.dumps(resolved, default=lambda o: str(o), indent=4)}") _set_defaults(resolved, supported_models) logger.error(f"_set_defaults: {json.dumps(resolved, default=lambda o: str(o), indent=4)}") + + # handle post-create steps output = _handle_post_creates(resolved) - logger.error(f"_merge_post_creates: {json.dumps(output, default=lambda o: str(o), indent=4)}") + logger.error(f"_handle_post_creates: {json.dumps(output, default=lambda o: str(o), indent=4)}") _check_unresolved_refs(output) + for entity in output: + entity.pop('_refs', None) + return output -def _transform_proto_json_1(proto_json: dict, object_type: str, context=None, existing=None) -> list[dict]: +def _transform_proto_json_1(proto_json: dict, object_type: str, context=None) -> list[dict]: # noqa: C901 uuid = 
str(uuid4()) - transformed = { + node = { "_object_type": object_type, "_uuid": uuid, + "_refs": set(), } + + # context pushed down from parent nodes if context is not None: - transformed.update(context) - existing = existing or {} - entities = [transformed] + for k, v in context.items(): + node[k] = v + if isinstance(v, UnresolvedReference): + node['_refs'].add(v.uuid) - post_create = {} + nodes = [node] + post_create = None for key, value in proto_json.items(): ref_info = get_json_ref_info(object_type, key) if ref_info is None: - transformed[_camel_to_snake_case(key)] = copy.deepcopy(value) + node[_camel_to_snake_case(key)] = copy.deepcopy(value) continue nested_context = _nested_context(object_type, uuid, ref_info.field_name) @@ -105,50 +123,74 @@ def _transform_proto_json_1(proto_json: dict, object_type: str, context=None, ex is_circular = _is_circular_reference(object_type, field_name) if ref_info.is_generic: - transformed[field_name + "_type"] = ref_info.object_type + node[field_name + "_type"] = ref_info.object_type field_name = field_name + "_id" - nested_refs = [] + refs = [] ref_value = None if isinstance(value, list): ref_value = [] for item in value: nested = _transform_proto_json_1(item, ref_info.object_type, nested_context) - nested_refs += nested - ref = nested[-1] + nodes += nested + ref_uuid = nested[0]['_uuid'] ref_value.append(UnresolvedReference( object_type=ref_info.object_type, - uuid=ref['_uuid'], + uuid=ref_uuid, )) + refs.append(ref_uuid) else: - nested_refs = _transform_proto_json_1(value, ref_info.object_type, nested_context) - ref = nested_refs[-1] + nested = _transform_proto_json_1(value, ref_info.object_type, nested_context) + nodes += nested + ref_uuid = nested[0]['_uuid'] ref_value = UnresolvedReference( object_type=ref_info.object_type, - uuid=ref['_uuid'], + uuid=ref_uuid, ) + refs.append(ref_uuid) + if is_circular: + if post_create is None: + post_create = { + "_uuid": str(uuid4()), + "_object_type": object_type, + "_refs": set(), 
+ "_instance": node['_uuid'], + "_is_post_create": True, + } post_create[field_name] = ref_value - entities = entities + nested_refs - else: - transformed[field_name] = ref_value - entities = nested_refs + entities - - # if there are fields that must be deferred until after the object is created, - # add a new entity with the post-create data. eg a child object that references - # this object and is also referenced by this object such as primary mac address - # on an interface. - # if this object already exists, two steps are not needed, and this will be - # simplified in a later pass. - if len(post_create) > 0: - post_create_uuid = str(uuid4()) - post_create['_uuid'] = post_create_uuid - post_create['_instance'] = uuid - post_create['_object_type'] = object_type - transformed['_post_create'] = post_create_uuid - entities.append(post_create) - - return entities + post_create['_refs'].update(refs) + post_create['_refs'].add(node['_uuid']) + continue + + node[field_name] = ref_value + node['_refs'].update(refs) + + if post_create: + nodes.append(post_create) + + return nodes + + +def _topo_sort(entities: list[dict]) -> list[dict]: + """Topologically sort entities by reference.""" + by_uuid = {e['_uuid']: e for e in entities} + graph = defaultdict(set) + for entity in entities: + graph[entity['_uuid']] = entity['_refs'].copy() + + try: + ts = graphlib.TopologicalSorter(graph) + order = tuple(ts.static_order()) + return [by_uuid[uuid] for uuid in order] + except graphlib.CycleError as e: + # TODO the cycle error references the cycle here ... 
+ raise ChangeSetException(f"Circular reference in entities: {e}", errors={ + "__all__": { + "message": "Unable to resolve circular reference in entities", + } + }) + def _set_defaults(entities: list[dict], supported_models: dict): for entity in entities: @@ -178,13 +220,23 @@ def _generate_slug(object_type, data): return None def _fingerprint_dedupe(entities: list[dict]) -> list[dict]: + """ + Deduplicates/merges entities by fingerprint. + + *list must be in topo order by reference already* + """ by_fp = {} deduplicated = [] new_refs = {} # uuid -> uuid for entity in entities: - fp = fingerprint(entity, entity['_object_type']) - existing = by_fp.get(fp) + if entity.get('_is_post_create'): + fp = entity['_uuid'] + existing = None + else: + fp = fingerprint(entity, entity['_object_type']) + existing = by_fp.get(fp) + if existing is None: logger.debug(" * entity is new.") new_entity = copy.deepcopy(entity) @@ -194,13 +246,39 @@ def _fingerprint_dedupe(entities: list[dict]) -> list[dict]: else: logger.debug(" * entity already exists.") new_refs[entity['_uuid']] = existing['_uuid'] - merged = merge_data(existing, entity) + merged = _merge_nodes(existing, entity) _update_unresolved_refs(merged, new_refs) by_fp[fp] = merged return [by_fp[fp] for fp in deduplicated] +def _merge_nodes(a: dict, b: dict) -> dict: + """ + Merges two nodes. + + If there are any conflicts, an error is raised. + Ignores conflicts in fields that start with an underscore, + preferring a's value. 
+ """ + merged = copy.deepcopy(a) + merged['_refs'] = a['_refs'] | b['_refs'] + + for k, v in b.items(): + if k.startswith("_"): + continue + if k in merged and merged[k] != v: + raise ValueError(f"Conflict merging {a} and {b} on {k}: {merged[k]} and {v}") + merged[k] = v + return merged + + def _update_unresolved_refs(entity, new_refs): + if entity.get('_is_post_create'): + instance_uuid = entity['_instance'] + entity['_instance'] = new_refs.get(instance_uuid, instance_uuid) + + entity['_refs'] = {new_refs.get(r,r) for r in entity['_refs']} + for k, v in entity.items(): if isinstance(v, UnresolvedReference) and v.uuid in new_refs: v.uuid = new_refs[v.uuid] @@ -274,27 +352,31 @@ def cleanup_unresolved_references(data: dict) -> list[str]: def _handle_post_creates(entities: list[dict]) -> list[str]: """Merges any unnecessary post-create steps for existing objects.""" - by_uuid = {x['_uuid']: x for x in entities} + by_uuid = {e['_uuid']: (i, e) for i, e in enumerate(entities)} out = [] for entity in entities: - post_create = entity.pop('_post_create', None) - if post_create is None: + is_post_create = entity.pop('_is_post_create', False) + if not is_post_create: out.append(entity) continue - post_create = by_uuid[post_create] - if entity.get('_instance') is not None: - # this entity has a post-create, but it has already been - # created. in this case we can just merge this entity into - # the post-create entity and skip it without worrying about - # references to it. - post_create.update(entity) + instance = entity.get('_instance') + prior_index, prior_entity = by_uuid[instance] + + # a post create can be merged whenever the entities it relies on + # already exist (were resolved) or there are no dependencies between + # the object being updated and the post-create. 
+ can_merge = all( + by_uuid[r][1].get('_instance') is not None + for r in entity['_refs'] + ) or sorted(by_uuid[r][0] for r in entity['_refs'])[-1] == prior_index + + if can_merge: + prior_entity.update([x for x in entity.items() if not x[0].startswith('_')]) else: - # this entity will be created. - # in this case we need to fix up the identifier in the post-create - # to refer to the created object. - post_create['id'] = entity['id'] + entity['id'] = prior_entity['id'] out.append(entity) + return out def _check_unresolved_refs(entities: list[dict]) -> list[str]: @@ -304,4 +386,11 @@ def _check_unresolved_refs(entities: list[dict]) -> list[str]: for k, v in e.items(): if isinstance(v, UnresolvedReference): if (v.object_type, v.uuid) not in seen: - raise ValueError(f"Unresolved reference {v} in {e} does not refer to a prior created object (circular reference?)") + raise ChangeSetException( + f"Unresolved reference {v} in {e} does not refer to a prior created object (circular reference?)", + errors={ + e['_object_type']: { + k: ["unable to resolve reference"], + } + } + ) diff --git a/netbox_diode_plugin/tests/test_api_diff_and_apply.py b/netbox_diode_plugin/tests/test_api_diff_and_apply.py index 19793c5..75bec8a 100644 --- a/netbox_diode_plugin/tests/test_api_diff_and_apply.py +++ b/netbox_diode_plugin/tests/test_api_diff_and_apply.py @@ -4,8 +4,9 @@ import logging -from dcim.models import Interface, Site +from dcim.models import Device, Interface, Site from django.contrib.auth import get_user_model +from ipam.models import IPAddress from rest_framework import status from users.models import Token from utilities.testing import APITestCase @@ -81,6 +82,45 @@ def test_generate_diff_and_apply_create_interface_with_primay_mac_address(self): new_interface = Interface.objects.get(name="Interface 1x") self.assertEqual(new_interface.primary_mac_address.mac_address, "00:00:00:00:00:01") + def test_generate_diff_and_apply_create_device_with_primary_ip4(self): + """Test 
generate diff and apply create device with primary ip4.""" + payload = { + "timestamp": 1, + "object_type": "ipam.ipaddress", + "entity": { + "ipAddress": { + "address": "192.168.1.1", + "assignedObjectInterface": { + "name": "Interface 2x", + "type": "1000base-t", + "device": { + "name": "Device 2x", + "role": { + "name": "Role ABC", + }, + "site": { + "name": "Site ABC", + }, + "deviceType": { + "manufacturer": { + "name": "Manufacturer A", + }, + "model": "Device Type A", + }, + "primaryIp4": { + "address": "192.168.1.1", + }, + }, + }, + }, + }, + } + + _, response = self.diff_and_apply(payload) + new_ipaddress = IPAddress.objects.get(address="192.168.1.1") + self.assertEqual(new_ipaddress.assigned_object.name, "Interface 2x") + device = Device.objects.get(name="Device 2x") + self.assertEqual(device.primary_ip4.pk, new_ipaddress.pk) def diff_and_apply(self, payload): """Diff and apply the payload.""" From e7235c9b1635991fe71a6a59348d84700a9871c9 Mon Sep 17 00:00:00 2001 From: Luke Tucker <64618+ltucker@users.noreply.github.com> Date: Thu, 3 Apr 2025 14:02:50 -0400 Subject: [PATCH 29/30] fix: fix error fingerprinting tags (#71) --- Makefile | 5 + netbox_diode_plugin/api/matcher.py | 2 +- .../tests/test_api_diff_and_apply.py | 91 ++++++++++++++----- 3 files changed, 75 insertions(+), 23 deletions(-) diff --git a/Makefile b/Makefile index 6145666..79e7f75 100644 --- a/Makefile +++ b/Makefile @@ -17,6 +17,11 @@ docker-compose-netbox-plugin-test: -@$(DOCKER_COMPOSE) -f docker/docker-compose.yaml -f docker/docker-compose.test.yaml run -u root --rm netbox ./manage.py test --keepdb netbox_diode_plugin @$(MAKE) docker-compose-netbox-plugin-down +.PHONY: docker-compose-netbox-plugin-test-ff +docker-compose-netbox-plugin-test-ff: + -@$(DOCKER_COMPOSE) -f docker/docker-compose.yaml -f docker/docker-compose.test.yaml run -u root --rm netbox ./manage.py test --failfast --keepdb netbox_diode_plugin + @$(MAKE) docker-compose-netbox-plugin-down + .PHONY: 
docker-compose-netbox-plugin-test-cover docker-compose-netbox-plugin-test-cover: -@$(DOCKER_COMPOSE) -f docker/docker-compose.yaml -f docker/docker-compose.test.yaml run --rm -u root -e COVERAGE_FILE=/opt/netbox/netbox/coverage/.coverage netbox sh -c "coverage run --source=netbox_diode_plugin --omit=*/migrations/* ./manage.py test --keepdb netbox_diode_plugin && coverage xml -o /opt/netbox/netbox/coverage/report.xml && coverage report -m | tee /opt/netbox/netbox/coverage/report.txt" diff --git a/netbox_diode_plugin/api/matcher.py b/netbox_diode_plugin/api/matcher.py index e4c8e62..8f11735 100644 --- a/netbox_diode_plugin/api/matcher.py +++ b/netbox_diode_plugin/api/matcher.py @@ -379,7 +379,7 @@ def _fingerprint_all(data: dict) -> str: values.append(k) if isinstance(v, (list, tuple)): values.extend(sorted(v)) - if isinstance(v, dict): + elif isinstance(v, dict): values.append(_fingerprint_all(v)) else: values.append(v) diff --git a/netbox_diode_plugin/tests/test_api_diff_and_apply.py b/netbox_diode_plugin/tests/test_api_diff_and_apply.py index 75bec8a..c4ca36e 100644 --- a/netbox_diode_plugin/tests/test_api_diff_and_apply.py +++ b/netbox_diode_plugin/tests/test_api_diff_and_apply.py @@ -3,6 +3,7 @@ """Diode NetBox Plugin - Tests.""" import logging +from uuid import uuid4 from dcim.models import Device, Interface, Site from django.contrib.auth import get_user_model @@ -29,46 +30,89 @@ def setUp(self): self.add_permissions("netbox_diode_plugin.add_diode") + def test_generate_diff_and_apply_create_interface_with_tags(self): + """Test generate diff and apply create interface with tags.""" + interface_uuid = str(uuid4()) + payload = { + "timestamp": 1, + "object_type": "dcim.interface", + "entity": { + "interface": { + "name": f"Interface {interface_uuid}", + "mtu": "1500", + "mode": "access", + "tags": [ + {"name": "tag 1"} + ], + "type": "1000base-t", + "device": { + "name": f"Device {uuid4()}", + "deviceType": { + "model": f"Device Type {uuid4()}", + "manufacturer": 
{ + "name": f"Manufacturer {uuid4()}" + } + }, + "role": { + "name": f"Role {uuid4()}" + }, + "site": { + "name": f"Site {uuid4()}" + } + }, + "enabled": True, + "description": "Physical interface" + } + } + } + _, response = self.diff_and_apply(payload) + new_interface = Interface.objects.get(name=f"Interface {interface_uuid}") + self.assertEqual(new_interface.tags.count(), 1) + self.assertEqual(new_interface.tags.first().name, "tag 1") + + def test_generate_diff_and_apply_create_site(self): """Test generate diff and apply create site.""" """Test generate diff create site.""" + site_uuid = str(uuid4()) payload = { "timestamp": 1, "object_type": "dcim.site", "entity": { "site": { - "name": "Generate Diff and Apply Site", - "slug": "generate-diff-and-apply-site", + "name": f"Site {site_uuid}", + "slug": f"site-{site_uuid}", }, } } _, response = self.diff_and_apply(payload) - new_site = Site.objects.get(name="Generate Diff and Apply Site") - self.assertEqual(new_site.slug, "generate-diff-and-apply-site") + new_site = Site.objects.get(name=f"Site {site_uuid}") + self.assertEqual(new_site.slug, f"site-{site_uuid}") def test_generate_diff_and_apply_create_interface_with_primay_mac_address(self): """Test generate diff and apply create interface with primary mac address.""" + interface_uuid = str(uuid4()) payload = { "timestamp": 1, "object_type": "dcim.interface", "entity": { "interface": { - "name": "Interface 1x", + "name": f"Interface {interface_uuid}", "type": "1000base-t", "device": { - "name": "Device 1x", + "name": f"Device {uuid4()}", "role": { - "Name": "Role ABC", + "Name": f"Role {uuid4()}", }, "site": { - "Name": "Site ABC", + "Name": f"Site {uuid4()}", }, "deviceType": { "manufacturer": { - "Name": "Manufacturer A", + "Name": f"Manufacturer {uuid4()}", }, - "model": "Device Type A", + "model": f"Device Type {uuid4()}", }, }, "primaryMacAddress": { @@ -79,36 +123,39 @@ def test_generate_diff_and_apply_create_interface_with_primay_mac_address(self): } _, 
response = self.diff_and_apply(payload) - new_interface = Interface.objects.get(name="Interface 1x") + new_interface = Interface.objects.get(name=f"Interface {interface_uuid}") self.assertEqual(new_interface.primary_mac_address.mac_address, "00:00:00:00:00:01") def test_generate_diff_and_apply_create_device_with_primary_ip4(self): """Test generate diff and apply create device with primary ip4.""" + device_uuid = str(uuid4()) + interface_uuid = str(uuid4()) + addr = "192.168.1.1" payload = { "timestamp": 1, "object_type": "ipam.ipaddress", "entity": { "ipAddress": { - "address": "192.168.1.1", + "address": addr, "assignedObjectInterface": { - "name": "Interface 2x", + "name": f"Interface {interface_uuid}", "type": "1000base-t", "device": { - "name": "Device 2x", + "name": f"Device {device_uuid}", "role": { - "name": "Role ABC", + "name": f"Role {uuid4()}", }, "site": { - "name": "Site ABC", + "name": f"Site {uuid4()}", }, "deviceType": { "manufacturer": { - "name": "Manufacturer A", + "name": f"Manufacturer {uuid4()}", }, - "model": "Device Type A", + "model": f"Device Type {uuid4()}", }, "primaryIp4": { - "address": "192.168.1.1", + "address": addr, }, }, }, @@ -117,9 +164,9 @@ def test_generate_diff_and_apply_create_device_with_primary_ip4(self): } _, response = self.diff_and_apply(payload) - new_ipaddress = IPAddress.objects.get(address="192.168.1.1") - self.assertEqual(new_ipaddress.assigned_object.name, "Interface 2x") - device = Device.objects.get(name="Device 2x") + new_ipaddress = IPAddress.objects.get(address=addr) + self.assertEqual(new_ipaddress.assigned_object.name, f"Interface {interface_uuid}") + device = Device.objects.get(name=f"Device {device_uuid}") self.assertEqual(device.primary_ip4.pk, new_ipaddress.pk) def diff_and_apply(self, payload): From 5a98d6b01299603f91ee828ef50553eec1b9d7f5 Mon Sep 17 00:00:00 2001 From: Luke Tucker <64618+ltucker@users.noreply.github.com> Date: Fri, 4 Apr 2025 08:45:39 -0400 Subject: [PATCH 30/30] fix: all noops -> no 
changes, show noops as only prior state (#72) --- netbox_diode_plugin/api/differ.py | 21 ++++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/netbox_diode_plugin/api/differ.py b/netbox_diode_plugin/api/differ.py index e44ecab..a1721a0 100644 --- a/netbox_diode_plugin/api/differ.py +++ b/netbox_diode_plugin/api/differ.py @@ -118,14 +118,16 @@ def diff_to_change( new_refs=unresolved_references, ) - if change_type == ChangeType.UPDATE: - # remove null values + if change_type != ChangeType.NOOP: + postchange_data_clean = clean_diff_data(postchange_data) + change.data = sort_dict_recursively(postchange_data_clean) + else: + change.data = {} + + if change_type == ChangeType.UPDATE or change_type == ChangeType.NOOP: prechange_data_clean = clean_diff_data(prechange_data) change.before = sort_dict_recursively(prechange_data_clean) - postchange_data_clean = clean_diff_data(postchange_data) - change.data = sort_dict_recursively(postchange_data_clean) - return change def sort_dict_recursively(d): @@ -171,8 +173,17 @@ def generate_changeset(entity: dict, object_type: str) -> ChangeSetResult: changed_attrs, new_refs, ) + change_set.changes.append(change) + has_any_changes = False + for change in change_set.changes: + if change.change_type != ChangeType.NOOP: + has_any_changes = True + break + + if not has_any_changes: + change_set.changes = [] if errors := change_set.validate(): raise ChangeSetException("Invalid change set", errors)