diff --git a/Makefile b/Makefile
index 6145666..79e7f75 100644
--- a/Makefile
+++ b/Makefile
@@ -17,6 +17,11 @@ docker-compose-netbox-plugin-test:
 	-@$(DOCKER_COMPOSE) -f docker/docker-compose.yaml -f docker/docker-compose.test.yaml run -u root --rm netbox ./manage.py test --keepdb netbox_diode_plugin
 	@$(MAKE) docker-compose-netbox-plugin-down
 
+.PHONY: docker-compose-netbox-plugin-test-ff
+docker-compose-netbox-plugin-test-ff:
+	-@$(DOCKER_COMPOSE) -f docker/docker-compose.yaml -f docker/docker-compose.test.yaml run -u root --rm netbox ./manage.py test --failfast --keepdb netbox_diode_plugin
+	@$(MAKE) docker-compose-netbox-plugin-down
+
 .PHONY: docker-compose-netbox-plugin-test-cover
 docker-compose-netbox-plugin-test-cover:
 	-@$(DOCKER_COMPOSE) -f docker/docker-compose.yaml -f docker/docker-compose.test.yaml run --rm -u root -e COVERAGE_FILE=/opt/netbox/netbox/coverage/.coverage netbox sh -c "coverage run --source=netbox_diode_plugin --omit=*/migrations/* ./manage.py test --keepdb netbox_diode_plugin && coverage xml -o /opt/netbox/netbox/coverage/report.xml && coverage report -m | tee /opt/netbox/netbox/coverage/report.txt"
diff --git a/docker/netbox/configuration/configuration.py b/docker/netbox/configuration/configuration.py
index cc51c59..d459441 100644
--- a/docker/netbox/configuration/configuration.py
+++ b/docker/netbox/configuration/configuration.py
@@ -44,9 +44,12 @@ def _environ_get_and_map(variable_name: str, default: str | None = None,
     return map_fn(env_value)
 
 
-_AS_BOOL = lambda value: value.lower() == 'true'
-_AS_INT = lambda value: int(value)
-_AS_LIST = lambda value: list(filter(None, value.split(' ')))
+def _AS_BOOL(value):
+    return value.lower() == 'true'
+def _AS_INT(value):
+    return int(value)
+def _AS_LIST(value):
+    return list(filter(None, value.split(' ')))
 
 
 _BASE_DIR = dirname(dirname(abspath(__file__)))
diff --git a/docker/netbox/env/netbox.env b/docker/netbox/env/netbox.env
index 45993fc..38a0211 100644
--- a/docker/netbox/env/netbox.env
+++ b/docker/netbox/env/netbox.env
@@ -41,3 +41,4 @@ DIODE_TO_NETBOX_API_KEY=1368dbad13e418d5a443d93cf255edde03a2a754
 NETBOX_TO_DIODE_API_KEY=1e99338b8cab5fc637bc55f390bda1446f619c42
 DIODE_API_KEY=5a52c45ee8231156cb620d193b0291912dd15433
 BASE_PATH=netbox/
+DEBUG=True
\ No newline at end of file
diff --git a/docker/netbox/local_settings.py b/docker/netbox/local_settings.py
index 6ab2063..0542c56 100644
--- a/docker/netbox/local_settings.py
+++ b/docker/netbox/local_settings.py
@@ -1,4 +1,5 @@
 from netbox_branching.utilities import DynamicSchemaDict
+
 from .configuration import DATABASE
 
 # Wrap DATABASES with DynamicSchemaDict for dynamic schema support
@@ -9,4 +10,4 @@
 # Employ our custom database router
 DATABASE_ROUTERS = [
     'netbox_branching.database.BranchAwareRouter',
-]
\ No newline at end of file
+]
diff --git a/netbox_diode_plugin/api/applier.py b/netbox_diode_plugin/api/applier.py
new file mode 100644
index 0000000..101f30f
--- /dev/null
+++ b/netbox_diode_plugin/api/applier.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python
+# Copyright 2024 NetBox Labs Inc
+"""Diode NetBox Plugin - API - Applier."""
+
+
+import logging
+
+from django.apps import apps
+from django.contrib.contenttypes.models import ContentType
+from django.core.exceptions import ObjectDoesNotExist
+from django.db import models
+from rest_framework.exceptions import ValidationError
+
+from .common import NON_FIELD_ERRORS, Change, ChangeSet, ChangeSetException, ChangeSetResult, ChangeType
+from .plugin_utils import get_object_type_model, legal_fields
+from .supported_models import get_serializer_for_model
+
+logger = logging.getLogger(__name__)
+
+
+def apply_changeset(change_set: ChangeSet) -> ChangeSetResult:
+    """Apply a change set."""
+    _validate_change_set(change_set)
+
+    created = {}
+    for i, change in enumerate(change_set.changes):
+        change_type = change.change_type
+        object_type = change.object_type
+
+        if change_type == ChangeType.NOOP.value:
+            continue
+
+        try:
+            model_class = get_object_type_model(object_type)
+            data = _pre_apply(model_class, change, created)
+            _apply_change(data, model_class, change, created)
+        except ValidationError as e:
+            raise _err_from_validation_error(e, f"changes[{i}]")
+        except ObjectDoesNotExist:
+            raise _err(f"{object_type} with id {change.object_id} does not exist", f"changes[{i}]", "object_id")
+        # ConstraintViolationError ?
+        # ...
+
+    return ChangeSetResult(
+        id=change_set.id,
+    )
+
+def _apply_change(data: dict, model_class: models.Model, change: Change, created: dict):
+    serializer_class = get_serializer_for_model(model_class)
+    change_type = change.change_type
+    if change_type == ChangeType.CREATE.value:
+        serializer = serializer_class(data=data)
+        serializer.is_valid(raise_exception=True)
+        instance = serializer.save()
+        created[change.ref_id] = instance
+
+    elif change_type == ChangeType.UPDATE.value:
+        if object_id := change.object_id:
+            instance = model_class.objects.get(id=object_id)
+            serializer = serializer_class(instance, data=data, partial=True)
+            serializer.is_valid(raise_exception=True)
+            serializer.save()
+        # create and update of the same object within one change set
+        elif change.ref_id and (instance := created[change.ref_id]):
+            serializer = serializer_class(instance, data=data, partial=True)
+            serializer.is_valid(raise_exception=True)
+            serializer.save()
+
+def _pre_apply(model_class: models.Model, change: Change, created: dict):
+    data = change.data.copy()
+
+    # resolve foreign key references to newly created objects
+    for ref_field in change.new_refs:
+        if isinstance(data[ref_field], (list, tuple)):
+            ref_list = []
+            for ref in data[ref_field]:
+                if isinstance(ref, str):
+                    ref_list.append(created[ref].pk)
+                elif isinstance(ref, int):
+                    ref_list.append(ref)
+            data[ref_field] = ref_list
+        else:
+            data[ref_field] = created[data[ref_field]].pk
+
+    # drop fields that are not in the diode data model (TODO: should this be an error?)
+    allowed_fields = legal_fields(model_class)
+    for key in list(data.keys()):
+        if key not in allowed_fields:
+            logger.warning(f"Field {key} is not in the diode data model, ignoring.")
+            data.pop(key)
+
+    return data
+
+def _validate_change_set(change_set: ChangeSet):
+    if not change_set.id:
+        raise _err("Change set ID is required", "changeset", "id")
+    if not change_set.changes:
+        raise _err("Changes are required", "changeset", "changes")
+
+    for i, change in enumerate(change_set.changes):
+        if change.object_id is None and change.ref_id is None:
+            raise _err("Object ID or Ref ID must be provided", f"changes[{i}]", NON_FIELD_ERRORS)
+        if change.change_type not in ChangeType:
+            raise _err(f"Unsupported change type '{change.change_type}'", f"changes[{i}]", "change_type")
+
+def _err(message, object_name, field):
+    return ChangeSetException(message, errors={object_name: {field: [message]}})
+
+def _err_from_validation_error(e, object_name):
+    errors = {}
+    if e.detail:
+        if isinstance(e.detail, dict):
+            errors[object_name] = e.detail
+        elif isinstance(e.detail, (list, tuple)):
+            errors[object_name] = {
+                NON_FIELD_ERRORS: e.detail
+            }
+        else:
+            errors[object_name] = {
+                NON_FIELD_ERRORS: [e.detail]
+            }
+    return ChangeSetException("validation error", errors=errors)
diff --git a/netbox_diode_plugin/api/common.py b/netbox_diode_plugin/api/common.py
new file mode 100644
index 0000000..9bcb6b2
--- /dev/null
+++ b/netbox_diode_plugin/api/common.py
@@ -0,0 +1,191 @@
+#!/usr/bin/env python
+# Copyright 2025 NetBox Labs Inc
+"""Diode NetBox Plugin - API - Common types and utilities."""
+
+import logging
+import uuid
+from collections import defaultdict
+from dataclasses import dataclass, field
+from enum import Enum
+
+from django.apps import apps
+from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
+from django.contrib.contenttypes.models import ContentType
+from django.core.exceptions import ValidationError
+from django.db import models
+from rest_framework import status
+
+logger = logging.getLogger("netbox.diode_data")
+
+NON_FIELD_ERRORS = "__all__"
+
+@dataclass
+class UnresolvedReference:
+    """Unresolved reference to an object."""
+
+    object_type: str
+    uuid: str
+
+    def __str__(self):
+        """String representation of the unresolved reference."""
+        return f"new_object:{self.object_type}:{self.uuid}"
+
+    def __eq__(self, other):
+        """Equality operator."""
+        if not isinstance(other, UnresolvedReference):
+            return False
+        return self.object_type == other.object_type and self.uuid == other.uuid
+
+    def __hash__(self):
+        """Hash function."""
+        return hash((self.object_type, self.uuid))
+
+    def __lt__(self, other):
+        """Less than operator."""
+        return self.object_type < other.object_type or (self.object_type == other.object_type and self.uuid < other.uuid)
+
+
+class ChangeType(Enum):
+    """Change type enum."""
+
+    CREATE = "create"
+    UPDATE = "update"
+    NOOP = "noop"
+
+
+@dataclass
+class Change:
+    """A change to a model instance."""
+
+    change_type: ChangeType
+    object_type: str
+    object_id: int | None = field(default=None)
+    object_primary_value: str | None = field(default=None)
+    ref_id: str | None = field(default=None)
+    id: str = field(default_factory=lambda: str(uuid.uuid4()))
+    before: dict | None = field(default=None)
+    data: dict | None = field(default=None)
+    new_refs: list[str] = field(default_factory=list)
+
+    def to_dict(self) -> dict:
+        """Convert the change to a dictionary."""
+        return {
+            "id": self.id,
+            "change_type": self.change_type.value,
"object_type": self.object_type, + "object_id": self.object_id, + "ref_id": self.ref_id, + "object_primary_value": self.object_primary_value, + "before": self.before, + "data": self.data, + "new_refs": self.new_refs, + } + + +@dataclass +class ChangeSet: + """A set of changes to a model instance.""" + + id: str = field(default_factory=lambda: str(uuid.uuid4())) + changes: list[Change] = field(default_factory=list) + branch: dict[str, str] | None = field(default=None) # {"id": str, "name": str} + + def to_dict(self) -> dict: + """Convert the change set to a dictionary.""" + return { + "id": self.id, + "changes": [change.to_dict() for change in self.changes], + "branch": self.branch, + } + + def validate(self) -> dict[str, list[str]]: + """Validate basics of the change set data.""" + errors = defaultdict(dict) + + for change in self.changes: + model = apps.get_model(change.object_type) + + change_data = change.data.copy() + if change.before: + change_data.update(change.before) + + excluded_relation_fields, rel_errors = self._validate_relations(change_data, model) + if rel_errors: + errors[change.object_type] = rel_errors + + try: + instance = model(**change_data) + instance.clean_fields(exclude=excluded_relation_fields) + except ValidationError as e: + errors[change.object_type].update(e.error_dict) + + return errors or None + + def _validate_relations(self, change_data: dict, model: models.Model) -> tuple[list[str], dict]: + # check that there is some value for every required + # reference field, but don't validate the actual reference. + # the fields are removed from the change_data so that other + # fields can be validated by instantiating the model. + excluded_relation_fields = [] + rel_errors = defaultdict(list) + for f in model._meta.get_fields(): + if isinstance(f, (GenericRelation, GenericForeignKey)): + excluded_relation_fields.append(f.name) + continue + if not f.is_relation: + continue + field_name = f.name + excluded_relation_fields.append(field_name) + + if hasattr(f, "related_model") and f.related_model == ContentType: + change_data.pop(field_name, None) + base_field = field_name[:-5] + excluded_relation_fields.append(base_field + "_id") + value = change_data.pop(base_field + "_id", None) + else: + value = change_data.pop(field_name, None) + + if not f.null and not f.blank and not f.many_to_many: + # this field is a required relation... 
+ if value is None: + rel_errors[f.name].append(f"Field {f.name} is required") + return excluded_relation_fields, rel_errors + + +@dataclass +class ChangeSetResult: + """A result of applying a change set.""" + + id: str | None = field(default_factory=lambda: str(uuid.uuid4())) + change_set: ChangeSet | None = field(default=None) + errors: dict | None = field(default=None) + + def to_dict(self) -> dict: + """Convert the result to a dictionary.""" + if self.change_set: + return self.change_set.to_dict() + + return { + "id": self.id, + "errors": self.errors, + } + + def get_status_code(self) -> int: + """Get the status code for the result.""" + return status.HTTP_200_OK if not self.errors else status.HTTP_400_BAD_REQUEST + + +class ChangeSetException(Exception): + """ChangeSetException is raised when an error occurs while generating or applying a change set.""" + + def __init__(self, message, errors=None): + """Initialize the exception.""" + super().__init__(message) + self.message = message + self.errors = errors or {} + + def __str__(self): + """Return the string representation of the exception.""" + if self.errors: + return f"{self.message}: {self.errors}" + return self.message diff --git a/netbox_diode_plugin/api/differ.py b/netbox_diode_plugin/api/differ.py new file mode 100644 index 0000000..a1721a0 --- /dev/null +++ b/netbox_diode_plugin/api/differ.py @@ -0,0 +1,193 @@ +#!/usr/bin/env python +# Copyright 2025 NetBox Labs Inc +"""Diode NetBox Plugin - API - Differ.""" + +import copy +import logging + +from django.contrib.contenttypes.models import ContentType +from django.core.exceptions import ValidationError +from utilities.data import shallow_compare_dict + +from .common import Change, ChangeSet, ChangeSetException, ChangeSetResult, ChangeType +from .plugin_utils import get_primary_value, legal_fields +from .supported_models import extract_supported_models +from .transformer import cleanup_unresolved_references, transform_proto_json + +logger = logging.getLogger(__name__) + +SUPPORTED_MODELS = extract_supported_models() + + +def prechange_data_from_instance(instance) -> dict: # noqa: C901 + """Convert model instance data to a dictionary format for comparison.""" + prechange_data = {} + + if instance is None: + return prechange_data + + model_class = instance.__class__ + object_type = f"{model_class._meta.app_label}.{model_class._meta.model_name}" + + model = SUPPORTED_MODELS.get(object_type) + if not model: + raise ValidationError(f"Model {model_class.__name__} is not supported") + + fields = model.get("fields", {}) + if not fields: + raise ValidationError(f"Model {model_class.__name__} has no fields") + + diode_fields = legal_fields(model_class) + + for field_name, field_info in fields.items(): + # permit only diode fields and the primary key + if field_name not in diode_fields and field_name != "id": + continue + + if not hasattr(instance, field_name): + continue + + if field_info["type"] == "ForeignKey" and field_info.get("is_many_to_one_rel", False): + continue + + value = getattr(instance, field_name) + if hasattr(value, "all"): # Handle many-to-many and many-to-one relationships + # For any relationship that has an 'all' method, get all related objects' primary keys + prechange_data[field_name] = ( + [item.pk for item in value.all()] if value is not None else [] + ) + elif hasattr( + value, "pk" + ): # Handle regular related fields (ForeignKey, OneToOne) + # Handle ContentType fields + if isinstance(value, ContentType): + prechange_data[field_name] = 
f"{value.app_label}.{value.model}" + else: + # For regular related fields, get the primary key + prechange_data[field_name] = value.pk if value is not None else None + else: + prechange_data[field_name] = value + + return prechange_data + + +def clean_diff_data(data: dict, exclude_empty_values: bool = True) -> dict: + """Clean diff data by removing null values.""" + result = {} + for k, v in data.items(): + if exclude_empty_values: + if v is None: + continue + if isinstance(v, list) and len(v) == 0: + continue + if isinstance(v, dict) and len(v) == 0: + continue + if isinstance(v, str) and v == "": + continue + result[k] = v + return result + + +def diff_to_change( + object_type: str, + prechange_data: dict, + postchange_data: dict, + changed_attrs: list[str], + unresolved_references: list[str], +) -> Change: + """Convert a diff to a change.""" + change_type = ChangeType.UPDATE if len(prechange_data) > 0 else ChangeType.CREATE + if change_type == ChangeType.UPDATE and not len(changed_attrs) > 0: + change_type = ChangeType.NOOP + + primary_value = get_primary_value(prechange_data | postchange_data, object_type) + if primary_value is None: + primary_value = "(unnamed)" + + prior_id = prechange_data.get("id") + ref_id = None + if prior_id is None: + ref_id = postchange_data.pop("id", None) + + change = Change( + change_type=change_type, + object_type=object_type, + object_id=prior_id if isinstance(prior_id, int) else None, + ref_id=ref_id, + object_primary_value=primary_value, + new_refs=unresolved_references, + ) + + if change_type != ChangeType.NOOP: + postchange_data_clean = clean_diff_data(postchange_data) + change.data = sort_dict_recursively(postchange_data_clean) + else: + change.data = {} + + if change_type == ChangeType.UPDATE or change_type == ChangeType.NOOP: + prechange_data_clean = clean_diff_data(prechange_data) + change.before = sort_dict_recursively(prechange_data_clean) + + return change + +def sort_dict_recursively(d): + """Recursively sorts a dictionary by keys.""" + if isinstance(d, dict): + return {k: sort_dict_recursively(v) for k, v in sorted(d.items())} + if isinstance(d, list): + # Convert all items to strings for comparison + return sorted([sort_dict_recursively(item) for item in d], key=str) + return d + + +def generate_changeset(entity: dict, object_type: str) -> ChangeSetResult: + """Generate a changeset for an entity.""" + change_set = ChangeSet() + + entities = transform_proto_json(entity, object_type, SUPPORTED_MODELS) + by_uuid = {x['_uuid']: x for x in entities} + for entity in entities: + prechange_data = {} + changed_attrs = [] + new_refs = cleanup_unresolved_references(entity) + object_type = entity.pop("_object_type") + _ = entity.pop("_uuid") + instance = entity.pop("_instance", None) + + if instance: + # the prior state is another new object... 
+ if isinstance(instance, str): + prechange_data = copy.deepcopy(by_uuid[instance]) + # prior state is a model instance + else: + prechange_data = prechange_data_from_instance(instance) + + changed_data = shallow_compare_dict( + prechange_data, entity, + ) + changed_attrs = sorted(changed_data.keys()) + change = diff_to_change( + object_type, + prechange_data, + entity, + changed_attrs, + new_refs, + ) + + change_set.changes.append(change) + + has_any_changes = False + for change in change_set.changes: + if change.change_type != ChangeType.NOOP: + has_any_changes = True + break + + if not has_any_changes: + change_set.changes = [] + if errors := change_set.validate(): + raise ChangeSetException("Invalid change set", errors) + + return ChangeSetResult( + id=change_set.id, + change_set=change_set, + ) diff --git a/netbox_diode_plugin/api/matcher.py b/netbox_diode_plugin/api/matcher.py new file mode 100644 index 0000000..8f11735 --- /dev/null +++ b/netbox_diode_plugin/api/matcher.py @@ -0,0 +1,440 @@ +#!/usr/bin/env python +# Copyright 2024 NetBox Labs Inc +"""Diode NetBox Plugin - API - Object matching utilities.""" + +import copy +import logging +from dataclasses import dataclass +from functools import cache, lru_cache +from typing import Type + +from core.models import ObjectType as NetBoxType +from django.conf import settings +from django.contrib.contenttypes.fields import ContentType +from django.core.exceptions import FieldDoesNotExist +from django.db import models +from django.db.models import F, Value +from django.db.models.lookups import Exact +from django.db.models.query_utils import Q + +from .common import UnresolvedReference +from .plugin_utils import content_type_id, get_object_type, get_object_type_model + +logger = logging.getLogger(__name__) + +# +# these matchers are not driven by netbox unique constraints, +# but are logical criteria that may be used to match objects. +# These should represent the likely intent of a user when +# matching existing objects. +# +_LOGICAL_MATCHERS = { + "dcim.macaddress": lambda: [ + ObjectMatchCriteria( + fields=("mac_address", "assigned_object_type", "assigned_object_id"), + name="logical_mac_address_within_parent", + model_class=get_object_type_model("dcim.macaddress"), + condition=Q(assigned_object_id__isnull=False), + ), + ObjectMatchCriteria( + fields=("mac_address", "assigned_object_type", "assigned_object_id"), + name="logical_mac_address_within_parent", + model_class=get_object_type_model("dcim.macaddress"), + condition=Q(assigned_object_id__isnull=True), + ), + ], + "ipam.ipaddress": lambda: [ + ObjectMatchCriteria( + fields=("address", ), + name="logical_ip_address_global_no_vrf", + model_class=get_object_type_model("ipam.ipaddress"), + condition=Q(vrf__isnull=True), + ), + ObjectMatchCriteria( + fields=("address", "assigned_object_type", "assigned_object_id"), + name="logical_ip_address_within_vrf", + model_class=get_object_type_model("ipam.ipaddress"), + condition=Q(vrf__isnull=False) + ), + ], + "ipam.prefix": lambda: [ + ObjectMatchCriteria( + fields=("prefix",), + name="logical_prefix_global_no_vrf", + model_class=get_object_type_model("ipam.prefix"), + condition=Q(vrf__isnull=True), + ), + ObjectMatchCriteria( + fields=("prefix", "vrf_id"), + name="logical_prefix_within_vrf", + model_class=get_object_type_model("ipam.prefix"), + condition=Q(vrf__isnull=False), + ), + ], +} + + +@dataclass +class ObjectMatchCriteria: + """ + Defines criteria for identifying a specific object. 
+
+    This matcher expects a fully 'transformed' and resolved
+    set of fields, i.e. field names are snake_case and match
+    the model fields, and any reference to another object
+    specifies a specific id in the appropriate field name,
+    e.g. device_id=123. For any generic reference,
+    both the type and the id should be specified, e.g.
+    scope_type="dcim.site" and scope_id=123.
+    """
+
+    fields: tuple[str] | None = None
+    expressions: tuple | None = None
+    condition: Q | None = None
+    model_class: Type[models.Model] | None = None
+    name: str | None = None
+
+    def __hash__(self):
+        """Hash the object match criteria."""
+        return hash((self.fields, self.expressions, self.condition, self.model_class.__name__, self.name))
+
+    def has_required_fields(self, data) -> bool:
+        """Returns True if the data given contains a value for all fields referenced by the constraint."""
+        return all(field in data for field in self.get_refs())
+
+    @cache
+    def get_refs(self) -> set[str]:
+        """Returns a set of all field names referenced by the constraint."""
+        refs = set()
+        if self.fields:
+            refs.update(self.fields)
+        elif self.expressions:
+            for expr in self.expressions:
+                refs |= _get_refs(expr)
+        return frozenset(refs)
+
+    @cache
+    def get_insensitive_refs(self) -> set[str]:
+        """
+        Returns a set of all field names that should be compared in a case-insensitive manner.
+
+        Best effort; doesn't handle expressions that are nested in a complex way.
+        """
+        refs = set()
+        if self.expressions:
+            for expr in self.expressions:
+                # TODO: be more careful here
+                if expr.__class__.__name__ == "Lower":
+                    for source_expr in getattr(expr, "source_expressions", []):
+                        if hasattr(source_expr, "name"):
+                            refs.add(source_expr.name)
+        return refs
+
+    def fingerprint(self, data: dict) -> str | None:
+        """
+        Returns a fingerprint of the data based on these criteria.
+
+        The fingerprint can be used to determine whether two
+        data structs roughly match.
+
+        This is a best effort based on the referenced fields
+        and some interrogation of case sensitivity. The
+        real criteria are potentially complex...
+ """ + if not self.has_required_fields(data): + return None + + if self.condition: + if not self._check_condition(data): + return None + + # sort the fields by name + sorted_fields = sorted(self.get_refs()) + insensitive = self.get_insensitive_refs() + values = [] + for field in sorted_fields: + value = data[field] + if isinstance(value, dict): + logger.warning(f"unexpected value type for fingerprinting: {value}") + return None + if field in insensitive: + value = value.lower() + values.append(value) + # logger.debug(f"fingerprint {self}: {data} -> values: {tuple(values)}") + + return hash(tuple(values)) + + def _check_condition(self, data) -> bool: + if self.condition is None: + return True + # TODO: handle evaluating complex conditions, + # there are only simple ones currently + if self.condition.connector != Q.AND: + logger.error(f"Unhandled condition {self.condition}") + return False + + if len(self.condition.children) != 1: + logger.error(f"Unhandled condition {self.condition}") + return False + + if len(self.condition.children[0]) != 2: + logger.error(f"Unhandled condition {self.condition}") + return False + + k, v = self.condition.children[0] + result = False + if k.endswith("__isnull"): + k = k[:-8] + result = k not in data or data[k] is None + else: + result = k in data and data[k] == v + + if self.condition.negated: + result = not result + + return result + + def build_queryset(self, data) -> models.QuerySet: + """Builds a queryset for the constraint with the given data.""" + if self.fields and len(self.fields) > 0: + return self._build_fields_queryset(data) + if self.expressions and len(self.expressions) > 0: + return self._build_expressions_queryset(data) + raise ValueError("No fields or expressions to build queryset from") + + def _build_fields_queryset(self, data) -> models.QuerySet: + """Builds a queryset for a simple set-of-fields constraint.""" + data = self._prepare_data(data) + lookup_kwargs = {} + for field_name in self.fields: + field = self.model_class._meta.get_field(field_name) + if field_name not in data: + logger.error(f" * cannot build fields queryset for {self.name} (missing field {field_name})") + return None # cannot match, missing field data + lookup_value = data.get(field_name) + if isinstance(lookup_value, UnresolvedReference): + logger.error(f" * cannot build fields queryset for {self.name} ({field_name} is unresolved reference)") + return None # cannot match, missing field data + if isinstance(lookup_value, dict): + logger.error(f" * cannot build fields queryset for {self.name} ({field_name} is dict)") + return None # cannot match, missing field data + lookup_kwargs[field.name] = lookup_value + + # logger.error(f" * query kwargs: {lookup_kwargs}") + qs = self.model_class.objects.filter(**lookup_kwargs) + if self.condition: + qs = qs.filter(self.condition) + return qs + + def _build_expressions_queryset(self, data) -> models.QuerySet: + """Builds a queryset for the constraint with the given data.""" + data = self._prepare_data(data) + replacements = { + F(field): Value(value) if isinstance(value, (str, int, float, bool)) else value + for field, value in data.items() + } + + filters = [] + for expr in self.expressions: + if hasattr(expr, "get_expression_for_validation"): + expr = expr.get_expression_for_validation() + + refs = [F(ref) for ref in _get_refs(expr)] + for ref in refs: + if ref not in replacements: + logger.error(f" * cannot build expr queryset for {self.name} (missing field {ref})") + return None # cannot match, missing field data + if 
isinstance(replacements[ref], UnresolvedReference): + logger.error(f" * cannot build expr queryset for {self.name} ({ref} is unresolved reference)") + return None # cannot match, missing field data + + rhs = expr.replace_expressions(replacements) + condition = Exact(expr, rhs) + filters.append(condition) + + qs = self.model_class.objects.filter(*filters) + if self.condition: + qs = qs.filter(self.condition) + return qs + + def _prepare_data(self, data: dict) -> dict: + prepared = {} + for field_name, value in data.items(): + try: + field = self.model_class._meta.get_field(field_name) + # special handling for object type -> content type id + if field.is_relation and hasattr(field, "related_model") and field.related_model == ContentType: + prepared[field_name] = content_type_id(value) + else: + prepared[field_name] = value + + except FieldDoesNotExist: + continue + # logger.error(f"prepared data: {data} -> {prepared}") + return prepared + +@lru_cache(maxsize=256) +def get_model_matchers(model_class) -> list[ObjectMatchCriteria]: + """Extract unique constraints from a Django model.""" + object_type = get_object_type(model_class) + matchers = _LOGICAL_MATCHERS.get(object_type, lambda: [])() + + # collect single fields that are unique + for field in model_class._meta.fields: + if field.name == "id": + # TODO(ltucker): more django-general detection of pk field? + continue + + if field.unique: + matchers.append( + ObjectMatchCriteria( + model_class=model_class, + fields=(field.name,), + name=f"unique_{field.name}", + ) + ) + + # collect UniqueConstraint constraints + for constraint in model_class._meta.constraints: + if not _is_supported_constraint(constraint, model_class): + continue + if len(constraint.fields) > 0: + matchers.append( + ObjectMatchCriteria( + model_class=model_class, + fields=tuple(constraint.fields), + condition=constraint.condition, + name=constraint.name, + ) + ) + elif len(constraint.expressions) > 0: + matchers.append( + ObjectMatchCriteria( + model_class=model_class, + expressions=tuple(constraint.expressions), + condition=constraint.condition, + name=constraint.name, + ) + ) + else: + logger.error( + f"Constraint {constraint.name} on {model_class.__name__} had no fields or expressions (skipped)" + ) + # (this shouldn't happen / enforced by django) + continue + + return matchers + + +def _is_supported_constraint(constraint, model_class) -> bool: + if not isinstance(constraint, models.UniqueConstraint): + return False + + if len(constraint.opclasses) > 0: + logger.warning(f"Constraint {constraint.name} on {model_class.__name__} had opclasses (skipped)") + return False + + if constraint.nulls_distinct is not None and constraint.nulls_distinct is True: + logger.warning(f"Constraint {constraint.name} on {model_class.__name__} had nulls_distinct (skipped)") + return False + + for field_name in constraint.fields: + field = model_class._meta.get_field(field_name) + if field.generated: + logger.warning( + f"Constraint {constraint.name} on {model_class.__name__} had" + f" generated field {field_name} (skipped)" + ) + return False + + return True + +def _get_refs(expr) -> set[str]: + refs = set() + if isinstance(expr, str): + refs.add(expr) + elif isinstance(expr, F): + refs.add(expr.name) + elif hasattr(expr, "get_source_expressions"): + for subexpr in expr.get_source_expressions(): + refs |= _get_refs(subexpr) + else: + logger.warning(f"Unhandled expression type for _get_refs: {type(expr)}") + return refs + +def _fingerprint_all(data: dict) -> str: + """ + Returns a fingerprint of 
the data based on all fields. + + Data should be a (flattened) dictionary of field values. + This ignores any fields that start with an underscore. + """ + if data is None: + return None + + values = [] + for k, v in sorted(data.items()): + if k.startswith("_"): + continue + values.append(k) + if isinstance(v, (list, tuple)): + values.extend(sorted(v)) + elif isinstance(v, dict): + values.append(_fingerprint_all(v)) + else: + values.append(v) + # logger.error(f"_fingerprint_all: {data} -> values: {tuple(values)}") + + return hash(tuple(values)) + +def fingerprint(data: dict, object_type: str) -> str: + """ + Fingerprint a data structure. + + This uses the first matcher that has all + required fields or else uses all fields. + + TODO: This means there are pathological? cases where + the same object is being referenced but by + different unique constraints in the same diff... + this could lead to some unexpected behavior. + """ + if data is None: + return None + + model_class = get_object_type_model(object_type) + # check any known match criteria + for matcher in get_model_matchers(model_class): + fp = matcher.fingerprint(data) + if fp is not None: + return fp + # fall back to fingerprinting all the data + return _fingerprint_all(data) + +def find_existing_object(data: dict, object_type: str): + """ + Find an existing object that matches the given data. + + Uses all object match criteria to look for an existing + object. Returns the first match found. + + Returns the object if found, otherwise None. + """ + logger.error(f"resolving {data}") + model_class = get_object_type_model(object_type) + for matcher in get_model_matchers(model_class): + if not matcher.has_required_fields(data): + logger.error(f" * skipped matcher {matcher.name} (missing fields)") + continue + q = matcher.build_queryset(data) + if q is None: + logger.error(f" * skipped matcher {matcher.name} (no queryset)") + continue + logger.error(f" * trying query {q.query}") + existing = q.order_by('pk').first() + if existing is not None: + logger.error(f" -> Found object {existing} via {matcher.name}") + return existing + logger.error(f" -> No object found for matcher {matcher.name}") + logger.error(" * No matchers found an existing object") + return None diff --git a/netbox_diode_plugin/api/plugin_utils.py b/netbox_diode_plugin/api/plugin_utils.py new file mode 100644 index 0000000..b526a5c --- /dev/null +++ b/netbox_diode_plugin/api/plugin_utils.py @@ -0,0 +1,906 @@ +"""Diode plugin helpers.""" + +# Generated code. DO NOT EDIT. +# Timestamp: 2025-04-01 21:05:16Z + +from dataclasses import dataclass +from functools import lru_cache +from typing import Type + +from core.models import ObjectType as NetBoxType +from django.contrib.contenttypes.models import ContentType +from django.db import models + + +@lru_cache(maxsize=256) +def get_object_type_model(object_type: str) -> Type[models.Model]: + """Get the model class for a given object type.""" + app_label, model_name = object_type.split('.') + object_content_type = NetBoxType.objects.get_by_natural_key(app_label, model_name) + return object_content_type.model_class() + +@lru_cache(maxsize=256) +def get_object_type(model_class: Type[models.Model]) -> str: + """Get the object type for a given model class.""" + content_type = ContentType.objects.get_for_model(model_class) + return content_type.app_label + '.' 
+ content_type.model + +@lru_cache(maxsize=256) +def content_type_id(object_type: str) -> int: + """Get the content type id for a given object type.""" + app_label, model_name = object_type.split('.') + object_content_type = NetBoxType.objects.get_by_natural_key(app_label, model_name) + return object_content_type.id + +@dataclass +class RefInfo: + object_type: str + field_name: str + is_generic: bool = False + is_many: bool = False + +_JSON_REF_INFO = { + 'circuits.circuit': { + 'assignments': RefInfo(object_type='circuits.circuitgroupassignment', field_name='assignments', is_many=True), + 'provider': RefInfo(object_type='circuits.provider', field_name='provider'), + 'providerAccount': RefInfo(object_type='circuits.provideraccount', field_name='provider_account'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'type': RefInfo(object_type='circuits.circuittype', field_name='type'), + }, + 'circuits.circuitgroup': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'circuits.circuitgroupassignment': { + 'group': RefInfo(object_type='circuits.circuitgroup', field_name='group'), + 'memberCircuit': RefInfo(object_type='circuits.circuit', field_name='member', is_generic=True), + 'memberVirtualCircuit': RefInfo(object_type='circuits.virtualcircuit', field_name='member', is_generic=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'circuits.circuittermination': { + 'circuit': RefInfo(object_type='circuits.circuit', field_name='circuit'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'terminationLocation': RefInfo(object_type='dcim.location', field_name='termination', is_generic=True), + 'terminationProviderNetwork': RefInfo(object_type='circuits.providernetwork', field_name='termination', is_generic=True), + 'terminationRegion': RefInfo(object_type='dcim.region', field_name='termination', is_generic=True), + 'terminationSite': RefInfo(object_type='dcim.site', field_name='termination', is_generic=True), + 'terminationSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='termination', is_generic=True), + }, + 'circuits.circuittype': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'circuits.provider': { + 'accounts': RefInfo(object_type='circuits.provideraccount', field_name='accounts', is_many=True), + 'asns': RefInfo(object_type='ipam.asn', field_name='asns', is_many=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'circuits.provideraccount': { + 'provider': RefInfo(object_type='circuits.provider', field_name='provider'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'circuits.providernetwork': { + 'provider': RefInfo(object_type='circuits.provider', field_name='provider'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'circuits.virtualcircuit': { + 'providerAccount': RefInfo(object_type='circuits.provideraccount', field_name='provider_account'), + 'providerNetwork': RefInfo(object_type='circuits.providernetwork', field_name='provider_network'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'type': 
RefInfo(object_type='circuits.virtualcircuittype', field_name='type'), + }, + 'circuits.virtualcircuittermination': { + 'interface': RefInfo(object_type='dcim.interface', field_name='interface'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'virtualCircuit': RefInfo(object_type='circuits.virtualcircuit', field_name='virtual_circuit'), + }, + 'circuits.virtualcircuittype': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.cable': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'dcim.cabletermination': { + 'cable': RefInfo(object_type='dcim.cable', field_name='cable'), + 'terminationCircuitTermination': RefInfo(object_type='circuits.circuittermination', field_name='termination', is_generic=True), + 'terminationConsolePort': RefInfo(object_type='dcim.consoleport', field_name='termination', is_generic=True), + 'terminationConsoleServerPort': RefInfo(object_type='dcim.consoleserverport', field_name='termination', is_generic=True), + 'terminationFrontPort': RefInfo(object_type='dcim.frontport', field_name='termination', is_generic=True), + 'terminationInterface': RefInfo(object_type='dcim.interface', field_name='termination', is_generic=True), + 'terminationPowerFeed': RefInfo(object_type='dcim.powerfeed', field_name='termination', is_generic=True), + 'terminationPowerOutlet': RefInfo(object_type='dcim.poweroutlet', field_name='termination', is_generic=True), + 'terminationPowerPort': RefInfo(object_type='dcim.powerport', field_name='termination', is_generic=True), + 'terminationRearPort': RefInfo(object_type='dcim.rearport', field_name='termination', is_generic=True), + }, + 'dcim.consoleport': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'module': RefInfo(object_type='dcim.module', field_name='module'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.consoleserverport': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'module': RefInfo(object_type='dcim.module', field_name='module'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.device': { + 'cluster': RefInfo(object_type='virtualization.cluster', field_name='cluster'), + 'deviceType': RefInfo(object_type='dcim.devicetype', field_name='device_type'), + 'location': RefInfo(object_type='dcim.location', field_name='location'), + 'oobIp': RefInfo(object_type='ipam.ipaddress', field_name='oob_ip'), + 'platform': RefInfo(object_type='dcim.platform', field_name='platform'), + 'primaryIp4': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip4'), + 'primaryIp6': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip6'), + 'rack': RefInfo(object_type='dcim.rack', field_name='rack'), + 'role': RefInfo(object_type='dcim.devicerole', field_name='role'), + 'site': RefInfo(object_type='dcim.site', field_name='site'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'virtualChassis': RefInfo(object_type='dcim.virtualchassis', field_name='virtual_chassis'), + }, + 'dcim.devicebay': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'installedDevice': RefInfo(object_type='dcim.device', field_name='installed_device'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 
'dcim.devicerole': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.devicetype': { + 'defaultPlatform': RefInfo(object_type='dcim.platform', field_name='default_platform'), + 'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='manufacturer'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.frontport': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'module': RefInfo(object_type='dcim.module', field_name='module'), + 'rearPort': RefInfo(object_type='dcim.rearport', field_name='rear_port'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.interface': { + 'bridge': RefInfo(object_type='dcim.interface', field_name='bridge'), + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'lag': RefInfo(object_type='dcim.interface', field_name='lag'), + 'module': RefInfo(object_type='dcim.module', field_name='module'), + 'parent': RefInfo(object_type='dcim.interface', field_name='parent'), + 'primaryMacAddress': RefInfo(object_type='dcim.macaddress', field_name='primary_mac_address'), + 'qinqSvlan': RefInfo(object_type='ipam.vlan', field_name='qinq_svlan'), + 'taggedVlans': RefInfo(object_type='ipam.vlan', field_name='tagged_vlans', is_many=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'untaggedVlan': RefInfo(object_type='ipam.vlan', field_name='untagged_vlan'), + 'vdcs': RefInfo(object_type='dcim.virtualdevicecontext', field_name='vdcs', is_many=True), + 'vlanTranslationPolicy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='vlan_translation_policy'), + 'vrf': RefInfo(object_type='ipam.vrf', field_name='vrf'), + 'wirelessLans': RefInfo(object_type='wireless.wirelesslan', field_name='wireless_lans', is_many=True), + }, + 'dcim.inventoryitem': { + 'componentConsolePort': RefInfo(object_type='dcim.consoleport', field_name='component', is_generic=True), + 'componentConsoleServerPort': RefInfo(object_type='dcim.consoleserverport', field_name='component', is_generic=True), + 'componentFrontPort': RefInfo(object_type='dcim.frontport', field_name='component', is_generic=True), + 'componentInterface': RefInfo(object_type='dcim.interface', field_name='component', is_generic=True), + 'componentPowerOutlet': RefInfo(object_type='dcim.poweroutlet', field_name='component', is_generic=True), + 'componentPowerPort': RefInfo(object_type='dcim.powerport', field_name='component', is_generic=True), + 'componentRearPort': RefInfo(object_type='dcim.rearport', field_name='component', is_generic=True), + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='manufacturer'), + 'parent': RefInfo(object_type='dcim.inventoryitem', field_name='parent'), + 'role': RefInfo(object_type='dcim.inventoryitemrole', field_name='role'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.inventoryitemrole': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.location': { + 'parent': RefInfo(object_type='dcim.location', field_name='parent'), + 'site': RefInfo(object_type='dcim.site', field_name='site'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'dcim.macaddress': { + 'assignedObjectInterface': 
RefInfo(object_type='dcim.interface', field_name='assigned_object', is_generic=True), + 'assignedObjectVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='assigned_object', is_generic=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.manufacturer': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.module': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'moduleBay': RefInfo(object_type='dcim.modulebay', field_name='module_bay'), + 'moduleType': RefInfo(object_type='dcim.moduletype', field_name='module_type'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.modulebay': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'installedModule': RefInfo(object_type='dcim.module', field_name='installed_module'), + 'module': RefInfo(object_type='dcim.module', field_name='module'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.moduletype': { + 'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='manufacturer'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.platform': { + 'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='manufacturer'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.powerfeed': { + 'powerPanel': RefInfo(object_type='dcim.powerpanel', field_name='power_panel'), + 'rack': RefInfo(object_type='dcim.rack', field_name='rack'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'dcim.poweroutlet': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'module': RefInfo(object_type='dcim.module', field_name='module'), + 'powerPort': RefInfo(object_type='dcim.powerport', field_name='power_port'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.powerpanel': { + 'location': RefInfo(object_type='dcim.location', field_name='location'), + 'site': RefInfo(object_type='dcim.site', field_name='site'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.powerport': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'module': RefInfo(object_type='dcim.module', field_name='module'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.rack': { + 'location': RefInfo(object_type='dcim.location', field_name='location'), + 'rackType': RefInfo(object_type='dcim.racktype', field_name='rack_type'), + 'role': RefInfo(object_type='dcim.rackrole', field_name='role'), + 'site': RefInfo(object_type='dcim.site', field_name='site'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'dcim.rackreservation': { + 'rack': RefInfo(object_type='dcim.rack', field_name='rack'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'dcim.rackrole': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.racktype': { + 'manufacturer': RefInfo(object_type='dcim.manufacturer', field_name='manufacturer'), + 'tags': RefInfo(object_type='extras.tag', 
field_name='tags', is_many=True), + }, + 'dcim.rearport': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'module': RefInfo(object_type='dcim.module', field_name='module'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.region': { + 'parent': RefInfo(object_type='dcim.region', field_name='parent'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.site': { + 'asns': RefInfo(object_type='ipam.asn', field_name='asns', is_many=True), + 'group': RefInfo(object_type='dcim.sitegroup', field_name='group'), + 'region': RefInfo(object_type='dcim.region', field_name='region'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'dcim.sitegroup': { + 'parent': RefInfo(object_type='dcim.sitegroup', field_name='parent'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.virtualchassis': { + 'master': RefInfo(object_type='dcim.device', field_name='master'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'dcim.virtualdevicecontext': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'primaryIp4': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip4'), + 'primaryIp6': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip6'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'ipam.aggregate': { + 'rir': RefInfo(object_type='ipam.rir', field_name='rir'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'ipam.asn': { + 'rir': RefInfo(object_type='ipam.rir', field_name='rir'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'ipam.asnrange': { + 'rir': RefInfo(object_type='ipam.rir', field_name='rir'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'ipam.fhrpgroup': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'ipam.fhrpgroupassignment': { + 'group': RefInfo(object_type='ipam.fhrpgroup', field_name='group'), + 'interfaceAsn': RefInfo(object_type='ipam.asn', field_name='interface', is_generic=True), + 'interfaceAsnRange': RefInfo(object_type='ipam.asnrange', field_name='interface', is_generic=True), + 'interfaceAggregate': RefInfo(object_type='ipam.aggregate', field_name='interface', is_generic=True), + 'interfaceCable': RefInfo(object_type='dcim.cable', field_name='interface', is_generic=True), + 'interfaceCablePath': RefInfo(object_type='dcim.cablepath', field_name='interface', is_generic=True), + 'interfaceCableTermination': RefInfo(object_type='dcim.cabletermination', field_name='interface', is_generic=True), + 'interfaceCircuit': RefInfo(object_type='circuits.circuit', field_name='interface', is_generic=True), + 'interfaceCircuitGroup': RefInfo(object_type='circuits.circuitgroup', field_name='interface', is_generic=True), + 'interfaceCircuitGroupAssignment': RefInfo(object_type='circuits.circuitgroupassignment', field_name='interface', is_generic=True), + 'interfaceCircuitTermination': 
RefInfo(object_type='circuits.circuittermination', field_name='interface', is_generic=True), + 'interfaceCircuitType': RefInfo(object_type='circuits.circuittype', field_name='interface', is_generic=True), + 'interfaceCluster': RefInfo(object_type='virtualization.cluster', field_name='interface', is_generic=True), + 'interfaceClusterGroup': RefInfo(object_type='virtualization.clustergroup', field_name='interface', is_generic=True), + 'interfaceClusterType': RefInfo(object_type='virtualization.clustertype', field_name='interface', is_generic=True), + 'interfaceConsolePort': RefInfo(object_type='dcim.consoleport', field_name='interface', is_generic=True), + 'interfaceConsoleServerPort': RefInfo(object_type='dcim.consoleserverport', field_name='interface', is_generic=True), + 'interfaceContact': RefInfo(object_type='tenancy.contact', field_name='interface', is_generic=True), + 'interfaceContactAssignment': RefInfo(object_type='tenancy.contactassignment', field_name='interface', is_generic=True), + 'interfaceContactGroup': RefInfo(object_type='tenancy.contactgroup', field_name='interface', is_generic=True), + 'interfaceContactRole': RefInfo(object_type='tenancy.contactrole', field_name='interface', is_generic=True), + 'interfaceDevice': RefInfo(object_type='dcim.device', field_name='interface', is_generic=True), + 'interfaceDeviceBay': RefInfo(object_type='dcim.devicebay', field_name='interface', is_generic=True), + 'interfaceDeviceRole': RefInfo(object_type='dcim.devicerole', field_name='interface', is_generic=True), + 'interfaceDeviceType': RefInfo(object_type='dcim.devicetype', field_name='interface', is_generic=True), + 'interfaceFhrpGroup': RefInfo(object_type='ipam.fhrpgroup', field_name='interface', is_generic=True), + 'interfaceFhrpGroupAssignment': RefInfo(object_type='ipam.fhrpgroupassignment', field_name='interface', is_generic=True), + 'interfaceFrontPort': RefInfo(object_type='dcim.frontport', field_name='interface', is_generic=True), + 'interfaceIkePolicy': RefInfo(object_type='vpn.ikepolicy', field_name='interface', is_generic=True), + 'interfaceIkeProposal': RefInfo(object_type='vpn.ikeproposal', field_name='interface', is_generic=True), + 'interfaceIpAddress': RefInfo(object_type='ipam.ipaddress', field_name='interface', is_generic=True), + 'interfaceIpRange': RefInfo(object_type='ipam.iprange', field_name='interface', is_generic=True), + 'interfaceIpSecPolicy': RefInfo(object_type='vpn.ipsecpolicy', field_name='interface', is_generic=True), + 'interfaceIpSecProfile': RefInfo(object_type='vpn.ipsecprofile', field_name='interface', is_generic=True), + 'interfaceIpSecProposal': RefInfo(object_type='vpn.ipsecproposal', field_name='interface', is_generic=True), + 'interfaceInterface': RefInfo(object_type='dcim.interface', field_name='interface', is_generic=True), + 'interfaceInventoryItem': RefInfo(object_type='dcim.inventoryitem', field_name='interface', is_generic=True), + 'interfaceInventoryItemRole': RefInfo(object_type='dcim.inventoryitemrole', field_name='interface', is_generic=True), + 'interfaceL2Vpn': RefInfo(object_type='vpn.l2vpn', field_name='interface', is_generic=True), + 'interfaceL2VpnTermination': RefInfo(object_type='vpn.l2vpntermination', field_name='interface', is_generic=True), + 'interfaceLocation': RefInfo(object_type='dcim.location', field_name='interface', is_generic=True), + 'interfaceMacAddress': RefInfo(object_type='dcim.macaddress', field_name='interface', is_generic=True), + 'interfaceManufacturer': RefInfo(object_type='dcim.manufacturer', 
field_name='interface', is_generic=True), + 'interfaceModule': RefInfo(object_type='dcim.module', field_name='interface', is_generic=True), + 'interfaceModuleBay': RefInfo(object_type='dcim.modulebay', field_name='interface', is_generic=True), + 'interfaceModuleType': RefInfo(object_type='dcim.moduletype', field_name='interface', is_generic=True), + 'interfacePlatform': RefInfo(object_type='dcim.platform', field_name='interface', is_generic=True), + 'interfacePowerFeed': RefInfo(object_type='dcim.powerfeed', field_name='interface', is_generic=True), + 'interfacePowerOutlet': RefInfo(object_type='dcim.poweroutlet', field_name='interface', is_generic=True), + 'interfacePowerPanel': RefInfo(object_type='dcim.powerpanel', field_name='interface', is_generic=True), + 'interfacePowerPort': RefInfo(object_type='dcim.powerport', field_name='interface', is_generic=True), + 'interfacePrefix': RefInfo(object_type='ipam.prefix', field_name='interface', is_generic=True), + 'interfaceProvider': RefInfo(object_type='circuits.provider', field_name='interface', is_generic=True), + 'interfaceProviderAccount': RefInfo(object_type='circuits.provideraccount', field_name='interface', is_generic=True), + 'interfaceProviderNetwork': RefInfo(object_type='circuits.providernetwork', field_name='interface', is_generic=True), + 'interfaceRir': RefInfo(object_type='ipam.rir', field_name='interface', is_generic=True), + 'interfaceRack': RefInfo(object_type='dcim.rack', field_name='interface', is_generic=True), + 'interfaceRackReservation': RefInfo(object_type='dcim.rackreservation', field_name='interface', is_generic=True), + 'interfaceRackRole': RefInfo(object_type='dcim.rackrole', field_name='interface', is_generic=True), + 'interfaceRackType': RefInfo(object_type='dcim.racktype', field_name='interface', is_generic=True), + 'interfaceRearPort': RefInfo(object_type='dcim.rearport', field_name='interface', is_generic=True), + 'interfaceRegion': RefInfo(object_type='dcim.region', field_name='interface', is_generic=True), + 'interfaceRole': RefInfo(object_type='ipam.role', field_name='interface', is_generic=True), + 'interfaceRouteTarget': RefInfo(object_type='ipam.routetarget', field_name='interface', is_generic=True), + 'interfaceService': RefInfo(object_type='ipam.service', field_name='interface', is_generic=True), + 'interfaceSite': RefInfo(object_type='dcim.site', field_name='interface', is_generic=True), + 'interfaceSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='interface', is_generic=True), + 'interfaceTag': RefInfo(object_type='extras.tag', field_name='interface', is_generic=True), + 'interfaceTenant': RefInfo(object_type='tenancy.tenant', field_name='interface', is_generic=True), + 'interfaceTenantGroup': RefInfo(object_type='tenancy.tenantgroup', field_name='interface', is_generic=True), + 'interfaceTunnel': RefInfo(object_type='vpn.tunnel', field_name='interface', is_generic=True), + 'interfaceTunnelGroup': RefInfo(object_type='vpn.tunnelgroup', field_name='interface', is_generic=True), + 'interfaceTunnelTermination': RefInfo(object_type='vpn.tunneltermination', field_name='interface', is_generic=True), + 'interfaceVlan': RefInfo(object_type='ipam.vlan', field_name='interface', is_generic=True), + 'interfaceVlanGroup': RefInfo(object_type='ipam.vlangroup', field_name='interface', is_generic=True), + 'interfaceVlanTranslationPolicy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='interface', is_generic=True), + 'interfaceVlanTranslationRule': 
RefInfo(object_type='ipam.vlantranslationrule', field_name='interface', is_generic=True), + 'interfaceVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='interface', is_generic=True), + 'interfaceVrf': RefInfo(object_type='ipam.vrf', field_name='interface', is_generic=True), + 'interfaceVirtualChassis': RefInfo(object_type='dcim.virtualchassis', field_name='interface', is_generic=True), + 'interfaceVirtualCircuit': RefInfo(object_type='circuits.virtualcircuit', field_name='interface', is_generic=True), + 'interfaceVirtualCircuitTermination': RefInfo(object_type='circuits.virtualcircuittermination', field_name='interface', is_generic=True), + 'interfaceVirtualCircuitType': RefInfo(object_type='circuits.virtualcircuittype', field_name='interface', is_generic=True), + 'interfaceVirtualDeviceContext': RefInfo(object_type='dcim.virtualdevicecontext', field_name='interface', is_generic=True), + 'interfaceVirtualDisk': RefInfo(object_type='virtualization.virtualdisk', field_name='interface', is_generic=True), + 'interfaceVirtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='interface', is_generic=True), + 'interfaceWirelessLan': RefInfo(object_type='wireless.wirelesslan', field_name='interface', is_generic=True), + 'interfaceWirelessLanGroup': RefInfo(object_type='wireless.wirelesslangroup', field_name='interface', is_generic=True), + 'interfaceWirelessLink': RefInfo(object_type='wireless.wirelesslink', field_name='interface', is_generic=True), + }, + 'ipam.ipaddress': { + 'assignedObjectFhrpGroup': RefInfo(object_type='ipam.fhrpgroup', field_name='assigned_object', is_generic=True), + 'assignedObjectInterface': RefInfo(object_type='dcim.interface', field_name='assigned_object', is_generic=True), + 'assignedObjectVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='assigned_object', is_generic=True), + 'natInside': RefInfo(object_type='ipam.ipaddress', field_name='nat_inside'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'vrf': RefInfo(object_type='ipam.vrf', field_name='vrf'), + }, + 'ipam.iprange': { + 'role': RefInfo(object_type='ipam.role', field_name='role'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'vrf': RefInfo(object_type='ipam.vrf', field_name='vrf'), + }, + 'ipam.prefix': { + 'role': RefInfo(object_type='ipam.role', field_name='role'), + 'scopeLocation': RefInfo(object_type='dcim.location', field_name='scope', is_generic=True), + 'scopeRegion': RefInfo(object_type='dcim.region', field_name='scope', is_generic=True), + 'scopeSite': RefInfo(object_type='dcim.site', field_name='scope', is_generic=True), + 'scopeSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='scope', is_generic=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'vlan': RefInfo(object_type='ipam.vlan', field_name='vlan'), + 'vrf': RefInfo(object_type='ipam.vrf', field_name='vrf'), + }, + 'ipam.rir': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'ipam.role': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'ipam.routetarget': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': 
RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'ipam.service': { + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'ipaddresses': RefInfo(object_type='ipam.ipaddress', field_name='ipaddresses', is_many=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'virtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='virtual_machine'), + }, + 'ipam.vlan': { + 'group': RefInfo(object_type='ipam.vlangroup', field_name='group'), + 'qinqSvlan': RefInfo(object_type='ipam.vlan', field_name='qinq_svlan'), + 'role': RefInfo(object_type='ipam.role', field_name='role'), + 'site': RefInfo(object_type='dcim.site', field_name='site'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'ipam.vlangroup': { + 'scopeCluster': RefInfo(object_type='virtualization.cluster', field_name='scope', is_generic=True), + 'scopeClusterGroup': RefInfo(object_type='virtualization.clustergroup', field_name='scope', is_generic=True), + 'scopeLocation': RefInfo(object_type='dcim.location', field_name='scope', is_generic=True), + 'scopeRack': RefInfo(object_type='dcim.rack', field_name='scope', is_generic=True), + 'scopeRegion': RefInfo(object_type='dcim.region', field_name='scope', is_generic=True), + 'scopeSite': RefInfo(object_type='dcim.site', field_name='scope', is_generic=True), + 'scopeSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='scope', is_generic=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'ipam.vlantranslationrule': { + 'policy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='policy'), + }, + 'ipam.vrf': { + 'exportTargets': RefInfo(object_type='ipam.routetarget', field_name='export_targets', is_many=True), + 'importTargets': RefInfo(object_type='ipam.routetarget', field_name='import_targets', is_many=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'tenancy.contact': { + 'group': RefInfo(object_type='tenancy.contactgroup', field_name='group'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'tenancy.contactassignment': { + 'contact': RefInfo(object_type='tenancy.contact', field_name='contact'), + 'objectAsn': RefInfo(object_type='ipam.asn', field_name='object', is_generic=True), + 'objectAsnRange': RefInfo(object_type='ipam.asnrange', field_name='object', is_generic=True), + 'objectAggregate': RefInfo(object_type='ipam.aggregate', field_name='object', is_generic=True), + 'objectCable': RefInfo(object_type='dcim.cable', field_name='object', is_generic=True), + 'objectCablePath': RefInfo(object_type='dcim.cablepath', field_name='object', is_generic=True), + 'objectCableTermination': RefInfo(object_type='dcim.cabletermination', field_name='object', is_generic=True), + 'objectCircuit': RefInfo(object_type='circuits.circuit', field_name='object', is_generic=True), + 'objectCircuitGroup': RefInfo(object_type='circuits.circuitgroup', field_name='object', is_generic=True), + 'objectCircuitGroupAssignment': RefInfo(object_type='circuits.circuitgroupassignment', field_name='object', is_generic=True), + 'objectCircuitTermination': RefInfo(object_type='circuits.circuittermination', field_name='object', is_generic=True), + 'objectCircuitType': RefInfo(object_type='circuits.circuittype', 
field_name='object', is_generic=True), + 'objectCluster': RefInfo(object_type='virtualization.cluster', field_name='object', is_generic=True), + 'objectClusterGroup': RefInfo(object_type='virtualization.clustergroup', field_name='object', is_generic=True), + 'objectClusterType': RefInfo(object_type='virtualization.clustertype', field_name='object', is_generic=True), + 'objectConsolePort': RefInfo(object_type='dcim.consoleport', field_name='object', is_generic=True), + 'objectConsoleServerPort': RefInfo(object_type='dcim.consoleserverport', field_name='object', is_generic=True), + 'objectContact': RefInfo(object_type='tenancy.contact', field_name='object', is_generic=True), + 'objectContactAssignment': RefInfo(object_type='tenancy.contactassignment', field_name='object', is_generic=True), + 'objectContactGroup': RefInfo(object_type='tenancy.contactgroup', field_name='object', is_generic=True), + 'objectContactRole': RefInfo(object_type='tenancy.contactrole', field_name='object', is_generic=True), + 'objectDevice': RefInfo(object_type='dcim.device', field_name='object', is_generic=True), + 'objectDeviceBay': RefInfo(object_type='dcim.devicebay', field_name='object', is_generic=True), + 'objectDeviceRole': RefInfo(object_type='dcim.devicerole', field_name='object', is_generic=True), + 'objectDeviceType': RefInfo(object_type='dcim.devicetype', field_name='object', is_generic=True), + 'objectFhrpGroup': RefInfo(object_type='ipam.fhrpgroup', field_name='object', is_generic=True), + 'objectFhrpGroupAssignment': RefInfo(object_type='ipam.fhrpgroupassignment', field_name='object', is_generic=True), + 'objectFrontPort': RefInfo(object_type='dcim.frontport', field_name='object', is_generic=True), + 'objectIkePolicy': RefInfo(object_type='vpn.ikepolicy', field_name='object', is_generic=True), + 'objectIkeProposal': RefInfo(object_type='vpn.ikeproposal', field_name='object', is_generic=True), + 'objectIpAddress': RefInfo(object_type='ipam.ipaddress', field_name='object', is_generic=True), + 'objectIpRange': RefInfo(object_type='ipam.iprange', field_name='object', is_generic=True), + 'objectIpSecPolicy': RefInfo(object_type='vpn.ipsecpolicy', field_name='object', is_generic=True), + 'objectIpSecProfile': RefInfo(object_type='vpn.ipsecprofile', field_name='object', is_generic=True), + 'objectIpSecProposal': RefInfo(object_type='vpn.ipsecproposal', field_name='object', is_generic=True), + 'objectInterface': RefInfo(object_type='dcim.interface', field_name='object', is_generic=True), + 'objectInventoryItem': RefInfo(object_type='dcim.inventoryitem', field_name='object', is_generic=True), + 'objectInventoryItemRole': RefInfo(object_type='dcim.inventoryitemrole', field_name='object', is_generic=True), + 'objectL2Vpn': RefInfo(object_type='vpn.l2vpn', field_name='object', is_generic=True), + 'objectL2VpnTermination': RefInfo(object_type='vpn.l2vpntermination', field_name='object', is_generic=True), + 'objectLocation': RefInfo(object_type='dcim.location', field_name='object', is_generic=True), + 'objectMacAddress': RefInfo(object_type='dcim.macaddress', field_name='object', is_generic=True), + 'objectManufacturer': RefInfo(object_type='dcim.manufacturer', field_name='object', is_generic=True), + 'objectModule': RefInfo(object_type='dcim.module', field_name='object', is_generic=True), + 'objectModuleBay': RefInfo(object_type='dcim.modulebay', field_name='object', is_generic=True), + 'objectModuleType': RefInfo(object_type='dcim.moduletype', field_name='object', is_generic=True), + 'objectPlatform': 
RefInfo(object_type='dcim.platform', field_name='object', is_generic=True), + 'objectPowerFeed': RefInfo(object_type='dcim.powerfeed', field_name='object', is_generic=True), + 'objectPowerOutlet': RefInfo(object_type='dcim.poweroutlet', field_name='object', is_generic=True), + 'objectPowerPanel': RefInfo(object_type='dcim.powerpanel', field_name='object', is_generic=True), + 'objectPowerPort': RefInfo(object_type='dcim.powerport', field_name='object', is_generic=True), + 'objectPrefix': RefInfo(object_type='ipam.prefix', field_name='object', is_generic=True), + 'objectProvider': RefInfo(object_type='circuits.provider', field_name='object', is_generic=True), + 'objectProviderAccount': RefInfo(object_type='circuits.provideraccount', field_name='object', is_generic=True), + 'objectProviderNetwork': RefInfo(object_type='circuits.providernetwork', field_name='object', is_generic=True), + 'objectRir': RefInfo(object_type='ipam.rir', field_name='object', is_generic=True), + 'objectRack': RefInfo(object_type='dcim.rack', field_name='object', is_generic=True), + 'objectRackReservation': RefInfo(object_type='dcim.rackreservation', field_name='object', is_generic=True), + 'objectRackRole': RefInfo(object_type='dcim.rackrole', field_name='object', is_generic=True), + 'objectRackType': RefInfo(object_type='dcim.racktype', field_name='object', is_generic=True), + 'objectRearPort': RefInfo(object_type='dcim.rearport', field_name='object', is_generic=True), + 'objectRegion': RefInfo(object_type='dcim.region', field_name='object', is_generic=True), + 'objectRole': RefInfo(object_type='ipam.role', field_name='object', is_generic=True), + 'objectRouteTarget': RefInfo(object_type='ipam.routetarget', field_name='object', is_generic=True), + 'objectService': RefInfo(object_type='ipam.service', field_name='object', is_generic=True), + 'objectSite': RefInfo(object_type='dcim.site', field_name='object', is_generic=True), + 'objectSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='object', is_generic=True), + 'objectTag': RefInfo(object_type='extras.tag', field_name='object', is_generic=True), + 'objectTenant': RefInfo(object_type='tenancy.tenant', field_name='object', is_generic=True), + 'objectTenantGroup': RefInfo(object_type='tenancy.tenantgroup', field_name='object', is_generic=True), + 'objectTunnel': RefInfo(object_type='vpn.tunnel', field_name='object', is_generic=True), + 'objectTunnelGroup': RefInfo(object_type='vpn.tunnelgroup', field_name='object', is_generic=True), + 'objectTunnelTermination': RefInfo(object_type='vpn.tunneltermination', field_name='object', is_generic=True), + 'objectVlan': RefInfo(object_type='ipam.vlan', field_name='object', is_generic=True), + 'objectVlanGroup': RefInfo(object_type='ipam.vlangroup', field_name='object', is_generic=True), + 'objectVlanTranslationPolicy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='object', is_generic=True), + 'objectVlanTranslationRule': RefInfo(object_type='ipam.vlantranslationrule', field_name='object', is_generic=True), + 'objectVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='object', is_generic=True), + 'objectVrf': RefInfo(object_type='ipam.vrf', field_name='object', is_generic=True), + 'objectVirtualChassis': RefInfo(object_type='dcim.virtualchassis', field_name='object', is_generic=True), + 'objectVirtualCircuit': RefInfo(object_type='circuits.virtualcircuit', field_name='object', is_generic=True), + 'objectVirtualCircuitTermination': 
RefInfo(object_type='circuits.virtualcircuittermination', field_name='object', is_generic=True), + 'objectVirtualCircuitType': RefInfo(object_type='circuits.virtualcircuittype', field_name='object', is_generic=True), + 'objectVirtualDeviceContext': RefInfo(object_type='dcim.virtualdevicecontext', field_name='object', is_generic=True), + 'objectVirtualDisk': RefInfo(object_type='virtualization.virtualdisk', field_name='object', is_generic=True), + 'objectVirtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='object', is_generic=True), + 'objectWirelessLan': RefInfo(object_type='wireless.wirelesslan', field_name='object', is_generic=True), + 'objectWirelessLanGroup': RefInfo(object_type='wireless.wirelesslangroup', field_name='object', is_generic=True), + 'objectWirelessLink': RefInfo(object_type='wireless.wirelesslink', field_name='object', is_generic=True), + 'role': RefInfo(object_type='tenancy.contactrole', field_name='role'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'tenancy.contactgroup': { + 'parent': RefInfo(object_type='tenancy.contactgroup', field_name='parent'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'tenancy.contactrole': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'tenancy.tenant': { + 'group': RefInfo(object_type='tenancy.tenantgroup', field_name='group'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'tenancy.tenantgroup': { + 'parent': RefInfo(object_type='tenancy.tenantgroup', field_name='parent'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'virtualization.cluster': { + 'group': RefInfo(object_type='virtualization.clustergroup', field_name='group'), + 'scopeLocation': RefInfo(object_type='dcim.location', field_name='scope', is_generic=True), + 'scopeRegion': RefInfo(object_type='dcim.region', field_name='scope', is_generic=True), + 'scopeSite': RefInfo(object_type='dcim.site', field_name='scope', is_generic=True), + 'scopeSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='scope', is_generic=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'type': RefInfo(object_type='virtualization.clustertype', field_name='type'), + }, + 'virtualization.clustergroup': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'virtualization.clustertype': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'virtualization.virtualdisk': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'virtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='virtual_machine'), + }, + 'virtualization.virtualmachine': { + 'cluster': RefInfo(object_type='virtualization.cluster', field_name='cluster'), + 'device': RefInfo(object_type='dcim.device', field_name='device'), + 'platform': RefInfo(object_type='dcim.platform', field_name='platform'), + 'primaryIp4': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip4'), + 'primaryIp6': RefInfo(object_type='ipam.ipaddress', field_name='primary_ip6'), + 'role': RefInfo(object_type='dcim.devicerole', field_name='role'), + 'site': RefInfo(object_type='dcim.site', field_name='site'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': 
RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'virtualization.vminterface': { + 'bridge': RefInfo(object_type='virtualization.vminterface', field_name='bridge'), + 'parent': RefInfo(object_type='virtualization.vminterface', field_name='parent'), + 'primaryMacAddress': RefInfo(object_type='dcim.macaddress', field_name='primary_mac_address'), + 'qinqSvlan': RefInfo(object_type='ipam.vlan', field_name='qinq_svlan'), + 'taggedVlans': RefInfo(object_type='ipam.vlan', field_name='tagged_vlans', is_many=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'untaggedVlan': RefInfo(object_type='ipam.vlan', field_name='untagged_vlan'), + 'virtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='virtual_machine'), + 'vlanTranslationPolicy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='vlan_translation_policy'), + 'vrf': RefInfo(object_type='ipam.vrf', field_name='vrf'), + }, + 'vpn.ikepolicy': { + 'proposals': RefInfo(object_type='vpn.ikeproposal', field_name='proposals', is_many=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'vpn.ikeproposal': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'vpn.ipsecpolicy': { + 'proposals': RefInfo(object_type='vpn.ipsecproposal', field_name='proposals', is_many=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'vpn.ipsecprofile': { + 'ikePolicy': RefInfo(object_type='vpn.ikepolicy', field_name='ike_policy'), + 'ipsecPolicy': RefInfo(object_type='vpn.ipsecpolicy', field_name='ipsec_policy'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'vpn.ipsecproposal': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'vpn.l2vpn': { + 'exportTargets': RefInfo(object_type='ipam.routetarget', field_name='export_targets', is_many=True), + 'importTargets': RefInfo(object_type='ipam.routetarget', field_name='import_targets', is_many=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'vpn.l2vpntermination': { + 'assignedObjectInterface': RefInfo(object_type='dcim.interface', field_name='assigned_object', is_generic=True), + 'assignedObjectVlan': RefInfo(object_type='ipam.vlan', field_name='assigned_object', is_generic=True), + 'assignedObjectVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='assigned_object', is_generic=True), + 'l2Vpn': RefInfo(object_type='vpn.l2vpn', field_name='l2vpn'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'vpn.tunnel': { + 'group': RefInfo(object_type='vpn.tunnelgroup', field_name='group'), + 'ipsecProfile': RefInfo(object_type='vpn.ipsecprofile', field_name='ipsec_profile'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, + 'vpn.tunnelgroup': { + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'vpn.tunneltermination': { + 'outsideIp': RefInfo(object_type='ipam.ipaddress', field_name='outside_ip'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'terminationAsn': RefInfo(object_type='ipam.asn', field_name='termination', is_generic=True), + 'terminationAsnRange': RefInfo(object_type='ipam.asnrange', 
field_name='termination', is_generic=True), + 'terminationAggregate': RefInfo(object_type='ipam.aggregate', field_name='termination', is_generic=True), + 'terminationCable': RefInfo(object_type='dcim.cable', field_name='termination', is_generic=True), + 'terminationCablePath': RefInfo(object_type='dcim.cablepath', field_name='termination', is_generic=True), + 'terminationCableTermination': RefInfo(object_type='dcim.cabletermination', field_name='termination', is_generic=True), + 'terminationCircuit': RefInfo(object_type='circuits.circuit', field_name='termination', is_generic=True), + 'terminationCircuitGroup': RefInfo(object_type='circuits.circuitgroup', field_name='termination', is_generic=True), + 'terminationCircuitGroupAssignment': RefInfo(object_type='circuits.circuitgroupassignment', field_name='termination', is_generic=True), + 'terminationCircuitTermination': RefInfo(object_type='circuits.circuittermination', field_name='termination', is_generic=True), + 'terminationCircuitType': RefInfo(object_type='circuits.circuittype', field_name='termination', is_generic=True), + 'terminationCluster': RefInfo(object_type='virtualization.cluster', field_name='termination', is_generic=True), + 'terminationClusterGroup': RefInfo(object_type='virtualization.clustergroup', field_name='termination', is_generic=True), + 'terminationClusterType': RefInfo(object_type='virtualization.clustertype', field_name='termination', is_generic=True), + 'terminationConsolePort': RefInfo(object_type='dcim.consoleport', field_name='termination', is_generic=True), + 'terminationConsoleServerPort': RefInfo(object_type='dcim.consoleserverport', field_name='termination', is_generic=True), + 'terminationContact': RefInfo(object_type='tenancy.contact', field_name='termination', is_generic=True), + 'terminationContactAssignment': RefInfo(object_type='tenancy.contactassignment', field_name='termination', is_generic=True), + 'terminationContactGroup': RefInfo(object_type='tenancy.contactgroup', field_name='termination', is_generic=True), + 'terminationContactRole': RefInfo(object_type='tenancy.contactrole', field_name='termination', is_generic=True), + 'terminationDevice': RefInfo(object_type='dcim.device', field_name='termination', is_generic=True), + 'terminationDeviceBay': RefInfo(object_type='dcim.devicebay', field_name='termination', is_generic=True), + 'terminationDeviceRole': RefInfo(object_type='dcim.devicerole', field_name='termination', is_generic=True), + 'terminationDeviceType': RefInfo(object_type='dcim.devicetype', field_name='termination', is_generic=True), + 'terminationFhrpGroup': RefInfo(object_type='ipam.fhrpgroup', field_name='termination', is_generic=True), + 'terminationFhrpGroupAssignment': RefInfo(object_type='ipam.fhrpgroupassignment', field_name='termination', is_generic=True), + 'terminationFrontPort': RefInfo(object_type='dcim.frontport', field_name='termination', is_generic=True), + 'terminationIkePolicy': RefInfo(object_type='vpn.ikepolicy', field_name='termination', is_generic=True), + 'terminationIkeProposal': RefInfo(object_type='vpn.ikeproposal', field_name='termination', is_generic=True), + 'terminationIpAddress': RefInfo(object_type='ipam.ipaddress', field_name='termination', is_generic=True), + 'terminationIpRange': RefInfo(object_type='ipam.iprange', field_name='termination', is_generic=True), + 'terminationIpSecPolicy': RefInfo(object_type='vpn.ipsecpolicy', field_name='termination', is_generic=True), + 'terminationIpSecProfile': RefInfo(object_type='vpn.ipsecprofile', 
field_name='termination', is_generic=True), + 'terminationIpSecProposal': RefInfo(object_type='vpn.ipsecproposal', field_name='termination', is_generic=True), + 'terminationInterface': RefInfo(object_type='dcim.interface', field_name='termination', is_generic=True), + 'terminationInventoryItem': RefInfo(object_type='dcim.inventoryitem', field_name='termination', is_generic=True), + 'terminationInventoryItemRole': RefInfo(object_type='dcim.inventoryitemrole', field_name='termination', is_generic=True), + 'terminationL2Vpn': RefInfo(object_type='vpn.l2vpn', field_name='termination', is_generic=True), + 'terminationL2VpnTermination': RefInfo(object_type='vpn.l2vpntermination', field_name='termination', is_generic=True), + 'terminationLocation': RefInfo(object_type='dcim.location', field_name='termination', is_generic=True), + 'terminationMacAddress': RefInfo(object_type='dcim.macaddress', field_name='termination', is_generic=True), + 'terminationManufacturer': RefInfo(object_type='dcim.manufacturer', field_name='termination', is_generic=True), + 'terminationModule': RefInfo(object_type='dcim.module', field_name='termination', is_generic=True), + 'terminationModuleBay': RefInfo(object_type='dcim.modulebay', field_name='termination', is_generic=True), + 'terminationModuleType': RefInfo(object_type='dcim.moduletype', field_name='termination', is_generic=True), + 'terminationPlatform': RefInfo(object_type='dcim.platform', field_name='termination', is_generic=True), + 'terminationPowerFeed': RefInfo(object_type='dcim.powerfeed', field_name='termination', is_generic=True), + 'terminationPowerOutlet': RefInfo(object_type='dcim.poweroutlet', field_name='termination', is_generic=True), + 'terminationPowerPanel': RefInfo(object_type='dcim.powerpanel', field_name='termination', is_generic=True), + 'terminationPowerPort': RefInfo(object_type='dcim.powerport', field_name='termination', is_generic=True), + 'terminationPrefix': RefInfo(object_type='ipam.prefix', field_name='termination', is_generic=True), + 'terminationProvider': RefInfo(object_type='circuits.provider', field_name='termination', is_generic=True), + 'terminationProviderAccount': RefInfo(object_type='circuits.provideraccount', field_name='termination', is_generic=True), + 'terminationProviderNetwork': RefInfo(object_type='circuits.providernetwork', field_name='termination', is_generic=True), + 'terminationRir': RefInfo(object_type='ipam.rir', field_name='termination', is_generic=True), + 'terminationRack': RefInfo(object_type='dcim.rack', field_name='termination', is_generic=True), + 'terminationRackReservation': RefInfo(object_type='dcim.rackreservation', field_name='termination', is_generic=True), + 'terminationRackRole': RefInfo(object_type='dcim.rackrole', field_name='termination', is_generic=True), + 'terminationRackType': RefInfo(object_type='dcim.racktype', field_name='termination', is_generic=True), + 'terminationRearPort': RefInfo(object_type='dcim.rearport', field_name='termination', is_generic=True), + 'terminationRegion': RefInfo(object_type='dcim.region', field_name='termination', is_generic=True), + 'terminationRole': RefInfo(object_type='ipam.role', field_name='termination', is_generic=True), + 'terminationRouteTarget': RefInfo(object_type='ipam.routetarget', field_name='termination', is_generic=True), + 'terminationService': RefInfo(object_type='ipam.service', field_name='termination', is_generic=True), + 'terminationSite': RefInfo(object_type='dcim.site', field_name='termination', is_generic=True), + 'terminationSiteGroup': 
RefInfo(object_type='dcim.sitegroup', field_name='termination', is_generic=True), + 'terminationTag': RefInfo(object_type='extras.tag', field_name='termination', is_generic=True), + 'terminationTenant': RefInfo(object_type='tenancy.tenant', field_name='termination', is_generic=True), + 'terminationTenantGroup': RefInfo(object_type='tenancy.tenantgroup', field_name='termination', is_generic=True), + 'terminationTunnel': RefInfo(object_type='vpn.tunnel', field_name='termination', is_generic=True), + 'terminationTunnelGroup': RefInfo(object_type='vpn.tunnelgroup', field_name='termination', is_generic=True), + 'terminationTunnelTermination': RefInfo(object_type='vpn.tunneltermination', field_name='termination', is_generic=True), + 'terminationVlan': RefInfo(object_type='ipam.vlan', field_name='termination', is_generic=True), + 'terminationVlanGroup': RefInfo(object_type='ipam.vlangroup', field_name='termination', is_generic=True), + 'terminationVlanTranslationPolicy': RefInfo(object_type='ipam.vlantranslationpolicy', field_name='termination', is_generic=True), + 'terminationVlanTranslationRule': RefInfo(object_type='ipam.vlantranslationrule', field_name='termination', is_generic=True), + 'terminationVmInterface': RefInfo(object_type='virtualization.vminterface', field_name='termination', is_generic=True), + 'terminationVrf': RefInfo(object_type='ipam.vrf', field_name='termination', is_generic=True), + 'terminationVirtualChassis': RefInfo(object_type='dcim.virtualchassis', field_name='termination', is_generic=True), + 'terminationVirtualCircuit': RefInfo(object_type='circuits.virtualcircuit', field_name='termination', is_generic=True), + 'terminationVirtualCircuitTermination': RefInfo(object_type='circuits.virtualcircuittermination', field_name='termination', is_generic=True), + 'terminationVirtualCircuitType': RefInfo(object_type='circuits.virtualcircuittype', field_name='termination', is_generic=True), + 'terminationVirtualDeviceContext': RefInfo(object_type='dcim.virtualdevicecontext', field_name='termination', is_generic=True), + 'terminationVirtualDisk': RefInfo(object_type='virtualization.virtualdisk', field_name='termination', is_generic=True), + 'terminationVirtualMachine': RefInfo(object_type='virtualization.virtualmachine', field_name='termination', is_generic=True), + 'terminationWirelessLan': RefInfo(object_type='wireless.wirelesslan', field_name='termination', is_generic=True), + 'terminationWirelessLanGroup': RefInfo(object_type='wireless.wirelesslangroup', field_name='termination', is_generic=True), + 'terminationWirelessLink': RefInfo(object_type='wireless.wirelesslink', field_name='termination', is_generic=True), + 'tunnel': RefInfo(object_type='vpn.tunnel', field_name='tunnel'), + }, + 'wireless.wirelesslan': { + 'group': RefInfo(object_type='wireless.wirelesslangroup', field_name='group'), + 'scopeLocation': RefInfo(object_type='dcim.location', field_name='scope', is_generic=True), + 'scopeRegion': RefInfo(object_type='dcim.region', field_name='scope', is_generic=True), + 'scopeSite': RefInfo(object_type='dcim.site', field_name='scope', is_generic=True), + 'scopeSiteGroup': RefInfo(object_type='dcim.sitegroup', field_name='scope', is_generic=True), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + 'vlan': RefInfo(object_type='ipam.vlan', field_name='vlan'), + }, + 'wireless.wirelesslangroup': { + 'parent': RefInfo(object_type='wireless.wirelesslangroup', field_name='parent'), 
+ 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + }, + 'wireless.wirelesslink': { + 'interfaceA': RefInfo(object_type='dcim.interface', field_name='interface_a'), + 'interfaceB': RefInfo(object_type='dcim.interface', field_name='interface_b'), + 'tags': RefInfo(object_type='extras.tag', field_name='tags', is_many=True), + 'tenant': RefInfo(object_type='tenancy.tenant', field_name='tenant'), + }, +} + +def get_json_ref_info(object_type: str|Type[models.Model], json_field_name: str) -> RefInfo|None: + if not isinstance(object_type, str): + object_type = get_object_type(object_type) + return _JSON_REF_INFO.get(object_type, {}).get(json_field_name) + +_LEGAL_FIELDS = { + 'circuits.circuit': frozenset(['assignments', 'cid', 'comments', 'commit_rate', 'custom_fields', 'description', 'distance', 'distance_unit', 'install_date', 'provider', 'provider_account', 'status', 'tags', 'tenant', 'termination_date', 'type']), + 'circuits.circuitgroup': frozenset(['custom_fields', 'description', 'name', 'slug', 'tags', 'tenant']), + 'circuits.circuitgroupassignment': frozenset(['group', 'member_id', 'member_type', 'priority', 'tags']), + 'circuits.circuittermination': frozenset(['circuit', 'custom_fields', 'description', 'mark_connected', 'port_speed', 'pp_info', 'tags', 'term_side', 'termination_id', 'termination_type', 'upstream_speed', 'xconnect_id']), + 'circuits.circuittype': frozenset(['color', 'custom_fields', 'description', 'name', 'slug', 'tags']), + 'circuits.provider': frozenset(['accounts', 'asns', 'comments', 'custom_fields', 'description', 'name', 'slug', 'tags']), + 'circuits.provideraccount': frozenset(['account', 'comments', 'custom_fields', 'description', 'name', 'provider', 'tags']), + 'circuits.providernetwork': frozenset(['comments', 'custom_fields', 'description', 'name', 'provider', 'service_id', 'tags']), + 'circuits.virtualcircuit': frozenset(['cid', 'comments', 'custom_fields', 'description', 'provider_account', 'provider_network', 'status', 'tags', 'tenant', 'type']), + 'circuits.virtualcircuittermination': frozenset(['custom_fields', 'description', 'interface', 'role', 'tags', 'virtual_circuit']), + 'circuits.virtualcircuittype': frozenset(['color', 'custom_fields', 'description', 'name', 'slug', 'tags']), + 'dcim.cable': frozenset(['a_terminations', 'b_terminations', 'color', 'comments', 'custom_fields', 'description', 'label', 'length', 'length_unit', 'status', 'tags', 'tenant', 'type']), + 'dcim.cablepath': frozenset(['is_active', 'is_complete', 'is_split']), + 'dcim.cabletermination': frozenset(['cable', 'cable_end', 'termination_id', 'termination_type']), + 'dcim.consoleport': frozenset(['custom_fields', 'description', 'device', 'label', 'mark_connected', 'module', 'name', 'speed', 'tags', 'type']), + 'dcim.consoleserverport': frozenset(['custom_fields', 'description', 'device', 'label', 'mark_connected', 'module', 'name', 'speed', 'tags', 'type']), + 'dcim.device': frozenset(['airflow', 'asset_tag', 'cluster', 'comments', 'custom_fields', 'description', 'device_type', 'face', 'latitude', 'location', 'longitude', 'name', 'oob_ip', 'platform', 'position', 'primary_ip4', 'primary_ip6', 'rack', 'role', 'serial', 'site', 'status', 'tags', 'tenant', 'vc_position', 'vc_priority', 'virtual_chassis']), + 'dcim.devicebay': frozenset(['custom_fields', 'description', 'device', 'installed_device', 'label', 'name', 'tags']), + 'dcim.devicerole': frozenset(['color', 'custom_fields', 'description', 'name', 'slug', 'tags', 'vm_role']), + 'dcim.devicetype': 
frozenset(['airflow', 'comments', 'custom_fields', 'default_platform', 'description', 'exclude_from_utilization', 'is_full_depth', 'manufacturer', 'model', 'part_number', 'slug', 'subdevice_role', 'tags', 'u_height', 'weight', 'weight_unit']), + 'dcim.frontport': frozenset(['color', 'custom_fields', 'description', 'device', 'label', 'mark_connected', 'module', 'name', 'rear_port', 'rear_port_position', 'tags', 'type']), + 'dcim.interface': frozenset(['bridge', 'custom_fields', 'description', 'device', 'duplex', 'enabled', 'label', 'lag', 'mark_connected', 'mgmt_only', 'mode', 'module', 'mtu', 'name', 'parent', 'poe_mode', 'poe_type', 'primary_mac_address', 'qinq_svlan', 'rf_channel', 'rf_channel_frequency', 'rf_channel_width', 'rf_role', 'speed', 'tagged_vlans', 'tags', 'tx_power', 'type', 'untagged_vlan', 'vdcs', 'vlan_translation_policy', 'vrf', 'wireless_lans', 'wwn']), + 'dcim.inventoryitem': frozenset(['asset_tag', 'component_id', 'component_type', 'custom_fields', 'description', 'device', 'discovered', 'label', 'manufacturer', 'name', 'parent', 'part_id', 'role', 'serial', 'status', 'tags']), + 'dcim.inventoryitemrole': frozenset(['color', 'custom_fields', 'description', 'name', 'slug', 'tags']), + 'dcim.location': frozenset(['custom_fields', 'description', 'facility', 'name', 'parent', 'site', 'slug', 'status', 'tags', 'tenant']), + 'dcim.macaddress': frozenset(['assigned_object_id', 'assigned_object_type', 'comments', 'custom_fields', 'description', 'mac_address', 'tags']), + 'dcim.manufacturer': frozenset(['custom_fields', 'description', 'name', 'slug', 'tags']), + 'dcim.module': frozenset(['asset_tag', 'comments', 'custom_fields', 'description', 'device', 'module_bay', 'module_type', 'serial', 'status', 'tags']), + 'dcim.modulebay': frozenset(['custom_fields', 'description', 'device', 'installed_module', 'label', 'module', 'name', 'position', 'tags']), + 'dcim.moduletype': frozenset(['airflow', 'comments', 'custom_fields', 'description', 'manufacturer', 'model', 'part_number', 'tags', 'weight', 'weight_unit']), + 'dcim.platform': frozenset(['custom_fields', 'description', 'manufacturer', 'name', 'slug', 'tags']), + 'dcim.powerfeed': frozenset(['amperage', 'comments', 'custom_fields', 'description', 'mark_connected', 'max_utilization', 'name', 'phase', 'power_panel', 'rack', 'status', 'supply', 'tags', 'tenant', 'type', 'voltage']), + 'dcim.poweroutlet': frozenset(['color', 'custom_fields', 'description', 'device', 'feed_leg', 'label', 'mark_connected', 'module', 'name', 'power_port', 'tags', 'type']), + 'dcim.powerpanel': frozenset(['comments', 'custom_fields', 'description', 'location', 'name', 'site', 'tags']), + 'dcim.powerport': frozenset(['allocated_draw', 'custom_fields', 'description', 'device', 'label', 'mark_connected', 'maximum_draw', 'module', 'name', 'tags', 'type']), + 'dcim.rack': frozenset(['airflow', 'asset_tag', 'comments', 'custom_fields', 'desc_units', 'description', 'facility_id', 'form_factor', 'location', 'max_weight', 'mounting_depth', 'name', 'outer_depth', 'outer_unit', 'outer_width', 'rack_type', 'role', 'serial', 'site', 'starting_unit', 'status', 'tags', 'tenant', 'u_height', 'weight', 'weight_unit', 'width']), + 'dcim.rackreservation': frozenset(['comments', 'custom_fields', 'description', 'rack', 'tags', 'tenant', 'units']), + 'dcim.rackrole': frozenset(['color', 'custom_fields', 'description', 'name', 'slug', 'tags']), + 'dcim.racktype': frozenset(['comments', 'custom_fields', 'desc_units', 'description', 'form_factor', 'manufacturer', 'max_weight', 
'model', 'mounting_depth', 'outer_depth', 'outer_unit', 'outer_width', 'slug', 'starting_unit', 'tags', 'u_height', 'weight', 'weight_unit', 'width']), + 'dcim.rearport': frozenset(['color', 'custom_fields', 'description', 'device', 'label', 'mark_connected', 'module', 'name', 'positions', 'tags', 'type']), + 'dcim.region': frozenset(['custom_fields', 'description', 'name', 'parent', 'slug', 'tags']), + 'dcim.site': frozenset(['asns', 'comments', 'custom_fields', 'description', 'facility', 'group', 'latitude', 'longitude', 'name', 'physical_address', 'region', 'shipping_address', 'slug', 'status', 'tags', 'tenant', 'time_zone']), + 'dcim.sitegroup': frozenset(['custom_fields', 'description', 'name', 'parent', 'slug', 'tags']), + 'dcim.virtualchassis': frozenset(['comments', 'custom_fields', 'description', 'domain', 'master', 'name', 'tags']), + 'dcim.virtualdevicecontext': frozenset(['comments', 'custom_fields', 'description', 'device', 'identifier', 'name', 'primary_ip4', 'primary_ip6', 'status', 'tags', 'tenant']), + 'extras.tag': frozenset(['color', 'name', 'slug']), + 'ipam.aggregate': frozenset(['comments', 'custom_fields', 'date_added', 'description', 'prefix', 'rir', 'tags', 'tenant']), + 'ipam.asn': frozenset(['asn', 'comments', 'custom_fields', 'description', 'rir', 'tags', 'tenant']), + 'ipam.asnrange': frozenset(['custom_fields', 'description', 'end', 'name', 'rir', 'slug', 'start', 'tags', 'tenant']), + 'ipam.fhrpgroup': frozenset(['auth_key', 'auth_type', 'comments', 'custom_fields', 'description', 'group_id', 'name', 'protocol', 'tags']), + 'ipam.fhrpgroupassignment': frozenset(['group', 'interface_id', 'interface_type', 'priority']), + 'ipam.ipaddress': frozenset(['address', 'assigned_object_id', 'assigned_object_type', 'comments', 'custom_fields', 'description', 'dns_name', 'nat_inside', 'role', 'status', 'tags', 'tenant', 'vrf']), + 'ipam.iprange': frozenset(['comments', 'custom_fields', 'description', 'end_address', 'mark_utilized', 'role', 'start_address', 'status', 'tags', 'tenant', 'vrf']), + 'ipam.prefix': frozenset(['comments', 'custom_fields', 'description', 'is_pool', 'mark_utilized', 'prefix', 'role', 'scope_id', 'scope_type', 'status', 'tags', 'tenant', 'vlan', 'vrf']), + 'ipam.rir': frozenset(['custom_fields', 'description', 'is_private', 'name', 'slug', 'tags']), + 'ipam.role': frozenset(['custom_fields', 'description', 'name', 'slug', 'tags', 'weight']), + 'ipam.routetarget': frozenset(['comments', 'custom_fields', 'description', 'name', 'tags', 'tenant']), + 'ipam.service': frozenset(['comments', 'custom_fields', 'description', 'device', 'ipaddresses', 'name', 'ports', 'protocol', 'tags', 'virtual_machine']), + 'ipam.vlan': frozenset(['comments', 'custom_fields', 'description', 'group', 'name', 'qinq_role', 'qinq_svlan', 'role', 'site', 'status', 'tags', 'tenant', 'vid']), + 'ipam.vlangroup': frozenset(['custom_fields', 'description', 'name', 'scope_id', 'scope_type', 'slug', 'tags', 'vid_ranges']), + 'ipam.vlantranslationpolicy': frozenset(['description', 'name']), + 'ipam.vlantranslationrule': frozenset(['description', 'local_vid', 'policy', 'remote_vid']), + 'ipam.vrf': frozenset(['comments', 'custom_fields', 'description', 'enforce_unique', 'export_targets', 'import_targets', 'name', 'rd', 'tags', 'tenant']), + 'tenancy.contact': frozenset(['address', 'comments', 'custom_fields', 'description', 'email', 'group', 'link', 'name', 'phone', 'tags', 'title']), + 'tenancy.contactassignment': frozenset(['contact', 'custom_fields', 'object_id', 'object_type', 
'priority', 'role', 'tags']), + 'tenancy.contactgroup': frozenset(['custom_fields', 'description', 'name', 'parent', 'slug', 'tags']), + 'tenancy.contactrole': frozenset(['custom_fields', 'description', 'name', 'slug', 'tags']), + 'tenancy.tenant': frozenset(['comments', 'custom_fields', 'description', 'group', 'name', 'slug', 'tags']), + 'tenancy.tenantgroup': frozenset(['custom_fields', 'description', 'name', 'parent', 'slug', 'tags']), + 'virtualization.cluster': frozenset(['comments', 'custom_fields', 'description', 'group', 'name', 'scope_id', 'scope_type', 'status', 'tags', 'tenant', 'type']), + 'virtualization.clustergroup': frozenset(['custom_fields', 'description', 'name', 'slug', 'tags']), + 'virtualization.clustertype': frozenset(['custom_fields', 'description', 'name', 'slug', 'tags']), + 'virtualization.virtualdisk': frozenset(['custom_fields', 'description', 'name', 'size', 'tags', 'virtual_machine']), + 'virtualization.virtualmachine': frozenset(['cluster', 'comments', 'custom_fields', 'description', 'device', 'disk', 'memory', 'name', 'platform', 'primary_ip4', 'primary_ip6', 'role', 'serial', 'site', 'status', 'tags', 'tenant', 'vcpus']), + 'virtualization.vminterface': frozenset(['bridge', 'custom_fields', 'description', 'enabled', 'mode', 'mtu', 'name', 'parent', 'primary_mac_address', 'qinq_svlan', 'tagged_vlans', 'tags', 'untagged_vlan', 'virtual_machine', 'vlan_translation_policy', 'vrf']), + 'vpn.ikepolicy': frozenset(['comments', 'custom_fields', 'description', 'mode', 'name', 'preshared_key', 'proposals', 'tags', 'version']), + 'vpn.ikeproposal': frozenset(['authentication_algorithm', 'authentication_method', 'comments', 'custom_fields', 'description', 'encryption_algorithm', 'group', 'name', 'sa_lifetime', 'tags']), + 'vpn.ipsecpolicy': frozenset(['comments', 'custom_fields', 'description', 'name', 'pfs_group', 'proposals', 'tags']), + 'vpn.ipsecprofile': frozenset(['comments', 'custom_fields', 'description', 'ike_policy', 'ipsec_policy', 'mode', 'name', 'tags']), + 'vpn.ipsecproposal': frozenset(['authentication_algorithm', 'comments', 'custom_fields', 'description', 'encryption_algorithm', 'name', 'sa_lifetime_data', 'sa_lifetime_seconds', 'tags']), + 'vpn.l2vpn': frozenset(['comments', 'custom_fields', 'description', 'export_targets', 'identifier', 'import_targets', 'name', 'slug', 'tags', 'tenant', 'type']), + 'vpn.l2vpntermination': frozenset(['assigned_object_id', 'assigned_object_type', 'custom_fields', 'l2vpn', 'tags']), + 'vpn.tunnel': frozenset(['comments', 'custom_fields', 'description', 'encapsulation', 'group', 'ipsec_profile', 'name', 'status', 'tags', 'tenant', 'tunnel_id']), + 'vpn.tunnelgroup': frozenset(['custom_fields', 'description', 'name', 'slug', 'tags']), + 'vpn.tunneltermination': frozenset(['custom_fields', 'outside_ip', 'role', 'tags', 'termination_id', 'termination_type', 'tunnel']), + 'wireless.wirelesslan': frozenset(['auth_cipher', 'auth_psk', 'auth_type', 'comments', 'custom_fields', 'description', 'group', 'scope_id', 'scope_type', 'ssid', 'status', 'tags', 'tenant', 'vlan']), + 'wireless.wirelesslangroup': frozenset(['custom_fields', 'description', 'name', 'parent', 'slug', 'tags']), + 'wireless.wirelesslink': frozenset(['auth_cipher', 'auth_psk', 'auth_type', 'comments', 'custom_fields', 'description', 'distance', 'distance_unit', 'interface_a', 'interface_b', 'ssid', 'status', 'tags', 'tenant']), +} + +def legal_fields(object_type: str|Type[models.Model]) -> frozenset[str]: + if not isinstance(object_type, str): + object_type = 
get_object_type(object_type) + return _LEGAL_FIELDS.get(object_type, frozenset()) + +_OBJECT_TYPE_PRIMARY_VALUE_FIELD_MAP = { + 'ipam.asn': 'asn', + 'dcim.devicetype': 'model', + 'circuits.circuit': 'cid', + 'ipam.ipaddress': 'address', + 'dcim.macaddress': 'mac_address', + 'dcim.moduletype': 'model', + 'ipam.prefix': 'prefix', + 'dcim.racktype': 'model', + 'circuits.virtualcircuit': 'cid', + 'wireless.wirelesslan': 'ssid', +} + +def get_primary_value(data: dict, object_type: str) -> str|None: + field = _OBJECT_TYPE_PRIMARY_VALUE_FIELD_MAP.get(object_type, 'name') + return data.get(field) \ No newline at end of file diff --git a/netbox_diode_plugin/api/serializers.py b/netbox_diode_plugin/api/serializers.py index 838f8d3..60e2860 100644 --- a/netbox_diode_plugin/api/serializers.py +++ b/netbox_diode_plugin/api/serializers.py @@ -2,131 +2,10 @@ # Copyright 2024 NetBox Labs Inc """Diode NetBox Plugin - Serializers.""" -import logging - -from dcim.api.serializers import ( - DeviceRoleSerializer, - DeviceSerializer, - DeviceTypeSerializer, - InterfaceSerializer, - ManufacturerSerializer, - PlatformSerializer, - SiteSerializer, -) -from django.conf import settings from netbox.api.serializers import NetBoxModelSerializer -from packaging import version from netbox_diode_plugin.models import Setting -if version.parse(version.parse(settings.VERSION).base_version) >= version.parse("4.1"): - from core.models import ObjectChange -else: - from extras.models import ObjectChange -from ipam.api.serializers import IPAddressSerializer, PrefixSerializer -from rest_framework import serializers -from utilities.api import get_serializer_for_model -from virtualization.api.serializers import ( - ClusterGroupSerializer, - ClusterSerializer, - ClusterTypeSerializer, - VirtualDiskSerializer, - VirtualMachineSerializer, - VMInterfaceSerializer, -) - -logger = logging.getLogger("netbox.netbox_diode_plugin.api.serializers") - - -def dynamic_import(name): - """Dynamically import a class from an absolute path string.""" - components = name.split(".") - mod = __import__(components[0]) - for comp in components[1:]: - mod = getattr(mod, comp) - return mod - - -def get_diode_serializer(instance): - """Get the Diode serializer based on instance model.""" - serializer = get_serializer_for_model(instance) - - serializer_name = f"netbox_diode_plugin.api.serializers.Diode{serializer.__name__}" - - try: - serializer = dynamic_import(serializer_name) - except AttributeError: - logger.warning(f"Could not find serializer for {serializer_name}") - pass - - return serializer - - -class ObjectStateSerializer(serializers.Serializer): - """Object State Serializer.""" - - object_type = serializers.SerializerMethodField(read_only=True) - object_change_id = serializers.SerializerMethodField(read_only=True) - object = serializers.SerializerMethodField(read_only=True) - - def get_object_type(self, instance): - """ - Get the object type from context sent from view. - - Return a string with the format "app.model". - """ - return self.context.get("object_type") - - def get_object_change_id(self, instance): - """ - Get the object changed based on instance ID. - - Return the ID of last change. - """ - object_changed = ( - ObjectChange.objects.filter(changed_object_id=instance.id) - .order_by("-id") - .values_list("id", flat=True) - ) - return object_changed[0] if len(object_changed) > 0 else None - - def get_object(self, instance): - """ - Get the serializer based on instance model. - - Get the data from the model according to its ID. 
- Return the object according to serializer defined in the NetBox. - """ - serializer = get_diode_serializer(instance) - - object_data = instance.__class__.objects.filter(id=instance.id) - - context = {"request": self.context.get("request")} - - data = serializer(object_data, context=context, many=True).data[0] - - return data - - -class ChangeSerialiazer(serializers.Serializer): - """ChangeSet Serializer.""" - - change_id = serializers.UUIDField(required=True) - change_type = serializers.CharField(required=True) - object_version = serializers.IntegerField(required=False, allow_null=True) - object_type = serializers.CharField(required=True) - object_id = serializers.IntegerField(required=False, allow_null=True) - data = serializers.DictField(required=True) - - -class ApplyChangeSetRequestSerializer(serializers.Serializer): - """ApplyChangeSet request Serializer.""" - - change_set_id = serializers.UUIDField(required=True) - change_set = serializers.ListField( - child=ChangeSerialiazer(), required=True, allow_empty=False - ) - class SettingSerializer(NetBoxModelSerializer): """Setting Serializer.""" @@ -142,250 +21,3 @@ class Meta: "created", "last_updated", ) - - -class DiodeIPAddressSerializer(IPAddressSerializer): - """Diode IP Address Serializer.""" - - class Meta: - """Meta class.""" - - model = IPAddressSerializer.Meta.model - fields = IPAddressSerializer.Meta.fields - - def get_assigned_object(self, obj): - """Get the assigned object based on the instance model.""" - if obj.assigned_object is None: - return None - - serializer = get_diode_serializer(obj.assigned_object) - - context = {"request": self.context["request"]} - assigned_object = serializer(obj.assigned_object, context=context).data - - if assigned_object.get("device"): - device_serializer = get_diode_serializer(obj.assigned_object.device) - device = device_serializer(obj.assigned_object.device, context=context).data - assigned_object["device"] = device - - if serializer.__name__.endswith("InterfaceSerializer"): - assigned_object = {"interface": assigned_object} - - return assigned_object - - -class DiodeSiteSerializer(SiteSerializer): - """Diode Site Serializer.""" - - status = serializers.CharField() - - class Meta: - """Meta class.""" - - model = SiteSerializer.Meta.model - fields = SiteSerializer.Meta.fields - - -class DiodeDeviceRoleSerializer(DeviceRoleSerializer): - """Diode Device Role Serializer.""" - - class Meta: - """Meta class.""" - - model = DeviceRoleSerializer.Meta.model - fields = DeviceRoleSerializer.Meta.fields - - -class DiodeManufacturerSerializer(ManufacturerSerializer): - """Diode Manufacturer Serializer.""" - - class Meta: - """Meta class.""" - - model = ManufacturerSerializer.Meta.model - fields = ManufacturerSerializer.Meta.fields - - -class DiodePlatformSerializer(PlatformSerializer): - """Diode Platform Serializer.""" - - manufacturer = DiodeManufacturerSerializer(required=False, allow_null=True) - - class Meta: - """Meta class.""" - - model = PlatformSerializer.Meta.model - fields = PlatformSerializer.Meta.fields - - -class DiodeDeviceTypeSerializer(DeviceTypeSerializer): - """Diode Device Type Serializer.""" - - default_platform = DiodePlatformSerializer(required=False, allow_null=True) - manufacturer = DiodeManufacturerSerializer(required=False, allow_null=True) - - class Meta: - """Meta class.""" - - model = DeviceTypeSerializer.Meta.model - fields = DeviceTypeSerializer.Meta.fields - - -class DiodeDeviceSerializer(DeviceSerializer): - """Diode Device Serializer.""" - - site = 
DiodeSiteSerializer() - device_type = DiodeDeviceTypeSerializer() - role = DiodeDeviceRoleSerializer() - platform = DiodePlatformSerializer(required=False, allow_null=True) - status = serializers.CharField() - - class Meta: - """Meta class.""" - - model = DeviceSerializer.Meta.model - fields = DeviceSerializer.Meta.fields - - -class DiodeNestedInterfaceSerializer(InterfaceSerializer): - """Diode Nested Interface Serializer.""" - - class Meta: - """Meta class.""" - - model = InterfaceSerializer.Meta.model - fields = InterfaceSerializer.Meta.fields - - -class DiodeInterfaceSerializer(InterfaceSerializer): - """Diode Interface Serializer.""" - - device = DiodeDeviceSerializer() - parent = DiodeNestedInterfaceSerializer() - type = serializers.CharField() - mode = serializers.CharField() - - class Meta: - """Meta class.""" - - model = InterfaceSerializer.Meta.model - fields = InterfaceSerializer.Meta.fields - - -class DiodePrefixSerializer(PrefixSerializer): - """Diode Prefix Serializer.""" - - status = serializers.CharField() - site = serializers.SerializerMethodField(read_only=True) - - class Meta: - """Meta class.""" - - model = PrefixSerializer.Meta.model - fields = PrefixSerializer.Meta.fields + ["site"] - - def get_site(self, obj): - """Get the site from the instance scope.""" - if obj.scope is None: - return None - - scope_model_meta = obj.scope_type.model_class()._meta - if scope_model_meta.app_label == "dcim" and scope_model_meta.model_name == "site": - serializer = get_serializer_for_model(obj.scope) - context = {'request': self.context['request']} - return serializer(obj.scope, nested=True, context=context).data - - return None - - -class DiodeClusterGroupSerializer(ClusterGroupSerializer): - """Diode Cluster Group Serializer.""" - - class Meta: - """Meta class.""" - - model = ClusterGroupSerializer.Meta.model - fields = ClusterGroupSerializer.Meta.fields - - -class DiodeClusterTypeSerializer(ClusterTypeSerializer): - """Diode Cluster Type Serializer.""" - - class Meta: - """Meta class.""" - - model = ClusterTypeSerializer.Meta.model - fields = ClusterTypeSerializer.Meta.fields - - -class DiodeClusterSerializer(ClusterSerializer): - """Diode Cluster Serializer.""" - - type = DiodeClusterTypeSerializer() - group = DiodeClusterGroupSerializer() - status = serializers.CharField() - site = serializers.SerializerMethodField(read_only=True) - - class Meta: - """Meta class.""" - - model = ClusterSerializer.Meta.model - fields = ClusterSerializer.Meta.fields + ["site"] - - def get_site(self, obj): - """Get the site from the instance scope.""" - if obj.scope is None: - return None - - scope_model_meta = obj.scope_type.model_class()._meta - if scope_model_meta.app_label == "dcim" and scope_model_meta.model_name == "site": - serializer = get_serializer_for_model(obj.scope) - context = {'request': self.context['request']} - return serializer(obj.scope, nested=True, context=context).data - - return None - - -class DiodeVirtualMachineSerializer(VirtualMachineSerializer): - """Diode Virtual Machine Serializer.""" - - status = serializers.CharField() - site = DiodeSiteSerializer() - cluster = DiodeClusterSerializer() - device = DiodeDeviceSerializer() - role = DiodeDeviceRoleSerializer() - tenant = serializers.CharField() - platform = DiodePlatformSerializer() - primary_ip = DiodeIPAddressSerializer() - primary_ip4 = DiodeIPAddressSerializer() - primary_ip6 = DiodeIPAddressSerializer() - - class Meta: - """Meta class.""" - - model = VirtualMachineSerializer.Meta.model - fields = 
VirtualMachineSerializer.Meta.fields - - -class DiodeVirtualDiskSerializer(VirtualDiskSerializer): - """Diode Virtual Disk Serializer.""" - - virtual_machine = DiodeVirtualMachineSerializer() - - class Meta: - """Meta class.""" - - model = VirtualDiskSerializer.Meta.model - fields = VirtualDiskSerializer.Meta.fields - - -class DiodeVMInterfaceSerializer(VMInterfaceSerializer): - """Diode VM Interface Serializer.""" - - virtual_machine = DiodeVirtualMachineSerializer() - - class Meta: - """Meta class.""" - - model = VMInterfaceSerializer.Meta.model - fields = VMInterfaceSerializer.Meta.fields diff --git a/netbox_diode_plugin/api/supported_models.py b/netbox_diode_plugin/api/supported_models.py new file mode 100644 index 0000000..3ec47ce --- /dev/null +++ b/netbox_diode_plugin/api/supported_models.py @@ -0,0 +1,292 @@ +#!/usr/bin/env python +# Copyright 2025 NetBox Labs Inc +"""NetBox Diode Data - API supported models.""" + +import importlib +import logging +import time +from functools import lru_cache +from typing import List, Type + +from django.apps import apps +from django.db import models +from django.db.models import ManyToOneRel +from django.db.models.fields import NOT_PROVIDED +from rest_framework import serializers +from utilities.api import get_serializer_for_model as netbox_get_serializer_for_model + +logger = logging.getLogger(__name__) + +# Supported apps +SUPPORTED_APPS = [ + "circuits", + "dcim", + "extras", + "ipam", + "virtualization", + "vpn", + "wireless", + "tenancy", +] + +# Models that are not supported +EXCLUDED_MODELS = [ + "TaggedItem", + "Subscription", + "ScriptModule", + "Dashboard", + "Notification", +] + + +def extract_supported_models() -> dict[str, dict]: + """Extract supported models from NetBox.""" + supported_models = discover_models(SUPPORTED_APPS) + + logger.debug(f"Supported models: {supported_models}") + + models_to_process = supported_models + extracted_models: dict[str, dict] = {} + + start_ts = time.time() + while models_to_process: + model = models_to_process.pop() + try: + fields, related_models = get_model_fields(model) + if not fields: + continue + + prerequisites = get_prerequisites(model, fields) + object_type = f"{model._meta.app_label}.{model._meta.model_name}" + extracted_models[object_type] = { + "fields": fields, + "prerequisites": prerequisites, + } + for related_model in related_models: + related_object_type = f"{related_model._meta.app_label}.{related_model._meta.model_name}" + if ( + related_object_type not in extracted_models + and related_object_type not in models_to_process + ): + models_to_process.append(related_model) + except Exception as e: + logger.error(f"extract_supported_models: {model.__name__} error: {e}") + + finish_ts = time.time() + lapsed_millis = (finish_ts - start_ts) * 1000 + logger.info( + f"done extracting supported models in {lapsed_millis:.2f} milliseconds - extracted_models: {len(extracted_models)}" + ) + + return extracted_models + + +def get_prerequisites(model_class, fields) -> List[dict[str, str]]: + """Get the prerequisite models for the model.""" + prerequisites: List[dict[str, str]] = [] + prerequisite_models = getattr(model_class, "prerequisite_models", []) + + for prereq in prerequisite_models: + prereq_model = apps.get_model(prereq) + + for field_name, field_info in fields.items(): + related_model = field_info.get("related_model") + prerequisite_info = { + "field_name": field_name, + "prerequisite_model": prereq_model, + } + if ( + prerequisite_info not in prerequisites + and related_model + and 
related_model.get("model_class_name") == prereq_model.__name__ + ): + prerequisites.append(prerequisite_info) + break + + return prerequisites + + +@lru_cache(maxsize=128) +def get_model_fields(model_class) -> tuple[dict, list]: + """Get the fields for the model ordered as they are in the serializer.""" + related_models_to_process = [] + + # Skip unsupported apps and excluded models + if ( + model_class._meta.app_label not in SUPPORTED_APPS + or model_class.__name__ in EXCLUDED_MODELS + ): + return {}, [] + + try: + # Get serializer fields to maintain order + serializer_class = get_serializer_for_model(model_class) + serializer_fields = serializer_class().get_fields() + serializer_fields_names = list(serializer_fields.keys()) + except Exception as e: + logger.error(f"Error getting serializer fields for model {model_class}: {e}") + return {}, [] + + # Get all model fields + model_fields = { + field.name: field + for field in model_class._meta.get_fields() + if field.__class__.__name__ not in ["CounterCacheField", "GenericRelation"] + } + + # Reorder fields to match serializer order + ordered_fields = { + field_name: model_fields[field_name] + for field_name in serializer_fields_names + if field_name in model_fields + } + + # Add remaining fields + ordered_fields.update( + { + field_name: field + for field_name, field in model_fields.items() + if field_name not in ordered_fields + } + ) + + fields_info = {} + + for field_name, field in ordered_fields.items(): + field_info = { + "type": field.get_internal_type(), + "required": not field.null and not field.blank, + "is_many_to_one_rel": isinstance(field, ManyToOneRel), + "is_numeric": field.get_internal_type() + in [ + "IntegerField", + "FloatField", + "DecimalField", + "PositiveIntegerField", + "PositiveSmallIntegerField", + "SmallIntegerField", + "BigIntegerField", + ], + } + + # Handle default values + default_value = None + if hasattr(field, "default"): + default_value = ( + field.default if field.default not in (NOT_PROVIDED, dict) else None + ) + field_info["default"] = default_value + + # Handle related fields + if field.is_relation: + related_model = field.related_model + if related_model: + related_model_key = ( + f"{related_model._meta.app_label}.{related_model._meta.model_name}" + ) + related_model_info = { + "app_label": related_model._meta.app_label, + "model_name": related_model._meta.model_name, + "model_class_name": related_model.__name__, + "object_type": related_model_key, + "filters": get_field_filters(model_class, field_name), + } + field_info["related_model"] = related_model_info + if ( + related_model.__name__ not in EXCLUDED_MODELS + and related_model not in related_models_to_process + ): + related_models_to_process.append(related_model) + + fields_info[field_name] = field_info + + return fields_info, related_models_to_process + + +@lru_cache(maxsize=128) +def get_field_filters(model_class, field_name): + """Get filters for a field.""" + if hasattr(model_class, "_netbox_private"): + return None + + try: + filterset_name = f"{model_class.__name__}FilterSet" + filterset_module = importlib.import_module( + f"{model_class._meta.app_label}.filtersets" + ) + filterset_class = getattr(filterset_module, filterset_name) + + _filters = set() + field_filters = [] + for filter_name, filter_instance in filterset_class.get_filters().items(): + filter_by = getattr(filter_instance, "field_name", None) + filter_field_extra = getattr(filter_instance, "extra", None) + + if not filter_name.startswith(field_name) or 
filter_by.endswith("_id"): + continue + + if filter_by and filter_by not in _filters: + _filters.add(filter_by) + field_filters.append( + { + "filter_by": filter_by, + "filter_to_field_name": ( + filter_field_extra.get("to_field_name", None) + if filter_field_extra + else None + ), + } + ) + return list(field_filters) if field_filters else None + except Exception as e: + logger.error( + f"Error getting field filters for model {model_class.__name__} and field {field_name}: {e}" + ) + return None + + +@lru_cache(maxsize=128) +def get_serializer_for_model(model, prefix=""): + """Cached wrapper for NetBox's get_serializer_for_model function.""" + return netbox_get_serializer_for_model(model, prefix) + + +def discover_models(root_packages: List[str]) -> list[Type[models.Model]]: + """Discovers all model classes in specified root packages.""" + discovered_models = [] + + # Look through all modules that might contain serializers + module_names = [ + "api.serializers", + ] + + for root_package in root_packages: + logger.debug(f"Searching in root package: {root_package}") + + for module_name in module_names: + full_module_path = f"{root_package}.{module_name}" + try: + module = __import__(full_module_path, fromlist=["*"]) + except ImportError: + logger.error(f"Could not import {full_module_path}") + continue + + # Find all serializer classes in the module + for serializer_name in dir(module): + serializer = getattr(module, serializer_name) + if ( + isinstance(serializer, type) + and issubclass(serializer, serializers.Serializer) + and serializer != serializers.Serializer + and serializer != serializers.ModelSerializer + and hasattr(serializer, "Meta") + and hasattr(serializer.Meta, "model") + ): + model = serializer.Meta.model + if model not in discovered_models: + discovered_models.append(model) + logger.debug( + f"Discovered model: {model.__module__}.{model.__name__}" + ) + + return discovered_models diff --git a/netbox_diode_plugin/api/transformer.py b/netbox_diode_plugin/api/transformer.py new file mode 100644 index 0000000..12e3518 --- /dev/null +++ b/netbox_diode_plugin/api/transformer.py @@ -0,0 +1,396 @@ +#!/usr/bin/env python +# Copyright 2024 NetBox Labs Inc +"""Diode NetBox Plugin - API - Object resolution for diffing.""" + +import copy +import json +import logging +import re +from collections import defaultdict +from functools import lru_cache +from uuid import uuid4 + +import graphlib +from django.core.exceptions import ValidationError +from django.utils.text import slugify + +from .common import ChangeSetException, UnresolvedReference +from .matcher import find_existing_object, fingerprint +from .plugin_utils import get_json_ref_info, get_primary_value + +logger = logging.getLogger("netbox.diode_data") + +@lru_cache(maxsize=128) +def _camel_to_snake_case(name): + """Convert camelCase string to snake_case.""" + name = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name) + return re.sub("([a-z0-9])([A-Z])", r"\1_\2", name).lower() + + +# these are implied values pushed down to referenced objects. 
+_NESTED_CONTEXT = { + "dcim.interface": { + # interface.primary_mac_address -> mac_address.assigned_object = interface + "primary_mac_address": lambda object_type, uuid: { + "assigned_object_type": object_type, + "assigned_object_id": UnresolvedReference(object_type=object_type, uuid=uuid), + }, + }, + "virtualization.vminterface": { + # interface.primary_mac_address -> mac_address.assigned_object = vinterface + "primary_mac_address": lambda object_type, uuid: { + "assigned_object_type": object_type, + "assigned_object_id": UnresolvedReference(object_type=object_type, uuid=uuid), + }, + }, +} + +def _no_context(object_type, uuid): + return None + +def _nested_context(object_type, uuid, field_name): + return _NESTED_CONTEXT.get(object_type, {}).get(field_name, _no_context)(object_type, uuid) + +_IS_CIRCULAR_REFERENCE = { + "dcim.interface": frozenset(["primary_mac_address"]), + "virtualization.vminterface": frozenset(["primary_mac_address"]), + "dcim.device": frozenset(["primary_ip4", "primary_ip6"]), + "dcim.virtualdevicecontext": frozenset(["primary_ip4", "primary_ip6"]), + "virtualization.virtualmachine": frozenset(["primary_ip4", "primary_ip6"]), +} + +def _is_circular_reference(object_type, field_name): + return field_name in _IS_CIRCULAR_REFERENCE.get(object_type, frozenset()) + +def transform_proto_json(proto_json: dict, object_type: str, supported_models: dict) -> list[dict]: + """ + Transform keys of proto json dict to flattened dictionaries with model field keys. + + This also handles placing `_type` fields for generic references, + a certain form of deduplication and resolution of existing objects. + """ + entities = _transform_proto_json_1(proto_json, object_type) + logger.error(f"_transform_proto_json_1 entities: {json.dumps(entities, default=lambda o: str(o), indent=4)}") + entities = _topo_sort(entities) + logger.error(f"_topo_sort: {json.dumps(entities, default=lambda o: str(o), indent=4)}") + deduplicated = _fingerprint_dedupe(entities) + logger.error(f"_fingerprint_dedupe: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") + deduplicated = _topo_sort(deduplicated) + logger.error(f"_topo_sort: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") + _set_slugs(deduplicated, supported_models) + logger.error(f"_set_slugs: {json.dumps(deduplicated, default=lambda o: str(o), indent=4)}") + resolved = _resolve_existing_references(deduplicated) + logger.error(f"_resolve_references: {json.dumps(resolved, default=lambda o: str(o), indent=4)}") + _set_defaults(resolved, supported_models) + logger.error(f"_set_defaults: {json.dumps(resolved, default=lambda o: str(o), indent=4)}") + + # handle post-create steps + output = _handle_post_creates(resolved) + logger.error(f"_handle_post_creates: {json.dumps(output, default=lambda o: str(o), indent=4)}") + + _check_unresolved_refs(output) + for entity in output: + entity.pop('_refs', None) + + return output + +def _transform_proto_json_1(proto_json: dict, object_type: str, context=None) -> list[dict]: # noqa: C901 + uuid = str(uuid4()) + node = { + "_object_type": object_type, + "_uuid": uuid, + "_refs": set(), + } + + # context pushed down from parent nodes + if context is not None: + for k, v in context.items(): + node[k] = v + if isinstance(v, UnresolvedReference): + node['_refs'].add(v.uuid) + + nodes = [node] + post_create = None + + for key, value in proto_json.items(): + ref_info = get_json_ref_info(object_type, key) + if ref_info is None: + node[_camel_to_snake_case(key)] = copy.deepcopy(value) + continue + 
+ nested_context = _nested_context(object_type, uuid, ref_info.field_name) + field_name = ref_info.field_name + is_circular = _is_circular_reference(object_type, field_name) + + if ref_info.is_generic: + node[field_name + "_type"] = ref_info.object_type + field_name = field_name + "_id" + + refs = [] + ref_value = None + if isinstance(value, list): + ref_value = [] + for item in value: + nested = _transform_proto_json_1(item, ref_info.object_type, nested_context) + nodes += nested + ref_uuid = nested[0]['_uuid'] + ref_value.append(UnresolvedReference( + object_type=ref_info.object_type, + uuid=ref_uuid, + )) + refs.append(ref_uuid) + else: + nested = _transform_proto_json_1(value, ref_info.object_type, nested_context) + nodes += nested + ref_uuid = nested[0]['_uuid'] + ref_value = UnresolvedReference( + object_type=ref_info.object_type, + uuid=ref_uuid, + ) + refs.append(ref_uuid) + + if is_circular: + if post_create is None: + post_create = { + "_uuid": str(uuid4()), + "_object_type": object_type, + "_refs": set(), + "_instance": node['_uuid'], + "_is_post_create": True, + } + post_create[field_name] = ref_value + post_create['_refs'].update(refs) + post_create['_refs'].add(node['_uuid']) + continue + + node[field_name] = ref_value + node['_refs'].update(refs) + + if post_create: + nodes.append(post_create) + + return nodes + + +def _topo_sort(entities: list[dict]) -> list[dict]: + """Topologically sort entities by reference.""" + by_uuid = {e['_uuid']: e for e in entities} + graph = defaultdict(set) + for entity in entities: + graph[entity['_uuid']] = entity['_refs'].copy() + + try: + ts = graphlib.TopologicalSorter(graph) + order = tuple(ts.static_order()) + return [by_uuid[uuid] for uuid in order] + except graphlib.CycleError as e: + # TODO the cycle error references the cycle here ... + raise ChangeSetException(f"Circular reference in entities: {e}", errors={ + "__all__": { + "message": "Unable to resolve circular reference in entities", + } + }) + + +def _set_defaults(entities: list[dict], supported_models: dict): + for entity in entities: + model_fields = supported_models.get(entity['_object_type']) + if model_fields is None: + raise ValidationError(f"Model for object type {entity['_object_type']} is not supported") + + for field_name, field_info in model_fields.get('fields', {}).items(): + if entity.get(field_name) is None and field_info.get("default") is not None: + entity[field_name] = field_info["default"] + +def _set_slugs(entities: list[dict], supported_models: dict): + for entity in entities: + model_fields = supported_models.get(entity['_object_type']) + if model_fields is None: + raise ValidationError(f"Model for object type {entity['_object_type']} is not supported") + + for field_name, field_info in model_fields.get('fields', {}).items(): + if field_info["type"] == "SlugField" and entity.get(field_name) is None: + entity[field_name] = _generate_slug(entity['_object_type'], entity) + +def _generate_slug(object_type, data): + """Generate a slug for a model instance.""" + source_value = get_primary_value(data, object_type) + if source_value is not None: + return slugify(str(source_value)) + return None + +def _fingerprint_dedupe(entities: list[dict]) -> list[dict]: + """ + Deduplicates/merges entities by fingerprint. 
+ + *list must be in topo order by reference already* + """ + by_fp = {} + deduplicated = [] + new_refs = {} # uuid -> uuid + + for entity in entities: + if entity.get('_is_post_create'): + fp = entity['_uuid'] + existing = None + else: + fp = fingerprint(entity, entity['_object_type']) + existing = by_fp.get(fp) + + if existing is None: + logger.debug(" * entity is new.") + new_entity = copy.deepcopy(entity) + _update_unresolved_refs(new_entity, new_refs) + by_fp[fp] = new_entity + deduplicated.append(fp) + else: + logger.debug(" * entity already exists.") + new_refs[entity['_uuid']] = existing['_uuid'] + merged = _merge_nodes(existing, entity) + _update_unresolved_refs(merged, new_refs) + by_fp[fp] = merged + + return [by_fp[fp] for fp in deduplicated] + +def _merge_nodes(a: dict, b: dict) -> dict: + """ + Merges two nodes. + + If there are any conflicts, an error is raised. + Ignores conflicts in fields that start with an underscore, + preferring a's value. + """ + merged = copy.deepcopy(a) + merged['_refs'] = a['_refs'] | b['_refs'] + + for k, v in b.items(): + if k.startswith("_"): + continue + if k in merged and merged[k] != v: + raise ValueError(f"Conflict merging {a} and {b} on {k}: {merged[k]} and {v}") + merged[k] = v + return merged + + +def _update_unresolved_refs(entity, new_refs): + if entity.get('_is_post_create'): + instance_uuid = entity['_instance'] + entity['_instance'] = new_refs.get(instance_uuid, instance_uuid) + + entity['_refs'] = {new_refs.get(r,r) for r in entity['_refs']} + + for k, v in entity.items(): + if isinstance(v, UnresolvedReference) and v.uuid in new_refs: + v.uuid = new_refs[v.uuid] + elif isinstance(v, (list, tuple)): + for item in v: + if isinstance(item, UnresolvedReference) and item.uuid in new_refs: + item.uuid = new_refs[item.uuid] + # TODO maps ... + +def _resolve_existing_references(entities: list[dict]) -> list[dict]: + seen = {} + new_refs = {} + resolved = [] + + for data in entities: + object_type = data['_object_type'] + data = copy.deepcopy(data) + _update_resolved_refs(data, new_refs) + + existing = find_existing_object(data, object_type) + if existing is not None: + logger.error(f"existing {data} -> {existing}") + fp = (object_type, existing.id) + if fp in seen: + logger.warning(f"objects resolved to the same existing id after deduplication: {seen[fp]} and {data}") + else: + seen[fp] = data + data['id'] = existing.id + data['_instance'] = existing + new_refs[data['_uuid']] = existing.id + resolved.append(data) + else: + data['id'] = UnresolvedReference(object_type, data['_uuid']) + _update_resolved_refs(data, new_refs) + resolved.append(data) + return resolved + +def _update_resolved_refs(data, new_refs): + for k, v in data.items(): + if isinstance(v, UnresolvedReference) and v.uuid in new_refs: + data[k] = new_refs[v.uuid] + elif isinstance(v, (list, tuple)): + new_items = [] + for item in v: + if isinstance(item, UnresolvedReference) and item.uuid in new_refs: + new_items.append(new_refs[item.uuid]) + else: + new_items.append(item) + data[k] = new_items + # TODO maps ... 
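For orientation, here is a minimal usage sketch of the transformer introduced above (illustrative only, not part of the patch). It assumes a configured NetBox environment where the plugin modules are importable; the payload contents and field values are hypothetical, while extract_supported_models() and transform_proto_json() are the functions added in this diff.

# Minimal sketch: drive the proto-JSON transformer end to end.
from netbox_diode_plugin.api.supported_models import extract_supported_models
from netbox_diode_plugin.api.transformer import transform_proto_json

# Hypothetical nested payload: a device that references a site by name.
proto_json = {
    "name": "router-1",
    "site": {"name": "Site A"},
}

supported_models = extract_supported_models()
entities = transform_proto_json(proto_json, "dcim.device", supported_models)

# The result is a topologically sorted list of flattened dicts. Entities that
# matched an existing NetBox object carry its primary key in "id"; entities
# that still need to be created carry an UnresolvedReference placeholder.
for entity in entities:
    print(entity["_object_type"], entity.get("id"))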
+ +def cleanup_unresolved_references(data: dict) -> list[str]: + """Find and stringify unresolved references in fields.""" + unresolved = set() + for k, v in data.items(): + if isinstance(v, UnresolvedReference): + if k != 'id': + unresolved.add(k) + data[k] = str(v) + elif isinstance(v, (list, tuple)): + items = [] + for item in v: + if isinstance(item, UnresolvedReference): + unresolved.add(k) + items.append(str(item)) + else: + items.append(item) + data[k] = items + # TODO maps + return sorted(unresolved) + +def _handle_post_creates(entities: list[dict]) -> list[dict]: + """Merge post-create steps into the entities they update whenever dependencies allow.""" + by_uuid = {e['_uuid']: (i, e) for i, e in enumerate(entities)} + out = [] + for entity in entities: + is_post_create = entity.pop('_is_post_create', False) + if not is_post_create: + out.append(entity) + continue + + instance = entity.get('_instance') + prior_index, prior_entity = by_uuid[instance] + + # a post create can be merged whenever the entities it relies on + # already exist (were resolved) or there are no dependencies between + # the object being updated and the post-create. + can_merge = all( + by_uuid[r][1].get('_instance') is not None + for r in entity['_refs'] + ) or sorted(by_uuid[r][0] for r in entity['_refs'])[-1] == prior_index + + if can_merge: + prior_entity.update([x for x in entity.items() if not x[0].startswith('_')]) + else: + entity['id'] = prior_entity['id'] + out.append(entity) + + return out + +def _check_unresolved_refs(entities: list[dict]) -> None: + seen = set() + for e in entities: + seen.add((e['_object_type'], e['_uuid'])) + for k, v in e.items(): + if isinstance(v, UnresolvedReference): + if (v.object_type, v.uuid) not in seen: + raise ChangeSetException( + f"Unresolved reference {v} in {e} does not refer to a prior created object (circular reference?)", + errors={ + e['_object_type']: { + k: ["unable to resolve reference"], + } + } + ) diff --git a/netbox_diode_plugin/api/urls.py b/netbox_diode_plugin/api/urls.py index 9fff272..cb6b3d4 100644 --- a/netbox_diode_plugin/api/urls.py +++ b/netbox_diode_plugin/api/urls.py @@ -5,12 +5,12 @@ from django.urls import include, path from netbox.api.routers import NetBoxRouter -from .views import ApplyChangeSetView, ObjectStateView +from .views import ApplyChangeSetView, GenerateDiffView router = NetBoxRouter() urlpatterns = [ - path("object-state/", ObjectStateView.as_view()), path("apply-change-set/", ApplyChangeSetView.as_view()), + path("generate-diff/", GenerateDiffView.as_view()), path("", include(router.urls)), ] diff --git a/netbox_diode_plugin/api/views.py b/netbox_diode_plugin/api/views.py index e791ab8..5f6d004 100644 --- a/netbox_diode_plugin/api/views.py +++ b/netbox_diode_plugin/api/views.py @@ -1,236 +1,99 @@ #!/usr/bin/env python # Copyright 2024 NetBox Labs Inc """Diode NetBox Plugin - API Views.""" -from typing import Any, Dict, Optional +import json +import logging +import re from django.apps import apps -from django.conf import settings -from packaging import version - -if version.parse(settings.VERSION).major >= 4: - from core.models import ObjectType as NetBoxType -else: - from django.contrib.contenttypes.models import ContentType as NetBoxType - -from django.core.exceptions import FieldError -from django.core.exceptions import ValidationError as DjangoValidationError -from django.db import models, transaction -from django.db.models import Q -from rest_framework import status, views +from django.db import transaction +from rest_framework import views
from rest_framework.exceptions import ValidationError from rest_framework.permissions import IsAuthenticated from rest_framework.response import Response -from utilities.api import get_serializer_for_model - -from netbox_diode_plugin.api.permissions import IsDiodeReader, IsDiodeWriter -from netbox_diode_plugin.api.serializers import ApplyChangeSetRequestSerializer, ObjectStateSerializer - - -def dynamic_import(name): - """Dynamically import a class from an absolute path string.""" - components = name.split(".") - mod = __import__(components[0]) - for comp in components[1:]: - mod = getattr(mod, comp) - return mod - - -def _get_index_class_fields(object_type: str | NetBoxType): - """ - Given an object type name (e.g., 'dcim.site'), dynamically find and return the corresponding Index class fields. - - :param object_type: Object type name in the format 'app_label.model_name' - :return: The corresponding model and its Index class (e.g., SiteIndex) field names or None. - """ - try: - if isinstance(object_type, str): - app_label, model_name = object_type.split('.') - else: - app_label, model_name = object_type.app_label, object_type.model - - model = apps.get_model(app_label, model_name) - - if app_label == "extras" and model_name == "tag": - app_label = "netbox_diode_plugin" - - index_module = dynamic_import(f"{app_label}.search.{model.__name__}Index") - fields = getattr(index_module, "fields", None) - field_names = [field[0] for field in fields] - - return model, field_names - - except (LookupError, ModuleNotFoundError, AttributeError, ValueError): - return None, None - -def _validate_model_instance_fields(instance, fields, value): - """ - Validate the model instance fields against the value. - - :param instance: The model instance. - :param fields: The fields of the model instance. - :param value: The value to validate against the model instance fields. 
- :return: fields list passed validation - """ - errors = {} - - # Set provided values to the instance fields - for field in fields: - if hasattr(instance, field): - # get the field type - field_cls = instance._meta.get_field(field).__class__ - field_value = _convert_field_value(field_cls, value) - setattr(instance, field, field_value) +from netbox_diode_plugin.api.applier import apply_changeset +from netbox_diode_plugin.api.common import Change, ChangeSet, ChangeSetException, ChangeSetResult +from netbox_diode_plugin.api.differ import generate_changeset +from netbox_diode_plugin.api.permissions import IsDiodeWriter - # Attempt to validate the instance - try: - instance.clean_fields() - except DjangoValidationError as e: - errors = e.message_dict - return errors +logger = logging.getLogger("netbox.diode_data") -def _convert_field_value(field_cls, value): - """Return the converted field value based on the field type.""" - if value is None: - return value +# Try to import Branch model at module level +Branch = None +try: + if apps.is_installed("netbox_branching"): + from netbox_branching.models import Branch +except ImportError: + logger.warning( + "netbox_branching plugin is installed but models could not be imported" + ) - try: - if issubclass(field_cls, (models.FloatField, models.DecimalField)): - return float(value) - if issubclass(field_cls, models.IntegerField): - return int(value) - except (ValueError, TypeError): - pass - return value +def get_entity_key(model_name): + """Get the entity key for a model name.""" + s = re.sub(r'([A-Z0-9]{2,})([A-Z])([a-z])', r'\1_\2\3', model_name) + s = re.sub(r'([a-z])([A-Z])', r'\1_\2', s) + s = re.sub(r'_+', '_', s.lower()) # snake + s = ''.join([word.capitalize() for word in s.split("_")]) # upperCamelCase + return s[0].lower() + s[1:] # lowerCamelCase -class ObjectStateView(views.APIView): - """ObjectState view.""" +class GenerateDiffView(views.APIView): + """GenerateDiff view.""" - permission_classes = [IsAuthenticated, IsDiodeReader] - - def _get_lookups(self, object_type_model: str) -> tuple: - """ - This method returns a tuple of related object lookups based on the provided object type model. - - Args: - ---- - object_type_model (str): The name of the object type model. - - Returns: - ------- - tuple: A tuple of related object lookups. The tuple is empty if the object type model does not match any - of the specified models. 
- - """ - if "'ipam.models.ip.ipaddress'" in object_type_model: - return ( - "assigned_object", - "assigned_object__device", - "assigned_object__device__site", - ) - if "'dcim.models.device_components.interface'" in object_type_model: - return "device", "device__site" - if "'dcim.models.devices.device'" in object_type_model: - return ("site",) - return () - - def _search_queryset(self, request): - """Search for objects according to object type using search index classes.""" - object_type = request.GET.get("object_type", None) - object_id = request.GET.get("id", None) - query = request.GET.get("q", None) + permission_classes = [IsAuthenticated, IsDiodeWriter] + def post(self, request, *args, **kwargs): + """Generate diff for entity.""" + try: + return self._post(request, *args, **kwargs) + except Exception: + import traceback + traceback.print_exc() + raise + + def _post(self, request, *args, **kwargs): + entity = request.data.get("entity") + object_type = request.data.get("object_type") + + if not entity: + raise ValidationError("Entity is required") if not object_type: - raise ValidationError("object_type parameter is required") - - if not object_id and not query: - raise ValidationError("id or q parameter is required") - - model, fields = _get_index_class_fields(object_type) - - if object_id: - queryset = model.objects.filter(id=object_id) - else: - q = Q() + raise ValidationError("Object type is required") - invalid_fields = _validate_model_instance_fields(model(), fields, query) + app_label, model_name = object_type.split(".") + model_class = apps.get_model(app_label, model_name) - fields = [field for field in fields if field not in invalid_fields] - - for field in fields: - q |= Q(**{f"{field}__exact": query}) # Exact match - - try: - queryset = model.objects.filter(q) - except DjangoValidationError: - queryset = model.objects.none() - pass + # Convert model name to lowerCamelCase for entity lookup + entity_key = get_entity_key(model_class.__name__) + original_entity_data = entity.get(entity_key) - lookups = self._get_lookups(str(model).lower()) - - if lookups: - queryset = queryset.prefetch_related(*lookups) - - additional_attributes_query_filter = ( - self._additional_attributes_query_filter() + if original_entity_data is None: + raise ValidationError( + f"No data found for {entity_key} in entity got: {entity.keys()}" ) - if additional_attributes_query_filter: - queryset = queryset.filter(**additional_attributes_query_filter) - - return queryset - - def get(self, request, *args, **kwargs): - """ - Return a JSON with object_type, object_change_id, and object. - - Search for objects according to object type. - If the obj_type parameter is not in the parameters, raise a ValidationError. - When object ID is provided in the request, search using it in the model specified by object type. - If ID is not provided, use the q parameter for searching. 
- Lookup is iexact - """ try: - queryset = self._search_queryset(request) - except (FieldError, ValueError): - return Response( - {"errors": ["invalid additional attributes provided"]}, - status=status.HTTP_400_BAD_REQUEST, + result = generate_changeset(original_entity_data, object_type) + except ChangeSetException as e: + logger.error(f"Error generating change set: {e}") + result = ChangeSetResult( + errors=e.errors, ) + return Response(result.to_dict(), status=result.get_status_code()) - self.check_object_permissions(request, queryset) + branch_id = request.headers.get("X-NetBox-Branch") - object_type = request.GET.get("object_type", None) - - serializer = ObjectStateSerializer( - queryset, - many=True, - context={ - "request": request, - "object_type": f"{object_type}", - }, - ) - - try: - if len(serializer.data) > 0: - return Response(serializer.data[0]) - return Response({}) - except AttributeError as e: - return Response( - {"errors": [f"Serializer error: {e.args[0]}"]}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def _additional_attributes_query_filter(self): - """Get the additional attributes query filter.""" - additional_attributes = {} - for attr in self.request.query_params: - if attr not in ["object_type", "id", "q", "_branch"]: - additional_attributes[attr] = self.request.query_params.get(attr) + # If branch ID is provided and branching plugin is installed, get branch name + if branch_id and Branch is not None: + try: + branch = Branch.objects.get(id=branch_id) + result.branch = {"id": branch.id, "name": branch.name} + except Branch.DoesNotExist: + logger.warning(f"Branch with ID {branch_id} does not exist") - return dict(additional_attributes.items()) + return Response(result.to_dict(), status=result.get_status_code()) class ApplyChangeSetView(views.APIView): @@ -238,438 +101,44 @@ class ApplyChangeSetView(views.APIView): permission_classes = [IsAuthenticated, IsDiodeWriter] - @staticmethod - def _get_object_type_model(object_type: str | NetBoxType): - """Get the object type model from object_type.""" - if isinstance(object_type, str): - app_label, model_name = object_type.split(".") - object_content_type = NetBoxType.objects.get_by_natural_key(app_label, model_name) - else: - object_content_type = object_type - return object_content_type, object_content_type.model_class() - - def _get_assigned_object_type(self, model_name: str): - """Get the object type model from applied IPAddress assigned object.""" - assignable_object_types = { - "interface": "dcim.interface", - } - return assignable_object_types.get(model_name.lower(), None) - - def _add_nested_opts(self, fields, key, value): - if isinstance(value, dict): - for nested_key, nested_value in value.items(): - self._add_nested_opts(fields, f"{key}__{nested_key}", nested_value) - elif not isinstance(value, list): - fields[key] = value - - def _get_serializer( - self, - change_type: str, - object_id: int, - object_type: str, - object_data: dict, - ): - """Get the serializer for the object type.""" - _, object_type_model_class = self._get_object_type_model(object_type) - - if change_type == "create": - return self._get_serializer_to_create(object_data, object_type, object_type_model_class) - - if change_type == "update": - return self._get_serializer_to_update(object_data, object_id, object_type, object_type_model_class) - - raise ValidationError("Invalid change_type") - - def _get_serializer_to_create(self, object_data, object_type, object_type_model_class): - # Get object data fields that are not dictionaries or lists - 
fields = self._get_fields_to_find_existing_objects(object_data, object_type) - # Check if the object already exists - try: - instance = object_type_model_class.objects.get(**fields) - return get_serializer_for_model(object_type_model_class)( - instance, data=object_data, context={"request": self.request, "pk": instance.pk} - ) - except object_type_model_class.DoesNotExist: - pass - serializer = get_serializer_for_model(object_type_model_class)( - data=object_data, context={"request": self.request} - ) - return serializer - - def _get_serializer_to_update(self, object_data, object_id, object_type, object_type_model_class): - lookups = () - fields = {} - primary_ip_to_set: Optional[dict] = None - if object_id: - fields["id"] = object_id - elif object_type == "dcim.device" and any( - object_data.get(attr) for attr in ("primary_ip4", "primary_ip6") - ): - ip_address = self._retrieve_primary_ip_address( - "primary_ip4", object_data - ) - - if ip_address is None: - ip_address = self._retrieve_primary_ip_address( - "primary_ip6", object_data - ) - - if ip_address is None: - raise ValidationError("primary IP not found") - - if ip_address: - primary_ip_to_set = { - "id": ip_address.id, - "family": ip_address.family, - } - - lookups = ("site",) - fields["name"] = object_data.get("name") - fields["site__name"] = object_data.get("site").get("name") - else: - raise ValidationError("object_id parameter is required") + def post(self, request, *args, **kwargs): + """Apply change set for entity.""" try: - instance = object_type_model_class.objects.prefetch_related(*lookups).get(**fields) - if object_type == "dcim.device" and primary_ip_to_set: - object_data = { - "id": instance.id, - "device_type": instance.device_type.id, - "role": instance.role.id, - "site": instance.site.id, - f'primary_ip{primary_ip_to_set.get("family")}': primary_ip_to_set.get( - "id" - ), - } - except object_type_model_class.DoesNotExist: - raise ValidationError(f"object with id {object_id} does not exist") - serializer = get_serializer_for_model(object_type_model_class)( - instance, data=object_data, context={"request": self.request} + return self._post(request, *args, **kwargs) + except Exception: + import traceback + + traceback.print_exc() + raise + + def _post(self, request, *args, **kwargs): + data = request.data.copy() + + changes = [] + if 'changes' in data: + changes = [ + Change( + change_type=change.get('change_type'), + object_type=change.get('object_type'), + object_id=change.get('object_id'), + ref_id=change.get('ref_id'), + data=change.get('data'), + before=change.get('before'), + new_refs=change.get('new_refs', []), + ) for change in data['changes'] + ] + change_set = ChangeSet( + id=data.get('id'), + changes=changes, ) - return serializer - - def _get_fields_to_find_existing_objects(self, object_data, object_type): - fields = {} - for key, value in object_data.items(): - self._add_nested_opts(fields, key, value) - - match object_type: - case "dcim.interface" | "virtualization.vminterface": - mac_address = fields.pop("mac_address", None) - if mac_address is not None: - fields["primary_mac_address__mac_address"] = mac_address - case "ipam.ipaddress": - fields.pop("assigned_object_type") - fields["assigned_object_type_id"] = fields.pop("assigned_object_id") - case "ipam.prefix" | "virtualization.cluster": - if scope_type := object_data.get("scope_type"): - scope_type_model, _ = self._get_object_type_model(scope_type) - fields["scope_type"] = scope_type_model - case "virtualization.virtualmachine": - if 
cluster_scope_type := fields.get("cluster__scope_type"): - cluster_scope_type_model, _ = self._get_object_type_model(cluster_scope_type) - fields["cluster__scope_type"] = cluster_scope_type_model - case "virtualization.vminterface": - if cluster_scope_type := fields.get("virtual_machine__cluster__scope_type"): - cluster_scope_type_model, _ = self._get_object_type_model(cluster_scope_type) - fields["virtual_machine__cluster__scope_type"] = cluster_scope_type_model - - return fields - - def _retrieve_primary_ip_address(self, primary_ip_attr: str, object_data: dict): - """Retrieve the primary IP address object.""" - ip_address = object_data.get(primary_ip_attr) - if ip_address is None: - return None - - ipaddress_assigned_object = object_data.get(primary_ip_attr, {}).get( - "assigned_object", None - ) - if ipaddress_assigned_object is None: - return None - - interface = ipaddress_assigned_object.get("interface") - if interface is None: - return None - - interface_device = interface.get("device") - if interface_device is None: - return None - object_type_mode, object_type_model_class = self._get_object_type_model("ipam.ipaddress") - ip_address_object = object_type_model_class.objects.get( - address=ip_address.get("address"), - interface__name=interface.get("name"), - interface__device__name=interface_device.get("name"), - interface__device__site__name=interface_device.get("site").get("name"), - ) - return ip_address_object - - @staticmethod - def _get_error_response(change_set_id, error): - """Get the error response.""" - return Response( - { - "change_set_id": change_set_id, - "result": "failed", - "errors": error, - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - def _retrieve_assigned_object_interface_device_lookup_args( - self, device: dict - ) -> dict: - """ - This method retrieves the lookup arguments for the interface device of an assigned object. - - Args: - ---- - device (dict): A dictionary containing the details of the device. It should contain either 'id' or 'name' - of the device and 'site' which is another dictionary containing either 'id' or 'name' of the site. - - Returns: - ------- - dict: A dictionary containing the lookup arguments for the interface device. - - Raises: - ------ - ValidationError: If neither 'id' nor 'name' is provided for the device or the site. 
- - """ - args = {} - if device.get("id"): - args["device__id"] = device.get("id") - elif device.get("name"): - args["device__name"] = device.get("name") - else: - raise ValidationError( - "Interface device needs to have either id or name provided" - ) - - site = device.get("site", {}) - if site: - if site.get("id"): - args["device__site__id"] = site.get("id") - elif site.get("name"): - args["device__site__name"] = site.get("name") - else: - raise ValidationError( - "Interface device site needs to have either id or name provided" - ) - return args - - def _handle_ipaddress_assigned_object(self, object_data: dict) -> Optional[Dict[str, Any]]: - """Handle IPAM IP address assigned object.""" - ipaddress_assigned_object = object_data.get("assigned_object", None) - - if ipaddress_assigned_object is not None: - assigned_object_keys = list(ipaddress_assigned_object.keys()) - model_name = assigned_object_keys[0] - assigned_object_type = self._get_assigned_object_type(model_name) - assigned_object_model, object_type_model_class = self._get_object_type_model(assigned_object_type) - assigned_object_properties_dict = dict( - ipaddress_assigned_object[model_name].items() - ) - - if len(assigned_object_properties_dict) == 0: - return {"assigned_object": f"properties not provided for {model_name}"} - - try: - lookups = ( - ("device", "device__site") if model_name == "interface" else () - ) - args = {} - - if model_name == "interface": - if assigned_object_properties_dict.get("id"): - args["id"] = assigned_object_properties_dict.get("id") - elif assigned_object_properties_dict.get("name"): - try: - device = assigned_object_properties_dict.get("device", {}) - args = self._retrieve_assigned_object_interface_device_lookup_args( - device - ) - args["name"] = assigned_object_properties_dict.get("name") - except ValidationError as e: - return {"assigned_object": str(e)} - else: - error = f"provided properties '{assigned_object_properties_dict}' not sufficient to retrieve {model_name}" - return {"assigned_object": error} - - assigned_object_instance = ( - object_type_model_class.objects.prefetch_related(*lookups).get(**args) - ) - except object_type_model_class.DoesNotExist: - return { - "assigned_object": f"Assigned object with name {ipaddress_assigned_object[model_name]} does not exist" - } - - object_data.pop("assigned_object") - object_data["assigned_object_type"] = assigned_object_type - object_data["assigned_object_id"] = assigned_object_instance.id - return None - - def _handle_interface_mac_address_compat(self, instance, object_type: str, object_data: dict) -> Optional[Dict[str, Any]]: - """Handle interface mac address backward compatibility.""" - # TODO(ltucker): deprecate. 
- if object_type != "dcim.interface" and object_type != "virtualization.vminterface": - return None - - if object_data.get("mac_address"): - mac_address_value = object_data.pop("mac_address") - mac_address_instance, _ = instance.mac_addresses.get_or_create( - mac_address=mac_address_value, - ) - instance.primary_mac_address = mac_address_instance - instance.save() - return None - - def _handle_scope(self, object_data: dict, is_nested: bool = False) -> Optional[Dict[str, Any]]: - """Handle scope object.""" - if object_data.get("site"): - site = object_data.pop("site") - scope_type = "dcim.site" - object_type_model, object_type_model_class = self._get_object_type_model(scope_type) - # Scope type of the nested object happens to be resolved differently than in the top-level object - # and is expected to be a content type object instead of "app_label.model_name" string format - if is_nested: - object_data["scope_type"] = object_type_model - else: - object_data["scope_type"] = scope_type - site_id = site.get("id", None) - if site_id is None: - try: - site = object_type_model_class.objects.get( - name=site.get("name") - ) - site_id = site.id - except object_type_model_class.DoesNotExist: - return {"site": f"site with name {site.get('name')} does not exist"} - - object_data["scope_id"] = site_id - - return None - - def _transform_object_data(self, object_type: str, object_data: dict) -> Optional[Dict[str, Any]]: - """Transform object data.""" - errors = None - - match object_type: - case "ipam.ipaddress": - errors = self._handle_ipaddress_assigned_object(object_data) - case "ipam.prefix": - errors = self._handle_scope(object_data, False) - case "virtualization.cluster": - errors = self._handle_scope(object_data, False) - case "virtualization.virtualmachine": - if cluster_object_data := object_data.get("cluster"): - errors = self._handle_scope(cluster_object_data, True) - object_data["cluster"] = cluster_object_data - case "virtualization.vminterface": - cluster_object_data = object_data.get("virtual_machine", {}).get("cluster") - if cluster_object_data is not None: - errors = self._handle_scope(cluster_object_data, True) - object_data["virtual_machine"]["cluster"] = cluster_object_data - case _: - pass - - return errors - - def post(self, request, *args, **kwargs): - """ - Create a new change set and apply it to the current state. - - The request body should contain a list of changes to be applied. 
- """ - serializer_errors = [] - - request_serializer = ApplyChangeSetRequestSerializer(data=request.data) - - change_set_id = self.request.data.get("change_set_id", None) - - if not request_serializer.is_valid(): - for field_error_name in request_serializer.errors: - self._extract_serializer_errors( - field_error_name, request_serializer, serializer_errors - ) - - return self._get_error_response(change_set_id, serializer_errors) - - change_set = request_serializer.data.get("change_set", None) - try: with transaction.atomic(): - for change in change_set: - change_id = change.get("change_id", None) - change_type = change.get("change_type", None) - object_type = change.get("object_type", None) - object_data = change.get("data", None) - object_id = change.get("object_id", None) - - errors = self._transform_object_data(object_type, object_data) - - if errors is not None: - serializer_errors.append({"change_id": change_id, **errors}) - continue - - serializer = self._get_serializer(change_type, object_id, object_type, object_data) - - # Skip creating an object if it already exists - if change_type == "create" and serializer.context.get("pk"): - continue - - if serializer.is_valid(): - serializer.save() - else: - errors_dict = { - field_name: f"{field_name}: {str(field_errors[0])}" - for field_name, field_errors in serializer.errors.items() - } - - serializer_errors.append( - {"change_id": change_id, **errors_dict} - ) - continue - - errors = self._handle_interface_mac_address_compat(serializer.instance, object_type, object_data) - if errors is not None: - serializer_errors.append({"change_id": change_id, **errors}) - continue - if len(serializer_errors) > 0: - raise ApplyChangeSetException - except ApplyChangeSetException: - return self._get_error_response(change_set_id, serializer_errors) - - data = {"change_set_id": change_set_id, "result": "success"} - return Response(data, status=status.HTTP_200_OK) - - def _extract_serializer_errors( - self, field_error_name, request_serializer, serializer_errors - ): - """Extract serializer errors.""" - if isinstance(request_serializer.errors[field_error_name], dict): - for error_index, error_values in request_serializer.errors[ - field_error_name - ].items(): - errors_dict = { - "change_id": request_serializer.data.get("change_set")[ - error_index - ].get("change_id") - } - - for field_name, field_errors in error_values.items(): - errors_dict[field_name] = f"{str(field_errors[0])}" - - serializer_errors.append(errors_dict) - else: - errors = { - field_error_name: f"{str(field_errors)}" - for field_errors in request_serializer.errors[field_error_name] - } - - serializer_errors.append(errors) - - -class ApplyChangeSetException(Exception): - """ApplyChangeSetException used to cause atomic transaction rollback.""" + result = apply_changeset(change_set) + except ChangeSetException as e: + logger.error(f"Error applying change set: {e}") + result = ChangeSetResult( + id=change_set.id, + errors=e.errors, + ) - pass + return Response(result.to_dict(), status=result.get_status_code()) diff --git a/netbox_diode_plugin/tests/test_api_apply_change_set.py b/netbox_diode_plugin/tests/test_api_apply_change_set.py index 62950d4..b2d27c0 100644 --- a/netbox_diode_plugin/tests/test_api_apply_change_set.py +++ b/netbox_diode_plugin/tests/test_api_apply_change_set.py @@ -29,6 +29,8 @@ User = get_user_model() +def _get_error(response, object_name, field): + return response.json().get("errors", {}).get(object_name, {}).get(field, []) class BaseApplyChangeSet(APITestCase): 
"""Base ApplyChangeSet test case.""" @@ -178,19 +180,20 @@ class ApplyChangeSetTestCase(BaseApplyChangeSet): @staticmethod def get_change_id(payload, index): """Get change_id from payload.""" - return payload.get("change_set")[index].get("change_id") + return payload.get("changes")[index].get("change_id") def test_change_type_create_return_200(self): """Test create change_type with successful.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "dcim.site", "object_id": None, + "ref_id": "1", "data": { "name": "Site A", "slug": "site-a", @@ -208,6 +211,7 @@ def test_change_type_create_return_200(self): "object_version": None, "object_type": "dcim.interface", "object_id": None, + "ref_id": "2", "data": { "name": "Interface 1", "device": self.devices[1].pk, @@ -220,25 +224,23 @@ def test_change_type_create_return_200(self): "object_version": None, "object_type": "ipam.ipaddress", "object_id": None, + "ref_id": "3", "data": { "address": "192.163.2.1/24", - "assigned_object": { - "interface": {"id": self.interfaces[2].pk}, - }, + "assigned_object_type": "dcim.interface", + "assigned_object_id": self.interfaces[2].pk }, }, ], } - response = self.send_request(payload) - - self.assertEqual(response.json().get("result"), "success") + _ = self.send_request(payload) def test_change_type_update_return_200(self): """Test update change_type with successful.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "update", @@ -259,26 +261,26 @@ def test_change_type_update_return_200(self): ], } - response = self.client.post( + _ = self.client.post( self.url, payload, format="json", **self.user_header ) site_updated = Site.objects.get(id=20) - self.assertEqual(response.json().get("result"), "success") self.assertEqual(site_updated.name, "Site A") def test_change_type_create_with_error_return_400(self): """Test create change_type with wrong payload.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "dcim.site", "object_id": None, + "ref_id": "1", "data": { "name": "Site A", "slug": "site-a", @@ -294,25 +296,19 @@ def test_change_type_create_with_error_return_400(self): } response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - site_created = Site.objects.filter(name="Site A") - self.assertEqual(response.json().get("result"), "failed") - self.assertEqual( - response.json().get("errors")[0].get("change_id"), - self.get_change_id(payload, 0), - ) self.assertIn( 'Expected a list of items but got type "int".', - response.json().get("errors")[0].get("asns"), + _get_error(response, "changes[0]", "asns"), ) self.assertFalse(site_created.exists()) def test_change_type_update_with_error_return_400(self): """Test update change_type with wrong payload.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "update", @@ -336,29 +332,24 @@ def test_change_type_update_with_error_return_400(self): response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) site_updated = Site.objects.get(id=20) - - self.assertEqual(response.json().get("result"), "failed") 
- self.assertEqual( - response.json().get("errors")[0].get("change_id"), - self.get_change_id(payload, 0), - ) self.assertIn( 'Expected a list of items but got type "int".', - response.json().get("errors")[0].get("asns"), + _get_error(response, "changes[0]", "asns") ) self.assertEqual(site_updated.name, "Site 2") def test_change_type_create_with_multiples_objects_return_200(self): """Test create change type with two objects.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "dcim.site", "object_id": None, + "ref_id": "1", "data": { "name": "Site Z", "slug": "site-z", @@ -376,6 +367,7 @@ def test_change_type_create_with_multiples_objects_return_200(self): "object_version": None, "object_type": "dcim.device", "object_id": None, + "ref_id": "2", "data": { "device_type": self.device_types[1].pk, "role": self.roles[1].pk, @@ -388,15 +380,13 @@ def test_change_type_create_with_multiples_objects_return_200(self): ], } - response = self.send_request(payload) - - self.assertEqual(response.json().get("result"), "success") + _ = self.send_request(payload) def test_change_type_update_with_multiples_objects_return_200(self): """Test update change type with two objects.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "update", @@ -432,26 +422,26 @@ def test_change_type_update_with_multiples_objects_return_200(self): ], } - response = self.send_request(payload) + _ = self.send_request(payload) site_updated = Site.objects.get(id=20) device_updated = Device.objects.get(id=10) - self.assertEqual(response.json().get("result"), "success") self.assertEqual(site_updated.name, "Site A") self.assertEqual(device_updated.name, "Test Device 3") def test_change_type_create_and_update_with_error_in_one_object_return_400(self): """Test create and update change type with one object with error.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "dcim.site", "object_id": None, + "ref_id": "1", "data": { "name": "Site Z", "slug": "site-z", @@ -486,14 +476,9 @@ def test_change_type_create_and_update_with_error_in_one_object_return_400(self) site_created = Site.objects.filter(name="Site Z") device_created = Device.objects.filter(name="Test Device 4") - self.assertEqual(response.json().get("result"), "failed") - self.assertEqual( - response.json().get("errors")[0].get("change_id"), - self.get_change_id(payload, 1), - ) self.assertIn( - "Related object not found using the provided numeric ID", - response.json().get("errors")[0].get("device_type"), + "Related object not found using the provided numeric ID: 3", + _get_error(response, "changes[1]", "device_type"), ) self.assertFalse(site_created.exists()) self.assertFalse(device_created.exists()) @@ -501,14 +486,15 @@ def test_change_type_create_and_update_with_error_in_one_object_return_400(self) def test_multiples_create_type_error_in_two_objects_return_400(self): """Test create with error in two objects.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "dcim.site", "object_id": None, + 
"ref_id": "1", "data": { "name": "Site Z", "slug": "site-z", @@ -526,6 +512,7 @@ def test_multiples_create_type_error_in_two_objects_return_400(self): "object_version": None, "object_type": "dcim.device", "object_id": None, + "ref_id": "2", "data": { "device_type": 3, "role": self.roles[1].pk, @@ -541,6 +528,7 @@ def test_multiples_create_type_error_in_two_objects_return_400(self): "object_version": None, "object_type": "dcim.device", "object_id": None, + "ref_id": "3", "data": { "device_type": 100, "role": 10, @@ -558,24 +546,9 @@ def test_multiples_create_type_error_in_two_objects_return_400(self): site_created = Site.objects.filter(name="Site Z") device_created = Device.objects.filter(name="Test Device 4") - self.assertEqual(response.json().get("result"), "failed") - - self.assertEqual( - response.json().get("errors")[0].get("change_id"), - self.get_change_id(payload, 1), - ) self.assertIn( - "Related object not found using the provided numeric ID", - response.json().get("errors")[0].get("device_type"), - ) - - self.assertEqual( - response.json().get("errors")[1].get("change_id"), - self.get_change_id(payload, 2), - ) - self.assertIn( - "Related object not found using the provided numeric ID", - response.json().get("errors")[1].get("device_type"), + "Related object not found using the provided numeric ID: 3", + _get_error(response, "changes[1]", "device_type"), ) self.assertFalse(site_created.exists()) @@ -584,8 +557,8 @@ def test_multiples_create_type_error_in_two_objects_return_400(self): def test_change_type_update_with_object_id_not_exist_return_400(self): """Test update object with nonexistent object_id.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "update", @@ -612,14 +585,17 @@ def test_change_type_update_with_object_id_not_exist_return_400(self): site_updated = Site.objects.get(id=20) - self.assertEqual(response.json()[0], "object with id 30 does not exist") + self.assertIn( + "dcim.site with id 30 does not exist", + _get_error(response, "changes[0]", "object_id"), + ) self.assertEqual(site_updated.name, "Site 2") def test_change_set_id_field_not_provided_return_400(self): """Test update object with change_set_id incorrect.""" payload = { - "change_set_id": None, - "change_set": [ + "id": None, + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "update", @@ -642,21 +618,21 @@ def test_change_set_id_field_not_provided_return_400(self): response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - self.assertIsNone(response.json().get("errors")[0].get("change_id")) - self.assertEqual( - response.json().get("errors")[0].get("change_set_id"), - "This field may not be null.", + self.assertIsNone(response.json().get("errors", {}).get("change_id", None)) + self.assertIn( + "Change set ID is required", + _get_error(response, "changeset", "id"), ) - def test_change_set_id_change_id_and_change_type_field_not_provided_return_400( + def test_change_type_field_not_provided_return_400( self, ): - """Test update object with change_set_id, change_id, and change_type incorrect.""" + """Test update object with change_type incorrect.""" payload = { - "change_set_id": "", - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { - "change_id": "", + "change_id": str(uuid.uuid4()), "change_type": "", "object_version": None, "object_type": "dcim.site", @@ -677,35 +653,23 @@ def 
test_change_set_id_change_id_and_change_type_field_not_provided_return_400( response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - self.assertEqual( - response.json().get("errors")[0].get("change_set_id"), - "Must be a valid UUID.", - ) - self.assertEqual( - response.json().get("errors")[1].get("change_id"), - "Must be a valid UUID.", - ) - self.assertEqual( - response.json().get("errors")[1].get("change_type"), - "This field may not be blank.", + self.assertIn( + "Unsupported change type ''", + _get_error(response, "changes[0]", "change_type"), ) def test_change_set_id_field_and_change_set_not_provided_return_400(self): """Test update object with change_set_id and change_set incorrect.""" payload = { - "change_set_id": "", - "change_set": [], + "id": "", + "changes": [], } response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - self.assertEqual( - response.json().get("errors")[0].get("change_set_id"), - "Must be a valid UUID.", - ) - self.assertEqual( - response.json().get("errors")[1].get("change_set"), - "This list may not be empty.", + self.assertIn( + "Change set ID is required", + _get_error(response, "changeset", "id"), ) def test_change_type_and_object_type_provided_return_400( @@ -713,14 +677,15 @@ def test_change_type_and_object_type_provided_return_400( ): """Test change_type and object_type incorrect.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": None, "object_version": None, "object_type": "", "object_id": None, + "ref_id": "1", "data": { "name": "Site A", "slug": "site-a", @@ -737,6 +702,7 @@ def test_change_type_and_object_type_provided_return_400( "object_version": None, "object_type": "dcim.site", "object_id": None, + "ref_id": "2", "data": { "name": "Site Z", "slug": "site-z", @@ -752,472 +718,340 @@ def test_change_type_and_object_type_provided_return_400( response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - # First item of change_set - self.assertEqual( - response.json().get("errors")[0].get("change_id"), - self.get_change_id(payload, 0), - ) - self.assertEqual( - response.json().get("errors")[0].get("change_type"), - "This field may not be null.", - ) - self.assertEqual( - response.json().get("errors")[0].get("object_type"), - "This field may not be blank.", - ) - - # Second item of change_set - self.assertEqual( - response.json().get("errors")[1].get("change_id"), - self.get_change_id(payload, 1), - ) - self.assertEqual( - response.json().get("errors")[1].get("change_type"), - "This field may not be blank.", + self.assertIn( + "Unsupported change type 'None'", + _get_error(response, "changes[0]", "change_type"), ) + # self.assertEqual( + # response.json().get("errors")[0].get("change_type"), + # "This field may not be null.", + # ) + # self.assertEqual( + # response.json().get("errors")[0].get("object_type"), + # "This field may not be blank.", + # ) + + # # Second item of change_set + # self.assertEqual( + # response.json().get("errors")[1].get("change_id"), + # self.get_change_id(payload, 1), + # ) + # self.assertEqual( + # response.json().get("errors")[1].get("change_type"), + # "This field may not be blank.", + # ) def test_create_ip_address_return_200(self): """Test create ip_address with successful.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "create", 
"object_version": None, "object_type": "ipam.ipaddress", "object_id": None, + "ref_id": "1", "data": { "address": "192.161.3.1/24", - "assigned_object": { - "interface": { - "name": self.interfaces[3].name, - "device": { - "name": self.devices[0].name, - "site": {"name": self.sites[0].name}, - }, - }, - }, + "assigned_object_id": self.interfaces[3].pk, + "assigned_object_type": "dcim.interface", }, }, ], } - response = self.send_request(payload) - - self.assertEqual(response.json().get("result"), "success") - - def test_create_ip_address_return_400(self): - """Test create ip_address with missing interface name.""" - payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ - { - "change_id": str(uuid.uuid4()), - "change_type": "create", - "object_version": None, - "object_type": "ipam.ipaddress", - "object_id": None, - "data": { - "address": "192.161.3.1/24", - "assigned_object": { - "interface": { - # Forcing to miss the name of the interface - "device": { - "name": self.devices[0].name, - "site": {"name": self.sites[0].name}, - }, - }, - }, - }, - }, - ], - } - response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - - self.assertIn( - "not sufficient to retrieve interface", - response.json().get("errors")[0].get("assigned_object"), - ) - - def test_create_ip_address_not_exist_interface_return_400(self): - """Test create ip_address with not valid interface.""" - payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ - { - "change_id": str(uuid.uuid4()), - "change_type": "create", - "object_version": None, - "object_type": "ipam.ipaddress", - "object_id": None, - "data": { - "address": "192.161.3.1/24", - "assigned_object": { - "interface": { - "name": "not_exist", - "device": { - "name": self.devices[0].name, - "site": {"name": self.sites[0].name}, - }, - }, - }, - }, - }, - ], - } - response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - - self.assertIn( - "does not exist", - response.json().get("errors")[0].get("assigned_object"), - ) - - def test_create_ip_address_missing_device_interface_return_400(self): - """Test create ip_address with missing device interface name.""" - payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ - { - "change_id": str(uuid.uuid4()), - "change_type": "create", - "object_version": None, - "object_type": "ipam.ipaddress", - "object_id": None, - "data": { - "address": "192.161.3.1/24", - "assigned_object": { - "interface": { - "name": "not_exist", - "device": { - "site": {"name": self.sites[0].name}, - }, - }, - }, - }, - }, - ], - } - response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - - self.assertIn( - "Interface device needs to have either id or name provided", - response.json().get("errors")[0].get("assigned_object"), - ) - - def test_create_ip_address_missing_interface_device_site_return_400(self): - """Test create ip_address with missing interface device site name.""" - payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ - { - "change_id": str(uuid.uuid4()), - "change_type": "create", - "object_version": None, - "object_type": "ipam.ipaddress", - "object_id": None, - "data": { - "address": "192.161.3.1/24", - "assigned_object": { - "interface": { - "name": "not_exist", - "device": { - "name": self.devices[0].name, - "site": {"facility": "Betha"}, - }, - }, - }, - }, - }, - ], - } - response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - - self.assertIn( - "Interface device site needs to have 
either id or name provided", - response.json().get("errors")[0].get("assigned_object"), - ) - - def test_primary_ip_address_not_found_return_400(self): - """Test update primary ip address with site name.""" - payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ - { - "change_id": str(uuid.uuid4()), - "change_type": "update", - "object_version": None, - "object_type": "dcim.device", - "data": { - "name": self.devices[0].name, - "site": {"name": self.sites[0].name}, - "primary_ip6": { - "address": "2001:DB8:0000:0000:244:17FF:FEB6:D37D/64", - }, - }, - }, - ], - } - response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - - self.assertEqual(response.json()[0], "primary IP not found") + _ = self.send_request(payload) + + # def test_create_ip_address_return_400(self): + # """Test create ip_address with missing interface name.""" + # payload = { + # "id": str(uuid.uuid4()), + # "change_set": [ + # { + # "change_id": str(uuid.uuid4()), + # "change_type": "create", + # "object_version": None, + # "object_type": "ipam.ipaddress", + # "object_id": None, + # "data": { + # "address": "192.161.3.1/24", + # "assigned_object": { + # "interface": { + # # Forcing to miss the name of the interface + # "device": { + # "name": self.devices[0].name, + # "site": {"name": self.sites[0].name}, + # }, + # }, + # }, + # }, + # }, + # ], + # } + # response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) + + # self.assertIn( + # "not sufficient to retrieve interface", + # response.json().get("errors")[0].get("assigned_object"), + # ) + + # def test_create_ip_address_not_exist_interface_return_400(self): + # """Test create ip_address with not valid interface.""" + # payload = { + # "id": str(uuid.uuid4()), + # "changes": [ + # { + # "change_id": str(uuid.uuid4()), + # "change_type": "create", + # "object_version": None, + # "object_type": "ipam.ipaddress", + # "object_id": None, + # "data": { + # "address": "192.161.3.1/24", + # "assigned_object": { + # "interface": { + # "name": "not_exist", + # "device": { + # "name": self.devices[0].name, + # "site": {"name": self.sites[0].name}, + # }, + # }, + # }, + # }, + # }, + # ], + # } + # response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) + + # self.assertIn( + # "does not exist", + # response.json().get("errors")[0].get("assigned_object"), + # ) + + # def test_create_ip_address_missing_device_interface_return_400(self): + # """Test create ip_address with missing device interface name.""" + # payload = { + # "id": str(uuid.uuid4()), + # "changes": [ + # { + # "change_id": str(uuid.uuid4()), + # "change_type": "create", + # "object_version": None, + # "object_type": "ipam.ipaddress", + # "object_id": None, + # "ref_id": "1", + # "data": { + # "address": "192.161.3.1/24", + # "assigned_object": { + # "interface": { + # "name": "not_exist", + # "device": { + # "site": {"name": self.sites[0].name}, + # }, + # }, + # }, + # }, + # }, + # ], + # } + # response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) + + # self.assertIn( + # "Interface device needs to have either id or name provided", + # response.json().get("errors", {}) # .get("assigned_object"), + # ) + + # def test_create_ip_address_missing_interface_device_site_return_400(self): + # """Test create ip_address with missing interface device site name.""" + # payload = { + # "id": str(uuid.uuid4()), + # "changes": [ + # { + # "change_id": str(uuid.uuid4()), + # "change_type": "create", + # "object_version": 
None, + # "object_type": "ipam.ipaddress", + # "object_id": None, + # "ref_id": "1", + # "data": { + # "address": "192.161.3.1/24", + # "assigned_object": { + # "interface": { + # "name": "not_exist", + # "device": { + # "name": self.devices[0].name, + # "site": {"facility": "Betha"}, + # }, + # }, + # }, + # }, + # }, + # ], + # } + # response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) + + # self.assertIn( + # "Interface device site needs to have either id or name provided", + # response.json().get("errors")[0].get("assigned_object"), + # ) + + # def test_primary_ip_address_not_found_return_400(self): + # """Test update primary ip address with site name.""" + # payload = { + # "id": str(uuid.uuid4()), + # "changes": [ + # { + # "change_id": str(uuid.uuid4()), + # "change_type": "update", + # "object_version": None, + # "object_type": "dcim.device", + # "data": { + # "name": self.devices[0].name, + # "site": {"name": self.sites[0].name}, + # "primary_ip6": { + # "address": "2001:DB8:0000:0000:244:17FF:FEB6:D37D/64", + # }, + # }, + # }, + # ], + # } + # response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) + + # self.assertEqual(response.json()[0], "primary IP not found") def test_add_primary_ip_address_to_device(self): """Add primary ip address to device.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "update", "object_version": None, "object_type": "dcim.device", + "object_id": self.devices[0].pk, "data": { "name": self.devices[0].name, "site": {"name": self.sites[0].name}, - "primary_ip4": { - "address": str(self.ip_addresses[0].address), - "assigned_object": { - "interface": { - "name": self.interfaces[0].name, - "device": { - "name": self.devices[0].name, - "site": {"name": self.sites[0].name}, - }, - }, - }, - }, + "primary_ip4": self.ip_addresses[0].pk }, }, ], } - response = self.send_request(payload) - + _ = self.send_request(payload) device_updated = Device.objects.get(id=10) - self.assertEqual(response.json().get("result"), "success") self.assertEqual(device_updated.name, self.devices[0].name) self.assertEqual(device_updated.primary_ip4, self.ip_addresses[0]) - def test_create_and_update_interface_with_compat_mac_address_field(self): - """Test create interface using backward compatible mac_address field.""" - payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ - { - "change_id": str(uuid.uuid4()), - "change_type": "create", - "object_version": None, - "object_type": "dcim.interface", - "object_id": None, - "data": { - "name": "Interface 6", - "type": "virtual", - "mac_address": "00:00:00:00:00:01", - "device": { - "id": self.devices[1].pk, - }, - }, - }, - ], - } - - response = self.send_request(payload) - self.assertEqual(response.json().get("result"), "success") - self.assertEqual(Interface.objects.count(), 6) - interface_id = Interface.objects.order_by('-id').first().id - self.assertEqual(Interface.objects.get(id=interface_id).mac_address, "00:00:00:00:00:01") - - payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ - { - "change_id": str(uuid.uuid4()), - "change_type": "update", - "object_version": None, - "object_type": "dcim.interface", - "object_id": interface_id, - "data": { - "name": "Interface 6", - "mac_address": "00:00:00:00:00:02", - "type": "virtual", - "device": { - "id": self.devices[1].pk, - }, - }, - }, - ], - } - response = self.send_request(payload) - 
self.assertEqual(response.json().get("result"), "success") - self.assertEqual(response.json().get("result"), "success") - self.assertEqual(Interface.objects.count(), 6) - self.assertEqual(Interface.objects.get(id=interface_id).mac_address, "00:00:00:00:00:02") - - def test_create_and_update_vminterface_with_compat_mac_address_field(self): - """Test create vminterface using backward compatible mac_address field.""" - payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ - { - "change_id": str(uuid.uuid4()), - "change_type": "create", - "object_version": None, - "object_type": "virtualization.vminterface", - "object_id": None, - "data": { - "name": "VM Interface 1", - "mac_address": "00:00:00:00:00:01", - "virtual_machine": { - "id": self.virtual_machines[0].pk, - }, - }, - }, - ], - } - - response = self.send_request(payload) - self.assertEqual(response.json().get("result"), "success") - self.assertEqual(VMInterface.objects.count(), 1) - interface_id = VMInterface.objects.order_by('-id').first().id - self.assertEqual(VMInterface.objects.get(id=interface_id).mac_address, "00:00:00:00:00:01") - - payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ - { - "change_id": str(uuid.uuid4()), - "change_type": "update", - "object_version": None, - "object_type": "virtualization.vminterface", - "object_id": interface_id, - "data": { - "name": "VM Interface 1", - "mac_address": "00:00:00:00:00:02", - "virtual_machine": { - "id": self.virtual_machines[0].pk, - }, - }, - }, - ], - } - response = self.send_request(payload) - self.assertEqual(response.json().get("result"), "success") - self.assertEqual(VMInterface.objects.count(), 1) - self.assertEqual(VMInterface.objects.get(id=interface_id).mac_address, "00:00:00:00:00:02") - def test_create_prefix_with_site_stored_as_scope(self): """Test create prefix with site stored as scope.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "ipam.prefix", "object_id": None, + "ref_id": "1", "data": { "prefix": "192.168.0.0/24", - "site": { - "name": self.sites[0].name, - }, + "scope_id": self.sites[0].pk, + "scope_type": "dcim.site", }, }, ], } - response = self.send_request(payload) - - self.assertEqual(response.json().get("result"), "success") + _ = self.send_request(payload) self.assertEqual(Prefix.objects.get(prefix="192.168.0.0/24").scope, self.sites[0]) def test_create_prefix_with_unknown_site_fails(self): """Test create prefix with unknown site fails.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "ipam.prefix", "object_id": None, + "ref_id": "1", "data": { "prefix": "192.168.0.0/24", - "site": { - "name": "unknown site" - }, + "scope_id": 99, + "scope_type": "dcim.site", }, }, ], } response = self.send_request(payload, status_code=status.HTTP_400_BAD_REQUEST) - - self.assertEqual(response.json().get("result"), "failed") self.assertIn( - 'site with name unknown site does not exist', - response.json().get("errors")[0].get("site"), + 'Please select a site.', + _get_error(response, "changes[0]", "scope"), ) self.assertFalse(Prefix.objects.filter(prefix="192.168.0.0/24").exists()) def test_create_virtualization_cluster_with_site_stored_as_scope(self): """Test create cluster with site stored as scope.""" payload = { - 
"change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "virtualization.cluster", "object_id": None, + "ref_id": "1", "data": { "name": "Cluster 3", "type": { "name": self.cluster_types[0].name, }, - "site": { - "name": self.sites[0].name, - }, + "scope_id": self.sites[0].pk, + "scope_type": "dcim.site", }, }, ], } - response = self.send_request(payload) - - self.assertEqual(response.json().get("result"), "success") + _ = self.send_request(payload) self.assertEqual(Cluster.objects.get(name="Cluster 3").scope, self.sites[0]) def test_create_virtualmachine_with_cluster_site_stored_as_scope(self): """Test create virtualmachine with cluster site stored as scope.""" payload = { - "change_set_id": str(uuid.uuid4()), - "change_set": [ + "id": str(uuid.uuid4()), + "changes": [ + { + "change_id": str(uuid.uuid4()), + "change_type": "update", + "object_version": None, + "object_type": "virtualization.cluster", + "object_id": self.clusters[0].pk, + "data": { + "scope_id": self.sites[0].pk, + "scope_type": "dcim.site", + }, + }, { "change_id": str(uuid.uuid4()), "change_type": "create", "object_version": None, "object_type": "virtualization.virtualmachine", "object_id": None, + "ref_id": "1", "data": { "name": "VM foobar", - "site": { - "name": self.sites[0].name, - }, - "cluster": { - "name": self.clusters[0].name, - "type": { - "name": self.cluster_types[0].name, - }, - "site": { - "name": self.sites[0].name, - }, - }, + "site": self.sites[0].pk, + "cluster": self.clusters[0].pk }, }, ], } - response = self.send_request(payload) - - self.assertEqual(response.json().get("result"), "success") + _ = self.send_request(payload) self.assertEqual(VirtualMachine.objects.get(name="VM foobar", site_id=self.sites[0].id).cluster.scope, self.sites[0]) diff --git a/netbox_diode_plugin/tests/test_api_diff_and_apply.py b/netbox_diode_plugin/tests/test_api_diff_and_apply.py new file mode 100644 index 0000000..c4ca36e --- /dev/null +++ b/netbox_diode_plugin/tests/test_api_diff_and_apply.py @@ -0,0 +1,184 @@ +#!/usr/bin/env python +# Copyright 2024 NetBox Labs Inc +"""Diode NetBox Plugin - Tests.""" + +import logging +from uuid import uuid4 + +from dcim.models import Device, Interface, Site +from django.contrib.auth import get_user_model +from ipam.models import IPAddress +from rest_framework import status +from users.models import Token +from utilities.testing import APITestCase + +logger = logging.getLogger(__name__) + +User = get_user_model() + + +class GenerateDiffAndApplyTestCase(APITestCase): + """GenerateDiff -> ApplyChangeSet test cases.""" + + def setUp(self): + """Set up the test case.""" + self.diff_url = "/netbox/api/plugins/diode/generate-diff/" + self.apply_url = "/netbox/api/plugins/diode/apply-change-set/" + self.user = User.objects.create_user(username="testcommonuser") + self.user_token = Token.objects.create(user=self.user) + self.user_header = {"HTTP_AUTHORIZATION": f"Token {self.user_token.key}"} + + self.add_permissions("netbox_diode_plugin.add_diode") + + def test_generate_diff_and_apply_create_interface_with_tags(self): + """Test generate diff and apply create interface with tags.""" + interface_uuid = str(uuid4()) + payload = { + "timestamp": 1, + "object_type": "dcim.interface", + "entity": { + "interface": { + "name": f"Interface {interface_uuid}", + "mtu": "1500", + "mode": "access", + "tags": [ + {"name": "tag 1"} + ], + "type": "1000base-t", + "device": 
{ + "name": f"Device {uuid4()}", + "deviceType": { + "model": f"Device Type {uuid4()}", + "manufacturer": { + "name": f"Manufacturer {uuid4()}" + } + }, + "role": { + "name": f"Role {uuid4()}" + }, + "site": { + "name": f"Site {uuid4()}" + } + }, + "enabled": True, + "description": "Physical interface" + } + } + } + _, response = self.diff_and_apply(payload) + new_interface = Interface.objects.get(name=f"Interface {interface_uuid}") + self.assertEqual(new_interface.tags.count(), 1) + self.assertEqual(new_interface.tags.first().name, "tag 1") + + + def test_generate_diff_and_apply_create_site(self): + """Test generate diff and apply create site.""" + site_uuid = str(uuid4()) + payload = { + "timestamp": 1, + "object_type": "dcim.site", + "entity": { + "site": { + "name": f"Site {site_uuid}", + "slug": f"site-{site_uuid}", + }, + } + } + + _, response = self.diff_and_apply(payload) + new_site = Site.objects.get(name=f"Site {site_uuid}") + self.assertEqual(new_site.slug, f"site-{site_uuid}") + + def test_generate_diff_and_apply_create_interface_with_primary_mac_address(self): + """Test generate diff and apply create interface with primary mac address.""" + interface_uuid = str(uuid4()) + payload = { + "timestamp": 1, + "object_type": "dcim.interface", + "entity": { + "interface": { + "name": f"Interface {interface_uuid}", + "type": "1000base-t", + "device": { + "name": f"Device {uuid4()}", + "role": { + "Name": f"Role {uuid4()}", + }, + "site": { + "Name": f"Site {uuid4()}", + }, + "deviceType": { + "manufacturer": { + "Name": f"Manufacturer {uuid4()}", + }, + "model": f"Device Type {uuid4()}", + }, + }, + "primaryMacAddress": { + "mac_address": "00:00:00:00:00:01", + }, + }, + } + } + + _, response = self.diff_and_apply(payload) + new_interface = Interface.objects.get(name=f"Interface {interface_uuid}") + self.assertEqual(new_interface.primary_mac_address.mac_address, "00:00:00:00:00:01") + + def test_generate_diff_and_apply_create_device_with_primary_ip4(self): + """Test generate diff and apply create device with primary ip4.""" + device_uuid = str(uuid4()) + interface_uuid = str(uuid4()) + addr = "192.168.1.1" + payload = { + "timestamp": 1, + "object_type": "ipam.ipaddress", + "entity": { + "ipAddress": { + "address": addr, + "assignedObjectInterface": { + "name": f"Interface {interface_uuid}", + "type": "1000base-t", + "device": { + "name": f"Device {device_uuid}", + "role": { + "name": f"Role {uuid4()}", + }, + "site": { + "name": f"Site {uuid4()}", + }, + "deviceType": { + "manufacturer": { + "name": f"Manufacturer {uuid4()}", + }, + "model": f"Device Type {uuid4()}", + }, + "primaryIp4": { + "address": addr, + }, + }, + }, + }, + }, + } + + _, response = self.diff_and_apply(payload) + new_ipaddress = IPAddress.objects.get(address=addr) + self.assertEqual(new_ipaddress.assigned_object.name, f"Interface {interface_uuid}") + device = Device.objects.get(name=f"Device {device_uuid}") + self.assertEqual(device.primary_ip4.pk, new_ipaddress.pk) + + def diff_and_apply(self, payload): + """Diff and apply the payload.""" + response1 = self.client.post( + self.diff_url, data=payload, format="json", **self.user_header + ) + self.assertEqual(response1.status_code, status.HTTP_200_OK) + diff = response1.json() + + response2 = self.client.post( + self.apply_url, data=diff, format="json", **self.user_header + ) + self.assertEqual(response2.status_code, status.HTTP_200_OK) + return (response1, response2) diff --git
a/netbox_diode_plugin/tests/test_api_generate_diff.py b/netbox_diode_plugin/tests/test_api_generate_diff.py new file mode 100644 index 0000000..014a9cf --- /dev/null +++ b/netbox_diode_plugin/tests/test_api_generate_diff.py @@ -0,0 +1,107 @@ +#!/usr/bin/env python +# Copyright 2024 NetBox Labs Inc +"""Diode NetBox Plugin - Tests.""" + +from dcim.models import Site +from django.contrib.auth import get_user_model +from rest_framework import status +from users.models import Token +from utilities.testing import APITestCase + +User = get_user_model() + +class GenerateDiffTestCase(APITestCase): + """GenerateDiff test cases.""" + + def setUp(self): + """Set up the test case.""" + self.url = "/netbox/api/plugins/diode/generate-diff/" + + self.user = User.objects.create_user(username="testcommonuser") + self.add_permissions("netbox_diode_plugin.add_diode") + self.user_token = Token.objects.create(user=self.user) + + self.user_header = {"HTTP_AUTHORIZATION": f"Token {self.user_token.key}"} + + self.site = Site.objects.create( + name="Site Generate Diff 1", + slug="site-generate-diff-1", + facility="Alpha", + description="First test site", + physical_address="123 Fake St Lincoln NE 68588", + shipping_address="123 Fake St Lincoln NE 68588", + comments="Lorem ipsum etcetera", + ) + + + def test_generate_diff_create_site(self): + """Test generate diff create site.""" + payload = { + "timestamp": 1, + "object_type": "dcim.site", + "entity": { + "site": { + "name": "A New Site", + "slug": "a-new-site", + }, + } + } + + response = self.send_request(payload) + self.assertEqual(response.status_code, status.HTTP_200_OK) + cs = response.json() + self.assertIsNotNone(cs.get("id")) + changes = cs.get("changes", []) + self.assertEqual(len(changes), 1) + change = changes[0] + self.assertEqual(change.get("object_type"), "dcim.site") + self.assertEqual(change.get("change_type"), "create") + self.assertEqual(change.get("object_id"), None) + self.assertIsNotNone(change.get("ref_id")) + + data = change.get("data", {}) + self.assertEqual(data.get("name"), "A New Site") + self.assertEqual(data.get("slug"), "a-new-site") + + def test_generate_diff_update_site(self): + """Test generate diff update site.""" + payload = { + "timestamp": 1, + "object_type": "dcim.site", + "entity": { + "site": { + "name": "Site Generate Diff 1", + "slug": "site-generate-diff-1", + "comments": "An updated comment", + }, + } + } + + response = self.send_request(payload) + self.assertEqual(response.status_code, status.HTTP_200_OK) + cs = response.json() + self.assertIsNotNone(cs.get("id")) + changes = cs.get("changes", []) + self.assertEqual(len(changes), 1) + change = changes[0] + self.assertEqual(change.get("object_type"), "dcim.site") + self.assertEqual(change.get("change_type"), "update") + self.assertEqual(change.get("object_id"), self.site.id) + self.assertEqual(change.get("ref_id"), None) + self.assertEqual(change.get("data").get("name"), "Site Generate Diff 1") + + data = change.get("data", {}) + self.assertEqual(data.get("name"), "Site Generate Diff 1") + self.assertEqual(data.get("slug"), "site-generate-diff-1") + self.assertEqual(data.get("comments"), "An updated comment") + + + + def send_request(self, payload, status_code=status.HTTP_200_OK): + """Post the payload to the url and return the response.""" + response = self.client.post( + self.url, data=payload, format="json", **self.user_header + ) + self.assertEqual(response.status_code, status_code) + return response diff --git
a/netbox_diode_plugin/tests/test_api_object_state.py b/netbox_diode_plugin/tests/test_api_object_state.py deleted file mode 100644 index d13ef35..0000000 --- a/netbox_diode_plugin/tests/test_api_object_state.py +++ /dev/null @@ -1,391 +0,0 @@ -#!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc -"""Diode NetBox Plugin - Tests.""" - -from dcim.models import ( - Device, - DeviceRole, - DeviceType, - Interface, - Manufacturer, - Rack, - Site, -) -from django.contrib.auth import get_user_model -from ipam.models import IPAddress -from netaddr import IPNetwork -from rest_framework import status -from users.models import Token -from utilities.testing import APITestCase -from virtualization.models import Cluster, ClusterType - -User = get_user_model() - - -class ObjectStateTestCase(APITestCase): - """ObjectState test cases.""" - - @classmethod - def setUpClass(cls): - """Set up class.""" - super().setUpClass() - - cls.sites = ( - Site( - name="Site 1", - slug="site-1", - facility="Alpha", - description="First test site", - physical_address="123 Fake St Lincoln NE 68588", - shipping_address="123 Fake St Lincoln NE 68588", - comments="Lorem ipsum etcetera", - ), - Site( - name="Site 2", - slug="site-2", - facility="Bravo", - description="Second test site", - physical_address="725 Cyrus Valleys Suite 761 Douglasfort NE 57761", - shipping_address="725 Cyrus Valleys Suite 761 Douglasfort NE 57761", - comments="Lorem ipsum etcetera", - ), - Site( - name="Site 3", - slug="site-3", - facility="Charlie", - description="Third test site", - physical_address="2321 Dovie Dale East Cristobal AK 71959", - shipping_address="2321 Dovie Dale East Cristobal AK 71959", - comments="Lorem ipsum etcetera", - ), - ) - Site.objects.bulk_create(cls.sites) - - cls.manufacturer = ( - Manufacturer(name="Cisco", slug="cisco"), - Manufacturer(name="Manufacturer 2", slug="manufacturer-2"), - ) - - Manufacturer.objects.bulk_create(cls.manufacturer) - - cls.device_types = ( - DeviceType( - manufacturer=cls.manufacturer[0], - model="ISR4321", - slug="isr4321", - ), - DeviceType( - manufacturer=cls.manufacturer[1], - model="ISR4321", - slug="isr4321", - ), - DeviceType( - manufacturer=cls.manufacturer[1], - model="Device Type 2", - slug="device-type-2", - u_height=2, - ), - ) - DeviceType.objects.bulk_create(cls.device_types) - - cls.roles = ( - DeviceRole(name="Device Role 1", slug="device-role-1", color="ff0000"), - DeviceRole(name="Device Role 2", slug="device-role-2", color="00ff00"), - ) - DeviceRole.objects.bulk_create(cls.roles) - - cls.racks = ( - Rack(name="Rack 1", site=cls.sites[0]), - Rack(name="Rack 2", site=cls.sites[1]), - ) - Rack.objects.bulk_create(cls.racks) - - cluster_type = ClusterType.objects.create( - name="Cluster Type 1", slug="cluster-type-1" - ) - - cls.clusters = ( - Cluster(name="Cluster 1", type=cluster_type), - Cluster(name="Cluster 2", type=cluster_type), - ) - Cluster.objects.bulk_create(cls.clusters) - - cls.devices = ( - Device( - id=10, - device_type=cls.device_types[0], - role=cls.roles[0], - name="Device 1", - site=cls.sites[0], - rack=cls.racks[0], - cluster=cls.clusters[0], - local_context_data={"A": 1}, - ), - Device( - id=20, - device_type=cls.device_types[0], - role=cls.roles[0], - name="Device 2", - site=cls.sites[0], - rack=cls.racks[0], - cluster=cls.clusters[0], - local_context_data={"B": 2}, - ), - ) - Device.objects.bulk_create(cls.devices) - - cls.interfaces = ( - Interface(name="Interface 1", device=cls.devices[0], type="1000baset"), - Interface(name="Interface 2", 
device=cls.devices[0], type="1000baset"), - Interface(name="Interface 3", device=cls.devices[0], type="1000baset"), - Interface(name="Interface 4", device=cls.devices[0], type="1000baset"), - Interface(name="Interface 5", device=cls.devices[0], type="1000baset"), - ) - Interface.objects.bulk_create(cls.interfaces) - - cls.ip_addresses = ( - IPAddress( - address=IPNetwork("10.0.0.1/24"), assigned_object=cls.interfaces[0] - ), - IPAddress( - address=IPNetwork("192.0.2.1/24"), assigned_object=cls.interfaces[1] - ), - ) - IPAddress.objects.bulk_create(cls.ip_addresses) - - def setUp(self): - """Set up test.""" - self.root_user = User.objects.create_user( - username="root_user", is_staff=True, is_superuser=True - ) - self.root_token = Token.objects.create(user=self.root_user) - - self.user = User.objects.create_user(username="testcommonuser") - self.add_permissions("netbox_diode_plugin.view_diode") - self.user_token = Token.objects.create(user=self.user) - - # another_user does not have permission. - self.another_user = User.objects.create_user(username="another_user") - self.another_user_token = Token.objects.create(user=self.another_user) - - self.root_header = {"HTTP_AUTHORIZATION": f"Token {self.root_token.key}"} - self.user_header = {"HTTP_AUTHORIZATION": f"Token {self.user_token.key}"} - self.another_user_header = { - "HTTP_AUTHORIZATION": f"Token {self.another_user_token.key}" - } - - self.url = "/netbox/api/plugins/diode/object-state/" - - def test_return_object_state_using_id(self): - """Test searching using id parameter - Root User.""" - site_id = Site.objects.get(name=self.sites[0].name).id - query_parameters = {"id": site_id, "object_type": "dcim.site"} - - response = self.client.get(self.url, query_parameters, **self.root_header) - - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.json().get("object").get("name"), "Site 1") - - def test_return_object_state_using_q(self): - """Test searching using q parameter - Root User.""" - query_parameters = {"q": "Site 2", "object_type": "dcim.site"} - - response = self.client.get(self.url, query_parameters, **self.root_header) - - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.json().get("object").get("name"), "Site 2") - - def test_object_not_found_return_empty(self): - """Test empty searching - Root User.""" - query_parameters = {"q": "Site 10", "object_type": "dcim.site"} - - response = self.client.get(self.url, query_parameters, **self.root_header) - - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.json(), {}) - - def test_missing_object_type_return_400(self): - """Test API behavior with missing object type - Root User.""" - query_parameters = {"q": "Site 10", "object_type": ""} - - response = self.client.get(self.url, query_parameters, **self.root_header) - - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - - def test_missing_q_and_id_parameters_return_400(self): - """Test API behavior with missing q and ID parameters - Root User.""" - query_parameters = {"object_type": "dcim.site"} - - response = self.client.get(self.url, query_parameters, **self.root_header) - - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - - def test_request_user_not_authenticated_return_403(self): - """Test API behavior with user unauthenticated.""" - query_parameters = {"id": 1, "object_type": "dcim.site"} - - response = self.client.get(self.url, query_parameters) - - self.assertEqual(response.status_code, 
status.HTTP_403_FORBIDDEN) - - def test_common_user_with_permissions_get_object_state_using_id(self): - """Test searching using id parameter for Common User with permission.""" - site_id = Site.objects.get(name=self.sites[0]).id - query_parameters = {"id": site_id, "object_type": "dcim.site"} - - response = self.client.get(self.url, query_parameters, **self.user_header) - - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.json().get("object").get("name"), "Site 1") - - def test_common_user_without_permissions_get_object_state_using_id_return_403(self): - """ - Test searching using id parameter for Common User without permission. - - User has no permissions. - """ - query_parameters = {"id": 1, "object_type": "dcim.device"} - - response = self.client.get( - self.url, query_parameters, **self.another_user_header - ) - - self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - - def test_return_object_state_using_q_objects_with_different_manufacturer_return_cisco_manufacturer( - self, - ): - """Test searching using q parameter - DevicesTypes with different manufacturer.""" - query_parameters = { - "q": "ISR4321", - "object_type": "dcim.devicetype", - "manufacturer__name": "Cisco", - } - - response = self.client.get(self.url, query_parameters, **self.root_header) - - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.json().get("object").get("model"), "ISR4321") - self.assertEqual( - response.json().get("object").get("manufacturer").get("name"), "Cisco" - ) - - def test_invalid_object_state_using_q_objects_and_wrong_additional_attributes_return_400( - self, - ): - """Test searching using q parameter - invalid additional attributes.""" - query_parameters = { - "q": "ISR4321", - "object_type": "dcim.devicetype", - "attr_name": "manufacturer.name", - "attr_value": "Cisco", - } - - response = self.client.get(self.url, query_parameters, **self.root_header) - - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - - def test_common_user_with_permissions_get_ip_state_using_id(self): - """Test searching for ip using id.""" - query_parameters = { - "id": self.ip_addresses[0].id, - "object_type": "ipam.ipaddress", - } - - response = self.client.get(self.url, query_parameters, **self.user_header) - self.assertEqual(response.status_code, status.HTTP_200_OK) - - self.assertEqual(response.json().get("object_type"), "ipam.ipaddress") - self.assertEqual( - response.json().get("object").get("address"), - self.ip_addresses[0].address.__str__(), - ) - self.assertEqual( - response.json() - .get("object") - .get("assigned_object") - .get("interface") - .get("name"), - self.interfaces[0].name, - ) - - def test_common_user_with_permissions_get_device_state_using_q_objects(self): - """Test searching for device using q parameter.""" - query_parameters = { - "q": self.devices[0].name, - "object_type": "dcim.device", - "site": self.sites[0].id, - } - - response = self.client.get(self.url, query_parameters, **self.user_header) - self.assertEqual(response.status_code, status.HTTP_200_OK) - - self.assertEqual(response.json().get("object_type"), "dcim.device") - self.assertEqual( - response.json().get("object").get("name"), self.devices[0].name - ) - self.assertEqual( - response.json().get("object").get("site").get("name"), self.sites[0].name - ) - - def test_common_user_with_permissions_get_interface_state_using_q_objects(self): - """Test searching for interface using q parameter.""" - query_parameters = { - "q": 
self.interfaces[0].name, - "object_type": "dcim.interface", - "device": self.devices[0].id, - "device__site": self.sites[0].id, - } - - response = self.client.get(self.url, query_parameters, **self.user_header) - self.assertEqual(response.status_code, status.HTTP_200_OK) - - self.assertEqual(response.json().get("object_type"), "dcim.interface") - self.assertEqual( - response.json().get("object").get("name"), self.interfaces[0].name - ) - self.assertEqual( - response.json().get("object").get("device").get("name"), - self.devices[0].name, - ) - - def test_common_user_with_permissions_get_ip_state_using_q_objects(self): - """Test searching for ip using q parameter.""" - query_parameters = { - "q": self.ip_addresses[0].address.__str__(), - "object_type": "ipam.ipaddress", - "interface": self.interfaces[0].id, - "interface__device": self.devices[0].id, - "interface__device__site": self.sites[0].id, - } - - response = self.client.get(self.url, query_parameters, **self.user_header) - self.assertEqual(response.status_code, status.HTTP_200_OK) - - self.assertEqual(response.json().get("object_type"), "ipam.ipaddress") - self.assertEqual( - response.json().get("object").get("address"), - self.ip_addresses[0].address.__str__(), - ) - self.assertEqual( - response.json() - .get("object") - .get("assigned_object") - .get("interface") - .get("name"), - self.interfaces[0].name, - ) - - def test_common_user_get_object_state_with_branch_parameter_specified(self): - """Test searching accepts _branch parameter with additional attributes specified.""" - query_parameters = { - "q": self.ip_addresses[0].address.__str__(), - "object_type": "ipam.ipaddress", - "interface": self.interfaces[0].id, - "_branch": "" - } - - response = self.client.get(self.url, query_parameters, **self.user_header) - - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.json().get("object_type"), "ipam.ipaddress") diff --git a/netbox_diode_plugin/tests/test_api_serializers.py b/netbox_diode_plugin/tests/test_api_serializers.py deleted file mode 100644 index 00e9547..0000000 --- a/netbox_diode_plugin/tests/test_api_serializers.py +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env python -# Copyright 2024 NetBox Labs Inc -"""Diode NetBox Plugin - Tests.""" -from unittest.mock import MagicMock - -from dcim.models import Site -from django.test import TestCase -from extras.api.serializers import TagSerializer -from extras.models import Tag - -from netbox_diode_plugin.api.serializers import DiodeIPAddressSerializer, DiodeSiteSerializer, get_diode_serializer - - -class SerializersTestCase(TestCase): - """Test case for the serializers.""" - - def test_get_diode_serializer(self): - """Check the diode serializer is found.""" - site = Site.objects.create(name="test") - assert get_diode_serializer(site) == DiodeSiteSerializer - - tag = Tag.objects.create(name="test") - assert get_diode_serializer(tag) == TagSerializer - - - def test_get_assigned_object_returns_none_if_no_assigned_object(self): - """Check the assigned object is None if not provided.""" - obj = MagicMock() - obj.assigned_object = None - serializer = DiodeIPAddressSerializer() - result = serializer.get_assigned_object(obj) - self.assertIsNone(result) diff --git a/pyproject.toml b/pyproject.toml index 35f4dea..2a99b4e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -56,6 +56,8 @@ build-backend = "setuptools.build_meta" line-length = 140 exclude = [ "*_pb2*", + "netbox_diode_plugin/api/plugin_utils.py", + "docker/*", ] [tool.ruff.format]
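
Reviewer note, not part of the patch: the new test_api_diff_and_apply.py cases exercise the round trip that replaces the old nested-object payloads — an entity is posted to generate-diff, and the returned change set (a top-level "id" plus a "changes" list whose entries carry "ref_id" references for objects that do not exist yet) is posted unchanged to apply-change-set. A minimal standalone sketch of that flow, assuming a reachable NetBox instance; the host and token below are placeholders, while the endpoint paths and payload shape mirror the tests:

import requests

BASE = "http://localhost:8000/netbox/api/plugins/diode"  # placeholder host; paths match the tests above
HEADERS = {"Authorization": "Token <api-token>"}  # placeholder API token

entity = {
    "timestamp": 1,
    "object_type": "dcim.site",
    "entity": {"site": {"name": "Example Site", "slug": "example-site"}},
}

# 1. Ask the plugin to compute a change set for the ingested entity.
diff_response = requests.post(f"{BASE}/generate-diff/", json=entity, headers=HEADERS)
diff_response.raise_for_status()
change_set = diff_response.json()  # {"id": "<uuid>", "changes": [{"change_type": "create", "object_type": "dcim.site", "ref_id": "...", "data": {...}}]}

# 2. Apply the change set as-is; new objects are referenced by ref_id until they are created.
apply_response = requests.post(f"{BASE}/apply-change-set/", json=change_set, headers=HEADERS)
apply_response.raise_for_status()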